[Telink] Change factory data generator utility (#24213)
* [Telink] Manufacturing Partition Generator Utility
This tool can be used to generate factory data for a batch of
devices.
Signed-off-by: Maciej Bojczuk <maciej.bojczuk@telink-semi.com>
* [Telink] Add additional python requirements
The MFG tool requires some additional Python modules, such as:
* future
* pypng
* PyQRCode
Signed-off-by: Maciej Bojczuk <maciej.bojczuk@telink-semi.com>
* [Telink] MFG tool integration with build system
Use the new tool for factory data generation during the build when the
configs below are set:
CONFIG_CHIP_FACTORY_DATA=y
CONFIG_CHIP_FACTORY_DATA_BUILD=y
Signed-off-by: Maciej Bojczuk <maciej.bojczuk@telink-semi.com>
* [Telink] Remove old, not needed scripts
Remove old, not needed python scripts for factory data generation.
Signed-off-by: Maciej Bojczuk <maciej.bojczuk@telink-semi.com>
Signed-off-by: Maciej Bojczuk <maciej.bojczuk@telink-semi.com>
diff --git a/config/telink/chip-module/CMakeLists.txt b/config/telink/chip-module/CMakeLists.txt
index 6113325..2328ef2 100644
--- a/config/telink/chip-module/CMakeLists.txt
+++ b/config/telink/chip-module/CMakeLists.txt
@@ -327,7 +327,7 @@
if (CONFIG_CHIP_FACTORY_DATA_MERGE_WITH_FIRMWARE)
add_custom_target(merge_factory_data ALL
COMMAND
- dd if=${PROJECT_BINARY_DIR}/factory_data.bin of=${PROJECT_BINARY_DIR}/zephyr.bin bs=1024 seek=2000
+ dd if=${PROJECT_BINARY_DIR}/factory/factory_data.bin of=${PROJECT_BINARY_DIR}/zephyr.bin bs=1024 seek=2000
)
if (CONFIG_CHIP_OTA_IMAGE_BUILD)
add_dependencies(merge_factory_data merge_mcuboot)
diff --git a/config/telink/chip-module/generate_factory_data.cmake b/config/telink/chip-module/generate_factory_data.cmake
index a1c9f0e..6b7681d 100644
--- a/config/telink/chip-module/generate_factory_data.cmake
+++ b/config/telink/chip-module/generate_factory_data.cmake
@@ -15,49 +15,63 @@
#
-# Create a JSON file based on factory data given via kConfigs.
+# Create a binary file with factory data given via kConfigs.
#
-# This function creates a list of arguments for external script and then run it to write a JSON file.
-# Created JSON file can be checked using JSON SCHEMA file if it is provided.
+# This function creates a list of arguments for an external script and then runs it to write a factory data file.
#
# This script can be manipulated using following kConfigs:
# - To merge generated factory data with final zephyr.hex file set kConfig CONFIG_CHIP_FACTORY_DATA_MERGE_WITH_FIRMWARE=y
# - To use default certification paths set CONFIG_CHIP_FACTORY_DATA_USE_DEFAULTS_CERTS_PATH=y
#
-# During generation process a some file will be created in zephyr's build directory:
-# - <factory_data_target>.json a file containing all factory data written in JSON format.
+# During the generation process some files will be created in zephyr's build directory under the factory subdirectory:
+# - factory_data.bin
+# - factory_data.hex
+# - DAC_cert.der
+# - DAC_cert.pem
+# - DAC_key.pem
+# - DAC_private_key.bin
+# - DAC_public_key.bin
+# - pai_cert.der
+# - onb_codes.csv
+# - pin_disc.csv
+# - qrcode.png
+# - summary.json
#
# [Args]:
# factory_data_target - a name for target to generate factory_data.
# script_path - a path to script that makes a JSON factory data file from given arguments.
-# schema_path - a path to JSON schema file which can be used to verify generated factory data JSON file.
-# This argument is optional, if you don't want to verify the JSON file put it empty "".
# output_path - a path to output directory, where created JSON file will be stored.
-function(telink_create_factory_data_json factory_data_target script_path schema_path output_path)
+function(telink_create_factory_data_json factory_data_target script_path output_path)
# set script args for future purpose
set(script_args)
## generate all script arguments
-string(APPEND script_args "--sn \"${CONFIG_CHIP_DEVICE_SERIAL_NUMBER}\"\n")
-string(APPEND script_args "--date \"${CONFIG_CHIP_DEVICE_MANUFACTURING_DATE}\"\n")
-string(APPEND script_args "--vendor_id ${CONFIG_CHIP_DEVICE_VENDOR_ID}\n")
-string(APPEND script_args "--product_id ${CONFIG_CHIP_DEVICE_PRODUCT_ID}\n")
-string(APPEND script_args "--vendor_name \"${CONFIG_CHIP_DEVICE_VENDOR_NAME}\"\n")
-string(APPEND script_args "--product_name \"${CONFIG_CHIP_DEVICE_PRODUCT_NAME}\"\n")
-string(APPEND script_args "--hw_ver ${CONFIG_CHIP_DEVICE_HARDWARE_VERSION}\n")
-string(APPEND script_args "--hw_ver_str \"${CONFIG_CHIP_DEVICE_HARDWARE_VERSION_STRING}\"\n")
+string(APPEND script_args "--serial-num \"${CONFIG_CHIP_DEVICE_SERIAL_NUMBER}\"\n")
+string(APPEND script_args "--mfg-date \"${CONFIG_CHIP_DEVICE_MANUFACTURING_DATE}\"\n")
+string(APPEND script_args "--vendor-id ${CONFIG_CHIP_DEVICE_VENDOR_ID}\n")
+string(APPEND script_args "--product-id ${CONFIG_CHIP_DEVICE_PRODUCT_ID}\n")
+string(APPEND script_args "--vendor-name \"${CONFIG_CHIP_DEVICE_VENDOR_NAME}\"\n")
+string(APPEND script_args "--product-name \"${CONFIG_CHIP_DEVICE_PRODUCT_NAME}\"\n")
+string(APPEND script_args "--hw-ver ${CONFIG_CHIP_DEVICE_HARDWARE_VERSION}\n")
+string(APPEND script_args "--hw-ver-str \"${CONFIG_CHIP_DEVICE_HARDWARE_VERSION_STRING}\"\n")
+string(APPEND script_args "--overwrite\n")
+string(APPEND script_args "--in-tree\n")
# check if Rotating Device Id Unique Id should be generated
if(NOT CONFIG_CHIP_DEVICE_GENERATE_ROTATING_DEVICE_UID)
if(NOT DEFINED CONFIG_CHIP_DEVICE_ROTATING_DEVICE_UID)
message(FATAL_ERROR "CHIP_DEVICE_ROTATING_DEVICE_UID was not provided. To generate it use CONFIG_CHIP_DEVICE_GENERATE_ROTATING_DEVICE_UID=y")
else()
- string(APPEND script_args "--rd_uid \"${CONFIG_CHIP_DEVICE_ROTATING_DEVICE_UID}\"\n")
+ string(APPEND script_args "--rd-id-uid \"${CONFIG_CHIP_DEVICE_ROTATING_DEVICE_UID}\"\n")
endif()
else()
- string(APPEND script_args "--generate_rd_uid\n")
+ string(APPEND script_args "--enable-rotating-device-id\n")
endif()
+# find chip cert tool
+find_program(chip_cert_exe NAMES chip-cert REQUIRED)
+string(APPEND script_args "--chip-cert-path ${chip_cert_exe}\n")
+
# for development purpose user can use default certs instead of generating or providing them
if(CONFIG_CHIP_FACTORY_DATA_USE_DEFAULT_CERTS)
# convert decimal PID to its hexadecimal representation to find out certification files in repository
@@ -66,99 +80,57 @@
string(TOUPPER ${raw_pid} raw_pid_upper)
# all certs are located in ${CHIP_ROOT}/credentials/development/attestation
# it can be used during development without need to generate new certifications
- string(APPEND script_args "--dac_cert \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-DAC-${raw_pid_upper}-Cert.der\"\n")
- string(APPEND script_args "--dac_key \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-DAC-${raw_pid_upper}-Key.der\"\n")
- string(APPEND script_args "--pai_cert \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-PAI-noPID-Cert.der\"\n")
+ string(APPEND script_args "--dac-cert \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-DAC-${raw_pid_upper}-Cert.pem\"\n")
+ string(APPEND script_args "--dac-key \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-DAC-${raw_pid_upper}-Key.pem\"\n")
+ string(APPEND script_args "--cert \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-PAI-noPID-Cert.pem\"\n")
+ string(APPEND script_args "--key \"${CHIP_ROOT}/credentials/development/attestation/Matter-Development-PAI-noPID-Key.pem\"\n")
+ string(APPEND script_args "-cd \"${CHIP_ROOT}/credentials/development/cd-certs/Chip-Test-CD-Cert.der\"\n")
+ string(APPEND script_args "--pai\n")
else()
- find_program(chip_cert_exe NAMES chip-cert REQUIRED)
- string(APPEND script_args "--gen_cd\n")
- string(APPEND script_args "--chip_cert_path ${chip_cert_exe}\n")
+ # generate PAI and DAC certs
+ string(APPEND script_args "--cert \"${CHIP_ROOT}/credentials/test/attestation/Chip-Test-PAA-NoVID-Cert.pem\"\n")
+ string(APPEND script_args "--key \"${CHIP_ROOT}/credentials/test/attestation/Chip-Test-PAA-NoVID-Key.pem\"\n")
+ string(APPEND script_args "-cd \"${CHIP_ROOT}/credentials/development/cd-certs/Chip-Test-CD-Cert.der\"\n")
+ string(APPEND script_args "--paa\n")
endif()
+# find chip tool required for generating QRCode
+find_program(chip_tool_exe NAMES chip-tool REQUIRED)
+string(APPEND script_args "--chip-tool-path ${chip_tool_exe}\n")
+
# add Password-Authenticated Key Exchange parameters
-string(APPEND script_args "--spake2_it \"${CONFIG_CHIP_DEVICE_SPAKE2_IT}\"\n")
-string(APPEND script_args "--spake2_salt \"${CONFIG_CHIP_DEVICE_SPAKE2_SALT}\"\n")
+string(APPEND script_args "--spake2-it \"${CONFIG_CHIP_DEVICE_SPAKE2_IT}\"\n")
string(APPEND script_args "--discriminator ${CONFIG_CHIP_DEVICE_DISCRIMINATOR}\n")
string(APPEND script_args "--passcode ${CONFIG_CHIP_DEVICE_SPAKE2_PASSCODE}\n")
-string(APPEND script_args "--include_passcode\n")
-string(APPEND script_args "--overwrite\n")
-# check if spake2 verifier should be generated using script
-if(CONFIG_CHIP_FACTORY_DATA_GENERATE_SPAKE2_VERIFIER)
- # request script to generate a new spake2_verifier
- # by adding an argument to script_args
- find_program(spake_exe NAMES spake2p REQUIRED)
- string(APPEND script_args "--spake2p_path ${spake_exe}\n")
-else()
- # Spake2 verifier should be provided using kConfig
- string(APPEND script_args "--spake2_verifier \"${CONFIG_CHIP_DEVICE_SPAKE2_TEST_VERIFIER}\"\n")
-endif()
+# request spake2p to generate a new spake2_verifier
+find_program(spake_exe NAMES spake2p REQUIRED)
+string(APPEND script_args "--spake2-path ${spake_exe}\n")
if(CONFIG_CHIP_DEVICE_ENABLE_KEY)
# Add optional EnableKey that triggers user-specific action.
-string(APPEND script_args "--enable_key \"${CONFIG_CHIP_DEVICE_ENABLE_KEY}\"\n")
+string(APPEND script_args "--enable-key \"${CONFIG_CHIP_DEVICE_ENABLE_KEY}\"\n")
endif()
-# Set output JSON file and path to SCHEMA file to validate generated factory data
-set(factory_data_json ${output_path}/${factory_data_target}.json)
-string(APPEND script_args "-o \"${factory_data_json}\"\n")
-string(APPEND script_args "-s \"${schema_path}\"\n")
+string(APPEND script_args "--output \"${output_path}\"/factory\n")
-# execute first script to create a JSON file
+set(factory_data_bin ${output_path}/factory/factory_data.bin)
+
+# execute a script to create a factory data file
separate_arguments(separated_script_args NATIVE_COMMAND ${script_args})
add_custom_command(
- OUTPUT ${factory_data_json}
+ OUTPUT ${factory_data_bin}
DEPENDS ${FACTORY_DATA_SCRIPT_PATH}
COMMAND ${Python3_EXECUTABLE} ${FACTORY_DATA_SCRIPT_PATH} ${separated_script_args}
COMMENT "Generating new Factory Data..."
)
add_custom_target(${factory_data_target} ALL
- DEPENDS ${factory_data_json}
+ DEPENDS ${factory_data_bin}
)
endfunction()
-# Create a .hex file with factory data in CBOR format.
-#
-# This function creates a .hex and .cbor files from given JSON factory data file.
-#
-#
-# During generation process some files will be created in zephyr's build directory:
-# - <factory_data_target>.hex a file containing all factory data in CBOR format.
-# - <factory_data_target>.bin a binary file containing all raw factory data in CBOR format.
-# - <factory_data_target>.cbor a file containing all factory data in CBOR format.
-#
-# [Args]:
-# factory_data_hex_target - a name for target to generate factory data HEX file.
-# factory_data_target - a name for target to generate factory data JSON file.
-# script_path - a path to script that makes a factory data .hex file from given arguments.
-# output_path - a path to output directory, where created JSON file will be stored.
-function(telink_create_factory_data_hex_file factory_data_hex_target factory_data_target script_path output_path)
-
-# Pass the argument list via file
-set(cbor_script_args "-i ${output_path}/${factory_data_target}.json\n")
-string(APPEND cbor_script_args "-o ${output_path}/${factory_data_target}\n")
-# get partition address and offset from partition manager during compilation
-string(APPEND cbor_script_args "--offset 0x1f4000\n")
-string(APPEND cbor_script_args "--size 0x1000\n")
-string(APPEND cbor_script_args "-r\n")
-
-# execute second script to create a hex file containing factory data in cbor format
-separate_arguments(separated_cbor_script_args NATIVE_COMMAND ${cbor_script_args})
-set(factory_data_hex ${output_path}/${factory_data_target}.hex)
-
-add_custom_command(OUTPUT ${factory_data_hex}
- COMMAND ${Python3_EXECUTABLE} ${script_path} ${separated_cbor_script_args}
- COMMENT "Generating factory data HEX file..."
- DEPENDS ${factory_data_target} ${script_path}
- )
-add_custom_target(${factory_data_hex_target} ALL
- DEPENDS ${factory_data_hex}
- )
-
-endfunction()
-
# Generate factory data partition using given args
#
#
@@ -176,21 +148,12 @@
endif()
# Localize all scripts needed to generate factory data partition
-set(FACTORY_DATA_SCRIPT_PATH ${CHIP_ROOT}/scripts/tools/telink/generate_telink_chip_factory_data.py)
-set(GENERATE_CBOR_SCRIPT_PATH ${CHIP_ROOT}/scripts/tools/telink/telink_generate_partition.py)
-set(FACTORY_DATA_SCHEMA_PATH ${CHIP_ROOT}/scripts/tools/telink/telink_factory_data.schema)
+set(FACTORY_DATA_SCRIPT_PATH ${CHIP_ROOT}/scripts/tools/telink/mfg_tool.py)
set(OUTPUT_FILE_PATH ${APPLICATION_BINARY_DIR}/zephyr)
-# create a JSON file with all factory data
+# create a binary file with all factory data
telink_create_factory_data_json(factory_data
- ${FACTORY_DATA_SCRIPT_PATH}
- ${FACTORY_DATA_SCHEMA_PATH}
+ ${FACTORY_DATA_SCRIPT_PATH}
${OUTPUT_FILE_PATH})
-# create a .hex file with factory data in CBOR format based on the JSON file created previously
-telink_create_factory_data_hex_file(factory_data_hex
- factory_data
- ${GENERATE_CBOR_SCRIPT_PATH}
- ${OUTPUT_FILE_PATH})
-
endfunction()
diff --git a/scripts/requirements.telink.txt b/scripts/requirements.telink.txt
new file mode 100644
index 0000000..7d20935
--- /dev/null
+++ b/scripts/requirements.telink.txt
@@ -0,0 +1,3 @@
+future==0.18.2
+pypng==0.0.21
+PyQRCode==1.2.1
diff --git a/scripts/requirements.txt b/scripts/requirements.txt
index 6fec358..b39ec86 100644
--- a/scripts/requirements.txt
+++ b/scripts/requirements.txt
@@ -20,6 +20,9 @@
# TI
-r requirements.ti.txt
+# Telink
+-r requirements.telink.txt
+
# cirque tests
requests>=2.24.0
diff --git a/scripts/tools/telink/generate_telink_chip_factory_data.py b/scripts/tools/telink/generate_telink_chip_factory_data.py
deleted file mode 100644
index 7605184..0000000
--- a/scripts/tools/telink/generate_telink_chip_factory_data.py
+++ /dev/null
@@ -1,493 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (c) 2022 Project CHIP Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from os.path import exists
-import os
-import sys
-import json
-import jsonschema
-import secrets
-import argparse
-import subprocess
-import logging as log
-import base64
-from collections import namedtuple
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives.serialization import load_der_private_key
-
-# A user can not change the factory data version and must be coherent with
-# the factory data version set in the Telink platform Kconfig file (CHIP_FACTORY_DATA_VERSION).
-FACTORY_DATA_VERSION = 1
-
-MATTER_ROOT = os.path.dirname(os.path.realpath(__file__))[:-len("/scripts/tools/telink")]
-HEX_PREFIX = "hex:"
-PUB_KEY_PREFIX = b'\x04'
-INVALID_PASSCODES = [00000000, 11111111, 22222222, 33333333, 44444444,
- 55555555, 66666666, 77777777, 88888888, 99999999, 12345678, 87654321]
-
-
-def get_raw_private_key_der(der_file: str, password: str):
- """ Split given der file to get separated key pair consisting of public and private keys.
-
- Args:
- der_file (str): Path to .der file containing public and private keys
- password (str): Password to decrypt Keys. It can be None, and then KEY is not encrypted.
-
- Returns:
- hex string: return a hex string containing extracted and decrypted private KEY from given .der file.
- """
- try:
- with open(der_file, 'rb') as file:
- key_data = file.read()
- if password is None:
- log.warning("KEY password has not been provided. It means that DAC key is not encrypted.")
- keys = load_der_private_key(key_data, password, backend=default_backend())
- private_key = keys.private_numbers().private_value.to_bytes(32, byteorder='big')
-
- return private_key
-
- except IOError or ValueError:
- return None
-
-
-def gen_test_certs(chip_cert_exe: str,
- output: str,
- vendor_id: int,
- product_id: int,
- device_name: str,
- generate_cd: bool = False,
- cd_type: int = 1,
- paa_cert_path: str = None,
- paa_key_path: str = None):
- """
- Generate Matter certificates according to given Vendor ID and Product ID using the chip-cert executable.
- To use own Product Attestation Authority certificate provide paa_cert_path and paa_key_path arguments.
- Without providing these arguments a PAA certificate will be get from /credentials/test/attestation directory
- in the Matter repository.
-
- Args:
- chip_cert_exe (str): path to chip-cert executable
- output (str): output path to store a newly generated certificates (CD, DAC, PAI)
- vendor_id (int): an identification number specific to Vendor
- product_id (int): an identification number specific to Product
- device_name (str): human-readable device name
- generate_cd (bool, optional): Generate Certificate Declaration and store it in thee output directory. Defaults to False.
- paa_cert_path (str, optional): provide PAA certification path. Defaults to None - a path will be set to /credentials/test/attestation directory.
- paa_key_path (str, optional): provide PAA key path. Defaults to None - a path will be set to /credentials/test/attestation directory.
-
- Returns:
- dictionary: ["PAI_CERT": (str)<path to PAI cert .der file>,
- "DAC_CERT": (str)<path to DAC cert .der file>,
- "DAC_KEY": (str)<path to DAC key .der file>]
- """
-
- CD_PATH = MATTER_ROOT + "/credentials/test/certification-declaration/Chip-Test-CD-Signing-Cert.pem"
- CD_KEY_PATH = MATTER_ROOT + "/credentials/test/certification-declaration/Chip-Test-CD-Signing-Key.pem"
- PAA_PATH = paa_cert_path if paa_cert_path != None else MATTER_ROOT + "/credentials/test/attestation/Chip-Test-PAA-NoVID-Cert.pem"
- PAA_KEY_PATH = paa_key_path if paa_key_path != None else MATTER_ROOT + "/credentials/test/attestation/Chip-Test-PAA-NoVID-Key.pem"
-
- attestation_certs = namedtuple("attestation_certs", ["dac_cert", "dac_key", "pai_cert"])
-
- log.info("Generating new certificates using chip-cert...")
-
- if generate_cd:
- # generate Certification Declaration
- cmd = [chip_cert_exe, "gen-cd",
- "--key", CD_KEY_PATH,
- "--cert", CD_PATH,
- "--out", output + "/CD.der",
- "--format-version", "1",
- "--vendor-id", hex(vendor_id),
- "--product-id", hex(product_id),
- "--device-type-id", "0",
- "--certificate-id", "FFFFFFFFFFFFFFFFFFF",
- "--security-level", "0",
- "--security-info", "0",
- "--certification-type", str(cd_type),
- "--version-number", "0xFFFF",
- ]
- subprocess.run(cmd)
-
- new_certificates = {"PAI_CERT": output + "/PAI_cert",
- "PAI_KEY": output + "/PAI_key",
- "DAC_CERT": output + "/DAC_cert",
- "DAC_KEY": output + "/DAC_key"
- }
-
- # generate PAI
- cmd = [chip_cert_exe, "gen-att-cert",
- "-t", "i",
- "-c", device_name,
- "-V", hex(vendor_id),
- "-C", PAA_PATH,
- "-K", PAA_KEY_PATH,
- "-o", new_certificates["PAI_CERT"] + ".pem",
- "-O", new_certificates["PAI_KEY"] + ".pem",
- "-l", str(10000),
- ]
- subprocess.run(cmd)
-
- # generate DAC
- cmd = [chip_cert_exe, "gen-att-cert",
- "-t", "d",
- "-c", device_name,
- "-V", hex(vendor_id),
- "-P", hex(product_id),
- "-C", new_certificates["PAI_CERT"] + ".pem",
- "-K", new_certificates["PAI_KEY"] + ".pem",
- "-o", new_certificates["DAC_CERT"] + ".pem",
- "-O", new_certificates["DAC_KEY"] + ".pem",
- "-l", str(10000),
- ]
- subprocess.run(cmd)
-
- # convert to .der files
- for cert_k, cert_v in new_certificates.items():
- action_type = "convert-cert" if cert_k.find("CERT") != -1 else "convert-key"
- log.info(cert_v + ".der")
- cmd = [chip_cert_exe, action_type,
- cert_v + ".pem",
- cert_v + ".der",
- "--x509-der",
- ]
- subprocess.run(cmd)
-
- return attestation_certs(new_certificates["DAC_CERT"] + ".der",
- new_certificates["DAC_KEY"] + ".der",
- new_certificates["PAI_CERT"] + ".der")
-
-
-def gen_spake2p_verifier(passcode: int, it: int, salt: bytes) -> str:
- """ Generate Spake2+ verifier using SPAKE2+ Python Tool
-
- Args:
- passcode (int): Pairing passcode using in Spake2+
- it (int): Iteration counter for Spake2+ verifier generation
- salt (str): Salt used to generate Spake2+ verifier
-
- Returns:
- verifier encoded in Base64
- """
-
- cmd = [
- os.path.join(MATTER_ROOT, 'scripts/tools/spake2p/spake2p.py'), 'gen-verifier',
- '--passcode', str(passcode),
- '--salt', base64.b64encode(salt).decode('ascii'),
- '--iteration-count', str(it),
- ]
- return subprocess.check_output(cmd)
-
-
-class FactoryDataGenerator:
- """
- Class to generate factory data from given arguments and generate a JSON file
-
- """
-
- def __init__(self, arguments) -> None:
- """
- Args:
- arguments (any):All input arguments parsed using ArgParse
- """
- self._args = arguments
- self._factory_data = list()
- self._user_data = dict()
-
- try:
- self._validate_args()
- except AssertionError as e:
- log.error(e)
- sys.exit(-1)
-
- def _validate_args(self):
- if self._args.user:
- try:
- self._user_data = json.loads(self._args.user)
- except json.decoder.JSONDecodeError as e:
- raise AssertionError("Provided wrong user data, this is not a JSON format! {}".format(e))
- assert self._args.spake2_verifier or self._args.passcode, \
- "Cannot find Spake2+ verifier, to generate a new one please provide passcode (--passcode)"
- assert (self._args.chip_cert_path or (self._args.dac_cert and self._args.pai_cert and self._args.dac_key)), \
- "Cannot find paths to DAC or PAI certificates .der files. To generate a new ones please provide a path to chip-cert executable (--chip_cert_path)"
- assert self._args.output.endswith(".json"), \
- "Output path doesn't contain .json file path. ({})".format(self._args.output)
- assert not (self._args.passcode in INVALID_PASSCODES), \
- "Provided invalid passcode!"
-
- def generate_json(self):
- """
- This function generates JSON data, .json file and validates it.
-
- To validate generated JSON data a scheme must be provided within script's arguments.
-
- - In the first part, if the rotating device id unique id has been not provided
- as an argument, it will be created.
- - If user-provided passcode and Spake2+ verifier have been not provided
- as an argument, it will be created using an external script
- - Passcode is not stored in JSON by default. To store it for debugging purposes, add --include_passcode argument.
- - Validating output JSON is not mandatory, but highly recommended.
-
- """
- # generate missing data if needed
- if not self._args.rd_uid:
- if self._args.generate_rd_uid:
- rd_uid = self._generate_rotating_device_uid()
- else:
- # rotating device ID unique ID was not provided, so do not store it in factory data.
- rd_uid = None
- else:
- rd_uid = HEX_PREFIX + self._args.rd_uid
-
- if not self._args.spake2_verifier:
- spake_2_verifier = self._generate_spake2_verifier()
- else:
- spake_2_verifier = self._args.spake2_verifier
-
- # convert salt to bytestring to be coherent with Spake2+ verifier type
- spake_2_salt = self._args.spake2_salt
-
- if self._args.chip_cert_path:
- certs = gen_test_certs(self._args.chip_cert_path,
- self._args.output[:self._args.output.rfind("/")],
- self._args.vendor_id,
- self._args.product_id,
- self._args.vendor_name + "_" + self._args.product_name,
- self._args.gen_cd,
- self._args.cd_type,
- self._args.paa_cert,
- self._args.paa_key)
- dac_cert = certs.dac_cert
- pai_cert = certs.pai_cert
- dac_key = certs.dac_key
- else:
- dac_cert = self._args.dac_cert
- dac_key = self._args.dac_key
- pai_cert = self._args.pai_cert
-
- # try to read DAC public and private keys
- dac_priv_key = get_raw_private_key_der(dac_key, self._args.dac_key_password)
- if dac_priv_key is None:
- log.error("Cannot read DAC keys from : {}".format(dac_key))
- sys.exit(-1)
-
- try:
- json_file = open(self._args.output, "w+")
- except FileNotFoundError:
- print("Cannot create JSON file in this location: {}".format(self._args.output))
- sys.exit(-1)
- with json_file:
- # serialize data
- self._add_entry("version", FACTORY_DATA_VERSION)
- self._add_entry("sn", self._args.sn)
- self._add_entry("vendor_id", self._args.vendor_id)
- self._add_entry("product_id", self._args.product_id)
- self._add_entry("vendor_name", self._args.vendor_name)
- self._add_entry("product_name", self._args.product_name)
- self._add_entry("product_label", self._args.product_label)
- self._add_entry("product_url", self._args.product_url)
- self._add_entry("part_number", self._args.part_number)
- self._add_entry("date", self._args.date)
- self._add_entry("hw_ver", self._args.hw_ver)
- self._add_entry("hw_ver_str", self._args.hw_ver_str)
- self._add_entry("dac_cert", self._process_der(dac_cert))
- self._add_entry("dac_key", dac_priv_key)
- self._add_entry("pai_cert", self._process_der(pai_cert))
- if self._args.include_passcode:
- self._add_entry("passcode", self._args.passcode)
- self._add_entry("spake2_it", self._args.spake2_it)
- self._add_entry("spake2_salt", spake_2_salt)
- self._add_entry("spake2_verifier", spake_2_verifier)
- self._add_entry("discriminator", self._args.discriminator)
- if rd_uid:
- self._add_entry("rd_uid", rd_uid)
- if self._args.enable_key:
- self._add_entry("enable_key", HEX_PREFIX + self._args.enable_key)
- if self._args.user:
- self._add_entry("user", self._args.user)
-
- factory_data_dict = dict(self._factory_data)
-
- json_object = json.dumps(factory_data_dict)
- is_json_valid = True
-
- if self._args.schema:
- is_json_valid = self._validate_output_json(json_object)
- else:
- log.warning("JSON Schema file has not been provided, the output file can be wrong. Be aware of that.")
- try:
- if is_json_valid:
- json_file.write(json_object)
- except IOError as e:
- log.error("Cannot save output file into directory: {}".format(self._args.output))
-
- def _add_entry(self, name: str, value: any):
- """ Add single entry to list of tuples ("key", "value") """
- if(isinstance(value, bytes) or isinstance(value, bytearray)):
- value = HEX_PREFIX + value.hex()
- if value or (isinstance(value, int) and value == 0):
- log.debug("Adding entry '{}' with size {} and type {}".format(name, sys.getsizeof(value), type(value)))
- self._factory_data.append((name, value))
-
- def _generate_spake2_verifier(self):
- """ If verifier has not been provided in arguments list it should be generated via external script """
- return base64.b64decode(gen_spake2p_verifier(self._args.passcode, self._args.spake2_it, self._args.spake2_salt))
-
- def _generate_rotating_device_uid(self):
- """ If rotating device unique ID has not been provided it should be generated """
- log.warning("Cannot find rotating device UID in provided arguments list. A new one will be generated.")
- rdu = secrets.token_bytes(16)
- log.info("\n\nThe new rotate device UID: {}\n".format(rdu.hex()))
- return rdu
-
- def _validate_output_json(self, output_json: str):
- """
- Validate output JSON data with provided .scheme file
- This function will raise error if JSON does not match schema.
-
- """
- try:
- with open(self._args.schema) as schema_file:
- log.info("Validating JSON with schema...")
- schema = json.loads(schema_file.read())
- validator = jsonschema.Draft202012Validator(schema=schema)
- validator.validate(instance=json.loads(output_json))
- except IOError as e:
- log.error("Provided JSON schema file is wrong: {}".format(self._args.schema))
- return False
- else:
- log.info("Validate OK")
- return True
-
- def _process_der(self, path: str):
- log.debug("Processing der file...")
- try:
- with open(path, 'rb') as f:
- data = f.read()
- return data
- except IOError as e:
- log.error(e)
- raise e
-
-
-def main():
- parser = argparse.ArgumentParser(description="Telink Factory Data NVS generator tool")
-
- def allow_any_int(i): return int(i, 0)
- def base64_str(s): return base64.b64decode(s)
-
- mandatory_arguments = parser.add_argument_group("Mandatory keys", "These arguments must be provided to generate JSON file")
- optional_arguments = parser.add_argument_group(
- "Optional keys", "These arguments are optional and they depend on the user-purpose")
- parser.add_argument("-s", "--schema", type=str,
- help="JSON schema file to validate JSON output data")
- parser.add_argument("-o", "--output", type=str, required=True,
- help="Output path to store .json file, e.g. my_dir/output.json")
- parser.add_argument("-v", "--verbose", action="store_true",
- help="Run this script with DEBUG logging level")
- parser.add_argument("--include_passcode", action="store_true",
- help="Include passcode in factory data. By default, it is used only for generating Spake2+ verifier.")
- parser.add_argument("--overwrite", action="store_true",
- help="If output JSON file exist this argument allows to generate new factory data and overwrite it.")
- # Json known-keys values
- # mandatory keys
- mandatory_arguments.add_argument("--sn", type=str, required=True,
- help="[ascii string] Serial number of a device which can be used to identify \
- the serial number field in the Matter certificate structure. \
- Maximum length of serial number is 20 bytes. \
- Strings longer than 20 bytes will be declined in script")
- mandatory_arguments.add_argument("--vendor_id", type=allow_any_int,
- help="[int | hex int] Provide Vendor Identification Number")
- mandatory_arguments.add_argument("--product_id", type=allow_any_int,
- help="[int | hex int] Provide Product Identification Number")
- mandatory_arguments.add_argument("--vendor_name", type=str,
- help="[string] provide human-readable vendor name")
- mandatory_arguments.add_argument("--product_name", type=str,
- help="[string] provide human-readable product name")
- mandatory_arguments.add_argument("--date", type=str, required=True,
- help="[ascii string] Provide manufacturing date \
- A manufacturing date specifies the date that the Node was manufactured. \
- Used format for providing a manufacturing date is ISO 8601 e.g. YYYY-MM-DD.")
- mandatory_arguments.add_argument("--hw_ver", type=allow_any_int, required=True,
- help="[int | hex int] Provide hardware version in int format.")
- mandatory_arguments.add_argument("--hw_ver_str", type=str, required=True,
- help="[ascii string] Provide hardware version in string format.")
- mandatory_arguments.add_argument("--spake2_it", type=allow_any_int, required=True,
- help="[int | hex int] Provide Spake2+ iteration count.")
- mandatory_arguments.add_argument("--spake2_salt", type=base64_str, required=True,
- help="[base64 string] Provide Spake2+ salt.")
- mandatory_arguments.add_argument("--discriminator", type=allow_any_int, required=True,
- help="[int] Provide BLE pairing discriminator. \
- A 12-bit value matching the field of the same name in \
- the setup code. Discriminator is used during a discovery process.")
-
- # optional keys
- optional_arguments.add_argument("--product_url", type=str,
- help="[string] provide link to product-specific web page")
- optional_arguments.add_argument("--product_label", type=str,
- help="[string] provide human-readable product label")
- optional_arguments.add_argument("--part_number", type=str,
- help="[string] provide human-readable product number")
- optional_arguments.add_argument("--chip_cert_path", type=str,
- help="Generate DAC and PAI certificates instead giving a path to .der files. This option requires a path to chip-cert executable."
- "By default you can find chip-cert in connectedhomeip/src/tools/chip-cert directory and build it there.")
- optional_arguments.add_argument("--dac_cert", type=str,
- help="[.der] Provide the path to .der file containing DAC certificate.")
- optional_arguments.add_argument("--dac_key", type=str,
- help="[.der] Provide the path to .der file containing DAC keys.")
- optional_arguments.add_argument("--generate_rd_uid", action="store_true",
- help="Generate a new rotating device unique ID, print it out to console output and store it in factory data.")
- optional_arguments.add_argument("--dac_key_password", type=str,
- help="Provide a password to decode dac key. If dac key is not encrypted do not provide this argument.")
- optional_arguments.add_argument("--pai_cert", type=str,
- help="[.der] Provide the path to .der file containing PAI certificate.")
- optional_arguments.add_argument("--rd_uid", type=str,
- help="[hex string] [128-bit hex-encoded] Provide the rotating device unique ID. If this argument is not provided a new rotating device id unique id will be generated.")
- optional_arguments.add_argument("--passcode", type=allow_any_int,
- help="[int | hex] Default PASE session passcode. (This is mandatory to generate Spake2+ verifier).")
- optional_arguments.add_argument("--spake2_verifier", type=base64_str,
- help="[base64 string] Provide Spake2+ verifier without generating it.")
- optional_arguments.add_argument("--enable_key", type=str,
- help="[hex string] [128-bit hex-encoded] The Enable Key is a 128-bit value that triggers manufacturer-specific action while invoking the TestEventTrigger Command."
- "This value is used during Certification Tests, and should not be present on production devices.")
- optional_arguments.add_argument("--user", type=str,
- help="[string] Provide additional user-specific keys in JSON format: {'name_1': 'value_1', 'name_2': 'value_2', ... 'name_n', 'value_n'}.")
- optional_arguments.add_argument("--gen_cd", action="store_true", default=False,
- help="Generate a new Certificate Declaration in .der format according to used Vendor ID and Product ID. This certificate will not be included to the factory data.")
- optional_arguments.add_argument("--cd_type", type=int, default=1,
- help="[int] Type of generated Certification Declaration: 0 - development, 1 - provisional, 2 - official")
- optional_arguments.add_argument("--paa_cert", type=str,
- help="Provide a path to the Product Attestation Authority (PAA) certificate to generate the PAI certificate. Without providing it, a testing PAA stored in the Matter repository will be used.")
- optional_arguments.add_argument("--paa_key", type=str,
- help="Provide a path to the Product Attestation Authority (PAA) key to generate the PAI certificate. Without providing it, a testing PAA key stored in the Matter repository will be used.")
- args = parser.parse_args()
-
- if args.verbose:
- log.basicConfig(format='[%(asctime)s][%(levelname)s] %(message)s', level=log.DEBUG)
- else:
- log.basicConfig(format='[%(levelname)s] %(message)s', level=log.INFO)
-
- # check if json file already exist
- if(exists(args.output) and not args.overwrite):
- log.error("Output file: {} already exist, to create a new one add argument '--overwrite'. By default overwriting is disabled".format(args.output))
- return
-
- generator = FactoryDataGenerator(args)
- generator.generate_json()
-
-
-if __name__ == "__main__":
- main()
diff --git a/scripts/tools/telink/mfg_tool.py b/scripts/tools/telink/mfg_tool.py
new file mode 100644
index 0000000..2c4390c
--- /dev/null
+++ b/scripts/tools/telink/mfg_tool.py
@@ -0,0 +1,755 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2022 Project CHIP Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import base64
+import binascii
+import csv
+import json
+import os
+import random
+import shutil
+import subprocess
+import logging as logger
+import sys
+import cryptography.hazmat.backends
+import cryptography.x509
+import pyqrcode
+import cbor2 as cbor
+from intelhex import IntelHex
+
+TOOLS = {
+ 'spake2p': None,
+ 'chip-cert': None,
+ 'chip-tool': None,
+}
+
+INVALID_PASSCODES = [00000000, 11111111, 22222222, 33333333, 44444444, 55555555,
+ 66666666, 77777777, 88888888, 99999999, 12345678, 87654321]
+
+FACTORY_DATA_VERSION = 1
+SERIAL_NUMBER_LEN = 32
+
+# Lengths for manual pairing codes and qrcode
+SHORT_MANUALCODE_LEN = 11
+LONG_MANUALCODE_LEN = 21
+QRCODE_LEN = 22
+ROTATING_DEVICE_ID_UNIQUE_ID_LEN_BITS = 128
+HEX_PREFIX = "hex:"
+DEV_SN_CSV_HDR = "Serial Number,\n"
+
+NVS_MEMORY = dict()
+
+
+def nvs_memory_append(key, value):
+    # Stage a key/value pair in the factory-data dict; strings are UTF-8 encoded
+    # so every staged value is CBOR-serializable as bytes or a native type.
+    if isinstance(value, str):
+        NVS_MEMORY[key] = value.encode("utf-8")
+    else:
+        NVS_MEMORY[key] = value
+
+
+def nvs_memory_update(key, value):
+    # Overwrite (or insert) a staged key/value pair. Behaviorally identical to
+    # nvs_memory_append, since dict assignment also overwrites existing keys.
+    if isinstance(value, str):
+        NVS_MEMORY.update({key: value.encode("utf-8")})
+    else:
+        NVS_MEMORY.update({key: value})
+
+
+def check_tools_exists(args):
+    # Resolve the external helper tools (spake2p, chip-cert, chip-tool) on PATH
+    # or from the explicit --*-path arguments, exiting with an error if a
+    # required tool cannot be found. Results are stored in the global TOOLS dict.
+    if args.spake2_path:
+        TOOLS['spake2p'] = shutil.which(args.spake2_path)
+    else:
+        TOOLS['spake2p'] = shutil.which('spake2p')
+
+    if TOOLS['spake2p'] is None:
+        logger.error('spake2p not found, please specify --spake2-path argument')
+        sys.exit(1)
+    # if the certs and keys are not in the generated partitions or the specific dac cert and key are used,
+    # the chip-cert is not needed.
+    if args.paa or (args.pai and (args.dac_cert is None and args.dac_key is None)):
+        if args.chip_cert_path:
+            TOOLS['chip-cert'] = shutil.which(args.chip_cert_path)
+        else:
+            TOOLS['chip-cert'] = shutil.which('chip-cert')
+        if TOOLS['chip-cert'] is None:
+            logger.error('chip-cert not found, please specify --chip-cert-path argument')
+            sys.exit(1)
+
+    # chip-tool is always required (used to derive QR / manual pairing codes).
+    if args.chip_tool_path:
+        TOOLS['chip-tool'] = shutil.which(args.chip_tool_path)
+    else:
+        TOOLS['chip-tool'] = shutil.which('chip-tool')
+    if TOOLS['chip-tool'] is None:
+        logger.error('chip-tool not found, please specify --chip-tool-path argument')
+        sys.exit(1)
+
+    logger.debug('Using following tools:')
+    logger.debug('spake2p: {}'.format(TOOLS['spake2p']))
+    logger.debug('chip-cert: {}'.format(TOOLS['chip-cert']))
+    logger.debug('chip-tool: {}'.format(TOOLS['chip-tool']))
+
+
+def execute_cmd(cmd):
+    # Run an external command, capturing output; on a non-zero exit status log
+    # the command's stderr (if any) and terminate the whole tool with exit(1).
+    logger.debug('Executing Command: {}'.format(cmd))
+    status = subprocess.run(cmd, capture_output=True)
+
+    try:
+        status.check_returncode()
+    except subprocess.CalledProcessError as e:
+        if status.stderr:
+            logger.error('[stderr]: {}'.format(status.stderr.decode('utf-8').strip()))
+        logger.error('Command failed with error: {}'.format(e))
+        sys.exit(1)
+
+
+def check_str_range(s, min_len, max_len, name):
+    # Validate that an optional string argument's length is within [min_len, max_len].
+    # Compare against None explicitly: the original truthiness test ('if s and ...')
+    # silently skipped validation for an empty string, which should be rejected.
+    if s is not None and ((len(s) < min_len) or (len(s) > max_len)):
+        logger.error('%s must be between %d and %d characters', name, min_len, max_len)
+        sys.exit(1)
+
+
+def check_int_range(value, min_value, max_value, name):
+    # Validate that an optional integer argument is within [min_value, max_value].
+    # Compare against None explicitly: the original truthiness test treated a
+    # supplied value of 0 as "not given" and skipped validation (e.g. --spake2-it 0).
+    if value is not None and ((value < min_value) or (value > max_value)):
+        logger.error('%s is out of range, should be in range [%d, %d]', name, min_value, max_value)
+        sys.exit(1)
+
+
+def vid_pid_str(vid, pid):
+    # Build the '<vid-hex>_<pid-hex>' directory-name fragment (no 0x prefix),
+    # e.g. vid=0xFFF2, pid=0x8001 -> 'fff2_8001'.
+    return '_'.join([hex(vid)[2:], hex(pid)[2:]])
+
+
+def read_der_file(path: str):
+    # Read a DER-encoded file and return its raw bytes; log and re-raise on I/O error.
+    # Fix: logging uses %-style lazy formatting, so the original
+    # logger.debug("... {}...", path) never substituted the path and raised a
+    # message-formatting error whenever DEBUG logging was enabled.
+    logger.debug("Reading der file %s...", path)
+    try:
+        with open(path, 'rb') as f:
+            data = f.read()
+            return data
+    except IOError as e:
+        logger.error(e)
+        raise e
+
+
+def read_key_bin_file(path: str):
+    # Read a binary key file, returning its bytes or None if it cannot be read.
+    try:
+        with open(path, 'rb') as file:
+            key_data = file.read()
+
+        return key_data
+
+    # Fix: the original 'except IOError or ValueError:' evaluates the boolean
+    # expression first and therefore only caught IOError; a tuple catches both.
+    except (IOError, ValueError):
+        return None
+
+
+def setup_out_dir(out_dir_top, args, serial: str):
+    # Create and return the per-device output directories:
+    #   default:  <top>/<vid>_<pid>/<serial>{,/internal}
+    #   --in-tree: <top>{,/internal}  (flat layout for in-source builds)
+    out_dir = os.sep.join([out_dir_top, vid_pid_str(args.vendor_id, args.product_id)])
+
+    if args.in_tree:
+        out_dir = out_dir_top
+
+    os.makedirs(out_dir, exist_ok=True)
+
+    dirs = {
+        'output': os.sep.join([out_dir, serial]),
+        'internal': os.sep.join([out_dir, serial, 'internal']),
+    }
+
+    if args.in_tree:
+        dirs['output'] = out_dir
+        dirs['internal'] = os.sep.join([out_dir, 'internal'])
+
+    os.makedirs(dirs['output'], exist_ok=True)
+    os.makedirs(dirs['internal'], exist_ok=True)
+
+    return dirs
+
+
+def convert_x509_cert_from_pem_to_der(pem_file, out_der_file):
+    # Convert an X.509 certificate from PEM to DER encoding.
+    # NOTE(review): this references cryptography.hazmat.primitives.serialization,
+    # which is not explicitly imported at the top of the file — it appears to be
+    # reachable via the 'cryptography.x509' import; confirm or import it explicitly.
+    with open(pem_file, 'rb') as f:
+        pem_data = f.read()
+
+    pem_cert = cryptography.x509.load_pem_x509_certificate(pem_data, cryptography.hazmat.backends.default_backend())
+    der_cert = pem_cert.public_bytes(cryptography.hazmat.primitives.serialization.Encoding.DER)
+
+    with open(out_der_file, 'wb') as f:
+        f.write(der_cert)
+
+
+def generate_passcode(args, out_dirs):
+    # Run the spake2p tool to generate the SPAKE2+ verifier (and a random
+    # passcode unless --passcode was given), writing the result to pin.csv.
+    salt_len_max = 32
+
+    cmd = [
+        TOOLS['spake2p'], 'gen-verifier',
+        '--iteration-count', str(args.spake2_it),
+        '--salt-len', str(salt_len_max),
+        '--out', os.sep.join([out_dirs['output'], 'pin.csv'])
+    ]
+
+    # If passcode is provided, use it
+    if (args.passcode):
+        cmd.extend(['--pin-code', str(args.passcode)])
+
+    execute_cmd(cmd)
+
+
+def generate_discriminator(args, out_dirs):
+    # Append the discriminator column to the spake2p output, producing
+    # pin_disc.csv and removing the intermediate pin.csv.
+    # Fix: compare against None — 0 is a valid discriminator (range 0x000-0xFFF)
+    # and the original truthiness test replaced an explicit 0 with a random value.
+    if args.discriminator is not None:
+        disc = args.discriminator
+    else:
+        disc = random.randint(0x0000, 0x0FFF)
+    # Append discriminator to each line of the passcode file
+    with open(os.sep.join([out_dirs['output'], 'pin.csv']), 'r') as fd:
+        lines = fd.readlines()
+
+    lines[0] = ','.join([lines[0].strip(), 'Discriminator'])
+    for i in range(1, len(lines)):
+        lines[i] = ','.join([lines[i].strip(), str(disc)])
+
+    with open(os.sep.join([out_dirs['output'], 'pin_disc.csv']), 'w') as fd:
+        fd.write('\n'.join(lines) + '\n')
+
+    os.remove(os.sep.join([out_dirs['output'], 'pin.csv']))
+
+
+def generate_pai_certs(args, ca_key, ca_cert, out_key, out_cert):
+    # Generate a PAI (intermediate attestation) certificate and key with
+    # chip-cert, signed by the given PAA CA key/cert.
+    cmd = [
+        TOOLS['chip-cert'], 'gen-att-cert',
+        '--type', 'i',
+        '--subject-cn', '"{} PAI {}"'.format(args.cn_prefix, '00'),
+        '--out-key', out_key,
+        '--out', out_cert,
+    ]
+
+    if args.lifetime:
+        cmd.extend(['--lifetime', str(args.lifetime)])
+    if args.valid_from:
+        cmd.extend(['--valid-from', str(args.valid_from)])
+
+    cmd.extend([
+        '--subject-vid', hex(args.vendor_id)[2:],
+        '--subject-pid', hex(args.product_id)[2:],
+        '--ca-key', ca_key,
+        '--ca-cert', ca_cert,
+    ])
+
+    execute_cmd(cmd)
+    logger.info('Generated PAI certificate: {}'.format(out_cert))
+    logger.info('Generated PAI private key: {}'.format(out_key))
+
+
+def setup_root_certificates(args, dirs):
+    # Prepare the PAI material used for DAC generation:
+    #  --paa: generate a fresh PAI (PEM + DER) from the supplied PAA key/cert;
+    #  --pai: use the supplied PAI directly, converting its cert to DER.
+    # Returns a dict with 'cert_pem', 'cert_der' and 'key_pem' paths (entries
+    # stay None if neither flag was given; callers only invoke this with one set).
+    pai_cert = {
+        'cert_pem': None,
+        'cert_der': None,
+        'key_pem': None,
+    }
+    # If PAA is passed as input, then generate PAI certificate
+    if args.paa:
+        # output file names
+        pai_cert['cert_pem'] = os.sep.join([dirs['internal'], 'pai_cert.pem'])
+        pai_cert['cert_der'] = os.sep.join([dirs['internal'], 'pai_cert.der'])
+        pai_cert['key_pem'] = os.sep.join([dirs['internal'], 'pai_key.pem'])
+
+        generate_pai_certs(args, args.key, args.cert, pai_cert['key_pem'], pai_cert['cert_pem'])
+        convert_x509_cert_from_pem_to_der(pai_cert['cert_pem'], pai_cert['cert_der'])
+        logger.info('Generated PAI certificate in DER format: {}'.format(pai_cert['cert_der']))
+
+    # If PAI is passed as input, generate DACs
+    elif args.pai:
+        pai_cert['cert_pem'] = args.cert
+        pai_cert['key_pem'] = args.key
+        pai_cert['cert_der'] = os.sep.join([dirs['internal'], 'pai_cert.der'])
+
+        convert_x509_cert_from_pem_to_der(pai_cert['cert_pem'], pai_cert['cert_der'])
+        logger.info('Generated PAI certificate in DER format: {}'.format(pai_cert['cert_der']))
+
+    return pai_cert
+
+
+# Generate the Public and Private key pair binaries
+def generate_keypair_bin(pem_file, out_privkey_bin, out_pubkey_bin):
+    # Extract the EC private scalar (32 bytes, big-endian) and the uncompressed
+    # public point (0x04 || X || Y, 65 bytes) from a PEM private key file and
+    # write them as raw binaries.
+    with open(pem_file, 'rb') as f:
+        pem_data = f.read()
+
+    key_pem = cryptography.hazmat.primitives.serialization.load_pem_private_key(pem_data, None)
+    private_number_val = key_pem.private_numbers().private_value
+    public_number_x = key_pem.public_key().public_numbers().x
+    public_number_y = key_pem.public_key().public_numbers().y
+    # 0x04 marks the SEC1 uncompressed point format.
+    public_key_first_byte = 0x04
+
+    with open(out_privkey_bin, 'wb') as f:
+        f.write(private_number_val.to_bytes(32, byteorder='big'))
+
+    with open(out_pubkey_bin, 'wb') as f:
+        f.write(public_key_first_byte.to_bytes(1, byteorder='big'))
+        f.write(public_number_x.to_bytes(32, byteorder='big'))
+        f.write(public_number_y.to_bytes(32, byteorder='big'))
+
+
+def generate_dac_cert(iteration, args, out_dirs, discriminator, passcode, ca_key, ca_cert):
+    # Generate a per-device DAC with chip-cert (signed by the PAI key/cert),
+    # convert it to DER, and dump the key pair as raw binaries.
+    # Returns (cert_der_path, private_key_bin_path, public_key_bin_path).
+    # NOTE(review): discriminator/passcode parameters are currently unused here.
+    out_key_pem = os.sep.join([out_dirs['internal'], 'DAC_key.pem'])
+    out_cert_pem = out_key_pem.replace('key.pem', 'cert.pem')
+    out_cert_der = out_key_pem.replace('key.pem', 'cert.der')
+    out_private_key_bin = out_key_pem.replace('key.pem', 'private_key.bin')
+    out_public_key_bin = out_key_pem.replace('key.pem', 'public_key.bin')
+
+    cmd = [
+        TOOLS['chip-cert'], 'gen-att-cert',
+        '--type', 'd',
+        '--subject-cn', '"{} DAC {}"'.format(args.cn_prefix, iteration),
+        '--out-key', out_key_pem,
+        '--out', out_cert_pem,
+    ]
+
+    if args.lifetime:
+        cmd.extend(['--lifetime', str(args.lifetime)])
+    if args.valid_from:
+        cmd.extend(['--valid-from', str(args.valid_from)])
+
+    cmd.extend(['--subject-vid', hex(args.vendor_id)[2:],
+                '--subject-pid', hex(args.product_id)[2:],
+                '--ca-key', ca_key,
+                '--ca-cert', ca_cert,
+                ])
+
+    execute_cmd(cmd)
+    logger.info('Generated DAC certificate: {}'.format(out_cert_pem))
+    logger.info('Generated DAC private key: {}'.format(out_key_pem))
+
+    convert_x509_cert_from_pem_to_der(out_cert_pem, out_cert_der)
+    logger.info('Generated DAC certificate in DER format: {}'.format(out_cert_der))
+
+    generate_keypair_bin(out_key_pem, out_private_key_bin, out_public_key_bin)
+    logger.info('Generated DAC private key in binary format: {}'.format(out_private_key_bin))
+    logger.info('Generated DAC public key in binary format: {}'.format(out_public_key_bin))
+
+    return out_cert_der, out_private_key_bin, out_public_key_bin
+
+
+def use_dac_cert_from_args(args, out_dirs):
+    # Use the DAC certificate/key supplied on the command line instead of
+    # generating new ones: convert cert PEM->DER and dump the key pair binaries.
+    # Returns (cert_der_path, private_key_bin_path, public_key_bin_path).
+    logger.info('Using DAC from command line arguments...')
+    logger.info('DAC Certificate: {}'.format(args.dac_cert))
+    logger.info('DAC Private Key: {}'.format(args.dac_key))
+
+    # There should be only one UUID in the UUIDs list if DAC is specified
+    out_cert_der = os.sep.join([out_dirs['internal'], 'DAC_cert.der'])
+    out_private_key_bin = out_cert_der.replace('cert.der', 'private_key.bin')
+    out_public_key_bin = out_cert_der.replace('cert.der', 'public_key.bin')
+
+    convert_x509_cert_from_pem_to_der(args.dac_cert, out_cert_der)
+    logger.info('Generated DAC certificate in DER format: {}'.format(out_cert_der))
+
+    generate_keypair_bin(args.dac_key, out_private_key_bin, out_public_key_bin)
+    logger.info('Generated DAC private key in binary format: {}'.format(out_private_key_bin))
+    logger.info('Generated DAC public key in binary format: {}'.format(out_public_key_bin))
+
+    return out_cert_der, out_private_key_bin, out_public_key_bin
+
+
+def get_manualcode_args(vid, pid, flow, discriminator, passcode):
+    # Build the common chip-tool payload arguments shared by the manual-code
+    # and QR-code generators.
+    payload_args = list()
+    payload_args.append('--discriminator')
+    payload_args.append(str(discriminator))
+    payload_args.append('--setup-pin-code')
+    payload_args.append(str(passcode))
+    payload_args.append('--version')
+    payload_args.append('0')
+    payload_args.append('--vendor-id')
+    payload_args.append(str(vid))
+    payload_args.append('--product-id')
+    payload_args.append(str(pid))
+    payload_args.append('--commissioning-mode')
+    payload_args.append(str(flow))
+    return payload_args
+
+
+def get_qrcode_args(vid, pid, flow, discriminator, passcode, disc_mode):
+    # Same as manual-code args plus the rendezvous bitmask (1 << discovery mode:
+    # bit0 SoftAP, bit1 BLE, bit2 on-network).
+    payload_args = get_manualcode_args(vid, pid, flow, discriminator, passcode)
+    payload_args.append('--rendezvous')
+    payload_args.append(str(1 << disc_mode))
+    return payload_args
+
+
+def get_chip_qrcode(chip_tool, vid, pid, flow, discriminator, passcode, disc_mode):
+    # Invoke chip-tool to produce the onboarding QR-code payload and parse it
+    # out of the tool's log output.
+    payload_args = get_qrcode_args(vid, pid, flow, discriminator, passcode, disc_mode)
+    cmd_args = [chip_tool, 'payload', 'generate-qrcode']
+    cmd_args.extend(payload_args)
+    data = subprocess.check_output(cmd_args)
+
+    # Command output is as below:
+    # \x1b[0;32m[1655386003372] [23483:7823617] CHIP: [TOO] QR Code: MT:Y.K90-WB010E7648G00\x1b[0m
+    return data.decode('utf-8').split('QR Code: ')[1][:QRCODE_LEN]
+
+
+def get_chip_manualcode(chip_tool, vid, pid, flow, discriminator, passcode):
+    # Invoke chip-tool to produce the manual pairing code and parse it out of
+    # the tool's log output; code length depends on the commissioning flow.
+    payload_args = get_manualcode_args(vid, pid, flow, discriminator, passcode)
+    cmd_args = [chip_tool, 'payload', 'generate-manualcode']
+    cmd_args.extend(payload_args)
+    data = subprocess.check_output(cmd_args)
+
+    # Command output is as below:
+    # \x1b[0;32m[1655386909774] [24424:7837894] CHIP: [TOO] Manual Code: 749721123365521327689\x1b[0m\n
+    # OR
+    # \x1b[0;32m[1655386926028] [24458:7838229] CHIP: [TOO] Manual Code: 34972112338\x1b[0m\n
+    # Length of manual code depends on the commissioning flow:
+    # For standard commissioning flow it is 11 digits
+    # For User-intent and custom commissioning flow it is 21 digits
+    manual_code_len = LONG_MANUALCODE_LEN if flow else SHORT_MANUALCODE_LEN
+    return data.decode('utf-8').split('Manual Code: ')[1][:manual_code_len]
+
+
+def generate_onboarding_data(args, out_dirs, discriminator, passcode):
+    # Produce the device onboarding artifacts: onb_codes.csv (QR payload,
+    # manual code, discriminator, passcode) and a rendered QR-code PNG.
+    chip_manualcode = get_chip_manualcode(TOOLS['chip-tool'], args.vendor_id, args.product_id,
+                                          args.commissioning_flow, discriminator, passcode)
+    chip_qrcode = get_chip_qrcode(TOOLS['chip-tool'], args.vendor_id, args.product_id,
+                                  args.commissioning_flow, discriminator, passcode, args.discovery_mode)
+
+    logger.info('Generated QR code: ' + chip_qrcode)
+    logger.info('Generated manual code: ' + chip_manualcode)
+
+    csv_data = 'qrcode,manualcode,discriminator,passcode\n'
+    csv_data += chip_qrcode + ',' + chip_manualcode + ',' + str(discriminator) + ',' + str(passcode) + '\n'
+
+    onboarding_data_file = os.sep.join([out_dirs['output'], 'onb_codes.csv'])
+    with open(onboarding_data_file, 'w') as f:
+        f.write(csv_data)
+
+    # Create QR code image as mentioned in the spec
+    qrcode_file = os.sep.join([out_dirs['output'], 'qrcode.png'])
+    chip_qr = pyqrcode.create(chip_qrcode, version=2, error='M')
+    chip_qr.png(qrcode_file, scale=6)
+
+    logger.info('Generated onboarding data and QR Code')
+
+
+# This function generates the DACs, picks the commissionable data from the already present csv file,
+# and generates the onboarding payloads, and writes everything to the master csv
+def write_device_unique_data(args, out_dirs, pai_cert):
+    # Fix: initialize dacs so the final 'return dacs' does not raise NameError
+    # when neither --paa nor --pai is given (the documented "no attestation
+    # certificates in partition" mode). None signals "no DAC generated".
+    dacs = None
+    with open(os.sep.join([out_dirs['output'], 'pin_disc.csv']), 'r') as csvf:
+        pin_disc_dict = csv.DictReader(csvf)
+        row = pin_disc_dict.__next__()
+
+    nvs_memory_append('discriminator', int(row['Discriminator']))
+    nvs_memory_append('spake2_it', int(row['Iteration Count']))
+    nvs_memory_append('spake2_salt', base64.b64decode(row['Salt']))
+    nvs_memory_append('spake2_verifier', base64.b64decode(row['Verifier']))
+    nvs_memory_append('passcode', int(row['PIN Code']))
+
+    if args.paa or args.pai:
+        if args.dac_key is not None and args.dac_cert is not None:
+            dacs = use_dac_cert_from_args(args, out_dirs)
+        else:
+            dacs = generate_dac_cert(int(row['Index']), args, out_dirs, int(row['Discriminator']),
+                                     int(row['PIN Code']), pai_cert['key_pem'], pai_cert['cert_pem'])
+
+        nvs_memory_append('dac_cert', read_der_file(dacs[0]))
+        nvs_memory_append('dac_key', read_key_bin_file(dacs[1]))
+        nvs_memory_append('pai_cert', read_der_file(pai_cert['cert_der']))
+
+    nvs_memory_append('cert_dclrn', read_der_file(args.cert_dclrn))
+
+    # Generate a random rotating-device-id UID when the feature is enabled but
+    # no explicit UID was supplied on the command line.
+    if (args.enable_rotating_device_id is True) and (args.rd_id_uid is None):
+        nvs_memory_update('rd_uid', binascii.b2a_hex(os.urandom(
+            int(ROTATING_DEVICE_ID_UNIQUE_ID_LEN_BITS / 8))).decode('utf-8'))
+
+    # Generate onboarding data
+    generate_onboarding_data(args, out_dirs, int(row['Discriminator']), int(row['PIN Code']))
+
+    return dacs
+
+
+def generate_partition(args, out_dirs):
+    # Serialize the staged factory data (NVS_MEMORY) to CBOR and emit it as
+    # factory_data.hex (at the configured flash offset) and factory_data.bin,
+    # erroring out if the payload exceeds the declared partition size.
+    logger.info('Generating partition image: offset: 0x{:X} size: 0x{:X}'.format(args.offset, args.size))
+    cbor_data = cbor.dumps(NVS_MEMORY)
+    # Create hex file
+    if len(cbor_data) > args.size:
+        raise ValueError("generated CBOR file exceeds declared maximum partition size! {} > {}".format(len(cbor_data), args.size))
+    ih = IntelHex()
+    ih.putsz(args.offset, cbor_data)
+    ih.write_hex_file(os.sep.join([out_dirs['output'], 'factory_data.hex']), True)
+    ih.tobinfile(os.sep.join([out_dirs['output'], 'factory_data.bin']))
+
+
+def generate_json_summary(args, out_dirs, pai_certs, dacs_cert, serial_num: str):
+    # Write summary.json for the device: all non-binary staged values, the
+    # commissioning data read back from the generated CSVs, and the paths of
+    # the certificate/key artifacts.
+    json_dict = dict()
+
+    json_dict['serial_num'] = serial_num
+
+    # Binary blobs (certs, keys, salts) are skipped — not JSON-serializable.
+    for key, nvs_value in NVS_MEMORY.items():
+        if (not isinstance(nvs_value, bytes) and not isinstance(nvs_value, bytearray)):
+            json_dict[key] = nvs_value
+
+    with open(os.sep.join([out_dirs['output'], 'pin_disc.csv']), 'r') as csvf:
+        pin_disc_dict = csv.DictReader(csvf)
+        row = pin_disc_dict.__next__()
+        json_dict['passcode'] = row['PIN Code']
+        json_dict['spake2_salt'] = row['Salt']
+        json_dict['spake2_verifier'] = row['Verifier']
+
+    with open(os.sep.join([out_dirs['output'], 'onb_codes.csv']), 'r') as csvf:
+        pin_disc_dict = csv.DictReader(csvf)
+        row = pin_disc_dict.__next__()
+        for key, value in row.items():
+            json_dict[key] = value
+
+    # pai_certs/dacs_cert hold file-system paths, not certificate contents.
+    for key, value in pai_certs.items():
+        json_dict[key] = value
+
+    if dacs_cert is not None:
+        json_dict['dac_cert'] = dacs_cert[0]
+        json_dict['dac_priv_key'] = dacs_cert[1]
+        json_dict['dac_pub_key'] = dacs_cert[2]
+
+    json_dict['cert_dclrn'] = args.cert_dclrn
+
+    # Format vid & pid as hex
+    json_dict['vendor_id'] = hex(json_dict['vendor_id'])
+    json_dict['product_id'] = hex(json_dict['product_id'])
+
+    with open(os.sep.join([out_dirs['output'], 'summary.json']), 'w') as json_file:
+        json.dump(json_dict, json_file, indent=4)
+
+
+def add_additional_kv(args, serial_num):
+    # Stage the non-unique (per-model) factory-data fields taken straight from
+    # the command-line arguments, plus the serial number and data version.
+    # Device instance information
+    if args.vendor_id is not None:
+        nvs_memory_append('vendor_id', args.vendor_id)
+    if args.vendor_name is not None:
+        nvs_memory_append('vendor_name', args.vendor_name)
+    if args.product_id is not None:
+        nvs_memory_append('product_id', args.product_id)
+    if args.product_name is not None:
+        nvs_memory_append('product_name', args.product_name)
+    if args.hw_ver is not None:
+        nvs_memory_append('hw_ver', args.hw_ver)
+    if args.hw_ver_str is not None:
+        nvs_memory_append('hw_ver_str', args.hw_ver_str)
+    if args.mfg_date is not None:
+        nvs_memory_append('date', args.mfg_date)
+    # May stage None here when --rd-id-uid was omitted; a random UID is filled
+    # in later by write_device_unique_data.
+    if args.enable_rotating_device_id:
+        nvs_memory_append('rd_uid', args.rd_id_uid)
+
+    # Add the serial-num
+    nvs_memory_append('sn', serial_num)
+
+    nvs_memory_append('version', FACTORY_DATA_VERSION)
+
+    if args.enable_key:
+        nvs_memory_append('enable_key', args.enable_key)
+
+    # Keys from basic clusters
+    if args.product_label is not None:
+        nvs_memory_append('product_label', args.product_label)
+    if args.product_url is not None:
+        nvs_memory_append('product_url', args.product_url)
+    if args.part_number is not None:
+        nvs_memory_append('part_number', args.part_number)
+
+
+def get_and_validate_args():
+    # Build the CLI parser, parse the arguments and validate ranges and
+    # argument combinations; exits with an error message on invalid input.
+    def allow_any_int(i): return int(i, 0)
+    def base64_str(s): return base64.b64decode(s)
+
+    parser = argparse.ArgumentParser(description='Manufacturing partition generator tool',
+                                     formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=50))
+    # NOTE(review): these two groups are declared but never receive arguments.
+    mandatory_args = parser.add_argument_group("Mandatory keys", "These arguments must be provided to generate JSON file")
+    optional_args = parser.add_argument_group("Optional keys", "These arguments are optional and they depend on the user-purpose")
+
+    # General options
+    general_args = parser.add_argument_group('General options')
+    general_args.add_argument('-n', '--count', type=allow_any_int, default=1,
+                              help='The number of manufacturing partition binaries to generate. Default is 1.')
+    general_args.add_argument("--output", type=str, required=False, default="out",
+                              help="[string] Output path where generated data will be stored.")
+    general_args.add_argument("--spake2-path", type=str, required=False,
+                              help="[string] Provide Spake2+ tool path")
+    general_args.add_argument("--chip-tool-path", type=str, required=False,
+                              help="[string] Provide chip-tool path")
+    general_args.add_argument("--chip-cert-path", type=str, required=False,
+                              help="[string] Provide chip-cert path")
+    general_args.add_argument("--overwrite", action="store_true", default=False,
+                              help="If output directory exist this argument allows to generate new factory data and overwrite it.")
+    general_args.add_argument("--in-tree", action="store_true", default=False,
+                              help="Use it only when building factory data from Matter source code.")
+    general_args.add_argument("--enable-key", type=str,
+                              help="[hex string] [128-bit hex-encoded] The Enable Key is a 128-bit value that triggers manufacturer-specific action while invoking the TestEventTrigger Command."
+                              "This value is used during Certification Tests, and should not be present on production devices.")
+    # Commissioning options
+    commissioning_args = parser.add_argument_group('Commissioning options')
+    commissioning_args.add_argument('--passcode', type=allow_any_int,
+                                    help='The passcode for pairing. Randomly generated if not specified.')
+    commissioning_args.add_argument("--spake2-it", type=allow_any_int, default=1000,
+                                    help="[int] Provide Spake2+ iteration count.")
+    commissioning_args.add_argument('--discriminator', type=allow_any_int,
+                                    help='The discriminator for pairing. Randomly generated if not specified.')
+    commissioning_args.add_argument('-cf', '--commissioning-flow', type=allow_any_int, default=0,
+                                    help='Device commissioning flow, 0:Standard, 1:User-Intent, 2:Custom. \
+                                          Default is 0.', choices=[0, 1, 2])
+    commissioning_args.add_argument('-dm', '--discovery-mode', type=allow_any_int, default=1,
+                                    help='Commissionable device discovery networking technology. \
+                                          0:WiFi-SoftAP, 1:BLE, 2:On-network. Default is BLE.', choices=[0, 1, 2])
+
+    # Device instance information
+    dev_inst_args = parser.add_argument_group('Device instance information options')
+    dev_inst_args.add_argument('-v', '--vendor-id', type=allow_any_int, required=False, help='Vendor id')
+    dev_inst_args.add_argument('--vendor-name', type=str, required=False, help='Vendor name')
+    dev_inst_args.add_argument('-p', '--product-id', type=allow_any_int, required=False, help='Product id')
+    dev_inst_args.add_argument('--product-name', type=str, required=False, help='Product name')
+    dev_inst_args.add_argument('--hw-ver', type=allow_any_int, required=False, help='Hardware version')
+    dev_inst_args.add_argument('--hw-ver-str', type=str, required=False, help='Hardware version string')
+    dev_inst_args.add_argument('--mfg-date', type=str, required=False, help='Manufacturing date in format YYYY-MM-DD')
+    dev_inst_args.add_argument('--serial-num', type=str, required=False, help='Serial number in hex format')
+    dev_inst_args.add_argument('--enable-rotating-device-id', action='store_true',
+                               help='Enable Rotating device id in the generated binaries')
+    dev_inst_args.add_argument('--rd-id-uid', type=str, required=False,
+                               help='128-bit unique identifier for generating rotating device identifier, provide 32-byte hex string, e.g. "1234567890abcdef1234567890abcdef"')
+
+    dac_args = parser.add_argument_group('Device attestation credential options')
+    # If DAC is present then PAI key is not required, so it is marked as not required here
+    # but, if DAC is not present then PAI key is required and that case is validated in validate_args()
+    dac_args.add_argument('-c', '--cert', type=str, required=False, help='The input certificate file in PEM format.')
+    dac_args.add_argument('-k', '--key', type=str, required=False, help='The input key file in PEM format.')
+    dac_args.add_argument('-cd', '--cert-dclrn', type=str, required=True, help='The certificate declaration file in DER format.')
+    dac_args.add_argument('--dac-cert', type=str, help='The input DAC certificate file in PEM format.')
+    dac_args.add_argument('--dac-key', type=str, help='The input DAC private key file in PEM format.')
+    dac_args.add_argument('-cn', '--cn-prefix', type=str, default='Telink',
+                          help='The common name prefix of the subject of the generated certificate.')
+    dac_args.add_argument('-lt', '--lifetime', default=4294967295, type=allow_any_int,
+                          help='Lifetime of the generated certificate. Default is 4294967295 if not specified, \
+                                this indicate that certificate does not have well defined expiration date.')
+    dac_args.add_argument('-vf', '--valid-from', type=str,
+                          help='The start date for the certificate validity period in format <YYYY>-<MM>-<DD> [ <HH>:<MM>:<SS> ]. \
+                                Default is current date.')
+    input_cert_group = dac_args.add_mutually_exclusive_group(required=False)
+    input_cert_group.add_argument('--paa', action='store_true', help='Use input certificate as PAA certificate.')
+    input_cert_group.add_argument('--pai', action='store_true', help='Use input certificate as PAI certificate.')
+
+    basic_args = parser.add_argument_group('Few more Basic clusters options')
+    basic_args.add_argument('--product-label', type=str, required=False, help='Product label')
+    basic_args.add_argument('--product-url', type=str, required=False, help='Product URL')
+    # NOTE(review): '--part_number' breaks the dashed-flag convention used by every
+    # other option; kept as-is because renaming would break existing callers.
+    basic_args.add_argument('--part_number', type=str, required=False, help='Provide human-readable product number')
+
+    part_gen_args = parser.add_argument_group('Partition generator options')
+    part_gen_args.add_argument('--offset', type=allow_any_int, default=0x1F4000,
+                               help='Partition offset - an address in devices NVM memory, where factory data will be stored')
+    part_gen_args.add_argument('--size', type=allow_any_int, default=0x1000,
+                               help='The maximum partition size')
+
+    args = parser.parse_args()
+
+    # Validate in-tree parameter
+    if args.count > 1 and args.in_tree:
+        logger.error('Option --in-tree cannot be used together with --count > 1')
+        sys.exit(1)
+
+    # Validate discriminator and passcode
+    check_int_range(args.discriminator, 0x0000, 0x0FFF, 'Discriminator')
+    if args.passcode is not None:
+        # Fix: reject a passcode OUTSIDE [1, 0x5F5E0FE]; the original used 'and'
+        # ('< 1 and > 0x5F5E0FE'), which is never true, so any out-of-range
+        # passcode slipped through validation.
+        if ((args.passcode < 0x0000001 or args.passcode > 0x5F5E0FE) or (args.passcode in INVALID_PASSCODES)):
+            logger.error('Invalid passcode: ' + str(args.passcode))
+            sys.exit(1)
+
+    # Validate the device instance information
+    check_int_range(args.product_id, 0x0000, 0xFFFF, 'Product id')
+    check_int_range(args.vendor_id, 0x0000, 0xFFFF, 'Vendor id')
+    check_int_range(args.hw_ver, 0x0000, 0xFFFF, 'Hardware version')
+    check_int_range(args.spake2_it, 1, 10000, 'Spake2+ iteration count')
+    check_str_range(args.serial_num, 1, SERIAL_NUMBER_LEN, 'Serial number')
+    check_str_range(args.vendor_name, 1, 32, 'Vendor name')
+    check_str_range(args.product_name, 1, 32, 'Product name')
+    check_str_range(args.hw_ver_str, 1, 64, 'Hardware version string')
+    check_str_range(args.mfg_date, 8, 16, 'Manufacturing date')
+    check_str_range(args.rd_id_uid, 32, 32, 'Rotating device Unique id')
+
+    # Validates the attestation related arguments
+    # DAC key and DAC cert both should be present or none
+    if (args.dac_key is not None) != (args.dac_cert is not None):
+        logger.error("dac_key and dac_cert should be both present or none")
+        sys.exit(1)
+    else:
+        # Make sure PAI certificate is present if DAC is present
+        if (args.dac_key is not None) and (args.pai is False):
+            logger.error('Please provide PAI certificate along with DAC certificate and DAC key')
+            sys.exit(1)
+
+    # Validate the input certificate type, if DAC is not present
+    if args.dac_key is None and args.dac_cert is None:
+        if args.paa:
+            logger.info('Input Root certificate type PAA')
+        elif args.pai:
+            logger.info('Input Root certificate type PAI')
+        else:
+            logger.info('Do not include the device attestation certificates and keys in partition binaries')
+
+    # Check if Key and certificate are present
+    if (args.paa or args.pai) and (args.key is None or args.cert is None):
+        logger.error('CA key and certificate are required to generate DAC key and certificate')
+        sys.exit(1)
+
+    check_str_range(args.product_label, 1, 64, 'Product Label')
+    check_str_range(args.product_url, 1, 256, 'Product URL')
+    check_str_range(args.part_number, 1, 32, 'Part Number')
+
+    return args
+
+
+def main():
+    # Entry point: validate args/tools, then generate one factory-data set per
+    # requested device, recording every serial number in device_sn.csv.
+    logger.basicConfig(format='[%(asctime)s] [%(levelname)7s] - %(message)s', level=logger.INFO)
+    args = get_and_validate_args()
+    check_tools_exists(args)
+
+    if os.path.exists(args.output):
+        if args.overwrite:
+            logger.info("Output directory already exists. All data will be overwritten.")
+            shutil.rmtree(args.output)
+        else:
+            logger.error("Output directory exists! Please use different or remove existing.")
+            sys.exit(1)
+
+    # If serial number is not passed, then generate one
+    if args.serial_num is None:
+        # Fix: SERIAL_NUMBER_LEN is a limit in hex characters, and urandom(n)
+        # yields 2*n hex digits — halve it so the generated serial respects the
+        # same 32-character limit enforced on user-supplied serial numbers.
+        serial_num_int = int(binascii.b2a_hex(os.urandom(SERIAL_NUMBER_LEN // 2)), 16)
+        logger.info("Serial number not provided. Using generated one: {}".format(hex(serial_num_int)))
+    else:
+        serial_num_int = int(args.serial_num, 16)
+
+    out_dir_top = os.path.realpath(args.output)
+    os.makedirs(out_dir_top, exist_ok=True)
+
+    # Context manager guarantees the CSV is flushed/closed even if one of the
+    # per-device generation steps calls sys.exit() or raises.
+    with open(os.sep.join([out_dir_top, "device_sn.csv"]), "w") as dev_sn_file:
+        dev_sn_file.write(DEV_SN_CSV_HDR)
+
+        for i in range(args.count):
+            pai_cert = {}
+            # Consecutive devices get consecutive serial numbers.
+            serial_num_str = format(serial_num_int + i, 'x')
+            logger.info("Generating for {}".format(serial_num_str))
+            dev_sn_file.write(serial_num_str + '\n')
+            out_dirs = setup_out_dir(out_dir_top, args, serial_num_str)
+            add_additional_kv(args, serial_num_str)
+            generate_passcode(args, out_dirs)
+            generate_discriminator(args, out_dirs)
+            if args.paa or args.pai:
+                pai_cert = setup_root_certificates(args, out_dirs)
+            dacs_cert = write_device_unique_data(args, out_dirs, pai_cert)
+            generate_partition(args, out_dirs)
+            generate_json_summary(args, out_dirs, pai_cert, dacs_cert, serial_num_str)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/tools/telink/readme.md b/scripts/tools/telink/readme.md
new file mode 100644
index 0000000..d3539b5
--- /dev/null
+++ b/scripts/tools/telink/readme.md
@@ -0,0 +1,212 @@
+# Manufacturing Partition Generator Utility
+
+This tool is designed to generate factory partitions for mass production.
+
+## Dependencies
+
+Please make sure you have the following tools installed before using the
+generator tool.
+
+- [CHIP Certificate Tool](https://github.com/project-chip/connectedhomeip/tree/master/src/tools/chip-cert)
+
+- [SPAKE2P Parameters Tool](https://github.com/project-chip/connectedhomeip/tree/master/src/tools/spake2p)
+
+- [chip-tool](https://github.com/project-chip/connectedhomeip/tree/master/examples/chip-tool)
+
+### [Build Matter tools](https://github.com/project-chip/connectedhomeip/blob/master/docs/guides/BUILDING.md#build-for-the-host-os-linux-or-macos)
+
+1. Using the following commands to generate chip-tool, spake2p and chip-cert at
+ `path/to/connectedhomeip/build/out/host`.
+
+ ```shell
+ cd path/to/connectedhomeip
+ source scripts/activate.sh
+ gn gen build/out/host
+ ninja -C build/out/host
+ ```
+
+2. Add the tools path to \$PATH
+
+ ```shell
+ export PATH="$PATH:path/to/connectedhomeip/build/out/host"
+ ```
+
+### Install python dependencies
+
+```shell
+cd path/to/connectedhomeip/scripts/tools/telink/
+python3 -m pip install -r requirements.txt
+```
+
+## Usage
+
+The following commands generate factory partitions using the default testing PAI
+keys, certificates, and CD from the Matter project. In real production, you
+should replace them with your own.
+
+### Generate a factory partition
+
+```shell
+python3 mfg_tool.py -v 0xFFF2 -p 0x8001 \
+--serial-num AABBCCDDEEFF11223344556677889900 \
+--vendor-name "Telink Semiconductor" \
+--product-name "not-specified" \
+--mfg-date 2022-12-12 \
+--hw-ver 1 \
+--hw-ver-str "prerelease" \
+--pai \
+--key /path/to/connectedhomeip/credentials/test/attestation/Chip-Test-PAI-FFF2-8001-Key.pem \
+--cert /path/to/connectedhomeip/credentials/test/attestation/Chip-Test-PAI-FFF2-8001-Cert.pem \
+-cd /path/to/connectedhomeip/credentials/test/certification-declaration/Chip-Test-CD-FFF2-8001.der \
+--spake2-path /path/to/spake2p \
+--chip-tool-path /path/to/chip-tool \
+--chip-cert-path /path/to/chip-cert
+```
+
+### Generate 5 factory partitions [Optional argument : --count]
+
+```shell
+python3 mfg_tool.py --count 5 -v 0xFFF2 -p 0x8001 \
+--serial-num AABBCCDDEEFF11223344556677889900 \
+--vendor-name "Telink Semiconductor" \
+--product-name "not-specified" \
+--mfg-date 2022-02-02 \
+--hw-ver 1 \
+--hw-ver-str "prerelease" \
+--pai \
+--key /path/to/connectedhomeip/credentials/test/attestation/Chip-Test-PAI-FFF2-8001-Key.pem \
+--cert /path/to/connectedhomeip/credentials/test/attestation/Chip-Test-PAI-FFF2-8001-Cert.pem \
+-cd /path/to/connectedhomeip/credentials/test/certification-declaration/Chip-Test-CD-FFF2-8001.der \
+--spake2-path /path/to/spake2p \
+--chip-tool-path /path/to/chip-tool \
+--chip-cert-path /path/to/chip-cert
+```
+
+## Output files and directory structure
+
+```
+out
+├── device_sn.csv
+└── fff2_8001
+ ├── aabbccddeeff11223344556677889900
+ │ ├── factory_data.bin
+ │ ├── factory_data.hex
+ │ ├── internal
+ │ │ ├── DAC_cert.der
+ │ │ ├── DAC_cert.pem
+ │ │ ├── DAC_key.pem
+ │ │ ├── DAC_private_key.bin
+ │ │ ├── DAC_public_key.bin
+ │ │ └── pai_cert.der
+ │ ├── onb_codes.csv
+ │ ├── pin_disc.csv
+ │ ├── qrcode.png
+ │ └── summary.json
+ ├── aabbccddeeff11223344556677889901
+ │ ├── factory_data.bin
+ │ ├── factory_data.hex
+ │ ├── internal
+ │ │ ├── DAC_cert.der
+ │ │ ├── DAC_cert.pem
+ │ │ ├── DAC_key.pem
+ │ │ ├── DAC_private_key.bin
+ │ │ ├── DAC_public_key.bin
+ │ │ └── pai_cert.der
+ │ ├── onb_codes.csv
+ │ ├── pin_disc.csv
+ │ ├── qrcode.png
+ │ └── summary.json
+ ├── aabbccddeeff11223344556677889902
+ │ ├── factory_data.bin
+ │ ├── factory_data.hex
+ │ ├── internal
+ │ │ ├── DAC_cert.der
+ │ │ ├── DAC_cert.pem
+ │ │ ├── DAC_key.pem
+ │ │ ├── DAC_private_key.bin
+ │ │ ├── DAC_public_key.bin
+ │ │ └── pai_cert.der
+ │ ├── onb_codes.csv
+ │ ├── pin_disc.csv
+ │ ├── qrcode.png
+ │ └── summary.json
+ └── aabbccddeeff11223344556677889903
+ ├── factory_data.bin
+ ├── factory_data.hex
+ ├── internal
+ │ ├── DAC_cert.der
+ │ ├── DAC_cert.pem
+ │ ├── DAC_key.pem
+ │ ├── DAC_private_key.bin
+ │ ├── DAC_public_key.bin
+ │ └── pai_cert.der
+ ├── onb_codes.csv
+ ├── pin_disc.csv
+ ├── qrcode.png
+ └── summary.json
+```
+
+The tool generates the following output files:
+
+- Partition Binary : `factory_data.bin` and `factory_data.hex`
+- Partition JSON : `summary.json`
+- Onboarding codes : `onb_codes.csv`
+- QR Code image : `qrcode.png`
+
+Other intermediate files are stored in `internal/` directory:
+
+- PAI Certificate : `pai_cert.der`
+- DAC Certificates : `DAC_cert.der` and `DAC_cert.pem`
+- DAC Private Key : `DAC_private_key.bin`
+- DAC Public Key : `DAC_public_key.bin`
+
+The above files are stored at `out/<vid_pid>/<SN>`. Each device is identified
+by a unique SN.
+
+## Flashing the factory partition FW into Matter App
+
+You can try out one of these factory partition FWs during the development stage.
+
+1. Prepare a Matter App FW with empty factory data partition.
+
+ For example, `lighting-app`. Please generate the FW as below:
+
+ ```shell
+    cd path/to/connectedhomeip/examples/lighting-app/telink/
+ west build -- -DCONFIG_CHIP_FACTORY_DATA=y
+ ```
+
+ The output FW is stored at `./build/zephyr/zephyr.bin`.
+
+2. Then flash Matter App FW onto B91 board.
+
+3. Then flash the `factory_data.bin` generated from the generator tool at
+ specific address:
+
+ > Note: The offset for Matter
+ > [v1.0-branch](https://github.com/telink-semi/zephyr/blob/telink_matter_v1.0-branch/boards/riscv/tlsr9518adk80d/tlsr9518adk80d.dts)
+ > is `0xF4000` and for
+ > [master branch](https://github.com/telink-semi/zephyr/blob/telink_matter/boards/riscv/tlsr9518adk80d/tlsr9518adk80d.dts)
+ > is `0x1F4000`. You can check the `factory_partition` reg at
+ > `tlsr9518adk80d.dts` for details.
+
+    For example, the `factory_data.bin` with serial number
+ `aabbccddeeff11223344556677889900`. Here is the expected output in logging:
+
+ ```shell
+ ...
+ I: 947 [DL]Device Configuration:
+ I: 951 [DL] Serial Number: aabbccddeeff11223344556677889900
+ I: 957 [DL] Vendor Id: 65522 (0xFFF2)
+ I: 961 [DL] Product Id: 32769 (0x8001)
+ I: 965 [DL] Hardware Version: 1
+ I: 969 [DL] Setup Pin Code (0 for UNKNOWN/ERROR): 93320241
+ I: 975 [DL] Setup Discriminator (0xFFFF for UNKNOWN/ERROR): 3008 (0xBC0)
+ I: 983 [DL] Manufacturing Date: 2022-02-02
+ I: 988 [DL] Device Type: 65535 (0xFFFF)
+ I: 993 [SVR]SetupQRCode: [MT:634J042C00O-KB7Z-10]
+ I: 999 [SVR]Copy/paste the below URL in a browser to see the QR Code:
+ I: 1006 [SVR]https://project-chip.github.io/connectedhomeip/qrcode.html?data=MT%3A634J042C00O-KB7Z-10
+ I: 1017 [SVR]Manual pairing code: [26251356956]
+ ...
+ ```
diff --git a/scripts/tools/telink/requirements.txt b/scripts/tools/telink/requirements.txt
new file mode 100644
index 0000000..6832340
--- /dev/null
+++ b/scripts/tools/telink/requirements.txt
@@ -0,0 +1,6 @@
+cryptography==36.0.2
+cffi==1.15.0
+future==0.18.2
+pycparser==2.21
+pypng==0.0.21
+PyQRCode==1.2.1
\ No newline at end of file
diff --git a/scripts/tools/telink/telink_factory_data.schema b/scripts/tools/telink/telink_factory_data.schema
deleted file mode 100644
index 561bf4d..0000000
--- a/scripts/tools/telink/telink_factory_data.schema
+++ /dev/null
@@ -1,164 +0,0 @@
-{
- "$id": "Telink_Factory_Data_schema",
- "$schema": "https://json-schema.org/draft/2020-12/schema",
- "description": "A representation of all factory data used in Matter's Telink device",
- "type": "object",
- "required": [
- "version",
- "sn",
- "vendor_id",
- "product_id",
- "vendor_name",
- "product_name",
- "date",
- "hw_ver",
- "hw_ver_str",
- "dac_cert",
- "dac_key",
- "pai_cert",
- "spake2_it",
- "spake2_salt",
- "spake2_verifier",
- "discriminator"
- ],
- "properties": {
- "version": {
- "description": "Current version of the factory data set",
- "type": "integer",
- "minimum": 0,
- "maximum": 255
- },
- "sn": {
- "description": "Serial number of device",
- "type": "string",
- "maxLength": 32
- },
- "vendor_id": {
- "description": "Vendor Identifier",
- "type": "integer",
- "minimum": 0,
- "maximum": 65524
- },
- "product_id": {
- "description": "Product Identifier",
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "vendor_name": {
- "description": "human-readable vendor name",
- "type": "string",
- "maxLength": 32
- },
- "product_name": {
- "description": "human-readable product name",
- "type": "string",
- "maxLength": 32
- },
- "product_label": {
- "description": "more user-friendly human-readable product name",
- "type": "string",
- "maxLength": 64
- },
- "product_url": {
- "description": "link to product-specific web page",
- "type": "string",
- "maxLength": 256
- },
- "part_number": {
- "description": "human-readable vendor assigned part number",
- "type": "string",
- "maxLength": 32
- },
- "date": {
- "description": "Manufacturing date according to ISO 8601 in notation YYYY-MM-DD",
- "type": "string",
- "format": "date",
- "minLength": 10,
- "maxLength": 10,
- "pattern": "^\\d{4}-\\d{2}-\\d{2}$"
- },
- "hw_ver": {
- "description": "Hardware version - integer",
- "type": "integer",
- "minimum": 0,
- "maximum": 65536
- },
- "hw_ver_str": {
- "description": "A string representation of hardware version",
- "type": "string",
- "minLength": 1,
- "maxLength": 64
- },
- "rd_uid": {
- "description": "A randomly-generated 128-bit or longer octet string. Length has been expanded with 'hex:' prefix",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2}){16,}$",
- "minLength": 20,
- "minLength": 36
- },
- "dac_cert": {
- "description": "DAC certificate in hex-string format",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2})+$",
- "minLength": 6,
- "maxLength": 1204
- },
- "dac_key": {
- "description": "DAC Private Key in hex-string format",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2}){32}$",
- "minLength": 68,
- "maxLength": 68
- },
- "pai_cert": {
- "description": "PAI certificate in hex-string format",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2})+$",
- "minLength": 6,
- "maxLength": 1204
- },
- "passcode": {
- "description": "A default PASE session passcode",
- "type": "integer",
- "minimum": 1,
- "maximum": 99999998
- },
- "spake2_it": {
- "description": "An Iteration counter for the Symmetric Password-Authenticated Key Exchange",
- "type": "integer",
- "minimum": 1000,
- "maximum": 100000
- },
- "spake2_salt": {
- "description": "A key-derivation function for the Symmetric Password-Authenticated Key Exchange.",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2})+$",
- "minLength": 36,
- "maxLength": 68
- },
- "spake2_verifier": {
- "description": "A verifier for the Symmetric Password-Authenticated Key Exchange",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2})+$",
- "minLength": 97
- },
- "discriminator": {
- "description": "The Discriminator value helps to further identify potential devices during the setup process.",
- "type": "integer",
- "minimum": 0,
- "maximum": 4095
- },
- "enable_key": {
- "description": "The Enable Key is a 128-bit value that triggers manufacturer-specific action while invoking the TestEventTrigger Command",
- "type": "string",
- "pattern": "^hex:([0-9A-Fa-f]{2}){16}$",
- "minLength": 36,
- "maxLength": 36
- },
- "user": {
- "description": "A user-specific additional data which should be added to factory data. This should be a Json format.",
- "type": "object"
- }
- }
-}
diff --git a/scripts/tools/telink/telink_generate_partition.py b/scripts/tools/telink/telink_generate_partition.py
deleted file mode 100644
index ddd3b63..0000000
--- a/scripts/tools/telink/telink_generate_partition.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (c) 2022 Project CHIP Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import codecs
-import sys
-from intelhex import IntelHex
-import argparse
-import json
-import logging as log
-import cbor2 as cbor
-
-HEX_PREFIX = "hex:"
-
-
-class PartitionCreator:
- """
- Class to create telink partition containing FactoryData
-
- :param offset: This is a partition offset where data will be stored in device's flash memory
- :param length: This is a maximum partition size
- :param input: This is a path to input JSON file
- :param output: This is a path to output directory
-
- """
-
- def __init__(self, offset: int, length: int, input: str, output: str) -> None:
- self._ih = IntelHex()
- self._length = length
- self._offset = offset
- self._data_ready = False
- self._output = output
- self._input = input
- try:
- self.__data_to_save = self._convert_to_dict(self._load_json())
- except IOError:
- sys.exit(-1)
-
- def generate_cbor(self):
- """
- Generates .cbor file using cbor2 library.
- It generate a CBORTag 55799 which is user-specific tag
-
- """
- if self.__data_to_save:
- # prepare raw data from Json
- cbor_data = cbor.dumps(self.__data_to_save)
- return cbor_data
-
- def create_hex(self, data: bytes):
- """
- Creates .hex file from CBOR.
- This file can be write directly to device.
-
- """
- if len(data) > self._length:
- raise ValueError("generated CBOR file exceeds declared maximum partition size! {} > {}".format(len(data), self._length))
- self._ih.putsz(self._offset, data)
- self._ih.write_hex_file(self._output + ".hex", True)
- self._data_ready = True
- return True
-
- def create_bin(self):
- """
- Creates raw binary data of created previously .hex file
-
- """
- if not self._data_ready:
- log.error("Please create hex file first!")
- return False
- self._ih.tobinfile(self._output + ".bin")
- return True
-
- @staticmethod
- def _convert_to_dict(data):
- """
- Converts a list containing tuples ("key_name", "key_value") to a dictionary
-
- If "key_value" of data entry is a string-type variable and contains a HEX_PREFIX algorithm decodes it
- to hex format to be sure that a cbor file will contain proper bytes.
-
- If "key_value" of data entry is a dictionary, algorithm appends it to the created dictionary.
- """
- output_dict = dict()
- for entry in data:
- if not isinstance(entry, dict):
- log.debug("Processing entry {}".format(entry))
- if isinstance(data[entry], str) and data[entry].startswith(HEX_PREFIX):
- output_dict[entry] = codecs.decode(data[entry][len(HEX_PREFIX):], "hex")
- elif isinstance(data[entry], str):
- output_dict[entry] = data[entry].encode("utf-8")
- else:
- output_dict[entry] = data[entry]
- else:
- output_dict[entry] = entry
- return output_dict
-
- def _load_json(self):
- """
- Loads file containing a JSON data and converts it to JSON format
-
- :raises IOError: if provided JSON file can not be read out.
- """
- try:
- with open(self._input, "rb") as json_file:
- return json.loads(json_file.read())
- except IOError as e:
- log.error("Can not read Json file {}".format(self._input))
- raise e
-
-
-def print_flashing_help():
- print("\nTo flash the generated hex/bin containing factory data, use BDT tool")
-
-
-def main():
-
- def allow_any_int(i): return int(i, 0)
-
- parser = argparse.ArgumentParser(description="Telink Factory Data NVS partition generator tool")
- parser.add_argument("-i", "--input", type=str, required=True,
- help="Path to input .json file")
- parser.add_argument("-o", "--output", type=str, required=True,
- help="Prefix for output file paths, e.g. setting dir/output causes creation of the following files: dir/output.hex, and dir/output.bin")
- parser.add_argument("--offset", type=allow_any_int, required=True,
- help="Partition offset - an address in device's NVM memory, where factory data will be stored")
- parser.add_argument("--size", type=allow_any_int, required=True,
- help="The maximum partition size")
- parser.add_argument("-v", "--verbose", action="store_true",
- help="Run this script with DEBUG logging level")
- parser.add_argument("-r", "--raw", action="store_true",
- help="Do not print flashing help and other logs, only generate a .hex file. It can be useful when the script is used by other script.")
- args = parser.parse_args()
-
- if args.verbose:
- log.basicConfig(format='[%(asctime)s][%(levelname)s] %(message)s', level=log.DEBUG)
- elif args.raw:
- log.basicConfig(format='%(message)s', level=log.ERROR)
- else:
- log.basicConfig(format='[%(asctime)s] %(message)s', level=log.INFO)
-
- partition_creator = PartitionCreator(args.offset, args.size, args.input, args.output)
- cbor_data = partition_creator.generate_cbor()
- try:
- if not args.raw:
- print("Generating .hex file: {}.hex with offset: {} and size: {}".format(args.output, hex(args.offset), hex(args.size)))
- if partition_creator.create_hex(cbor_data) and partition_creator.create_bin():
- if not args.raw:
- print_flashing_help()
- except ValueError as e:
- log.error(e)
- sys.exit(-1)
-
-
-if __name__ == "__main__":
- main()