From 1be78b9bb91a8da73e982d4e7f95bbe9cbee1248 Mon Sep 17 00:00:00 2001 From: Sam_Aleksov Date: Wed, 6 Oct 2021 16:42:26 +0300 Subject: [PATCH] adding poc support for microbit v2 --- Firmware/Microbit_v2/.clang-format | 8 + Firmware/Microbit_v2/.gitignore | 16 + Firmware/Microbit_v2/.yotta_ignore | 2 + Firmware/Microbit_v2/AUTHORS | 8 + Firmware/Microbit_v2/CMakeLists.txt | 265 ++++ Firmware/Microbit_v2/Dockerfile | 18 + Firmware/Microbit_v2/LICENSE | 21 + Firmware/Microbit_v2/README.md | 57 + Firmware/Microbit_v2/build.py | 168 +++ Firmware/Microbit_v2/codal.json | 21 + Firmware/Microbit_v2/module.json | 15 + Firmware/Microbit_v2/source/main.cpp | 74 + Firmware/Microbit_v2/utils/__init__.py | 0 .../Microbit_v2/utils/cmake/JSONParser.cmake | 309 ++++ .../utils/cmake/buildtools/codal.cmake | 85 ++ .../utils/cmake/buildtools/yotta.cmake | 23 + .../Microbit_v2/utils/cmake/colours.cmake | 19 + .../toolchains/ARM_GCC/bin-generator.cmake | 9 + .../toolchains/ARM_GCC/compiler-flags.cmake | 49 + .../toolchains/ARM_GCC/hex-generator.cmake | 9 + .../toolchains/ARM_GCC/platform_includes.h | 10 + .../cmake/toolchains/ARM_GCC/toolchain.cmake | 26 + .../toolchains/AVR_GCC/bin-generator.cmake | 9 + .../toolchains/AVR_GCC/compiler-flags.cmake | 43 + .../toolchains/AVR_GCC/hex-generator.cmake | 9 + .../toolchains/AVR_GCC/platform_includes.h | 14 + .../cmake/toolchains/AVR_GCC/toolchain.cmake | 29 + .../toolchains/XTENSA_GCC/bin-generator.cmake | 9 + .../XTENSA_GCC/compiler-flags.cmake | 43 + .../toolchains/XTENSA_GCC/hex-generator.cmake | 9 + .../toolchains/XTENSA_GCC/platform_includes.h | 10 + .../toolchains/XTENSA_GCC/toolchain.cmake | 26 + Firmware/Microbit_v2/utils/cmake/util.cmake | 156 ++ Firmware/Microbit_v2/utils/debug/dmesg.js | 86 ++ Firmware/Microbit_v2/utils/debug/meminfo.js | 65 + Firmware/Microbit_v2/utils/esptool.py | 1274 +++++++++++++++++ .../Microbit_v2/utils/generate_libraries.py | 159 ++ Firmware/Microbit_v2/utils/merge_hex.py | 93 ++ 
Firmware/Microbit_v2/utils/python/__init__.py | 0 .../Microbit_v2/utils/python/codal_utils.py | 186 +++ .../utils/python/doc_gen/__init__.py | 0 .../utils/python/doc_gen/doc_gen.py | 93 ++ .../utils/python/doc_gen/doxygen_extractor.py | 242 ++++ .../utils/python/doc_gen/md_converter.py | 242 ++++ .../utils/python/doc_gen/system_utils.py | 137 ++ Firmware/Microbit_v2/utils/targets.json | 105 ++ Firmware/Microbit_v2/utils/uf2conv.py | 172 +++ 47 files changed, 4423 insertions(+) create mode 100644 Firmware/Microbit_v2/.clang-format create mode 100644 Firmware/Microbit_v2/.gitignore create mode 100644 Firmware/Microbit_v2/.yotta_ignore create mode 100644 Firmware/Microbit_v2/AUTHORS create mode 100644 Firmware/Microbit_v2/CMakeLists.txt create mode 100644 Firmware/Microbit_v2/Dockerfile create mode 100644 Firmware/Microbit_v2/LICENSE create mode 100644 Firmware/Microbit_v2/README.md create mode 100644 Firmware/Microbit_v2/build.py create mode 100644 Firmware/Microbit_v2/codal.json create mode 100644 Firmware/Microbit_v2/module.json create mode 100644 Firmware/Microbit_v2/source/main.cpp create mode 100644 Firmware/Microbit_v2/utils/__init__.py create mode 100644 Firmware/Microbit_v2/utils/cmake/JSONParser.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/buildtools/codal.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/buildtools/yotta.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/colours.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/platform_includes.h create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/toolchain.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake create mode 
100644 Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/platform_includes.h create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/toolchain.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h create mode 100644 Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake create mode 100644 Firmware/Microbit_v2/utils/cmake/util.cmake create mode 100644 Firmware/Microbit_v2/utils/debug/dmesg.js create mode 100644 Firmware/Microbit_v2/utils/debug/meminfo.js create mode 100644 Firmware/Microbit_v2/utils/esptool.py create mode 100644 Firmware/Microbit_v2/utils/generate_libraries.py create mode 100644 Firmware/Microbit_v2/utils/merge_hex.py create mode 100644 Firmware/Microbit_v2/utils/python/__init__.py create mode 100644 Firmware/Microbit_v2/utils/python/codal_utils.py create mode 100644 Firmware/Microbit_v2/utils/python/doc_gen/__init__.py create mode 100644 Firmware/Microbit_v2/utils/python/doc_gen/doc_gen.py create mode 100644 Firmware/Microbit_v2/utils/python/doc_gen/doxygen_extractor.py create mode 100644 Firmware/Microbit_v2/utils/python/doc_gen/md_converter.py create mode 100644 Firmware/Microbit_v2/utils/python/doc_gen/system_utils.py create mode 100644 Firmware/Microbit_v2/utils/targets.json create mode 100644 Firmware/Microbit_v2/utils/uf2conv.py diff --git a/Firmware/Microbit_v2/.clang-format b/Firmware/Microbit_v2/.clang-format new file mode 100644 index 0000000..9854c77 --- /dev/null +++ 
b/Firmware/Microbit_v2/.clang-format @@ -0,0 +1,8 @@ +BasedOnStyle: LLVM +IndentWidth: 4 +UseTab: Never +ColumnLimit: 100 +BreakBeforeBraces: Allman +AccessModifierOffset: -4 +AllowShortFunctionsOnASingleLine: Inline +SortIncludes: false diff --git a/Firmware/Microbit_v2/.gitignore b/Firmware/Microbit_v2/.gitignore new file mode 100644 index 0000000..161c360 --- /dev/null +++ b/Firmware/Microbit_v2/.gitignore @@ -0,0 +1,16 @@ +build +libraries +.yotta.json +yotta_modules +yotta_targets +*.swp +*~ +Makefile +*.hex +*.DS_Store +.vscode +*.uf2 +*.bin +pxtapp +buildcache.json +*.pyc diff --git a/Firmware/Microbit_v2/.yotta_ignore b/Firmware/Microbit_v2/.yotta_ignore new file mode 100644 index 0000000..2dceefc --- /dev/null +++ b/Firmware/Microbit_v2/.yotta_ignore @@ -0,0 +1,2 @@ +# build.py uses CMake as well and this top level file is not Yotta compatible +CMakeLists.txt diff --git a/Firmware/Microbit_v2/AUTHORS b/Firmware/Microbit_v2/AUTHORS new file mode 100644 index 0000000..cbbd23e --- /dev/null +++ b/Firmware/Microbit_v2/AUTHORS @@ -0,0 +1,8 @@ +Joe Finney (@finneyj) +James Devine (@jamesadevine) +Martin Wiliams (@martinwork) +MichaƂ Moskal (@mmoskal) +Raphael Gault (@raphaelgault) +Sam Kent (@microbit-sam) +Jonny Auston (@jaustin) +Sam Aleksov (@samaleksov) diff --git a/Firmware/Microbit_v2/CMakeLists.txt b/Firmware/Microbit_v2/CMakeLists.txt new file mode 100644 index 0000000..1a049f2 --- /dev/null +++ b/Firmware/Microbit_v2/CMakeLists.txt @@ -0,0 +1,265 @@ +# The MIT License (MIT) + +# Copyright (c) 2017 Lancaster University. 
+ +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +cmake_minimum_required(VERSION 3.3) + +# include additional cmake +include(utils/cmake/JSONParser.cmake) +include(utils/cmake/util.cmake) +include(utils/cmake/colours.cmake) + +if (NOT "${BUILD_TOOL}" STRGREATER "") + set(BUILD_TOOL "CODAL") +endif() + +# +# Supress unecessary (and often inaccurate) validity check of the toolchain +# +set(CMAKE_C_COMPILER_WORKS 1) +set(CMAKE_CXX_COMPILER_WORKS 1) + + +#read our config file... 
+file(READ "./codal.json" codal_json) +sbeParseJson(codal codal_json) + +set(CODAL_APP_OUTPUT_DIR ".") +set(CODAL_APP_SOURCE_DIR "source") + +if("${codal.application}" STRGREATER "") + set(CODAL_APP_SOURCE_DIR "${codal.application}") +endif() + +if("${codal.output_folder}" STRGREATER "") + set(CODAL_APP_OUTPUT_DIR "${codal.output_folder}") +endif() + +if(NOT "${codal.target.name}" STRGREATER "") + message(FATAL_ERROR "${BoldRed}INVALID TARGET.${ColourReset}") +endif() + +set(CODAL_DEPS "") +set(LIB_DEST "libraries") + +#install the target +INSTALL_DEPENDENCY(${LIB_DEST} ${codal.target.name} ${codal.target.url} ${codal.target.branch} ${codal.target.type}) +message("${BoldMagenta}Set target: ${codal.target.name} ${ColourReset}") +list(APPEND CODAL_DEPS ${codal.target.name}) + +if("${codal.target.dev}" STRGREATER "") + file(READ "./${LIB_DEST}/${codal.target.name}/target.json" device_json) + message("${BoldMagenta}Using target.json (dev version) ${ColourReset}") +else() + file(READ "./${LIB_DEST}/${codal.target.name}/target-locked.json" device_json) + message("${BoldMagenta}Using target-locked.json${ColourReset}") +endif() + +message("${BoldBlue}Targeting ${codal.target.name}${ColourReset}") + +sbeParseJson(device device_json) + +SET(CODAL_TARGET_NAME ${device.target.name}) +SET(CODAL_OUTPUT_NAME ${device.device}) +SET(CODAL_TARGET_PROCESSOR ${device.processor}) +SET(CODAL_TARGET_CPU_ARCHITECTURE ${device.architecture}) + +# if this is the first build, lets copy a sample main.cpp from the target if available. 
+if(NOT EXISTS ${CMAKE_CURRENT_LIST_DIR}/${CODAL_APP_SOURCE_DIR} AND EXISTS ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples/main.cpp) + FILE(COPY ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples/main.cpp DESTINATION ${CMAKE_CURRENT_LIST_DIR}/${CODAL_APP_SOURCE_DIR}) +endif() + +#copy samples and remove main.cpp +if(NOT EXISTS ${CMAKE_CURRENT_LIST_DIR}/samples AND EXISTS ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples/) + FILE(COPY ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples DESTINATION ${CMAKE_CURRENT_LIST_DIR}) + FILE(REMOVE ${CMAKE_CURRENT_LIST_DIR}/samples/main.cpp) +endif() + +#################### + +SET(TOOLCHAIN ${device.toolchain}) +SET(TOOLCHAIN_FOLDER "./utils/cmake/toolchains/${device.toolchain}") + +# include toolchain file +set(CMAKE_TOOLCHAIN_FILE "${TOOLCHAIN_FOLDER}/toolchain.cmake" CACHE PATH "toolchain file") + +# required to force TOOLCHAIN settings... +project(codal) +enable_language(ASM) + +# include compiler flags overrides +include(${TOOLCHAIN_FOLDER}/compiler-flags.cmake) +set(PLATFORM_INCLUDES_PATH "${PROJECT_SOURCE_DIR}/utils/cmake/toolchains/${device.toolchain}") + +file(MAKE_DIRECTORY "${PROJECT_SOURCE_DIR}/build") + +# configure output directories +set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_SOURCE_DIR}/build") +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${PROJECT_SOURCE_DIR}/build") + +SET(CODAL_DEFINITIONS "") + +EXTRACT_JSON_ARRAY(codal "codal\.config\." CODAL_FIELDS CODAL_VALUES) +EXTRACT_JSON_ARRAY(device "device\.config\." DEVICE_FIELDS DEVICE_VALUES) +UNIQUE_JSON_KEYS(CODAL_FIELDS CODAL_VALUES DEVICE_FIELDS DEVICE_VALUES FINAL_FIELDS FINAL_VALUES) +FORM_DEFINITIONS(FINAL_FIELDS FINAL_VALUES CODAL_DEFINITIONS) + +# extract any CMAKE definitions specified in the target.json object, and set as native cmake vars +# cmake definitions require special handling as types are not safe in cmake, any semi-colon would need escaped, which would be ugly. 
+foreach(var ${device}) + #if it is not prefixed by codal.cmake_definitions, do not consider the key. + + if(NOT "${var}" MATCHES "device\.cmake_definitions\.") + continue() + endif() + + string(REGEX MATCH "[^device\.cmake_definitions\.]([A-Z,a-z,0-9,_,]+)" CODAL_CMAKE_DEFINITION "${var}") + + set(${CODAL_CMAKE_DEFINITION} ${${var}}) + endforeach() + +#define any additional symbols specified by the target. +if("${device.definitions}" STRGREATER "") + add_definitions("${device.definitions}") +endif() + +#################### +# optional JSON flags for compilation + assembly +################### +if("${device.cpu_opts}" STRGREATER "") + set(_CPU_COMPILATION_OPTIONS "${device.cpu_opts}") + set(CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${device.cpu_opts}") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${device.cpu_opts}") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${device.cpu_opts}") + set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} ${device.cpu_opts}") +endif() + +set(_C_FAMILY_FLAGS_INIT "-fno-exceptions -fno-unwind-tables -ffunction-sections -fdata-sections -Wall -Wextra -Wno-unused-parameter") + +# asm +if("${device.asm_flags}" STRGREATER "") + set(CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${device.asm_flags}") +endif() + +# c +if("${device.c_flags}" STRGREATER "") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${_C_FAMILY_FLAGS_INIT} ${device.c_flags}") + set(CMAKE_C_LINK_FLAGS "${CMAKE_C_LINK_FLAGS} ${device.c_flags}") +endif() + +# cpp +if("${device.cpp_flags}" STRGREATER "") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${_C_FAMILY_FLAGS_INIT} ${device.cpp_flags}") + set(CMAKE_CXX_LINK_FLAGS "${device.cpp_flags}") +endif() + +# linker opts +if("${device.linker_flags}" STRGREATER "") + set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} ${device.linker_flags}") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${device.linker_flags}") +endif() + +# create a header file from the definitions specified in JSON +if("${CODAL_DEFINITIONS}" STRGREATER "") + set(EXTRA_INCLUDES_NEW_PATH 
"${PROJECT_SOURCE_DIR}/build/codal_extra_definitions_new.h") + set(EXTRA_INCLUDES_PATH "${PROJECT_SOURCE_DIR}/build/codal_extra_definitions.h") + file(WRITE "${EXTRA_INCLUDES_NEW_PATH}" ${CODAL_DEFINITIONS}) + configure_file(${EXTRA_INCLUDES_NEW_PATH} ${EXTRA_INCLUDES_PATH} COPYONLY) + + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -include ${EXTRA_INCLUDES_PATH}") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -include ${EXTRA_INCLUDES_PATH}") +endif() + +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -I${PLATFORM_INCLUDES_PATH}") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I${PLATFORM_INCLUDES_PATH}") + +# a define for cmake if statements to detect if within the CODAL build environment +set(CODAL_BUILD_SYSTEM TRUE) + +# a define specificying common utils used in codal +set(CODAL_UTILS_LOCATION "${PROJECT_SOURCE_DIR}/utils/cmake/util.cmake") + +# this variable is used in the linking step of the final binary. +set(LIB_FOLDERS "") + +# Add the root of the libraries folder as a search path. Useful for disambiguating header files with duplicated names. 
+MESSAGE (STATUS "Adding library path: (${PROJECT_SOURCE_DIR}/${LIB_DEST})") +include_directories(${PROJECT_SOURCE_DIR}/${LIB_DEST}) + +#add_subdirectory("${PROJECT_SOURCE_DIR}/${LIB_DEST}/${dep}") + +# "import" and add any specified libraries to the build list +if("${device.libraries}" STRGREATER "") + message("Installing dependencies...") + set(DEVICE_LIBS ${device.libraries}) + + foreach(i ${DEVICE_LIBS}) + SET(BRANCH "NONE") + SET(URL "${device.libraries_${i}.url}") + if("${device.libraries_${i}.branch}" STRGREATER "") + SET(BRANCH "${device.libraries_${i}.branch}") + endif() + if("${codal.target.branches.${URL}}" STRGREATER "") + SET(BRANCH "${codal.target.branches.${URL}}") + MESSAGE (STATUS "Override branch: ${BRANCH}") + endif() + + INSTALL_DEPENDENCY(${LIB_DEST} ${device.libraries_${i}.name} ${URL} ${BRANCH} ${device.libraries_${i}.type}) + list(APPEND CODAL_DEPS "${device.libraries_${i}.name}") + endforeach() + + foreach(dep ${CODAL_DEPS}) + message("${BoldGreen}Using library: ${dep}${ColourReset}") + add_subdirectory("${PROJECT_SOURCE_DIR}/${LIB_DEST}/${dep}") + endforeach() +endif() + +#finally, find sources and includes of the application, and create a target. +RECURSIVE_FIND_DIR(INCLUDE_DIRS "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.h") +# *.c?? 
only catches .cpp, not .c, so let's be precise +RECURSIVE_FIND_FILE(SOURCE_FILES "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.cpp") + +RECURSIVE_FIND_FILE(S_FILES "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.s") +RECURSIVE_FIND_FILE(C_FILES "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.c") +list(APPEND SOURCE_FILES ${S_FILES}) +list(APPEND SOURCE_FILES ${C_FILES}) + +if("${SOURCE_FILES}" STREQUAL "") + message(FATAL_ERROR "${BoldRed}No user application to build, please add a main.cpp at: ${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}${ColourReset}") +endif() + +if ("${BUILD_TOOL}" STRGREATER "") + string(COMPARE EQUAL "${BUILD_TOOL}" "YOTTA" YOTTA_BUILD) + if (${YOTTA_BUILD}) + include("${PROJECT_SOURCE_DIR}/utils/cmake/buildtools/yotta.cmake") + endif () + + string(COMPARE EQUAL "${BUILD_TOOL}" "CODAL" CODAL_BUILD) + if (${CODAL_BUILD}) + include("${PROJECT_SOURCE_DIR}/utils/cmake/buildtools/codal.cmake") + endif() +endif() + +# +# Supress the addition of implicit linker flags (such as -rdynamic) +# +set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "") +set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "") +set(CMAKE_EXE_EXPORTS_C_FLAG "") +set(CMAKE_EXE_EXPORTS_CXX_FLAG "") diff --git a/Firmware/Microbit_v2/Dockerfile b/Firmware/Microbit_v2/Dockerfile new file mode 100644 index 0000000..7af0490 --- /dev/null +++ b/Firmware/Microbit_v2/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:18.04 + +RUN apt-get update -qq && \ + apt-get install -y --no-install-recommends \ + software-properties-common && \ + add-apt-repository -y ppa:team-gcc-arm-embedded/ppa && \ + apt-get update -qq && \ + apt-get install -y --no-install-recommends \ + git make cmake python3 \ + gcc-arm-embedded && \ + apt-get autoremove -y && \ + apt-get clean -y && \ + rm -rf /var/lib/apt/lists/* + +# Project sources volume should be mounted at /app +WORKDIR /app + +ENTRYPOINT ["python3", "build.py"] diff --git a/Firmware/Microbit_v2/LICENSE b/Firmware/Microbit_v2/LICENSE new file mode 100644 index 
0000000..46e04fd --- /dev/null +++ b/Firmware/Microbit_v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Lancaster University + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Firmware/Microbit_v2/README.md b/Firmware/Microbit_v2/README.md new file mode 100644 index 0000000..e99778a --- /dev/null +++ b/Firmware/Microbit_v2/README.md @@ -0,0 +1,57 @@ +# OpenHaystack Firmware for nRF52833 + +This project contains a PoC firmware for Nordic nRF52833 chips such as used by the [BBC micro:bit](https://microbit.org). +After flashing our firmware, the device sends out Bluetooth Low Energy advertisements such that it can be found by [Apple's Find My network](https://developer.apple.com/find-my/). + +This firmware builds partially on top of the [microbit-v2-samples](https://github.com/lancaster-university/microbit-v2-samples). + +## Disclaimer + +Note that the firmware is just a proof-of-concept and currently only implements advertising a single static key. 
This means that **devices running this firmware are trackable** by other devices in proximity. + +## Requirements + +This PoC supports builds using docker. You need a working [docker setup](https://docs.docker.com/engine/install/) or [GNU Arm Embedded Toolchain](https://developer.arm.com/tools-and-software/open-source-software/developer-tools/gnu-toolchain/gnu-rm/downloads) to build the firmware on your own. + +On macOS, you can install the toolchain via [Homebrew](https://brew.sh): + +```bash +brew install --cask gcc-arm-embedded +``` + +With docker, you can build the microbit-tools image via: + +```bash +docker build -t microbit-tools . +``` + +## Build + +You need to specify a public key in the firmware image. You can either directly do so in the [source](offline-finding/main.c) (`public_key`) or patch the string `OFFLINEFINDINGPUBLICKEYHERE!` in the final firmware image. + +To build the firmware, it should suffice to run: + +```bash +docker run -v $(pwd):/app --rm microbit-tools +``` + +from the main directory, which also takes care of downloading all dependencies. The deploy-ready image is then available at `./MICROBIT.hex`. + +## Deploy + +To deploy the image on a connected nRF device, you can run: + +```bash +cp MICROBIT.hex /Volumes/MICROBIT/ +``` + +*We tested this procedure with the BBC micro:bit V2 only, but other nRF52833-based devices should work as well.* + +## Author +- This firmware: **Sam Aleksov** ([@samaleksov](https://github.com/samaleksov), [email](mailto:samuelalexdev@gmail.com), [web](https://refractionx.com)) + +- **Milan Stute** ([@schmittner](https://github.com/schmittner), [email](mailto:mstute@seemoo.tu-darmstadt.de), [web](https://seemoo.de/mstute)) + +## License + +This firmware is licensed under the [**MIT License**](LICENSE). 
diff --git a/Firmware/Microbit_v2/build.py b/Firmware/Microbit_v2/build.py new file mode 100644 index 0000000..594c5d7 --- /dev/null +++ b/Firmware/Microbit_v2/build.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python + +# The MIT License (MIT) + +# Copyright (c) 2017 Lancaster University. + +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. + +import os +import sys +import optparse +import platform +import json +import shutil +import re +from utils.python.codal_utils import system, build, read_json, checkgit, read_config, update, revision, printstatus, status, get_next_version, lock, delete_build_folder, generate_docs + +parser = optparse.OptionParser(usage="usage: %prog target-name-or-url [options]", description="This script manages the build system for a codal device. 
Passing a target-name generates a codal.json for that devices, to list all devices available specify the target-name as 'ls'.") +parser.add_option('-c', '--clean', dest='clean', action="store_true", help='Whether to clean before building. Applicable only to unix based builds.', default=False) +parser.add_option('-t', '--test-platforms', dest='test_platform', action="store_true", help='Specify whether the target platform is a test platform or not.', default=False) +parser.add_option('-l', '--lock', dest='lock_target', action="store_true", help='Create target-lock.json, updating patch version', default=False) +parser.add_option('-b', '--branch', dest='branch', action="store_true", help='With -l, use vX.X.X-BRANCH.Y', default=False) +parser.add_option('-m', '--minor', dest='update_minor', action="store_true", help='With -l, update minor version', default=False) +parser.add_option('-M', '--major', dest='update_major', action="store_true", help='With -l, update major version', default=False) +parser.add_option('-V', '--version', dest='version', metavar="VERSION", help='With -l, set the version; use "-V v0.0.1" to bootstrap', default=False) +parser.add_option('-u', '--update', dest='update', action="store_true", help='git pull target and libraries', default=False) +parser.add_option('-s', '--status', dest='status', action="store_true", help='git status target and libraries', default=False) +parser.add_option('-r', '--revision', dest='revision', action="store", help='Checkout a specific revision of the target', default=False) +parser.add_option('-d', '--dev', dest='dev', action="store_true", help='enable developer mode (does not use target-locked.json)', default=False) +parser.add_option('-g', '--generate-docs', dest='generate_docs', action="store_true", help='generate documentation for the current target', default=False) + +(options, args) = parser.parse_args() + +if not os.path.exists("build"): + os.mkdir("build") + +if options.lock_target: + lock(options) + exit(0) + 
+if options.update: + update() + exit(0) + +if options.status: + status() + exit(0) + +if options.revision: + revision(options.revision) + exit(0) + +# out of source build! +os.chdir("build") + +test_json = read_json("../utils/targets.json") + +# configure the target a user has specified: +if len(args) == 1: + + target_name = args[0] + target_config = None + + # list all targets + if target_name == "ls": + for json_obj in test_json: + s = "%s: %s" % (json_obj["name"], json_obj["info"]) + if "device_url" in json_obj.keys(): + s += "(%s)" % json_obj["device_url"] + print(s) + exit(0) + + # cycle through out targets and check for a match + for json_obj in test_json: + if json_obj["name"] != target_name: + continue + + del json_obj["device_url"] + del json_obj["info"] + + target_config = json_obj + break + + if target_config == None and target_name.startswith("http"): + target_config = { + "name": re.sub("^.*/", "", target_name), + "url": target_name, + "branch": "master", + "type": "git" + } + + if target_config == None: + print("'" + target_name + "'" + " is not a valid target.") + exit(1) + + # developer mode is for users who wish to contribute, it will clone and checkout commitable branches. + if options.dev: + target_config["dev"] = True + + config = { + "target":target_config + } + + with open("../codal.json", 'w') as codal_json: + json.dump(config, codal_json, indent=4) + + # remove the build folder, a user could be swapping targets. + delete_build_folder() + + +elif len(args) > 1: + print("Too many arguments supplied, only one target can be specified.") + exit(1) + +if not options.test_platform: + + if not os.path.exists("../codal.json"): + print("No target specified in codal.json, does codal.json exist?") + exit(1) + + if options.generate_docs: + generate_docs() + exit(0) + + build(options.clean) + exit(0) + +for json_obj in test_json: + + # some platforms aren't supported by travis, ignore them when testing. 
+ if "test_ignore" in json_obj: + print("ignoring: " + json_obj["name"]) + continue + + # ensure we have a clean build tree. + delete_build_folder() + + # clean libs + if os.path.exists("../libraries"): + shutil.rmtree('../libraries') + + # configure the target and tests... + config = { + "target":json_obj, + "output":".", + "application":"libraries/"+json_obj["name"]+"/tests/" + } + + with open("../codal.json", 'w') as codal_json: + json.dump(config, codal_json, indent=4) + + build(True, True) diff --git a/Firmware/Microbit_v2/codal.json b/Firmware/Microbit_v2/codal.json new file mode 100644 index 0000000..d024d9f --- /dev/null +++ b/Firmware/Microbit_v2/codal.json @@ -0,0 +1,21 @@ +{ + "target": { + "name": "codal-microbit-v2", + "url": "https://github.com/lancaster-university/codal-microbit-v2", + "branch": "master", + "type": "git", + "test_ignore": true, + "dev": true + } , + "config":{ + "SOFTDEVICE_PRESENT": 1, + "DEVICE_BLE": 1, + "MICROBIT_BLE_ENABLED" : 1, + "MICROBIT_BLE_PAIRING_MODE": 1, + "MICROBIT_BLE_DFU_SERVICE": 1, + "MICROBIT_BLE_DEVICE_INFORMATION_SERVICE": 1, + "MICROBIT_BLE_EVENT_SERVICE" : 1, + "MICROBIT_BLE_PARTIAL_FLASHING" : 0, + "MICROBIT_BLE_SECURITY_LEVEL": "SECURITY_MODE_ENCRYPTION_NO_MITM" + } +} diff --git a/Firmware/Microbit_v2/module.json b/Firmware/Microbit_v2/module.json new file mode 100644 index 0000000..9abda12 --- /dev/null +++ b/Firmware/Microbit_v2/module.json @@ -0,0 +1,15 @@ +{ + "name": "codal", + "version": "0.0.1", + "description": "Component Oriented Device Abstraction Layer (CODAL) - root", + "license": "MIT", + "dependencies": { + "codal-microbit": "git@github.com:lancaster-university/codal-microbit-v2.git#master" + }, + "targetDependencies": {}, + "extraIncludes": [ + "yotta_modules/" + ], + "bin": "./source", + "scripts": {} +} diff --git a/Firmware/Microbit_v2/source/main.cpp b/Firmware/Microbit_v2/source/main.cpp new file mode 100644 index 0000000..74e53cc --- /dev/null +++ b/Firmware/Microbit_v2/source/main.cpp 
@@ -0,0 +1,74 @@ +#include +#include "ble_dfu.h" +#include "nrf_sdh.h" +#include "MicroBit.h" + +#define microbit_ble_CONN_CFG_TAG 1 +#define ADV_INTERVAL 2000 + +static uint8_t m_adv_handle = BLE_GAP_ADV_SET_HANDLE_NOT_SET; +static char public_key[28] = {'O', 'F', 'F', 'L', 'I', 'N', 'E', 'F', 'I', 'N', 'D', 'I', 'N', 'G', 'P', 'U', 'B', 'L', 'I', 'C', 'K', 'E', 'Y', 'H', 'E', 'R', 'E', '!'}; +static uint8_t offline_finding_adv_template[] = { + 0x1e, /* Length (30) */ + 0xff, /* Manufacturer Specific Data (type 0xff) */ + 0x4c, 0x00, /* Company ID (Apple) */ + 0x12, 0x19, /* Offline Finding type and length */ + 0x00, /* State */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, /* First two bits */ + 0x00, /* Hint (0x00) */ +}; + +void fill_adv_template_from_key() { + /* copy last 22 bytes */ + memcpy(&offline_finding_adv_template[7], &public_key[6], 22); + /* append two bits of public key */ + offline_finding_adv_template[29] = public_key[0] >> 6; +} + +MicroBit uBit; + +int main() { + nrf_sdh_enable_request(); + uint32_t ram_start = 0; + nrf_sdh_ble_default_cfg_set(microbit_ble_CONN_CFG_TAG, &ram_start); + nrf_sdh_ble_enable(&ram_start); + + ble_gap_addr_t p_addrset; + p_addrset.addr_id_peer = 1; + p_addrset.addr_type = BLE_GAP_ADDR_TYPE_RANDOM_STATIC; + p_addrset.addr[5] = public_key[0] | 0b11000000; + p_addrset.addr[4] = public_key[1]; + p_addrset.addr[3] = public_key[2]; + p_addrset.addr[2] = public_key[3]; + p_addrset.addr[1] = public_key[4]; + p_addrset.addr[0] = public_key[5]; + sd_ble_gap_addr_set(&p_addrset); + + fill_adv_template_from_key(); + + ble_gap_adv_params_t gap_adv_params; + memset(&gap_adv_params, 0, sizeof( gap_adv_params)); + gap_adv_params.properties.type = BLE_GAP_ADV_TYPE_NONCONNECTABLE_SCANNABLE_UNDIRECTED; + gap_adv_params.interval = (1000 * ADV_INTERVAL) / 625; // 625 us units + if (gap_adv_params.interval < BLE_GAP_ADV_INTERVAL_MIN) 
gap_adv_params.interval = BLE_GAP_ADV_INTERVAL_MIN; + if (gap_adv_params.interval > BLE_GAP_ADV_INTERVAL_MAX) gap_adv_params.interval = BLE_GAP_ADV_INTERVAL_MAX; + gap_adv_params.duration = 0; //10 ms units + gap_adv_params.filter_policy = BLE_GAP_ADV_FP_ANY; + gap_adv_params.primary_phy = BLE_GAP_PHY_1MBPS; + + ble_gap_adv_data_t gap_adv_data; + memset( &gap_adv_data, 0, sizeof( gap_adv_data)); + gap_adv_data.adv_data.p_data = offline_finding_adv_template; + gap_adv_data.adv_data.len = BLE_GAP_ADV_SET_DATA_SIZE_MAX; + + sd_ble_gap_adv_set_configure(&m_adv_handle, &gap_adv_data, &gap_adv_params); + sd_ble_gap_adv_start( m_adv_handle, microbit_ble_CONN_CFG_TAG); + + while(1) { + uBit.display.scroll("OpenHaystack"); + } + return 0; +} diff --git a/Firmware/Microbit_v2/utils/__init__.py b/Firmware/Microbit_v2/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Firmware/Microbit_v2/utils/cmake/JSONParser.cmake b/Firmware/Microbit_v2/utils/cmake/JSONParser.cmake new file mode 100644 index 0000000..7f45f14 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/JSONParser.cmake @@ -0,0 +1,309 @@ +# The MIT License (MIT) + +# Copyright (c) 2015 Stefan Bellus + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +cmake_minimum_required(VERSION 3.1) + +if (DEFINED JSonParserGuard) + return() +endif() + +set(JSonParserGuard yes) + +macro(sbeParseJson prefix jsonString) + cmake_policy(PUSH) + + set(json_string "${${jsonString}}") + string(LENGTH "${json_string}" json_jsonLen) + set(json_index 0) + set(json_AllVariables ${prefix}) + set(json_ArrayNestingLevel 0) + set(json_MaxArrayNestingLevel 0) + + _sbeParse(${prefix}) + + unset(json_index) + unset(json_AllVariables) + unset(json_jsonLen) + unset(json_string) + unset(json_value) + unset(json_inValue) + unset(json_name) + unset(json_inName) + unset(json_newPrefix) + unset(json_reservedWord) + unset(json_arrayIndex) + unset(json_char) + unset(json_end) + unset(json_ArrayNestingLevel) + foreach(json_nestingLevel RANGE ${json_MaxArrayNestingLevel}) + unset(json_${json_nestingLevel}_arrayIndex) + endforeach() + unset(json_nestingLevel) + unset(json_MaxArrayNestingLevel) + + cmake_policy(POP) +endmacro() + +macro(sbeClearJson prefix) + foreach(json_var ${${prefix}}) + unset(${json_var}) + endforeach() + + unset(${prefix}) + unset(json_var) +endmacro() + +macro(sbePrintJson prefix) + foreach(json_var ${${prefix}}) + message("${json_var} = ${${json_var}}") + endforeach() +endmacro() + +macro(_sbeParse prefix) + + while(${json_index} LESS ${json_jsonLen}) + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + if("\"" STREQUAL "${json_char}") + _sbeParseNameValue(${prefix}) + elseif("{" STREQUAL "${json_char}") + _sbeMoveToNextNonEmptyCharacter() + _sbeParseObject(${prefix}) + elseif("[" STREQUAL "${json_char}") + _sbeMoveToNextNonEmptyCharacter() + _sbeParseArray(${prefix}) + endif() + + if(${json_index} LESS ${json_jsonLen}) + string(SUBSTRING 
"${json_string}" ${json_index} 1 json_char) + else() + break() + endif() + + if ("}" STREQUAL "${json_char}" OR "]" STREQUAL "${json_char}") + break() + endif() + + _sbeMoveToNextNonEmptyCharacter() + endwhile() +endmacro() + +macro(_sbeParseNameValue prefix) + set(json_name "") + set(json_inName no) + + while(${json_index} LESS ${json_jsonLen}) + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + # check if name ends + if("\"" STREQUAL "${json_char}" AND json_inName) + set(json_inName no) + _sbeMoveToNextNonEmptyCharacter() + if(NOT ${json_index} LESS ${json_jsonLen}) + break() + endif() + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + set(json_newPrefix ${prefix}.${json_name}) + set(json_name "") + + if(":" STREQUAL "${json_char}") + _sbeMoveToNextNonEmptyCharacter() + if(NOT ${json_index} LESS ${json_jsonLen}) + break() + endif() + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + if("\"" STREQUAL "${json_char}") + _sbeParseValue(${json_newPrefix}) + break() + elseif("{" STREQUAL "${json_char}") + _sbeMoveToNextNonEmptyCharacter() + _sbeParseObject(${json_newPrefix}) + break() + elseif("[" STREQUAL "${json_char}") + _sbeMoveToNextNonEmptyCharacter() + _sbeParseArray(${json_newPrefix}) + break() + else() + # reserved word starts + _sbeParseReservedWord(${json_newPrefix}) + break() + endif() + else() + # name without value + list(APPEND ${json_AllVariables} ${json_newPrefix}) + set(${json_newPrefix} "") + break() + endif() + endif() + + if(json_inName) + # remove escapes + if("\\" STREQUAL "${json_char}") + math(EXPR json_index "${json_index} + 1") + if(NOT ${json_index} LESS ${json_jsonLen}) + break() + endif() + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + endif() + + set(json_name "${json_name}${json_char}") + endif() + + # check if name starts + if("\"" STREQUAL "${json_char}" AND NOT json_inName) + set(json_inName yes) + endif() + + _sbeMoveToNextNonEmptyCharacter() + endwhile() +endmacro() + 
+macro(_sbeParseReservedWord prefix) + set(json_reservedWord "") + set(json_end no) + while(${json_index} LESS ${json_jsonLen} AND NOT json_end) + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + if("," STREQUAL "${json_char}" OR "}" STREQUAL "${json_char}" OR "]" STREQUAL "${json_char}") + set(json_end yes) + else() + set(json_reservedWord "${json_reservedWord}${json_char}") + math(EXPR json_index "${json_index} + 1") + endif() + endwhile() + + list(APPEND ${json_AllVariables} ${prefix}) + string(STRIP "${json_reservedWord}" json_reservedWord) + set(${prefix} ${json_reservedWord}) +endmacro() + +macro(_sbeParseValue prefix) + cmake_policy(SET CMP0054 NEW) # turn off implicit expansions in if statement + + set(json_value "") + set(json_inValue no) + + while(${json_index} LESS ${json_jsonLen}) + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + # check if json_value ends, it is ended by " + if("\"" STREQUAL "${json_char}" AND json_inValue) + set(json_inValue no) + + set(${prefix} ${json_value}) + list(APPEND ${json_AllVariables} ${prefix}) + _sbeMoveToNextNonEmptyCharacter() + break() + endif() + + if(json_inValue) + # if " is escaped consume + if("\\" STREQUAL "${json_char}") + math(EXPR json_index "${json_index} + 1") + if(NOT ${json_index} LESS ${json_jsonLen}) + break() + endif() + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + if(NOT "\"" STREQUAL "${json_char}") + # if it is not " then copy also escape character + set(json_char "\\${json_char}") + endif() + endif() + + _sbeAddEscapedCharacter("${json_char}") + endif() + + # check if value starts + if("\"" STREQUAL "${json_char}" AND NOT json_inValue) + set(json_inValue yes) + endif() + + math(EXPR json_index "${json_index} + 1") + endwhile() +endmacro() + +macro(_sbeAddEscapedCharacter char) + string(CONCAT json_value "${json_value}" "${char}") +endmacro() + +macro(_sbeParseObject prefix) + _sbeParse(${prefix}) + _sbeMoveToNextNonEmptyCharacter() +endmacro() + 
+macro(_sbeParseArray prefix) + math(EXPR json_ArrayNestingLevel "${json_ArrayNestingLevel} + 1") + set(json_${json_ArrayNestingLevel}_arrayIndex 0) + + set(${prefix} "") + list(APPEND ${json_AllVariables} ${prefix}) + + while(${json_index} LESS ${json_jsonLen}) + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + if("\"" STREQUAL "${json_char}") + # simple value + list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex}) + _sbeParseValue(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex}) + elseif("{" STREQUAL "${json_char}") + # object + _sbeMoveToNextNonEmptyCharacter() + list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex}) + _sbeParseObject(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex}) + else() + list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex}) + _sbeParseReservedWord(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex}) + endif() + + if(NOT ${json_index} LESS ${json_jsonLen}) + break() + endif() + + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + + if("]" STREQUAL "${json_char}") + _sbeMoveToNextNonEmptyCharacter() + break() + elseif("," STREQUAL "${json_char}") + math(EXPR json_${json_ArrayNestingLevel}_arrayIndex "${json_${json_ArrayNestingLevel}_arrayIndex} + 1") + endif() + + _sbeMoveToNextNonEmptyCharacter() + endwhile() + + if(${json_MaxArrayNestingLevel} LESS ${json_ArrayNestingLevel}) + set(json_MaxArrayNestingLevel ${json_ArrayNestingLevel}) + endif() + math(EXPR json_ArrayNestingLevel "${json_ArrayNestingLevel} - 1") +endmacro() + +macro(_sbeMoveToNextNonEmptyCharacter) + math(EXPR json_index "${json_index} + 1") + if(${json_index} LESS ${json_jsonLen}) + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + while(${json_char} MATCHES "[ \t\n\r]" AND ${json_index} LESS ${json_jsonLen}) + math(EXPR json_index "${json_index} + 1") + string(SUBSTRING "${json_string}" ${json_index} 1 json_char) + endwhile() + endif() +endmacro() diff --git 
a/Firmware/Microbit_v2/utils/cmake/buildtools/codal.cmake b/Firmware/Microbit_v2/utils/cmake/buildtools/codal.cmake new file mode 100644 index 0000000..ea8b23c --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/buildtools/codal.cmake @@ -0,0 +1,85 @@ +add_executable( + ${device.device} + ${SOURCE_FILES} +) + +if("${INCLUDE_DIRS}" STRGREATER "") + target_include_directories(${device.device} PUBLIC "${INCLUDE_DIRS}") +endif() + +set_target_properties(${device.device} PROPERTIES SUFFIX "" ENABLE_EXPORTS ON) + +# link the executable with supporting libraries. +target_link_libraries( + ${device.device} + ${CODAL_DEPS} +) + +# import toolchain bin generation command +if(${device.generate_bin}) + include(${TOOLCHAIN_FOLDER}/bin-generator.cmake) +endif() + +# import toolchain hex generation command +if(${device.generate_hex}) + include(${TOOLCHAIN_FOLDER}/hex-generator.cmake) +endif() + +# post process command hook, depends on the hex file generated by the build system. +if("${device.post_process.command}" STRGREATER "" OR "${device.post_process}" STRGREATER "") + + if("${device.post_process}" STRGREATER "") + set(POST_PROCESS_COMMAND ${device.post_process}) + else() + set(POST_PROCESS_COMMAND ${device.post_process.command}) + endif() + + set(POST_PROCESS_DEPENDS "${device.post_process.depends}") + + # replace specific strings in the command, this gives users flexibility, they don't have to manually specify the location of files + string(REPLACE "" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR}/${device.device}.hex CODAL_POSTPROCESS_COMMAND ${POST_PROCESS_COMMAND}) + string(REPLACE "" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + string(REPLACE "" ${device.device} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + + string(REPLACE "" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR}/${device.device}.bin CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + string(REPLACE "" 
${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + string(REPLACE "" ${device.device}.bin CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + + string(REPLACE "" ${PROJECT_SOURCE_DIR}/build/${device.device} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + string(REPLACE "" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + string(REPLACE "" ${device.device} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + + string(REPLACE "" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + + #convert to a command + separate_arguments(FINAL_COMMAND UNIX_COMMAND ${CODAL_POSTPROCESS_COMMAND}) + + # execute + if(POST_PROCESS_DEPENDS STREQUAL "ELF") + add_custom_command( + TARGET ${device.device} + COMMAND ${FINAL_COMMAND} + DEPENDS ${device.device} + WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}" + COMMENT "Executing post process command" + ) + elseif(POST_PROCESS_DEPENDS STREQUAL "HEX") + add_custom_command( + TARGET ${device.device}_hex + COMMAND ${FINAL_COMMAND} + DEPENDS ${device.device} + WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}" + COMMENT "Executing post process command" + ) + else() + #by default post process should depend on hex + add_custom_command( + TARGET ${device.device}_bin + COMMAND ${FINAL_COMMAND} + DEPENDS ${device.device} + WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}" + COMMENT "Executing post process command" + ) + endif() + +endif() \ No newline at end of file diff --git a/Firmware/Microbit_v2/utils/cmake/buildtools/yotta.cmake b/Firmware/Microbit_v2/utils/cmake/buildtools/yotta.cmake new file mode 100644 index 0000000..8002006 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/buildtools/yotta.cmake @@ -0,0 +1,23 @@ +if("${INCLUDE_DIRS}" STRGREATER "") + target_include_directories(codal PUBLIC "${INCLUDE_DIRS}") +endif() + +add_library(codal "${SOURCE_FILES}") +set_target_properties(codal 
PROPERTIES SUFFIX "" ENABLE_EXPORTS ON) + +target_compile_definitions(codal PUBLIC "${device.definitions}") +target_include_directories(codal PUBLIC ${PLATFORM_INCLUDES_PATH}) +target_compile_options(codal PUBLIC -include ${EXTRA_INCLUDES_PATH}) + +set(STRIPPED "") +string(STRIP "${CMAKE_LINKER_FLAGS}" STRIPPED) +# link the executable with supporting libraries. +target_link_libraries(codal "${CODAL_DEPS};${STRIPPED}") + +# +# Supress the addition of implicit linker flags (such as -rdynamic) +# +set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "") +set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "") +set(CMAKE_EXE_EXPORTS_C_FLAG "") +set(CMAKE_EXE_EXPORTS_CXX_FLAG "") \ No newline at end of file diff --git a/Firmware/Microbit_v2/utils/cmake/colours.cmake b/Firmware/Microbit_v2/utils/cmake/colours.cmake new file mode 100644 index 0000000..2786b49 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/colours.cmake @@ -0,0 +1,19 @@ +if(NOT WIN32) + string(ASCII 27 Esc) + set(ColourReset "${Esc}[m") + set(ColourBold "${Esc}[1m") + set(Red "${Esc}[31m") + set(Green "${Esc}[32m") + set(Yellow "${Esc}[33m") + set(Blue "${Esc}[34m") + set(Magenta "${Esc}[35m") + set(Cyan "${Esc}[36m") + set(White "${Esc}[37m") + set(BoldRed "${Esc}[1;31m") + set(BoldGreen "${Esc}[1;32m") + set(BoldYellow "${Esc}[1;33m") + set(BoldBlue "${Esc}[1;34m") + set(BoldMagenta "${Esc}[1;35m") + set(BoldCyan "${Esc}[1;36m") + set(BoldWhite "${Esc}[1;37m") +endif() diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake new file mode 100644 index 0000000..d18d098 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake @@ -0,0 +1,9 @@ +add_custom_command( + OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin" + COMMAND "${ARM_NONE_EABI_OBJCOPY}" -O binary "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" 
"${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin" + DEPENDS ${device.device} + COMMENT "converting to bin file." +) + +#specify a dependency on the elf file so that bin is automatically rebuilt when elf is changed. +add_custom_target(${device.device}_bin ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake new file mode 100644 index 0000000..16aacaa --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake @@ -0,0 +1,49 @@ +set(EXPLICIT_INCLUDES "") +if((CMAKE_VERSION VERSION_GREATER "3.4.0") OR (CMAKE_VERSION VERSION_EQUAL "3.4.0")) + # from CMake 3.4 are separate to in the + # CMAKE__COMPILE_OBJECT, CMAKE__CREATE_ASSEMBLY_SOURCE, and + # CMAKE__CREATE_PREPROCESSED_SOURCE commands + set(EXPLICIT_INCLUDES " ") +endif() + +# Override the link rules: +set(CMAKE_C_CREATE_SHARED_LIBRARY "echo 'shared libraries not supported' && 1") +set(CMAKE_C_CREATE_SHARED_MODULE "echo 'shared modules not supported' && 1") +set(CMAKE_C_CREATE_STATIC_LIBRARY " -cr ") +set(CMAKE_C_COMPILE_OBJECT " ${EXPLICIT_INCLUDES} -o -c ") + +set(CMAKE_C_LINK_EXECUTABLE " -Wl,-Map,.map -Wl,--start-group -lm -lc -lgcc -lm -lc -lgcc -Wl,--end-group --specs=nano.specs -o ") + +set(CMAKE_CXX_OUTPUT_EXTENSION ".o") +set(CMAKE_DEPFILE_FLAGS_CXX "-MMD -MT -MF ") +set(CMAKE_C_OUTPUT_EXTENSION ".o") +set(CMAKE_DEPFILE_FLAGS_C "-MMD -MT -MF ") + +set(CMAKE_C_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_C_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_C_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_C_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ") + + +set(CMAKE_ASM_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_ASM_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_ASM_FLAGS_RELEASE_INIT "-Os -DNDEBUG") 
+set(CMAKE_ASM_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_ASM "-isystem ") + +set(CMAKE_CXX_CREATE_STATIC_LIBRARY " -cr ") + +set(CMAKE_CXX_LINK_EXECUTABLE " -Wl,-Map,.map -Wl,--start-group -lnosys -lstdc++ -lsupc++ -lm -lc -lgcc -lstdc++ -lsupc++ -lm -lc -lgcc -Wl,--end-group --specs=nano.specs -o ") + +set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_CXX_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_CXX "-isystem ") + +if (CMAKE_C_COMPILER_VERSION VERSION_GREATER "7.1.0" OR CMAKE_C_COMPILER_VERSION VERSION_EQUAL "7.1.0") + message("${BoldRed}Supressing -Wexpansion-to-defined.${ColourReset}") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-expansion-to-defined") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-expansion-to-defined") +endif () \ No newline at end of file diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake new file mode 100644 index 0000000..4948935 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake @@ -0,0 +1,9 @@ +add_custom_command( + OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex" + COMMAND "${ARM_NONE_EABI_OBJCOPY}" -O ihex "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex" + DEPENDS ${device.device} + COMMENT "converting to hex file." +) + +#specify a dependency on the elf file so that hex is automatically rebuilt when elf is changed. 
+add_custom_target(${device.device}_hex ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/platform_includes.h b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/platform_includes.h new file mode 100644 index 0000000..3417ef3 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/platform_includes.h @@ -0,0 +1,10 @@ +#ifndef PLATFORM_INCLUDES +#define PLATFORM_INCLUDES + +#include +#include +#include +#include +#include + +#endif diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/toolchain.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/toolchain.cmake new file mode 100644 index 0000000..eafdfbc --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/ARM_GCC/toolchain.cmake @@ -0,0 +1,26 @@ +find_program(ARM_NONE_EABI_RANLIB arm-none-eabi-ranlib) +find_program(ARM_NONE_EABI_AR arm-none-eabi-ar) +find_program(ARM_NONE_EABI_GCC arm-none-eabi-gcc) +find_program(ARM_NONE_EABI_GPP arm-none-eabi-g++) +find_program(ARM_NONE_EABI_OBJCOPY arm-none-eabi-objcopy) + +set(CMAKE_OSX_SYSROOT "/") +set(CMAKE_OSX_DEPLOYMENT_TARGET "") + +set(CODAL_TOOLCHAIN "ARM_GCC") + +if(CMAKE_VERSION VERSION_LESS "3.5.0") + include(CMakeForceCompiler) + cmake_force_c_compiler("${ARM_NONE_EABI_GCC}" GNU) + cmake_force_cxx_compiler("${ARM_NONE_EABI_GPP}" GNU) +else() + # from 3.5 the force_compiler macro is deprecated: CMake can detect + # arm-none-eabi-gcc as being a GNU compiler automatically + set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY") + set(CMAKE_C_COMPILER "${ARM_NONE_EABI_GCC}") + set(CMAKE_CXX_COMPILER "${ARM_NONE_EABI_GPP}") +endif() + +SET(CMAKE_AR "${ARM_NONE_EABI_AR}" CACHE FILEPATH "Archiver") +SET(CMAKE_RANLIB "${ARM_NONE_EABI_RANLIB}" CACHE FILEPATH "rlib") +set(CMAKE_CXX_OUTPUT_EXTENSION ".o") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake 
b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake new file mode 100644 index 0000000..a3a1c01 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake @@ -0,0 +1,9 @@ +add_custom_command( + OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin" + COMMAND "${AVR_OBJCOPY}" -O binary "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin" + DEPENDS ${device.device} + COMMENT "converting to bin file." +) + +#specify a dependency on the elf file so that bin is automatically rebuilt when elf is changed. +add_custom_target(${device.device}_bin ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake new file mode 100644 index 0000000..c6dcfc7 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake @@ -0,0 +1,43 @@ +set(EXPLICIT_INCLUDES "") +if((CMAKE_VERSION VERSION_GREATER "3.4.0") OR (CMAKE_VERSION VERSION_EQUAL "3.4.0")) + # from CMake 3.4 are separate to in the + # CMAKE__COMPILE_OBJECT, CMAKE__CREATE_ASSEMBLY_SOURCE, and + # CMAKE__CREATE_PREPROCESSED_SOURCE commands + set(EXPLICIT_INCLUDES " ") +endif() + +# Override the link rules: +set(CMAKE_C_CREATE_SHARED_LIBRARY "echo 'shared libraries not supported' && 1") +set(CMAKE_C_CREATE_SHARED_MODULE "echo 'shared modules not supported' && 1") +set(CMAKE_C_CREATE_STATIC_LIBRARY " rcs ") +set(CMAKE_C_COMPILE_OBJECT " ${EXPLICIT_INCLUDES} -o -c ") + +set(CMAKE_C_LINK_EXECUTABLE " -Wl,-Map,.map -Wl,--start-group -lm -lc -lgcc -lm -lc -lgcc -Wl,--end-group --specs=nano.specs -o ") + +set(CMAKE_CXX_OUTPUT_EXTENSION ".o") +set(CMAKE_DEPFILE_FLAGS_CXX "-MMD -MT -MF ") +set(CMAKE_C_OUTPUT_EXTENSION ".o") +set(CMAKE_DEPFILE_FLAGS_C "-MMD -MT -MF ") + 
+set(CMAKE_C_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_C_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_C_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_C_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ") + + +set(CMAKE_ASM_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_ASM_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_ASM_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_ASM_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_ASM "-isystem ") + +set(CMAKE_CXX_CREATE_STATIC_LIBRARY " rcs ") + +set(CMAKE_CXX_LINK_EXECUTABLE " -Wl,-Map,.map -Wl,--start-group -lm -lc -lgcc -Wl,--end-group -o ") + +set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_CXX_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_CXX "-isystem ") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake new file mode 100644 index 0000000..5be3c67 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake @@ -0,0 +1,9 @@ +add_custom_command( + OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex" + COMMAND "${AVR_OBJCOPY}" -O ihex "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex" + DEPENDS ${device.device} + COMMENT "converting to hex file." +) + +#specify a dependency on the elf file so that hex is automatically rebuilt when elf is changed. 
+add_custom_target(${device.device}_hex ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/platform_includes.h b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/platform_includes.h new file mode 100644 index 0000000..ac788a5 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/platform_includes.h @@ -0,0 +1,14 @@ +#ifndef PLATFORM_INCLUDES +#define PLATFORM_INCLUDES + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#endif diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/toolchain.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/toolchain.cmake new file mode 100644 index 0000000..891aff2 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/AVR_GCC/toolchain.cmake @@ -0,0 +1,29 @@ +find_program(AVR_GCC_RANLIB avr-gcc-ranlib) +find_program(AVR_AR avr-ar) +find_program(AVR_AS avr-as) +find_program(AVR_GCC avr-gcc) +find_program(AVR_GPP avr-g++) +find_program(AVR_OBJCOPY avr-objcopy) + +set(CMAKE_OSX_SYSROOT "/") +set(CMAKE_OSX_DEPLOYMENT_TARGET "") + +set(CODAL_TOOLCHAIN "AVR_GCC") + +if(CMAKE_VERSION VERSION_LESS "3.5.0") + include(CMakeForceCompiler) + cmake_force_c_compiler("${AVR_GCC}" GNU) + cmake_force_cxx_compiler("${AVR_GPP}" GNU) +else() + #-Wl,-flto -flto -fno-fat-lto-objects + # from 3.5 the force_compiler macro is deprecated: CMake can detect + # arm-none-eabi-gcc as being a GNU compiler automatically + set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY") + set(CMAKE_C_COMPILER "${AVR_GCC}") + set(CMAKE_CXX_COMPILER "${AVR_GPP}") +endif() + +SET(CMAKE_ASM_COMPILER "${AVR_GCC}") +SET(CMAKE_AR "${AVR_AR}" CACHE FILEPATH "Archiver") +SET(CMAKE_RANLIB "${AVR_GCC_RANLIB}" CACHE FILEPATH "rlib") +set(CMAKE_CXX_OUTPUT_EXTENSION ".o") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake 
b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake new file mode 100644 index 0000000..74fdb35 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake @@ -0,0 +1,9 @@ +add_custom_command( + OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin" + COMMAND "${XTENSA_OBJCOPY}" -O binary "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin" + DEPENDS ${device.device} + COMMENT "converting to bin file." +) + +#specify a dependency on the elf file so that bin is automatically rebuilt when elf is changed. +add_custom_target(${device.device}_bin ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake new file mode 100644 index 0000000..e0f7e2d --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake @@ -0,0 +1,43 @@ +set(EXPLICIT_INCLUDES "") +if((CMAKE_VERSION VERSION_GREATER "3.4.0") OR (CMAKE_VERSION VERSION_EQUAL "3.4.0")) + # from CMake 3.4 are separate to in the + # CMAKE__COMPILE_OBJECT, CMAKE__CREATE_ASSEMBLY_SOURCE, and + # CMAKE__CREATE_PREPROCESSED_SOURCE commands + set(EXPLICIT_INCLUDES " ") +endif() + +# Override the link rules: +set(CMAKE_C_CREATE_SHARED_LIBRARY "echo 'shared libraries not supported' && 1") +set(CMAKE_C_CREATE_SHARED_MODULE "echo 'shared modules not supported' && 1") +set(CMAKE_C_CREATE_STATIC_LIBRARY " -cr ") +set(CMAKE_C_COMPILE_OBJECT " ${EXPLICIT_INCLUDES} -o -c ") + +set(CMAKE_C_LINK_EXECUTABLE " -nostdlib -Wl,-Map,.map -Wl,--start-group -lupgrade -lssl -lmesh -lwpa2 -lsmartconfig -lespnow -lpp -lmain -lwpa -llwip -lnet80211 -lwps -lcrypto -lphy -lhal -lgcc -ldriver -lm -lat -lc -lstdc++ -Wl,--end-group -lgcc -o ") + +set(CMAKE_CXX_OUTPUT_EXTENSION ".o") 
+set(CMAKE_DEPFILE_FLAGS_CXX "-MMD -MT -MF ") +set(CMAKE_C_OUTPUT_EXTENSION ".o") +set(CMAKE_DEPFILE_FLAGS_C "-MMD -MT -MF ") + +set(CMAKE_C_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_C_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_C_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_C_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ") + + +set(CMAKE_ASM_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_ASM_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_ASM_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_ASM_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_ASM "-isystem ") + +set(CMAKE_CXX_CREATE_STATIC_LIBRARY " -cr ") +set(CMAKE_CXX_LINK_EXECUTABLE " -nostdlib -Wl,-Map,.map -Wl,--start-group -lupgrade -lssl -lmesh -lwpa2 -lsmartconfig -lespnow -lpp -lmain -lwpa -llwip -lnet80211 -lwps -lcrypto -ldriver -lat -lphy -lhal -lgcc -lm -lc -lstdc++ -o ") +#set(CMAKE_CXX_LINK_EXECUTABLE " -nostdlib -Wl,-Map,.map -Wl,--start-group -lpwm -lupgrade -lssl -lgcc -lhal -lphy -lpp -lnet80211 -lwpa -lmain -llwip -lcrypto -lm -lc -o ") + +set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -gdwarf-3") +set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") +set(CMAKE_CXX_FLAGS_RELEASE_INIT "-Os -DNDEBUG") +set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG") +set(CMAKE_INCLUDE_SYSTEM_FLAG_CXX "-isystem ") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake new file mode 100644 index 0000000..4948935 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake @@ -0,0 +1,9 @@ +add_custom_command( + OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex" + COMMAND "${ARM_NONE_EABI_OBJCOPY}" -O ihex "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex" + DEPENDS ${device.device} 
+ COMMENT "converting to hex file." +) + +#specify a dependency on the elf file so that hex is automatically rebuilt when elf is changed. +add_custom_target(${device.device}_hex ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex") diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h new file mode 100644 index 0000000..3417ef3 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h @@ -0,0 +1,10 @@ +#ifndef PLATFORM_INCLUDES +#define PLATFORM_INCLUDES + +#include +#include +#include +#include +#include + +#endif diff --git a/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake new file mode 100644 index 0000000..6789edd --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake @@ -0,0 +1,26 @@ +find_program(XTENSA_RANLIB xtensa-lx106-elf-gcc-ranlib) +find_program(XTENSA_AR xtensa-lx106-elf-gcc-ar) +find_program(XTENSA_GCC xtensa-lx106-elf-gcc) +find_program(XTENSA_GPP xtensa-lx106-elf-g++) +find_program(XTENSA_OBJCOPY xtensa-lx106-elf-objcopy) + +set(CMAKE_OSX_SYSROOT "/") +set(CMAKE_OSX_DEPLOYMENT_TARGET "") + +set(CODAL_TOOLCHAIN "XTENSA_GCC") + +if(CMAKE_VERSION VERSION_LESS "3.5.0") + include(CMakeForceCompiler) + cmake_force_c_compiler("${XTENSA_GCC}" GNU) + cmake_force_cxx_compiler("${XTENSA_GPP}" GNU) +else() + # from 3.5 the force_compiler macro is deprecated: CMake can detect + # arm-none-eabi-gcc as being a GNU compiler automatically + set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY") + set(CMAKE_C_COMPILER "${XTENSA_GCC}") + set(CMAKE_CXX_COMPILER "${XTENSA_GPP}") +endif() + +SET(CMAKE_AR "${XTENSA_AR}" CACHE FILEPATH "Archiver") +SET(CMAKE_RANLIB "${XTENSA_RANLIB}" CACHE FILEPATH "rlib") +set(CMAKE_CXX_OUTPUT_EXTENSION ".o") diff --git 
a/Firmware/Microbit_v2/utils/cmake/util.cmake b/Firmware/Microbit_v2/utils/cmake/util.cmake new file mode 100644 index 0000000..6e3a815 --- /dev/null +++ b/Firmware/Microbit_v2/utils/cmake/util.cmake @@ -0,0 +1,156 @@ +MACRO(RECURSIVE_FIND_DIR return_list dir pattern) + FILE(GLOB_RECURSE new_list "${dir}/${pattern}") + SET(dir_list "") + FOREACH(file_path ${new_list}) + GET_FILENAME_COMPONENT(dir_path ${file_path} PATH) + SET(dir_list ${dir_list} ${dir_path}) + ENDFOREACH() + LIST(REMOVE_DUPLICATES dir_list) + SET(${return_list} ${dir_list}) +ENDMACRO() + +MACRO(RECURSIVE_FIND_FILE return_list dir pattern) + FILE(GLOB_RECURSE new_list "${dir}/${pattern}") + SET(dir_list "") + FOREACH(file_path ${new_list}) + SET(dir_list ${dir_list} ${file_path}) + ENDFOREACH() + LIST(REMOVE_DUPLICATES dir_list) + SET(${return_list} ${dir_list}) +ENDMACRO() + +MACRO(SOURCE_FILES return_list dir pattern) + FILE(GLOB new_list "${dir}/${pattern}") + SET(dir_list "") + FOREACH(file_path ${new_list}) + LIST(APPEND dir_list ${file_path}) + ENDFOREACH() + LIST(REMOVE_DUPLICATES dir_list) + SET(${return_list} ${dir_list}) +ENDMACRO() + +function(EXTRACT_JSON_ARRAY json_file json_field_path fields values) + + set(VALUES "") + set(FIELDS "") + + foreach(var ${${json_file}}) + # extract any cmd line definitions specified in the json object, and add them + # if it is not prefixed by json_field_path, do not consider the key. + if("${var}" MATCHES "${json_field_path}") + string(REGEX MATCH "[^${json_field_path}]([A-Z,a-z,0-9,_,]+)" VALUE "${var}") + + # never quote the value - gives more flexibility + list(APPEND FIELDS ${VALUE}) + list(APPEND VALUES "${${var}}") + endif() + endforeach() + + set(${fields} ${FIELDS} PARENT_SCOPE) + set(${values} ${VALUES} PARENT_SCOPE) +endfunction() + +function(FORM_DEFINITIONS fields values definitions) + + set(DEFINITIONS "") + list(LENGTH ${fields} LEN) + + # - 1 for for loop index... 
+ MATH(EXPR LEN "${LEN}-1") + + foreach(i RANGE ${LEN}) + list(GET ${fields} ${i} DEFINITION) + list(GET ${values} ${i} VALUE) + + set(DEFINITIONS "${DEFINITIONS} #define ${DEFINITION}\t ${VALUE}\n") + endforeach() + + set(${definitions} ${DEFINITIONS} PARENT_SCOPE) +endfunction() + +function(UNIQUE_JSON_KEYS priority_fields priority_values secondary_fields secondary_values merged_fields merged_values) + + # always keep the first fields and values + set(MERGED_FIELDS ${${priority_fields}}) + set(MERGED_VALUES ${${priority_values}}) + + # measure the second set... + list(LENGTH ${secondary_fields} LEN) + # - 1 for for loop index... + MATH(EXPR LEN "${LEN}-1") + + # iterate, dropping any duplicate fields regardless of the value + foreach(i RANGE ${LEN}) + list(GET ${secondary_fields} ${i} FIELD) + list(GET ${secondary_values} ${i} VALUE) + + list(FIND MERGED_FIELDS ${FIELD} INDEX) + + if (${INDEX} GREATER -1) + continue() + endif() + + list(APPEND MERGED_FIELDS ${FIELD}) + list(APPEND MERGED_VALUES ${VALUE}) + endforeach() + + set(${merged_fields} ${MERGED_FIELDS} PARENT_SCOPE) + set(${merged_values} ${MERGED_VALUES} PARENT_SCOPE) +endfunction() + +MACRO(HEADER_FILES return_list dir) + FILE(GLOB new_list "${dir}/*.h") + SET(${return_list} ${new_list}) +ENDMACRO() + +function(INSTALL_DEPENDENCY dir name url branch type) + if(NOT EXISTS "${CMAKE_CURRENT_LIST_DIR}/${dir}") + message("Creating libraries folder") + FILE(MAKE_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}/${dir}") + endif() + + if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/${dir}/${name}") + message("${name} is already installed") + return() + endif() + + if(${type} STREQUAL "git") + message("Cloning into: ${url}") + # git clone -b doesn't work with SHAs + execute_process( + COMMAND git clone --recurse-submodules ${url} ${name} + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir} + ) + + if(NOT "${branch}" STREQUAL "") + message("Checking out branch: ${branch}") + execute_process( + COMMAND git -c advice.detachedHead=false 
checkout ${branch} + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name} + ) + execute_process( + COMMAND git submodule update --init + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name} + ) + execute_process( + COMMAND git submodule sync + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name} + ) + execute_process( + COMMAND git submodule update + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name} + ) + endif() + else() + message("No mechanism exists to install this library.") + endif() +endfunction() + +MACRO(SUB_DIRS return_dirs dir) + FILE(GLOB list "${PROJECT_SOURCE_DIR}/${dir}/*") + SET(dir_list "") + FOREACH(file_path ${list}) + SET(dir_list ${dir_list} ${file_path}) + ENDFOREACH() + set(${return_dirs} ${dir_list}) +ENDMACRO() diff --git a/Firmware/Microbit_v2/utils/debug/dmesg.js b/Firmware/Microbit_v2/utils/debug/dmesg.js new file mode 100644 index 0000000..e62b5e3 --- /dev/null +++ b/Firmware/Microbit_v2/utils/debug/dmesg.js @@ -0,0 +1,86 @@ +#!/usr/bin/env node +"use strict"; + +let fs = require("fs") +let child_process = require("child_process") + +function fatal(msg) { + console.log("Fatal error:", msg) + process.exit(1) +} + +function main() { + let mapFileName = process.argv[2] + if (!mapFileName) { + console.log("usage: node " + process.argv[1] + " build/mytarget/source/myprog.map") + return + } + console.log("Map file: " + mapFileName) + let mapFile = fs.readFileSync(mapFileName, "utf8") + let addr = 0 + let logSize = 1024 * 4 + 4 + for (let ln of mapFile.split(/\r?\n/)) { + let m = /^\s*0x00000([0-9a-f]+)\s+(\S+)/.exec(ln) + if (m && m[2] == "codalLogStore") { + addr = parseInt(m[1], 16) + break + } + } + if (!addr) fatal("Cannot find codalLogStore symbol in map file") + + let dirs = [ + process.env["HOME"] + "/Library/Arduino15", + process.env["USERPROFILE"] + "/AppData/Local/Arduino15", + process.env["HOME"] + "/.arduino15", + ] + + let pkgDir = "" + + for (let d of dirs) { + pkgDir = d + "/packages/arduino/" + if 
(fs.existsSync(pkgDir)) break + pkgDir = "" + } + + if (!pkgDir) fatal("cannot find Arduino packages directory") + + let openocdPath = pkgDir + "tools/openocd/0.9.0-arduino/" + if (!fs.existsSync(openocdPath)) fatal("openocd not installed in Arduino") + + let openocdBin = openocdPath + "bin/openocd" + + if (process.platform == "win32") + openocdBin += ".exe" + + let zeroCfg = pkgDir + "hardware/samd/1.6.8/variants/arduino_zero/openocd_scripts/arduino_zero.cfg" + let cmd = `init; set M(0) 0; mem2array M 8 ${addr} ${logSize}; parray M; exit` + + console.log("Starting openocd") + child_process.execFile(openocdBin, ["-d2", + "-s", openocdPath + "/share/openocd/scripts/", + "-f", zeroCfg, + "-c", cmd], { + maxBuffer: 1 * 1024 * 1024, + }, (err, stdout, stderr) => { + if (err) { + fatal("error: " + err.message) + } + let buf = new Buffer(logSize) + for (let l of stdout.split(/\r?\n/)) { + let m = /^M\((\d+)\)\s*=\s*(\d+)/.exec(l) + if (m) { + buf[parseInt(m[1])] = parseInt(m[2]) + } + } + let len = buf.readUInt32LE(0) + if (len == 0 || len > buf.length) { + console.log(stderr) + console.log("No logs.") + } else { + console.log("*\n* Logs\n*\n") + console.log(buf.slice(4, 4 + len).toString("binary")) + } + }) +} + +main() \ No newline at end of file diff --git a/Firmware/Microbit_v2/utils/debug/meminfo.js b/Firmware/Microbit_v2/utils/debug/meminfo.js new file mode 100644 index 0000000..e25b043 --- /dev/null +++ b/Firmware/Microbit_v2/utils/debug/meminfo.js @@ -0,0 +1,65 @@ +#!/usr/bin/env node +"use strict"; + +function main() { + let fs = require("fs"); + let mfn = process.argv[2] + if (!mfn) { + console.log("usage: node " + process.argv[1] + " build/mytarget/source/myprog.map") + return + } + console.log("Map file: " + mfn) + let map = fs.readFileSync(mfn, "utf8") + let inSect = 0 + let byFileRAM = {} + let byFileROM = {} + for (let ln of map.split(/\r?\n/)) { + if (ln == "Linker script and memory map") { + inSect = 1 + } + if (/^OUTPUT\(/.test(ln)) { + inSect = 2 + } + 
if (inSect == 1) { + let m = /^\s*(\S*)\s+0x00000([0-9a-f]+)\s+0x([0-9a-f]+)\s+(\S+)/.exec(ln) + if (m) { + let mark = m[1] + if (mark == "*fill*" || mark == ".bss" || mark == ".relocate") + continue; + let addr = parseInt(m[2], 16) + let sz = parseInt(m[3], 16) + let fn = m[4] + if (fn == "load" && mark) fn = mark; + fn = fn.replace(/.*armv6-m/, "") + if (sz) { + let mm = addr < 0x10000000 ? byFileROM : byFileRAM + mm[fn] = (mm[fn] || 0) + sz + } + } + } + } + + console.log("*\n* ROM\n*") + dumpMap(byFileROM) + console.log("*\n* RAM\n*") + dumpMap(byFileRAM) +} + +function printEnt(sz, s) { + let ff = (" " + sz).slice(-7) + console.log(ff + " " + s) +} + +function dumpMap(m) { + let k = Object.keys(m) + k.sort((a, b) => m[a] - m[b]) + let sum = 0 + for (let s of k) { + printEnt(m[s], s) + sum += m[s] + } + printEnt(sum, "TOTAL") +} + + +main() \ No newline at end of file diff --git a/Firmware/Microbit_v2/utils/esptool.py b/Firmware/Microbit_v2/utils/esptool.py new file mode 100644 index 0000000..63eae28 --- /dev/null +++ b/Firmware/Microbit_v2/utils/esptool.py @@ -0,0 +1,1274 @@ +#!/usr/bin/env python +# NB: Before sending a PR to change the above line to '#!/usr/bin/env python2', please read https://github.com/themadinventor/esptool/issues/21 +# +# ESP8266 ROM Bootloader Utility +# https://github.com/themadinventor/esptool +# +# Copyright (C) 2014-2016 Fredrik Ahlberg, Angus Gratton, other contributors as noted. +# +# This program is free software; you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation; either version 2 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along with +# this program; if not, write to the Free Software Foundation, Inc., 51 Franklin +# Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import argparse +import hashlib +import inspect +import json +import os +import serial +import struct +import subprocess +import sys +import tempfile +import time + + +__version__ = "1.2" + + +class ESPROM(object): + # These are the currently known commands supported by the ROM + ESP_FLASH_BEGIN = 0x02 + ESP_FLASH_DATA = 0x03 + ESP_FLASH_END = 0x04 + ESP_MEM_BEGIN = 0x05 + ESP_MEM_END = 0x06 + ESP_MEM_DATA = 0x07 + ESP_SYNC = 0x08 + ESP_WRITE_REG = 0x09 + ESP_READ_REG = 0x0a + + # Maximum block sized for RAM and Flash writes, respectively. + ESP_RAM_BLOCK = 0x1800 + ESP_FLASH_BLOCK = 0x400 + + # Default baudrate. The ROM auto-bauds, so we can use more or less whatever we want. + ESP_ROM_BAUD = 115200 + + # First byte of the application image + ESP_IMAGE_MAGIC = 0xe9 + + # Initial state for the checksum routine + ESP_CHECKSUM_MAGIC = 0xef + + # OTP ROM addresses + ESP_OTP_MAC0 = 0x3ff00050 + ESP_OTP_MAC1 = 0x3ff00054 + ESP_OTP_MAC3 = 0x3ff0005c + + # Flash sector size, minimum unit of erase. + ESP_FLASH_SECTOR = 0x1000 + + def __init__(self, port=0, baud=ESP_ROM_BAUD): + self._port = serial.serial_for_url(port) + self._slip_reader = slip_reader(self._port) + # setting baud rate in a separate step is a workaround for + # CH341 driver on some Linux versions (this opens at 9600 then + # sets), shouldn't matter for other platforms/drivers. 
See + # https://github.com/themadinventor/esptool/issues/44#issuecomment-107094446 + self._port.baudrate = baud + + """ Read a SLIP packet from the serial port """ + def read(self): + return self._slip_reader.next() + + """ Write bytes to the serial port while performing SLIP escaping """ + def write(self, packet): + buf = '\xc0' \ + + (packet.replace('\xdb','\xdb\xdd').replace('\xc0','\xdb\xdc')) \ + + '\xc0' + self._port.write(buf) + + """ Calculate checksum of a blob, as it is defined by the ROM """ + @staticmethod + def checksum(data, state=ESP_CHECKSUM_MAGIC): + for b in data: + state ^= ord(b) + return state + + """ Send a request and read the response """ + def command(self, op=None, data=None, chk=0): + if op is not None: + pkt = struct.pack('> 16) & 0xff, (mac3 >> 8) & 0xff, mac3 & 0xff) + elif ((mac1 >> 16) & 0xff) == 0: + oui = (0x18, 0xfe, 0x34) + elif ((mac1 >> 16) & 0xff) == 1: + oui = (0xac, 0xd0, 0x74) + else: + raise FatalError("Unknown OUI") + return oui + ((mac1 >> 8) & 0xff, mac1 & 0xff, (mac0 >> 24) & 0xff) + + """ Read Chip ID from OTP ROM - see http://esp8266-re.foogod.com/wiki/System_get_chip_id_%28IoT_RTOS_SDK_0.9.9%29 """ + def chip_id(self): + id0 = self.read_reg(self.ESP_OTP_MAC0) + id1 = self.read_reg(self.ESP_OTP_MAC1) + return (id0 >> 24) | ((id1 & 0xffffff) << 8) + + """ Read SPI flash manufacturer and device id """ + def flash_id(self): + self.flash_begin(0, 0) + self.write_reg(0x60000240, 0x0, 0xffffffff) + self.write_reg(0x60000200, 0x10000000, 0xffffffff) + flash_id = self.read_reg(0x60000240) + return flash_id + + """ Abuse the loader protocol to force flash to be left in write mode """ + def flash_unlock_dio(self): + # Enable flash write mode + self.flash_begin(0, 0) + # Reset the chip rather than call flash_finish(), which would have + # write protected the chip again (why oh why does it do that?!) 
+ self.mem_begin(0,0,0,0x40100000) + self.mem_finish(0x40000080) + + """ Perform a chip erase of SPI flash """ + def flash_erase(self): + # Trick ROM to initialize SFlash + self.flash_begin(0, 0) + + # This is hacky: we don't have a custom stub, instead we trick + # the bootloader to jump to the SPIEraseChip() routine and then halt/crash + # when it tries to boot an unconfigured system. + self.mem_begin(0,0,0,0x40100000) + self.mem_finish(0x40004984) + + # Yup - there's no good way to detect if we succeeded. + # It it on the other hand unlikely to fail. + + def run_stub(self, stub, params, read_output=True): + stub = dict(stub) + stub['code'] = unhexify(stub['code']) + if 'data' in stub: + stub['data'] = unhexify(stub['data']) + + if stub['num_params'] != len(params): + raise FatalError('Stub requires %d params, %d provided' + % (stub['num_params'], len(params))) + + params = struct.pack('<' + ('I' * stub['num_params']), *params) + pc = params + stub['code'] + + # Upload + self.mem_begin(len(pc), 1, len(pc), stub['params_start']) + self.mem_block(pc, 0) + if 'data' in stub: + self.mem_begin(len(stub['data']), 1, len(stub['data']), stub['data_start']) + self.mem_block(stub['data'], 0) + self.mem_finish(stub['entry']) + + if read_output: + print 'Stub executed, reading response:' + while True: + p = self.read() + print hexify(p) + if p == '': + return + + +class ESPBOOTLOADER(object): + """ These are constants related to software ESP bootloader, working with 'v2' image files """ + + # First byte of the "v2" application image + IMAGE_V2_MAGIC = 0xea + + # First 'segment' value in a "v2" application image, appears to be a constant version value? + IMAGE_V2_SEGMENT = 4 + + +def LoadFirmwareImage(filename): + """ Load a firmware image, without knowing what kind of file (v1 or v2) it is. + + Returns a BaseFirmwareImage subclass, either ESPFirmwareImage (v1) or OTAFirmwareImage (v2). 
+ """ + with open(filename, 'rb') as f: + magic = ord(f.read(1)) + f.seek(0) + if magic == ESPROM.ESP_IMAGE_MAGIC: + return ESPFirmwareImage(f) + elif magic == ESPBOOTLOADER.IMAGE_V2_MAGIC: + return OTAFirmwareImage(f) + else: + raise FatalError("Invalid image magic number: %d" % magic) + + +class BaseFirmwareImage(object): + """ Base class with common firmware image functions """ + def __init__(self): + self.segments = [] + self.entrypoint = 0 + + def add_segment(self, addr, data, pad_to=4): + """ Add a segment to the image, with specified address & data + (padded to a boundary of pad_to size) """ + # Data should be aligned on word boundary + l = len(data) + if l % pad_to: + data += b"\x00" * (pad_to - l % pad_to) + if l > 0: + self.segments.append((addr, len(data), data)) + + def load_segment(self, f, is_irom_segment=False): + """ Load the next segment from the image file """ + (offset, size) = struct.unpack(' 0x40200000 or offset < 0x3ffe0000 or size > 65536: + raise FatalError('Suspicious segment 0x%x, length %d' % (offset, size)) + segment_data = f.read(size) + if len(segment_data) < size: + raise FatalError('End of file reading segment 0x%x, length %d (actual length %d)' % (offset, size, len(segment_data))) + segment = (offset, size, segment_data) + self.segments.append(segment) + return segment + + def save_segment(self, f, segment, checksum=None): + """ Save the next segment to the image file, return next checksum value if provided """ + (offset, size, data) = segment + f.write(struct.pack(' 16: + raise FatalError('Invalid firmware image magic=%d segments=%d' % (magic, segments)) + + for i in xrange(segments): + self.load_segment(load_file) + self.checksum = self.read_checksum(load_file) + + def save(self, filename): + with open(filename, 'wb') as f: + self.write_v1_header(f, self.segments) + checksum = ESPROM.ESP_CHECKSUM_MAGIC + for segment in self.segments: + checksum = self.save_segment(f, segment, checksum) + self.append_checksum(f, checksum) + + 
+class OTAFirmwareImage(BaseFirmwareImage): + """ 'Version 2' firmware image, segments loaded by software bootloader stub + (ie Espressif bootloader or rboot) + """ + def __init__(self, load_file=None): + super(OTAFirmwareImage, self).__init__() + self.version = 2 + if load_file is not None: + (magic, segments, first_flash_mode, first_flash_size_freq, first_entrypoint) = struct.unpack(' 16: + raise FatalError('Invalid V2 second header magic=%d segments=%d' % (magic, segments)) + + # load all the usual segments + for _ in xrange(segments): + self.load_segment(load_file) + self.checksum = self.read_checksum(load_file) + + def save(self, filename): + with open(filename, 'wb') as f: + # Save first header for irom0 segment + f.write(struct.pack(' 0: + esp._port.baudrate = baud_rate + # Read the greeting. + p = esp.read() + if p != 'OHAI': + raise FatalError('Failed to connect to the flasher (got %s)' % hexify(p)) + + def flash_write(self, addr, data, show_progress=False): + assert addr % self._esp.ESP_FLASH_SECTOR == 0, 'Address must be sector-aligned' + assert len(data) % self._esp.ESP_FLASH_SECTOR == 0, 'Length must be sector-aligned' + sys.stdout.write('Writing %d @ 0x%x... ' % (len(data), addr)) + sys.stdout.flush() + self._esp.write(struct.pack(' length: + raise FatalError('Read more than expected') + p = self._esp.read() + if len(p) != 16: + raise FatalError('Expected digest, got: %s' % hexify(p)) + expected_digest = hexify(p).upper() + digest = hashlib.md5(data).hexdigest().upper() + print + if digest != expected_digest: + raise FatalError('Digest mismatch: expected %s, got %s' % (expected_digest, digest)) + p = self._esp.read() + if len(p) != 1: + raise FatalError('Expected status, got: %s' % hexify(p)) + status_code = struct.unpack(', ) or a single +# argument. + +def load_ram(esp, args): + image = LoadFirmwareImage(args.filename) + + print 'RAM boot...' + for (offset, size, data) in image.segments: + print 'Downloading %d bytes at %08x...' 
% (size, offset), + sys.stdout.flush() + esp.mem_begin(size, div_roundup(size, esp.ESP_RAM_BLOCK), esp.ESP_RAM_BLOCK, offset) + + seq = 0 + while len(data) > 0: + esp.mem_block(data[0:esp.ESP_RAM_BLOCK], seq) + data = data[esp.ESP_RAM_BLOCK:] + seq += 1 + print 'done!' + + print 'All segments done, executing at %08x' % image.entrypoint + esp.mem_finish(image.entrypoint) + + +def read_mem(esp, args): + print '0x%08x = 0x%08x' % (args.address, esp.read_reg(args.address)) + + +def write_mem(esp, args): + esp.write_reg(args.address, args.value, args.mask, 0) + print 'Wrote %08x, mask %08x to %08x' % (args.value, args.mask, args.address) + + +def dump_mem(esp, args): + f = file(args.filename, 'wb') + for i in xrange(args.size / 4): + d = esp.read_reg(args.address + (i * 4)) + f.write(struct.pack('> 16 + args.flash_size = {18: '2m', 19: '4m', 20: '8m', 21: '16m', 22: '32m'}.get(size_id) + if args.flash_size is None: + print 'Warning: Could not auto-detect Flash size (FlashID=0x%x, SizeID=0x%x), defaulting to 4m' % (flash_id, size_id) + args.flash_size = '4m' + else: + print 'Auto-detected Flash size:', args.flash_size + + +def write_flash(esp, args): + detect_flash_size(esp, args) + flash_mode = {'qio':0, 'qout':1, 'dio':2, 'dout': 3}[args.flash_mode] + flash_size_freq = {'4m':0x00, '2m':0x10, '8m':0x20, '16m':0x30, '32m':0x40, '16m-c1': 0x50, '32m-c1':0x60, '32m-c2':0x70}[args.flash_size] + flash_size_freq += {'40m':0, '26m':1, '20m':2, '80m': 0xf}[args.flash_freq] + flash_params = struct.pack('BB', flash_mode, flash_size_freq) + + flasher = CesantaFlasher(esp, args.baud) + + for address, argfile in args.addr_filename: + image = argfile.read() + argfile.seek(0) # rewind in case we need it again + if address + len(image) > int(args.flash_size.split('m')[0]) * (1 << 17): + print 'WARNING: Unlikely to work as data goes beyond end of flash. Hint: Use --flash_size' + # Fix sflash config data. 
+ if address == 0 and image[0] == '\xe9': + print 'Flash params set to 0x%02x%02x' % (flash_mode, flash_size_freq) + image = image[0:2] + flash_params + image[4:] + # Pad to sector size, which is the minimum unit of writing (erasing really). + if len(image) % esp.ESP_FLASH_SECTOR != 0: + image += '\xff' * (esp.ESP_FLASH_SECTOR - (len(image) % esp.ESP_FLASH_SECTOR)) + t = time.time() + flasher.flash_write(address, image, not args.no_progress) + t = time.time() - t + print ('\rWrote %d bytes at 0x%x in %.1f seconds (%.1f kbit/s)...' + % (len(image), address, t, len(image) / t * 8 / 1000)) + print 'Leaving...' + if args.verify: + print 'Verifying just-written flash...' + _verify_flash(flasher, args, flash_params) + flasher.boot_fw() + + +def image_info(args): + image = LoadFirmwareImage(args.filename) + print('Image version: %d' % image.version) + print('Entry point: %08x' % image.entrypoint) if image.entrypoint != 0 else 'Entry point not set' + print '%d segments' % len(image.segments) + print + checksum = ESPROM.ESP_CHECKSUM_MAGIC + for (idx, (offset, size, data)) in enumerate(image.segments): + if image.version == 2 and idx == 0: + print 'Segment 1: %d bytes IROM0 (no load address)' % size + else: + print 'Segment %d: %5d bytes at %08x' % (idx + 1, size, offset) + checksum = ESPROM.checksum(data, checksum) + print + print 'Checksum: %02x (%s)' % (image.checksum, 'valid' if image.checksum == checksum else 'invalid!') + + +def make_image(args): + image = ESPFirmwareImage() + if len(args.segfile) == 0: + raise FatalError('No segments specified') + if len(args.segfile) != len(args.segaddr): + raise FatalError('Number of specified files does not match number of specified addresses') + for (seg, addr) in zip(args.segfile, args.segaddr): + data = file(seg, 'rb').read() + image.add_segment(addr, data) + image.entrypoint = args.entrypoint + image.save(args.output) + + +def elf2image(args): + e = ELFFile(args.input) + if args.version == '1': + image = ESPFirmwareImage() + 
else: + image = OTAFirmwareImage() + irom_data = e.load_section('.irom0.text') + if len(irom_data) == 0: + raise FatalError(".irom0.text section not found in ELF file - can't create V2 image.") + image.add_segment(0, irom_data, 16) + image.entrypoint = e.get_entry_point() + for section, start in ((".text", "_text_start"), (".data", "_data_start"), (".rodata", "_rodata_start")): + data = e.load_section(section) + image.add_segment(e.get_symbol_addr(start), data) + + image.flash_mode = {'qio':0, 'qout':1, 'dio':2, 'dout': 3}[args.flash_mode] + image.flash_size_freq = {'4m':0x00, '2m':0x10, '8m':0x20, '16m':0x30, '32m':0x40, '16m-c1': 0x50, '32m-c1':0x60, '32m-c2':0x70}[args.flash_size] + image.flash_size_freq += {'40m':0, '26m':1, '20m':2, '80m': 0xf}[args.flash_freq] + + irom_offs = e.get_symbol_addr("_irom0_text_start") - 0x40200000 + + if args.version == '1': + if args.output is None: + args.output = os.path.splitext(args.input)[-1] + '-' + image.save(args.output + "0x00000.bin") + data = e.load_section(".irom0.text") + if irom_offs < 0: + raise FatalError('Address of symbol _irom0_text_start in ELF is located before flash mapping address. Bad linker script?') + if (irom_offs & 0xFFF) != 0: # irom0 isn't flash sector aligned + print "WARNING: irom0 section offset is 0x%08x. ELF is probably linked for 'elf2image --version=2'" % irom_offs + with open(args.output + "0x%05x.bin" % irom_offs, "wb") as f: + f.write(data) + f.close() + else: # V2 OTA image + + if args.output is None: + args.output = "%s-0x%05x.bin" % (os.path.splitext(args.input)[-1], irom_offs & ~(ESPROM.ESP_FLASH_SECTOR - 1)) + image.save(args.output) + + +def read_mac(esp, args): + mac = esp.read_mac() + print 'MAC: %s' % ':'.join(map(lambda x: '%02x' % x, mac)) + + +def chip_id(esp, args): + chipid = esp.chip_id() + print 'Chip ID: 0x%08x' % chipid + + +def erase_flash(esp, args): + flasher = CesantaFlasher(esp, args.baud) + print 'Erasing flash (this may take a while)...' 
+ t = time.time() + flasher.flash_erase_chip() + t = time.time() - t + print 'Erase took %.1f seconds' % t + + +def run(esp, args): + esp.run() + + +def flash_id(esp, args): + flash_id = esp.flash_id() + esp.flash_finish(False) + print 'Manufacturer: %02x' % (flash_id & 0xff) + print 'Device: %02x%02x' % ((flash_id >> 8) & 0xff, (flash_id >> 16) & 0xff) + + +def read_flash(esp, args): + flasher = CesantaFlasher(esp, args.baud) + t = time.time() + data = flasher.flash_read(args.address, args.size, not args.no_progress) + t = time.time() - t + print ('\rRead %d bytes at 0x%x in %.1f seconds (%.1f kbit/s)...' + % (len(data), args.address, t, len(data) / t * 8 / 1000)) + file(args.filename, 'wb').write(data) + + +def _verify_flash(flasher, args, flash_params=None): + differences = False + for address, argfile in args.addr_filename: + image = argfile.read() + argfile.seek(0) # rewind in case we need it again + if address == 0 and image[0] == '\xe9' and flash_params is not None: + image = image[0:2] + flash_params + image[4:] + image_size = len(image) + print 'Verifying 0x%x (%d) bytes @ 0x%08x in flash against %s...' % (image_size, image_size, address, argfile.name) + # Try digest first, only read if there are differences. 
+ digest, _ = flasher.flash_digest(address, image_size) + digest = hexify(digest).upper() + expected_digest = hashlib.md5(image).hexdigest().upper() + if digest == expected_digest: + print '-- verify OK (digest matched)' + continue + else: + differences = True + if getattr(args, 'diff', 'no') != 'yes': + print '-- verify FAILED (digest mismatch)' + continue + + flash = flasher.flash_read(address, image_size) + assert flash != image + diff = [i for i in xrange(image_size) if flash[i] != image[i]] + print '-- verify FAILED: %d differences, first @ 0x%08x' % (len(diff), address + diff[0]) + for d in diff: + print ' %08x %02x %02x' % (address + d, ord(flash[d]), ord(image[d])) + if differences: + raise FatalError("Verify failed.") + + +def verify_flash(esp, args, flash_params=None): + flasher = CesantaFlasher(esp) + _verify_flash(flasher, args, flash_params) + + +def version(args): + print __version__ + +# +# End of operations functions +# + + +def main(): + parser = argparse.ArgumentParser(description='esptool.py v%s - ESP8266 ROM Bootloader Utility' % __version__, prog='esptool') + + parser.add_argument( + '--port', '-p', + help='Serial port device', + default=os.environ.get('ESPTOOL_PORT', '/dev/ttyUSB0')) + + parser.add_argument( + '--baud', '-b', + help='Serial port baud rate used when flashing/reading', + type=arg_auto_int, + default=os.environ.get('ESPTOOL_BAUD', ESPROM.ESP_ROM_BAUD)) + + subparsers = parser.add_subparsers( + dest='operation', + help='Run esptool {command} -h for additional help') + + parser_load_ram = subparsers.add_parser( + 'load_ram', + help='Download an image to RAM and execute') + parser_load_ram.add_argument('filename', help='Firmware image') + + parser_dump_mem = subparsers.add_parser( + 'dump_mem', + help='Dump arbitrary memory to disk') + parser_dump_mem.add_argument('address', help='Base address', type=arg_auto_int) + parser_dump_mem.add_argument('size', help='Size of region to dump', type=arg_auto_int) + 
parser_dump_mem.add_argument('filename', help='Name of binary dump') + + parser_read_mem = subparsers.add_parser( + 'read_mem', + help='Read arbitrary memory location') + parser_read_mem.add_argument('address', help='Address to read', type=arg_auto_int) + + parser_write_mem = subparsers.add_parser( + 'write_mem', + help='Read-modify-write to arbitrary memory location') + parser_write_mem.add_argument('address', help='Address to write', type=arg_auto_int) + parser_write_mem.add_argument('value', help='Value', type=arg_auto_int) + parser_write_mem.add_argument('mask', help='Mask of bits to write', type=arg_auto_int) + + def add_spi_flash_subparsers(parent, auto_detect=False): + """ Add common parser arguments for SPI flash properties """ + parent.add_argument('--flash_freq', '-ff', help='SPI Flash frequency', + choices=['40m', '26m', '20m', '80m'], + default=os.environ.get('ESPTOOL_FF', '40m')) + parent.add_argument('--flash_mode', '-fm', help='SPI Flash mode', + choices=['qio', 'qout', 'dio', 'dout'], + default=os.environ.get('ESPTOOL_FM', 'qio')) + choices = ['4m', '2m', '8m', '16m', '32m', '16m-c1', '32m-c1', '32m-c2'] + default = '4m' + if auto_detect: + default = 'detect' + choices.insert(0, 'detect') + parent.add_argument('--flash_size', '-fs', help='SPI Flash size in Mbit', type=str.lower, + choices=choices, + default=os.environ.get('ESPTOOL_FS', default)) + + parser_write_flash = subparsers.add_parser( + 'write_flash', + help='Write a binary blob to flash') + parser_write_flash.add_argument('addr_filename', metavar='
', help='Address followed by binary filename, separated by space', + action=AddrFilenamePairAction) + add_spi_flash_subparsers(parser_write_flash, auto_detect=True) + parser_write_flash.add_argument('--no-progress', '-p', help='Suppress progress output', action="store_true") + parser_write_flash.add_argument('--verify', help='Verify just-written data (only necessary if very cautious, data is already CRCed', action='store_true') + + subparsers.add_parser( + 'run', + help='Run application code in flash') + + parser_image_info = subparsers.add_parser( + 'image_info', + help='Dump headers from an application image') + parser_image_info.add_argument('filename', help='Image file to parse') + + parser_make_image = subparsers.add_parser( + 'make_image', + help='Create an application image from binary files') + parser_make_image.add_argument('output', help='Output image file') + parser_make_image.add_argument('--segfile', '-f', action='append', help='Segment input file') + parser_make_image.add_argument('--segaddr', '-a', action='append', help='Segment base address', type=arg_auto_int) + parser_make_image.add_argument('--entrypoint', '-e', help='Address of entry point', type=arg_auto_int, default=0) + + parser_elf2image = subparsers.add_parser( + 'elf2image', + help='Create an application image from ELF file') + parser_elf2image.add_argument('input', help='Input ELF file') + parser_elf2image.add_argument('--output', '-o', help='Output filename prefix (for version 1 image), or filename (for version 2 single image)', type=str) + parser_elf2image.add_argument('--version', '-e', help='Output image version', choices=['1','2'], default='1') + add_spi_flash_subparsers(parser_elf2image) + + subparsers.add_parser( + 'read_mac', + help='Read MAC address from OTP ROM') + + subparsers.add_parser( + 'chip_id', + help='Read Chip ID from OTP ROM') + + subparsers.add_parser( + 'flash_id', + help='Read SPI flash manufacturer and device ID') + + parser_read_flash = subparsers.add_parser( + 
'read_flash', + help='Read SPI flash content') + parser_read_flash.add_argument('address', help='Start address', type=arg_auto_int) + parser_read_flash.add_argument('size', help='Size of region to dump', type=arg_auto_int) + parser_read_flash.add_argument('filename', help='Name of binary dump') + parser_read_flash.add_argument('--no-progress', '-p', help='Suppress progress output', action="store_true") + + parser_verify_flash = subparsers.add_parser( + 'verify_flash', + help='Verify a binary blob against flash') + parser_verify_flash.add_argument('addr_filename', help='Address and binary file to verify there, separated by space', + action=AddrFilenamePairAction) + parser_verify_flash.add_argument('--diff', '-d', help='Show differences', + choices=['no', 'yes'], default='no') + + subparsers.add_parser( + 'erase_flash', + help='Perform Chip Erase on SPI flash') + + subparsers.add_parser( + 'version', help='Print esptool version') + + # internal sanity check - every operation matches a module function of the same name + for operation in subparsers.choices.keys(): + assert operation in globals(), "%s should be a module function" % operation + + args = parser.parse_args() + + print 'esptool.py v%s' % __version__ + + # operation function can take 1 arg (args), 2 args (esp, arg) + # or be a member function of the ESPROM class. 
+ + operation_func = globals()[args.operation] + operation_args,_,_,_ = inspect.getargspec(operation_func) + if operation_args[0] == 'esp': # operation function takes an ESPROM connection object + initial_baud = min(ESPROM.ESP_ROM_BAUD, args.baud) # don't sync faster than the default baud rate + esp = ESPROM(args.port, initial_baud) + esp.connect() + operation_func(esp, args) + else: + operation_func(args) + + +class AddrFilenamePairAction(argparse.Action): + """ Custom parser class for the address/filename pairs passed as arguments """ + def __init__(self, option_strings, dest, nargs='+', **kwargs): + super(AddrFilenamePairAction, self).__init__(option_strings, dest, nargs, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + # validate pair arguments + pairs = [] + for i in range(0,len(values),2): + try: + address = int(values[i],0) + except ValueError as e: + raise argparse.ArgumentError(self,'Address "%s" must be a number' % values[i]) + try: + argfile = open(values[i + 1], 'rb') + except IOError as e: + raise argparse.ArgumentError(self, e) + except IndexError: + raise argparse.ArgumentError(self,'Must be pairs of an address and the binary filename to write there') + pairs.append((address, argfile)) + setattr(namespace, self.dest, pairs) + +# This is "wrapped" stub_flasher.c, to be loaded using run_stub. 
+_CESANTA_FLASHER_STUB = """\ +{"code_start": 1074790404, "code": "080000601C000060000000601000006031FCFF71FCFF\ +81FCFFC02000680332D218C020004807404074DCC48608005823C0200098081BA5A9239245005803\ +1B555903582337350129230B446604DFC6F3FF21EEFFC0200069020DF0000000010078480040004A\ +0040B449004012C1F0C921D911E901DD0209312020B4ED033C2C56C2073020B43C3C56420701F5FF\ +C000003C4C569206CD0EEADD860300202C4101F1FFC0000056A204C2DCF0C02DC0CC6CCAE2D1EAFF\ +0606002030F456D3FD86FBFF00002020F501E8FFC00000EC82D0CCC0C02EC0C73DEB2ADC46030020\ +2C4101E1FFC00000DC42C2DCF0C02DC056BCFEC602003C5C8601003C6C4600003C7C08312D0CD811\ +C821E80112C1100DF0000C180000140010400C0000607418000064180000801800008C1800008418\ +0000881800009018000018980040880F0040A80F0040349800404C4A0040740F0040800F0040980F\ +00400099004012C1E091F5FFC961CD0221EFFFE941F9310971D9519011C01A223902E2D1180C0222\ +6E1D21E4FF31E9FF2AF11A332D0F42630001EAFFC00000C030B43C2256A31621E1FF1A2228022030\ +B43C3256B31501ADFFC00000DD023C4256ED1431D6FF4D010C52D90E192E126E0101DDFFC0000021\ +D2FF32A101C020004802303420C0200039022C0201D7FFC00000463300000031CDFF1A333803D023\ +C03199FF27B31ADC7F31CBFF1A3328030198FFC0000056C20E2193FF2ADD060E000031C6FF1A3328\ +030191FFC0000056820DD2DD10460800000021BEFF1A2228029CE231BCFFC020F51A33290331BBFF\ +C02C411A332903C0F0F4222E1D22D204273D9332A3FFC02000280E27B3F721ABFF381E1A2242A400\ +01B5FFC00000381E2D0C42A40001B3FFC0000056120801B2FFC00000C02000280EC2DC0422D2FCC0\ +2000290E01ADFFC00000222E1D22D204226E1D281E22D204E7B204291E860000126E012198FF32A0\ +042A21C54C003198FF222E1D1A33380337B202C6D6FF2C02019FFFC000002191FF318CFF1A223A31\ +019CFFC00000218DFF1C031A22C549000C02060300003C528601003C624600003C72918BFF9A1108\ +71C861D851E841F83112C1200DF00010000068100000581000007010000074100000781000007C10\ +0000801000001C4B0040803C004091FDFF12C1E061F7FFC961E941F9310971D9519011C01A662906\ +21F3FFC2D1101A22390231F2FF0C0F1A33590331EAFFF26C1AED045C2247B3028636002D0C016DFF\ 
+C0000021E5FF41EAFF2A611A4469040622000021E4FF1A222802F0D2C0D7BE01DD0E31E0FF4D0D1A\ +3328033D0101E2FFC00000561209D03D2010212001DFFFC000004D0D2D0C3D01015DFFC0000041D5\ +FFDAFF1A444804D0648041D2FF1A4462640061D1FF106680622600673F1331D0FF10338028030C43\ +853A002642164613000041CAFF222C1A1A444804202FC047328006F6FF222C1A273F3861C2FF222C\ +1A1A6668066732B921BDFF3D0C1022800148FFC0000021BAFF1C031A2201BFFFC000000C02460300\ +5C3206020000005C424600005C5291B7FF9A110871C861D851E841F83112C1200DF0B0100000C010\ +0000D010000012C1E091FEFFC961D951E9410971F931CD039011C0ED02DD0431A1FF9C1422A06247\ +B302062D0021F4FF1A22490286010021F1FF1A223902219CFF2AF12D0F011FFFC00000461C0022D1\ +10011CFFC0000021E9FFFD0C1A222802C7B20621E6FF1A22F8022D0E3D014D0F0195FFC000008C52\ +22A063C6180000218BFF3D01102280F04F200111FFC00000AC7D22D1103D014D0F010DFFC0000021\ +D6FF32D110102280010EFFC0000021D3FF1C031A220185FFC00000FAEEF0CCC056ACF821CDFF317A\ +FF1A223A310105FFC0000021C9FF1C031A22017CFFC000002D0C91C8FF9A110871C861D851E841F8\ +3112C1200DF0000200600000001040020060FFFFFF0012C1E00C02290131FAFF21FAFF026107C961\ +C02000226300C02000C80320CC10564CFF21F5FFC02000380221F4FF20231029010C432D010163FF\ +C0000008712D0CC86112C1200DF00080FE3F8449004012C1D0C9A109B17CFC22C1110C13C51C0026\ +1202463000220111C24110B68202462B0031F5FF3022A02802A002002D011C03851A0066820A2801\ +32210105A6FF0607003C12C60500000010212032A01085180066A20F2221003811482105B3FF2241\ +10861A004C1206FDFF2D011C03C5160066B20E280138114821583185CFFF06F7FF005C1286F5FF00\ +10212032A01085140066A20D2221003811482105E1FF06EFFF0022A06146EDFF45F0FFC6EBFF0000\ +01D2FFC0000006E9FF000C022241100C1322C110C50F00220111060600000022C1100C13C50E0022\ +011132C2FA303074B6230206C8FF08B1C8A112C1300DF0000000000010404F484149007519031027\ +000000110040A8100040BC0F0040583F0040CC2E00401CE20040D83900408000004021F4FF12C1E0\ +C961C80221F2FF097129010C02D951C91101F4FFC0000001F3FFC00000AC2C22A3E801F2FFC00000\ +21EAFFC031412A233D0C01EFFFC000003D0222A00001EDFFC00000C1E4FF2D0C01E8FFC000002D01\ 
+32A004450400C5E7FFDD022D0C01E3FFC00000666D1F4B2131DCFF4600004B22C0200048023794F5\ +31D9FFC0200039023DF08601000001DCFFC000000871C861D85112C1200DF000000012C1F0026103\ +01EAFEC00000083112C1100DF000643B004012C1D0E98109B1C9A1D991F97129013911E2A0C001FA\ +FFC00000CD02E792F40C0DE2A0C0F2A0DB860D00000001F4FFC00000204220E71240F7921C226102\ +01EFFFC0000052A0DC482157120952A0DD571205460500004D0C3801DA234242001BDD3811379DC5\ +C6000000000C0DC2A0C001E3FFC00000C792F608B12D0DC8A1D891E881F87112C1300DF00000", "\ +entry": 1074792180, "num_params": 1, "params_start": 1074790400, "data": "FE0510\ +401A0610403B0610405A0610407A061040820610408C0610408C061040", "data_start": 10736\ +43520} +""" + +if __name__ == '__main__': + try: + main() + except FatalError as e: + print '\nA fatal error occurred: %s' % e + sys.exit(2) diff --git a/Firmware/Microbit_v2/utils/generate_libraries.py b/Firmware/Microbit_v2/utils/generate_libraries.py new file mode 100644 index 0000000..45db25c --- /dev/null +++ b/Firmware/Microbit_v2/utils/generate_libraries.py @@ -0,0 +1,159 @@ +import os +import git +from git import Actor +import optparse +import fnmatch +import glob +import shutil +import ntpath +import json + +def make_cmake(lib_name, lib_file_name, include_path, dest): + print "LIB NAME " + lib_name + with open(dest + "/CMakeLists.txt", 'w') as f: + lines = [ + "project(" + lib_name + ")\r\n" + "add_library(" + lib_name + " STATIC " + lib_file_name + ")\r\n", + "set_target_properties(" + lib_name +" PROPERTIES LINKER_LANGUAGE CXX)\r\n", + "target_include_directories(" + lib_name + " PUBLIC \"" + include_path + "\")\r\n", + ] + print "LINES : " + str(lines) + f.writelines(lines) + f.close() + +def copytree(src, dst, symlinks=False, ignore=None): + if not os.path.exists(dst): + os.makedirs(dst) + for item in os.listdir(src): + s = os.path.join(src, item) + d = os.path.join(dst, item) + if os.path.isdir(s): + copytree(s, d, symlinks, ignore) + else: + if not os.path.exists(d) or os.stat(s).st_mtime - 
os.stat(d).st_mtime > 1: + shutil.copy2(s, d) + +def path_leaf(path): + head, tail = ntpath.split(path) + return tail or ntpath.basename(head) + +def recursive_glob(treeroot, pattern): + results = [] + for base, dirs, files in os.walk(treeroot): + goodfiles = fnmatch.filter(files, pattern) + results.extend(os.path.join(base, f) for f in goodfiles) + return results + +parser = optparse.OptionParser() +parser.add_option('-c', '--clean', dest='clean', action="store_true", help='Whether to clean before building.', default=False) + +(options, args) = parser.parse_args() + +os.chdir("..") + +if not os.path.exists("build"): + os.mkdir("build") + +# out of source build! +os.chdir("build") + +# configure os.system("cmake ..") +os.system("cmake .. -DCODAL_HEADER_EXTRACTION:BOOL=TRUE") + +if options.clean: + os.system("make clean") + +# build +os.system("make -j 10") + +with open('../codal.json') as data_file: + codal = json.load(data_file) + +#ntpath.basename(f) +folders = [path_leaf(f) for f in glob.glob("../libraries/*/")] +header_folders = [path_leaf(f) for f in glob.glob("./build/*/")] + +print folders +print header_folders + +mapping = [] + +#note for next time, need to copy all lib files to their appropriate build/lib place otherwise they get auto cleaned. 
+ +valid_libs = [] + +for folder in header_folders: + lib_file_name = "lib" + folder + ".a" + if not os.path.exists("./"+lib_file_name): + print "No library exists, skipping: " + lib_file_name + continue + + shutil.copy("./" + lib_file_name, "./build/"+folder) + valid_libs = valid_libs + [folder] + + +for folder in valid_libs: + lib_name = folder + lib_file_name = "lib" + folder + ".a" + folder_path = '../libraries/' + folder + header_folder = "./build/" + folder + header_ext = "includes" + + with open(folder_path + "CMakeLists.txt") as cmake: + + "target_link_libraries\((?:\s*(.+))+\s*\)" + + for line in cmake.lines(): + if "target_link_libraries" in line + + + + # get the repo + try: + repo = git.Repo('../libraries/' + folder) + except: + print folder + " is not a valid git repository." + continue + + active_branch = repo.active_branch.name + + # check for any uncommitted changes + if len(repo.index.diff(None)) > 0 : + print folder + " has uncommitted changes, skipping." + continue; + + branch_names = [b.name for b in repo.branches] + + lib_branch_name = "lib_" + codal["target"]["processor"] + codal["target"]["device"] + + # tag using above + version specified in target.json + + # swap to an orphaned branch if none exists + if lib_branch_name not in branch_names: + repo.active_branch.checkout(orphan=lib_branch_name) + + for f in glob.glob(folder_path + "/*/"): + shutil.rmtree(f) + + files = [f for f in os.listdir('.') if os.path.isfile(f)] + + for file in files: + os.remove(file) + else: + repo.active_branch.checkout(lib_branch_name) + + repo.index.remove("*", r=True) + + copytree(header_folder, folder_path + "/") + + make_cmake(lib_name, lib_file_name, header_ext, folder_path + "/") + + repo.index.add("*") + + author = Actor("codal", "codal@example.com") + + repo.index.commit("Library generated", author=author, committer=author) + + #repo.git.checkout(active_branch) + + #break diff --git a/Firmware/Microbit_v2/utils/merge_hex.py 
b/Firmware/Microbit_v2/utils/merge_hex.py new file mode 100644 index 0000000..c2e9916 --- /dev/null +++ b/Firmware/Microbit_v2/utils/merge_hex.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python + +# Copyright (c) 2015 ARM Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script will merge two hex files and write the output to a hex file. + USAGE: merge_hex.py input_file1 input_file2 output_file. +""" + +from optparse import OptionParser +import sys + +parser = OptionParser() + +#command line options +parser.add_option("-o", "--output", + action="store", + type="string", + dest="output", + default="", + help="The relative path to the headers for the microbit-dal.") + +(options, args) = parser.parse_args() + +fail_color = '' + +# If colorama is present, set the fail color to red +try: + from colorama import init, deinit, Fore + fail_color = Fore.RED +except: + pass + +def fail(message): + print(fail_color + message) + + # If we've included ANSI color in output, reset the output style + if fail_color: + print(Fore.RESET) + deinit() + + return 1 + +def convert_start_addr(hex_file): + if hex_file.start_addr and 'CS' in hex_file.start_addr: + start_addr = {'EIP': (hex_file.start_addr['CS'] * 16) + hex_file.start_addr['IP']} + hex_file.start_addr = start_addr + +def main(options, args): + # If using ANSI coloring is available, initialize colorama + if fail_color: + init() + + # Import intelhex if avaialable, otherwise fail + try: + from intelhex import IntelHex + 
except: + return fail('error: You do not have \'intelhex\' installed. Please run \'pip install intelhex\' then retry.') + + if len(options.output) is 0: + print "No output file specified" + exit(1) + + if len(args) < 2: + return fail('Only one file was provided to merge.') + exit(0) + + # Get the two hex files, merge them, and save the result + orig = IntelHex(args[0]) + convert_start_addr(orig) + + args = args[1:] + + for arg in args: + other = IntelHex(arg) + convert_start_addr(other) + orig.merge(other, overlap='replace') + + orig.write_hex_file(options.output) + +if __name__ == '__main__': + sys.exit(main(options,args)) diff --git a/Firmware/Microbit_v2/utils/python/__init__.py b/Firmware/Microbit_v2/utils/python/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Firmware/Microbit_v2/utils/python/codal_utils.py b/Firmware/Microbit_v2/utils/python/codal_utils.py new file mode 100644 index 0000000..68cb017 --- /dev/null +++ b/Firmware/Microbit_v2/utils/python/codal_utils.py @@ -0,0 +1,186 @@ +import os +import sys +import optparse +import platform +import json +import shutil +import re + +import os, re, json, xml.etree.ElementTree +from optparse import OptionParser + + +def system(cmd): + if os.system(cmd) != 0: + sys.exit(1) + +def build(clean, verbose = False): + if platform.system() == "Windows": + # configure + system("cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -G \"Ninja\"") + + # build + system("ninja") + else: + # configure + system("cmake .. 
-DCMAKE_BUILD_TYPE=RelWithDebInfo -G \"Unix Makefiles\"") + + if clean: + system("make clean") + + # build + if verbose: + system("make -j 10 VERBOSE=1") + else: + system("make -j 10") + +def read_json(fn): + json_file = "" + with open(fn) as f: + json_file = f.read() + return json.loads(json_file) + +def checkgit(): + stat = os.popen('git status --porcelain').read().strip() + if stat != "": + print("Missing checkin in", os.getcwd(), "\n" + stat) + exit(1) + +def read_config(): + codal = read_json("codal.json") + targetdir = codal['target']['name'] + target = read_json("libraries/" + targetdir + "/target.json") + return (codal, targetdir, target) + +def update(allow_detached=False): + (codal, targetdir, target) = read_config() + dirname = os.getcwd() + for ln in target['libraries']: + os.chdir(dirname + "/libraries/" + ln['name']) + system("git checkout " + ln['branch']) + system("git pull") + os.chdir(dirname + "/libraries/" + targetdir) + if ("HEAD detached" in os.popen('git branch').read().strip() and + allow_detached == False): + system("git checkout master") + system("git pull") + os.chdir(dirname) + +def revision(rev): + (codal, targetdir, target) = read_config() + dirname = os.getcwd() + os.chdir("libraries/" + targetdir) + system("git checkout " + rev) + os.chdir(dirname) + update(True) + +def printstatus(): + print("\n***%s" % os.getcwd()) + system("git status -s") + system("git rev-parse HEAD") + system("git branch") + +def status(): + (codal, targetdir, target) = read_config() + dirname = os.getcwd() + for ln in target['libraries']: + os.chdir(dirname + "/libraries/" + ln['name']) + printstatus() + os.chdir(dirname + "/libraries/" + targetdir) + printstatus() + os.chdir(dirname) + printstatus() + +def get_next_version(options): + if options.version: + return options.version + log = os.popen('git log -n 100').read().strip() + m = re.search('Snapshot v(\d+)\.(\d+)\.(\d+)(-([\w\-]+).(\d+))?', log) + if m is None: + print("Cannot determine next version from 
git log") + exit(1) + v0 = int(m.group(1)) + v1 = int(m.group(2)) + v2 = int(m.group(3)) + vB = -1 + branchName = os.popen('git rev-parse --abbrev-ref HEAD').read().strip() + if not options.branch and branchName != "master": + print("On non-master branch use -l -b") + exit(1) + suff = "" + if options.branch: + if m.group(4) and branchName == m.group(5): + vB = int(m.group(6)) + suff = "-%s.%d" % (branchName, vB + 1) + elif options.update_major: + v0 += 1 + v1 = 0 + v2 = 0 + elif options.update_minor: + v1 += 1 + v2 = 0 + else: + v2 += 1 + return "v%d.%d.%d%s" % (v0, v1, v2, suff) + +def lock(options): + (codal, targetdir, target) = read_config() + dirname = os.getcwd() + for ln in target['libraries']: + os.chdir(dirname + "/libraries/" + ln['name']) + checkgit() + stat = os.popen('git status --porcelain -b').read().strip() + if "ahead" in stat: + print("Missing push in", os.getcwd()) + exit(1) + sha = os.popen('git rev-parse HEAD').read().strip() + ln['branch'] = sha + print(ln['name'], sha) + os.chdir(dirname + "/libraries/" + targetdir) + ver = get_next_version(options) + print("Creating snaphot", ver) + system("git checkout target-locked.json") + checkgit() + target["snapshot_version"] = ver + with open("target-locked.json", "w") as f: + f.write(json.dumps(target, indent=4, sort_keys=True)) + system("git commit -am \"Snapshot %s\"" % ver) # must match get_next_version() regex + sha = os.popen('git rev-parse HEAD').read().strip() + system("git tag %s" % ver) + system("git pull") + system("git push") + system("git push --tags") + os.chdir(dirname) + print("\nNew snapshot: %s [%s]" % (ver, sha)) + +def delete_build_folder(in_folder = True): + if in_folder: + os.chdir("..") + + shutil.rmtree('./build') + os.mkdir("./build") + + if in_folder: + os.chdir("./build") + +def generate_docs(): + from doc_gen.doxygen_extractor import DoxygenExtractor + from doc_gen.md_converter import MarkdownConverter + from doc_gen.system_utils import SystemUtils + from doc_gen.doc_gen 
import generate_mkdocs + + os.chdir("..") + (codal, targetdir, target) = read_config() + + lib_dir = os.getcwd() + "/libraries/" + + libraries = [lib_dir + targetdir] + + for l in target["libraries"]: + libraries = libraries + [ lib_dir + l["name"]] + + os.chdir(lib_dir + targetdir) + + generate_mkdocs(libraries) + + diff --git a/Firmware/Microbit_v2/utils/python/doc_gen/__init__.py b/Firmware/Microbit_v2/utils/python/doc_gen/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Firmware/Microbit_v2/utils/python/doc_gen/doc_gen.py b/Firmware/Microbit_v2/utils/python/doc_gen/doc_gen.py new file mode 100644 index 0000000..97d6e65 --- /dev/null +++ b/Firmware/Microbit_v2/utils/python/doc_gen/doc_gen.py @@ -0,0 +1,93 @@ +import os, re, json, xml.etree.ElementTree +from optparse import OptionParser + +from doxygen_extractor import DoxygenExtractor +from md_converter import MarkdownConverter +from system_utils import SystemUtils + +member_func_filter = ["idleCallback", "systemCallback", "~"] + +filters = True + +utils = SystemUtils() + +### +# the trigger for generating our documentation +### +def generate_mkdocs(header_paths, type_colour = "#a71d5d", function_name_colour = "#795da3"): + + global member_func_filter + doxygen = DoxygenExtractor(os.path.abspath("."), header_paths) + markdown = MarkdownConverter(type_colour, function_name_colour, separate_defaults = True, display_defaults = False) + + doxygen.generate_doxygen() + #utils.validate_version(doxygen.working_dir, header_paths, "./docs/archive") + + file_names = utils.find_files('docs','*.md') + section_kind = ["public-func"] + meta_data_regex = re.compile( r'\[comment\]: <> \((.*?)\)', re.MULTILINE | re.DOTALL ) + + for filename in file_names: + print(filename) + + read_lines = utils.read(filename) + + file_lines = markdown.clean(read_lines, meta_data_regex) + + utils.write(filename, file_lines) + + previous = "" + + for line_number, line in enumerate(file_lines, 1): + + result = 
re.findall(meta_data_regex,line) + + if len(result) is not 0: + + meta_data = json.loads(result[0]) + + if previous is not "" and "end" in meta_data.keys() and meta_data['end'] == previous: + previous = "" + continue + elif previous is "": + try: + previous = meta_data['className'] + except: + raise Exception('There isn\'t a match for the meta_data '+ meta_data) + else: + raise Exception('There isn\'t a match for the meta_data \''+ previous + "'") + + local_filter = member_func_filter + + if "filter" in meta_data: + for member_function in meta_data["filter"]: + local_filter = local_filter + [ str(member_function) ] + + print "Custom filter applied: " + str(member_func_filter) + + class_xml_files = list(utils.find_files("./xml","*class*"+meta_data['className'] + ".xml")) + + print class_xml_files + + if len(class_xml_files) == 0: + raise Exception("Invalid classname: " + meta_data['className']) + elif len(class_xml_files) > 1: + class_xml_files + + doxygen_class_xml = xml.etree.ElementTree.parse(class_xml_files[0]).getroot() + + member_functions = [] + + for section_def in doxygen_class_xml.iter('sectiondef'): + if section_def.attrib['kind'] in section_kind: + for member_func in section_def.iter('memberdef'): + new_member = doxygen.extract_member_function(member_func, local_filter, filter= filters) + if new_member is not None: + member_functions.append(new_member) + + before = file_lines[:line_number] + after = file_lines[line_number:] + + between = markdown.gen_member_func_doc(meta_data['className'], member_functions) + + utils.write(filename, before + between + after) diff --git a/Firmware/Microbit_v2/utils/python/doc_gen/doxygen_extractor.py b/Firmware/Microbit_v2/utils/python/doc_gen/doxygen_extractor.py new file mode 100644 index 0000000..9207c9d --- /dev/null +++ b/Firmware/Microbit_v2/utils/python/doc_gen/doxygen_extractor.py @@ -0,0 +1,242 @@ +import os +from system_utils import SystemUtils + +class DoxygenExtractor: + + md_special_chars =[ + { + "md_char": 
"*", + "replacement": "*" + }, + { + "md_char": "#", + "replacement": "#" + }, + { + "md_char": "`", + "replacement": "·" + } + ] + + #constructor + def __init__(self, root, header_paths, working_dir = "./temp", doxygen_xml_dest = "./xml"): + os.chdir(root) + self.header_paths = header_paths + self.utils = SystemUtils() + self.doxygen_xml_dest = doxygen_xml_dest + self.working_dir = working_dir + + ### + # this function copies headers recursively from a source director to a destination + # directory. + ### + def get_headers(self, from_dir, to_dir): + self.utils.copy_files(from_dir, to_dir, "*.h") + + ### + # Strips out reserved characters used in markdown notation, and replaces them + # with html character codes. + # + # @param text the text to strip and replace the md special characters + # + # @return the stripped text. + ### + def escape_md_chars(self, text): + for char in self.md_special_chars: + text = text.replace(char['md_char'], "\\" + char['md_char']) + return text + + + ### + # this function extracts data from an element tag ignoring the tag 'ref', but + # obtains the textual data it has inside the ref tag. + # + # @param element the element to process + # + # @return a list of extracted strings. + ### + def extract_ignoring_refs(self, element): + list = [] + + if element.text is not None: + list.append(element.text) + + for ref in element.iter(tag="ref"): + list.append(ref.text) + + return list + + ### + # this function extracts data from an element tag including all sub elements + # (recursive) + # + # @param element the element to process + # + # @return a list of extracted strings. 
+ ### + def extract_with_subelements(self, element): + list = [] + + list.append(element.text or "") + + #if element.text is not None: + #list.append(element.text) + + for subelement in element: + if subelement is not None: + list = list + self.extract_with_subelements(subelement) + + list.append(element.tail or "") + + return list + + ### + # this function was at one point intended to fetch a value of a default parameter + # it is now only used to fetch the default parameters' name. + # + # @param document_root the root of the entire document + # @param element the element containing the default parameter + # + # @return a dictionary containing: + # { + # 'name':'', + # 'value':'' + # } + # + # @note this would be more useful if it return the value, it currently does not. + ### + def extract_default(self, element): + ref = element.find("ref") + return {'name':' '.join(element.itertext()), 'value':''} + + ### + # extracts a member function form the xml document + # + # @param root the document root + # @param xml_element the member function xml element. 
+ # + # @return a function dictionary: + # { + # 'short_name':"", + # 'name':"", + # 'return_type':"", + # 'params':[], + # 'description':[], + # 'returns':"", + # 'notes':"", + # 'examples':"" + # } + ### + def extract_member_function(self, xml_element, function_filter = [], filter = True): + + function = { + 'short_name':"", + 'name':"", + 'return_type':"", + 'params':[], + 'description':[], + 'returns':"", + 'notes':"", + 'examples':"" + } + + function['name'] = xml_element.find('definition').text + function['short_name'] = xml_element.find('name').text + + if filter and any(filtered_func in function['short_name'] for filtered_func in function_filter): + print "Filtered out: " + function['short_name'] + return + + print "Generating documentation for: " + function['short_name'] + + if xml_element.find('type') is not None: + function['return_type'] = self.escape_md_chars(' '.join(self.extract_ignoring_refs(xml_element.find('type')))) + + #extract our parameters for this member function + for parameter in xml_element.iter('param'): + + type = "" + name = "" + + if parameter.find('type') is not None: + type = self.escape_md_chars(' '.join(parameter.find('type').itertext())) + + if parameter.find('declname') is not None: + name = ' '.join(self.extract_ignoring_refs(parameter.find('declname'))) + + param_object = { + 'type': type, + 'name': name, + 'default':{ + 'name':"", + 'value':"" + } + } + + if parameter.find('defval') is not None: + extracted = self.extract_default(parameter.find('defval')) + param_object['default']['name'] = extracted['name'] + param_object['default']['value'] = extracted['value'] + + function['params'].append(param_object) + + + detailed_description = xml_element.find('detaileddescription') + + if len(detailed_description.findall("para")) is not 0: + for para in detailed_description.findall("para"): + if len(para.findall("programlisting")) is 0 and len(para.findall("simplesect")) is 0: + function['description'] = function['description'] + 
self.extract_with_subelements(para) + + #para indicates a new paragraph - we should treat it as such... append \n! + function['description'] = function['description'] + ["\n\n"] + + if len(detailed_description.findall("para/simplesect[@kind='return']/para")) is not 0: + return_section = detailed_description.findall("para/simplesect[@kind='return']/para")[0] + function['returns'] = ' '.join(return_section.itertext()) + + if len(detailed_description.findall("para/simplesect[@kind='note']/para")) is not 0: + return_section = detailed_description.findall("para/simplesect[@kind='note']/para")[0] + function['notes'] = ' '.join(return_section.itertext()) + + examples = detailed_description.find('para/programlisting') + + if examples is not None: + function['examples'] = ''.join([('' if index is 0 else ' ')+word for index, word in enumerate(examples.itertext(),1) ]) + + param_list = detailed_description.findall('para/parameterlist') + + if len(param_list) is not 0: + for parameter_desc in param_list[0].findall('parameteritem'): + + param_descriptor = { + 'name':'', + 'description':'' + } + + param_name = parameter_desc.findall('parameternamelist/parametername') + additional = parameter_desc.findall('parameterdescription/para') + + if len(param_name) is not 0: + param_descriptor['name'] = param_name[0].text + + if len(additional) is not 0: + param_descriptor['description'] = ' '.join(additional[0].itertext()) + + for descriptor in function['params']: + if param_descriptor['name'] in descriptor['name']: + descriptor['description'] = param_descriptor['description'] + + return function + + def generate_doxygen(self): + self.utils.mk_dir(self.working_dir) + self.utils.clean_dir(self.working_dir) + + for path in self.header_paths: + self.get_headers(path, self.working_dir) + + if os.path.exists(self.doxygen_xml_dest): + self.utils.clean_dir(self.doxygen_xml_dest) + + os.system('doxygen doxy-config.cfg') diff --git a/Firmware/Microbit_v2/utils/python/doc_gen/md_converter.py 
import re, json, copy


class MarkdownConverter:
    """Converts doxygen-extracted member-function metadata into markdown lines.

    type_colour / function_name_colour are the HTML colours used when rendering
    type names and function names; separate_defaults presents defaulted
    parameters as separate overloads; display_defaults shows "= default" text
    next to parameters.
    """

    def __init__(self, type_colour, function_name_colour, separate_defaults=True, display_defaults=False):
        self.type_colour = type_colour
        self.function_name_colour = function_name_colour
        self.separate_defaults = separate_defaults
        self.display_defaults = display_defaults

    def wrap_text(self, text, color):
        """Wrap text in an inline HTML div of the given colour."""
        # NOTE(review): the original HTML literal was garbled in this copy of the
        # file; reconstructed as the conventional coloured inline wrapper used by
        # the upstream doc generator - confirm against the original source.
        return "<div style='color:" + color + "; display:inline-block'>" + text + "</div>"

    def clean(self, file_lines, regexp):
        """Remove previously generated markdown from a file.

        A line whose regexp match parses as JSON containing 'className' marks the
        start of a generated section; one containing 'end' marks its end.  Returns
        the list of lines with the section between the two markers removed.
        """
        start = 0
        end = 0

        for line_number, line in enumerate(file_lines, 1):
            result = re.findall(regexp, line)

            # was `len(result) is not 0`: identity comparison on an int relies on
            # CPython small-int caching; use an equality test instead
            if len(result) != 0:
                meta_data = json.loads(result[0])
                keys = meta_data.keys()

                # 'className' indicates the beginning of a meta_data section
                if 'className' in keys:
                    start = line_number

                # 'end' indicates the end of a meta_data section
                if 'end' in keys:
                    end = line_number - 1

        return file_lines[:start] + file_lines[end:]

    def derive_functions(self, member_func):
        """Derive the alternative (overload) versions of a member function.

        Parameters that carry default values are peeled off one by one, producing
        a "bare" variant plus one variant per additional defaulted parameter.
        Returns a list of function dictionaries based on the original.
        """
        member_functions_derived = []

        if len(member_func['params']) != 0:

            # index of the first parameter that carries a default value
            param_index = 0
            for param in member_func['params']:
                if len(param['default']['name']) == 0:
                    param_index = param_index + 1
                else:
                    break

            bare_function = {
                'short_name': member_func['short_name'],
                'name': member_func['name'],
                'params': [],
                'description': member_func['description'],
                'returns': member_func['returns'],
                'notes': member_func['notes'],
                'examples': member_func['examples'],
                'return_type': member_func['return_type'],
            }

            for i in range(0, param_index):
                bare_function['params'] = bare_function['params'] + [member_func['params'][i]]

            member_functions_derived = member_functions_derived + [bare_function]

            current = copy.copy(bare_function)

            # lists retain references, so we have to copy objects to maintain
            # separation between the derived variants
            for remainder in range(param_index, len(member_func['params'])):
                current['params'] = current['params'] + [member_func['params'][remainder]]
                member_functions_derived = member_functions_derived + [current]
                current = copy.copy(current)

        else:
            member_functions_derived = member_functions_derived + [member_func]

        return member_functions_derived

    def gen_param_text(self, param):
        """Generate the markdown text for a single parameter dictionary."""
        text = "\n> "

        if param['type'] is not None:
            text = text + " " + self.wrap_text(param['type'], self.type_colour)

        text = text + " " + param['name']

        if self.display_defaults:
            # NOTE(review): nesting of the default-value clauses was lost in this
            # copy; the backtick is only closed when a default name exists - confirm.
            if len(param['default']['name']) != 0:
                text = text + " `= " + param['default']['name']

                if len(param['default']['value']) != 0:
                    text = text + param['default']['value']

                text = text + "`"

        if 'description' in param.keys():
            text = text + " - " + param['description']

        # strip non-ascii, then decode back so the caller always receives text;
        # a bare .encode() would yield bytes under python3 and break the later
        # string concatenation in gen_member_func_doc
        return text.encode('ascii', 'ignore').decode('ascii')

    def gen_member_func_doc(self, class_name, member_functions):
        """Generate markdown lines documenting a list of member functions.

        class_name is the current class (from the meta data) - functions whose
        short name matches it are presented as constructors.  Returns the list of
        new lines to insert into the file currently being processed.

        Each member-function dictionary contains: short_name, name, return_type,
        params, description, returns, notes, examples (and optionally default).
        """
        lines = []

        for index, member_func in enumerate(member_functions, 0):

            member_functions_derived = []

            # emit a section heading once per group of identically named overloads
            if index == 0 or member_func['short_name'] != member_functions[index - 1]['short_name']:
                if class_name == member_func["short_name"]:
                    lines.append("##Constructor\n")
                else:
                    lines.append("##" + member_func["short_name"] + "\n")

            # we want to clearly separate the different levels of functions in the
            # DAL, so methods with defaults are presented as overloads
            if self.separate_defaults is True:
                member_functions_derived = member_functions_derived + self.derive_functions(member_func)

            for derived_func in member_functions_derived:
                # ---- short name for urls ----
                # NOTE(review): this literal was garbled in this copy; upstream
                # emits a <br/> separator here - confirm.
                lines.append("<br/>\n")

                if len(derived_func["return_type"]) != 0:
                    short_name = ("####" + self.wrap_text(derived_func["return_type"], self.type_colour)
                                  + " " + self.wrap_text(derived_func["short_name"], self.function_name_colour) + "(")
                else:
                    short_name = "####" + derived_func["short_name"] + "("

                last_param = None

                if len(derived_func['params']) != 0:
                    last_param = derived_func['params'][-1]

                # generate parameters for the name of this function
                for param in derived_func['params']:
                    text = ""

                    if param['type'] is not None:
                        text = text + " " + self.wrap_text(param['type'], self.type_colour)

                    text = text + " " + param['name']

                    # identity test is intentional: "is the same dict object as
                    # the final entry", which decides the trailing comma
                    if param is not last_param:
                        short_name = short_name + text + ", "
                    else:
                        short_name = short_name + text

                lines.append(short_name + ")\n")

                # ---- description ----
                if len(derived_func['description']) != 0:
                    lines.append("#####Description\n")
                    lines.append(' '.join(derived_func['description']) + "\n")

                # ---- parameters ----
                if len(derived_func['params']) != 0:
                    lines.append("#####Parameters\n")

                    for param in derived_func['params']:
                        lines.append(self.gen_param_text(param) + "\n")

                # ---- returns ----
                if len(derived_func['returns']) != 0:
                    lines.append("#####Returns\n")
                    lines.append(derived_func['returns'] + "\n")

                # ---- examples ----
                if len(derived_func['examples']) != 0:
                    lines.append("#####Example\n")
                    lines.append("```cpp\n")
                    lines.append(derived_func['examples'])
                    lines.append("```\n")

                # ---- notes ----
                if len(derived_func['notes']) != 0:
                    # rendered as a mkdocs admonition; continuation lines indented
                    lines.append("\n!!! note\n")
                    lines.append("    " + derived_func['notes'].replace('\n', '\n    '))
                    lines.append('\n\n')

            lines.append("____\n")

        return lines
class SystemUtils:
    """File-system and versioning helpers used by the documentation generator."""

    # module folders that should never be scanned for documentation sources
    folder_filter = ["ble", "ble-nrf51822", "mbed-classic", "nrf51-sdk"]

    def read(self, path, plain=False):
        """Read a file at path; returns a list of lines, or the raw text when plain=True."""
        if plain:
            return self.__read_plain(path)
        # was a py2 `print` statement; parenthesised form works on py2 and py3
        print("Opening: " + path + " \n")
        with open(path, 'r') as file:
            return file.readlines()

    def __read_plain(self, path):
        # Return the whole file as one string.
        print("Opening: " + path + " \n")
        with open(path, 'r') as file:
            return file.read()

    def write(self, path, lines):
        """Write the given list of lines to path, replacing its contents."""
        print("Writing to: " + path + " \n")
        with open(path, 'w') as file:
            file.writelines(lines)

    # http://stackoverflow.com/questions/2186525/use-a-glob-to-find-files-recursively-in-python
    def find_files(self, directory, pattern):
        """Yield paths under directory matching pattern, skipping filtered folders."""
        print("DIR:")
        for root, dirs, files in os.walk(directory):
            # skip any tree whose path mentions a filtered module folder
            if any(excluded in root for excluded in self.folder_filter):
                continue

            for basename in files:
                if fnmatch.fnmatch(basename, pattern):
                    yield os.path.join(root, basename)

    def clean_dir(self, dir):
        """Remove every file and sub-folder inside dir (dir itself is kept)."""
        for root, dirs, files in os.walk(dir):
            for f in files:
                os.unlink(os.path.join(root, f))
            for d in dirs:
                shutil.rmtree(os.path.join(root, d))

    def copy_files(self, from_dir, to_dir, pattern):
        """Copy every file under from_dir matching pattern into to_dir."""
        files = self.find_files(from_dir, pattern)

        print("FILES!!!! ")
        for file in files:
            print(file)
            shutil.copy(file, to_dir)

    def mk_dir(self, path):
        """Create path (and any missing parents) if it does not already exist."""
        if not os.path.exists(path):
            os.makedirs(path)

    def copytree(self, src, dst, symlinks=False, ignore=None):
        """Recursively copy src into dst, only overwriting clearly older files."""
        if not os.path.exists(dst):
            os.makedirs(dst)
        for item in os.listdir(src):
            s = os.path.join(src, item)
            d = os.path.join(dst, item)
            if os.path.isdir(s):
                self.copytree(s, d, symlinks, ignore)
            else:
                # copy when missing, or when source is more than a second newer
                if not os.path.exists(d) or os.stat(s).st_mtime - os.stat(d).st_mtime > 1:
                    shutil.copy2(s, d)

    def __add_version_info(self, version_string, extract_location):
        # Prepend an "old version" warning banner to the archived site's base.js.
        content_path = extract_location + "js/base.js"
        lines = self.read(content_path)
        # NOTE(review): this HTML literal was garbled in this copy; reconstructed
        # as a mkdocs-style warning admonition, with quotes escaped for embedding
        # inside the double-quoted JS string below - confirm against the original.
        html_string = ('<div class=\\"admonition warning\\">'
                       '<p class=\\"admonition-title\\">Warning</p>'
                       '<p>You are viewing documentation for ' + version_string + '</p>'
                       '</div>')
        lines[0] = '$(document).ready(function() { $(\'div[role="main"]\').prepend("' + html_string + '") });'
        self.write(content_path, lines)

    def validate_version(self, working_dir, module_paths, extract_location):
        """Check the module.json versions agree, and archive the previous docs.

        Raises when the versions under module_paths differ.  When the version in
        mkdocs.yml is stale, downloads the published gh-pages site, stores it under
        extract_location/<old version>/, stamps it with a warning banner, and
        updates mkdocs.yml to the new version.
        """
        import yaml

        module_string = "/module.json"
        # NOTE(review): yaml.load without an explicit Loader is deprecated and can
        # execute arbitrary tags; yaml.safe_load is preferable for this config.
        mkdocs_yml = yaml.load(self.read("./mkdocs.yml", plain=True))

        module_strings = []

        for current_path in module_paths:
            module_strings = module_strings + [json.loads(self.read(current_path + module_string, plain=True))["version"]]

        # all collected versions must be identical
        if module_strings[1:] != module_strings[:-1]:
            raise Exception("Version mismatch exception! microbit-dal and microbit are not compatible versions.")

        module_string = "v" + str(module_strings[0])

        if mkdocs_yml["versioning"]["runtime"] != module_string:
            # capture old site, save in docs/historic/versionNumber
            zip_dest = working_dir + "/" + str(mkdocs_yml["versioning"]["runtime"]) + ".zip"

            extract_folder = extract_location + "/" + mkdocs_yml["versioning"]["runtime"] + "/"

            # NOTE(review): urllib.urlretrieve is python2-only; python3 requires
            # urllib.request.urlretrieve - confirm the intended interpreter.
            urllib.urlretrieve("https://github.com/lancaster-university/microbit-docs/archive/gh-pages.zip", zip_dest)

            zip_ref = zipfile.ZipFile(zip_dest)

            # the archive contents are prefixed with the repo/branch folder name
            archive_name = working_dir + "/" + zip_ref.namelist()[0]

            zip_ref.extractall(working_dir)
            zip_ref.close()

            self.copytree(archive_name, extract_folder)

            self.__add_version_info(mkdocs_yml["versioning"]["runtime"], extract_folder)

            self.clean_dir(archive_name)

            mkdocs_yml["versioning"]["runtime"] = module_string

            with open("./mkdocs.yml", "w") as f:
                yaml.dump(mkdocs_yml, f, default_flow_style=False)
"device_url":"https://store.arduino.cc/arduino-uno-rev3", + "url":"https://github.com/lancaster-university/codal-arduino-uno", + "branch":"master", + "type":"git" + }, + { + "name":"codal-circuit-playground", + "info":"This target specifies the circuit playground which is driven by a SAMD21.", + "device_url":"https://www.adafruit.com/product/3333", + "url":"https://github.com/lancaster-university/codal-circuit-playground", + "branch":"master", + "type":"git" + }, + { + "name":"codal-microbit", + "info":"This target specifies the microbit, which uses the nordic NRF51822.", + "device_url":"https://microbit.org", + "url":"https://github.com/lancaster-university/codal-microbit", + "test_ignore":true, + "branch":"codal-microbit-mbed", + "type":"git" + }, + { + "name":"codal-huzzah", + "info":"This target specifies the HUZZAH which is driven by a ESP8266.", + "device_url":"https://www.adafruit.com/product/3405", + "url":"https://github.com/lancaster-university/codal-huzzah", + "test_ignore":true, + "branch":"master", + "type":"git" + }, + { + "name":"codal-brainpad", + "info":"This target specifies the BRAINPAD which is driven by a STM32f.", + "device_url":"https://brainpad.com", + "url":"https://github.com/lancaster-university/codal-brainpad", + "branch":"master", + "type":"git" + }, + { + "name":"codal-microbit-next", + "info":"version 1.4 revision of the BBC micro:bit.", + "device_url":"https://www.microbit.org", + "url":"https://github.com/microbit-foundation/codal-microbit-next", + "test_ignore":true, + "branch":"nrf52833-mbedos", + "type":"git" + }, + { + "name":"codal-ble-nano", + "info":"This target specifies the ble-nano by RedBear which is driven by a NRF52.", + "device_url":"https://redbear.cc/product/ble-nano-kit-2.html", + "url":"https://github.com/lancaster-university/codal-ble-nano", + "branch":"master", + "type":"git" + }, + { + "name":"codal-stm32-iot-node", + "info":"This target specifies the STM32 IoT Node board which is driven by a STM32L475.", + 
#!/usr/bin/python
"""Convert firmware images to/from the UF2 container format, and flash them."""

import sys
import struct
import subprocess
import re
import os
import os.path
import argparse

UF2_MAGIC_START0 = 0x0A324655  # "UF2\n"
UF2_MAGIC_START1 = 0x9E5D5157  # Randomly selected
UF2_MAGIC_END = 0x0AB16F30     # Ditto

# name of the marker file a mounted UF2 drive exposes
INFO_FILE = "/INFO_UF2.TXT"

# default flash base address for the application; overridden by --base and by
# the first block of a converted UF2 image
appstartaddr = 0x2000


def isUF2(buf):
    """Return True when buf (bytes) starts with the two UF2 magic words."""
    # NOTE(review): the struct format string was garbled in this copy; "<II"
    # (two little-endian uint32) is the UF2 header prefix - confirm.
    w = struct.unpack(b"<II", buf[0:8])
    return w[0] == UF2_MAGIC_START0 and w[1] == UF2_MAGIC_START1


def convertFromUF2(buf):
    """Unpack a UF2 image (bytes) into the raw binary payload (bytes).

    Side effect: sets the module-level appstartaddr to the target address of the
    first valid block.  Asserts on oversized, out-of-order or misaligned blocks.
    """
    global appstartaddr
    numblocks = len(buf) // 512          # was `/`: float under python3
    curraddr = None
    outp = b""
    for blockno in range(0, numblocks):
        ptr = blockno * 512
        block = buf[ptr:ptr + 512]
        # NOTE(review): the header unpack and magic/flag checks were garbled in
        # this copy; reconstructed from the UF2 block layout (8 LE uint32s:
        # magic0, magic1, flags, address, size, block no, total blocks, ...).
        hd = struct.unpack(b"<IIIIIIII", block[0:32])
        if hd[0] != UF2_MAGIC_START0 or hd[1] != UF2_MAGIC_START1:
            print("Skipping block at " + str(ptr) + "; bad magic")
            continue
        if hd[2] & 1:
            # NO-flash flag set; skip block
            continue
        datalen = hd[4]
        if datalen > 476:
            # was `"..." + ptr`: concatenating str + int raises TypeError
            # instead of reporting the offset; str(ptr) fixes the message
            assert False, "Invalid UF2 data size at " + str(ptr)
        newaddr = hd[3]
        if curraddr == None:
            appstartaddr = newaddr
            curraddr = newaddr
        padding = newaddr - curraddr
        if padding < 0:
            assert False, "Block out of order at " + str(ptr)
        if padding > 10 * 1024 * 1024:
            assert False, "More than 10M of padding needed at " + str(ptr)
        if padding % 4 != 0:
            assert False, "Non-word padding size at " + str(ptr)
        while padding > 0:
            padding -= 4
            outp += b"\x00\x00\x00\x00"
        outp += block[32:32 + datalen]
        curraddr = newaddr + datalen
    return outp


def convertToUF2(fileContent):
    """Pack a raw binary (bytes) into 512-byte UF2 blocks based at appstartaddr."""
    datapadding = b""
    while len(datapadding) < 512 - 256 - 32 - 4:
        datapadding += b"\x00\x00\x00\x00"
    numblocks = (len(fileContent) + 255) // 256   # was `/`: float under python3
    outp = b""
    for blockno in range(0, numblocks):
        ptr = 256 * blockno
        chunk = fileContent[ptr:ptr + 256]
        # NOTE(review): the header pack was garbled in this copy; reconstructed
        # from the UF2 block layout (flags=0, 256 payload bytes per block).
        hd = struct.pack(b"<IIIIIIII",
                         UF2_MAGIC_START0, UF2_MAGIC_START1,
                         0, ptr + appstartaddr, 256, blockno, numblocks, 0)
        while len(chunk) < 256:
            chunk += b"\x00"
        block = hd + chunk + datapadding + struct.pack(b"<I", UF2_MAGIC_END)
        assert len(block) == 512
        outp += block
    return outp


def getdrives():
    """Return the mount points that look like UF2 drives (contain INFO_UF2.TXT)."""
    drives = []
    if sys.platform == "win32":
        # NOTE(review): the windows branch was garbled in this copy; reconstructed
        # from the upstream uf2conv tool (wmic logical-disk listing) - confirm.
        r = subprocess.check_output(["wmic", "PATH", "Win32_LogicalDisk",
                                     "get", "DeviceID,", "VolumeName,",
                                     "FileSystem,", "DriveType"])
        for line in r.decode().split('\n'):
            words = re.split(r'\s+', line)
            # DriveType 2 == removable drive, FAT filesystem
            if len(words) >= 3 and words[1] == "2" and words[2] == "FAT":
                drives.append(words[0])
    else:
        rootpath = "/media"
        if sys.platform == "darwin":
            rootpath = "/Volumes"
        elif sys.platform == "linux":
            tmp = rootpath + "/" + os.environ["USER"]
            if os.path.isdir(tmp):
                rootpath = tmp
        for d in os.listdir(rootpath):
            drives.append(os.path.join(rootpath, d))

    def hasInfo(d):
        try:
            return os.path.isfile(d + INFO_FILE)
        except:
            return False

    # list() so callers can take len() under python3, where filter() is lazy
    return list(filter(hasInfo, drives))


def boardID(path):
    """Read the Board-ID field from a drive's INFO_UF2.TXT."""
    with open(path + INFO_FILE, mode='r') as file:
        fileContent = file.read()
        return re.search("Board-ID: ([^\r\n]*)", fileContent).group(1)


def listdrives():
    """Print every connected UF2 drive together with its board id."""
    for d in getdrives():
        print(d, boardID(d))


def writeFile(name, buf):
    """Write buf (bytes) to the file called name."""
    with open(name, "wb") as f:
        f.write(buf)
    print("Wrote %d bytes to %s." % (len(buf), name))
% (len(buf), name) + +def main(): + global appstartaddr + def error(msg): + print msg + sys.exit(1) + parser = argparse.ArgumentParser(description='Convert to UF2 or flash directly.') + parser.add_argument('input', metavar='INPUT', type=str, nargs='?', + help='input file (BIN or UF2)') + parser.add_argument('-b' , '--base', dest='base', type=str, + default="0x2000", + help='set base address of application (default: 0x2000)') + parser.add_argument('-o' , '--output', metavar="FILE", dest='output', type=str, + help='write output to named file; defaults to "flash.uf2" or "flash.bin" where sensible') + parser.add_argument('-d' , '--device', dest="device_path", + help='select a device path to flash') + parser.add_argument('-l' , '--list', action='store_true', + help='list connected devices') + parser.add_argument('-c' , '--convert', action='store_true', + help='do not flash, just convert') + args = parser.parse_args() + appstartaddr = int(args.base, 0) + if args.list: + listdrives() + else: + if not args.input: + error("Need input file") + with open(args.input, mode='rb') as file: + inpbuf = file.read() + fromUF2 = isUF2(inpbuf) + ext = "uf2" + if fromUF2: + outbuf = convertFromUF2(inpbuf) + ext = "bin" + else: + outbuf = convertToUF2(inpbuf) + print "Converting to %s, output size: %d, start address: 0x%x" % (ext, len(outbuf), appstartaddr) + + if args.convert: + drives = [] + if args.output == None: + args.output = "flash." + ext + else: + drives = getdrives() + + if args.output: + writeFile(args.output, outbuf) + else: + if len(drives) == 0: + error("No drive to deploy.") + for d in drives: + print "Flashing %s (%s)" % (d, boardID(d)) + writeFile(outbuf, d + "/NEW.UF2") + +if __name__ == "__main__": + main()