aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJakob Stendahl <jakobste@uio.no>2021-01-11 13:41:18 +0100
committerJakob Stendahl <jakobste@uio.no>2021-01-11 13:41:18 +0100
commitd17bc0fc4bb057378fadf3f9feb0de1df60d611a (patch)
treeca3069eeacb0b7379cb289d87be932956e449d9c
parent19d65c7b2e287223113ab916e103638c5c5003f5 (diff)
downloadhoverbit-ble-d17bc0fc4bb057378fadf3f9feb0de1df60d611a.tar.gz
hoverbit-ble-d17bc0fc4bb057378fadf3f9feb0de1df60d611a.zip
:sparkles: Add working bluetooth receiver
-rw-r--r--.clang-format8
-rw-r--r--.gitignore16
-rw-r--r--.yotta_ignore2
-rw-r--r--CMakeLists.txt265
-rw-r--r--Dockerfile18
-rw-r--r--LICENSE21
-rwxr-xr-xbuild.py168
-rw-r--r--codal.json17
-rw-r--r--module.json16
-rw-r--r--source/HoverBitController.cpp182
-rw-r--r--source/HoverBitController.h68
-rw-r--r--source/Screen.cpp41
-rw-r--r--source/Screen.h73
-rw-r--r--source/main.cpp269
-rw-r--r--utils/__init__.py0
-rw-r--r--utils/cmake/JSONParser.cmake309
-rw-r--r--utils/cmake/buildtools/codal.cmake85
-rw-r--r--utils/cmake/buildtools/yotta.cmake23
-rw-r--r--utils/cmake/colours.cmake19
-rw-r--r--utils/cmake/toolchains/ARM_GCC/bin-generator.cmake9
-rw-r--r--utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake49
-rw-r--r--utils/cmake/toolchains/ARM_GCC/hex-generator.cmake9
-rw-r--r--utils/cmake/toolchains/ARM_GCC/platform_includes.h10
-rw-r--r--utils/cmake/toolchains/ARM_GCC/toolchain.cmake26
-rw-r--r--utils/cmake/toolchains/AVR_GCC/bin-generator.cmake9
-rw-r--r--utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake43
-rw-r--r--utils/cmake/toolchains/AVR_GCC/hex-generator.cmake9
-rw-r--r--utils/cmake/toolchains/AVR_GCC/platform_includes.h14
-rw-r--r--utils/cmake/toolchains/AVR_GCC/toolchain.cmake29
-rw-r--r--utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake9
-rw-r--r--utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake43
-rw-r--r--utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake9
-rw-r--r--utils/cmake/toolchains/XTENSA_GCC/platform_includes.h10
-rw-r--r--utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake26
-rw-r--r--utils/cmake/util.cmake156
-rwxr-xr-xutils/debug/dmesg.js86
-rwxr-xr-xutils/debug/meminfo.js65
-rwxr-xr-xutils/esptool.py1274
-rw-r--r--utils/generate_libraries.py159
-rw-r--r--utils/merge_hex.py93
-rw-r--r--utils/python/__init__.py0
-rw-r--r--utils/python/codal_utils.py186
-rw-r--r--utils/python/doc_gen/__init__.py0
-rw-r--r--utils/python/doc_gen/doc_gen.py93
-rw-r--r--utils/python/doc_gen/doxygen_extractor.py242
-rw-r--r--utils/python/doc_gen/md_converter.py242
-rw-r--r--utils/python/doc_gen/system_utils.py137
-rw-r--r--utils/targets.json105
-rw-r--r--utils/uf2conv.py172
49 files changed, 4914 insertions, 0 deletions
diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..9854c77
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,8 @@
+BasedOnStyle: LLVM
+IndentWidth: 4
+UseTab: Never
+ColumnLimit: 100
+BreakBeforeBraces: Allman
+AccessModifierOffset: -4
+AllowShortFunctionsOnASingleLine: Inline
+SortIncludes: false
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..161c360
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+build
+libraries
+.yotta.json
+yotta_modules
+yotta_targets
+*.swp
+*~
+Makefile
+*.hex
+*.DS_Store
+.vscode
+*.uf2
+*.bin
+pxtapp
+buildcache.json
+*.pyc
diff --git a/.yotta_ignore b/.yotta_ignore
new file mode 100644
index 0000000..2dceefc
--- /dev/null
+++ b/.yotta_ignore
@@ -0,0 +1,2 @@
+# build.py uses CMake as well and this top level file is not Yotta compatible
+CMakeLists.txt
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..ac32e4b
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,265 @@
+# The MIT License (MIT)
+
+# Copyright (c) 2017 Lancaster University.
+
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+cmake_minimum_required(VERSION 3.3)
+
+# include additional cmake
+include(utils/cmake/JSONParser.cmake)
+include(utils/cmake/util.cmake)
+include(utils/cmake/colours.cmake)
+
+if (NOT "${BUILD_TOOL}" STRGREATER "")
+ set(BUILD_TOOL "CODAL")
+endif()
+
+#
+# Supress unecessary (and often inaccurate) validity check of the toolchain
+#
+set(CMAKE_C_COMPILER_WORKS 1)
+set(CMAKE_CXX_COMPILER_WORKS 1)
+
+
+#read our config file...
+file(READ "./codal.json" codal_json)
+sbeParseJson(codal codal_json)
+
+set(CODAL_APP_OUTPUT_DIR ".")
+set(CODAL_APP_SOURCE_DIR "source")
+
+if("${codal.application}" STRGREATER "")
+ set(CODAL_APP_SOURCE_DIR "${codal.application}")
+endif()
+
+if("${codal.output_folder}" STRGREATER "")
+ set(CODAL_APP_OUTPUT_DIR "${codal.output_folder}")
+endif()
+
+if(NOT "${codal.target.name}" STRGREATER "")
+ message(FATAL_ERROR "${BoldRed}INVALID TARGET.${ColourReset}")
+endif()
+
+set(CODAL_DEPS "")
+set(LIB_DEST "libraries")
+
+#install the target
+INSTALL_DEPENDENCY(${LIB_DEST} ${codal.target.name} ${codal.target.url} ${codal.target.branch} ${codal.target.type})
+message("${BoldMagenta}Set target: ${codal.target.name} ${ColourReset}")
+list(APPEND CODAL_DEPS ${codal.target.name})
+
+if("${codal.target.dev}" STRGREATER "")
+ file(READ "./${LIB_DEST}/${codal.target.name}/target.json" device_json)
+ message("${BoldMagenta}Using target.json (dev version) ${ColourReset}")
+else()
+ file(READ "./${LIB_DEST}/${codal.target.name}/target-locked.json" device_json)
+ message("${BoldMagenta}Using target-locked.json${ColourReset}")
+endif()
+
+message("${BoldBlue}Targeting ${codal.target.name}${ColourReset}")
+
+sbeParseJson(device device_json)
+
+SET(CODAL_TARGET_NAME ${device.target.name})
+SET(CODAL_OUTPUT_NAME ${device.device})
+SET(CODAL_TARGET_PROCESSOR ${device.processor})
+SET(CODAL_TARGET_CPU_ARCHITECTURE ${device.architecture})
+
+# if this is the first build, lets copy a sample main.cpp from the target if available.
+if(NOT EXISTS ${CMAKE_CURRENT_LIST_DIR}/${CODAL_APP_SOURCE_DIR} AND EXISTS ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples/main.cpp)
+ FILE(COPY ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples/main.cpp DESTINATION ${CMAKE_CURRENT_LIST_DIR}/${CODAL_APP_SOURCE_DIR})
+endif()
+
+#copy samples and remove main.cpp
+if(NOT EXISTS ${CMAKE_CURRENT_LIST_DIR}/samples AND EXISTS ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples/)
+ FILE(COPY ${CMAKE_CURRENT_LIST_DIR}/${LIB_DEST}/${codal.target.name}/samples DESTINATION ${CMAKE_CURRENT_LIST_DIR})
+ FILE(REMOVE ${CMAKE_CURRENT_LIST_DIR}/samples/main.cpp)
+endif()
+
+####################
+
+SET(TOOLCHAIN ${device.toolchain})
+SET(TOOLCHAIN_FOLDER "./utils/cmake/toolchains/${device.toolchain}")
+
+# include toolchain file
+set(CMAKE_TOOLCHAIN_FILE "${TOOLCHAIN_FOLDER}/toolchain.cmake" CACHE PATH "toolchain file")
+
+# required to force TOOLCHAIN settings...
+project(codal)
+enable_language(ASM)
+
+# include compiler flags overrides
+include(${TOOLCHAIN_FOLDER}/compiler-flags.cmake)
+set(PLATFORM_INCLUDES_PATH "${PROJECT_SOURCE_DIR}/utils/cmake/toolchains/${device.toolchain}")
+
+file(MAKE_DIRECTORY "${PROJECT_SOURCE_DIR}/build")
+
+# configure output directories
+set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_SOURCE_DIR}/build")
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${PROJECT_SOURCE_DIR}/build")
+
+SET(CODAL_DEFINITIONS "")
+
+EXTRACT_JSON_ARRAY(codal "codal\.config\." CODAL_FIELDS CODAL_VALUES)
+EXTRACT_JSON_ARRAY(device "device\.config\." DEVICE_FIELDS DEVICE_VALUES)
+UNIQUE_JSON_KEYS(CODAL_FIELDS CODAL_VALUES DEVICE_FIELDS DEVICE_VALUES FINAL_FIELDS FINAL_VALUES)
+FORM_DEFINITIONS(FINAL_FIELDS FINAL_VALUES CODAL_DEFINITIONS)
+
+# extract any CMAKE definitions specified in the target.json object, and set as native cmake vars
+# cmake definitions require special handling as types are not safe in cmake, any semi-colon would need escaped, which would be ugly.
+foreach(var ${device})
+ #if it is not prefixed by codal.cmake_definitions, do not consider the key. +
+ if(NOT "${var}" MATCHES "device\.cmake_definitions\.")
+ continue()
+ endif()
+
+ string(REGEX MATCH "[^device\.cmake_definitions\.]([A-Z,a-z,0-9,_,]+)" CODAL_CMAKE_DEFINITION "${var}")
+
+ set(${CODAL_CMAKE_DEFINITION} ${${var}})
+ endforeach()
+
+#define any additional symbols specified by the target.
+if("${device.definitions}" STRGREATER "")
+ add_definitions("${device.definitions}")
+endif()
+
+####################
+# optional JSON flags for compilation + assembly
+###################
+if("${device.cpu_opts}" STRGREATER "")
+ set(_CPU_COMPILATION_OPTIONS "${device.cpu_opts}")
+ set(CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${device.cpu_opts}")
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${device.cpu_opts}")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${device.cpu_opts}")
+ set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} ${device.cpu_opts}")
+endif()
+
+set(_C_FAMILY_FLAGS_INIT "-fno-exceptions -fno-unwind-tables -ffunction-sections -fdata-sections -Wall -Wextra -Wno-unused-parameter")
+
+# asm
+if("${device.asm_flags}" STRGREATER "")
+ set(CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${device.asm_flags}")
+endif()
+
+# c
+if("${device.c_flags}" STRGREATER "")
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${_C_FAMILY_FLAGS_INIT} ${device.c_flags}")
+ set(CMAKE_C_LINK_FLAGS "${CMAKE_C_LINK_FLAGS} ${device.c_flags}")
+endif()
+
+# cpp
+if("${device.cpp_flags}" STRGREATER "")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${_C_FAMILY_FLAGS_INIT} ${device.cpp_flags}")
+ set(CMAKE_CXX_LINK_FLAGS "${device.cpp_flags}")
+endif()
+
+# linker opts
+if("${device.linker_flags}" STRGREATER "")
+ set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} ${device.linker_flags}")
+ set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${device.linker_flags}")
+endif()
+
+# create a header file from the definitions specified in JSON
+if("${CODAL_DEFINITIONS}" STRGREATER "")
+ set(EXTRA_INCLUDES_NEW_PATH "${PROJECT_SOURCE_DIR}/build/codal_extra_definitions_new.h")
+ set(EXTRA_INCLUDES_PATH "${PROJECT_SOURCE_DIR}/build/codal_extra_definitions.h")
+ file(WRITE "${EXTRA_INCLUDES_NEW_PATH}" ${CODAL_DEFINITIONS})
+ configure_file(${EXTRA_INCLUDES_NEW_PATH} ${EXTRA_INCLUDES_PATH} COPYONLY)
+
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -include ${EXTRA_INCLUDES_PATH}")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -include ${EXTRA_INCLUDES_PATH}")
+endif()
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -I${PLATFORM_INCLUDES_PATH}")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I${PLATFORM_INCLUDES_PATH}")
+
+# a define for cmake if statements to detect if within the CODAL build environment
+set(CODAL_BUILD_SYSTEM TRUE)
+
+# a define specificying common utils used in codal
+set(CODAL_UTILS_LOCATION "${PROJECT_SOURCE_DIR}/utils/cmake/util.cmake")
+
+# this variable is used in the linking step of the final binary.
+set(LIB_FOLDERS "")
+
+# Add the root of the libraries folder as a search path. Useful for disambiguating header files with duplicated names.
+MESSAGE (STATUS "Adding library path: (${PROJECT_SOURCE_DIR}/${LIB_DEST})")
+include_directories(${PROJECT_SOURCE_DIR}/${LIB_DEST})
+
+#add_subdirectory("${PROJECT_SOURCE_DIR}/${LIB_DEST}/${dep}")
+
+# "import" and add any specified libraries to the build list
+if("${device.libraries}" STRGREATER "")
+ message("Installing dependencies...")
+ set(DEVICE_LIBS ${device.libraries})
+
+ foreach(i ${DEVICE_LIBS})
+ SET(BRANCH "NONE")
+ SET(URL "${device.libraries_${i}.url}")
+ if("${device.libraries_${i}.branch}" STRGREATER "")
+ SET(BRANCH "${device.libraries_${i}.branch}")
+ endif()
+ if("${codal.target.branches.${URL}}" STRGREATER "")
+ SET(BRANCH "${codal.target.branches.${URL}}")
+ MESSAGE (STATUS "Override branch: ${BRANCH}")
+ endif()
+
+ INSTALL_DEPENDENCY(${LIB_DEST} ${device.libraries_${i}.name} ${URL} ${BRANCH} ${device.libraries_${i}.type})
+ list(APPEND CODAL_DEPS "${device.libraries_${i}.name}")
+ endforeach()
+
+ foreach(dep ${CODAL_DEPS})
+ message("${BoldGreen}Using library: ${dep}${ColourReset}")
+ add_subdirectory("${PROJECT_SOURCE_DIR}/${LIB_DEST}/${dep}")
+ endforeach()
+endif()
+
+#finally, find sources and includes of the application, and create a target.
+RECURSIVE_FIND_DIR(INCLUDE_DIRS "./inc" "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.h")
+# *.c?? only catches .cpp, not .c, so let's be precise
+RECURSIVE_FIND_FILE(SOURCE_FILES "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.cpp")
+
+RECURSIVE_FIND_FILE(S_FILES "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.s")
+RECURSIVE_FIND_FILE(C_FILES "${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}" "*.c")
+list(APPEND SOURCE_FILES ${S_FILES})
+list(APPEND SOURCE_FILES ${C_FILES})
+
+if("${SOURCE_FILES}" STREQUAL "")
+ message(FATAL_ERROR "${BoldRed}No user application to build, please add a main.cpp at: ${PROJECT_SOURCE_DIR}/${CODAL_APP_SOURCE_DIR}${ColourReset}")
+endif()
+
+if ("${BUILD_TOOL}" STRGREATER "")
+ string(COMPARE EQUAL "${BUILD_TOOL}" "YOTTA" YOTTA_BUILD)
+ if (${YOTTA_BUILD})
+ include("${PROJECT_SOURCE_DIR}/utils/cmake/buildtools/yotta.cmake")
+ endif ()
+
+ string(COMPARE EQUAL "${BUILD_TOOL}" "CODAL" CODAL_BUILD)
+ if (${CODAL_BUILD})
+ include("${PROJECT_SOURCE_DIR}/utils/cmake/buildtools/codal.cmake")
+ endif()
+endif()
+
+#
+# Supress the addition of implicit linker flags (such as -rdynamic)
+#
+set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
+set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")
+set(CMAKE_EXE_EXPORTS_C_FLAG "")
+set(CMAKE_EXE_EXPORTS_CXX_FLAG "")
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..7af0490
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,18 @@
+FROM ubuntu:18.04
+
+RUN apt-get update -qq && \
+ apt-get install -y --no-install-recommends \
+ software-properties-common && \
+ add-apt-repository -y ppa:team-gcc-arm-embedded/ppa && \
+ apt-get update -qq && \
+ apt-get install -y --no-install-recommends \
+ git make cmake python3 \
+ gcc-arm-embedded && \
+ apt-get autoremove -y && \
+ apt-get clean -y && \
+ rm -rf /var/lib/apt/lists/*
+
+# Project sources volume should be mounted at /app
+WORKDIR /app
+
+ENTRYPOINT ["python3", "build.py"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..46e04fd
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Lancaster University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/build.py b/build.py
new file mode 100755
index 0000000..594c5d7
--- /dev/null
+++ b/build.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+
+# The MIT License (MIT)
+
+# Copyright (c) 2017 Lancaster University.
+
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+import os
+import sys
+import optparse
+import platform
+import json
+import shutil
+import re
+from utils.python.codal_utils import system, build, read_json, checkgit, read_config, update, revision, printstatus, status, get_next_version, lock, delete_build_folder, generate_docs
+
+parser = optparse.OptionParser(usage="usage: %prog target-name-or-url [options]", description="This script manages the build system for a codal device. Passing a target-name generates a codal.json for that devices, to list all devices available specify the target-name as 'ls'.")
+parser.add_option('-c', '--clean', dest='clean', action="store_true", help='Whether to clean before building. Applicable only to unix based builds.', default=False)
+parser.add_option('-t', '--test-platforms', dest='test_platform', action="store_true", help='Specify whether the target platform is a test platform or not.', default=False)
+parser.add_option('-l', '--lock', dest='lock_target', action="store_true", help='Create target-lock.json, updating patch version', default=False)
+parser.add_option('-b', '--branch', dest='branch', action="store_true", help='With -l, use vX.X.X-BRANCH.Y', default=False)
+parser.add_option('-m', '--minor', dest='update_minor', action="store_true", help='With -l, update minor version', default=False)
+parser.add_option('-M', '--major', dest='update_major', action="store_true", help='With -l, update major version', default=False)
+parser.add_option('-V', '--version', dest='version', metavar="VERSION", help='With -l, set the version; use "-V v0.0.1" to bootstrap', default=False)
+parser.add_option('-u', '--update', dest='update', action="store_true", help='git pull target and libraries', default=False)
+parser.add_option('-s', '--status', dest='status', action="store_true", help='git status target and libraries', default=False)
+parser.add_option('-r', '--revision', dest='revision', action="store", help='Checkout a specific revision of the target', default=False)
+parser.add_option('-d', '--dev', dest='dev', action="store_true", help='enable developer mode (does not use target-locked.json)', default=False)
+parser.add_option('-g', '--generate-docs', dest='generate_docs', action="store_true", help='generate documentation for the current target', default=False)
+
+(options, args) = parser.parse_args()
+
+if not os.path.exists("build"):
+ os.mkdir("build")
+
+if options.lock_target:
+ lock(options)
+ exit(0)
+
+if options.update:
+ update()
+ exit(0)
+
+if options.status:
+ status()
+ exit(0)
+
+if options.revision:
+ revision(options.revision)
+ exit(0)
+
+# out of source build!
+os.chdir("build")
+
+test_json = read_json("../utils/targets.json")
+
+# configure the target a user has specified:
+if len(args) == 1:
+
+ target_name = args[0]
+ target_config = None
+
+ # list all targets
+ if target_name == "ls":
+ for json_obj in test_json:
+ s = "%s: %s" % (json_obj["name"], json_obj["info"])
+ if "device_url" in json_obj.keys():
+ s += "(%s)" % json_obj["device_url"]
+ print(s)
+ exit(0)
+
+ # cycle through out targets and check for a match
+ for json_obj in test_json:
+ if json_obj["name"] != target_name:
+ continue
+
+ del json_obj["device_url"]
+ del json_obj["info"]
+
+ target_config = json_obj
+ break
+
+ if target_config == None and target_name.startswith("http"):
+ target_config = {
+ "name": re.sub("^.*/", "", target_name),
+ "url": target_name,
+ "branch": "master",
+ "type": "git"
+ }
+
+ if target_config == None:
+ print("'" + target_name + "'" + " is not a valid target.")
+ exit(1)
+
+ # developer mode is for users who wish to contribute, it will clone and checkout commitable branches.
+ if options.dev:
+ target_config["dev"] = True
+
+ config = {
+ "target":target_config
+ }
+
+ with open("../codal.json", 'w') as codal_json:
+ json.dump(config, codal_json, indent=4)
+
+ # remove the build folder, a user could be swapping targets.
+ delete_build_folder()
+
+
+elif len(args) > 1:
+ print("Too many arguments supplied, only one target can be specified.")
+ exit(1)
+
+if not options.test_platform:
+
+ if not os.path.exists("../codal.json"):
+ print("No target specified in codal.json, does codal.json exist?")
+ exit(1)
+
+ if options.generate_docs:
+ generate_docs()
+ exit(0)
+
+ build(options.clean)
+ exit(0)
+
+for json_obj in test_json:
+
+ # some platforms aren't supported by travis, ignore them when testing.
+ if "test_ignore" in json_obj:
+ print("ignoring: " + json_obj["name"])
+ continue
+
+ # ensure we have a clean build tree.
+ delete_build_folder()
+
+ # clean libs
+ if os.path.exists("../libraries"):
+ shutil.rmtree('../libraries')
+
+ # configure the target and tests...
+ config = {
+ "target":json_obj,
+ "output":".",
+ "application":"libraries/"+json_obj["name"]+"/tests/"
+ }
+
+ with open("../codal.json", 'w') as codal_json:
+ json.dump(config, codal_json, indent=4)
+
+ build(True, True)
diff --git a/codal.json b/codal.json
new file mode 100644
index 0000000..e0df6dd
--- /dev/null
+++ b/codal.json
@@ -0,0 +1,17 @@
+{
+ "target": {
+ "name": "codal-microbit-v2",
+ "url": "https://github.com/lancaster-university/codal-microbit-v2",
+ "branch": "master",
+ "type": "git",
+ "test_ignore": true,
+ "dev": true
+ } ,
+ "config":{
+ "DEVICE_BLE": 1,
+ "MICROBIT_BLE_ENABLED" : 1,
+ "MICROBIT_BLE_EVENT_SERVICE" : 1,
+ "MICROBIT_BLE_OPEN": 1,
+ "MICROBIT_BLE_DEVICE_INFORMATION_SERVICE": 1
+ }
+}
diff --git a/module.json b/module.json
new file mode 100644
index 0000000..5595fe0
--- /dev/null
+++ b/module.json
@@ -0,0 +1,16 @@
+{
+ "name": "hoverbit-ble",
+ "version": "0.0.1",
+ "description": "HOVER BIT Bluetooth receiver.",
+ "license": "MIT",
+ "dependencies": {
+ "codal-microbit": "git@github.com:lancaster-university/codal-microbit-v2.git#master"
+ },
+ "targetDependencies": {},
+ "extraIncludes": [
+ "inc/",
+ "yotta_modules/"
+ ],
+ "bin": "./source",
+ "scripts": {}
+}
diff --git a/source/HoverBitController.cpp b/source/HoverBitController.cpp
new file mode 100644
index 0000000..65ebf17
--- /dev/null
+++ b/source/HoverBitController.cpp
@@ -0,0 +1,182 @@
+/*
+The MIT License (MIT)
+
+Copyright (c) 2016 British Broadcasting Corporation.
+This software is provided by Lancaster University by arrangement with the BBC.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+*/
+#include <MicroBit.h>
+#include "HoverBitController.h"
+
+/**
+ * Init method for HoverBitController, this sets everything to the default values.
+ * It also initializes the airbit-pcb with some protocol magic.
+ *
+ * @param _uBit the MicroBit instance
+ */
+void HoverBitController::init(MicroBit* _uBit) {
+ uBit = _uBit;
+ mainController = false;
+ batteryEmpty = false;
+ batteryMilliVolt = 3700;
+ batteryFactor = 4.42;
+
+ buzzer = 0;
+ servo_1 = 0;
+ arm = 0;
+ roll = 0;
+ yaw = 0;
+ throttle = 0;
+ failSafeC = 0;
+
+ /* I am not completly sure what this does, but it seems to me like this is
+ putting the air:bit board in some kind of "bind-mode", on the spec-sheet
+ there isn't any documentation for what 20 pulses means tho... */
+ (*uBit).sleep(100);
+ int o;
+ for (o = 0; o < 20; o++) {
+ AirBit(-90, 0, 90, 0, 90, 0, 0);
+ (*uBit).sleep(20);
+ }
+}
+
+/**
+ * This is not implemented yet.
+ */
+void HoverBitController::failSafe(void) {
+ // throttle = 0;
+ // roll = 0;
+ // yaw = 0;
+ // arm = 0;
+ // failSafeC++;
+}
+
+/**
+ * This returns the current voltage of the battery.
+ */
+unsigned int HoverBitController::getBatteryVoltage() {
+ float batteryFactor = 4.42;
+ int batteryMilliVolt = 3700;
+ return ((float)((&(*uBit).io.P0)->getAnalogValue()) * batteryFactor * 0.05) + ((float)batteryMilliVolt * 0.95);
+}
+
+/**
+ * Method for sending commands to the AirBit-card,
+ * this code is translated from the ts-code in MakeKit's original hex-file.
+ *
+ * Control TYPR12 (Throttle, Yaw, Pitch, Roll and AUX1 and AUX2) using the Spektsat 2048 protocol
+ * Throttle min: 0, max: 100
+ * Yaw, Pitch Roll: min -90, max 90
+ * Arm: 0 = Disarm, 1 = Arm
+ * Aux1: 0 - 180
+ * Aux2: 0 - 180
+ */
+void HoverBitController::AirBit(int Pitch,int Arm,int Roll,int Throttle,int Yaw,int Aux1,int Aux2) {
+ uint8_t buf[16];
+ float scaling = 1024 / 180;
+ int offset = 512;
+ float scalingServo = 1024 / 90;
+
+ unsigned int armS = 0;
+ if (Arm == 0) { armS = 0; }
+ if (Arm == 1) { armS = 1023; }
+
+ Pitch = - Pitch;
+ unsigned int aux1S = Aux1 * scalingServo;
+ unsigned int aux2S = Aux2 * scalingServo;
+ unsigned int pitchS = static_cast<unsigned int>((float)Pitch * scaling + (float)offset);
+ unsigned int rollS = static_cast<unsigned int>((float)Roll * scaling + (float)offset);
+ unsigned int yawS = static_cast<unsigned int>((float)Yaw * scaling + (float)offset);
+ unsigned int throttleS = (Throttle * 512) / 50;
+ if (Throttle == 0) { throttleS = 0; }
+
+ if (aux1S > 1023) { aux1S = 1023; }
+ if (aux2S > 1023) { aux2S = 1023; }
+
+ if (throttleS > 1023) { throttleS = 1023; }
+ if (yawS > 1023) { yawS = 1023; }
+ if (pitchS > 1023) { pitchS = 1023; }
+ if (rollS > 1023) { rollS = 1023; }
+
+ // Header "Fade" (Spektsat code)
+ buf[0] = 0;
+ // Header "System" (Spektsat code)
+ buf[1] = 0x01;
+ // 0x01 22MS 1024 DSM2
+ // 0x12 11MS 2048 DSM2
+ // 0xa2 22MS 2048 DSMS
+ // 0xb2 11MS 2048 DSMX
+ buf[2] = (0 << 2) | ((rollS >> 8) & 3);
+ buf[3] = rollS & 255;
+ buf[4] = (1 << 2) | ((pitchS >> 8) & 3);
+ buf[5] = pitchS & 255;
+ buf[6] = (2 << 2) | ((throttleS >> 8) & 3);
+ buf[7] = throttleS & 255;
+ buf[8] = (3 << 2) | ((yawS >> 8) & 3);
+ buf[9] = yawS & 255;
+ buf[10] = (4 << 2) | ((armS >> 8) & 3);
+ buf[11] = armS & 255;
+ buf[12] = (5 << 2) | ((aux1S >> 8) & 3);
+ buf[13] = aux1S & 255;
+ buf[14] = (6 << 2) | ((aux2S >> 8) & 3);
+ buf[15] = aux2S & 255;
+ (*uBit).serial.send(buf, 16, SYNC_SPINWAIT);
+}
+
+/**
+ * Method that sends commands with the current values for all parameters.
+ */
+void HoverBitController::HoverControl() {
+ AirBit(0, arm, 0, throttle, roll, roll + 45, servo_1);
+}
+
+int HoverBitController::Throttle() {
+ return throttle;
+}
+void HoverBitController::Throttle(int _throttle) {
+ if (_throttle > 99) { throttle = 100; }
+ else if (_throttle < 0) { throttle = 0; }
+ else { throttle = _throttle; }
+}
+int HoverBitController::Servo1() {
+ return servo_1;
+}
+void HoverBitController::Servo1(int _servo1) {
+ if (_servo1 > 180) { servo_1 = 180; }
+ else if (_servo1 < 0) { servo_1 = 0; }
+ else { servo_1 = _servo1; }
+}
+int HoverBitController::Roll() {
+ return roll;
+}
+void HoverBitController::Roll(int _roll) {
+ if (_roll > 90) { roll = 90; }
+ else if (_roll < -90) { roll = -90; }
+ else { roll = _roll; }
+}
+bool HoverBitController::Arm() {
+ return (arm == 1);
+}
+void HoverBitController::Arm(bool _arm) {
+ arm = (int)_arm;
+}
+bool HoverBitController::BatteryEmpty() {
+ return batteryEmpty;
+}
diff --git a/source/HoverBitController.h b/source/HoverBitController.h
new file mode 100644
index 0000000..4963cd1
--- /dev/null
+++ b/source/HoverBitController.h
@@ -0,0 +1,68 @@
+/*
+The MIT License (MIT)
+
+Copyright (c) 2016 British Broadcasting Corporation.
+This software is provided by Lancaster University by arrangement with the BBC.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+*/
+#ifndef HOVERBITCONTROLLER_H_
+#define HOVERBITCONTROLLER_H_
+
+#include <MicroBit.h>
+
+#define BATTERY_LOW_LIMIT 3500
+
+class HoverBitController {
+ private:
+ MicroBit* uBit;
+
+ int buzzer;
+ int servo_1;
+ int arm;
+ int roll;
+ int pitch;
+ int yaw;
+ int throttle;
+ int failSafeC;
+
+ bool mainController;
+ bool batteryEmpty;
+ int batteryMilliVolt;
+ float batteryFactor;
+
+ public:
+ void init(MicroBit* _uBit);
+ void failSafe(void);
+ unsigned int getBatteryVoltage(void);
+ void AirBit(int Pitch,int Arm,int Roll,int Throttle,int Yaw,int Aux1,int Aux2);
+ void HoverControl();
+
+ int Throttle();
+ void Throttle(int _throttle);
+ int Servo1();
+ void Servo1(int _servo1);
+ int Roll();
+ void Roll(int _roll);
+ bool Arm();
+ void Arm(bool _arm);
+ bool BatteryEmpty();
+};
+
+#endif // HOVERBITCONTROLLER_H_
diff --git a/source/Screen.cpp b/source/Screen.cpp
new file mode 100644
index 0000000..c778798
--- /dev/null
+++ b/source/Screen.cpp
@@ -0,0 +1,41 @@
+/*
+The MIT License (MIT)
+
+Copyright (c) 2016 British Broadcasting Corporation.
+This software is provided by Lancaster University by arrangement with the BBC.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+*/
+#include "Screen.h"
+
+/**
+ * Plots a vertical line on the micro:bit display, taken from wonder-bit-source.
+ */
+void plotYLine(MicroBit *uBit, int y1, int y2, int x) {
+ if (y1 >= y2) {
+ for (int y = y2; y <= y1; y++) {
+ (*uBit).display.image.setPixelValue(x, y, 255);
+ }
+ }
+ else if (y1 < y2) {
+ for (int y = y1; y <= y2; y++) {
+ (*uBit).display.image.setPixelValue(x, y, 255);
+ }
+ }
+}
diff --git a/source/Screen.h b/source/Screen.h
new file mode 100644
index 0000000..b60ba2b
--- /dev/null
+++ b/source/Screen.h
@@ -0,0 +1,73 @@
+/*
+The MIT License (MIT)
+
+Copyright (c) 2016 British Broadcasting Corporation.
+This software is provided by Lancaster University by arrangement with the BBC.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+*/
+#ifndef SCREEN_H_
+#define SCREEN_H_
+
+#include <MicroBit.h>
+
+enum DisplayMainScreenMode { GRAPHS, BATTERY, OFF };
+
+const char* const strBattDead = "\
+ 000,255,255,255,000\n\
+ 255,000,255,000,255\n\
+ 255,255,255,255,255\n\
+ 000,255,000,255,000\n\
+ 000,255,000,255,000\n";
+const char* const strBattLow = "\
+ 000,000,255,000,000\n\
+ 000,255,255,255,000\n\
+ 000,255,000,255,000\n\
+ 000,255,000,255,000\n\
+ 000,255,255,255,000\n";
+static const char* const strBattLevel[] = {
+ "\
+ 000,000,255,000,000\n\
+ 000,255,000,255,000\n\
+ 000,255,000,255,000\n\
+ 000,255,000,255,000\n\
+ 000,255,255,255,000\n",
+ "\
+ 000,000,255,000,000\n\
+ 000,255,000,255,000\n\
+ 000,255,000,255,000\n\
+ 000,255,255,255,000\n\
+ 000,255,255,255,000\n",
+ "\
+ 000,000,255,000,000\n\
+ 000,255,000,255,000\n\
+ 000,255,255,255,000\n\
+ 000,255,255,255,000\n\
+ 000,255,255,255,000\n",
+ "\
+ 000,000,255,000,000\n\
+ 000,255,255,255,000\n\
+ 000,255,255,255,000\n\
+ 000,255,255,255,000\n\
+ 000,255,255,255,000\n"
+};
+
+void plotYLine(MicroBit *uBit, int y1, int y2, int x);
+
+#endif // SCREEN_H_
diff --git a/source/main.cpp b/source/main.cpp
new file mode 100644
index 0000000..7687b45
--- /dev/null
+++ b/source/main.cpp
@@ -0,0 +1,269 @@
+/*
+The MIT License (MIT)
+
+Copyright (c) 2016 British Broadcasting Corporation.
+This software is provided by Lancaster University by arrangement with the BBC.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+*/
+#include <MicroBit.h>
+#include "MicroBitUARTService.h"
+#include "HoverBitController.h"
+#include "Screen.h"
+
+MicroBit uBit;
+MicroBitUARTService *uart;
+HoverBitController controller;
+
+bool bConnected = false;
+
+bool batteryEmpty = false;
+bool bCapLogoIsPressed = false;
+int batteryMilliVolt = 3700;
+unsigned long tmpTimer;
+bool bBLEIndicator = false;
+
+DisplayMainScreenMode displayMainScreenMode = GRAPHS;
+
+void onConnected(MicroBitEvent) {
+ bConnected = 1;
+ uBit.audio.setVolume(255);
+ uBit.audio.soundExpressions.play(ManagedString("giggle"));
+
+ // mobile app will send ASCII strings terminated with the colon character
+ ManagedString eom(":");
+
+ while (bConnected) {
+ ManagedString msg = uart->readUntil(eom);
+ char command = msg.substring(0, 1).toCharArray()[0];
+ int value = atoi(msg.substring(1, msg.length() - 1).toCharArray());
+
+ if (command == 'R') {
+ controller.Roll(value);
+ if (displayMainScreenMode == OFF) {
+ uBit.display.scroll(controller.Roll());
+ }
+ } else if (command == 'T') {
+ controller.Throttle(value);
+ if (displayMainScreenMode == OFF) {
+ uBit.display.scroll(controller.Throttle());
+ }
+ } else if (command == 'A') {
+ controller.Arm(value == 1);
+ if (displayMainScreenMode == OFF) {
+ uBit.display.scroll(controller.Arm());
+ }
+ } else if (command == 'S') {
+ controller.Servo1(value);
+ if (displayMainScreenMode == OFF) {
+ uBit.display.scroll(controller.Servo1());
+ }
+ } else {
+ uBit.display.scroll(command);
+ }
+ }
+
+}
+
+void onDisconnected(MicroBitEvent) {
+ bConnected = 0;
+ uBit.audio.soundExpressions.play(ManagedString("sad"));
+}
+
+void iconBatteryDead() {
+ MicroBitImage img(strBattDead);
+ uBit.display.print(img);
+}
+
+void iconBatteryLow() {
+ MicroBitImage img(strBattLow);
+ uBit.display.print(img);
+}
+
+void lowBattery() {
+ if (batteryEmpty) {
+ iconBatteryDead();
+ } else if (batteryMilliVolt > BATTERY_LOW_LIMIT - 50){
+ iconBatteryLow();
+ } else {
+ iconBatteryDead();
+ }
+}
+
+void iconBatteryCharging() {
+ int low = 0;
+ int high = 3;
+ if (batteryMilliVolt >= 4200) {
+ low = 3;
+ } else if (batteryMilliVolt >= 4040) {
+ low = 2;
+ } else if (batteryMilliVolt >= 3900) {
+ low = 1;
+ }
+
+ for (int i = low; i <= high; i++) {
+ MicroBitImage img(strBattLevel[i]);
+ uBit.display.print(img);
+ uBit.sleep(400);
+ }
+}
+
+void batteryLevelFullScreen() {
+ int level = 0;
+ if (controller.Arm()) {
+ level = (((batteryMilliVolt - 3400) * 3) / 500);
+ } else {
+ level = (((batteryMilliVolt - 3700) * 3) / 500);
+ }
+ if (level < 0) { level = 0; }
+ if (level > 3) { level = 3; }
+ MicroBitImage img(strBattLevel[level]);
+ uBit.display.print(img);
+}
+
+void plotYLine(int y1, int y2, int x) {
+ /**
+ * Draw a line along the Y axis. y1: first pixel, y2: last pixel
+ */
+
+ if (y1 >= y2) {
+ for (int y = y2; y <= y1; y++) {
+ uBit.display.image.setPixelValue(x, y, 255);
+ }
+ }
+ else if (y1 < y2) {
+ for (int y = y1; y <= y2; y++) {
+ uBit.display.image.setPixelValue(x, y, 255);
+ }
+ }
+}
+
+void nextMainScreenDisplayMode() {
+ uBit.display.clear();
+ switch (displayMainScreenMode) {
+ case GRAPHS:
+ displayMainScreenMode = BATTERY;
+ break;
+ case BATTERY:
+ displayMainScreenMode = OFF;
+ break;
+ case OFF:
+ displayMainScreenMode = GRAPHS;
+ break;
+ }
+}
+
+void mainScreen() {
+ // uBit.display.clear();
+ bool bDelayElapsed = (uBit.systemTime() - tmpTimer) > 1000;
+ if (bDelayElapsed) { tmpTimer = uBit.systemTime(); }
+
+ switch (displayMainScreenMode) {
+ case OFF:
+ break;
+ case BATTERY:
+ uBit.display.clear();
+ batteryLevelFullScreen();
+ break;
+ case GRAPHS:
+ default:
+ uBit.display.clear();
+ if (batteryMilliVolt > 100) {
+ if (controller.Arm()) {
+ plotYLine(0, (((batteryMilliVolt - 3400) * 4) / 500), 4);
+ } else {
+ plotYLine(0, (((batteryMilliVolt - 3700) * 4) / 500), 4);
+ }
+ }
+ break;
+ }
+
+ if (bConnected) {
+ uBit.display.image.setPixelValue(0, 0, 255);
+ } else {
+ if (bDelayElapsed) { bBLEIndicator = !bBLEIndicator; }
+ if (bBLEIndicator) {
+ uBit.display.image.setPixelValue(0, 0, 0);
+ } else {
+ uBit.display.image.setPixelValue(0, 0, 255);
+ }
+ }
+}
+
+void onButtonA_press(MicroBitEvent e) {
+ controller.Roll(controller.Roll() + 3);
+}
+void onButtonB_press(MicroBitEvent e) {
+ controller.Roll(controller.Roll() - 3);
+}
+
+int main() {
+ uBit.init();
+ tmpTimer = uBit.systemTime();
+
+ // Setup serial for Spektsat communication with air:bit board
+ uBit.serial.setBaud(115200);
+ uBit.serial.redirect(uBit.io.P1, uBit.io.P2);
+
+ /* Initialize hover:bit controller module
+ * the init procedure have to be run within 100ms after air:bit power up */
+ controller.init(&uBit);
+
+ // Setup listeners
+ uBit.messageBus.listen(MICROBIT_ID_BLE, MICROBIT_BLE_EVT_CONNECTED, onConnected);
+ uBit.messageBus.listen(MICROBIT_ID_BLE, MICROBIT_BLE_EVT_DISCONNECTED, onDisconnected);
+ uBit.messageBus.listen(MICROBIT_ID_BUTTON_A, MICROBIT_BUTTON_EVT_CLICK, onButtonA_press);
+ uBit.messageBus.listen(MICROBIT_ID_BUTTON_B, MICROBIT_BUTTON_EVT_CLICK, onButtonB_press);
+
+ // uartService
+ // Note GATT table size increased from default in MicroBitConfig.h
+ // #define MICROBIT_SD_GATT_TABLE_SIZE 0x500
+ uart = new MicroBitUARTService(*uBit.ble, 32, 32);
+
+ uBit.audio.soundExpressions.play(ManagedString("hello"));
+
+ while (1) {
+ batteryMilliVolt = controller.getBatteryVoltage();
+
+ if (uBit.logo.isPressed()) {
+ if (!bCapLogoIsPressed) {
+ bCapLogoIsPressed = true;
+ nextMainScreenDisplayMode();
+ }
+ } else if (bCapLogoIsPressed ){
+ bCapLogoIsPressed = false;
+ }
+
+ if ((((&uBit.io.P0)->getAnalogValue()) < 600) && (((&uBit.io.P0)->getAnalogValue()) >= 400)) {
+ iconBatteryCharging();
+ } else if (controller.BatteryEmpty() || (batteryMilliVolt < BATTERY_LOW_LIMIT && (&uBit.io.P0)->getAnalogValue() > 300)) {
+ lowBattery();
+ } else {
+ mainScreen();
+ }
+
+ controller.HoverControl();
+ uBit.sleep(20);
+ }
+
+ // If main exits, there may still be other fibers running or registered event handlers etc.
+// Simply release this fiber, which will mean we enter the scheduler. Worst case, we then
+ // sit in the idle task forever, in a power efficient sleep.
+ release_fiber();
+}
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/utils/__init__.py
diff --git a/utils/cmake/JSONParser.cmake b/utils/cmake/JSONParser.cmake
new file mode 100644
index 0000000..7f45f14
--- /dev/null
+++ b/utils/cmake/JSONParser.cmake
@@ -0,0 +1,309 @@
+# The MIT License (MIT)
+
+# Copyright (c) 2015 Stefan Bellus
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+cmake_minimum_required(VERSION 3.1)
+
+if (DEFINED JSonParserGuard)
+ return()
+endif()
+
+set(JSonParserGuard yes)
+
+macro(sbeParseJson prefix jsonString)
+ cmake_policy(PUSH)
+
+ set(json_string "${${jsonString}}")
+ string(LENGTH "${json_string}" json_jsonLen)
+ set(json_index 0)
+ set(json_AllVariables ${prefix})
+ set(json_ArrayNestingLevel 0)
+ set(json_MaxArrayNestingLevel 0)
+
+ _sbeParse(${prefix})
+
+ unset(json_index)
+ unset(json_AllVariables)
+ unset(json_jsonLen)
+ unset(json_string)
+ unset(json_value)
+ unset(json_inValue)
+ unset(json_name)
+ unset(json_inName)
+ unset(json_newPrefix)
+ unset(json_reservedWord)
+ unset(json_arrayIndex)
+ unset(json_char)
+ unset(json_end)
+ unset(json_ArrayNestingLevel)
+ foreach(json_nestingLevel RANGE ${json_MaxArrayNestingLevel})
+ unset(json_${json_nestingLevel}_arrayIndex)
+ endforeach()
+ unset(json_nestingLevel)
+ unset(json_MaxArrayNestingLevel)
+
+ cmake_policy(POP)
+endmacro()
+
+macro(sbeClearJson prefix)
+ foreach(json_var ${${prefix}})
+ unset(${json_var})
+ endforeach()
+
+ unset(${prefix})
+ unset(json_var)
+endmacro()
+
+macro(sbePrintJson prefix)
+ foreach(json_var ${${prefix}})
+ message("${json_var} = ${${json_var}}")
+ endforeach()
+endmacro()
+
+macro(_sbeParse prefix)
+
+ while(${json_index} LESS ${json_jsonLen})
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ if("\"" STREQUAL "${json_char}")
+ _sbeParseNameValue(${prefix})
+ elseif("{" STREQUAL "${json_char}")
+ _sbeMoveToNextNonEmptyCharacter()
+ _sbeParseObject(${prefix})
+ elseif("[" STREQUAL "${json_char}")
+ _sbeMoveToNextNonEmptyCharacter()
+ _sbeParseArray(${prefix})
+ endif()
+
+ if(${json_index} LESS ${json_jsonLen})
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+ else()
+ break()
+ endif()
+
+ if ("}" STREQUAL "${json_char}" OR "]" STREQUAL "${json_char}")
+ break()
+ endif()
+
+ _sbeMoveToNextNonEmptyCharacter()
+ endwhile()
+endmacro()
+
+macro(_sbeParseNameValue prefix)
+ set(json_name "")
+ set(json_inName no)
+
+ while(${json_index} LESS ${json_jsonLen})
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ # check if name ends
+ if("\"" STREQUAL "${json_char}" AND json_inName)
+ set(json_inName no)
+ _sbeMoveToNextNonEmptyCharacter()
+ if(NOT ${json_index} LESS ${json_jsonLen})
+ break()
+ endif()
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+ set(json_newPrefix ${prefix}.${json_name})
+ set(json_name "")
+
+ if(":" STREQUAL "${json_char}")
+ _sbeMoveToNextNonEmptyCharacter()
+ if(NOT ${json_index} LESS ${json_jsonLen})
+ break()
+ endif()
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ if("\"" STREQUAL "${json_char}")
+ _sbeParseValue(${json_newPrefix})
+ break()
+ elseif("{" STREQUAL "${json_char}")
+ _sbeMoveToNextNonEmptyCharacter()
+ _sbeParseObject(${json_newPrefix})
+ break()
+ elseif("[" STREQUAL "${json_char}")
+ _sbeMoveToNextNonEmptyCharacter()
+ _sbeParseArray(${json_newPrefix})
+ break()
+ else()
+ # reserved word starts
+ _sbeParseReservedWord(${json_newPrefix})
+ break()
+ endif()
+ else()
+ # name without value
+ list(APPEND ${json_AllVariables} ${json_newPrefix})
+ set(${json_newPrefix} "")
+ break()
+ endif()
+ endif()
+
+ if(json_inName)
+ # remove escapes
+ if("\\" STREQUAL "${json_char}")
+ math(EXPR json_index "${json_index} + 1")
+ if(NOT ${json_index} LESS ${json_jsonLen})
+ break()
+ endif()
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+ endif()
+
+ set(json_name "${json_name}${json_char}")
+ endif()
+
+ # check if name starts
+ if("\"" STREQUAL "${json_char}" AND NOT json_inName)
+ set(json_inName yes)
+ endif()
+
+ _sbeMoveToNextNonEmptyCharacter()
+ endwhile()
+endmacro()
+
+macro(_sbeParseReservedWord prefix)
+ set(json_reservedWord "")
+ set(json_end no)
+ while(${json_index} LESS ${json_jsonLen} AND NOT json_end)
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ if("," STREQUAL "${json_char}" OR "}" STREQUAL "${json_char}" OR "]" STREQUAL "${json_char}")
+ set(json_end yes)
+ else()
+ set(json_reservedWord "${json_reservedWord}${json_char}")
+ math(EXPR json_index "${json_index} + 1")
+ endif()
+ endwhile()
+
+ list(APPEND ${json_AllVariables} ${prefix})
+ string(STRIP "${json_reservedWord}" json_reservedWord)
+ set(${prefix} ${json_reservedWord})
+endmacro()
+
+macro(_sbeParseValue prefix)
+ cmake_policy(SET CMP0054 NEW) # turn off implicit expansions in if statement
+
+ set(json_value "")
+ set(json_inValue no)
+
+ while(${json_index} LESS ${json_jsonLen})
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ # check if json_value ends, it is ended by "
+ if("\"" STREQUAL "${json_char}" AND json_inValue)
+ set(json_inValue no)
+
+ set(${prefix} ${json_value})
+ list(APPEND ${json_AllVariables} ${prefix})
+ _sbeMoveToNextNonEmptyCharacter()
+ break()
+ endif()
+
+ if(json_inValue)
+ # if " is escaped consume
+ if("\\" STREQUAL "${json_char}")
+ math(EXPR json_index "${json_index} + 1")
+ if(NOT ${json_index} LESS ${json_jsonLen})
+ break()
+ endif()
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+ if(NOT "\"" STREQUAL "${json_char}")
+ # if it is not " then copy also escape character
+ set(json_char "\\${json_char}")
+ endif()
+ endif()
+
+ _sbeAddEscapedCharacter("${json_char}")
+ endif()
+
+ # check if value starts
+ if("\"" STREQUAL "${json_char}" AND NOT json_inValue)
+ set(json_inValue yes)
+ endif()
+
+ math(EXPR json_index "${json_index} + 1")
+ endwhile()
+endmacro()
+
+macro(_sbeAddEscapedCharacter char)
+ string(CONCAT json_value "${json_value}" "${char}")
+endmacro()
+
+macro(_sbeParseObject prefix)
+ _sbeParse(${prefix})
+ _sbeMoveToNextNonEmptyCharacter()
+endmacro()
+
+macro(_sbeParseArray prefix)
+ math(EXPR json_ArrayNestingLevel "${json_ArrayNestingLevel} + 1")
+ set(json_${json_ArrayNestingLevel}_arrayIndex 0)
+
+ set(${prefix} "")
+ list(APPEND ${json_AllVariables} ${prefix})
+
+ while(${json_index} LESS ${json_jsonLen})
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ if("\"" STREQUAL "${json_char}")
+ # simple value
+ list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex})
+ _sbeParseValue(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex})
+ elseif("{" STREQUAL "${json_char}")
+ # object
+ _sbeMoveToNextNonEmptyCharacter()
+ list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex})
+ _sbeParseObject(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex})
+ else()
+ list(APPEND ${prefix} ${json_${json_ArrayNestingLevel}_arrayIndex})
+ _sbeParseReservedWord(${prefix}_${json_${json_ArrayNestingLevel}_arrayIndex})
+ endif()
+
+ if(NOT ${json_index} LESS ${json_jsonLen})
+ break()
+ endif()
+
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+
+ if("]" STREQUAL "${json_char}")
+ _sbeMoveToNextNonEmptyCharacter()
+ break()
+ elseif("," STREQUAL "${json_char}")
+ math(EXPR json_${json_ArrayNestingLevel}_arrayIndex "${json_${json_ArrayNestingLevel}_arrayIndex} + 1")
+ endif()
+
+ _sbeMoveToNextNonEmptyCharacter()
+ endwhile()
+
+ if(${json_MaxArrayNestingLevel} LESS ${json_ArrayNestingLevel})
+ set(json_MaxArrayNestingLevel ${json_ArrayNestingLevel})
+ endif()
+ math(EXPR json_ArrayNestingLevel "${json_ArrayNestingLevel} - 1")
+endmacro()
+
+macro(_sbeMoveToNextNonEmptyCharacter)
+ math(EXPR json_index "${json_index} + 1")
+ if(${json_index} LESS ${json_jsonLen})
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+ while(${json_char} MATCHES "[ \t\n\r]" AND ${json_index} LESS ${json_jsonLen})
+ math(EXPR json_index "${json_index} + 1")
+ string(SUBSTRING "${json_string}" ${json_index} 1 json_char)
+ endwhile()
+ endif()
+endmacro()
diff --git a/utils/cmake/buildtools/codal.cmake b/utils/cmake/buildtools/codal.cmake
new file mode 100644
index 0000000..ea8b23c
--- /dev/null
+++ b/utils/cmake/buildtools/codal.cmake
@@ -0,0 +1,85 @@
+add_executable(
+ ${device.device}
+ ${SOURCE_FILES}
+)
+
+if("${INCLUDE_DIRS}" STRGREATER "")
+ target_include_directories(${device.device} PUBLIC "${INCLUDE_DIRS}")
+endif()
+
+set_target_properties(${device.device} PROPERTIES SUFFIX "" ENABLE_EXPORTS ON)
+
+# link the executable with supporting libraries.
+target_link_libraries(
+ ${device.device}
+ ${CODAL_DEPS}
+)
+
+# import toolchain bin generation command
+if(${device.generate_bin})
+ include(${TOOLCHAIN_FOLDER}/bin-generator.cmake)
+endif()
+
+# import toolchain hex generation command
+if(${device.generate_hex})
+ include(${TOOLCHAIN_FOLDER}/hex-generator.cmake)
+endif()
+
+# post process command hook, depends on the hex file generated by the build system.
+if("${device.post_process.command}" STRGREATER "" OR "${device.post_process}" STRGREATER "")
+
+ if("${device.post_process}" STRGREATER "")
+ set(POST_PROCESS_COMMAND ${device.post_process})
+ else()
+ set(POST_PROCESS_COMMAND ${device.post_process.command})
+ endif()
+
+ set(POST_PROCESS_DEPENDS "${device.post_process.depends}")
+
+ # replace specific strings in the command, this gives users flexibility, they don't have to manually specify the location of files
+ string(REPLACE "<OUTPUT_HEX_LOCATION>" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR}/${device.device}.hex CODAL_POSTPROCESS_COMMAND ${POST_PROCESS_COMMAND})
+ string(REPLACE "<OUTPUT_HEX_DESTINATION>" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+ string(REPLACE "<OUTPUT_HEX_NAME>" ${device.device} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+
+ string(REPLACE "<OUTPUT_BIN_LOCATION>" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR}/${device.device}.bin CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+ string(REPLACE "<OUTPUT_BIN_DESTINATION>" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+ string(REPLACE "<OUTPUT_BIN_NAME>" ${device.device}.bin CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+
+ string(REPLACE "<OUTPUT_ELF_LOCATION>" ${PROJECT_SOURCE_DIR}/build/${device.device} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+ string(REPLACE "<OUTPUT_ELF_DESTINATION>" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+ string(REPLACE "<OUTPUT_ELF_NAME>" ${device.device} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+
+ string(REPLACE "<CODAL_APP_OUTPUT_DIR>" ${PROJECT_SOURCE_DIR}/${CODAL_APP_OUTPUT_DIR} CODAL_POSTPROCESS_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+
+ #convert to a command
+ separate_arguments(FINAL_COMMAND UNIX_COMMAND ${CODAL_POSTPROCESS_COMMAND})
+
+ # execute
+ if(POST_PROCESS_DEPENDS STREQUAL "ELF")
+ add_custom_command(
+ TARGET ${device.device}
+ COMMAND ${FINAL_COMMAND}
+ DEPENDS ${device.device}
+ WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}"
+ COMMENT "Executing post process command"
+ )
+ elseif(POST_PROCESS_DEPENDS STREQUAL "HEX")
+ add_custom_command(
+ TARGET ${device.device}_hex
+ COMMAND ${FINAL_COMMAND}
+ DEPENDS ${device.device}
+ WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}"
+ COMMENT "Executing post process command"
+ )
+ else()
+        #by default post process depends on the bin target
+ add_custom_command(
+ TARGET ${device.device}_bin
+ COMMAND ${FINAL_COMMAND}
+ DEPENDS ${device.device}
+ WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}"
+ COMMENT "Executing post process command"
+ )
+ endif()
+
+endif() \ No newline at end of file
diff --git a/utils/cmake/buildtools/yotta.cmake b/utils/cmake/buildtools/yotta.cmake
new file mode 100644
index 0000000..8002006
--- /dev/null
+++ b/utils/cmake/buildtools/yotta.cmake
@@ -0,0 +1,23 @@
+if("${INCLUDE_DIRS}" STRGREATER "")
+ target_include_directories(codal PUBLIC "${INCLUDE_DIRS}")
+endif()
+
+add_library(codal "${SOURCE_FILES}")
+set_target_properties(codal PROPERTIES SUFFIX "" ENABLE_EXPORTS ON)
+
+target_compile_definitions(codal PUBLIC "${device.definitions}")
+target_include_directories(codal PUBLIC ${PLATFORM_INCLUDES_PATH})
+target_compile_options(codal PUBLIC -include ${EXTRA_INCLUDES_PATH})
+
+set(STRIPPED "")
+string(STRIP "${CMAKE_LINKER_FLAGS}" STRIPPED)
+# link the executable with supporting libraries.
+target_link_libraries(codal "${CODAL_DEPS};${STRIPPED}")
+
+#
+# Suppress the addition of implicit linker flags (such as -rdynamic)
+#
+set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
+set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")
+set(CMAKE_EXE_EXPORTS_C_FLAG "")
+set(CMAKE_EXE_EXPORTS_CXX_FLAG "") \ No newline at end of file
diff --git a/utils/cmake/colours.cmake b/utils/cmake/colours.cmake
new file mode 100644
index 0000000..2786b49
--- /dev/null
+++ b/utils/cmake/colours.cmake
@@ -0,0 +1,19 @@
+if(NOT WIN32)
+ string(ASCII 27 Esc)
+ set(ColourReset "${Esc}[m")
+ set(ColourBold "${Esc}[1m")
+ set(Red "${Esc}[31m")
+ set(Green "${Esc}[32m")
+ set(Yellow "${Esc}[33m")
+ set(Blue "${Esc}[34m")
+ set(Magenta "${Esc}[35m")
+ set(Cyan "${Esc}[36m")
+ set(White "${Esc}[37m")
+ set(BoldRed "${Esc}[1;31m")
+ set(BoldGreen "${Esc}[1;32m")
+ set(BoldYellow "${Esc}[1;33m")
+ set(BoldBlue "${Esc}[1;34m")
+ set(BoldMagenta "${Esc}[1;35m")
+ set(BoldCyan "${Esc}[1;36m")
+ set(BoldWhite "${Esc}[1;37m")
+endif()
diff --git a/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake b/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake
new file mode 100644
index 0000000..d18d098
--- /dev/null
+++ b/utils/cmake/toolchains/ARM_GCC/bin-generator.cmake
@@ -0,0 +1,9 @@
+add_custom_command(
+ OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin"
+ COMMAND "${ARM_NONE_EABI_OBJCOPY}" -O binary "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin"
+ DEPENDS ${device.device}
+ COMMENT "converting to bin file."
+)
+
+#specify a dependency on the elf file so that bin is automatically rebuilt when elf is changed.
+add_custom_target(${device.device}_bin ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin")
diff --git a/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake b/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake
new file mode 100644
index 0000000..16aacaa
--- /dev/null
+++ b/utils/cmake/toolchains/ARM_GCC/compiler-flags.cmake
@@ -0,0 +1,49 @@
+set(EXPLICIT_INCLUDES "")
+if((CMAKE_VERSION VERSION_GREATER "3.4.0") OR (CMAKE_VERSION VERSION_EQUAL "3.4.0"))
+ # from CMake 3.4 <INCLUDES> are separate to <FLAGS> in the
+ # CMAKE_<LANG>_COMPILE_OBJECT, CMAKE_<LANG>_CREATE_ASSEMBLY_SOURCE, and
+ # CMAKE_<LANG>_CREATE_PREPROCESSED_SOURCE commands
+ set(EXPLICIT_INCLUDES "<INCLUDES> ")
+endif()
+
+# Override the link rules:
+set(CMAKE_C_CREATE_SHARED_LIBRARY "echo 'shared libraries not supported' && 1")
+set(CMAKE_C_CREATE_SHARED_MODULE "echo 'shared modules not supported' && 1")
+set(CMAKE_C_CREATE_STATIC_LIBRARY "<CMAKE_AR> -cr <LINK_FLAGS> <TARGET> <OBJECTS>")
+set(CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> ${EXPLICIT_INCLUDES}<FLAGS> -o <OBJECT> -c <SOURCE>")
+
+set(CMAKE_C_LINK_EXECUTABLE "<CMAKE_C_COMPILER> <CMAKE_C_LINK_FLAGS> <LINK_FLAGS> -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lm -lc -lgcc -lm -lc -lgcc -Wl,--end-group --specs=nano.specs -o <TARGET>")
+
+set(CMAKE_CXX_OUTPUT_EXTENSION ".o")
+set(CMAKE_DEPFILE_FLAGS_CXX "-MMD -MT <OBJECT> -MF <DEPFILE>")
+set(CMAKE_C_OUTPUT_EXTENSION ".o")
+set(CMAKE_DEPFILE_FLAGS_C "-MMD -MT <OBJECT> -MF <DEPFILE>")
+
+set(CMAKE_C_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_C_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_C_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_C_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ")
+
+
+set(CMAKE_ASM_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_ASM_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_ASM_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_ASM_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_ASM "-isystem ")
+
+set(CMAKE_CXX_CREATE_STATIC_LIBRARY "<CMAKE_AR> -cr <LINK_FLAGS> <TARGET> <OBJECTS>")
+
+set(CMAKE_CXX_LINK_EXECUTABLE "<CMAKE_CXX_COMPILER> <CMAKE_CXX_LINK_FLAGS> <LINK_FLAGS> -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lnosys -lstdc++ -lsupc++ -lm -lc -lgcc -lstdc++ -lsupc++ -lm -lc -lgcc -Wl,--end-group --specs=nano.specs -o <TARGET>")
+
+set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_CXX "-isystem ")
+
+if (CMAKE_C_COMPILER_VERSION VERSION_GREATER "7.1.0" OR CMAKE_C_COMPILER_VERSION VERSION_EQUAL "7.1.0")
+ message("${BoldRed}Supressing -Wexpansion-to-defined.${ColourReset}")
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-expansion-to-defined")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-expansion-to-defined")
+endif () \ No newline at end of file
diff --git a/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake b/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake
new file mode 100644
index 0000000..4948935
--- /dev/null
+++ b/utils/cmake/toolchains/ARM_GCC/hex-generator.cmake
@@ -0,0 +1,9 @@
+add_custom_command(
+ OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex"
+ COMMAND "${ARM_NONE_EABI_OBJCOPY}" -O ihex "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex"
+ DEPENDS ${device.device}
+ COMMENT "converting to hex file."
+)
+
+#specify a dependency on the elf file so that hex is automatically rebuilt when elf is changed.
+add_custom_target(${device.device}_hex ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex")
diff --git a/utils/cmake/toolchains/ARM_GCC/platform_includes.h b/utils/cmake/toolchains/ARM_GCC/platform_includes.h
new file mode 100644
index 0000000..3417ef3
--- /dev/null
+++ b/utils/cmake/toolchains/ARM_GCC/platform_includes.h
@@ -0,0 +1,10 @@
+#ifndef PLATFORM_INCLUDES
+#define PLATFORM_INCLUDES
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdarg.h>
+#include <math.h>
+
+#endif
diff --git a/utils/cmake/toolchains/ARM_GCC/toolchain.cmake b/utils/cmake/toolchains/ARM_GCC/toolchain.cmake
new file mode 100644
index 0000000..eafdfbc
--- /dev/null
+++ b/utils/cmake/toolchains/ARM_GCC/toolchain.cmake
@@ -0,0 +1,26 @@
+find_program(ARM_NONE_EABI_RANLIB arm-none-eabi-ranlib)
+find_program(ARM_NONE_EABI_AR arm-none-eabi-ar)
+find_program(ARM_NONE_EABI_GCC arm-none-eabi-gcc)
+find_program(ARM_NONE_EABI_GPP arm-none-eabi-g++)
+find_program(ARM_NONE_EABI_OBJCOPY arm-none-eabi-objcopy)
+
+set(CMAKE_OSX_SYSROOT "/")
+set(CMAKE_OSX_DEPLOYMENT_TARGET "")
+
+set(CODAL_TOOLCHAIN "ARM_GCC")
+
+if(CMAKE_VERSION VERSION_LESS "3.5.0")
+ include(CMakeForceCompiler)
+ cmake_force_c_compiler("${ARM_NONE_EABI_GCC}" GNU)
+ cmake_force_cxx_compiler("${ARM_NONE_EABI_GPP}" GNU)
+else()
+ # from 3.5 the force_compiler macro is deprecated: CMake can detect
+ # arm-none-eabi-gcc as being a GNU compiler automatically
+ set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY")
+ set(CMAKE_C_COMPILER "${ARM_NONE_EABI_GCC}")
+ set(CMAKE_CXX_COMPILER "${ARM_NONE_EABI_GPP}")
+endif()
+
+SET(CMAKE_AR "${ARM_NONE_EABI_AR}" CACHE FILEPATH "Archiver")
+SET(CMAKE_RANLIB "${ARM_NONE_EABI_RANLIB}" CACHE FILEPATH "rlib")
+set(CMAKE_CXX_OUTPUT_EXTENSION ".o")
diff --git a/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake b/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake
new file mode 100644
index 0000000..a3a1c01
--- /dev/null
+++ b/utils/cmake/toolchains/AVR_GCC/bin-generator.cmake
@@ -0,0 +1,9 @@
+add_custom_command(
+ OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin"
+ COMMAND "${AVR_OBJCOPY}" -O binary "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin"
+ DEPENDS ${device.device}
+ COMMENT "converting to bin file."
+)
+
+#specify a dependency on the elf file so that bin is automatically rebuilt when elf is changed.
+add_custom_target(${device.device}_bin ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin")
diff --git a/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake b/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake
new file mode 100644
index 0000000..c6dcfc7
--- /dev/null
+++ b/utils/cmake/toolchains/AVR_GCC/compiler-flags.cmake
@@ -0,0 +1,43 @@
+set(EXPLICIT_INCLUDES "")
+if((CMAKE_VERSION VERSION_GREATER "3.4.0") OR (CMAKE_VERSION VERSION_EQUAL "3.4.0"))
+ # from CMake 3.4 <INCLUDES> are separate to <FLAGS> in the
+ # CMAKE_<LANG>_COMPILE_OBJECT, CMAKE_<LANG>_CREATE_ASSEMBLY_SOURCE, and
+ # CMAKE_<LANG>_CREATE_PREPROCESSED_SOURCE commands
+ set(EXPLICIT_INCLUDES "<INCLUDES> ")
+endif()
+
+# Override the link rules:
+set(CMAKE_C_CREATE_SHARED_LIBRARY "echo 'shared libraries not supported' && 1")
+set(CMAKE_C_CREATE_SHARED_MODULE "echo 'shared modules not supported' && 1")
+set(CMAKE_C_CREATE_STATIC_LIBRARY "<CMAKE_AR> rcs <LINK_FLAGS> <TARGET> <OBJECTS>")
+set(CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> ${EXPLICIT_INCLUDES}<FLAGS> -o <OBJECT> -c <SOURCE>")
+
+set(CMAKE_C_LINK_EXECUTABLE "<CMAKE_C_COMPILER> <CMAKE_C_LINK_FLAGS> <LINK_FLAGS> -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lm -lc -lgcc -lm -lc -lgcc -Wl,--end-group --specs=nano.specs -o <TARGET>")
+
+set(CMAKE_CXX_OUTPUT_EXTENSION ".o")
+set(CMAKE_DEPFILE_FLAGS_CXX "-MMD -MT <OBJECT> -MF <DEPFILE>")
+set(CMAKE_C_OUTPUT_EXTENSION ".o")
+set(CMAKE_DEPFILE_FLAGS_C "-MMD -MT <OBJECT> -MF <DEPFILE>")
+
+set(CMAKE_C_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_C_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_C_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_C_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ")
+
+
+set(CMAKE_ASM_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_ASM_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_ASM_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_ASM_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_ASM "-isystem ")
+
+set(CMAKE_CXX_CREATE_STATIC_LIBRARY "<CMAKE_AR> rcs <LINK_FLAGS> <TARGET> <OBJECTS>")
+
+set(CMAKE_CXX_LINK_EXECUTABLE "<CMAKE_CXX_COMPILER> <CMAKE_CXX_LINK_FLAGS> <LINK_FLAGS> -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lm -lc -lgcc -Wl,--end-group -o <TARGET>")
+
+set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_CXX "-isystem ")
diff --git a/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake b/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake
new file mode 100644
index 0000000..5be3c67
--- /dev/null
+++ b/utils/cmake/toolchains/AVR_GCC/hex-generator.cmake
@@ -0,0 +1,9 @@
+add_custom_command(
+ OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex"
+ COMMAND "${AVR_OBJCOPY}" -O ihex "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex"
+ DEPENDS ${device.device}
+ COMMENT "converting to hex file."
+)
+
+#specify a dependency on the elf file so that hex is automatically rebuilt when elf is changed.
+add_custom_target(${device.device}_hex ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex")
diff --git a/utils/cmake/toolchains/AVR_GCC/platform_includes.h b/utils/cmake/toolchains/AVR_GCC/platform_includes.h
new file mode 100644
index 0000000..ac788a5
--- /dev/null
+++ b/utils/cmake/toolchains/AVR_GCC/platform_includes.h
@@ -0,0 +1,14 @@
+#ifndef PLATFORM_INCLUDES
+#define PLATFORM_INCLUDES
+
+#include <stdint.h>
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <stdarg.h>
+#include <math.h>
+#include <avr/interrupt.h>
+#include <avr/io.h>
+
+#endif
diff --git a/utils/cmake/toolchains/AVR_GCC/toolchain.cmake b/utils/cmake/toolchains/AVR_GCC/toolchain.cmake
new file mode 100644
index 0000000..891aff2
--- /dev/null
+++ b/utils/cmake/toolchains/AVR_GCC/toolchain.cmake
@@ -0,0 +1,29 @@
+find_program(AVR_GCC_RANLIB avr-gcc-ranlib)
+find_program(AVR_AR avr-ar)
+find_program(AVR_AS avr-as)
+find_program(AVR_GCC avr-gcc)
+find_program(AVR_GPP avr-g++)
+find_program(AVR_OBJCOPY avr-objcopy)
+
+set(CMAKE_OSX_SYSROOT "/")
+set(CMAKE_OSX_DEPLOYMENT_TARGET "")
+
+set(CODAL_TOOLCHAIN "AVR_GCC")
+
+if(CMAKE_VERSION VERSION_LESS "3.5.0")
+ include(CMakeForceCompiler)
+ cmake_force_c_compiler("${AVR_GCC}" GNU)
+ cmake_force_cxx_compiler("${AVR_GPP}" GNU)
+else()
+ #-Wl,-flto -flto -fno-fat-lto-objects
+ # from 3.5 the force_compiler macro is deprecated: CMake can detect
+    # avr-gcc as being a GNU compiler automatically
+ set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY")
+ set(CMAKE_C_COMPILER "${AVR_GCC}")
+ set(CMAKE_CXX_COMPILER "${AVR_GPP}")
+endif()
+
+SET(CMAKE_ASM_COMPILER "${AVR_GCC}")
+SET(CMAKE_AR "${AVR_AR}" CACHE FILEPATH "Archiver")
+SET(CMAKE_RANLIB "${AVR_GCC_RANLIB}" CACHE FILEPATH "rlib")
+set(CMAKE_CXX_OUTPUT_EXTENSION ".o")
diff --git a/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake b/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake
new file mode 100644
index 0000000..74fdb35
--- /dev/null
+++ b/utils/cmake/toolchains/XTENSA_GCC/bin-generator.cmake
@@ -0,0 +1,9 @@
+add_custom_command(
+ OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin"
+ COMMAND "${XTENSA_OBJCOPY}" -O binary "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin"
+ DEPENDS ${device.device}
+ COMMENT "converting to bin file."
+)
+
+#specify a dependency on the elf file so that bin is automatically rebuilt when elf is changed.
+add_custom_target(${device.device}_bin ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.bin")
diff --git a/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake b/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake
new file mode 100644
index 0000000..e0f7e2d
--- /dev/null
+++ b/utils/cmake/toolchains/XTENSA_GCC/compiler-flags.cmake
@@ -0,0 +1,43 @@
+set(EXPLICIT_INCLUDES "")
+if((CMAKE_VERSION VERSION_GREATER "3.4.0") OR (CMAKE_VERSION VERSION_EQUAL "3.4.0"))
+ # from CMake 3.4 <INCLUDES> are separate to <FLAGS> in the
+ # CMAKE_<LANG>_COMPILE_OBJECT, CMAKE_<LANG>_CREATE_ASSEMBLY_SOURCE, and
+ # CMAKE_<LANG>_CREATE_PREPROCESSED_SOURCE commands
+ set(EXPLICIT_INCLUDES "<INCLUDES> ")
+endif()
+
+# Override the link rules:
+set(CMAKE_C_CREATE_SHARED_LIBRARY "echo 'shared libraries not supported' && 1")
+set(CMAKE_C_CREATE_SHARED_MODULE "echo 'shared modules not supported' && 1")
+set(CMAKE_C_CREATE_STATIC_LIBRARY "<CMAKE_AR> -cr <LINK_FLAGS> <TARGET> <OBJECTS>")
+set(CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> ${EXPLICIT_INCLUDES}<FLAGS> -o <OBJECT> -c <SOURCE>")
+
+set(CMAKE_C_LINK_EXECUTABLE "<CMAKE_C_COMPILER> <CMAKE_C_LINK_FLAGS> <LINK_FLAGS> -nostdlib -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lupgrade -lssl -lmesh -lwpa2 -lsmartconfig -lespnow -lpp -lmain -lwpa -llwip -lnet80211 -lwps -lcrypto -lphy -lhal -lgcc -ldriver -lm -lat -lc -lstdc++ -Wl,--end-group -lgcc -o <TARGET>")
+
+set(CMAKE_CXX_OUTPUT_EXTENSION ".o")
+set(CMAKE_DEPFILE_FLAGS_CXX "-MMD -MT <OBJECT> -MF <DEPFILE>")
+set(CMAKE_C_OUTPUT_EXTENSION ".o")
+set(CMAKE_DEPFILE_FLAGS_C "-MMD -MT <OBJECT> -MF <DEPFILE>")
+
+set(CMAKE_C_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_C_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_C_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_C_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_C "-isystem ")
+
+
+set(CMAKE_ASM_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_ASM_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_ASM_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_ASM_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_ASM "-isystem ")
+
+set(CMAKE_CXX_CREATE_STATIC_LIBRARY "<CMAKE_AR> -cr <LINK_FLAGS> <TARGET> <OBJECTS>")
+set(CMAKE_CXX_LINK_EXECUTABLE "<CMAKE_CXX_COMPILER> <CMAKE_CXX_LINK_FLAGS> <LINK_FLAGS> -nostdlib -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lupgrade -lssl -lmesh -lwpa2 -lsmartconfig -lespnow -lpp -lmain -lwpa -llwip -lnet80211 -lwps -lcrypto -ldriver -lat -lphy -lhal -lgcc -lm -lc -lstdc++ -o <TARGET>")
+#set(CMAKE_CXX_LINK_EXECUTABLE "<CMAKE_CXX_COMPILER> <CMAKE_CXX_LINK_FLAGS> <LINK_FLAGS> -nostdlib -Wl,-Map,<TARGET>.map -Wl,--start-group <OBJECTS> <LINK_LIBRARIES> -lpwm -lupgrade -lssl -lgcc -lhal -lphy -lpp -lnet80211 -lwpa -lmain -llwip -lcrypto -lm -lc -o <TARGET>")
+
+set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -gdwarf-3")
+set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELEASE_INIT "-Os -DNDEBUG")
+set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-Os -g -gdwarf-3 -DNDEBUG")
+set(CMAKE_INCLUDE_SYSTEM_FLAG_CXX "-isystem ")
diff --git a/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake b/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake
new file mode 100644
index 0000000..4948935
--- /dev/null
+++ b/utils/cmake/toolchains/XTENSA_GCC/hex-generator.cmake
@@ -0,0 +1,9 @@
+add_custom_command(
+ OUTPUT "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex"
+ COMMAND "${ARM_NONE_EABI_OBJCOPY}" -O ihex "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${device.device}" "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex"
+ DEPENDS ${device.device}
+ COMMENT "converting to hex file."
+)
+
+#specify a dependency on the elf file so that hex is automatically rebuilt when elf is changed.
+add_custom_target(${device.device}_hex ALL DEPENDS "${PROJECT_SOURCE_DIR}/${codal.output_folder}/${device.device}.hex")
diff --git a/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h b/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h
new file mode 100644
index 0000000..3417ef3
--- /dev/null
+++ b/utils/cmake/toolchains/XTENSA_GCC/platform_includes.h
@@ -0,0 +1,10 @@
+#ifndef PLATFORM_INCLUDES
+#define PLATFORM_INCLUDES
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdarg.h>
+#include <math.h>
+
+#endif
diff --git a/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake b/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake
new file mode 100644
index 0000000..6789edd
--- /dev/null
+++ b/utils/cmake/toolchains/XTENSA_GCC/toolchain.cmake
@@ -0,0 +1,26 @@
+find_program(XTENSA_RANLIB xtensa-lx106-elf-gcc-ranlib)
+find_program(XTENSA_AR xtensa-lx106-elf-gcc-ar)
+find_program(XTENSA_GCC xtensa-lx106-elf-gcc)
+find_program(XTENSA_GPP xtensa-lx106-elf-g++)
+find_program(XTENSA_OBJCOPY xtensa-lx106-elf-objcopy)
+
+set(CMAKE_OSX_SYSROOT "/")
+set(CMAKE_OSX_DEPLOYMENT_TARGET "")
+
+set(CODAL_TOOLCHAIN "XTENSA_GCC")
+
+if(CMAKE_VERSION VERSION_LESS "3.5.0")
+ include(CMakeForceCompiler)
+ cmake_force_c_compiler("${XTENSA_GCC}" GNU)
+ cmake_force_cxx_compiler("${XTENSA_GPP}" GNU)
+else()
+ # from 3.5 the force_compiler macro is deprecated: CMake can detect
+    # xtensa-lx106-elf-gcc as being a GNU compiler automatically
+ set(CMAKE_TRY_COMPILE_TARGET_TYPE "STATIC_LIBRARY")
+ set(CMAKE_C_COMPILER "${XTENSA_GCC}")
+ set(CMAKE_CXX_COMPILER "${XTENSA_GPP}")
+endif()
+
+SET(CMAKE_AR "${XTENSA_AR}" CACHE FILEPATH "Archiver")
+SET(CMAKE_RANLIB "${XTENSA_RANLIB}" CACHE FILEPATH "rlib")
+set(CMAKE_CXX_OUTPUT_EXTENSION ".o")
diff --git a/utils/cmake/util.cmake b/utils/cmake/util.cmake
new file mode 100644
index 0000000..6e3a815
--- /dev/null
+++ b/utils/cmake/util.cmake
@@ -0,0 +1,156 @@
+MACRO(RECURSIVE_FIND_DIR return_list dir pattern)
+ FILE(GLOB_RECURSE new_list "${dir}/${pattern}")
+ SET(dir_list "")
+ FOREACH(file_path ${new_list})
+ GET_FILENAME_COMPONENT(dir_path ${file_path} PATH)
+ SET(dir_list ${dir_list} ${dir_path})
+ ENDFOREACH()
+ LIST(REMOVE_DUPLICATES dir_list)
+ SET(${return_list} ${dir_list})
+ENDMACRO()
+
+MACRO(RECURSIVE_FIND_FILE return_list dir pattern)
+ FILE(GLOB_RECURSE new_list "${dir}/${pattern}")
+ SET(dir_list "")
+ FOREACH(file_path ${new_list})
+ SET(dir_list ${dir_list} ${file_path})
+ ENDFOREACH()
+ LIST(REMOVE_DUPLICATES dir_list)
+ SET(${return_list} ${dir_list})
+ENDMACRO()
+
+MACRO(SOURCE_FILES return_list dir pattern)
+ FILE(GLOB new_list "${dir}/${pattern}")
+ SET(dir_list "")
+ FOREACH(file_path ${new_list})
+ LIST(APPEND dir_list ${file_path})
+ ENDFOREACH()
+ LIST(REMOVE_DUPLICATES dir_list)
+ SET(${return_list} ${dir_list})
+ENDMACRO()
+
+function(EXTRACT_JSON_ARRAY json_file json_field_path fields values)
+
+ set(VALUES "")
+ set(FIELDS "")
+
+ foreach(var ${${json_file}})
+ # extract any cmd line definitions specified in the json object, and add them
+ # if it is not prefixed by json_field_path, do not consider the key.
+ if("${var}" MATCHES "${json_field_path}")
+ string(REGEX MATCH "[^${json_field_path}]([A-Z,a-z,0-9,_,]+)" VALUE "${var}")
+
+ # never quote the value - gives more flexibility
+ list(APPEND FIELDS ${VALUE})
+ list(APPEND VALUES "${${var}}")
+ endif()
+ endforeach()
+
+ set(${fields} ${FIELDS} PARENT_SCOPE)
+ set(${values} ${VALUES} PARENT_SCOPE)
+endfunction()
+
+function(FORM_DEFINITIONS fields values definitions)
+
+ set(DEFINITIONS "")
+ list(LENGTH ${fields} LEN)
+
+    # subtract 1 to get the last index for the foreach RANGE loop...
+ MATH(EXPR LEN "${LEN}-1")
+
+ foreach(i RANGE ${LEN})
+ list(GET ${fields} ${i} DEFINITION)
+ list(GET ${values} ${i} VALUE)
+
+ set(DEFINITIONS "${DEFINITIONS} #define ${DEFINITION}\t ${VALUE}\n")
+ endforeach()
+
+ set(${definitions} ${DEFINITIONS} PARENT_SCOPE)
+endfunction()
+
+function(UNIQUE_JSON_KEYS priority_fields priority_values secondary_fields secondary_values merged_fields merged_values)
+
+ # always keep the first fields and values
+ set(MERGED_FIELDS ${${priority_fields}})
+ set(MERGED_VALUES ${${priority_values}})
+
+ # measure the second set...
+ list(LENGTH ${secondary_fields} LEN)
+    # subtract 1 to get the last index for the foreach RANGE loop...
+ MATH(EXPR LEN "${LEN}-1")
+
+ # iterate, dropping any duplicate fields regardless of the value
+ foreach(i RANGE ${LEN})
+ list(GET ${secondary_fields} ${i} FIELD)
+ list(GET ${secondary_values} ${i} VALUE)
+
+ list(FIND MERGED_FIELDS ${FIELD} INDEX)
+
+ if (${INDEX} GREATER -1)
+ continue()
+ endif()
+
+ list(APPEND MERGED_FIELDS ${FIELD})
+ list(APPEND MERGED_VALUES ${VALUE})
+ endforeach()
+
+ set(${merged_fields} ${MERGED_FIELDS} PARENT_SCOPE)
+ set(${merged_values} ${MERGED_VALUES} PARENT_SCOPE)
+endfunction()
+
+MACRO(HEADER_FILES return_list dir)
+ FILE(GLOB new_list "${dir}/*.h")
+ SET(${return_list} ${new_list})
+ENDMACRO()
+
+function(INSTALL_DEPENDENCY dir name url branch type)
+ if(NOT EXISTS "${CMAKE_CURRENT_LIST_DIR}/${dir}")
+ message("Creating libraries folder")
+ FILE(MAKE_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}/${dir}")
+ endif()
+
+ if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/${dir}/${name}")
+ message("${name} is already installed")
+ return()
+ endif()
+
+ if(${type} STREQUAL "git")
+ message("Cloning into: ${url}")
+ # git clone -b doesn't work with SHAs
+ execute_process(
+ COMMAND git clone --recurse-submodules ${url} ${name}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}
+ )
+
+ if(NOT "${branch}" STREQUAL "")
+ message("Checking out branch: ${branch}")
+ execute_process(
+ COMMAND git -c advice.detachedHead=false checkout ${branch}
+ WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name}
+ )
+ execute_process(
+ COMMAND git submodule update --init
+ WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name}
+ )
+ execute_process(
+ COMMAND git submodule sync
+ WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name}
+ )
+ execute_process(
+ COMMAND git submodule update
+ WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/${dir}/${name}
+ )
+ endif()
+ else()
+ message("No mechanism exists to install this library.")
+ endif()
+endfunction()
+
+MACRO(SUB_DIRS return_dirs dir)
+ FILE(GLOB list "${PROJECT_SOURCE_DIR}/${dir}/*")
+ SET(dir_list "")
+ FOREACH(file_path ${list})
+ SET(dir_list ${dir_list} ${file_path})
+ ENDFOREACH()
+ set(${return_dirs} ${dir_list})
+ENDMACRO()
diff --git a/utils/debug/dmesg.js b/utils/debug/dmesg.js
new file mode 100755
index 0000000..e62b5e3
--- /dev/null
+++ b/utils/debug/dmesg.js
@@ -0,0 +1,86 @@
+#!/usr/bin/env node
+"use strict";
+
+let fs = require("fs")
+let child_process = require("child_process")
+
+function fatal(msg) {
+ console.log("Fatal error:", msg)
+ process.exit(1)
+}
+
+function main() {
+ let mapFileName = process.argv[2]
+ if (!mapFileName) {
+ console.log("usage: node " + process.argv[1] + " build/mytarget/source/myprog.map")
+ return
+ }
+ console.log("Map file: " + mapFileName)
+ let mapFile = fs.readFileSync(mapFileName, "utf8")
+ let addr = 0
+ let logSize = 1024 * 4 + 4
+ for (let ln of mapFile.split(/\r?\n/)) {
+ let m = /^\s*0x00000([0-9a-f]+)\s+(\S+)/.exec(ln)
+ if (m && m[2] == "codalLogStore") {
+ addr = parseInt(m[1], 16)
+ break
+ }
+ }
+ if (!addr) fatal("Cannot find codalLogStore symbol in map file")
+
+ let dirs = [
+ process.env["HOME"] + "/Library/Arduino15",
+ process.env["USERPROFILE"] + "/AppData/Local/Arduino15",
+ process.env["HOME"] + "/.arduino15",
+ ]
+
+ let pkgDir = ""
+
+ for (let d of dirs) {
+ pkgDir = d + "/packages/arduino/"
+ if (fs.existsSync(pkgDir)) break
+ pkgDir = ""
+ }
+
+ if (!pkgDir) fatal("cannot find Arduino packages directory")
+
+ let openocdPath = pkgDir + "tools/openocd/0.9.0-arduino/"
+ if (!fs.existsSync(openocdPath)) fatal("openocd not installed in Arduino")
+
+ let openocdBin = openocdPath + "bin/openocd"
+
+ if (process.platform == "win32")
+ openocdBin += ".exe"
+
+ let zeroCfg = pkgDir + "hardware/samd/1.6.8/variants/arduino_zero/openocd_scripts/arduino_zero.cfg"
+ let cmd = `init; set M(0) 0; mem2array M 8 ${addr} ${logSize}; parray M; exit`
+
+ console.log("Starting openocd")
+ child_process.execFile(openocdBin, ["-d2",
+ "-s", openocdPath + "/share/openocd/scripts/",
+ "-f", zeroCfg,
+ "-c", cmd], {
+ maxBuffer: 1 * 1024 * 1024,
+ }, (err, stdout, stderr) => {
+ if (err) {
+ fatal("error: " + err.message)
+ }
+ let buf = new Buffer(logSize)
+ for (let l of stdout.split(/\r?\n/)) {
+ let m = /^M\((\d+)\)\s*=\s*(\d+)/.exec(l)
+ if (m) {
+ buf[parseInt(m[1])] = parseInt(m[2])
+ }
+ }
+ let len = buf.readUInt32LE(0)
+ if (len == 0 || len > buf.length) {
+ console.log(stderr)
+ console.log("No logs.")
+ } else {
+ console.log("*\n* Logs\n*\n")
+ console.log(buf.slice(4, 4 + len).toString("binary"))
+ }
+ })
+}
+
+main() \ No newline at end of file
diff --git a/utils/debug/meminfo.js b/utils/debug/meminfo.js
new file mode 100755
index 0000000..e25b043
--- /dev/null
+++ b/utils/debug/meminfo.js
@@ -0,0 +1,65 @@
+#!/usr/bin/env node
+"use strict";
+
+function main() {
+ let fs = require("fs");
+ let mfn = process.argv[2]
+ if (!mfn) {
+ console.log("usage: node " + process.argv[1] + " build/mytarget/source/myprog.map")
+ return
+ }
+ console.log("Map file: " + mfn)
+ let map = fs.readFileSync(mfn, "utf8")
+ let inSect = 0
+ let byFileRAM = {}
+ let byFileROM = {}
+ for (let ln of map.split(/\r?\n/)) {
+ if (ln == "Linker script and memory map") {
+ inSect = 1
+ }
+ if (/^OUTPUT\(/.test(ln)) {
+ inSect = 2
+ }
+ if (inSect == 1) {
+ let m = /^\s*(\S*)\s+0x00000([0-9a-f]+)\s+0x([0-9a-f]+)\s+(\S+)/.exec(ln)
+ if (m) {
+ let mark = m[1]
+ if (mark == "*fill*" || mark == ".bss" || mark == ".relocate")
+ continue;
+ let addr = parseInt(m[2], 16)
+ let sz = parseInt(m[3], 16)
+ let fn = m[4]
+ if (fn == "load" && mark) fn = mark;
+ fn = fn.replace(/.*armv6-m/, "")
+ if (sz) {
+ let mm = addr < 0x10000000 ? byFileROM : byFileRAM
+ mm[fn] = (mm[fn] || 0) + sz
+ }
+ }
+ }
+ }
+
+ console.log("*\n* ROM\n*")
+ dumpMap(byFileROM)
+ console.log("*\n* RAM\n*")
+ dumpMap(byFileRAM)
+}
+
+function printEnt(sz, s) {
+ let ff = (" " + sz).slice(-7)
+ console.log(ff + " " + s)
+}
+
+function dumpMap(m) {
+ let k = Object.keys(m)
+ k.sort((a, b) => m[a] - m[b])
+ let sum = 0
+ for (let s of k) {
+ printEnt(m[s], s)
+ sum += m[s]
+ }
+ printEnt(sum, "TOTAL")
+}
+
+
+main() \ No newline at end of file
diff --git a/utils/esptool.py b/utils/esptool.py
new file mode 100755
index 0000000..63eae28
--- /dev/null
+++ b/utils/esptool.py
@@ -0,0 +1,1274 @@
+#!/usr/bin/env python
+# NB: Before sending a PR to change the above line to '#!/usr/bin/env python2', please read https://github.com/themadinventor/esptool/issues/21
+#
+# ESP8266 ROM Bootloader Utility
+# https://github.com/themadinventor/esptool
+#
+# Copyright (C) 2014-2016 Fredrik Ahlberg, Angus Gratton, other contributors as noted.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 51 Franklin
+# Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import argparse
+import hashlib
+import inspect
+import json
+import os
+import serial
+import struct
+import subprocess
+import sys
+import tempfile
+import time
+
+
+__version__ = "1.2"
+
+
+class ESPROM(object):
+ # These are the currently known commands supported by the ROM
+ ESP_FLASH_BEGIN = 0x02
+ ESP_FLASH_DATA = 0x03
+ ESP_FLASH_END = 0x04
+ ESP_MEM_BEGIN = 0x05
+ ESP_MEM_END = 0x06
+ ESP_MEM_DATA = 0x07
+ ESP_SYNC = 0x08
+ ESP_WRITE_REG = 0x09
+ ESP_READ_REG = 0x0a
+
+ # Maximum block sized for RAM and Flash writes, respectively.
+ ESP_RAM_BLOCK = 0x1800
+ ESP_FLASH_BLOCK = 0x400
+
+ # Default baudrate. The ROM auto-bauds, so we can use more or less whatever we want.
+ ESP_ROM_BAUD = 115200
+
+ # First byte of the application image
+ ESP_IMAGE_MAGIC = 0xe9
+
+ # Initial state for the checksum routine
+ ESP_CHECKSUM_MAGIC = 0xef
+
+ # OTP ROM addresses
+ ESP_OTP_MAC0 = 0x3ff00050
+ ESP_OTP_MAC1 = 0x3ff00054
+ ESP_OTP_MAC3 = 0x3ff0005c
+
+ # Flash sector size, minimum unit of erase.
+ ESP_FLASH_SECTOR = 0x1000
+
+ def __init__(self, port=0, baud=ESP_ROM_BAUD):
+ self._port = serial.serial_for_url(port)
+ self._slip_reader = slip_reader(self._port)
+ # setting baud rate in a separate step is a workaround for
+ # CH341 driver on some Linux versions (this opens at 9600 then
+ # sets), shouldn't matter for other platforms/drivers. See
+ # https://github.com/themadinventor/esptool/issues/44#issuecomment-107094446
+ self._port.baudrate = baud
+
+ """ Read a SLIP packet from the serial port """
+ def read(self):
+ return self._slip_reader.next()
+
+ """ Write bytes to the serial port while performing SLIP escaping """
+ def write(self, packet):
+ buf = '\xc0' \
+ + (packet.replace('\xdb','\xdb\xdd').replace('\xc0','\xdb\xdc')) \
+ + '\xc0'
+ self._port.write(buf)
+
+ """ Calculate checksum of a blob, as it is defined by the ROM """
+ @staticmethod
+ def checksum(data, state=ESP_CHECKSUM_MAGIC):
+ for b in data:
+ state ^= ord(b)
+ return state
+
+ """ Send a request and read the response """
+ def command(self, op=None, data=None, chk=0):
+ if op is not None:
+ pkt = struct.pack('<BBHI', 0x00, op, len(data), chk) + data
+ self.write(pkt)
+
+ # tries to get a response until that response has the
+ # same operation as the request or a retries limit has
+ # exceeded. This is needed for some esp8266s that
+ # reply with more sync responses than expected.
+ for retry in xrange(100):
+ p = self.read()
+ if len(p) < 8:
+ continue
+ (resp, op_ret, len_ret, val) = struct.unpack('<BBHI', p[:8])
+ if resp != 1:
+ continue
+ body = p[8:]
+ if op is None or op_ret == op:
+ return val, body # valid response received
+
+ raise FatalError("Response doesn't match request")
+
+ """ Perform a connection test """
+ def sync(self):
+ self.command(ESPROM.ESP_SYNC, '\x07\x07\x12\x20' + 32 * '\x55')
+ for i in xrange(7):
+ self.command()
+
+ """ Try connecting repeatedly until successful, or giving up """
+ def connect(self):
+ print 'Connecting...'
+
+ for _ in xrange(4):
+ # issue reset-to-bootloader:
+ # RTS = either CH_PD or nRESET (both active low = chip in reset)
+ # DTR = GPIO0 (active low = boot to flasher)
+ self._port.setDTR(False)
+ self._port.setRTS(True)
+ time.sleep(0.05)
+ self._port.setDTR(True)
+ self._port.setRTS(False)
+ time.sleep(0.05)
+ self._port.setDTR(False)
+
+ # worst-case latency timer should be 255ms (probably <20ms)
+ self._port.timeout = 0.3
+ for _ in xrange(4):
+ try:
+ self._port.flushInput()
+ self._slip_reader = slip_reader(self._port)
+ self._port.flushOutput()
+ self.sync()
+ self._port.timeout = 5
+ return
+ except:
+ time.sleep(0.05)
+ raise FatalError('Failed to connect to ESP8266')
+
+ """ Read memory address in target """
+ def read_reg(self, addr):
+ res = self.command(ESPROM.ESP_READ_REG, struct.pack('<I', addr))
+ if res[1] != "\0\0":
+ raise FatalError('Failed to read target memory')
+ return res[0]
+
+ """ Write to memory address in target """
+ def write_reg(self, addr, value, mask, delay_us=0):
+ if self.command(ESPROM.ESP_WRITE_REG,
+ struct.pack('<IIII', addr, value, mask, delay_us))[1] != "\0\0":
+ raise FatalError('Failed to write target memory')
+
+ """ Start downloading an application image to RAM """
+ def mem_begin(self, size, blocks, blocksize, offset):
+ if self.command(ESPROM.ESP_MEM_BEGIN,
+ struct.pack('<IIII', size, blocks, blocksize, offset))[1] != "\0\0":
+ raise FatalError('Failed to enter RAM download mode')
+
+ """ Send a block of an image to RAM """
+ def mem_block(self, data, seq):
+ if self.command(ESPROM.ESP_MEM_DATA,
+ struct.pack('<IIII', len(data), seq, 0, 0) + data,
+ ESPROM.checksum(data))[1] != "\0\0":
+ raise FatalError('Failed to write to target RAM')
+
+ """ Leave download mode and run the application """
+ def mem_finish(self, entrypoint=0):
+ if self.command(ESPROM.ESP_MEM_END,
+ struct.pack('<II', int(entrypoint == 0), entrypoint))[1] != "\0\0":
+ raise FatalError('Failed to leave RAM download mode')
+
+ """ Start downloading to Flash (performs an erase) """
+ def flash_begin(self, size, offset):
+ old_tmo = self._port.timeout
+ num_blocks = (size + ESPROM.ESP_FLASH_BLOCK - 1) / ESPROM.ESP_FLASH_BLOCK
+
+ sectors_per_block = 16
+ sector_size = self.ESP_FLASH_SECTOR
+ num_sectors = (size + sector_size - 1) / sector_size
+ start_sector = offset / sector_size
+
+ head_sectors = sectors_per_block - (start_sector % sectors_per_block)
+ if num_sectors < head_sectors:
+ head_sectors = num_sectors
+
+ if num_sectors < 2 * head_sectors:
+ erase_size = (num_sectors + 1) / 2 * sector_size
+ else:
+ erase_size = (num_sectors - head_sectors) * sector_size
+
+ self._port.timeout = 20
+ t = time.time()
+ result = self.command(ESPROM.ESP_FLASH_BEGIN,
+ struct.pack('<IIII', erase_size, num_blocks, ESPROM.ESP_FLASH_BLOCK, offset))[1]
+ if size != 0:
+ print "Took %.2fs to erase flash block" % (time.time() - t)
+ if result != "\0\0":
+ raise FatalError.WithResult('Failed to enter Flash download mode (result "%s")', result)
+ self._port.timeout = old_tmo
+
+ """ Write block to flash """
+ def flash_block(self, data, seq):
+ result = self.command(ESPROM.ESP_FLASH_DATA,
+ struct.pack('<IIII', len(data), seq, 0, 0) + data,
+ ESPROM.checksum(data))[1]
+ if result != "\0\0":
+ raise FatalError.WithResult('Failed to write to target Flash after seq %d (got result %%s)' % seq, result)
+
+ """ Leave flash mode and run/reboot """
+ def flash_finish(self, reboot=False):
+ pkt = struct.pack('<I', int(not reboot))
+ if self.command(ESPROM.ESP_FLASH_END, pkt)[1] != "\0\0":
+ raise FatalError('Failed to leave Flash mode')
+
+ """ Run application code in flash """
+ def run(self, reboot=False):
+ # Fake flash begin immediately followed by flash end
+ self.flash_begin(0, 0)
+ self.flash_finish(reboot)
+
+ """ Read MAC from OTP ROM """
+ def read_mac(self):
+ mac0 = self.read_reg(self.ESP_OTP_MAC0)
+ mac1 = self.read_reg(self.ESP_OTP_MAC1)
+ mac3 = self.read_reg(self.ESP_OTP_MAC3)
+ if (mac3 != 0):
+ oui = ((mac3 >> 16) & 0xff, (mac3 >> 8) & 0xff, mac3 & 0xff)
+ elif ((mac1 >> 16) & 0xff) == 0:
+ oui = (0x18, 0xfe, 0x34)
+ elif ((mac1 >> 16) & 0xff) == 1:
+ oui = (0xac, 0xd0, 0x74)
+ else:
+ raise FatalError("Unknown OUI")
+ return oui + ((mac1 >> 8) & 0xff, mac1 & 0xff, (mac0 >> 24) & 0xff)
+
+ """ Read Chip ID from OTP ROM - see http://esp8266-re.foogod.com/wiki/System_get_chip_id_%28IoT_RTOS_SDK_0.9.9%29 """
+ def chip_id(self):
+ id0 = self.read_reg(self.ESP_OTP_MAC0)
+ id1 = self.read_reg(self.ESP_OTP_MAC1)
+ return (id0 >> 24) | ((id1 & 0xffffff) << 8)
+
+ """ Read SPI flash manufacturer and device id """
+ def flash_id(self):
+ self.flash_begin(0, 0)
+ self.write_reg(0x60000240, 0x0, 0xffffffff)
+ self.write_reg(0x60000200, 0x10000000, 0xffffffff)
+ flash_id = self.read_reg(0x60000240)
+ return flash_id
+
+ """ Abuse the loader protocol to force flash to be left in write mode """
+ def flash_unlock_dio(self):
+ # Enable flash write mode
+ self.flash_begin(0, 0)
+ # Reset the chip rather than call flash_finish(), which would have
+ # write protected the chip again (why oh why does it do that?!)
+ self.mem_begin(0,0,0,0x40100000)
+ self.mem_finish(0x40000080)
+
+ """ Perform a chip erase of SPI flash """
+ def flash_erase(self):
+ # Trick ROM to initialize SFlash
+ self.flash_begin(0, 0)
+
+ # This is hacky: we don't have a custom stub, instead we trick
+ # the bootloader to jump to the SPIEraseChip() routine and then halt/crash
+ # when it tries to boot an unconfigured system.
+ self.mem_begin(0,0,0,0x40100000)
+ self.mem_finish(0x40004984)
+
+ # Yup - there's no good way to detect if we succeeded.
+        # It is, on the other hand, unlikely to fail.
+
+ def run_stub(self, stub, params, read_output=True):
+ stub = dict(stub)
+ stub['code'] = unhexify(stub['code'])
+ if 'data' in stub:
+ stub['data'] = unhexify(stub['data'])
+
+ if stub['num_params'] != len(params):
+ raise FatalError('Stub requires %d params, %d provided'
+ % (stub['num_params'], len(params)))
+
+ params = struct.pack('<' + ('I' * stub['num_params']), *params)
+ pc = params + stub['code']
+
+ # Upload
+ self.mem_begin(len(pc), 1, len(pc), stub['params_start'])
+ self.mem_block(pc, 0)
+ if 'data' in stub:
+ self.mem_begin(len(stub['data']), 1, len(stub['data']), stub['data_start'])
+ self.mem_block(stub['data'], 0)
+ self.mem_finish(stub['entry'])
+
+ if read_output:
+ print 'Stub executed, reading response:'
+ while True:
+ p = self.read()
+ print hexify(p)
+ if p == '':
+ return
+
+
+class ESPBOOTLOADER(object):
+ """ These are constants related to software ESP bootloader, working with 'v2' image files """
+
+ # First byte of the "v2" application image
+ IMAGE_V2_MAGIC = 0xea
+
+ # First 'segment' value in a "v2" application image, appears to be a constant version value?
+ IMAGE_V2_SEGMENT = 4
+
+
+def LoadFirmwareImage(filename):
+ """ Load a firmware image, without knowing what kind of file (v1 or v2) it is.
+
+ Returns a BaseFirmwareImage subclass, either ESPFirmwareImage (v1) or OTAFirmwareImage (v2).
+ """
+ with open(filename, 'rb') as f:
+ magic = ord(f.read(1))
+ f.seek(0)
+ if magic == ESPROM.ESP_IMAGE_MAGIC:
+ return ESPFirmwareImage(f)
+ elif magic == ESPBOOTLOADER.IMAGE_V2_MAGIC:
+ return OTAFirmwareImage(f)
+ else:
+ raise FatalError("Invalid image magic number: %d" % magic)
+
+
class BaseFirmwareImage(object):
    """Shared behaviour for v1/v2 firmware images: segment bookkeeping and
    the (de)serialisation / checksum helpers both formats use."""

    def __init__(self):
        self.segments = []   # list of (address, size, data) tuples
        self.entrypoint = 0

    def add_segment(self, addr, data, pad_to=4):
        """ Add a segment to the image, with specified address & data
        (padded to a boundary of pad_to size) """
        # Data should be aligned on word boundary
        original_length = len(data)
        remainder = original_length % pad_to
        if remainder:
            data += b"\x00" * (pad_to - remainder)
        # zero-length segments are silently dropped
        if original_length:
            self.segments.append((addr, len(data), data))

    def load_segment(self, f, is_irom_segment=False):
        """ Load the next segment from the image file """
        offset, size = struct.unpack('<II', f.read(8))
        # Non-irom segments must land in the RAM window and be modestly sized.
        if not is_irom_segment and (offset > 0x40200000 or offset < 0x3ffe0000 or size > 65536):
            raise FatalError('Suspicious segment 0x%x, length %d' % (offset, size))
        segment_data = f.read(size)
        if len(segment_data) < size:
            raise FatalError('End of file reading segment 0x%x, length %d (actual length %d)' % (offset, size, len(segment_data)))
        segment = (offset, size, segment_data)
        self.segments.append(segment)
        return segment

    def save_segment(self, f, segment, checksum=None):
        """ Save the next segment to the image file, return next checksum value if provided """
        offset, size, data = segment
        f.write(struct.pack('<II', offset, size))
        f.write(data)
        if checksum is None:
            return None
        return ESPROM.checksum(data, checksum)

    def read_checksum(self, f):
        """ Return ESPROM checksum from end of just-read image """
        # The checksum is the final byte of a 16-byte-aligned file;
        # skip over the padding to reach it.
        align_file_position(f, 16)
        return ord(f.read(1))

    def append_checksum(self, f, checksum):
        """ Append ESPROM checksum to the just-written image """
        align_file_position(f, 16)
        f.write(struct.pack('B', checksum))

    def write_v1_header(self, f, segments):
        """Write the 8-byte v1 image header (magic, segment count,
        flash mode, flash size/freq, entry point)."""
        f.write(struct.pack('<BBBBI', ESPROM.ESP_IMAGE_MAGIC, len(segments),
                            self.flash_mode, self.flash_size_freq, self.entrypoint))
+
+
class ESPFirmwareImage(BaseFirmwareImage):
    """ 'Version 1' firmware image, segments loaded directly by the ROM bootloader. """

    def __init__(self, load_file=None):
        """If load_file is given, parse a v1 image from it; otherwise start empty."""
        super(ESPFirmwareImage, self).__init__()
        self.flash_mode = 0
        self.flash_size_freq = 0
        self.version = 1

        if load_file is None:
            return

        header = struct.unpack('<BBBBI', load_file.read(8))
        (magic, segments, self.flash_mode, self.flash_size_freq, self.entrypoint) = header

        # some sanity check
        if magic != ESPROM.ESP_IMAGE_MAGIC or segments > 16:
            raise FatalError('Invalid firmware image magic=%d segments=%d' % (magic, segments))

        for _ in xrange(segments):
            self.load_segment(load_file)
        self.checksum = self.read_checksum(load_file)

    def save(self, filename):
        """Serialise header, segments and trailing checksum to filename."""
        with open(filename, 'wb') as f:
            self.write_v1_header(f, self.segments)
            running_checksum = ESPROM.ESP_CHECKSUM_MAGIC
            for segment in self.segments:
                running_checksum = self.save_segment(f, segment, running_checksum)
            self.append_checksum(f, running_checksum)
+
+
class OTAFirmwareImage(BaseFirmwareImage):
    """ 'Version 2' firmware image, segments loaded by software bootloader stub
    (ie Espressif bootloader or rboot)
    """
    def __init__(self, load_file=None):
        """If load_file is given, parse a v2 image: first header + irom
        segment, then a v1-style second header + normal segments."""
        super(OTAFirmwareImage, self).__init__()
        self.version = 2
        if load_file is not None:
            # First 8-byte header; its mode/size-freq/entrypoint fields are
            # only cross-checked against the second header further down.
            (magic, segments, first_flash_mode, first_flash_size_freq, first_entrypoint) = struct.unpack('<BBBBI', load_file.read(8))

            # some sanity check
            if magic != ESPBOOTLOADER.IMAGE_V2_MAGIC:
                raise FatalError('Invalid V2 image magic=%d' % (magic))
            if segments != 4:
                # segment count is not really segment count here, but we expect to see '4'
                print 'Warning: V2 header has unexpected "segment" count %d (usually 4)' % segments

            # irom segment comes before the second header
            self.load_segment(load_file, True)

            # Second header: same layout as a v1 image header.
            (magic, segments, self.flash_mode, self.flash_size_freq, self.entrypoint) = struct.unpack('<BBBBI', load_file.read(8))

            # The second header's values win; mismatches are only warned about.
            if first_flash_mode != self.flash_mode:
                print('WARNING: Flash mode value in first header (0x%02x) disagrees with second (0x%02x). Using second value.'
                      % (first_flash_mode, self.flash_mode))
            if first_flash_size_freq != self.flash_size_freq:
                print('WARNING: Flash size/freq value in first header (0x%02x) disagrees with second (0x%02x). Using second value.'
                      % (first_flash_size_freq, self.flash_size_freq))
            if first_entrypoint != self.entrypoint:
                print('WARNING: Enterypoint address in first header (0x%08x) disagrees with second header (0x%08x). Using second value.'
                      % (first_entrypoint, self.entrypoint))

            if magic != ESPROM.ESP_IMAGE_MAGIC or segments > 16:
                raise FatalError('Invalid V2 second header magic=%d segments=%d' % (magic, segments))

            # load all the usual segments
            for _ in xrange(segments):
                self.load_segment(load_file)
            self.checksum = self.read_checksum(load_file)

    def save(self, filename):
        """Serialise the v2 image: first header + irom0 segment, then a
        v1-style header, the remaining segments and the checksum byte."""
        with open(filename, 'wb') as f:
            # Save first header for irom0 segment
            f.write(struct.pack('<BBBBI', ESPBOOTLOADER.IMAGE_V2_MAGIC, ESPBOOTLOADER.IMAGE_V2_SEGMENT,
                                self.flash_mode, self.flash_size_freq, self.entrypoint))

            # irom0 segment identified by load address zero
            irom_segments = [segment for segment in self.segments if segment[0] == 0]
            if len(irom_segments) != 1:
                raise FatalError('Found %d segments that could be irom0. Bad ELF file?' % len(irom_segments))
            # save irom0 segment
            irom_segment = irom_segments[0]
            self.save_segment(f, irom_segment)

            # second header, matches V1 header and contains loadable segments
            normal_segments = [s for s in self.segments if s != irom_segment]
            self.write_v1_header(f, normal_segments)
            checksum = ESPROM.ESP_CHECKSUM_MAGIC
            for segment in normal_segments:
                checksum = self.save_segment(f, segment, checksum)
            self.append_checksum(f, checksum)
+
+
class ELFFile(object):
    """Thin wrapper around the Xtensa binutils (nm/readelf/objcopy) used to
    pull symbols, the entry point and raw sections out of an ELF file.

    Tool names switch to the 'xt-*' variants when the XTENSA_CORE
    environment variable is set to 'lx106'.
    """
    def __init__(self, name):
        # Path is sanitised for binutils on Cygwin; symbols are lazy-loaded.
        self.name = binutils_safe_path(name)
        self.symbols = None

    def _fetch_symbols(self):
        """Populate self.symbols (name -> address) from nm output, once."""
        if self.symbols is not None:
            return
        self.symbols = {}
        try:
            tool_nm = "xtensa-lx106-elf-nm"
            if os.getenv('XTENSA_CORE') == 'lx106':
                tool_nm = "xt-nm"
            proc = subprocess.Popen([tool_nm, self.name], stdout=subprocess.PIPE)
        except OSError:
            print "Error calling %s, do you have Xtensa toolchain in PATH?" % tool_nm
            sys.exit(1)
        # nm lines are "<addr> <type> <name>"; undefined symbols have no
        # address so their type letter lands in fields[0].
        for l in proc.stdout:
            fields = l.strip().split()
            try:
                if fields[0] == "U":
                    print "Warning: ELF binary has undefined symbol %s" % fields[1]
                    continue
                if fields[0] == "w":
                    continue  # can skip weak symbols
                self.symbols[fields[2]] = int(fields[0], 16)
            except ValueError:
                raise FatalError("Failed to strip symbol output from nm: %s" % fields)

    def get_symbol_addr(self, sym):
        """Return the address of symbol 'sym' (KeyError if absent)."""
        self._fetch_symbols()
        return self.symbols[sym]

    def get_entry_point(self):
        """Parse the ELF entry point address out of 'readelf -h' output."""
        tool_readelf = "xtensa-lx106-elf-readelf"
        if os.getenv('XTENSA_CORE') == 'lx106':
            tool_readelf = "xt-readelf"
        try:
            proc = subprocess.Popen([tool_readelf, "-h", self.name], stdout=subprocess.PIPE)
        except OSError:
            print "Error calling %s, do you have Xtensa toolchain in PATH?" % tool_readelf
            sys.exit(1)
        # Look for the "Entry point address:" header line.
        for l in proc.stdout:
            fields = l.strip().split()
            if fields[0] == "Entry":
                return int(fields[3], 0)

    def load_section(self, section):
        """Extract a named section's raw bytes via objcopy -Obinary,
        using (and always cleaning up) a temporary file."""
        tool_objcopy = "xtensa-lx106-elf-objcopy"
        if os.getenv('XTENSA_CORE') == 'lx106':
            tool_objcopy = "xt-objcopy"
        tmpsection = binutils_safe_path(tempfile.mktemp(suffix=".section"))
        try:
            subprocess.check_call([tool_objcopy, "--only-section", section, "-Obinary", self.name, tmpsection])
            with open(tmpsection, "rb") as f:
                data = f.read()
        finally:
            os.remove(tmpsection)
        return data
+
+
class CesantaFlasher(object):
    """Driver for the Cesanta flasher stub (_CESANTA_FLASHER_STUB).

    Uploads the stub via ESPROM.run_stub() and then speaks the stub's own
    packet protocol (single command byte + little-endian arguments, status
    replies and MD5 digests) over the same serial link.
    """

    # From stub_flasher.h
    CMD_FLASH_WRITE = 1
    CMD_FLASH_READ = 2
    CMD_FLASH_DIGEST = 3
    CMD_FLASH_ERASE_CHIP = 5
    CMD_BOOT_FW = 6

    def __init__(self, esp, baud_rate=0):
        """Upload and start the stub; optionally switch to a higher baud rate.

        Expects the stub to greet with 'OHAI' once running.
        """
        print 'Running Cesanta flasher stub...'
        if baud_rate <= ESPROM.ESP_ROM_BAUD:  # don't change baud rates if we already synced at that rate
            baud_rate = 0
        self._esp = esp
        esp.run_stub(json.loads(_CESANTA_FLASHER_STUB), [baud_rate], read_output=False)
        if baud_rate > 0:
            esp._port.baudrate = baud_rate
        # Read the greeting.
        p = esp.read()
        if p != 'OHAI':
            raise FatalError('Failed to connect to the flasher (got %s)' % hexify(p))

    def flash_write(self, addr, data, show_progress=False):
        """Write sector-aligned data at addr, verifying with an MD5 digest.

        Streams data while keeping at most 5120 bytes in flight beyond what
        the stub has acknowledged as written.
        """
        assert addr % self._esp.ESP_FLASH_SECTOR == 0, 'Address must be sector-aligned'
        assert len(data) % self._esp.ESP_FLASH_SECTOR == 0, 'Length must be sector-aligned'
        sys.stdout.write('Writing %d @ 0x%x... ' % (len(data), addr))
        sys.stdout.flush()
        self._esp.write(struct.pack('<B', self.CMD_FLASH_WRITE))
        self._esp.write(struct.pack('<III', addr, len(data), 1))
        num_sent, num_written = 0, 0
        while num_written < len(data):
            # 4-byte packets are progress acks; 1-byte packets are errors.
            p = self._esp.read()
            if len(p) == 4:
                num_written = struct.unpack('<I', p)[0]
            elif len(p) == 1:
                status_code = struct.unpack('<B', p)[0]
                raise FatalError('Write failure, status: %x' % status_code)
            else:
                raise FatalError('Unexpected packet while writing: %s' % hexify(p))
            if show_progress:
                progress = '%d (%d %%)' % (num_written, num_written * 100.0 / len(data))
                sys.stdout.write(progress + '\b' * len(progress))
                sys.stdout.flush()
            while num_sent - num_written < 5120:
                self._esp._port.write(data[num_sent:num_sent + 1024])
                num_sent += 1024
        # Stub replies with a 16-byte MD5 digest followed by a status byte.
        p = self._esp.read()
        if len(p) != 16:
            raise FatalError('Expected digest, got: %s' % hexify(p))
        digest = hexify(p).upper()
        expected_digest = hashlib.md5(data).hexdigest().upper()
        print
        if digest != expected_digest:
            raise FatalError('Digest mismatch: expected %s, got %s' % (expected_digest, digest))
        p = self._esp.read()
        if len(p) != 1:
            raise FatalError('Expected status, got: %s' % hexify(p))
        status_code = struct.unpack('<B', p)[0]
        if status_code != 0:
            raise FatalError('Write failure, status: %x' % status_code)

    def flash_read(self, addr, length, show_progress=False):
        """Read length bytes from flash at addr, verifying with an MD5 digest."""
        sys.stdout.write('Reading %d @ 0x%x... ' % (length, addr))
        sys.stdout.flush()
        self._esp.write(struct.pack('<B', self.CMD_FLASH_READ))
        # USB may not be able to keep up with the read rate, especially at
        # higher speeds. Since we don't have flow control, this will result in
        # data loss. Hence, we use small packet size and only allow small
        # number of bytes in flight, which we can reasonably expect to fit in
        # the on-chip FIFO. max_in_flight = 64 works for CH340G, other chips may
        # have longer FIFOs and could benefit from increasing max_in_flight.
        self._esp.write(struct.pack('<IIII', addr, length, 32, 64))
        data = ''
        while True:
            p = self._esp.read()
            data += p
            # Ack how much we have so the stub can send the next chunk.
            self._esp.write(struct.pack('<I', len(data)))
            if show_progress and (len(data) % 1024 == 0 or len(data) == length):
                progress = '%d (%d %%)' % (len(data), len(data) * 100.0 / length)
                sys.stdout.write(progress + '\b' * len(progress))
                sys.stdout.flush()
            if len(data) == length:
                break
            if len(data) > length:
                raise FatalError('Read more than expected')
        p = self._esp.read()
        if len(p) != 16:
            raise FatalError('Expected digest, got: %s' % hexify(p))
        expected_digest = hexify(p).upper()
        digest = hashlib.md5(data).hexdigest().upper()
        print
        if digest != expected_digest:
            raise FatalError('Digest mismatch: expected %s, got %s' % (expected_digest, digest))
        p = self._esp.read()
        if len(p) != 1:
            raise FatalError('Expected status, got: %s' % hexify(p))
        status_code = struct.unpack('<B', p)[0]
        if status_code != 0:
            raise FatalError('Write failure, status: %x' % status_code)
        return data

    def flash_digest(self, addr, length, digest_block_size=0):
        """Ask the stub to MD5 a flash region.

        Returns (digest_of_whole_region, list_of_per-block_digests).
        """
        self._esp.write(struct.pack('<B', self.CMD_FLASH_DIGEST))
        self._esp.write(struct.pack('<III', addr, length, digest_block_size))
        digests = []
        while True:
            p = self._esp.read()
            if len(p) == 16:
                digests.append(p)
            elif len(p) == 1:
                status_code = struct.unpack('<B', p)[0]
                if status_code != 0:
                    raise FatalError('Write failure, status: %x' % status_code)
                break
            else:
                raise FatalError('Unexpected packet: %s' % hexify(p))
        # Overall digest arrives last, after any per-block digests.
        return digests[-1], digests[:-1]

    def boot_fw(self):
        """Tell the stub to boot the firmware currently in flash."""
        self._esp.write(struct.pack('<B', self.CMD_BOOT_FW))
        p = self._esp.read()
        if len(p) != 1:
            raise FatalError('Expected status, got: %s' % hexify(p))
        status_code = struct.unpack('<B', p)[0]
        if status_code != 0:
            raise FatalError('Boot failure, status: %x' % status_code)

    def flash_erase_chip(self):
        """Erase the whole SPI flash; temporarily raises the serial timeout
        to 60 s because a full-chip erase is slow."""
        self._esp.write(struct.pack('<B', self.CMD_FLASH_ERASE_CHIP))
        otimeout = self._esp._port.timeout
        self._esp._port.timeout = 60
        p = self._esp.read()
        self._esp._port.timeout = otimeout
        if len(p) != 1:
            raise FatalError('Expected status, got: %s' % hexify(p))
        status_code = struct.unpack('<B', p)[0]
        if status_code != 0:
            raise FatalError('Erase chip failure, status: %x' % status_code)
+
+
def slip_reader(port):
    """Generator to read SLIP packets from a serial port.
    Yields one full SLIP packet at a time, raises exception on timeout or invalid data.

    Designed to avoid too many calls to serial.read(1), which can bog
    down on slow systems.

    SLIP framing: packets are delimited by 0xC0; 0xDB 0xDC encodes a
    literal 0xC0 and 0xDB 0xDD a literal 0xDB inside a packet.
    """
    partial_packet = None   # None = between packets; str = body accumulated so far
    in_escape = False       # True when the previous byte was the 0xDB escape
    while True:
        # Read whatever is buffered (at least one byte) in one syscall.
        waiting = port.inWaiting()
        read_bytes = port.read(1 if waiting == 0 else waiting)
        if read_bytes == '':
            raise FatalError("Timed out waiting for packet %s" % ("header" if partial_packet is None else "content"))

        for b in read_bytes:
            if partial_packet is None:  # waiting for packet header
                if b == '\xc0':
                    partial_packet = ""
                else:
                    raise FatalError('Invalid head of packet (%r)' % b)
            elif in_escape:  # part-way through escape sequence
                in_escape = False
                if b == '\xdc':
                    partial_packet += '\xc0'
                elif b == '\xdd':
                    partial_packet += '\xdb'
                else:
                    raise FatalError('Invalid SLIP escape (%r%r)' % ('\xdb', b))
            elif b == '\xdb':  # start of escape sequence
                in_escape = True
            elif b == '\xc0':  # end of packet
                yield partial_packet
                partial_packet = None
            else:  # normal byte in packet
                partial_packet += b
+
+
def arg_auto_int(x):
    """Parse a command-line integer, honouring 0x/0o/0b prefixes (base 0)."""
    return int(x, 0)
+
+
def div_roundup(a, b):
    """ Return a/b rounded up to nearest integer,
    equivalent result to int(math.ceil(float(int(a)) / float(int(b))), only
    without possible floating point accuracy errors.

    Uses floor division (//) so the result stays an integer even under
    true division (Python 3 / 'from __future__ import division'); for
    Python 2 ints this is identical to the old '/' behaviour.
    """
    return (int(a) + int(b) - 1) // int(b)
+
+
+def binutils_safe_path(p):
+ """Returns a 'safe' version of path 'p' to pass to binutils
+
+ Only does anything under Cygwin Python, where cygwin paths need to
+ be translated to Windows paths if the binutils wasn't compiled
+ using Cygwin (should also work with binutils compiled using
+ Cygwin, see #73.)
+ """
+ if sys.platform == "cygwin":
+ try:
+ return subprocess.check_output(["cygpath", "-w", p]).rstrip('\n')
+ except subprocess.CalledProcessError:
+ print "WARNING: Failed to call cygpath to sanitise Cygwin path."
+ return p
+
+
def align_file_position(f, size):
    """Seek forward so the file position lands on the last byte of the
    current size-byte block (i.e. position % size == size - 1)."""
    f.seek((size - 1) - (f.tell() % size), 1)
+
+
def hexify(s):
    """Render a byte string as a run of upper-case hex digit pairs."""
    out = ''
    for ch in s:
        out += '%02X' % ord(ch)
    return out
+
+
def unhexify(hs):
    """Inverse of hexify(): decode pairs of hex digits back to characters.
    A trailing unpaired digit, if any, is ignored."""
    return ''.join(chr(int(hs[i:i + 2], 16))
                   for i in range(0, len(hs) - 1, 2))
+
+
class FatalError(RuntimeError):
    """
    Wrapper class for runtime errors that aren't caused by internal bugs, but by
    ESP8266 responses or input content.
    """

    def __init__(self, message):
        RuntimeError.__init__(self, message)

    @staticmethod
    def WithResult(message, result):
        """
        Build a FatalError whose message has the bytes of 'result'
        substituted in as a comma-separated list of hex values.
        """
        hex_values = ", ".join(hex(ord(x)) for x in result)
        return FatalError(message % hex_values)
+
+
+# "Operation" commands, executable at command line. One function each
+#
+# Each function takes either two args (<ESPROM instance>, <args>) or a single <args>
+# argument.
+
def load_ram(esp, args):
    """'load_ram' operation: download each image segment into RAM over the
    ROM loader protocol, then jump to the image entry point."""
    image = LoadFirmwareImage(args.filename)

    print 'RAM boot...'
    for (offset, size, data) in image.segments:
        # trailing comma keeps the later 'done!' on the same output line
        print 'Downloading %d bytes at %08x...' % (size, offset),
        sys.stdout.flush()
        esp.mem_begin(size, div_roundup(size, esp.ESP_RAM_BLOCK), esp.ESP_RAM_BLOCK, offset)

        # Send the segment in ESP_RAM_BLOCK-sized chunks.
        seq = 0
        while len(data) > 0:
            esp.mem_block(data[0:esp.ESP_RAM_BLOCK], seq)
            data = data[esp.ESP_RAM_BLOCK:]
            seq += 1
        print 'done!'

    print 'All segments done, executing at %08x' % image.entrypoint
    esp.mem_finish(image.entrypoint)
+
+
+def read_mem(esp, args):
+ print '0x%08x = 0x%08x' % (args.address, esp.read_reg(args.address))
+
+
+def write_mem(esp, args):
+ esp.write_reg(args.address, args.value, args.mask, 0)
+ print 'Wrote %08x, mask %08x to %08x' % (args.value, args.mask, args.address)
+
+
def dump_mem(esp, args):
    """'dump_mem' operation: dump a memory region to a file, one 32-bit
    word per read_reg call, with in-place progress output every 1 KiB."""
    f = file(args.filename, 'wb')
    for i in xrange(args.size / 4):
        d = esp.read_reg(args.address + (i * 4))
        f.write(struct.pack('<I', d))
        if f.tell() % 1024 == 0:
            # trailing comma: '\r' rewrites the progress line in place
            print '\r%d bytes read... (%d %%)' % (f.tell(),
                                                  f.tell() * 100 / args.size),
            sys.stdout.flush()
    print 'Done!'
+
+
def detect_flash_size(esp, args):
    """If args.flash_size is 'detect', replace it in-place with a size
    string derived from the SPI flash ID (size code in bits 16-23),
    defaulting to '4m' for unknown codes."""
    if args.flash_size == 'detect':
        flash_id = esp.flash_id()
        size_id = flash_id >> 16
        # size code -> Mbit string; .get() yields None for unknown codes
        args.flash_size = {18: '2m', 19: '4m', 20: '8m', 21: '16m', 22: '32m'}.get(size_id)
        if args.flash_size is None:
            print 'Warning: Could not auto-detect Flash size (FlashID=0x%x, SizeID=0x%x), defaulting to 4m' % (flash_id, size_id)
            args.flash_size = '4m'
        else:
            print 'Auto-detected Flash size:', args.flash_size
+
+
def write_flash(esp, args):
    """'write_flash' operation: write one or more images to SPI flash via
    the Cesanta stub, optionally verifying, then boot the firmware."""
    detect_flash_size(esp, args)
    # Encode CLI choices into the two flash-parameter header bytes.
    flash_mode = {'qio':0, 'qout':1, 'dio':2, 'dout': 3}[args.flash_mode]
    flash_size_freq = {'4m':0x00, '2m':0x10, '8m':0x20, '16m':0x30, '32m':0x40, '16m-c1': 0x50, '32m-c1':0x60, '32m-c2':0x70}[args.flash_size]
    flash_size_freq += {'40m':0, '26m':1, '20m':2, '80m': 0xf}[args.flash_freq]
    flash_params = struct.pack('BB', flash_mode, flash_size_freq)

    flasher = CesantaFlasher(esp, args.baud)

    for address, argfile in args.addr_filename:
        image = argfile.read()
        argfile.seek(0)  # rewind in case we need it again
        if address + len(image) > int(args.flash_size.split('m')[0]) * (1 << 17):
            print 'WARNING: Unlikely to work as data goes beyond end of flash. Hint: Use --flash_size'
        # Fix sflash config data: patch bytes 2-3 of an image written at
        # offset 0 whose first byte is the v1 image magic (0xe9).
        if address == 0 and image[0] == '\xe9':
            print 'Flash params set to 0x%02x%02x' % (flash_mode, flash_size_freq)
            image = image[0:2] + flash_params + image[4:]
        # Pad to sector size, which is the minimum unit of writing (erasing really).
        if len(image) % esp.ESP_FLASH_SECTOR != 0:
            image += '\xff' * (esp.ESP_FLASH_SECTOR - (len(image) % esp.ESP_FLASH_SECTOR))
        t = time.time()
        flasher.flash_write(address, image, not args.no_progress)
        t = time.time() - t
        print ('\rWrote %d bytes at 0x%x in %.1f seconds (%.1f kbit/s)...'
               % (len(image), address, t, len(image) / t * 8 / 1000))
    print 'Leaving...'
    if args.verify:
        print 'Verifying just-written flash...'
        _verify_flash(flasher, args, flash_params)
    flasher.boot_fw()
+
+
+def image_info(args):
+ image = LoadFirmwareImage(args.filename)
+ print('Image version: %d' % image.version)
+ print('Entry point: %08x' % image.entrypoint) if image.entrypoint != 0 else 'Entry point not set'
+ print '%d segments' % len(image.segments)
+ print
+ checksum = ESPROM.ESP_CHECKSUM_MAGIC
+ for (idx, (offset, size, data)) in enumerate(image.segments):
+ if image.version == 2 and idx == 0:
+ print 'Segment 1: %d bytes IROM0 (no load address)' % size
+ else:
+ print 'Segment %d: %5d bytes at %08x' % (idx + 1, size, offset)
+ checksum = ESPROM.checksum(data, checksum)
+ print
+ print 'Checksum: %02x (%s)' % (image.checksum, 'valid' if image.checksum == checksum else 'invalid!')
+
+
def make_image(args):
    """'make_image' operation: assemble a v1 firmware image from
    (--segfile, --segaddr) pairs plus an --entrypoint.

    Raises FatalError when no segments are given or the file/address
    counts disagree. Uses 'with open' instead of the bare file() call so
    each segment file is closed promptly (the original leaked handles).
    """
    image = ESPFirmwareImage()
    if len(args.segfile) == 0:
        raise FatalError('No segments specified')
    if len(args.segfile) != len(args.segaddr):
        raise FatalError('Number of specified files does not match number of specified addresses')
    for (seg, addr) in zip(args.segfile, args.segaddr):
        with open(seg, 'rb') as f:
            data = f.read()
        image.add_segment(addr, data)
    image.entrypoint = args.entrypoint
    image.save(args.output)
+
+
def elf2image(args):
    """'elf2image' operation: convert an ELF file into a v1 (two .bin
    files) or v2 (single OTA .bin) firmware image."""
    e = ELFFile(args.input)
    if args.version == '1':
        image = ESPFirmwareImage()
    else:
        image = OTAFirmwareImage()
        # v2 images embed .irom0.text as the first (load-address-0) segment
        irom_data = e.load_section('.irom0.text')
        if len(irom_data) == 0:
            raise FatalError(".irom0.text section not found in ELF file - can't create V2 image.")
        image.add_segment(0, irom_data, 16)
    image.entrypoint = e.get_entry_point()
    # Loadable sections: addresses come from the linker-provided symbols.
    for section, start in ((".text", "_text_start"), (".data", "_data_start"), (".rodata", "_rodata_start")):
        data = e.load_section(section)
        image.add_segment(e.get_symbol_addr(start), data)

    # Encode CLI flash settings into the image header bytes.
    image.flash_mode = {'qio':0, 'qout':1, 'dio':2, 'dout': 3}[args.flash_mode]
    image.flash_size_freq = {'4m':0x00, '2m':0x10, '8m':0x20, '16m':0x30, '32m':0x40, '16m-c1': 0x50, '32m-c1':0x60, '32m-c2':0x70}[args.flash_size]
    image.flash_size_freq += {'40m':0, '26m':1, '20m':2, '80m': 0xf}[args.flash_freq]

    # Flash offset of the irom0 data = its mapped address minus the
    # flash mapping base.
    irom_offs = e.get_symbol_addr("_irom0_text_start") - 0x40200000

    if args.version == '1':
        if args.output is None:
            args.output = os.path.splitext(args.input)[-1] + '-'
        # v1 layout: loadable image at 0x00000, irom0 blob at its own offset
        image.save(args.output + "0x00000.bin")
        data = e.load_section(".irom0.text")
        if irom_offs < 0:
            raise FatalError('Address of symbol _irom0_text_start in ELF is located before flash mapping address. Bad linker script?')
        if (irom_offs & 0xFFF) != 0:  # irom0 isn't flash sector aligned
            print "WARNING: irom0 section offset is 0x%08x. ELF is probably linked for 'elf2image --version=2'" % irom_offs
        with open(args.output + "0x%05x.bin" % irom_offs, "wb") as f:
            f.write(data)
            f.close()
    else:  # V2 OTA image

        if args.output is None:
            args.output = "%s-0x%05x.bin" % (os.path.splitext(args.input)[-1], irom_offs & ~(ESPROM.ESP_FLASH_SECTOR - 1))
        image.save(args.output)
+
+
+def read_mac(esp, args):
+ mac = esp.read_mac()
+ print 'MAC: %s' % ':'.join(map(lambda x: '%02x' % x, mac))
+
+
+def chip_id(esp, args):
+ chipid = esp.chip_id()
+ print 'Chip ID: 0x%08x' % chipid
+
+
+def erase_flash(esp, args):
+ flasher = CesantaFlasher(esp, args.baud)
+ print 'Erasing flash (this may take a while)...'
+ t = time.time()
+ flasher.flash_erase_chip()
+ t = time.time() - t
+ print 'Erase took %.1f seconds' % t
+
+
def run(esp, args):
    """'run' operation: delegate straight to esp.run()."""
    esp.run()
+
+
+def flash_id(esp, args):
+ flash_id = esp.flash_id()
+ esp.flash_finish(False)
+ print 'Manufacturer: %02x' % (flash_id & 0xff)
+ print 'Device: %02x%02x' % ((flash_id >> 8) & 0xff, (flash_id >> 16) & 0xff)
+
+
def read_flash(esp, args):
    """'read_flash' operation: read a flash region via the Cesanta stub and
    save it to args.filename.

    Uses 'with open' so the output file is flushed and closed even if an
    error occurs (the original file(...).write(...) leaked the handle).
    """
    flasher = CesantaFlasher(esp, args.baud)
    t = time.time()
    data = flasher.flash_read(args.address, args.size, not args.no_progress)
    t = time.time() - t
    print ('\rRead %d bytes at 0x%x in %.1f seconds (%.1f kbit/s)...'
           % (len(data), args.address, t, len(data) / t * 8 / 1000))
    with open(args.filename, 'wb') as f:
        f.write(data)
+
+
def _verify_flash(flasher, args, flash_params=None):
    """Verify each (address, file) pair against flash contents.

    Tries an on-chip MD5 digest first and only reads flash back for a
    byte-by-byte diff when the digest differs and args.diff == 'yes'.
    Raises FatalError if any pair differs.
    """
    differences = False
    for address, argfile in args.addr_filename:
        image = argfile.read()
        argfile.seek(0)  # rewind in case we need it again
        # Apply the same flash-params patch write_flash applied, so the
        # comparison matches what was actually written.
        if address == 0 and image[0] == '\xe9' and flash_params is not None:
            image = image[0:2] + flash_params + image[4:]
        image_size = len(image)
        print 'Verifying 0x%x (%d) bytes @ 0x%08x in flash against %s...' % (image_size, image_size, address, argfile.name)
        # Try digest first, only read if there are differences.
        digest, _ = flasher.flash_digest(address, image_size)
        digest = hexify(digest).upper()
        expected_digest = hashlib.md5(image).hexdigest().upper()
        if digest == expected_digest:
            print '-- verify OK (digest matched)'
            continue
        else:
            differences = True
            if getattr(args, 'diff', 'no') != 'yes':
                print '-- verify FAILED (digest mismatch)'
                continue

        flash = flasher.flash_read(address, image_size)
        # digest said the contents differ, so a byte diff must exist
        assert flash != image
        diff = [i for i in xrange(image_size) if flash[i] != image[i]]
        print '-- verify FAILED: %d differences, first @ 0x%08x' % (len(diff), address + diff[0])
        for d in diff:
            print '   %08x %02x %02x' % (address + d, ord(flash[d]), ord(image[d]))
    if differences:
        raise FatalError("Verify failed.")
+
+
def verify_flash(esp, args, flash_params=None):
    """'verify_flash' operation: start the flasher stub at the default baud
    rate and delegate to _verify_flash."""
    _verify_flash(CesantaFlasher(esp), args, flash_params)
+
+
def version(args):
    """'version' operation: print the esptool version string."""
    print __version__
+
+#
+# End of operations functions
+#
+
+
def main():
    """Command-line entry point: build the argparse tree (one subparser per
    operation), then dispatch to the module-level function whose name
    matches the chosen subcommand."""
    parser = argparse.ArgumentParser(description='esptool.py v%s - ESP8266 ROM Bootloader Utility' % __version__, prog='esptool')

    parser.add_argument(
        '--port', '-p',
        help='Serial port device',
        default=os.environ.get('ESPTOOL_PORT', '/dev/ttyUSB0'))

    parser.add_argument(
        '--baud', '-b',
        help='Serial port baud rate used when flashing/reading',
        type=arg_auto_int,
        default=os.environ.get('ESPTOOL_BAUD', ESPROM.ESP_ROM_BAUD))

    subparsers = parser.add_subparsers(
        dest='operation',
        help='Run esptool {command} -h for additional help')

    parser_load_ram = subparsers.add_parser(
        'load_ram',
        help='Download an image to RAM and execute')
    parser_load_ram.add_argument('filename', help='Firmware image')

    parser_dump_mem = subparsers.add_parser(
        'dump_mem',
        help='Dump arbitrary memory to disk')
    parser_dump_mem.add_argument('address', help='Base address', type=arg_auto_int)
    parser_dump_mem.add_argument('size', help='Size of region to dump', type=arg_auto_int)
    parser_dump_mem.add_argument('filename', help='Name of binary dump')

    parser_read_mem = subparsers.add_parser(
        'read_mem',
        help='Read arbitrary memory location')
    parser_read_mem.add_argument('address', help='Address to read', type=arg_auto_int)

    parser_write_mem = subparsers.add_parser(
        'write_mem',
        help='Read-modify-write to arbitrary memory location')
    parser_write_mem.add_argument('address', help='Address to write', type=arg_auto_int)
    parser_write_mem.add_argument('value', help='Value', type=arg_auto_int)
    parser_write_mem.add_argument('mask', help='Mask of bits to write', type=arg_auto_int)

    def add_spi_flash_subparsers(parent, auto_detect=False):
        """ Add common parser arguments for SPI flash properties """
        parent.add_argument('--flash_freq', '-ff', help='SPI Flash frequency',
                            choices=['40m', '26m', '20m', '80m'],
                            default=os.environ.get('ESPTOOL_FF', '40m'))
        parent.add_argument('--flash_mode', '-fm', help='SPI Flash mode',
                            choices=['qio', 'qout', 'dio', 'dout'],
                            default=os.environ.get('ESPTOOL_FM', 'qio'))
        choices = ['4m', '2m', '8m', '16m', '32m', '16m-c1', '32m-c1', '32m-c2']
        default = '4m'
        if auto_detect:
            # 'detect' asks detect_flash_size() to query the chip
            default = 'detect'
            choices.insert(0, 'detect')
        parent.add_argument('--flash_size', '-fs', help='SPI Flash size in Mbit', type=str.lower,
                            choices=choices,
                            default=os.environ.get('ESPTOOL_FS', default))

    parser_write_flash = subparsers.add_parser(
        'write_flash',
        help='Write a binary blob to flash')
    parser_write_flash.add_argument('addr_filename', metavar='<address> <filename>', help='Address followed by binary filename, separated by space',
                                    action=AddrFilenamePairAction)
    add_spi_flash_subparsers(parser_write_flash, auto_detect=True)
    parser_write_flash.add_argument('--no-progress', '-p', help='Suppress progress output', action="store_true")
    parser_write_flash.add_argument('--verify', help='Verify just-written data (only necessary if very cautious, data is already CRCed', action='store_true')

    subparsers.add_parser(
        'run',
        help='Run application code in flash')

    parser_image_info = subparsers.add_parser(
        'image_info',
        help='Dump headers from an application image')
    parser_image_info.add_argument('filename', help='Image file to parse')

    parser_make_image = subparsers.add_parser(
        'make_image',
        help='Create an application image from binary files')
    parser_make_image.add_argument('output', help='Output image file')
    parser_make_image.add_argument('--segfile', '-f', action='append', help='Segment input file')
    parser_make_image.add_argument('--segaddr', '-a', action='append', help='Segment base address', type=arg_auto_int)
    parser_make_image.add_argument('--entrypoint', '-e', help='Address of entry point', type=arg_auto_int, default=0)

    parser_elf2image = subparsers.add_parser(
        'elf2image',
        help='Create an application image from ELF file')
    parser_elf2image.add_argument('input', help='Input ELF file')
    parser_elf2image.add_argument('--output', '-o', help='Output filename prefix (for version 1 image), or filename (for version 2 single image)', type=str)
    parser_elf2image.add_argument('--version', '-e', help='Output image version', choices=['1','2'], default='1')
    add_spi_flash_subparsers(parser_elf2image)

    subparsers.add_parser(
        'read_mac',
        help='Read MAC address from OTP ROM')

    subparsers.add_parser(
        'chip_id',
        help='Read Chip ID from OTP ROM')

    subparsers.add_parser(
        'flash_id',
        help='Read SPI flash manufacturer and device ID')

    parser_read_flash = subparsers.add_parser(
        'read_flash',
        help='Read SPI flash content')
    parser_read_flash.add_argument('address', help='Start address', type=arg_auto_int)
    parser_read_flash.add_argument('size', help='Size of region to dump', type=arg_auto_int)
    parser_read_flash.add_argument('filename', help='Name of binary dump')
    parser_read_flash.add_argument('--no-progress', '-p', help='Suppress progress output', action="store_true")

    parser_verify_flash = subparsers.add_parser(
        'verify_flash',
        help='Verify a binary blob against flash')
    parser_verify_flash.add_argument('addr_filename', help='Address and binary file to verify there, separated by space',
                                     action=AddrFilenamePairAction)
    parser_verify_flash.add_argument('--diff', '-d', help='Show differences',
                                     choices=['no', 'yes'], default='no')

    subparsers.add_parser(
        'erase_flash',
        help='Perform Chip Erase on SPI flash')

    subparsers.add_parser(
        'version', help='Print esptool version')

    # internal sanity check - every operation matches a module function of the same name
    for operation in subparsers.choices.keys():
        assert operation in globals(), "%s should be a module function" % operation

    args = parser.parse_args()

    print 'esptool.py v%s' % __version__

    # operation function can take 1 arg (args), 2 args (esp, arg)
    # or be a member function of the ESPROM class.

    operation_func = globals()[args.operation]
    # Inspect the operation's signature: a first parameter named 'esp'
    # means it needs a connected ESPROM instance.
    operation_args,_,_,_ = inspect.getargspec(operation_func)
    if operation_args[0] == 'esp':  # operation function takes an ESPROM connection object
        initial_baud = min(ESPROM.ESP_ROM_BAUD, args.baud)  # don't sync faster than the default baud rate
        esp = ESPROM(args.port, initial_baud)
        esp.connect()
        operation_func(esp, args)
    else:
        operation_func(args)
+
+
class AddrFilenamePairAction(argparse.Action):
    """ Custom parser class for the address/filename pairs passed as arguments """

    def __init__(self, option_strings, dest, nargs='+', **kwargs):
        super(AddrFilenamePairAction, self).__init__(option_strings, dest, nargs, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        # Walk the raw values two at a time: (address, filename).
        collected = []
        for idx in range(0, len(values), 2):
            try:
                address = int(values[idx], 0)
            except ValueError as e:
                raise argparse.ArgumentError(self, 'Address "%s" must be a number' % values[idx])
            try:
                argfile = open(values[idx + 1], 'rb')
            except IOError as e:
                raise argparse.ArgumentError(self, e)
            except IndexError:
                # Odd number of values: an address without a filename.
                raise argparse.ArgumentError(self, 'Must be pairs of an address and the binary filename to write there')
            collected.append((address, argfile))
        setattr(namespace, self.dest, collected)
+
+# This is "wrapped" stub_flasher.c, to be loaded using run_stub.
+_CESANTA_FLASHER_STUB = """\
+{"code_start": 1074790404, "code": "080000601C000060000000601000006031FCFF71FCFF\
+81FCFFC02000680332D218C020004807404074DCC48608005823C0200098081BA5A9239245005803\
+1B555903582337350129230B446604DFC6F3FF21EEFFC0200069020DF0000000010078480040004A\
+0040B449004012C1F0C921D911E901DD0209312020B4ED033C2C56C2073020B43C3C56420701F5FF\
+C000003C4C569206CD0EEADD860300202C4101F1FFC0000056A204C2DCF0C02DC0CC6CCAE2D1EAFF\
+0606002030F456D3FD86FBFF00002020F501E8FFC00000EC82D0CCC0C02EC0C73DEB2ADC46030020\
+2C4101E1FFC00000DC42C2DCF0C02DC056BCFEC602003C5C8601003C6C4600003C7C08312D0CD811\
+C821E80112C1100DF0000C180000140010400C0000607418000064180000801800008C1800008418\
+0000881800009018000018980040880F0040A80F0040349800404C4A0040740F0040800F0040980F\
+00400099004012C1E091F5FFC961CD0221EFFFE941F9310971D9519011C01A223902E2D1180C0222\
+6E1D21E4FF31E9FF2AF11A332D0F42630001EAFFC00000C030B43C2256A31621E1FF1A2228022030\
+B43C3256B31501ADFFC00000DD023C4256ED1431D6FF4D010C52D90E192E126E0101DDFFC0000021\
+D2FF32A101C020004802303420C0200039022C0201D7FFC00000463300000031CDFF1A333803D023\
+C03199FF27B31ADC7F31CBFF1A3328030198FFC0000056C20E2193FF2ADD060E000031C6FF1A3328\
+030191FFC0000056820DD2DD10460800000021BEFF1A2228029CE231BCFFC020F51A33290331BBFF\
+C02C411A332903C0F0F4222E1D22D204273D9332A3FFC02000280E27B3F721ABFF381E1A2242A400\
+01B5FFC00000381E2D0C42A40001B3FFC0000056120801B2FFC00000C02000280EC2DC0422D2FCC0\
+2000290E01ADFFC00000222E1D22D204226E1D281E22D204E7B204291E860000126E012198FF32A0\
+042A21C54C003198FF222E1D1A33380337B202C6D6FF2C02019FFFC000002191FF318CFF1A223A31\
+019CFFC00000218DFF1C031A22C549000C02060300003C528601003C624600003C72918BFF9A1108\
+71C861D851E841F83112C1200DF00010000068100000581000007010000074100000781000007C10\
+0000801000001C4B0040803C004091FDFF12C1E061F7FFC961E941F9310971D9519011C01A662906\
+21F3FFC2D1101A22390231F2FF0C0F1A33590331EAFFF26C1AED045C2247B3028636002D0C016DFF\
+C0000021E5FF41EAFF2A611A4469040622000021E4FF1A222802F0D2C0D7BE01DD0E31E0FF4D0D1A\
+3328033D0101E2FFC00000561209D03D2010212001DFFFC000004D0D2D0C3D01015DFFC0000041D5\
+FFDAFF1A444804D0648041D2FF1A4462640061D1FF106680622600673F1331D0FF10338028030C43\
+853A002642164613000041CAFF222C1A1A444804202FC047328006F6FF222C1A273F3861C2FF222C\
+1A1A6668066732B921BDFF3D0C1022800148FFC0000021BAFF1C031A2201BFFFC000000C02460300\
+5C3206020000005C424600005C5291B7FF9A110871C861D851E841F83112C1200DF0B0100000C010\
+0000D010000012C1E091FEFFC961D951E9410971F931CD039011C0ED02DD0431A1FF9C1422A06247\
+B302062D0021F4FF1A22490286010021F1FF1A223902219CFF2AF12D0F011FFFC00000461C0022D1\
+10011CFFC0000021E9FFFD0C1A222802C7B20621E6FF1A22F8022D0E3D014D0F0195FFC000008C52\
+22A063C6180000218BFF3D01102280F04F200111FFC00000AC7D22D1103D014D0F010DFFC0000021\
+D6FF32D110102280010EFFC0000021D3FF1C031A220185FFC00000FAEEF0CCC056ACF821CDFF317A\
+FF1A223A310105FFC0000021C9FF1C031A22017CFFC000002D0C91C8FF9A110871C861D851E841F8\
+3112C1200DF0000200600000001040020060FFFFFF0012C1E00C02290131FAFF21FAFF026107C961\
+C02000226300C02000C80320CC10564CFF21F5FFC02000380221F4FF20231029010C432D010163FF\
+C0000008712D0CC86112C1200DF00080FE3F8449004012C1D0C9A109B17CFC22C1110C13C51C0026\
+1202463000220111C24110B68202462B0031F5FF3022A02802A002002D011C03851A0066820A2801\
+32210105A6FF0607003C12C60500000010212032A01085180066A20F2221003811482105B3FF2241\
+10861A004C1206FDFF2D011C03C5160066B20E280138114821583185CFFF06F7FF005C1286F5FF00\
+10212032A01085140066A20D2221003811482105E1FF06EFFF0022A06146EDFF45F0FFC6EBFF0000\
+01D2FFC0000006E9FF000C022241100C1322C110C50F00220111060600000022C1100C13C50E0022\
+011132C2FA303074B6230206C8FF08B1C8A112C1300DF0000000000010404F484149007519031027\
+000000110040A8100040BC0F0040583F0040CC2E00401CE20040D83900408000004021F4FF12C1E0\
+C961C80221F2FF097129010C02D951C91101F4FFC0000001F3FFC00000AC2C22A3E801F2FFC00000\
+21EAFFC031412A233D0C01EFFFC000003D0222A00001EDFFC00000C1E4FF2D0C01E8FFC000002D01\
+32A004450400C5E7FFDD022D0C01E3FFC00000666D1F4B2131DCFF4600004B22C0200048023794F5\
+31D9FFC0200039023DF08601000001DCFFC000000871C861D85112C1200DF000000012C1F0026103\
+01EAFEC00000083112C1100DF000643B004012C1D0E98109B1C9A1D991F97129013911E2A0C001FA\
+FFC00000CD02E792F40C0DE2A0C0F2A0DB860D00000001F4FFC00000204220E71240F7921C226102\
+01EFFFC0000052A0DC482157120952A0DD571205460500004D0C3801DA234242001BDD3811379DC5\
+C6000000000C0DC2A0C001E3FFC00000C792F608B12D0DC8A1D891E881F87112C1300DF00000", "\
+entry": 1074792180, "num_params": 1, "params_start": 1074790400, "data": "FE0510\
+401A0610403B0610405A0610407A061040820610408C0610408C061040", "data_start": 10736\
+43520}
+"""
+
+if __name__ == '__main__':
+ try:
+ main()
+ except FatalError as e:
+ print '\nA fatal error occurred: %s' % e
+ sys.exit(2)
diff --git a/utils/generate_libraries.py b/utils/generate_libraries.py
new file mode 100644
index 0000000..45db25c
--- /dev/null
+++ b/utils/generate_libraries.py
@@ -0,0 +1,159 @@
+import os
+import git
+from git import Actor
+import optparse
+import fnmatch
+import glob
+import shutil
+import ntpath
+import json
+
+def make_cmake(lib_name, lib_file_name, include_path, dest):
+ print "LIB NAME " + lib_name
+ with open(dest + "/CMakeLists.txt", 'w') as f:
+ lines = [
+ "project(" + lib_name + ")\r\n"
+ "add_library(" + lib_name + " STATIC " + lib_file_name + ")\r\n",
+ "set_target_properties(" + lib_name +" PROPERTIES LINKER_LANGUAGE CXX)\r\n",
+ "target_include_directories(" + lib_name + " PUBLIC \"" + include_path + "\")\r\n",
+ ]
+ print "LINES : " + str(lines)
+ f.writelines(lines)
+ f.close()
+
+def copytree(src, dst, symlinks=False, ignore=None):
+ if not os.path.exists(dst):
+ os.makedirs(dst)
+ for item in os.listdir(src):
+ s = os.path.join(src, item)
+ d = os.path.join(dst, item)
+ if os.path.isdir(s):
+ copytree(s, d, symlinks, ignore)
+ else:
+ if not os.path.exists(d) or os.stat(s).st_mtime - os.stat(d).st_mtime > 1:
+ shutil.copy2(s, d)
+
+def path_leaf(path):
+ head, tail = ntpath.split(path)
+ return tail or ntpath.basename(head)
+
+def recursive_glob(treeroot, pattern):
+ results = []
+ for base, dirs, files in os.walk(treeroot):
+ goodfiles = fnmatch.filter(files, pattern)
+ results.extend(os.path.join(base, f) for f in goodfiles)
+ return results
+
+parser = optparse.OptionParser()
+parser.add_option('-c', '--clean', dest='clean', action="store_true", help='Whether to clean before building.', default=False)
+
+(options, args) = parser.parse_args()
+
+os.chdir("..")
+
+if not os.path.exists("build"):
+ os.mkdir("build")
+
+# out of source build!
+os.chdir("build")
+
+# configure os.system("cmake ..")
+os.system("cmake .. -DCODAL_HEADER_EXTRACTION:BOOL=TRUE")
+
+if options.clean:
+ os.system("make clean")
+
+# build
+os.system("make -j 10")
+
+with open('../codal.json') as data_file:
+ codal = json.load(data_file)
+
+#ntpath.basename(f)
+folders = [path_leaf(f) for f in glob.glob("../libraries/*/")]
+header_folders = [path_leaf(f) for f in glob.glob("./build/*/")]
+
+print folders
+print header_folders
+
+mapping = []
+
+#note for next time, need to copy all lib files to their appropriate build/lib place otherwise they get auto cleaned.
+
+valid_libs = []
+
+for folder in header_folders:
+ lib_file_name = "lib" + folder + ".a"
+ if not os.path.exists("./"+lib_file_name):
+ print "No library exists, skipping: " + lib_file_name
+ continue
+
+ shutil.copy("./" + lib_file_name, "./build/"+folder)
+ valid_libs = valid_libs + [folder]
+
+
+for folder in valid_libs:
+ lib_name = folder
+ lib_file_name = "lib" + folder + ".a"
+ folder_path = '../libraries/' + folder
+ header_folder = "./build/" + folder
+ header_ext = "includes"
+
+ with open(folder_path + "CMakeLists.txt") as cmake:
+
+ "target_link_libraries\((?:\s*(.+))+\s*\)"
+
+ for line in cmake.lines():
+ if "target_link_libraries" in line
+
+
+
+ # get the repo
+ try:
+ repo = git.Repo('../libraries/' + folder)
+ except:
+ print folder + " is not a valid git repository."
+ continue
+
+ active_branch = repo.active_branch.name
+
+ # check for any uncommitted changes
+ if len(repo.index.diff(None)) > 0 :
+ print folder + " has uncommitted changes, skipping."
+ continue;
+
+ branch_names = [b.name for b in repo.branches]
+
+ lib_branch_name = "lib_" + codal["target"]["processor"] + codal["target"]["device"]
+
+ # tag using above + version specified in target.json
+
+ # swap to an orphaned branch if none exists
+ if lib_branch_name not in branch_names:
+ repo.active_branch.checkout(orphan=lib_branch_name)
+
+ for f in glob.glob(folder_path + "/*/"):
+ shutil.rmtree(f)
+
+ files = [f for f in os.listdir('.') if os.path.isfile(f)]
+
+ for file in files:
+ os.remove(file)
+ else:
+ repo.active_branch.checkout(lib_branch_name)
+
+ repo.index.remove("*", r=True)
+
+ copytree(header_folder, folder_path + "/")
+
+ make_cmake(lib_name, lib_file_name, header_ext, folder_path + "/")
+
+ repo.index.add("*")
+
+ author = Actor("codal", "codal@example.com")
+
+ repo.index.commit("Library generated", author=author, committer=author)
+
+ #repo.git.checkout(active_branch)
+
+ #break
diff --git a/utils/merge_hex.py b/utils/merge_hex.py
new file mode 100644
index 0000000..c2e9916
--- /dev/null
+++ b/utils/merge_hex.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2015 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script will merge two hex files and write the output to a hex file.
+ USAGE: merge_hex.py input_file1 input_file2 output_file.
+"""
+
+from optparse import OptionParser
+import sys
+
+parser = OptionParser()
+
+#command line options
+parser.add_option("-o", "--output",
+ action="store",
+ type="string",
+ dest="output",
+ default="",
+ help="The relative path to the headers for the microbit-dal.")
+
+(options, args) = parser.parse_args()
+
+fail_color = ''
+
+# If colorama is present, set the fail color to red
+try:
+ from colorama import init, deinit, Fore
+ fail_color = Fore.RED
+except:
+ pass
+
+def fail(message):
+ print(fail_color + message)
+
+ # If we've included ANSI color in output, reset the output style
+ if fail_color:
+ print(Fore.RESET)
+ deinit()
+
+ return 1
+
+def convert_start_addr(hex_file):
+ if hex_file.start_addr and 'CS' in hex_file.start_addr:
+ start_addr = {'EIP': (hex_file.start_addr['CS'] * 16) + hex_file.start_addr['IP']}
+ hex_file.start_addr = start_addr
+
+def main(options, args):
+ # If using ANSI coloring is available, initialize colorama
+ if fail_color:
+ init()
+
+ # Import intelhex if avaialable, otherwise fail
+ try:
+ from intelhex import IntelHex
+ except:
+ return fail('error: You do not have \'intelhex\' installed. Please run \'pip install intelhex\' then retry.')
+
+ if len(options.output) is 0:
+ print "No output file specified"
+ exit(1)
+
+ if len(args) < 2:
+ return fail('Only one file was provided to merge.')
+ exit(0)
+
+ # Get the two hex files, merge them, and save the result
+ orig = IntelHex(args[0])
+ convert_start_addr(orig)
+
+ args = args[1:]
+
+ for arg in args:
+ other = IntelHex(arg)
+ convert_start_addr(other)
+ orig.merge(other, overlap='replace')
+
+ orig.write_hex_file(options.output)
+
+if __name__ == '__main__':
+ sys.exit(main(options,args))
diff --git a/utils/python/__init__.py b/utils/python/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/utils/python/__init__.py
diff --git a/utils/python/codal_utils.py b/utils/python/codal_utils.py
new file mode 100644
index 0000000..68cb017
--- /dev/null
+++ b/utils/python/codal_utils.py
@@ -0,0 +1,186 @@
+import os
+import sys
+import optparse
+import platform
+import json
+import shutil
+import re
+
+import os, re, json, xml.etree.ElementTree
+from optparse import OptionParser
+
+
+def system(cmd):
+ if os.system(cmd) != 0:
+ sys.exit(1)
+
+def build(clean, verbose = False):
+ if platform.system() == "Windows":
+ # configure
+ system("cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -G \"Ninja\"")
+
+ # build
+ system("ninja")
+ else:
+ # configure
+ system("cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -G \"Unix Makefiles\"")
+
+ if clean:
+ system("make clean")
+
+ # build
+ if verbose:
+ system("make -j 10 VERBOSE=1")
+ else:
+ system("make -j 10")
+
+def read_json(fn):
+ json_file = ""
+ with open(fn) as f:
+ json_file = f.read()
+ return json.loads(json_file)
+
+def checkgit():
+ stat = os.popen('git status --porcelain').read().strip()
+ if stat != "":
+ print("Missing checkin in", os.getcwd(), "\n" + stat)
+ exit(1)
+
+def read_config():
+ codal = read_json("codal.json")
+ targetdir = codal['target']['name']
+ target = read_json("libraries/" + targetdir + "/target.json")
+ return (codal, targetdir, target)
+
+def update(allow_detached=False):
+ (codal, targetdir, target) = read_config()
+ dirname = os.getcwd()
+ for ln in target['libraries']:
+ os.chdir(dirname + "/libraries/" + ln['name'])
+ system("git checkout " + ln['branch'])
+ system("git pull")
+ os.chdir(dirname + "/libraries/" + targetdir)
+ if ("HEAD detached" in os.popen('git branch').read().strip() and
+ allow_detached == False):
+ system("git checkout master")
+ system("git pull")
+ os.chdir(dirname)
+
+def revision(rev):
+ (codal, targetdir, target) = read_config()
+ dirname = os.getcwd()
+ os.chdir("libraries/" + targetdir)
+ system("git checkout " + rev)
+ os.chdir(dirname)
+ update(True)
+
+def printstatus():
+ print("\n***%s" % os.getcwd())
+ system("git status -s")
+ system("git rev-parse HEAD")
+ system("git branch")
+
+def status():
+ (codal, targetdir, target) = read_config()
+ dirname = os.getcwd()
+ for ln in target['libraries']:
+ os.chdir(dirname + "/libraries/" + ln['name'])
+ printstatus()
+ os.chdir(dirname + "/libraries/" + targetdir)
+ printstatus()
+ os.chdir(dirname)
+ printstatus()
+
+def get_next_version(options):
+ if options.version:
+ return options.version
+ log = os.popen('git log -n 100').read().strip()
+ m = re.search('Snapshot v(\d+)\.(\d+)\.(\d+)(-([\w\-]+).(\d+))?', log)
+ if m is None:
+ print("Cannot determine next version from git log")
+ exit(1)
+ v0 = int(m.group(1))
+ v1 = int(m.group(2))
+ v2 = int(m.group(3))
+ vB = -1
+ branchName = os.popen('git rev-parse --abbrev-ref HEAD').read().strip()
+ if not options.branch and branchName != "master":
+ print("On non-master branch use -l -b")
+ exit(1)
+ suff = ""
+ if options.branch:
+ if m.group(4) and branchName == m.group(5):
+ vB = int(m.group(6))
+ suff = "-%s.%d" % (branchName, vB + 1)
+ elif options.update_major:
+ v0 += 1
+ v1 = 0
+ v2 = 0
+ elif options.update_minor:
+ v1 += 1
+ v2 = 0
+ else:
+ v2 += 1
+ return "v%d.%d.%d%s" % (v0, v1, v2, suff)
+
+def lock(options):
+ (codal, targetdir, target) = read_config()
+ dirname = os.getcwd()
+ for ln in target['libraries']:
+ os.chdir(dirname + "/libraries/" + ln['name'])
+ checkgit()
+ stat = os.popen('git status --porcelain -b').read().strip()
+ if "ahead" in stat:
+ print("Missing push in", os.getcwd())
+ exit(1)
+ sha = os.popen('git rev-parse HEAD').read().strip()
+ ln['branch'] = sha
+ print(ln['name'], sha)
+ os.chdir(dirname + "/libraries/" + targetdir)
+ ver = get_next_version(options)
+ print("Creating snaphot", ver)
+ system("git checkout target-locked.json")
+ checkgit()
+ target["snapshot_version"] = ver
+ with open("target-locked.json", "w") as f:
+ f.write(json.dumps(target, indent=4, sort_keys=True))
+ system("git commit -am \"Snapshot %s\"" % ver) # must match get_next_version() regex
+ sha = os.popen('git rev-parse HEAD').read().strip()
+ system("git tag %s" % ver)
+ system("git pull")
+ system("git push")
+ system("git push --tags")
+ os.chdir(dirname)
+ print("\nNew snapshot: %s [%s]" % (ver, sha))
+
+def delete_build_folder(in_folder = True):
+ if in_folder:
+ os.chdir("..")
+
+ shutil.rmtree('./build')
+ os.mkdir("./build")
+
+ if in_folder:
+ os.chdir("./build")
+
+def generate_docs():
+ from doc_gen.doxygen_extractor import DoxygenExtractor
+ from doc_gen.md_converter import MarkdownConverter
+ from doc_gen.system_utils import SystemUtils
+ from doc_gen.doc_gen import generate_mkdocs
+
+ os.chdir("..")
+ (codal, targetdir, target) = read_config()
+
+ lib_dir = os.getcwd() + "/libraries/"
+
+ libraries = [lib_dir + targetdir]
+
+ for l in target["libraries"]:
+ libraries = libraries + [ lib_dir + l["name"]]
+
+ os.chdir(lib_dir + targetdir)
+
+ generate_mkdocs(libraries)
+
+
diff --git a/utils/python/doc_gen/__init__.py b/utils/python/doc_gen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/utils/python/doc_gen/__init__.py
diff --git a/utils/python/doc_gen/doc_gen.py b/utils/python/doc_gen/doc_gen.py
new file mode 100644
index 0000000..97d6e65
--- /dev/null
+++ b/utils/python/doc_gen/doc_gen.py
@@ -0,0 +1,93 @@
+import os, re, json, xml.etree.ElementTree
+from optparse import OptionParser
+
+from doxygen_extractor import DoxygenExtractor
+from md_converter import MarkdownConverter
+from system_utils import SystemUtils
+
+member_func_filter = ["idleCallback", "systemCallback", "~"]
+
+filters = True
+
+utils = SystemUtils()
+
+###
+# the trigger for generating our documentation
+###
+def generate_mkdocs(header_paths, type_colour = "#a71d5d", function_name_colour = "#795da3"):
+
+ global member_func_filter
+ doxygen = DoxygenExtractor(os.path.abspath("."), header_paths)
+ markdown = MarkdownConverter(type_colour, function_name_colour, separate_defaults = True, display_defaults = False)
+
+ doxygen.generate_doxygen()
+ #utils.validate_version(doxygen.working_dir, header_paths, "./docs/archive")
+
+ file_names = utils.find_files('docs','*.md')
+ section_kind = ["public-func"]
+ meta_data_regex = re.compile( r'\[comment\]: <> \((.*?)\)', re.MULTILINE | re.DOTALL )
+
+ for filename in file_names:
+ print(filename)
+
+ read_lines = utils.read(filename)
+
+ file_lines = markdown.clean(read_lines, meta_data_regex)
+
+ utils.write(filename, file_lines)
+
+ previous = ""
+
+ for line_number, line in enumerate(file_lines, 1):
+
+ result = re.findall(meta_data_regex,line)
+
+ if len(result) is not 0:
+
+ meta_data = json.loads(result[0])
+
+ if previous is not "" and "end" in meta_data.keys() and meta_data['end'] == previous:
+ previous = ""
+ continue
+ elif previous is "":
+ try:
+ previous = meta_data['className']
+ except:
+ raise Exception('There isn\'t a match for the meta_data '+ meta_data)
+ else:
+ raise Exception('There isn\'t a match for the meta_data \''+ previous + "'")
+
+ local_filter = member_func_filter
+
+ if "filter" in meta_data:
+ for member_function in meta_data["filter"]:
+ local_filter = local_filter + [ str(member_function) ]
+
+ print "Custom filter applied: " + str(member_func_filter)
+
+ class_xml_files = list(utils.find_files("./xml","*class*"+meta_data['className'] + ".xml"))
+
+ print class_xml_files
+
+ if len(class_xml_files) == 0:
+ raise Exception("Invalid classname: " + meta_data['className'])
+ elif len(class_xml_files) > 1:
+ class_xml_files
+
+ doxygen_class_xml = xml.etree.ElementTree.parse(class_xml_files[0]).getroot()
+
+ member_functions = []
+
+ for section_def in doxygen_class_xml.iter('sectiondef'):
+ if section_def.attrib['kind'] in section_kind:
+ for member_func in section_def.iter('memberdef'):
+ new_member = doxygen.extract_member_function(member_func, local_filter, filter= filters)
+ if new_member is not None:
+ member_functions.append(new_member)
+
+ before = file_lines[:line_number]
+ after = file_lines[line_number:]
+
+ between = markdown.gen_member_func_doc(meta_data['className'], member_functions)
+
+ utils.write(filename, before + between + after)
diff --git a/utils/python/doc_gen/doxygen_extractor.py b/utils/python/doc_gen/doxygen_extractor.py
new file mode 100644
index 0000000..9207c9d
--- /dev/null
+++ b/utils/python/doc_gen/doxygen_extractor.py
@@ -0,0 +1,242 @@
+import os
+from system_utils import SystemUtils
+
+class DoxygenExtractor:
+
+ md_special_chars =[
+ {
+ "md_char": "*",
+ "replacement": "&#42;"
+ },
+ {
+ "md_char": "#",
+ "replacement": "&#35;"
+ },
+ {
+ "md_char": "`",
+ "replacement": "&#183;"
+ }
+ ]
+
+ #constructor
+ def __init__(self, root, header_paths, working_dir = "./temp", doxygen_xml_dest = "./xml"):
+ os.chdir(root)
+ self.header_paths = header_paths
+ self.utils = SystemUtils()
+ self.doxygen_xml_dest = doxygen_xml_dest
+ self.working_dir = working_dir
+
+ ###
+ # this function copies headers recursively from a source director to a destination
+ # directory.
+ ###
+ def get_headers(self, from_dir, to_dir):
+ self.utils.copy_files(from_dir, to_dir, "*.h")
+
+ ###
+ # Strips out reserved characters used in markdown notation, and replaces them
+ # with html character codes.
+ #
+ # @param text the text to strip and replace the md special characters
+ #
+ # @return the stripped text.
+ ###
+ def escape_md_chars(self, text):
+ for char in self.md_special_chars:
+ text = text.replace(char['md_char'], "\\" + char['md_char'])
+ return text
+
+
+ ###
+ # this function extracts data from an element tag ignoring the tag 'ref', but
+ # obtains the textual data it has inside the ref tag.
+ #
+ # @param element the element to process
+ #
+ # @return a list of extracted strings.
+ ###
+ def extract_ignoring_refs(self, element):
+ list = []
+
+ if element.text is not None:
+ list.append(element.text)
+
+ for ref in element.iter(tag="ref"):
+ list.append(ref.text)
+
+ return list
+
+ ###
+ # this function extracts data from an element tag including all sub elements
+ # (recursive)
+ #
+ # @param element the element to process
+ #
+ # @return a list of extracted strings.
+ ###
+ def extract_with_subelements(self, element):
+ list = []
+
+ list.append(element.text or "")
+
+ #if element.text is not None:
+ #list.append(element.text)
+
+ for subelement in element:
+ if subelement is not None:
+ list = list + self.extract_with_subelements(subelement)
+
+ list.append(element.tail or "")
+
+ return list
+
+ ###
+ # this function was at one point intended to fetch a value of a default parameter
+ # it is now only used to fetch the default parameters' name.
+ #
+ # @param document_root the root of the entire document
+ # @param element the element containing the default parameter
+ #
+ # @return a dictionary containing:
+ # {
+ # 'name':'',
+ # 'value':''
+ # }
+ #
+ # @note this would be more useful if it return the value, it currently does not.
+ ###
+ def extract_default(self, element):
+ ref = element.find("ref")
+ return {'name':' '.join(element.itertext()), 'value':''}
+
+ ###
+ # extracts a member function form the xml document
+ #
+ # @param root the document root
+ # @param xml_element the member function xml element.
+ #
+ # @return a function dictionary:
+ # {
+ # 'short_name':"",
+ # 'name':"",
+ # 'return_type':"",
+ # 'params':[],
+ # 'description':[],
+ # 'returns':"",
+ # 'notes':"",
+ # 'examples':""
+ # }
+ ###
+ def extract_member_function(self, xml_element, function_filter = [], filter = True):
+
+ function = {
+ 'short_name':"",
+ 'name':"",
+ 'return_type':"",
+ 'params':[],
+ 'description':[],
+ 'returns':"",
+ 'notes':"",
+ 'examples':""
+ }
+
+ function['name'] = xml_element.find('definition').text
+ function['short_name'] = xml_element.find('name').text
+
+ if filter and any(filtered_func in function['short_name'] for filtered_func in function_filter):
+ print "Filtered out: " + function['short_name']
+ return
+
+ print "Generating documentation for: " + function['short_name']
+
+ if xml_element.find('type') is not None:
+ function['return_type'] = self.escape_md_chars(' '.join(self.extract_ignoring_refs(xml_element.find('type'))))
+
+ #extract our parameters for this member function
+ for parameter in xml_element.iter('param'):
+
+ type = ""
+ name = ""
+
+ if parameter.find('type') is not None:
+ type = self.escape_md_chars(' '.join(parameter.find('type').itertext()))
+
+ if parameter.find('declname') is not None:
+ name = ' '.join(self.extract_ignoring_refs(parameter.find('declname')))
+
+ param_object = {
+ 'type': type,
+ 'name': name,
+ 'default':{
+ 'name':"",
+ 'value':""
+ }
+ }
+
+ if parameter.find('defval') is not None:
+ extracted = self.extract_default(parameter.find('defval'))
+ param_object['default']['name'] = extracted['name']
+ param_object['default']['value'] = extracted['value']
+
+ function['params'].append(param_object)
+
+
+ detailed_description = xml_element.find('detaileddescription')
+
+ if len(detailed_description.findall("para")) is not 0:
+ for para in detailed_description.findall("para"):
+ if len(para.findall("programlisting")) is 0 and len(para.findall("simplesect")) is 0:
+ function['description'] = function['description'] + self.extract_with_subelements(para)
+
+ #para indicates a new paragraph - we should treat it as such... append \n!
+ function['description'] = function['description'] + ["\n\n"]
+
+ if len(detailed_description.findall("para/simplesect[@kind='return']/para")) is not 0:
+ return_section = detailed_description.findall("para/simplesect[@kind='return']/para")[0]
+ function['returns'] = ' '.join(return_section.itertext())
+
+ if len(detailed_description.findall("para/simplesect[@kind='note']/para")) is not 0:
+ return_section = detailed_description.findall("para/simplesect[@kind='note']/para")[0]
+ function['notes'] = ' '.join(return_section.itertext())
+
+ examples = detailed_description.find('para/programlisting')
+
+ if examples is not None:
+ function['examples'] = ''.join([('' if index is 0 else ' ')+word for index, word in enumerate(examples.itertext(),1) ])
+
+ param_list = detailed_description.findall('para/parameterlist')
+
+ if len(param_list) is not 0:
+ for parameter_desc in param_list[0].findall('parameteritem'):
+
+ param_descriptor = {
+ 'name':'',
+ 'description':''
+ }
+
+ param_name = parameter_desc.findall('parameternamelist/parametername')
+ additional = parameter_desc.findall('parameterdescription/para')
+
+ if len(param_name) is not 0:
+ param_descriptor['name'] = param_name[0].text
+
+ if len(additional) is not 0:
+ param_descriptor['description'] = ' '.join(additional[0].itertext())
+
+ for descriptor in function['params']:
+ if param_descriptor['name'] in descriptor['name']:
+ descriptor['description'] = param_descriptor['description']
+
+ return function
+
+ def generate_doxygen(self):
+ self.utils.mk_dir(self.working_dir)
+ self.utils.clean_dir(self.working_dir)
+
+ for path in self.header_paths:
+ self.get_headers(path, self.working_dir)
+
+ if os.path.exists(self.doxygen_xml_dest):
+ self.utils.clean_dir(self.doxygen_xml_dest)
+
+ os.system('doxygen doxy-config.cfg')
diff --git a/utils/python/doc_gen/md_converter.py b/utils/python/doc_gen/md_converter.py
new file mode 100644
index 0000000..ff3a1eb
--- /dev/null
+++ b/utils/python/doc_gen/md_converter.py
@@ -0,0 +1,242 @@
+import re, json, copy
+
+class MarkdownConverter:
+
+ #constructor
+ def __init__(self, type_colour, function_name_colour, separate_defaults = True, display_defaults = False):
+ self.type_colour = type_colour
+ self.function_name_colour = function_name_colour
+ self.separate_defaults = separate_defaults
+ self.display_defaults = display_defaults
+
+ ###
+ # wraps text in a div element with a given color
+ #
+ # @param text the text to wrap
+ # @param color the desired text color
+ #
+ # @return a string representing the now wrapped text
+ ###
+ def wrap_text(self, text, color):
+ return "<div style='color:" + color + "; display:inline-block'>" + text + "</div>"
+
+ ###
+ # removes previously generated markdown from the file.
+ #
+ # @param file_lines a list of lines representing a file.
+ # @param regexp the regular expression that dictates a match.
+ ###
+ def clean(self, file_lines, regexp):
+ start = 0
+ end = 0
+
+ for line_number, line in enumerate(file_lines, 1):
+ result = re.findall(regexp,line)
+
+ if len(result) is not 0:
+ meta_data = json.loads(result[0])
+
+ keys = meta_data.keys()
+
+ #classname indicates the beginning of a meta_data section
+ if 'className' in keys:
+ start = line_number
+
+ #end indicated the end of a meta_data section
+ if 'end' in keys:
+ end = line_number - 1
+
+ return file_lines[:start] + file_lines[end:]
+
+ ###
+ # given a member function, this function derives the alternative versions
+ #
+ # @param member_func the member function that is required to be derrived
+ #
+ # @return a list of function dictionaries that contain the alternatives, based on the original
+ ###
+ def derive_functions(self, member_func):
+ member_functions_derived = []
+
+ if len(member_func['params']) is not 0:
+
+ param_index = 0
+
+ for param in member_func['params']:
+ if len(param['default']['name']) is 0:
+ param_index = param_index + 1
+ else:
+ break
+
+ bare_function = {
+ 'short_name' : member_func['short_name'],
+ 'name' : member_func['name'],
+ 'params' : [],
+ 'description' : member_func['description'],
+ 'returns' : member_func['returns'],
+ 'notes' : member_func['notes'],
+ 'examples' : member_func['examples'],
+ 'return_type' : member_func['return_type'],
+ }
+
+ for i in range(0, param_index):
+ bare_function['params'] = bare_function['params'] + [member_func['params'][i]]
+
+ member_functions_derived = member_functions_derived + [bare_function]
+
+ current = copy.copy(bare_function)
+
+ #lists retain references, so we have to copy objects to maintain separation
+ for remainder in range(param_index, len(member_func['params'])):
+ current['params'] = current['params'] + [member_func['params'][remainder]]
+ member_functions_derived = member_functions_derived + [current]
+ current = copy.copy(current)
+
+ else:
+ member_functions_derived = member_functions_derived + [member_func]
+
+ return member_functions_derived
+
+ ###
+ # given a parameter, this function generates text
+ #
+ # @param param the parameter that needs a textual translation
+ #
+ # @return a string representing the parameter
+ ###
+ def gen_param_text(self, param):
+ text = "\n> "
+
+ if param['type'] is not None:
+ text = text + " " + self.wrap_text(param['type'], self.type_colour)
+
+ text = text + " " + param['name']
+
+ if self.display_defaults:
+ if len(param['default']['name']) is not 0:
+ text = text + " `= " + param['default']['name']
+
+ if len(param['default']['value']) is not 0:
+ text = text + param['default']['value']
+
+ text = text + "`"
+
+ if 'description' in param.keys():
+ text = text +" - " + param['description']
+
+ text = text.encode('ascii','ignore')
+
+ return text
+
+ ###
+ # given a list of member functions, this function returns a list of new lines for the
+ # file currently being processed.
+ #
+ # @param class_name the name of the current class (found in the meta data)
+ # @param member_functions the list of member_functions extracted from XML
+ #
+ # @return a list containing the new lines to be inserted into the current file.
+ ###
+ def gen_member_func_doc(self, class_name, member_functions):
+
+ # this is what a member function dictionary contains.
+ # function = {
+ # 'short_name':"",
+ # 'name':"",
+ # 'return_type':"",
+ # 'params':[],
+ # 'description':[],
+ # 'returns':"",
+ # 'notes':"",
+ # 'examples':"",
+ # 'default':None
+ # }
+
+ lines = []
+
+ for index, member_func in enumerate(member_functions,0):
+
+ member_functions_derived = []
+
+ if index is 0 or member_func['short_name'] != member_functions[index - 1]['short_name']:
+ if class_name == member_func["short_name"]:
+ lines.append("##Constructor\n")
+ else:
+ lines.append("##" + member_func["short_name"]+"\n")
+
+ #we want to clearly separate our different level of functions in the DAL
+ #so we present methods with defaults as overloads.
+ if self.separate_defaults is True:
+ member_functions_derived = member_functions_derived + self.derive_functions(member_func)
+
+ for derived_func in member_functions_derived:
+ #---- short name for urls ----
+ lines.append("<br/>\n")
+
+ short_name = ""
+
+ if len(derived_func["return_type"]) is not 0:
+ short_name = "####" + self.wrap_text(derived_func["return_type"],self.type_colour) + " " +self.wrap_text(derived_func["short_name"], self.function_name_colour) + "("
+ else:
+ short_name = "####" + derived_func["short_name"] + "("
+
+ last_param = None
+
+ if len(derived_func['params']) is not 0:
+ last_param = derived_func['params'][-1]
+
+ #generate parameters for the name of this function
+ for param in derived_func['params']:
+ text = ""
+
+ if param['type'] is not None:
+ text = text + " " + self.wrap_text(param['type'], self.type_colour)
+
+ text = text + " " + param['name']
+
+ if param is not last_param:
+ short_name = short_name + text +", "
+ else:
+ short_name = short_name + text
+
+ lines.append(short_name + ")\n")
+ #-----------------------------
+
+ #---- description ----
+ if len(derived_func['description']) is not 0:
+ lines.append("#####Description\n")
+ lines.append(' '.join(derived_func['description']) + "\n")
+ #-----------------------------
+
+ #---- parameters ----
+ if len(derived_func['params']) is not 0:
+ lines.append("#####Parameters\n")
+
+ for param in derived_func['params']:
+ lines.append(self.gen_param_text(param) + "\n")
+ #-----------------------------
+
+ #---- returns ----
+ if len(derived_func['returns']) is not 0:
+ lines.append("#####Returns\n")
+ lines.append(derived_func['returns'] + "\n")
+ #-----------------------------
+
+ #---- examples ----
+ if len(derived_func['examples']) is not 0:
+ lines.append("#####Example\n")
+ lines.append("```cpp\n")
+ lines.append(derived_func['examples'])
+ lines.append("```\n")
+ #-----------------------------
+
+ #---- notes ----
+ if len(derived_func['notes']) is not 0:
+ lines.append("\n!!! note\n")
+ lines.append(" " + derived_func['notes'].replace('\n','\n '))
+ lines.append('\n\n')
+ #-----------------------------
+
+ lines.append("____\n")
+
+ return lines
diff --git a/utils/python/doc_gen/system_utils.py b/utils/python/doc_gen/system_utils.py
new file mode 100644
index 0000000..326eb21
--- /dev/null
+++ b/utils/python/doc_gen/system_utils.py
@@ -0,0 +1,137 @@
+import json, shutil, zipfile, urllib, os, fnmatch
+
+###
+# Helper methods for file, directory and archive manipulation used by the
+# documentation generator. Written for Python 2 (print statements,
+# urllib.urlretrieve).
+###
+class SystemUtils:
+
+    # module folders skipped by find_files (vendored/third-party code)
+    folder_filter = ["ble", "ble-nrf51822", "mbed-classic","nrf51-sdk"]
+
+    ###
+    # reads a file and returns a list of lines
+    #
+    # @param path the path where the file is located
+    # @param plain when True, return the whole file as one string instead
+    #        of a list of lines
+    #
+    # @return the list of lines representing the file.
+    ###
+    def read(self, path, plain=False):
+        if plain:
+            return self.__read_plain(path)
+        print "Opening: " + path + " \n"
+        with open(path, 'r') as file:
+            return file.readlines()
+
+    ###
+    # reads a file and returns its entire content as a single string.
+    #
+    # @param path the path where the file is located
+    #
+    # @return the file content as one string.
+    ###
+    def __read_plain(self, path):
+        print "Opening: " + path + " \n"
+        with open(path, 'r') as file:
+            return file.read()
+
+    ###
+    # writes a given set of lines to a path.
+    #
+    # @param path the path where the file is located
+    # @param lines the lines to write
+    ###
+    def write(self, path, lines):
+        print "Writing to: " + path + " \n"
+        with open(path, 'w') as file:
+            file.writelines(lines)
+
+    ###
+    # recursively yields the paths of files under 'directory' whose
+    # basename matches the glob 'pattern', skipping any path that contains
+    # a folder_filter entry.
+    ###
+    #http://stackoverflow.com/questions/2186525/use-a-glob-to-find-files-recursively-in-python
+    def find_files(self, directory, pattern):
+
+        # NOTE(review): leftover debug print; consider removing.
+        print("DIR:")
+        for root, dirs, files in os.walk(directory):
+            # skip filtered module folders anywhere in the current path
+            if any(dir in root for dir in self.folder_filter):
+                continue
+
+            for basename in files:
+                if fnmatch.fnmatch(basename, pattern):
+                    filename = os.path.join(root, basename)
+                    yield filename
+
+    ###
+    # removes files from a folder.
+    # The folder itself is kept; its files and subdirectories are deleted.
+    ###
+    def clean_dir(self, dir):
+        for root, dirs, files in os.walk(dir):
+            for f in files:
+                os.unlink(os.path.join(root, f))
+            for d in dirs:
+                shutil.rmtree(os.path.join(root, d))
+
+    ###
+    # copies files matching 'pattern' from one location to another.
+    ###
+    def copy_files(self, from_dir, to_dir, pattern):
+
+
+        files = self.find_files(from_dir, pattern)
+
+        # NOTE(review): leftover debug prints; consider removing.
+        print("FILES!!!! ")
+        for file in files:
+            print file
+            shutil.copy(file,to_dir)
+
+    ###
+    # creates the directory at 'path' if it does not already exist
+    # (including missing parents).
+    ###
+    def mk_dir(self, path):
+        if not os.path.exists(path):
+            os.makedirs(path)
+
+    ###
+    # recursively copies the tree at 'src' into 'dst', creating missing
+    # directories; a file is only copied when the destination is absent or
+    # the source is more than one second newer.
+    # 'symlinks' and 'ignore' mirror shutil.copytree's signature but are
+    # only forwarded to the recursive call, never acted upon here.
+    ###
+    def copytree(self, src, dst, symlinks=False, ignore=None):
+        if not os.path.exists(dst):
+            os.makedirs(dst)
+        for item in os.listdir(src):
+            s = os.path.join(src, item)
+            d = os.path.join(dst, item)
+            if os.path.isdir(s):
+                self.copytree(s, d, symlinks, ignore)
+            else:
+                if not os.path.exists(d) or os.stat(s).st_mtime - os.stat(d).st_mtime > 1:
+                    shutil.copy2(s, d)
+
+    ###
+    # injects a banner into js/base.js under 'extract_location' warning the
+    # reader that they are viewing documentation for 'version_string'.
+    # NOTE(review): this overwrites the first line of base.js -- assumes
+    # that line is expendable; confirm against the generated site.
+    ###
+    def __add_version_info(self,version_string, extract_location):
+        content_path = extract_location + "js/base.js"
+        lines = self.read(content_path)
+        html_string = '<div class=\'admonition warning\' style=\'margin-top:30px;\'><p class=\'admonition-title\'>Warning</p><p>You are viewing documentation for <b>' + version_string + '</b></p></div>'
+        lines[0]= '$(document).ready(function() { $(\'div[role="main"]\').prepend("' + html_string + '") });'
+        self.write(content_path, lines)
+
+    ###
+    # checks that every module in 'module_paths' declares the same version
+    # in its module.json; when that version differs from the one recorded
+    # in ./mkdocs.yml, archives the currently published site (downloaded
+    # from the gh-pages branch) under 'extract_location' and rewrites
+    # mkdocs.yml with the new version.
+    #
+    # @param working_dir scratch directory for the downloaded zip
+    # @param module_paths directories each containing a module.json
+    # @param extract_location folder receiving the historic site copy
+    #
+    # @raise Exception when the module versions do not all match.
+    ###
+    def validate_version(self, working_dir, module_paths, extract_location):
+        import yaml
+
+        module_string = "/module.json"
+        mkdocs_yml = yaml.load(self.read("./mkdocs.yml", plain=True))
+
+        module_strings = []
+
+        for current_path in module_paths:
+            module_strings = module_strings + [json.loads(self.read(current_path + module_string, plain=True))["version"]]
+
+        # all list entries are equal iff the shifted-list comparison holds
+        if module_strings[1:] != module_strings[:-1]:
+            raise Exception("Version mismatch exception! microbit-dal and microbit are not compatible versions.")
+
+        module_string = "v" + str(module_strings[0])
+
+        if mkdocs_yml["versioning"]["runtime"] != module_string:
+            #capture old site, save in docs/historic/versionNumber
+            zip_dest = working_dir + "/" + str(mkdocs_yml["versioning"]["runtime"]) + ".zip"
+
+            extract_folder = extract_location+ "/" + mkdocs_yml["versioning"]["runtime"]+"/"
+
+            # urllib.urlretrieve is Python 2 only (urllib.request in Python 3)
+            urllib.urlretrieve("https://github.com/lancaster-university/microbit-docs/archive/gh-pages.zip", zip_dest)
+
+            zip_ref = zipfile.ZipFile(zip_dest)
+
+            #obtain the archive prepended name
+            archive_name = working_dir + "/" + zip_ref.namelist()[0]
+
+            zip_ref.extractall(working_dir)
+            zip_ref.close()
+
+            self.copytree(archive_name, extract_folder)
+
+            self.__add_version_info(mkdocs_yml["versioning"]["runtime"], extract_folder)
+
+            # empties the extracted archive folder (the folder itself remains)
+            self.clean_dir(archive_name)
+
+            mkdocs_yml["versioning"]["runtime"] = module_string
+
+            with open("./mkdocs.yml", "w") as f:
+                yaml.dump(mkdocs_yml, f, default_flow_style=False )
diff --git a/utils/targets.json b/utils/targets.json
new file mode 100644
index 0000000..3b485db
--- /dev/null
+++ b/utils/targets.json
@@ -0,0 +1,105 @@
+[
+ {
+ "name":"codal-arduino-uno",
+ "info":"This target specifies the arduino uno which is driven by an atmega328p.",
+ "device_url":"https://store.arduino.cc/arduino-uno-rev3",
+ "url":"https://github.com/lancaster-university/codal-arduino-uno",
+ "branch":"master",
+ "type":"git"
+ },
+ {
+ "name":"codal-circuit-playground",
+ "info":"This target specifies the circuit playground which is driven by a SAMD21.",
+ "device_url":"https://www.adafruit.com/product/3333",
+ "url":"https://github.com/lancaster-university/codal-circuit-playground",
+ "branch":"master",
+ "type":"git"
+ },
+ {
+ "name":"codal-microbit",
+ "info":"This target specifies the microbit, which uses the nordic NRF51822.",
+ "device_url":"https://microbit.org",
+ "url":"https://github.com/lancaster-university/codal-microbit",
+ "test_ignore":true,
+ "branch":"codal-microbit-mbed",
+ "type":"git"
+ },
+ {
+ "name":"codal-huzzah",
+ "info":"This target specifies the HUZZAH which is driven by a ESP8266.",
+ "device_url":"https://www.adafruit.com/product/3405",
+ "url":"https://github.com/lancaster-university/codal-huzzah",
+ "test_ignore":true,
+ "branch":"master",
+ "type":"git"
+ },
+ {
+ "name":"codal-brainpad",
+ "info":"This target specifies the BRAINPAD which is driven by a STM32f.",
+ "device_url":"https://brainpad.com",
+ "url":"https://github.com/lancaster-university/codal-brainpad",
+ "branch":"master",
+ "type":"git"
+ },
+ {
+ "name":"codal-microbit-next",
+ "info":"version 1.4 revision of the BBC micro:bit.",
+ "device_url":"https://www.microbit.org",
+ "url":"https://github.com/microbit-foundation/codal-microbit-next",
+ "test_ignore":true,
+ "branch":"nrf52833-mbedos",
+ "type":"git"
+ },
+ {
+ "name":"codal-ble-nano",
+ "info":"This target specifies the ble-nano by RedBear which is driven by a NRF52.",
+ "device_url":"https://redbear.cc/product/ble-nano-kit-2.html",
+ "url":"https://github.com/lancaster-university/codal-ble-nano",
+ "branch":"master",
+ "type":"git"
+ },
+ {
+ "name":"codal-stm32-iot-node",
+ "info":"This target specifies the STM32 IoT Node board which is driven by a STM32L475.",
+ "device_url":"http://www.st.com/en/evaluation-tools/b-l475e-iot01a.html",
+ "url":"https://github.com/LabAixBidouille-STM32/codal-stm32-iot-node",
+ "test_ignore":true,
+ "branch":"master",
+ "type":"git"
+ },
+ {
+ "name":"codal-big-brainpad",
+ "info":"This target specifies the stm32f401re (The big brain pad).",
+ "device_url":"",
+ "url":"https://github.com/lancaster-university/codal-big-brainpad",
+ "branch":"master",
+ "type":"git",
+ "test_ignore":true
+ },
+ {
+ "name":"codal-mkr1300",
+ "info":"This target specifies the arduino mkr1300 variant.",
+ "device_url":"",
+ "url":"https://github.com/ElectronicCats/codal-mkr1300",
+ "branch":"master",
+ "type":"git",
+ "test_ignore":true
+ },
+ {
+ "name":"codal-jacdac-feather",
+ "info":"This target specifies the jacdac-feather board based on the stmf103",
+ "device_url":"",
+ "url":"https://github.com/lancaster-university/codal-jacdac-feather",
+ "branch":"master",
+ "type":"git",
+ "test_ignore":true
+ },
+ {
+ "name":"codal-itsybitsy-m4",
+ "info":"This target specifies the adafruit itsybitsy board.",
+ "device_url":"",
+ "url":"https://github.com/lancaster-university/codal-itsybitsy-m4",
+ "branch":"master",
+ "type":"git"
+ }
+]
diff --git a/utils/uf2conv.py b/utils/uf2conv.py
new file mode 100644
index 0000000..3828284
--- /dev/null
+++ b/utils/uf2conv.py
@@ -0,0 +1,172 @@
+#!/usr/bin/python
+
+import sys
+import struct
+import subprocess
+import re
+import os
+import os.path
+import argparse
+
+UF2_MAGIC_START0 = 0x0A324655 # "UF2\n"
+UF2_MAGIC_START1 = 0x9E5D5157 # Randomly selected
+UF2_MAGIC_END = 0x0AB16F30 # Ditto
+
+# marker file present in the root of a mounted UF2 bootloader drive
+INFO_FILE = "/INFO_UF2.TXT"
+
+# application flash base address; main() overrides this from the --base option
+appstartaddr = 0x2000
+
+def isUF2(buf):
+ w = struct.unpack("<II", buf[0:8])
+ return w[0] == UF2_MAGIC_START0 and w[1] == UF2_MAGIC_START1
+
+def convertFromUF2(buf):
+ numblocks = len(buf) / 512
+ curraddr = None
+ outp = ""
+ for blockno in range(0, numblocks):
+ ptr = blockno * 512
+ block = buf[ptr:ptr + 512]
+ hd = struct.unpack("<IIIIIIII", block[0:32])
+ if hd[0] != UF2_MAGIC_START0 or hd[1] != UF2_MAGIC_START1:
+ print "Skipping block at " + ptr + "; bad magic"
+ continue
+ if hd[2] & 1:
+ # NO-flash flag set; skip block
+ continue
+ datalen = hd[4]
+ if datalen > 476:
+ assert False, "Invalid UF2 data size at " + ptr
+ newaddr = hd[3]
+ if curraddr == None:
+ appstartaddr = newaddr
+ curraddr = newaddr
+ padding = newaddr - curraddr
+ if padding < 0:
+ assert False, "Block out of order at " + ptr
+ if padding > 10*1024*1024:
+ assert False, "More than 10M of padding needed at " + ptr
+ if padding % 4 != 0:
+ assert False, "Non-word padding size at " + ptr
+ while padding > 0:
+ padding -= 4
+ outp += "\x00\x00\x00\x00"
+ outp += block[32 : 32 + datalen]
+ curraddr = newaddr + datalen
+ return outp
+
+def convertToUF2(fileContent):
+ datapadding = ""
+ while len(datapadding) < 512 - 256 - 32 - 4:
+ datapadding += "\x00\x00\x00\x00"
+ numblocks = (len(fileContent) + 255) / 256
+ outp = ""
+ for blockno in range(0, numblocks):
+ ptr = 256 * blockno
+ chunk = fileContent[ptr:ptr + 256]
+ hd = struct.pack("<IIIIIIII",
+ UF2_MAGIC_START0, UF2_MAGIC_START1,
+ 0, ptr + appstartaddr, 256, blockno, numblocks, 0)
+ while len(chunk) < 256:
+ chunk += "\x00"
+ block = hd + chunk + datapadding + struct.pack("<I", UF2_MAGIC_END)
+ assert len(block) == 512
+ outp += block
+ return outp
+
+def getdrives():
+ drives = []
+ if sys.platform == "win32":
+ r = subprocess.check_output(["wmic", "PATH", "Win32_LogicalDisk", "get", "DeviceID,", "VolumeName,", "FileSystem,", "DriveType"])
+ for line in r.split('\n'):
+ words = re.split('\s+', line)
+ if len(words) >= 3 and words[1] == "2" and words[2] == "FAT":
+ drives.append(words[0])
+ else:
+ rootpath = "/media"
+ if sys.platform == "darwin":
+ rootpath = "/Volumes"
+ elif sys.platform == "linux":
+ tmp = rootpath + "/" + os.environ["USER"]
+ if os.path.isdir(tmp):
+ rootpath = tmp
+ for d in os.listdir(rootpath):
+ drives.append(os.path.join(rootpath, d))
+
+ def hasInfo(d):
+ try:
+ return os.path.isfile(d + INFO_FILE)
+ except:
+ return False
+
+ return filter(hasInfo, drives)
+
+def boardID(path):
+ with open(path + INFO_FILE, mode='r') as file:
+ fileContent = file.read()
+ return re.search("Board-ID: ([^\r\n]*)", fileContent).group(1)
+
+def listdrives():
+ for d in getdrives():
+ print d, boardID(d)
+
+def writeFile(name, buf):
+ with open(name, "wb") as f:
+ f.write(buf)
+ print "Wrote %d bytes to %s." % (len(buf), name)
+
+def main():
+ global appstartaddr
+ def error(msg):
+ print msg
+ sys.exit(1)
+ parser = argparse.ArgumentParser(description='Convert to UF2 or flash directly.')
+ parser.add_argument('input', metavar='INPUT', type=str, nargs='?',
+ help='input file (BIN or UF2)')
+ parser.add_argument('-b' , '--base', dest='base', type=str,
+ default="0x2000",
+ help='set base address of application (default: 0x2000)')
+ parser.add_argument('-o' , '--output', metavar="FILE", dest='output', type=str,
+ help='write output to named file; defaults to "flash.uf2" or "flash.bin" where sensible')
+ parser.add_argument('-d' , '--device', dest="device_path",
+ help='select a device path to flash')
+ parser.add_argument('-l' , '--list', action='store_true',
+ help='list connected devices')
+ parser.add_argument('-c' , '--convert', action='store_true',
+ help='do not flash, just convert')
+ args = parser.parse_args()
+ appstartaddr = int(args.base, 0)
+ if args.list:
+ listdrives()
+ else:
+ if not args.input:
+ error("Need input file")
+ with open(args.input, mode='rb') as file:
+ inpbuf = file.read()
+ fromUF2 = isUF2(inpbuf)
+ ext = "uf2"
+ if fromUF2:
+ outbuf = convertFromUF2(inpbuf)
+ ext = "bin"
+ else:
+ outbuf = convertToUF2(inpbuf)
+ print "Converting to %s, output size: %d, start address: 0x%x" % (ext, len(outbuf), appstartaddr)
+
+ if args.convert:
+ drives = []
+ if args.output == None:
+ args.output = "flash." + ext
+ else:
+ drives = getdrives()
+
+ if args.output:
+ writeFile(args.output, outbuf)
+ else:
+ if len(drives) == 0:
+ error("No drive to deploy.")
+ for d in drives:
+ print "Flashing %s (%s)" % (d, boardID(d))
+ writeFile(outbuf, d + "/NEW.UF2")
+
+if __name__ == "__main__":
+    # run the CLI when executed directly; importing the module has no side effects
+    main()