Commit c2700252 authored by Amy Blank's avatar Amy Blank 🌈
Browse files

Merge branch 'devel' into 'master'

Official merge to master for spring release

See merge request burt/software/burt-util!62
parents ed1b81a7 388d2108
Pipeline #15234 passed with stage
in 28 seconds
......@@ -13,6 +13,7 @@ build/
......@@ -23,6 +24,14 @@ var/
# Auto-generated requirements.txt file
#pipenv stuff
# PyInstaller
# Usually these files are written by a python script from a template
......@@ -103,6 +112,11 @@ ENV/
# .nfs files are created when an open file is removed but is still being accessed
# debian build artifacts
# This file is a template, and might need editing before it works on your project.
# Official language image. Look for the different tagged releases at:
image: "python:3.4"
image: burt/pyinstaller
- build
- release-job
stage: build
- ./ install
- python3 install
- make build
stage: release-job
- ./ test
- python3 install
- make package
- packaging/*.deb
name: "${CI_PROJECT_NAME}-${CI_BUILD_TAG}-deb"
- tags
......@@ -6,12 +6,6 @@ put content here
- put content here.
### Reviewers
- [ ] @tn
- [ ] @amyblank
- [ ] @bz
### Resources
### Issues
......@@ -9,11 +9,5 @@
- put content here.
### Reviewers
- [ ] @tn
- [ ] @amyblank
- [ ] @bz
### Resources and Documentation
> Links for resources and documentation go here.
......@@ -2,7 +2,33 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](
## [3.1.2]
- actually call gdb-multiarch correctly & specify arch when called (JLink only)
## [3.1.1]
- made gdb-multiarch recommended instead of required for install
## [3.1.0]
- Partial merge of Shen Yixin's devel branch from estun
- Added '-d' flag to elbow calibrate to support new 3.0.0 elbow firmware with dual encoder calibration based on yixin's changes
- Fixed some minor bugs in makefile to make setup less buggy
## [3.0.1]
- Switched default gdb to be gdb instead of arm-none-eabi-gdb
## [3.0.0]
- python.gitlab and dependencies upgraded to use gitlab API v4.
- download artifacts updated to reflect new API
- command restore artifacts added in case a projects pipeline needs to be retriggered to restore artifacts for that tag.
## [2.10.1]
- no code changes. Switched distribution style to a pyinstaller executable bundled in a .deb
## [2.10.0]
- burt-util enumerate now checks all CAN IDs 0-127 by default
- specify python-can 2.2.1 in pipfile
## [2.9.0]
......@@ -129,7 +155,7 @@ This project adheres to [Semantic Versioning](
- Unpack_message for unpacking bytes into data with REAL32, INTEGER and UNSIGNED support
- A yes to all option for inspection program which always accepts defaults
- Pipfile for pipenv support
- Make develop and make lock
- Make develop and make lock
- Update_config command for updating the config file
- Setup_inspection command
- A command for setting gitlab token in the config file (gl_token)
......@@ -247,7 +273,7 @@ This project adheres to [Semantic Versioning](
## Fixed
- Fixed adc reading issues
## [0.8.3]
### Fixed
SHELL := /bin/bash
WORKSPACE_DIR=$(shell git rev-parse --show-toplevel)
VERSION=$(shell cat burt_util/ | grep --color=never -oh '[0-9]\+\.[0-9]\+\.[0-9]\+')
NAME=$(shell basename -s .git `git config --get remote.origin.url`)
USERNAME=$(shell git config
EMAIL=$(shell git config
all: build
.PHONY: build
build: $(EXE)
pyinstaller $< --onefile -n $(NAME)
.PHONY: package
package: $(DEB)
rm -f $(DEBIAN_DIR)/changelog
mkdir -p $(OUTDIR)/bin/usr/bin
cp $(DIST_DIR)/$(NAME) $(OUTDIR)/bin/usr/bin/
cd $(DEBIAN_DIR)/.. && dch --create --package "$(NAME)" -v $(VERSION) -D UNRELEASED "Latest update"
cd $(OUTDIR) && debuild -us -uc
echo "./bin/* ./" > $(DEBIAN_DIR)/$(NAME).install
$(DEBIAN_DIR)/control: $(DEBIAN_DIR)/control.tmpl
NAME=$(NAME) envsubst < $(DEBIAN_DIR)/control.tmpl > $(DEBIAN_DIR)/control
.PHONY: clean
-rm -f $(DEBIAN_DIR)/control
.PHONY: install
install: $(DEB)
sudo dpkg -i $<
.PHONY: uninstall
sudo apt remove $(NAME)
.PHONY: release
git tag -a ${RELEASE_TYPE}-$(VERSION) -m "Release ${RELEASE_TYPE}-${VERSION}"
git push --tags
.PHONY: setup
setup: ~/.pyenv
sudo apt-get update
sudo apt-get install -y \
python3-dev \
python3-pip \
devscripts \
debhelper \
git \
make \
build-essential \
libssl-dev \
zlib1g-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
wget \
curl \
llvm \
libncurses5-dev \
libncursesw5-dev \
xz-utils \
tk-dev \
libffi-dev \
liblzma-dev \
# pipenv is the only python package that we need to install globally; all the
# other packages get installed inside our virtual environment
sudo pip3 install importlib-resources==1.0 zipp==1.0
sudo pip3 install pipenv
# we need to manually build python3.6 with the --enable-shared flag
# because otherwise pyinstaller won't be able to find the shared
# python libraries from within pipenv
env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install 3.6.9
# pyenv install 3.6.9
# lastly, we actually set up the pipenv environment
pipenv install --dev -v
pipenv lock -r > requirements.txt
# The following pyenv installation is copied from
git clone ~/.pyenv
echo 'export PYENV_ROOT="$$HOME/.pyenv"' >> ~/.bashrc
echo 'export PATH="$$PYENV_ROOT/bin:$$PATH"' >> ~/.bashrc
# we need to reload bashrc after making changes.
source ~/.bashrc
......@@ -7,26 +7,43 @@ name = "pypi"
burt-util = {editable = true, path = "."}
nosetests-json-extended = "==0.1.0"
nose = "==1.3.7"
pyinstaller = "==4.0"
python_version = "3.4"
python_version = "3.6"
# The Dockerfile that builds the executable installer on the build server
# has all of these packages installed by default so that they don't need to get
# re-downloaded at build time. If you update these packages here, you should
# update the Docker image as well so it doesn't have to download the new stuff
# every time it runs a build.
aiocoap = "==0.3"
arrow = "==0.17.0"
canmatrix = "==0.5"
canopen = "==0.5.1"
certifi = "==2018.8.24"
chardet = "==3.0.4"
click = "==6.6"
click-didyoumean = "==0.0.3"
click-spinner = "==0.1.7"
click-spinner = "==0.1.10"
coverage = "==4.4.1"
docutils = "==0.14"
future = "==0.16.0"
idna = "==2.6"
logbook = "==1.4.0"
lxml = "==4.2.5"
msgpack-python = "==0.5.6"
python-can = "*"
python-gitlab = "==0.21.2"
requests = "==2.18.4"
py = "==1.6.0"
pycryptodome = "==3.6.6"
pyshark = "=="
python-can = "==2.2.1"
python-dateutil = "==2.7.3"
python-gitlab = "==2.5.0"
requests = "==2.22.0"
semver = "==2.7.7"
six = "==1.10.0"
statistics = "=="
toml = "==0.9.2"
"urllib3" = "==1.22"
statistics = "*"
pyshark = "==0.3.8"
wrapt = "==1.10.11"
This diff is collapsed.
......@@ -17,11 +17,10 @@ rehabilitation robot. `util` is short for 'utility'.
### Quick Start
The following commands are all you need:
To install burt-util, go to the latest tag, then download the .deb installer (inside "release-job") and install it with
$ sudo apt-get install -y python3 python3-pip python3-dev libxml2-dev libxslt1-dev zlib1g-dev gdb-arm-none-eabi
$ sudo pip3 install git+
sudo dpkg -i burt-util*.deb
Usage examples:
......@@ -45,6 +44,23 @@ $ burt-util legacy flash 1 puck3_cw.bin // Update f/w over CAN when using a pre-
### Development Setup
#### Development Environment
This project is set up to be developed in a pipenv virtual environment using
python3.6. Since python3.6 cannot be installed with apt on Ubuntu 16.04, use
pyenv to provide it to pipenv. The application can be run locally from the
virtual environment, or 'frozen' into a single-file application using
pyinstaller. This one-file application can then be installed on other systems
by building a .deb installer file using make.
The build server builds the .deb slightly differently. The environment in which
make is run is not a pipenv virtual environment, but rather a docker container
defined in the burt-ci-docker-image project. This docker container is set up so
that its internal global python setup is the same as the one defined by the
pipenv Pipfile. It is not defined by the Pipfile though, and if the Pipfile is
changed, the docker image should be updated as well to keep local & server
builds roughly compatible.
Development can be done in one of two ways
#### Recommended: Pipenv
......@@ -57,17 +73,17 @@ First download the repository:
$ git clone && cd burt-util
To install the dependencies (for Ubuntu 16.04):
To install the dependencies
$ sudo apt-get install -y python3 python3-pip
$ sudo pip3 install pipenv
make setup
Then we can run pipenv
among other things this installs pyenv which requires the shell to be
$ pipenv install
exec $SHELL
Now we can run the program in this repo without installing on one of two ways
......@@ -83,9 +99,13 @@ $ pipenv shell
(pipenv) $ burt-util
This allows for faster development because you don't need to do a system wide install everytime you make a change.
You may be prompted to install python3.6 if you do not already have it
installed. Indicate yes when asked.
#### Simple: Install after changes
This allows for faster development because you don't need to do a system wide
install every time you make a change.
#### Advanced: Install after changes
First download the repository:
......@@ -93,26 +113,33 @@ First download the repository:
$ git clone && cd burt-util
To install the dependencies (for Ubuntu 16.04):
To install the system dependencies
$ sudo apt-get install -y python3 python3-pip
$ make setup
All python requirements are managed by the file that is used by PIP.
The application can be installed by running the following command in the
burt-util base directory:
Among other things this installs pyenv which requires the shell to be
$ sudo -H pip3 install .
exec $SHELL
To verify it is installed properly you can now run the unit tests:
The project should be built inside the pipenv virtual environment to ensure
that it is built with the dependencies specified in the Pipfile. To do so run:
$ sudo ./ test
$ pipenv install --dev
$ pipenv run make install
You may be prompted to install python3.6 if you do not already have it
installed. Indicate yes when asked.
This sets up the virtual environment as a development environment, builds the
package inside the virtual environment, and then installs the built debian
on the system. This method is preferable for testing, as it installs burt-util
at the system level.
### Configuration
from burt_util.cli import setup_cli
if __name__ == '__main__':
......@@ -178,5 +178,6 @@ def setup_cli():
......@@ -102,40 +102,33 @@ def reset_all(ctx):
@click.option('--all-ids/--no-all-ids', default=False)
@click.argument('node_ids', nargs=-1, type=const.TYPE_BASED_CAN_INT)
def enumerate(ctx, all_ids, node_ids):
def enumerate(ctx, node_ids):
Search through can ids to find nodes.
Search through CAN IDs to find nodes.
By default the search is limited to ids commonly used by Barrett, but a
custom list can be added. All ids must be between 1 and 127
By default the search goes through all IDs between 1 and 127,
but custom list can be added.
To search of a burt specific set of ids: 127, 31, 10, 1, 2, 3, 4, 5
To search over all possible IDs (1-127)
$ burt-util enumerate
To search over all possible options
$ burt-util enumerate --all-ids
Over a custom list of ids
Over a custom list of IDs
$ burt-util enumerate 1 2 3 4 5
utils = ctx.obj['api']
found_ids = []
if all_ids:
search_ids = list(
range(const.CANOPEN_MIN_ID, const.CANOPEN_MAX_ID + 1))
elif node_ids:
if node_ids:
search_ids = list(node_ids)
search_ids = const.ENUMERATE_DEFAULT_IDS
search_ids = list(
range(const.CANOPEN_MIN_ID, const.CANOPEN_MAX_ID + 1))
click.echo("Searching for nodes")
with click.progressbar(search_ids, length=len(search_ids)) as search_bar:
......@@ -26,6 +26,13 @@ def get_elbow_status(can_node, utils):
return ((result[0] & 8), result[0] & 3)
def format_raw_hall_data(raw):
takes a raw hall data read and returns the value in gauss and volts
gauss = ((raw - 32768) * 1000) / 32768
volts = (raw * 3.3) / 65536
return (gauss, volts)
from .inspection import inspection
from .setup_inspection import setup_inspection
......@@ -16,11 +16,13 @@ import pprint
@click.option("-i", "--id", default=const.EB_DEFAULT_ID,
@click.option("-v", "--verbose", is_flag=True, default=False, help=("Enable "
"debugging information"))
help="Enable debugging information")
help="Store seperate calibration values for both hall sensors. FW v3.0.0 and greater")
def calibrate(ctx, id, verbose):
def calibrate(ctx, id, verbose, dual_hall_cal):
Run the elbow calibration
......@@ -53,11 +55,16 @@ def calibrate(ctx, id, verbose):
def read_raw_hall_effects():
return (utils.read_sdo(id, (0x3500, 0), "UNSIGNED16"),
utils.read_sdo(id, (0x3501, 0), "UNSIGNED16"))
lhandraw = utils.read_sdo(id, (0x3500, 0), "UNSIGNED16")
rhandraw = utils.read_sdo(id, (0x3501, 0), "UNSIGNED16")
return (lhandraw, rhandraw)
click.echo("Error loading read_sdo in read_raw_hall_effects function")
def measure(N=num_samples):
hall_effect_measurements = [read_raw_hall_effects() for i in range(N)]
click.echo("value test") # testing
avg = reduce(
lambda x, y: ((x[0] + y[0])/2, (x[1] + y[1])/2.0),
......@@ -69,6 +76,7 @@ def calibrate(ctx, id, verbose):
u10, u9 = measure(num_samples)
measurements["lhandedness"]["rhand"] = u9
measurements["lhandedness"]["lhand"] = u10
click.echo("calibrated left") #
if verbose:
click.echo("Measurements for lhandedness")
......@@ -78,6 +86,7 @@ def calibrate(ctx, id, verbose):
u10, u9 = measure(num_samples)
measurements["rhandedness"]["rhand"] = u9
measurements["rhandedness"]["lhand"] = u10
if verbose:
click.echo("Measurements for rhandedness")
......@@ -92,13 +101,26 @@ def calibrate(ctx, id, verbose):
continue_prompt("Calibrate Elbow Board?")
for sensor in ["rhand", "lhand"]:
id, (0x3502, int(calibration[sensor]["polarity"])),
"UNSIGNED16", int(calibration[sensor]["mean"]))
# Elbow firmware <= 2.3.0 stores calibration data per polarity, and
# > 3.0.0 stores calibration data per sensor. Default is still per
# polarity, specify new method with -d flag.
if dual_hall_cal:
if verbose:
click.echo("\nSetting calibration for right hand Hall (U9)")
utils.send_sdo(id, (0x3502, 1), "UNSIGNED16", int(calibration["rhand"]["mean"]))
if verbose:
click.echo("\nSetting calibration for left hand Hall (U10)")
utils.send_sdo(id, (0x3502, 2), "UNSIGNED16", int(calibration["lhand"]["mean"]))
for sensor in ["rhand", "lhand"]:
if verbose:
click.echo("\nSetting calibration for polarity {0}".format(calibration[sensor]["polarity"]))
id, (0x3502, int(calibration[sensor]["polarity"])),
"UNSIGNED16", int(calibration[sensor]["mean"]))
echo_banner("Successfully Calibrated Elbow", fg="green")
echo_banner("Failed To Calibrate Elbow", fg="red")
raise SystemExit(-1)
\ No newline at end of file
raise SystemExit(-1)
......@@ -8,7 +8,7 @@ import sys
from ...utils import echo_banner, continue_prompt, ConditionalSpinner
from ... import constants as const
from ...canopen_runner import canopen_runner, DATATYPE, parse_data
from . import elbow, logger, get_elbow_status
from . import elbow, logger, get_elbow_status, format_raw_hall_data
......@@ -38,3 +38,28 @@ def status(ctx, id, verbose):
click.secho("Failed to determine elbow state", fg="red")
raise SystemExit(-1)
if verbose:
lraw = utils.read_sdo(id, (const.EB_ODINDEX_LEFTHALL_RAW,
rraw = utils.read_sdo(id, (const.EB_ODINDEX_RIGHTHALL_RAW,
lgauss, lvolts = format_raw_hall_data(lraw)
rgauss, rvolts = format_raw_hall_data(rraw)
u9Raw = utils.read_sdo(id, (const.EB_ODINDEX_HALLCONFIG,
u9Gauss, u9Volts = format_raw_hall_data(u9Raw)
u10Raw = utils.read_sdo(id, (const.EB_ODINDEX_HALLCONFIG,
u10Gauss, u10Volts = format_raw_hall_data(u10Raw)
click.echo("Right Hall: {0: 3.0f} G, {1:.2f} V, {2:5d} counts".format(
rgauss, rvolts, rraw))
click.echo("Left Hall: {0: 3.0f} G, {1:.2f} V, {2:5d} counts".format(
lgauss, lvolts, lraw))
click.echo("Right Thresh: {0: 3.0f} G, {1:.2f} V, {2:5d} counts".format(
u9Gauss, u9Volts, u9Raw))
click.echo("Left Thresh: {0: 3.0f} G, {1:.2f} V, {2:5d} counts".format(
u10Gauss, u10Volts, u10Raw))
#!/usr/bin/env python3
import traceback
import canopen
from time import sleep
import struct
......@@ -148,11 +149,11 @@ def reset(ctx, node_id, flashloader, other_id, timeout):