Add travis.yml
parent
32e278d4c9
commit
af162a7134
|
@ -0,0 +1,138 @@
|
|||
""" Consumes a stream and returns a dict
|
||||
|
||||
However, the dict won't contain "__doc__", "___version___" etc, but
|
||||
the shortened versions without underscores: "doc", "version".
|
||||
|
||||
Currently not supported:
|
||||
* Dicts
|
||||
* Floating points
|
||||
* ints in list
|
||||
* Strings in any format other than "x" or 'y'
|
||||
* Docstrings with any delimiter other than triple-" or triple-'
|
||||
* Comments
|
||||
|
||||
Feel free to expand if necessary
|
||||
"""
|
||||
|
||||
class ParseException(Exception):
    """Raised when the metadata stream cannot be parsed."""

    def __init__(self, message=""):
        super().__init__(message)
|
||||
|
||||
def read_metadata(s):
    """Consume stream ``s`` and return its metadata as a dict.

    The leading docstring is stored under "doc"; every following
    ``___key___ = value`` assignment is stored under the bare key name.
    """
    metadata = {"doc": _read_docstring(s)}

    key = _read_key(s)
    while key:
        metadata[key] = _read_value(s)
        key = _read_key(s)

    return metadata
|
||||
|
||||
def _read_docstring(s):
    """Read a triple-quoted docstring and return its content without quotes."""
    delimiter = _read_non_whitespace(s, 3)
    if delimiter not in ('"""', "'''"):
        raise ParseException("Docstring delimiter expected")
    buf = _read(s, 3)
    while not buf.endswith(delimiter):
        buf += _read(s)
    return buf[:-3]
|
||||
|
||||
def _read_value(s):
    """Skip whitespace, then read and return the next value from ``s``."""
    first = _read_non_whitespace(s)
    return _read_value_given_first_char(s, first)
|
||||
|
||||
|
||||
def _read_value_given_first_char(s, first_char):
    """Dispatch on a value's first (already consumed) character.

    Strings start with a quote, ints with a digit, booleans with T/F and
    lists with '['; anything else is a parse error.
    """
    if first_char == "'" or first_char == '"':
        return _read_string(s, first_char)
    if first_char in "0123456789":
        return _read_int(s, first_char)
    if first_char == "T" or first_char == "F":
        return _read_bool(s, first_char)
    if first_char == "[":
        return _read_list(s)
    raise ParseException("Invalid character %s found" % first_char)
|
||||
|
||||
def _read_string(s, delimiter):
    """Read until ``delimiter`` and return the text (delimiter dropped).

    Escape sequences are not supported (see module docstring).
    """
    buf = _read(s)
    try:
        while not buf.endswith(delimiter):
            buf += _read(s)
    except ParseException:
        raise ParseException("Invalid string or not terminated: %s" % buf)
    return buf[:-1]
|
||||
|
||||
def _read_int(s, char):
|
||||
result = char
|
||||
while not char.isspace():
|
||||
char = s.read(1)
|
||||
if not char:
|
||||
break
|
||||
result += char
|
||||
if not char in "0123456789":
|
||||
raise ParseException("Invalid int: %s" % result)
|
||||
return int(result)
|
||||
|
||||
def _read_bool(s, char):
    """Read a boolean literal whose first letter ``char`` ('T' or 'F') was
    already consumed; raises ParseException on anything but True/False."""
    if char == "T":
        _assert(char + _read(s, 3), "True", "Invalid boolean")
        return True
    _assert(char + _read(s, 4), "False", "Invalid boolean")
    return False
|
||||
|
||||
def _read_list(s):
    """Read a list of values; the opening '[' has already been consumed."""
    values = []
    while True:
        char = _read_non_whitespace(s)
        if char == "]":
            return values
        if not values:
            # first element: char is already its first character
            values.append(_read_value_given_first_char(s, char))
        else:
            # further elements must be separated by a comma
            if char != ",":
                raise ParseException("Expected comma, got '%s'" % char)
            values.append(_read_value(s))
|
||||
|
||||
def _read_key(s):
    """Read a "___key___ =" header and return the bare key name.

    Returns None when the next token is not a key prefix (i.e. the
    metadata section has ended).  Raises ParseException for malformed
    keys or a missing '='.

    Fix: ``result`` is now initialized before the try block -- previously
    a ParseException raised by the first ``_read(s, 3)`` (EOF right after
    "___") made the except handler fail with NameError.
    """
    delimiter = _read_non_whitespace(s, 3)
    if delimiter != "___":
        return None
    result = ""
    try:
        result = _read(s, 3)
        while result[-3:] != delimiter:
            char = _read(s)
            # keys cannot contain spaces or '=': hitting one means the
            # closing "___" is missing
            if char in [" ", "="]:
                raise ParseException()
            result += char
    except ParseException:
        raise ParseException("Invalid key: ___%s" % result)
    _assert(_read_non_whitespace(s), "=", "Expected equals")
    return result[:-3]
|
||||
|
||||
def _read(s, l=1):
|
||||
result = s.read(l)
|
||||
if len(result)<l:
|
||||
raise ParseException("Expected to read at least %s characters, got '%s'" % (l, result))
|
||||
return result
|
||||
|
||||
def _assert(input, expected, message):
|
||||
if not input == expected:
|
||||
raise ParseException(message + " ('%s' expected, '%s' found)" % (expected, input))
|
||||
|
||||
def _read_non_whitespace(s, l=1):
|
||||
result = s.read(1)
|
||||
while result.isspace():
|
||||
result = s.read(1)
|
||||
if l == 1:
|
||||
return result
|
||||
else:
|
||||
return result + s.read(l - 1)
|
||||
|
|
@ -0,0 +1,208 @@
|
|||
import os, hashlib, binascii
|
||||
from metadata_reader import read_metadata, ParseException
|
||||
|
||||
"""
|
||||
Resources are the basic components of any given library.
|
||||
|
||||
The following types are supported:
|
||||
* lib (files in lib/)
|
||||
* shared (file in shared/)
|
||||
* app (all other folders in root)
|
||||
* root (files in root)
|
||||
|
||||
A resource has the following form:
|
||||
{
|
||||
"app1": {"type": "app", dependencies: ["lib/lib1.py"], "files": {"app1/main.py": "abcdef1234", "app1/nested/text.txt": "abcdef1234"}},
|
||||
"app2": {"type": "app", dependencies: ["lib/lib2.py"], "files": {"app2/main.py": "abcdef1234", "app2/other_content.txt": "abcdef1234", "app2/some_binary_file.gif": "abcdef1234"}},
|
||||
"lib/lib1.py": {"type": "lib", , dependencies: ["lib/lib3.py"], "files": {"lib/lib1.py": "abcdef1234"}},
|
||||
"lib/lib2.py": {"type": "lib", "files": {"lib/lib2.py": "abcdef1234"}},
|
||||
"lib/lib3.py": {"type": "lib", "files": {"lib/lib3.py": "abcdef1234"}},
|
||||
"lib/lib4.py": {"type": "lib", "files": {"lib/lib4.py": "abcdef1234"}},
|
||||
"lib/lib5.py": {"type": "lib", "files": {"lib/lib5.py": "abcdef1234"}},
|
||||
"shared/foo.txt": {"type": "shared", "files": {"shared/foo.txt": "abcdef1234"}},
|
||||
"boot.py": {"type": "root", "files": {"boot.py": "abcdef1234"}}
|
||||
}
|
||||
|
||||
Every resource can also contain other metadata fields which are extracted from the body
|
||||
of its main python class (in case of lib or app).
|
||||
|
||||
This module has the following operations:
|
||||
resources = get_resources(path) # Gets resources for a given path
|
||||
add_hashes(path, resources) # Adds hashes to the file dict - not needed for testing
|
||||
add_metadata(path, resources) # Adds metadata
|
||||
resolve_dependencies(resources) # Merges all dependencies into each resource's file dict
|
||||
validate(resources) # Runs basic validation
|
||||
|
||||
This module encapsulates all the main operations the app library is expected to
|
||||
perform on a given checkout. It's intentionally kept in one file to make it easier
|
||||
to share between repositories. The only exception to this rule is metadata_reader
|
||||
(because it's rather complex and I didn't want to make this file impossible to read)
|
||||
|
||||
Please make sure this file can be executed on any operating system running python3.
|
||||
Don't include any external dependencies. It forms part of the local toolchain.
|
||||
"""
|
||||
|
||||
"""
|
||||
scan(path)
|
||||
|
||||
A resource scanner for the Tilda filesystem. Returns a {path: {type:<type>, files:{...}}}
|
||||
|
||||
The following are ignored:
|
||||
* dotfiles
|
||||
* __pycache__
|
||||
"""
|
||||
|
||||
def _scan_files(path, rel_path = ""):
|
||||
result = []
|
||||
for element in os.listdir(path):
|
||||
if element.startswith(".") or element == "__pycache__":
|
||||
continue
|
||||
element_path = os.path.join(path, element)
|
||||
element_rel_path = os.path.join(rel_path, element)
|
||||
if os.path.isdir(element_path):
|
||||
result.extend(_scan_files(element_path, element_rel_path))
|
||||
else:
|
||||
result.append(element_rel_path)
|
||||
|
||||
return result
|
||||
|
||||
def get_resources(path):
    """Scan ``path`` and return the resource dict described in the module
    docstring.  File hashes are left as None (see add_hashes)."""
    resources = {}
    for entry in os.listdir(path):
        if entry.startswith(".") or entry == "__pycache__":
            continue
        entry_path = os.path.join(path, entry)

        # plain files in the root are "root" resources
        if os.path.isfile(entry_path):
            resources[entry] = {"type": "root", "files": {entry: None}}
            continue

        files = _scan_files(entry_path, entry)
        if entry in ["lib", "shared"]:
            # every file in lib/ and shared/ is its own resource
            for rel_path in files:
                resources[rel_path] = {"type": entry, "files": {rel_path: None}}
        else:
            # any other folder is a single app resource owning all its files
            resources[entry] = {
                "type": "app",
                "files": {rel_path: None for rel_path in files},
            }
    return resources
|
||||
|
||||
"""
|
||||
add_hashes(path, resource)
|
||||
|
||||
Adds the first 10 characters of SHA256 hashes to all elements in "files".
|
||||
The hash is calculated on the file content, not the file name.
|
||||
"""
|
||||
|
||||
def add_hashes(path, resources):
    """Fill each resource's "files" dict with the short SHA256 hash of the
    corresponding file's content (mutates ``resources`` in place)."""
    for resource in resources.values():
        files = resource["files"]
        for rel_path in files:
            files[rel_path] = _hash_file(os.path.join(path, rel_path))
|
||||
|
||||
def _hash_file(filename):
|
||||
"""Calculates the SHA256 hash of a file."""
|
||||
with open(filename, "rb") as file:
|
||||
sha256 = hashlib.sha256()
|
||||
buf = file.read(128)
|
||||
while len(buf) > 0:
|
||||
sha256.update(buf)
|
||||
buf = file.read(128)
|
||||
return str(binascii.hexlify(sha256.digest()), "utf8")[:10]
|
||||
|
||||
"""
|
||||
add_metadata(path, resource)
|
||||
|
||||
Reads primary files for app and lib resources and extracts metadata information from its header
|
||||
"""
|
||||
|
||||
def add_metadata(path, resources):
    """Read the primary file of each app/lib resource and merge its
    metadata (docstring + ___key___ fields) into the resource dict.

    Parse failures are recorded in the resource's "errors" list.

    Fixes: ``next`` now takes a default so an app without a main.py no
    longer crashes with StopIteration, and main.py is located via
    ``os.path.basename`` instead of the substring "/main.py", which
    fails on Windows where _scan_files joins paths with os.sep.
    """
    for resource in resources.values():
        file = None
        if resource['type'] == "app":
            file = next((f for f in resource['files']
                         if os.path.basename(f) == "main.py"), None)
        elif resource['type'] == "lib":
            file = next(iter(resource['files'].keys()))

        if file:
            try:
                with open(os.path.join(path, file), "r") as stream:
                    resource.update(_normalize_metadata(read_metadata(stream)))
            except ParseException as e:
                resource.setdefault("errors", []).append(file + ": " + str(e))
|
||||
|
||||
def _normalize_metadata(metadata):
    """Rename "doc" to "description" and expand shortened lib dependencies."""
    metadata['description'] = metadata.pop('doc')
    deps = metadata.pop('dependencies', None)
    if deps is not None:
        metadata['dependencies'] = [_normalize_lib(dep) for dep in deps]
    return metadata
|
||||
|
||||
def _normalize_lib(lib):
|
||||
"""lib dependencies can be shortened to just their module name"""
|
||||
if "." in lib or "/" in lib:
|
||||
return lib
|
||||
return "lib/%s.py" % lib
|
||||
|
||||
"""
|
||||
resolve_dependencies(resources)
|
||||
|
||||
merges files from dependent resources into the original files dict
|
||||
"""
|
||||
|
||||
def resolve_dependencies(resources):
    """Merge the files of every (transitive) dependency into each
    resource's "files" dict, in place.

    Missing dependencies are recorded in the resource's "errors" list
    instead of raising.  Cycles are handled via the ``seen`` set.

    Improvement: membership tracking uses a set instead of a list,
    avoiding O(n^2) scans; iteration order of the result is unaffected.
    """
    for file, resource in resources.items():
        if 'dependencies' not in resource:
            continue
        seen = {file}  # guards against cycles and duplicates
        to_add = list(resource['dependencies'])
        while to_add:
            dep = to_add.pop()
            if dep in seen:
                continue
            if dep not in resources:
                resource.setdefault("errors", []).append(
                    "Dependency %s not found" % dep)
                continue
            seen.add(dep)
            to_add.extend(resources[dep].get("dependencies", []))
            resource['files'].update(resources[dep]['files'])
|
||||
|
||||
|
||||
"""
|
||||
validate(path, resources)
|
||||
|
||||
does basic verification:
|
||||
* Is it valid python?
|
||||
* Are metadata fields missing
|
||||
* TBD: Does it have imports that are not dependencies?
|
||||
"""
|
||||
def validate(path, resources):
    """Run basic validation on every resource; problems are appended to
    each resource's "errors" list in place."""
    for resource in resources.values():
        _validate_resource(path, resource)
|
||||
|
||||
def _validate_resource(path, resource):
|
||||
# Compile
|
||||
for file in resource['files'].keys():
|
||||
if file.endswith(".py"):
|
||||
try:
|
||||
filename = os.path.join(path, file)
|
||||
with open(filename, 'r') as s:
|
||||
compile(s.read() + '\n', filename, 'exec')
|
||||
except Exception as e:
|
||||
resource.setdefault("errors", []).append(str(e))
|
||||
|
||||
# Metadata check
|
||||
if resource['type'] in ["app", "lib"]:
|
||||
pass #todo: what exactly are we're making required?
|
||||
|
||||
|
||||
"""
|
||||
helpers
|
||||
"""
|
||||
|
||||
def get_error_summary(resources):
    """Render every resource's collected "errors" as one printable report.

    Returns "" when no resource has errors.
    """
    sections = [
        "--- %s ---\n%s" % (key, "\n".join(resource["errors"]))
        for key, resource in resources.items()
        if "errors" in resource
    ]
    return "\n\n".join(sections).strip()
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
"""Toolchain for working with the TiLDA Mk4
|
||||
|
||||
Usage (currently more of a wishlist)
|
||||
Usage
|
||||
------------------------------------
|
||||
|
||||
Reboot badge
|
||||
|
@ -27,7 +27,7 @@ $ tilda_toold.py sync --run some_other_file.py
|
|||
Executes a single file on the badge without copying anything (Using pyboard.py)
|
||||
$ tilda_tools run my_app/main.py
|
||||
|
||||
Runs local validation against metadata (doesn't require a badge)
|
||||
Runs local validation (doesn't require a badge, but doesn't run unit tests)
|
||||
$ tilda_tools validate
|
||||
|
||||
Runs local validation and badge-side tests
|
||||
|
@ -46,11 +46,12 @@ Common parameters
|
|||
|
||||
import sys, glob
|
||||
import sync, pyboard_util
|
||||
from resources import *
|
||||
|
||||
def main():
|
||||
import argparse
|
||||
cmd_parser = argparse.ArgumentParser(description='Toolchain for working with the TiLDA Mk4')
|
||||
cmd_parser.add_argument('command', nargs=1, help='command')
|
||||
cmd_parser.add_argument('command', nargs=1, help='command [test|reset|sync|run]')
|
||||
cmd_parser.add_argument('-d', '--device', help='the serial device of the badge')
|
||||
cmd_parser.add_argument('-s', '--storage', help='the usb mass storage path of the badge')
|
||||
cmd_parser.add_argument('-b', '--baudrate', default=115200, help='the baud rate of the serial device')
|
||||
|
@ -60,6 +61,23 @@ def main():
|
|||
cmd_parser.add_argument('paths', nargs='*', help='input files')
|
||||
args = cmd_parser.parse_args()
|
||||
command = args.command[0]
|
||||
path = sync.get_root()
|
||||
|
||||
if command in ["test", "validate"]:
|
||||
resources = get_resources(path)
|
||||
add_metadata(path, resources)
|
||||
resolve_dependencies(resources)
|
||||
validate(path, resources)
|
||||
errors = get_error_summary(resources)
|
||||
if errors:
|
||||
print("Problems found:\n")
|
||||
print(errors)
|
||||
sys.exit(1)
|
||||
print("Local Test: PASS")
|
||||
if command == "test":
|
||||
command = "sync"
|
||||
args.path = []
|
||||
args.run = "test/main.py"
|
||||
|
||||
if command in ["reset", "sync"]:
|
||||
pyboard_util.stop_badge(args)
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
script:
|
||||
./tilda_tools validate
|
|
@ -176,4 +176,4 @@ def empty_local_app_cache():
|
|||
global _public_apps_cache, _category_cache
|
||||
_public_apps_cache = None
|
||||
_category_cache = None
|
||||
gc.collect()
|
||||
gc.collect()
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
"""Base libarary for test cases"""
|
||||
|
||||
___license___ = "MIT"
|
||||
|
||||
import sys
|
||||
|
||||
class SkipTest(Exception):
|
||||
|
|
Loading…
Reference in New Issue