Merge pull request #340 from runseb/flake8

Add flake8 test and fix pep8 style in expansion scripts
pull/376/head
Matt Butcher 9 years ago
commit b2f1e8b938

@ -41,7 +41,7 @@ clean:
rm -rf bin
.PHONY: test
test: build test-style test-unit
test: build test-style test-unit test-flake8
ROOTFS := rootfs
@ -64,6 +64,12 @@ test-style: lint vet
echo "gofmt check failed:"; gofmt -e -d -s $(GO_DIRS); exit 1; \
fi
.PHONY: test-flake8
test-flake8:
@echo Running flake8...
flake8 expansion
@echo ----------------
.PHONY: lint
lint:
@echo Running golint...

@ -19,6 +19,7 @@ dependencies:
- export PATH="$HOME/bin:$PATH" GLIDE_HOME="$HOME/.glide"
- cd $GOPATH/src/$IMPORT_PATH
- sudo pip install -r expansion/requirements.txt
- sudo pip install flake8
test:
override:

@ -39,8 +39,8 @@ def Expand(config, imports=None, env=None, validate_schema=False):
to their values
validate_schema: True to run schema validation; False otherwise
Returns:
YAML containing the expanded configuration and its layout, in the following
format:
YAML containing the expanded configuration and its layout,
in the following format:
config:
...
@ -67,12 +67,12 @@ def _Expand(config, imports=None, env=None, validate_schema=False):
try:
yaml_config = yaml.safe_load(config)
except yaml.scanner.ScannerError as e:
# Here we know that YAML parser could not parse the template we've given it.
# YAML raises a ScannerError that specifies which file had the problem, as
# well as line and column, but since we're giving it the template from
# string, error message contains <string>, which is not very helpful on the
# user end, so replace it with word "template" and make it obvious that YAML
# contains a syntactic error.
# Here we know that YAML parser could not parse the template
# we've given it. YAML raises a ScannerError that specifies which file
# had the problem, as well as line and column, but since we're giving
# it the template from string, error message contains <string>, which
# is not very helpful on the user end, so replace it with word
# "template" and make it obvious that YAML contains a syntactic error.
msg = str(e).replace('"<string>"', 'template')
raise Exception('Error parsing YAML: %s' % msg)
@ -81,12 +81,12 @@ def _Expand(config, imports=None, env=None, validate_schema=False):
return ''
# If the configuration does not have ':' in it, the yaml_config will be a
# string. If this is the case just return the str. The code below it assumes
# yaml_config is a map for common cases.
# string. If this is the case just return the str. The code below it
# assumes yaml_config is a map for common cases.
if type(yaml_config) is str:
return yaml_config
if not yaml_config.has_key('resources') or yaml_config['resources'] is None:
if 'resources' not in yaml_config or yaml_config['resources'] is None:
yaml_config['resources'] = []
config = {'resources': []}
@ -125,11 +125,11 @@ def _ProcessResource(resource, imports, env, validate_schema=False):
ExpansionError: if any error occurs during expansion
"""
# A resource has to have a name.
if not resource.has_key('name'):
if 'name' not in resource:
raise ExpansionError(resource, 'Resource does not have a name.')
# A resource has to have a type.
if not resource.has_key('type'):
if 'type' not in resource:
raise ExpansionError(resource, 'Resource does not have type defined.')
config = {'resources': []}
@ -139,19 +139,23 @@ def _ProcessResource(resource, imports, env, validate_schema=False):
if resource['type'] in imports:
# A template resource, which contains sub-resources.
expanded_template = ExpandTemplate(resource, imports, env, validate_schema)
expanded_template = ExpandTemplate(resource, imports,
env, validate_schema)
if expanded_template['resources']:
_ValidateUniqueNames(expanded_template['resources'], resource['type'])
_ValidateUniqueNames(expanded_template['resources'],
resource['type'])
# Process all sub-resources of this template.
for resource_to_process in expanded_template['resources']:
processed_resource = _ProcessResource(resource_to_process, imports, env,
processed_resource = _ProcessResource(resource_to_process,
imports, env,
validate_schema)
# Append all sub-resources to the config resources, and the resulting
# layout of sub-resources.
config['resources'].extend(processed_resource['config']['resources'])
# Append all sub-resources to the config resources,
# and the resulting layout of sub-resources.
config['resources'].extend(processed_resource['config']
['resources'])
# Lazy-initialize resources key here because it is not set for
# non-template layouts.
@ -178,8 +182,8 @@ def _ValidateUniqueNames(template_resources, template_name='config'):
if resource['name'] in names:
raise ExpansionError(
resource,
'Resource name \'%s\' is not unique in %s.' % (resource['name'],
template_name))
'Resource name \'%s\' is not unique in %s.'
% (resource['name'], template_name))
names.add(resource['name'])
# If this resource doesn't have a name, we will report that error later
@ -210,8 +214,9 @@ def ExpandTemplate(resource, imports, env, validate_schema=False):
source_file,
'Unable to find source file %s in imports.' % (source_file))
# source_file could be a short version of the template (say github short name)
# so we need to potentially map this into the fully resolvable name.
# source_file could be a short version of the template
# (say github short name) so we need to potentially map this into
# the fully resolvable name.
if 'path' in imports[source_file] and imports[source_file]['path']:
path = imports[source_file]['path']
@ -260,12 +265,16 @@ def ExpandJinja(file_name, source_template, resource, imports):
"""Render the jinja template using jinja libraries.
Args:
file_name: string, the file name.
source_template: string, the content of jinja file to be render
resource: resource object, the resource that contains parameters to the
file_name:
string, the file name.
source_template:
string, the content of jinja file to be render
resource:
resource object, the resource that contains parameters to the
jinja file
imports: map from string to map {name, path}, the map of imported files names
fully resolved path and contents
imports:
map from string to map {name, path}, the map of imported
files names fully resolved path and contents
Returns:
The final expanded template
Raises:
@ -277,8 +286,8 @@ def ExpandJinja(file_name, source_template, resource, imports):
template = env.from_string(source_template)
if (resource.has_key('properties') or resource.has_key('env') or
resource.has_key('imports')):
if ('properties' in resource or 'env' in resource or
'imports' in resource):
return template.render(resource)
else:
return template.render()
@ -322,17 +331,17 @@ class PythonEvaluationContext(object):
"""
def __init__(self, params):
if params.has_key('properties'):
if 'properties' in params:
self.properties = params['properties']
else:
self.properties = None
if params.has_key('imports'):
if 'imports' in params:
self.imports = params['imports']
else:
self.imports = None
if params.has_key('env'):
if 'env' in params:
self.env = params['env']
else:
self.env = None
@ -361,7 +370,8 @@ def main():
imports = {}
while idx < len(sys.argv):
if idx + 1 == len(sys.argv):
print >>sys.stderr, 'Invalid import definition at argv pos %d' % idx
print >>sys.stderr, 'Invalid import definition at argv pos %d' \
% idx
sys.exit(1)
name = sys.argv[idx]
path = sys.argv[idx + 1]

@ -193,7 +193,8 @@ class ExpansionTest(unittest.TestCase):
expanded_template = expansion.Expand(
str(yaml_template), imports)
result_file = ReadTestFile('jinja_template_with_inlinedfile_result.yaml')
result_file = \
ReadTestFile('jinja_template_with_inlinedfile_result.yaml')
self.assertEquals(result_file, expanded_template)
@ -303,7 +304,8 @@ class ExpansionTest(unittest.TestCase):
template, imports)
self.fail('Expansion should fail')
except expansion.ExpansionError as e:
self.assertTrue('not unique in duplicate_names_in_subtemplates.jinja'
self.assertTrue('not unique in \
duplicate_names_in_subtemplates.jinja'
in e.message)
def testDuplicateNamesMixedLevel(self):
@ -335,7 +337,8 @@ class ExpansionTest(unittest.TestCase):
result_file = ReadTestFile('duplicate_names_parent_child_result.yaml')
self.assertEquals(result_file, expanded_template)
# Note, this template will fail in the frontend for duplicate resource names
# Note, this template will fail in the frontend
# for duplicate resource names
def testTemplateReturnsEmpty(self):
template = ReadTestFile('no_resources.yaml')
@ -377,7 +380,8 @@ class ExpansionTest(unittest.TestCase):
imports = {}
imports['python_schema.py'] = ReadTestFile('python_schema.py')
imports['python_schema.py.schema'] = ReadTestFile('python_schema.py.schema')
imports['python_schema.py.schema'] = \
ReadTestFile('python_schema.py.schema')
env = {'project': 'my-project'}
@ -412,7 +416,8 @@ class ExpansionTest(unittest.TestCase):
template = ReadTestFile('jinja_unresolved.yaml')
imports = {}
imports['jinja_unresolved.jinja'] = ReadTestFile('jinja_unresolved.jinja')
imports['jinja_unresolved.jinja'] = \
ReadTestFile('jinja_unresolved.jinja')
try:
expansion.Expand(

@ -30,7 +30,8 @@ class AllowedImportsLoader(object):
return '%s.py' % name.replace('.', '/')
def load_module(self, name, etc=None): # pylint: disable=unused-argument
"""Implements loader.load_module() for loading user provided imports."""
"""Implements loader.load_module()
for loading user provided imports."""
if name in AllowedImportsLoader.user_modules:
return AllowedImportsLoader.user_modules[name]
@ -38,7 +39,8 @@ class AllowedImportsLoader(object):
module = imp.new_module(name)
try:
data = FileAccessRedirector.allowed_imports[self.get_filename(name)]
data = \
FileAccessRedirector.allowed_imports[self.get_filename(name)]
except Exception: # pylint: disable=broad-except
return None
@ -56,10 +58,12 @@ class AllowedImportsLoader(object):
for child in FileAccessRedirector.parents[name]:
full_name = name + '.' + child
self.load_module(full_name)
# If we have helpers/common.py package, then for it to be successfully
# resolved helpers.common name must resolvable, hence, once we load
# If we have helpers/common.py package,
# then for it to be successfully resolved helpers.common name
# must be resolvable, hence, once we load
# child package we attach it to parent module immediately.
module.__dict__[child] = AllowedImportsLoader.user_modules[full_name]
module.__dict__[child] = \
AllowedImportsLoader.user_modules[full_name]
return module
@ -80,8 +84,9 @@ def process_imports(imports):
Copies the imports and then for all the hierarchical packages creates
dummy entries for those parent packages, so that hierarchical imports
can be resolved. In the process parent child relationship map is built.
For example: helpers/extra/common.py will generate helpers, helpers.extra
and helpers.extra.common packages along with related .py files.
For example: helpers/extra/common.py will generate helpers,
helpers.extra and helpers.extra.common packages
along with related .py files.
Args:
imports: map of files to their relative paths.
@ -102,8 +107,8 @@ def process_imports(imports):
continue
# Normalize paths and trim .py extension, if any.
normalized = os.path.splitext(os.path.normpath(path))[0]
# If this is actually a path and not an absolute name, split it and process
# the hierarchical packages.
# If this is actually a path and not an absolute name,
# split it and process the hierarchical packages.
if sep in normalized:
parts = normalized.split(sep)
# Create dummy file entries for package levels and also retain
@ -111,14 +116,16 @@ def process_imports(imports):
for i in xrange(0, len(parts)-1):
# Generate the partial package path.
path = os.path.join(parts[0], *parts[1:i+1])
# __init__.py file might have been provided and non-empty by the user.
# __init__.py file might have been provided and
# non-empty by the user.
if path not in ret:
# exec requires at least new line to be present to successfully
# compile the file.
# exec requires at least new line to be present
# to successfully compile the file.
ret[path + '.py'] = '\n'
else:
# To simplify our code, we'll store both versions in that case, since
# loader code expects files with .py extension.
# To simplify our code, we'll store both versions
# in that case, since loader code expects files
# with .py extension.
ret[path + '.py'] = ret[path]
# Generate fully qualified package name.
fqpn = '.'.join(parts[0:i+1])
@ -132,8 +139,8 @@ def process_imports(imports):
class FileAccessRedirector(object):
# Dictionary with user provided imports.
allowed_imports = {}
# Dictionary that shows parent child relationships, key is the parent, value
# is the list of child packages.
# Dictionary that shows parent child relationships,
# key is the parent, value is the list of child packages.
parents = {}
@staticmethod
@ -153,4 +160,3 @@ class FileAccessRedirector(object):
# Prepend our module handler before standard ones.
sys.meta_path = [AllowedImportsHandler()] + sys.meta_path

@ -65,11 +65,14 @@ def _ValidateSchema(schema, validating_imports, schema_name, template_name):
"""Validate that the passed in schema file is correctly formatted.
Args:
schema: contents of the schema file
validating_imports: boolean, if we should validate the 'imports'
section of the schema
schema_name: name of the schema file to validate
template_name: name of the template whose properties are being validated
schema:
contents of the schema file
validating_imports:
boolean, if we should validate the 'imports' section of the schema
schema_name:
name of the schema file to validate
template_name:
name of the template whose properties are being validated
Raises:
ValidationErrors: A list of ValidationError errors that occurred when
@ -98,10 +101,14 @@ def Validate(properties, schema_name, template_name, imports):
"""Given a set of properties, validates it against the given schema.
Args:
properties: dict, the properties to be validated
schema_name: name of the schema file to validate
template_name: name of the template whose properties are being validated
imports: the map of imported files names to file contents
properties:
dict, the properties to be validated
schema_name:
name of the schema file to validate
template_name:
name of the template whose properties are being validated
imports:
the map of imported files names to file contents
Returns:
Dict containing the validated properties, with defaults filled in
@ -113,7 +120,8 @@ def Validate(properties, schema_name, template_name, imports):
"""
if schema_name not in imports:
raise ValidationErrors(schema_name, template_name,
["Could not find schema file '%s'." % schema_name])
["Could not find schema file '%s'." %
schema_name])
raw_schema = imports[schema_name]
@ -160,7 +168,8 @@ def Validate(properties, schema_name, template_name, imports):
# 1) Use DEFAULT_SETTER to set default values in the user's properties
# 2) Use the unmodified VALIDATOR to report all of the errors
# Calling iter_errors mutates properties in place, adding default values.
# Calling iter_errors mutates properties in place,
# adding default values.
# You must call list()! This is a generator, not a function!
list(DEFAULT_SETTER(schema).iter_errors(properties))
@ -176,7 +185,8 @@ def Validate(properties, schema_name, template_name, imports):
except TypeError as e:
raise ValidationErrors(
schema_name, template_name,
[e, "Perhaps you forgot to put 'quotes' around your reference."],
[e, "Perhaps you forgot to put 'quotes' \
around your reference."],
is_schema_error=True)
return properties
@ -188,7 +198,8 @@ class ValidationErrors(Exception):
The errors could have occured either in the schema xor in the properties
Attributes:
is_schema_error: Boolean, either an invalid schema, or invalid properties
is_schema_error: Boolean, either an invalid schema,
or invalid properties
errors: List of ValidationError type objects
"""
@ -210,7 +221,8 @@ class ValidationErrors(Exception):
location = list(error.path)
if location and len(location):
error_message += " at " + str(location)
# If location is empty the error happened at the root of the schema
# If location is empty the error happened at
# the root of the schema
else:
error_message = str(error)
@ -218,7 +230,8 @@ class ValidationErrors(Exception):
return message
def __init__(self, schema_name, template_name, errors, is_schema_error=False):
def __init__(self, schema_name, template_name,
errors, is_schema_error=False):
self.schema_name = schema_name
self.template_name = template_name
self.errors = errors

@ -222,7 +222,8 @@ class SchemaValidationTest(unittest.TestCase):
self.assertIn(INVALID_PROPERTIES, e.message)
self.assertIn("'not a number' is not of type 'integer' at ['one']",
e.message)
self.assertIn("12345 is not of type 'string' at ['alpha']", e.message)
self.assertIn("12345 is not of type 'string' at ['alpha']",
e.message)
def testNumbersValid(self):
schema_name = 'numbers.py.schema'
@ -259,8 +260,8 @@ class SchemaValidationTest(unittest.TestCase):
e.message)
self.assertIn(('0 is less than or equal to the minimum of 0'
" at ['exclusiveMin0']"), e.message)
self.assertIn("11 is greater than the maximum of 10 at ['maximum10']",
e.message)
self.assertIn("11 is greater than the maximum of 10 at \
['maximum10']", e.message)
self.assertIn(('10 is greater than or equal to the maximum of 10'
" at ['exclusiveMax10']"), e.message)
self.assertIn("21 is not a multiple of 2 at ['even']", e.message)
@ -611,7 +612,8 @@ class SchemaValidationTest(unittest.TestCase):
self.assertEqual(2, len(e.errors))
self.assertIn("Invalid schema '%s'" % schema_name, e.message)
self.assertIn("is not of type 'array' at ['imports']", e.message)
self.assertIn("is not of type u'array' at [u'required']", e.message)
self.assertIn("is not of type u'array' at [u'required']",
e.message)
if __name__ == '__main__':
unittest.main()

@ -29,14 +29,15 @@ def ExtendWithDefault(validator_class):
validator_class: A class to add our overridden validators to
Returns:
A validator_class that will set default values and ignore required fields
A validator_class that will set default values
and ignore required fields
"""
validate_properties = validator_class.VALIDATORS['properties']
def SetDefaultsInProperties(validator, user_schema, user_properties,
parent_schema):
SetDefaults(validator, user_schema or {}, user_properties, parent_schema,
validate_properties)
SetDefaults(validator, user_schema or {}, user_properties,
parent_schema, validate_properties)
return jsonschema.validators.extend(
validator_class, {PROPERTIES: SetDefaultsInProperties,
@ -48,12 +49,13 @@ def SetDefaults(validator, user_schema, user_properties, parent_schema,
"""Populate the default values of properties.
Args:
validator: A generator that validates the "properties" keyword of the schema
user_schema: Schema which might define defaults, might be a nested part of
the entire schema file.
validator: A generator that validates the "properties" keyword
of the schema
user_schema: Schema which might define defaults, might be a nested
part of the entire schema file.
user_properties: User provided values which we are setting defaults on
parent_schema: Schema object that contains the schema being evaluated on
this pass, user_schema.
parent_schema: Schema object that contains the schema being
evaluated on this pass, user_schema.
validate_properties: Validator function, called recursively.
"""
@ -70,7 +72,7 @@ def SetDefaults(validator, user_schema, user_properties, parent_schema,
elif DEFAULT in subschema:
user_properties.setdefault(schema_property, subschema[DEFAULT])
# Recursively apply defaults. This is a generator, so we must wrap with list()
# Recursively apply defaults. This is a generator, we must wrap with list()
list(validate_properties(validator, user_schema,
user_properties, parent_schema))
@ -83,7 +85,8 @@ def ResolveReferencedDefault(validator, ref):
ref: The target of the "$ref" keyword
Returns:
The value of the 'default' field found in the referenced schema, or None
The value of the 'default' field found in the referenced schema,
or None
"""
with validator.resolver.resolving(ref) as resolved:
if DEFAULT in resolved:

Loading…
Cancel
Save