Merge pull request #340 from runseb/flake8

Add flake8 test and fix pep8 style in expansion scripts
Matt Butcher 9 years ago
commit b2f1e8b938

@@ -41,7 +41,7 @@ clean:
 	rm -rf bin

 .PHONY: test
-test: build test-style test-unit
+test: build test-style test-unit test-flake8

 ROOTFS := rootfs

@@ -64,6 +64,12 @@ test-style: lint vet
 		echo "gofmt check failed:"; gofmt -e -d -s $(GO_DIRS); exit 1; \
 	fi

+.PHONY: test-flake8
+test-flake8:
+	@echo Running flake8...
+	flake8 expansion
+	@echo ----------------
+
 .PHONY: lint
 lint:
 	@echo Running golint...

@@ -19,6 +19,7 @@ dependencies:
     - export PATH="$HOME/bin:$PATH" GLIDE_HOME="$HOME/.glide"
     - cd $GOPATH/src/$IMPORT_PATH
     - sudo pip install -r expansion/requirements.txt
+    - sudo pip install flake8

 test:
   override:

@@ -39,8 +39,8 @@ def Expand(config, imports=None, env=None, validate_schema=False):
         to their values
     validate_schema: True to run schema validation; False otherwise

   Returns:
-    YAML containing the expanded configuration and its layout, in the following
-    format:
+    YAML containing the expanded configuration and its layout,
+    in the following format:

       config:
         ...

@@ -67,12 +67,12 @@ def _Expand(config, imports=None, env=None, validate_schema=False):
   try:
     yaml_config = yaml.safe_load(config)
   except yaml.scanner.ScannerError as e:
-    # Here we know that YAML parser could not parse the template we've given it.
-    # YAML raises a ScannerError that specifies which file had the problem, as
-    # well as line and column, but since we're giving it the template from
-    # string, error message contains <string>, which is not very helpful on the
-    # user end, so replace it with word "template" and make it obvious that YAML
-    # contains a syntactic error.
+    # Here we know that YAML parser could not parse the template
+    # we've given it. YAML raises a ScannerError that specifies which file
+    # had the problem, as well as line and column, but since we're giving
+    # it the template from string, error message contains <string>, which
+    # is not very helpful on the user end, so replace it with word
+    # "template" and make it obvious that YAML contains a syntactic error.
     msg = str(e).replace('"<string>"', 'template')
     raise Exception('Error parsing YAML: %s' % msg)
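
Note on the rewrapped comment above: when PyYAML scans a bad template passed in as a string, the error mark names the source "<string>", which is what the code rewrites to the word "template". A minimal standalone sketch of that behavior (the broken YAML snippet here is made up):

import yaml

bad_template = 'name: "unterminated'   # invalid YAML: unclosed quote
try:
    yaml.safe_load(bad_template)
except yaml.scanner.ScannerError as e:
    # str(e) includes something like: in "<string>", line 1, column ...
    msg = str(e).replace('"<string>"', 'template')
    print 'Error parsing YAML: %s' % msg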
@@ -81,12 +81,12 @@ def _Expand(config, imports=None, env=None, validate_schema=False):
     return ''

   # If the configuration does not have ':' in it, the yaml_config will be a
-  # string. If this is the case just return the str. The code below it assumes
-  # yaml_config is a map for common cases.
+  # string. If this is the case just return the str. The code below it
+  # assumes yaml_config is a map for common cases.
   if type(yaml_config) is str:
     return yaml_config

-  if not yaml_config.has_key('resources') or yaml_config['resources'] is None:
+  if 'resources' not in yaml_config or yaml_config['resources'] is None:
     yaml_config['resources'] = []

   config = {'resources': []}

@@ -125,11 +125,11 @@ def _ProcessResource(resource, imports, env, validate_schema=False):
     ExpansionError: if there is any error occurred during expansion
   """
   # A resource has to have to a name.
-  if not resource.has_key('name'):
+  if 'name' not in resource:
     raise ExpansionError(resource, 'Resource does not have a name.')

   # A resource has to have a type.
-  if not resource.has_key('type'):
+  if 'type' not in resource:
     raise ExpansionError(resource, 'Resource does not have type defined.')

   config = {'resources': []}
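
Most of the remaining hunks replace dict.has_key() with the in operator; pycodestyle (run by flake8) reports has_key() as W601, and the method no longer exists in Python 3, while in works everywhere. A tiny illustration with a made-up resource dict:

resource = {'name': 'frontend', 'type': 'vm_template.jinja'}   # hypothetical
if 'name' not in resource:     # preferred over: if not resource.has_key('name')
    raise ValueError('Resource does not have a name.')
print 'type' in resource       # True, in both Python 2 and 3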
@@ -139,19 +139,23 @@ def _ProcessResource(resource, imports, env, validate_schema=False):
   if resource['type'] in imports:
     # A template resource, which contains sub-resources.
-    expanded_template = ExpandTemplate(resource, imports, env, validate_schema)
+    expanded_template = ExpandTemplate(resource, imports,
+                                       env, validate_schema)

     if expanded_template['resources']:
-      _ValidateUniqueNames(expanded_template['resources'], resource['type'])
+      _ValidateUniqueNames(expanded_template['resources'],
+                           resource['type'])

       # Process all sub-resources of this template.
       for resource_to_process in expanded_template['resources']:
-        processed_resource = _ProcessResource(resource_to_process, imports, env,
-                                              validate_schema)
+        processed_resource = _ProcessResource(resource_to_process,
+                                              imports, env,
+                                              validate_schema)

-        # Append all sub-resources to the config resources, and the resulting
-        # layout of sub-resources.
-        config['resources'].extend(processed_resource['config']['resources'])
+        # Append all sub-resources to the config resources,
+        # and the resulting layout of sub-resources.
+        config['resources'].extend(processed_resource['config']
+                                   ['resources'])

   # Lazy-initialize resources key here because it is not set for
   # non-template layouts.

@@ -178,8 +182,8 @@ def _ValidateUniqueNames(template_resources, template_name='config'):
     if resource['name'] in names:
       raise ExpansionError(
           resource,
-          'Resource name \'%s\' is not unique in %s.' % (resource['name'],
-                                                         template_name))
+          'Resource name \'%s\' is not unique in %s.'
+          % (resource['name'], template_name))
     names.add(resource['name'])
     # If this resource doesn't have a name, we will report that error later

@@ -210,8 +214,9 @@ def ExpandTemplate(resource, imports, env, validate_schema=False):
         source_file,
         'Unable to find source file %s in imports.' % (source_file))

-  # source_file could be a short version of the template (say github short name)
-  # so we need to potentially map this into the fully resolvable name.
+  # source_file could be a short version of the template
+  # (say github short name) so we need to potentially map this into
+  # the fully resolvable name.
   if 'path' in imports[source_file] and imports[source_file]['path']:
     path = imports[source_file]['path']
@@ -260,12 +265,16 @@ def ExpandJinja(file_name, source_template, resource, imports):
   """Render the jinja template using jinja libraries.

   Args:
-    file_name: string, the file name.
-    source_template: string, the content of jinja file to be render
-    resource: resource object, the resource that contains parameters to the
-        jinja file
-    imports: map from string to map {name, path}, the map of imported files names
-        fully resolved path and contents
+    file_name:
+        string, the file name.
+    source_template:
+        string, the content of jinja file to be render
+    resource:
+        resource object, the resource that contains parameters to the
+        jinja file
+    imports:
+        map from string to map {name, path}, the map of imported
+        files names fully resolved path and contents
   Returns:
     The final expanded template
   Raises:

@@ -277,8 +286,8 @@ def ExpandJinja(file_name, source_template, resource, imports):
   template = env.from_string(source_template)

-  if (resource.has_key('properties') or resource.has_key('env') or
-      resource.has_key('imports')):
+  if ('properties' in resource or 'env' in resource or
+      'imports' in resource):
     return template.render(resource)
   else:
     return template.render()
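
For context on ExpandJinja: the template text is compiled with env.from_string() and rendered against the resource when it carries properties, env, or imports. A rough standalone sketch of that flow (the Environment options, template text, and resource below are illustrative, not taken from this file):

import jinja2

env = jinja2.Environment(undefined=jinja2.StrictUndefined)   # assumed setup
template = env.from_string('zone: {{ properties["zone"] }}')

resource = {'properties': {'zone': 'us-central1-a'}}          # hypothetical
if ('properties' in resource or 'env' in resource or 'imports' in resource):
    print template.render(resource)    # -> zone: us-central1-a
else:
    print template.render()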
@@ -322,17 +331,17 @@ class PythonEvaluationContext(object):
   """

   def __init__(self, params):
-    if params.has_key('properties'):
+    if 'properties' in params:
       self.properties = params['properties']
     else:
       self.properties = None

-    if params.has_key('imports'):
+    if 'imports' in params:
       self.imports = params['imports']
     else:
       self.imports = None

-    if params.has_key('env'):
+    if 'env' in params:
       self.env = params['env']
     else:
       self.env = None

@@ -361,7 +370,8 @@ def main():
   imports = {}
   while idx < len(sys.argv):
     if idx + 1 == len(sys.argv):
-      print >>sys.stderr, 'Invalid import definition at argv pos %d' % idx
+      print >>sys.stderr, 'Invalid import definition at argv pos %d' \
+          % idx
       sys.exit(1)
     name = sys.argv[idx]
     path = sys.argv[idx + 1]

@@ -193,7 +193,8 @@ class ExpansionTest(unittest.TestCase):
     expanded_template = expansion.Expand(
         str(yaml_template), imports)

-    result_file = ReadTestFile('jinja_template_with_inlinedfile_result.yaml')
+    result_file = \
+        ReadTestFile('jinja_template_with_inlinedfile_result.yaml')

     self.assertEquals(result_file, expanded_template)

@@ -303,7 +304,8 @@ class ExpansionTest(unittest.TestCase):
           template, imports)
       self.fail('Expansion should fail')
     except expansion.ExpansionError as e:
-      self.assertTrue('not unique in duplicate_names_in_subtemplates.jinja'
+      self.assertTrue('not unique in \
+duplicate_names_in_subtemplates.jinja'
                       in e.message)

   def testDuplicateNamesMixedLevel(self):

@@ -335,7 +337,8 @@ class ExpansionTest(unittest.TestCase):
     result_file = ReadTestFile('duplicate_names_parent_child_result.yaml')
     self.assertEquals(result_file, expanded_template)
-    # Note, this template will fail in the frontend for duplicate resource names
+    # Note, this template will fail in the frontend
+    # for duplicate resource names

   def testTemplateReturnsEmpty(self):
     template = ReadTestFile('no_resources.yaml')

@@ -377,7 +380,8 @@ class ExpansionTest(unittest.TestCase):
     imports = {}
     imports['python_schema.py'] = ReadTestFile('python_schema.py')
-    imports['python_schema.py.schema'] = ReadTestFile('python_schema.py.schema')
+    imports['python_schema.py.schema'] = \
+        ReadTestFile('python_schema.py.schema')

     env = {'project': 'my-project'}

@@ -412,7 +416,8 @@ class ExpansionTest(unittest.TestCase):
     template = ReadTestFile('jinja_unresolved.yaml')
     imports = {}
-    imports['jinja_unresolved.jinja'] = ReadTestFile('jinja_unresolved.jinja')
+    imports['jinja_unresolved.jinja'] = \
+        ReadTestFile('jinja_unresolved.jinja')

     try:
       expansion.Expand(

@@ -30,7 +30,8 @@ class AllowedImportsLoader(object):
     return '%s.py' % name.replace('.', '/')

   def load_module(self, name, etc=None):  # pylint: disable=unused-argument
-    """Implements loader.load_module() for loading user provided imports."""
+    """Implements loader.load_module()
+    for loading user provided imports."""
     if name in AllowedImportsLoader.user_modules:
       return AllowedImportsLoader.user_modules[name]

@@ -38,7 +39,8 @@ class AllowedImportsLoader(object):
     module = imp.new_module(name)

     try:
-      data = FileAccessRedirector.allowed_imports[self.get_filename(name)]
+      data = \
+          FileAccessRedirector.allowed_imports[self.get_filename(name)]
     except Exception:  # pylint: disable=broad-except
       return None

@@ -56,10 +58,12 @@ class AllowedImportsLoader(object):
       for child in FileAccessRedirector.parents[name]:
         full_name = name + '.' + child
         self.load_module(full_name)
-        # If we have helpers/common.py package, then for it to be successfully
-        # resolved helpers.common name must resolvable, hence, once we load
-        # child package we attach it to parent module immeadiately.
-        module.__dict__[child] = AllowedImportsLoader.user_modules[full_name]
+        # If we have helpers/common.py package,
+        # then for it to be successfully resolved helpers.common name
+        # must resolvable, hence, once we load
+        # child package we attach it to parent module immeadiately.
+        module.__dict__[child] = \
+            AllowedImportsLoader.user_modules[full_name]

     return module

@@ -80,8 +84,9 @@ def process_imports(imports):
   Copies the imports and then for all the hierarchical packages creates
   dummy entries for those parent packages, so that hierarchical imports
   can be resolved. In the process parent child relationship map is built.
-  For example: helpers/extra/common.py will generate helpers, helpers.extra
-  and helpers.extra.common packages along with related .py files.
+  For example: helpers/extra/common.py will generate helpers,
+  helpers.extra and helpers.extra.common packages
+  along with related .py files.

   Args:
     imports: map of files to their relative paths.
@@ -102,8 +107,8 @@ def process_imports(imports):
       continue
     # Normalize paths and trim .py extension, if any.
     normalized = os.path.splitext(os.path.normpath(path))[0]
-    # If this is actually a path and not an absolute name, split it and process
-    # the hierarchical packages.
+    # If this is actually a path and not an absolute name,
+    # split it and process the hierarchical packages.
     if sep in normalized:
       parts = normalized.split(sep)
       # Create dummy file entries for package levels and also retain

@@ -111,14 +116,16 @@ def process_imports(imports):
       for i in xrange(0, len(parts)-1):
         # Generate the partial package path.
         path = os.path.join(parts[0], *parts[1:i+1])
-        # __init__.py file might have been provided and non-empty by the user.
+        # __init__.py file might have been provided and
+        # non-empty by the user.
         if path not in ret:
-          # exec requires at least new line to be present to successfully
-          # compile the file.
+          # exec requires at least new line to be present
+          # to successfully compile the file.
           ret[path + '.py'] = '\n'
         else:
-          # To simplify our code, we'll store both versions in that case, since
-          # loader code expects files with .py extension.
+          # To simplify our code, we'll store both versions
+          # in that case, since loader code expects files
+          # with .py extension.
           ret[path + '.py'] = ret[path]
         # Generate fully qualified package name.
         fqpn = '.'.join(parts[0:i+1])
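
To make the docstring example above concrete, here is a hypothetical trace of what this loop produces for a single nested import, helpers/extra/common.py (POSIX-style paths):

import os

parts = 'helpers/extra/common'.split('/')
for i in xrange(0, len(parts) - 1):
    path = os.path.join(parts[0], *parts[1:i + 1])  # 'helpers', then 'helpers/extra'
    fqpn = '.'.join(parts[0:i + 1])                 # 'helpers', then 'helpers.extra'
    print '%s.py -> package %s' % (path, fqpn)
# plus helpers/extra/common.py itself, importable as helpers.extra.common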
@@ -132,8 +139,8 @@ def process_imports(imports):

 class FileAccessRedirector(object):
   # Dictionary with user provided imports.
   allowed_imports = {}
-  # Dictionary that shows parent child relationships, key is the parent, value
-  # is the list of child packages.
+  # Dictionary that shows parent child relationships,
+  # key is the parent, value is the list of child packages.
   parents = {}

   @staticmethod

@@ -153,4 +160,3 @@ class FileAccessRedirector(object):
       # Prepend our module handler before standard ones.
       sys.meta_path = [AllowedImportsHandler()] + sys.meta_path
-
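
Background for the loader code being reformatted here: the sandbox serves user-provided template sources through a sys.meta_path finder/loader instead of the filesystem. A stripped-down sketch of that mechanism (Python 2 style to match this codebase; the module name and source text are invented):

import imp
import sys

SOURCES = {'fake_helper': 'def greet():\n    return "hello"\n'}   # hypothetical

class DictImporter(object):
    def find_module(self, name, path=None):
        return self if name in SOURCES else None     # only claim our modules

    def load_module(self, name):
        if name in sys.modules:
            return sys.modules[name]
        module = imp.new_module(name)
        exec SOURCES[name] in module.__dict__        # compile the stored source
        sys.modules[name] = module
        return module

sys.meta_path = [DictImporter()] + sys.meta_path     # consulted before default importers
import fake_helper
print fake_helper.greet()                            # hello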

@@ -65,11 +65,14 @@ def _ValidateSchema(schema, validating_imports, schema_name, template_name):
   """Validate that the passed in schema file is correctly formatted.

   Args:
-    schema: contents of the schema file
-    validating_imports: boolean, if we should validate the 'imports'
-        section of the schema
-    schema_name: name of the schema file to validate
-    template_name: name of the template whose properties are being validated
+    schema:
+        contents of the schema file
+    validating_imports:
+        boolean, if we should validate the 'imports' section of the schema
+    schema_name:
+        name of the schema file to validate
+    template_name:
+        name of the template whose properties are being validated

   Raises:
     ValidationErrors: A list of ValidationError errors that occured when

@@ -98,10 +101,14 @@ def Validate(properties, schema_name, template_name, imports):
   """Given a set of properties, validates it against the given schema.

   Args:
-    properties: dict, the properties to be validated
-    schema_name: name of the schema file to validate
-    template_name: name of the template whose properties are being validated
-    imports: the map of imported files names to file contents
+    properties:
+        dict, the properties to be validated
+    schema_name:
+        name of the schema file to validate
+    template_name:
+        name of the template whose properties are being validated
+    imports:
+        the map of imported files names to file contents

   Returns:
     Dict containing the validated properties, with defaults filled in

@@ -113,7 +120,8 @@ def Validate(properties, schema_name, template_name, imports):
   """
   if schema_name not in imports:
     raise ValidationErrors(schema_name, template_name,
-                           ["Could not find schema file '%s'." % schema_name])
+                           ["Could not find schema file '%s'." %
+                            schema_name])

   raw_schema = imports[schema_name]

@@ -160,7 +168,8 @@ def Validate(properties, schema_name, template_name, imports):
     # 1) Use DEFAULT_SETTER to set default values in the user's properties
     # 2) Use the unmodified VALIDATOR to report all of the errors

-    # Calling iter_errors mutates properties in place, adding default values.
+    # Calling iter_errors mutates properties in place,
+    # adding default values.
     # You must call list()! This is a generator, not a function!
     list(DEFAULT_SETTER(schema).iter_errors(properties))

@@ -176,7 +185,8 @@ def Validate(properties, schema_name, template_name, imports):
   except TypeError as e:
     raise ValidationErrors(
         schema_name, template_name,
-        [e, "Perhaps you forgot to put 'quotes' around your reference."],
+        [e, "Perhaps you forgot to put 'quotes' \
+around your reference."],
         is_schema_error=True)

   return properties

@@ -188,7 +198,8 @@ class ValidationErrors(Exception):
   The errors could have occured either in the schema xor in the properties

   Attributes:
-    is_schema_error: Boolean, either an invalid schema, or invalid properties
+    is_schema_error: Boolean, either an invalid schema,
+        or invalid properties
     errors: List of ValidationError type objects
   """

@@ -210,7 +221,8 @@ class ValidationErrors(Exception):
         location = list(error.path)
         if location and len(location):
           error_message += " at " + str(location)
-        # If location is empty the error happened at the root of the schema
+        # If location is empty the error happened at
+        # the root of the schema
       else:
         error_message = str(error)

@@ -218,7 +230,8 @@ class ValidationErrors(Exception):
     return message

-  def __init__(self, schema_name, template_name, errors, is_schema_error=False):
+  def __init__(self, schema_name, template_name,
+               errors, is_schema_error=False):
     self.schema_name = schema_name
     self.template_name = template_name
     self.errors = errors

@@ -222,7 +222,8 @@ class SchemaValidationTest(unittest.TestCase):
     self.assertIn(INVALID_PROPERTIES, e.message)
     self.assertIn("'not a number' is not of type 'integer' at ['one']",
                   e.message)
-    self.assertIn("12345 is not of type 'string' at ['alpha']", e.message)
+    self.assertIn("12345 is not of type 'string' at ['alpha']",
+                  e.message)

   def testNumbersValid(self):
     schema_name = 'numbers.py.schema'

@@ -259,8 +260,8 @@ class SchemaValidationTest(unittest.TestCase):
                   e.message)
     self.assertIn(('0 is less than or equal to the minimum of 0'
                    " at ['exclusiveMin0']"), e.message)
-    self.assertIn("11 is greater than the maximum of 10 at ['maximum10']",
-                  e.message)
+    self.assertIn("11 is greater than the maximum of 10 at \
+['maximum10']", e.message)
     self.assertIn(('10 is greater than or equal to the maximum of 10'
                    " at ['exclusiveMax10']"), e.message)
     self.assertIn("21 is not a multiple of 2 at ['even']", e.message)

@@ -611,7 +612,8 @@ class SchemaValidationTest(unittest.TestCase):
       self.assertEqual(2, len(e.errors))
       self.assertIn("Invalid schema '%s'" % schema_name, e.message)
       self.assertIn("is not of type 'array' at ['imports']", e.message)
-      self.assertIn("is not of type u'array' at [u'required']", e.message)
+      self.assertIn("is not of type u'array' at [u'required']",
+                    e.message)

 if __name__ == '__main__':
   unittest.main()

@@ -29,14 +29,15 @@ def ExtendWithDefault(validator_class):
     validator_class: A class to add our overridden validators to

   Returns:
-    A validator_class that will set default values and ignore required fields
+    A validator_class that will set default values
+    and ignore required fields
   """
   validate_properties = validator_class.VALIDATORS['properties']

   def SetDefaultsInProperties(validator, user_schema, user_properties,
                               parent_schema):
-    SetDefaults(validator, user_schema or {}, user_properties, parent_schema,
-                validate_properties)
+    SetDefaults(validator, user_schema or {}, user_properties,
+                parent_schema, validate_properties)

   return jsonschema.validators.extend(
       validator_class, {PROPERTIES: SetDefaultsInProperties,

@@ -48,12 +49,13 @@ def SetDefaults(validator, user_schema, user_properties, parent_schema,
   """Populate the default values of properties.

   Args:
-    validator: A generator that validates the "properties" keyword of the schema
-    user_schema: Schema which might define defaults, might be a nested part of
-        the entire schema file.
+    validator: A generator that validates the "properties" keyword
+        of the schema
+    user_schema: Schema which might define defaults, might be a nested
+        part of the entire schema file.
     user_properties: User provided values which we are setting defaults on
-    parent_schema: Schema object that contains the schema being evaluated on
-        this pass, user_schema.
+    parent_schema: Schema object that contains the schema being
+        evaluated on this pass, user_schema.
     validate_properties: Validator function, called recursively.
   """

@@ -70,7 +72,7 @@ def SetDefaults(validator, user_schema, user_properties, parent_schema,
     elif DEFAULT in subschema:
       user_properties.setdefault(schema_property, subschema[DEFAULT])

-  # Recursively apply defaults. This is a generator, so we must wrap with list()
+  # Recursively apply defaults. This is a generator, we must wrap with list()
   list(validate_properties(validator, user_schema,
                            user_properties, parent_schema))
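
The code being re-wrapped here follows jsonschema's documented "extend with defaults" recipe: an overridden 'properties' validator writes schema defaults into the instance as a side effect, and since validators are generators they must be drained (hence the list() calls). A condensed, self-contained sketch of the same pattern (generic names and a made-up schema; not this module's actual SetDefaults, which also resolves $ref defaults):

import jsonschema

def extend_with_default(validator_class):
    validate_properties = validator_class.VALIDATORS['properties']

    def set_defaults(validator, properties, instance, schema):
        for prop, subschema in (properties or {}).iteritems():
            if 'default' in subschema:
                instance.setdefault(prop, subschema['default'])
        # Still a generator: re-yield so normal validation keeps running.
        for error in validate_properties(validator, properties,
                                         instance, schema):
            yield error

    return jsonschema.validators.extend(
        validator_class, {'properties': set_defaults})

DefaultSetter = extend_with_default(jsonschema.Draft4Validator)
schema = {'properties': {'zone': {'type': 'string',
                                  'default': 'us-central1-a'}}}   # made up
properties = {}
# iter_errors is a generator; list() forces it to run and fill in the default.
list(DefaultSetter(schema).iter_errors(properties))
print properties   # {'zone': 'us-central1-a'}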
@@ -83,7 +85,8 @@ def ResolveReferencedDefault(validator, ref):
     ref: The target of the "$ref" keyword

   Returns:
-    The value of the 'default' field found in the referenced schema, or None
+    The value of the 'default' field found in the referenced schema,
+    or None
   """
   with validator.resolver.resolving(ref) as resolved:
     if DEFAULT in resolved:
