6 changes: 3 additions & 3 deletions genData100.sh
@@ -54,9 +54,9 @@ popd
 # Verify that schema files are valid
 pushd schema
 
-python3 check_schemas.py $pwd
+python3 check_schemas.py --schema_base $PWD
 # And check generated data against schemas.
-python3 check_generated_data.py ../$TEMP_DIR/testData
+python3 check_generated_data.py --schema_base ../$TEMP_DIR/testData
 popd
 
 ##########
@@ -124,7 +124,7 @@ popd
 
 # Verify that test output matches schema.
 pushd schema
-python3 check_test_output.py ../$TEMP_DIR/testOutput
+python3 check_test_output.py --schema_base ../$TEMP_DIR/testOutput
 popd
 
 # Verify everything
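Note on the change from $pwd to $PWD above, which also appears in generateDataAndRun.sh below: Bash variable names are case-sensitive and $pwd is normally unset, so the old invocation expanded to "python3 check_schemas.py" with no path argument at all. $PWD is the shell's built-in working-directory variable, so the corrected call actually passes the current schema directory to the checker.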
7 changes: 4 additions & 3 deletions generateDataAndRun.sh
@@ -56,9 +56,10 @@ popd
 
 # Verify that schema files are valid
 pushd schema
-python3 check_schemas.py $pwd
+
+python3 check_schemas.py --schema_base $PWD
 # And check generated data against schemas.
-python3 check_generated_data.py ../$TEMP_DIR/testData
+python3 check_generated_data.py --schema_base ../$TEMP_DIR/testData
 popd
 
 ##########
@@ -138,7 +139,7 @@ popd
 
 # Verify that test output matches schema.
 pushd schema
-python3 check_test_output.py ../$TEMP_DIR/testOutput
+python3 check_test_output.py --schema_base ../$TEMP_DIR/testOutput
 popd
 
 # Verify everything
16 changes: 13 additions & 3 deletions schema/check_generated_data.py
@@ -7,6 +7,7 @@
 
 from jsonschema import Draft7Validator, ValidationError
 
+import argparse
 import logging
 import logging.config
 import os.path
@@ -19,11 +20,19 @@
 def main(args):
     logging.config.fileConfig("../logging.conf")
 
+    arg_parser = argparse.ArgumentParser(description='Schema check arguments')
+    arg_parser.add_argument('schema_base', help='Where to find the files to validate')
+    arg_parser.add_argument(
+        '--run_serial', action='store_true',
+        help='Set to process serially. Parallel is the default.')
+
+    schema_options = arg_parser.parse_args(args[2:])
+
     if len(args) <= 1:
         logging.error('Please specify the path to test data directory')
         return
     else:
-        test_data_path = args[1]
+        test_data_path = schema_options.schema_base
 
     logging.debug('TEST DATA PATH = %s', test_data_path)
 
@@ -44,12 +53,14 @@ def main(args):
 
     validator = schema_validator.ConformanceSchemaValidator()
 
+    validator.run_serial = schema_options.run_serial
+
     # Todo: use setters to initialize validator
     validator.schema_base = '.'
     validator.test_data_base = test_data_path
     validator.icu_versions = sorted(icu_versions)
     validator.test_types = ALL_TEST_TYPES
-    validator.debug = 1
+    validator.debug = None
 
     all_results = validator.validate_test_data_with_schema()
     logging.info(' %d results for generated test data', len(all_results))
@@ -101,6 +112,5 @@ def main(args):
     logging.info("All %d generated test data files match with schema", schema_count)
 
 
-
 if __name__ == "__main__":
     main(sys.argv)
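Note on the parsing above: parse_args(args[2:]) slices off both argv[0] (the script name) and argv[1], so the literal --schema_base token that the shell scripts pass is never seen by argparse; the remaining value binds to the positional schema_base argument. A minimal runnable sketch, with a hypothetical path standing in for ../$TEMP_DIR/testData:

import argparse

argv = ['check_generated_data.py', '--schema_base', '../tmp/testData']  # stand-in for sys.argv
arg_parser = argparse.ArgumentParser(description='Schema check arguments')
arg_parser.add_argument('schema_base', help='Where to find the files to validate')
arg_parser.add_argument('--run_serial', action='store_true',
                        help='Set to process serially. Parallel is the default.')

schema_options = arg_parser.parse_args(argv[2:])  # argv[2:] == ['../tmp/testData']
print(schema_options.schema_base)                 # prints: ../tmp/testData
print(schema_options.run_serial)                  # prints: False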
50 changes: 31 additions & 19 deletions schema/check_schemas.py
@@ -1,6 +1,7 @@
 # Schema checker for the schemas in Conformance Testing
 # For ICU Conformance project, Data Driven Testing
 
+import argparse
 from datetime import datetime
 import glob
 import json
@@ -15,7 +16,6 @@
 import schema_validator
 from schema_files import ALL_TEST_TYPES
 
-
 class ValidateSchema:
     def __init__(self, schema_base='.'):
         self.schema_base = schema_base
@@ -60,37 +60,49 @@ def save_schema_validation_summary(self, validation_status):
         return output_filename
 
 
-def parallel_validate_schema(validator, file_names):
-    num_processors = multiprocessing.cpu_count()
-    logging.info('Schema validation: %s processors for %s schema validations', num_processors, len(file_names))
+def validate_all_schema(validator, file_names):
+    if validator.options.run_serial:
+        results = []
+        logging.info('Schema serial validation of %s files!',
+                     len(file_names))
+        return [validator.validate_schema_file(file) for file in file_names]
+    else:
+        num_processors = multiprocessing.cpu_count()
+        logging.info('Schema parallel validation: %s processors for %s schema validations',
+                     num_processors, len(file_names))
 
-    processor_pool = multiprocessing.Pool(num_processors)
-    # How to get all the results
-    result = None
-    try:
-        result = processor_pool.map(validator.validate_schema_file, file_names)
-    except multiprocessing.pool.MaybeEncodingError as error:
-        pass
-    return result
+        processor_pool = multiprocessing.Pool(num_processors)
+        # How to get all the results
+        result = None
+        try:
+            result = processor_pool.map(validator.validate_schema_file, file_names)
+        except multiprocessing.pool.MaybeEncodingError as error:
+            pass
+        return result
 
 
 def main(args):
     logger = logging.Logger("TEST SCHEMAS LOGGER")
     logger.setLevel(logging.INFO)
     logger.info('+++ Test JSON Schema files')
 
+    arg_parser = argparse.ArgumentParser(description='Schema check arguments')
+    arg_parser.add_argument('schema_base', help='Where to find the files to validate')
+    arg_parser.add_argument(
+        '--run_serial', action='store_true',
+        help='Set to process serially. Parallel is the default.')
+
     validator = schema_validator.ConformanceSchemaValidator()
 
     # Todo: use setters to initialize validator
     validator.schema_base = '.'
+    validator.options = arg_parser.parse_args(args[2:])
 
-    if len(args) > 1:
-        schema_base = args[1]
-    else:
-        schema_base = '.'
+    schema_base = validator.options.schema_base
     schema_errors = []
     schema_count = 0
 
-    val_schema = ValidateSchema(schema_base)
+    val_schema = ValidateSchema(validator.options.schema_base)
 
     # An array of information to be reported on the main DDT page
     validation_status = []
@@ -101,7 +113,7 @@ def main(args):
         schema_file_names = glob.glob(schema_test_json_files)
         schema_file_paths.extend(schema_file_names)
 
-    results = parallel_validate_schema(validator, schema_file_paths)
+    results = validate_all_schema(validator, schema_file_paths)
     if not results:
         # This should stop the whole thing!
         exit(1)
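For reference, a self-contained sketch of the serial/parallel split that validate_all_schema introduces; validate_one here is a hypothetical stand-in for validator.validate_schema_file, and the pool logic mirrors the parallel branch above:

import multiprocessing

def validate_one(file_name):
    # Placeholder validation: report every file as passing.
    return file_name, True

def validate_all(file_names, run_serial=False):
    if run_serial:
        # Serial branch: runs in-process, one file at a time.
        return [validate_one(f) for f in file_names]
    # Parallel branch: one worker per CPU; pool.map returns results in input order.
    with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
        return pool.map(validate_one, file_names)

if __name__ == '__main__':
    print(validate_all(['a.json', 'b.json'], run_serial=True))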
21 changes: 15 additions & 6 deletions schema/check_test_output.py
@@ -5,6 +5,7 @@
 import glob
 import json
 
+import argparse
 import logging
 import logging.config
 import os.path
@@ -18,12 +19,16 @@
 def main(args):
     logging.config.fileConfig("../logging.conf")
 
-    if len(args) <= 1:
-        logging.error('Please specify the path to the test output directory')
-        sys.exit(1)
-    else:
-        test_output_path = args[1]
+    arg_parser = argparse.ArgumentParser(description='Schema check arguments')
+    arg_parser.add_argument('schema_base', help='Where to find the files to validate')
+    arg_parser.add_argument(
+        '--run_serial', action='store_true',
+        help='Set to process serially. Parallel is the default.')
+
+    schema_options = arg_parser.parse_args(args[2:])
 
+    # file_base + output_path
+    test_output_path = schema_options.schema_base
     logging.debug('TEST OUTPUT PATH = %s', test_output_path)
 
     logger = logging.Logger("Checking Test Data vs. Schemas LOGGER")
@@ -43,6 +48,7 @@ def main(args):
         executor_set.add(os.path.basename(path))
 
     icu_path = os.path.join(test_output_path, '*', 'icu*')
+
     icu_dirs = glob.glob(icu_path)
 
     test_output_json_path = os.path.join(test_output_path, '*', 'icu*', '*.json')
@@ -63,8 +69,11 @@ def main(args):
     logging.debug('ICU directories = %s', icu_versions)
     logging.debug('test types = %s', ALL_TEST_TYPES)
 
+
     validator = schema_validator.ConformanceSchemaValidator()
-    # Todo: use setters to initialize validator
+
+    # TODO: use setters to initialize validator
+    validator.run_serial = schema_options.run_serial
     validator.schema_base = '.'
     validator.test_output_base = test_output_path
     validator.test_data_base = None
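A small runnable sketch of the icu* directory discovery shown in this file's context lines (the layout and path are illustrative; the real test_output_path comes from the schema_base argument):

import glob
import os.path

test_output_path = '../tmp/testOutput'                  # hypothetical location
icu_path = os.path.join(test_output_path, '*', 'icu*')  # e.g. matches node/icu74
icu_dirs = glob.glob(icu_path)                          # only paths that actually exist
print(icu_dirs)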