Merged (changes from 1 commit)
36 changes: 36 additions & 0 deletions .github/workflows/pull_request_test_run_update_flow.yml
@@ -0,0 +1,36 @@
name: Pull Request Test Update Workflow
on:
pull_request:
types:
- opened
- synchronize
- reopened

jobs:
test_run:
name: Test that update contributions workflow runs
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4

- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: 3.x

- name: Install dependencies
run: pip install -r requirements.txt

- name: run unit tests
run: pytest

- name: fetch updates on contributions
run: python -u scripts/fetch_updates.py

- name: write contribs.txt file
run: python -u scripts/to_contribs_txt.py

- name: write source json files
run: python -u scripts/to_sources_jsons.py
29 changes: 19 additions & 10 deletions scripts/add_new_contribution_to_yaml.py
@@ -8,22 +8,31 @@

from ruamel.yaml import YAML


def split_categories(categories):
categories = sorted(categories.replace('"', '').split(','))
categories = [category.strip() for category in categories]
return categories


def postprocess_properties(properties_dict):
if 'categories' in properties_dict and properties_dict['categories']:
properties_dict['categories'] = split_categories(properties_dict['categories'])
else:
properties_dict['categories'] = None

# add download
if 'download' not in properties_dict:
properties_dict['download'] = properties_dict['source'][:properties_dict['source'].rfind('.')] + '.zip'


if __name__ == "__main__":
if len(argv) < 2:
print("script takes json string as argument.\nStopping...")
raise ValueError

props = json.loads(argv[1])
# process category list
if 'categories' in props and props['categories']:
props['categories'] = sorted(props['categories'].replace('"', '').split(','))
props['categories'] = [category.strip() for category in props['categories']]
else:
props['categories'] = None

# add download
if 'download' not in props:
props['download'] = props['source'][:props['source'].rfind('.')] + '.zip'
postprocess_properties(props)

# open database
database_file = pathlib.Path(__file__).parent.parent / 'contributions.yaml'
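As a quick illustration of the two helpers above, here is a short sketch with invented values (the URL and the categories string are not from the PR); the expected output follows directly from split_categories and postprocess_properties as defined in this diff:

    # Illustrative only; the URL and categories below are invented values.
    props = {
        'source': 'https://example.org/mylib/mylib.txt',
        'categories': '"GUI,Animation"',
    }
    postprocess_properties(props)
    print(props['categories'])  # ['Animation', 'GUI']  (quotes stripped, split, sorted)
    print(props['download'])    # https://example.org/mylib/mylib.zip  (derived from 'source')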
23 changes: 13 additions & 10 deletions scripts/fetch_updates.py
@@ -78,6 +78,18 @@ def process_contribution(item):
return index, contribution


def process_all(contributions_list):
total = len(contributions_list)
completed = 0
print(f"Starting processing of {total} contributions...")

with Pool(processes=256) as pool:
for index, contribution in pool.imap_unordered(process_contribution, enumerate(contributions_list)):
contributions_list[index] = contribution
completed += 1
print(f"Progress: {completed}/{total} ({(completed / total * 100):.1f}%)")


if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--index')
@@ -97,16 +109,7 @@ def process_contribution(item):
contributions_list = data['contributions']

if index == 'all':
total = len(contributions_list)
completed = 0
print(f"Starting processing of {total} contributions...")

with Pool(processes=256) as pool:
for index, contribution in pool.imap_unordered(process_contribution, enumerate(contributions_list)):
contributions_list[index] = contribution
completed += 1
print(f"Progress: {completed}/{total} ({(completed/total*100):.1f}%)")

process_all(contributions_list)
print("All processing complete")
else:
# update only contribution with id==index
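The write-back by index in process_all matters because imap_unordered yields results in completion order, not submission order; that is why process_contribution returns its index alongside the updated contribution. A standalone sketch of the same pattern with a trivial stand-in worker (not the project code):

    from multiprocessing import Pool

    def work(item):
        # Each worker receives an (index, value) pair and returns the index with
        # its result, so the caller can write it back into the right slot.
        index, value = item
        return index, value.upper()

    if __name__ == "__main__":
        values = ["a", "b", "c"]
        with Pool(processes=3) as pool:
            for index, result in pool.imap_unordered(work, enumerate(values)):
                values[index] = result
        print(values)  # ['A', 'B', 'C'] regardless of completion order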
6 changes: 3 additions & 3 deletions scripts/parse_and_validate_properties_txt.py
@@ -75,19 +75,19 @@ def validate_existing(properties_dict):
# validation on existing contribution is weaker
properties = PropertiesExisting.model_validate(properties_dict)

return properties.model_dump(exclude_unset=True)
return properties.model_dump()

def validate_new(properties_dict):
# new contribution has stronger validation
properties = PropertiesBase.model_validate(properties_dict)

return properties.model_dump(exclude_unset=True)
return properties.model_dump()

def validate_new_library(properties_dict):
# new contribution has stronger validation
properties = LibraryPropertiesNew.model_validate(properties_dict)

return properties.model_dump(exclude_unset=True)
return properties.model_dump()

def set_output(output_object):
with open(os.environ['GITHUB_OUTPUT'],'a') as f:
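Replacing model_dump(exclude_unset=True) with model_dump() is a behaviour change: fields that were never explicitly provided but have defaults now appear in the dumped dict. A standalone pydantic v2 sketch with a toy model (not the project's actual models) showing the difference:

    from pydantic import BaseModel

    class Props(BaseModel):
        name: str
        minRevision: int = 0  # defaulted field, never set below

    p = Props.model_validate({'name': 'MyLib'})
    print(p.model_dump(exclude_unset=True))  # {'name': 'MyLib'}
    print(p.model_dump())                    # {'name': 'MyLib', 'minRevision': 0}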
63 changes: 37 additions & 26 deletions scripts/to_contribs_txt.py
@@ -6,6 +6,7 @@
import pathlib
import shutil
from collections import defaultdict
from typing import List

from utils import get_valid_contributions

@@ -45,6 +46,40 @@ def read_contribs_text(filepath):
return contribs_list


def preprocess_contributions() -> List:
all_contributions = get_valid_contributions()

# sort contributions list by type
def sort_key(d):
return type_list.index(d['type'])
all_contributions = sorted(all_contributions, key=sort_key)

return all_contributions


def write_contribs(all_contributions, fh):
for contribution in all_contributions:
fh.write(contribution['type'] + '\n')
for field in contribs_fields_list:
if field in contribution:
if field == 'id':
fh.write(f'{field}={contribution[field]:03}\n')
elif field == 'categories':
if contribution['type'] == 'library':
fh.write(f'{field}={",".join(contribution[field]) if contribution[field] else ""}\n')
else:
# categories are only relevant for libraries, except for examples with "Books" as category
if contribution[field] and 'Books' in contribution[field]:
fh.write(f'{field}={",".join(contribution[field]) if contribution[field] else ""}\n')
else:
fh.write(f'{field}=\n')
elif field == 'compatibleModesList':
fh.write(f'modes={contribution[field]}\n')
else:
fh.write(f'{field}={"" if contribution[field] is None else contribution[field]}\n')
Review comment on lines +64 to +79: I think using a match statement on field could potentially improve readability. It could also help to put that logic into a write_field() helper. Just a non-blocking suggestion!

Author reply: Thanks, I'll put this comment into an issue, as a possible future improvement to the code.

fh.write('\n')


if __name__ == "__main__":
pde_folder = pathlib.Path(__file__).parent.parent / 'pde/'
# remove sources folder if it already exists
@@ -54,34 +89,10 @@ def read_contribs_text(filepath):

contribs_text_file = pde_folder / 'contribs.txt'

contributions_list = get_valid_contributions()

# sort contributions list by type
def sort_key(d):
return type_list.index(d['type'])
contributions_list = sorted(contributions_list, key=sort_key)
contributions_list = preprocess_contributions()

# write contribs.txt file
with open(contribs_text_file, 'w+') as f:
for contribution in contributions_list:
f.write(contribution['type']+'\n')
for field in contribs_fields_list:
if field in contribution:
if field == 'id':
f.write(f'{field}={contribution[field]:03}\n')
elif field == 'categories':
if contribution['type'] == 'library':
f.write(f'{field}={",".join(contribution[field]) if contribution[field] else ""}\n')
else:
# categories are only relevant for libraries, except for examples with "Books" as category
if contribution[field] and 'Books' in contribution[field]:
f.write(f'{field}={",".join(contribution[field]) if contribution[field] else ""}\n')
else:
f.write(f'{field}=\n')
elif field == 'compatibleModesList':
f.write(f'modes={contribution[field]}\n')
else:
f.write(f'{field}={"" if contribution[field] is None else contribution[field]}\n')
f.write('\n')
write_contribs(contributions_list, f)
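For reference, the write_field() helper suggested in the review comment above could look roughly like the sketch below (requires Python 3.10+ for match; this is not part of the PR, just one possible shape of the refactor, assuming the field is already known to be present in the contribution, as in the loop):

    def write_field(fh, field, contribution):
        # Hypothetical helper sketched from the review suggestion; it mirrors the
        # branches of the loop in write_contribs without changing behaviour.
        match field:
            case 'id':
                fh.write(f'{field}={contribution[field]:03}\n')
            case 'categories':
                cats = contribution[field]
                if contribution['type'] == 'library' or (cats and 'Books' in cats):
                    fh.write(f'{field}={",".join(cats) if cats else ""}\n')
                else:
                    fh.write(f'{field}=\n')
            case 'compatibleModesList':
                fh.write(f'modes={contribution[field]}\n')
            case _:
                fh.write(f'{field}={"" if contribution[field] is None else contribution[field]}\n')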


20 changes: 12 additions & 8 deletions scripts/to_sources_jsons.py
@@ -63,6 +63,17 @@ def to_sources_dict(contribution_dict):
return sources_dict


def write_json_for_each_contribution_in_list(all_contributions, folder_path):
for contribution in all_contributions:
if 'name' in contribution:
# output zero padded string for id
contribution['id'] = f"{contribution['id']:03}"
filename = contribution['name'].replace(':', '').replace('/', '').replace(' ', '_') + '.json'
this_filepath = folder_path / filename
with open(this_filepath, 'w') as f:
json.dump(to_sources_dict(contribution), f, indent=2)


if __name__ == "__main__":
sources_folder = pathlib.Path(__file__).parent.parent / 'sources/'

@@ -74,11 +85,4 @@ def to_sources_dict(contribution_dict):
sources_folder.mkdir(parents=True, exist_ok=True)

# create a json file in the sources folder for each contribution
for contribution in contributions_list:
if 'name' in contribution:
# output zero padded string for id
contribution['id'] = f"{contribution['id']:03}"
filename = contribution['name'].replace(':','').replace('/','').replace(' ','_') + '.json'
this_filepath = sources_folder / filename
with open(this_filepath, 'w') as f:
json.dump(to_sources_dict(contribution),f,indent=2)
write_json_for_each_contribution_in_list(contributions_list, sources_folder)
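The filename handling above only strips colons and slashes and turns spaces into underscores; a tiny sketch with an invented contribution name:

    # Invented name, shown only to illustrate the sanitisation above.
    name = 'Shapes 3D: Extra/Utils'
    filename = name.replace(':', '').replace('/', '').replace(' ', '_') + '.json'
    print(filename)  # Shapes_3D_ExtraUtils.json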
135 changes: 135 additions & 0 deletions tests/unit/test_validate_and_model_dump.py
@@ -0,0 +1,135 @@
import pytest
from pydantic import ValidationError
from scripts.parse_and_validate_properties_txt import validate_new, validate_existing, validate_new_library


# Test Cases
class TestValidateAndExport:

def test_validate_existing_complete_data(self, valid_properties_data):
"""Test validate_existing with complete data"""
props = validate_existing(valid_properties_data)

assert props['name'] == valid_properties_data['name']
assert props['authors'] == valid_properties_data['authors']
assert props['url'] == valid_properties_data['url']
assert props['categories'] == valid_properties_data['categories']
assert props['sentence'] == valid_properties_data['sentence']
assert props['paragraph'] == valid_properties_data['paragraph']
assert props['version'] == valid_properties_data['version']
assert props['prettyVersion'] == valid_properties_data['prettyVersion']
assert props['minRevision'] == int(valid_properties_data['minRevision'])
assert props['maxRevision'] == int(valid_properties_data['maxRevision'])
assert props['modes'] == valid_properties_data['modes']


def test_validate_existing_minimal_required_data(self, minimal_properties_existing_data):
"""Test validate_existing with minimal data"""
props = validate_existing(minimal_properties_existing_data)

assert props['name'] == minimal_properties_existing_data['name']
assert props['authors'] == minimal_properties_existing_data['authors']
assert props['url'] == minimal_properties_existing_data['url']
assert props['categories'] is None
assert props['sentence'] == minimal_properties_existing_data['sentence']
assert props['paragraph'] is None
assert props['version'] == minimal_properties_existing_data['version']
assert props['prettyVersion'] is None
assert props['minRevision'] == 0 # Default value
assert props['maxRevision'] == 0 # Default value
assert props['modes'] is None


def test_validate_existing_extra_fields_allowed(self, properties_with_extra_fields):
"""Test validate_existing with extra fields"""
props = validate_existing(properties_with_extra_fields)

assert props['name'] == properties_with_extra_fields['name']
assert props['customField'] == properties_with_extra_fields['customField']
assert props['anotherExtra'] == properties_with_extra_fields['anotherExtra']


def test_validate_new_complete_data(self, valid_properties_data):
"""Test validate_new with complete data"""
props = validate_new(valid_properties_data)

assert props['name'] == valid_properties_data['name']
assert props['authors'] == valid_properties_data['authors']
assert props['url'] == valid_properties_data['url']
assert props['categories'] == valid_properties_data['categories']
assert props['sentence'] == valid_properties_data['sentence']
assert props['paragraph'] == valid_properties_data['paragraph']
assert props['version'] == int(valid_properties_data['version'])
assert props['prettyVersion'] == valid_properties_data['prettyVersion']
assert props['minRevision'] == int(valid_properties_data['minRevision'])
assert props['maxRevision'] == int(valid_properties_data['maxRevision'])
assert props['modes'] == valid_properties_data['modes']


def test_validate_new_minimal_required_data(self, minimal_properties_base_data):
"""Test validate_new with minimal data"""
props = validate_new(minimal_properties_base_data)

assert props['name'] == minimal_properties_base_data['name']
assert props['authors'] == minimal_properties_base_data['authors']
assert props['url'] == minimal_properties_base_data['url']
assert props['categories'] is None
assert props['sentence'] == minimal_properties_base_data['sentence']
assert props['paragraph'] is None
assert props['version'] == int(minimal_properties_base_data['version'])
assert props['prettyVersion'] == minimal_properties_base_data['prettyVersion']
assert props['minRevision'] == 0 # Default value
assert props['maxRevision'] == 0 # Default value
assert props['modes'] is None


def test_validate_new_extra_fields_allowed(self, properties_with_extra_fields):
"""Test validate_new with extra fields"""
props = validate_new(properties_with_extra_fields)

assert props['name'] == properties_with_extra_fields['name']
assert props['customField'] == properties_with_extra_fields['customField']
assert props['anotherExtra'] == properties_with_extra_fields['anotherExtra']


def test_validate_new_library_complete_data(self, valid_properties_data):
"""Test validate_new_library with complete data"""
props = validate_new_library(valid_properties_data)

assert props['name'] == valid_properties_data['name']
assert props['authors'] == valid_properties_data['authors']
assert props['url'] == valid_properties_data['url']
assert props['categories'] == valid_properties_data['categories']
assert props['sentence'] == valid_properties_data['sentence']
assert props['paragraph'] == valid_properties_data['paragraph']
assert props['version'] == int(valid_properties_data['version'])
assert props['prettyVersion'] == valid_properties_data['prettyVersion']
assert props['minRevision'] == int(valid_properties_data['minRevision'])
assert props['maxRevision'] == int(valid_properties_data['maxRevision'])
assert props['modes'] == valid_properties_data['modes']


def test_validate_new_library_minimal_required_data(self, minimal_properties_library_data):
"""Test validate_new_library with minimal data"""
props = validate_new_library(minimal_properties_library_data)

assert props['name'] == minimal_properties_library_data['name']
assert props['authors'] == minimal_properties_library_data['authors']
assert props['url'] == minimal_properties_library_data['url']
assert props['categories'] == minimal_properties_library_data['categories']
assert props['sentence'] == minimal_properties_library_data['sentence']
assert props['paragraph'] is None
assert props['version'] == int(minimal_properties_library_data['version'])
assert props['prettyVersion'] == minimal_properties_library_data['prettyVersion']
assert props['minRevision'] == 0 # Default value
assert props['maxRevision'] == 0 # Default value
assert props['modes'] is None


def test_validate_new_library_extra_fields_allowed(self, properties_with_extra_fields):
"""Test validate_new_library with extra fields"""
props = validate_new_library(properties_with_extra_fields)

assert props['name'] == properties_with_extra_fields['name']
assert props['customField'] == properties_with_extra_fields['customField']
assert props['anotherExtra'] == properties_with_extra_fields['anotherExtra']
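The fixtures referenced throughout these tests (valid_properties_data, minimal_properties_base_data, properties_with_extra_fields, and so on) are defined elsewhere in the test suite, presumably in a conftest.py that this diff does not show. Purely as a guess at their shape, inferred only from the assertions above, one of them might look something like this:

    import pytest

    @pytest.fixture
    def minimal_properties_base_data():
        # Hypothetical shape, inferred from the assertions; not the repository's
        # actual fixture. version is a numeric string so the int() comparison passes.
        return {
            'name': 'MyLib',
            'authors': 'Jane Doe',
            'url': 'https://example.org/mylib',
            'sentence': 'A short description.',
            'version': '1',
            'prettyVersion': '1.0.0',
        }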