Skip to content

Commit ae1dd2c

Browse files
authored
Merge pull request #19 from qingfengxia/dev
update GetStarted.md and 2 commits on rename
2 parents f0345c3 + b0381d0 commit ae1dd2c

File tree

8 files changed

+118
-67
lines changed

8 files changed

+118
-67
lines changed

src/Geom/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ add_executable(MyGeomMain "GeometryMain.cpp")
4646
add_dependencies(MyGeomMain MyGeom)
4747

4848
target_link_libraries(MyGeomMain MyGeom)
49-
set_target_properties(MyGeomMain PROPERTIES OUTPUT_NAME "geomPipeline")
49+
set_target_properties(MyGeomMain PROPERTIES OUTPUT_NAME "pppGeomPipeline")
5050

5151
if(MSVC)
5252
target_compile_options(MyGeomMain PRIVATE /wd4996 /wd4251)

src/Geom/GeometryMain.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ int main(int argc, char* argv[])
1111
if (argc < 2)
1212
{
1313
std::cout << "Error: input config file is not provided" << std::endl;
14-
std::cout << "Usage: geomPipeline config.json" << std::endl;
14+
std::cout << "Usage: pppGeomPipeline config.json" << std::endl;
1515
std::terminate();
1616
}
1717
auto p = new Geom::GeometryPipelineController(argc, argv);

src/python/geomPipeline.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,13 @@
33

44
"""
55
After installation by pip/system package manager, `geomPipeline.py` should be on executable PATH
6+
On Windows, it is not possible to run geomPipeline.py without making a batch file.
67
7-
`geomPipeline.py path-to-your-geometry.stp`
8+
`geomPipeline.py imprint path-to-your-geometry.stp`
89
this script will generate a config file and save the result into a folder that has the name of your geometry file stem
910
1011
test command line arguments by
11-
python3 geomPipeline.py --thread-count 4 --tolerance=0.1 --config --no-merge ../data/test_geometry/test_geometry.stp && scite config.json
12+
python3 geomPipeline.py imprint --thread-count 4 --tolerance=0.1 --config --no-merge ../data/test_geometry/test_geometry.stp && scite config.json
1213
1314
you can keep the `config_file_content` dict in `geomPipeline.py` as it is, which will build a default pipeline
1415
@@ -19,8 +20,8 @@
1920
"""
2021

2122
USAGE = """
22-
geomPipeline.py action[imprint|check|detect|decompose|...] input_filename
23-
see more details and optional arguments by `geomPipeline.py -h`
23+
geomPipeline.py {imprint|check|detect|decompose|...} input_filename
24+
2425
"""
2526

2627
import sys
@@ -93,7 +94,7 @@ def geom_add_argument(parser):
9394

9495
############################### arg parse ###############################
9596
parser = argparse.ArgumentParser(usage=USAGE)
96-
# positional argument, the first positional argument can be ignored
97+
# positional argument, the first positional argument can have a default
9798
parser.add_argument(
9899
"action", nargs="?", type=Action, default=Action.imprint, choices=list(Action)
99100
)

src/python/pppPipelineController.py

Lines changed: 41 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,12 @@
55
# created and tested on Sunday March 22, 2020
66

77
"""
8-
This is a demonstration of usage of PPP module, using CommandLineProcessor
8+
This script defines utility functions that can be shared by all pipeline controllers such as GeomPipeline.py.
9+
By using those functions, consistent command line arguments and config.json headers can be achieved.
10+
11+
At the end of this script, there is a demonstration of the usage of the PPP core module, using CommandLineProcessor.
912
ParallelAccessorTest.cpp is a demo (test) of instantiation of ProcessorTemplate class in C++
1013
11-
todo: make it a class, to be reused with GeomPipeline.py
1214
"""
1315

1416
import sys
@@ -17,8 +19,7 @@
1719
import copy
1820
from collections import OrderedDict
1921
from multiprocessing import cpu_count
20-
#!/usr/bin/env python3
21-
# -*- coding: utf-8 -*-
22+
2223

2324
import argparse
2425
import shutil
@@ -40,50 +41,58 @@
4041

4142

4243
def ppp_add_argument(parser):
43-
# the only compulsory arg
44+
# call this function after the first positional arg has been added to parser
45+
46+
# the second positional arg for input file
4447
parser.add_argument(
45-
"inputFile", help="input data file, detect type from file suffix"
48+
"input", help="input data file, detect type from file suffix",
4649
)
4750

4851
# optional arguments
52+
# do not use "nargs=1", it will return args.outputFile as a list instead of string
4953
parser.add_argument(
50-
"-o", "--outputFile", help="output file name (without folder path)"
54+
"--working-dir", help="working folder path, by default, the current working folder",
55+
dest = "workingDir"
5156
)
52-
# do not use "nargs=1" it will return args.outputFile as a list instead of string
57+
5358
parser.add_argument(
54-
"--workingDir", help="working folder path, by default pwd"
59+
"-o", "--output-file", help="output file name (without folder path)",
60+
dest = "outputFile"
5561
)
62+
5663
parser.add_argument(
57-
"--outputDir",
58-
help="output folder path, by default a subfolder in workingDir",
64+
"--output-dir",
65+
help="output folder path, by default a subfolder in the working dir",
66+
dest = "outputDir"
5967
)
6068

6169
parser.add_argument(
6270
"--config",
6371
dest="config_only",
6472
action="store_true",
65-
help=" only generate config.json without run the pipeline",
73+
help=" only generate config.json without run the pipeline processors",
6674
)
6775

68-
parser.add_argument(
69-
"-v",
70-
"--verbosity",
71-
type=str,
72-
default="INFO",
73-
help="verbosity: for console or termimal: DEBUG, PROGRESS, INFO, WARNING, ERROR",
74-
)
7576
parser.add_argument(
7677
"-nt",
7778
"--thread-count",
7879
dest="thread_count",
7980
type=int,
8081
default=cpu_count(),
81-
help="number of thread to use, max = hardware core number",
82+
help="number of thread to use, by default, hardware core number",
8283
)
83-
return (
84-
parser # must return parser, otherwise, modification to input parser will lost
84+
85+
parser.add_argument(
86+
"-v",
87+
"--verbosity",
88+
type=str,
89+
default="INFO",
90+
help="verbosity: for console or terminal: DEBUG, PROGRESS, INFO, WARNING, ERROR",
8591
)
8692

93+
# must return parser, otherwise, modification to the input parser will be lost
94+
return parser
95+
8796

8897
############################ input and output ##############################
8998
def is_url(s):
@@ -94,18 +103,18 @@ def is_url(s):
94103

95104

96105
def ppp_parse_input(args):
97-
if args.inputFile:
98-
if is_url(args.inputFile):
106+
if args.input:
107+
if is_url(args.input):
99108
# download to current folder
100109
import urllib.request
101110

102-
urllib.request.urlretrieve(args.inputFile, "input_data")
111+
urllib.request.urlretrieve(args.input, "input_data")
103112
return "input_data"
104113

105-
elif os.path.exists(args.inputFile):
106-
return args.inputFile
114+
elif os.path.exists(args.input):
115+
return args.input
107116
else:
108-
raise IOError("input file does not exist: ", args.inputFile)
117+
raise IOError("input file does not exist: ", args.input)
109118
else:
110119
raise Exception("input file must be given as an argument")
111120

@@ -173,22 +182,19 @@ def ppp_post_process(args):
173182
print("failed to create symbolic link", linkToInputFile)
174183

175184

176-
#####################################################
177-
178-
179185
def generate_config_file(config_file_content, args):
180186
# parse args first, the write config file
181187

182188
config_file_given = False
183189
generated_config_file_name = "config.json"
184190
# input json file is config, but not a geometry input manifest file
185-
if args.inputFile.find(".json") > 0:
186-
with open(args.inputFile, "r") as f:
191+
if args.input.find(".json") > 0:
192+
with open(args.input, "r") as f:
187193
_json_file_content = json.loads(f.read())
188194
# check compulsory key in config.json file
189195
if "readers" in _json_file_content:
190196
config_file_given = True
191-
input_config_file_name = args.inputFile
197+
input_config_file_name = args.input
192198
else:
193199
pass # it is a multiple geometry-material manifest json file
194200
#
@@ -242,7 +248,7 @@ def generate_config_file(config_file_content, args):
242248
},
243249
}
244250

245-
###############################################################################
251+
######################### module specific pipeline control ############################
246252

247253

248254
class PipelineController(object):

src/python/pppStartPipeline.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,14 @@
33

44
"""
55
this enable test for the cases: test after package installed or test in the build folder
6-
using ppp module is importable, otherwise use the executable: geomPipeline
6+
using ppp module is importable, otherwise use the executable: pppGeomPipeline
77
"""
88

99
import shutil
1010
import sys
1111
import os.path
1212

13-
ppp_geom_executable = "geomPipeline"
13+
ppp_geom_executable = "pppGeomPipeline"
1414

1515
try:
1616
import ppp # in case this python module has been installed
@@ -28,10 +28,10 @@
2828
try:
2929
import ppp # in case of running this module in the build folder
3030
except ImportError:
31-
print("parallel-preprocessor python module `ppp` is not installed/importable")
31+
# print("parallel-preprocessor python module `ppp` is not installed/importable")
3232
# print("if not installed, ppp module must be located in build folder")
3333
# print("../lib/ related to this script")
34-
print("start to run `geomPipeline config.json`in an external process")
34+
# print("start to run `pppGeomPipeline config.json` in an external process")
3535
ppp = None
3636

3737
has_ppp_module = bool(ppp)

wiki/GetStarted.md

Lines changed: 54 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,52 @@
11
## Get Started
22

3-
### Quick start
4-
The command line `geomPipeline.py imprint your_geometry_file_path` is the starting point for users, just replace *your_geometry_file_path* by your geometry.
5-
6-
run `geomPipeline.py -h` for more options.
3+
### Command line interface
4+
5+
Geometry pipeline can be started in a terminal. `geomPipeline.py` takes 2 positional arguments, the first is the action, and the second is the input file path. For example, `geomPipeline.py imprint your_geometry_file_path` , just replace *your_geometry_file_path* by your geometry.
6+
7+
Optional arguments for all pipelines; it is expected that other pipelines also follow this convention.
8+
```
9+
-h, --help show this help message and exit
10+
-o OUTPUT_FILE, --output-file OUTPUT_FILE
11+
output file name (without folder path)
12+
--working-dir WORKING_DIR
13+
working folder path, by default the current working folder
14+
--output-dir OUTPUT_DIR
15+
output folder path, by default, a subfolder in workingDir
16+
--config only generate config.json without run the pipeline
17+
18+
-nt THREAD_COUNT, --thread-count THREAD_COUNT
19+
number of thread to use, max = hardware core number
20+
21+
-v VERBOSITY, --verbosity VERBOSITY
22+
verbosity: for console or terminal: DEBUG, PROGRESS, INFO, WARNING, ERROR
23+
```
24+
25+
optional arguments for geometry pipeline
26+
```
27+
--metadata METADATA input metadata file, only for brep input geometry
28+
--tolerance TOLERANCE
29+
tolerance for imprinting, unit MilliMeter
30+
--no-merge do not merge the imprinted shapes, for two-step workflow
31+
--ignore-failed ignore failed (in BOP check, collision detect, etc) solids
32+
```
33+
34+
Always run `geomPipeline.py -h` to get the latest arguments.
735

836
### Geometry preprocessing features
937

10-
The default action is **imprint**, outputting a geometry file (`*._processed.brep`) of one shape with duplicated shared faces removed, and also a `*_processed_metadata.json` file of meta data. The latter file contains meta data such as material information for the geometry brep file. All the output files are located in a folder in the current directory.
38+
Implemented geometry processing actions are: `check,detect,merge,imprint,search`, some other planned actions `tessellate, fix, decompose` can be found in [Roadmap.md](Roadmap.md)
39+
40+
The **imprint** action outputs a geometry file (`*._processed.brep`) of one shape with duplicated shared faces removed, and also a `*_processed_metadata.json` file of meta data. The latter file contains meta data such as material information for the geometry brep file. All the output files are located in a folder in the current directory.
1141

12-
The imprint operation can be split into 2 steps: Imprint faces and Merge faces:
42+
The imprint operation is accomplished in 2 steps: Imprint faces and Merge faces:
1343
`geomPipeline.py imprint geometry_file --thread-count 6 --no-merge`
1444
`geomPipeline.py merge /home/qxia/Documents/StepMultiphysics/parallel-preprocessor/result --thread-count 6`
1545

16-
Other actions are `search, check, detect, decompose`, etc, see more options by running `geomPipeline.py -h`.
46+
Usage of other actions such as `search, check, detect, decompose`
1747

1848
`geomPipeline.py check geometry_file` will check for errors, e.g. volume too small, invalid geometry, etc
19-
`geomPipeline.py detect geometry_file` will detect collision between solid shapes, see more shape relations types at Geom::CollisionType
49+
`geomPipeline.py detect geometry_file` will detect collision between solid shapes, see more shape spatial relationship types defined in the type `Geom::CollisionType`
2050

2151
### Input geometry format supported
2252

@@ -29,23 +59,28 @@ Other actions are `search, check, detect, decompose`, etc, see more options by r
2959
{"material": "Steel", "filename": "path_to_geometry_file2" },
3060
]
3161
```
62+
see the doxygen-generated document for this class for the most up-to-date information
3263

33-
### Advanced usage (adjust pipeline parameters)
34-
#### Pipeline configuration generation (python script)
3564

36-
`geomPipeline.py` will generate a json configuration based on user input (by default `config.json` in the current folder), then starts the geometry preprocessing pipeline. For example, the imprint action will be organized into a pipeline of several GeometryProcessors, with default parameters written into the `config.json`. If the output is not ideal, users can edit parameters in the generated `config.json` and re-run the pipeline by `python3 geomPipeline.py config.json`, or equally `geomPipeline path_to_json_config.json`.
65+
### Debug your installation
3766

38-
In fact, python pipeline controller such as `geomPipeline.py` generates input configuration, all the processing computation is done by `geomPipeline` which is an executable compiled from C++ code. This executable only accepts a json configuration file, e.g. `geomPipeline path_to_json_config.json`.
67+
`which geomPipeline` on Unix-like system, or `where geomPipeline` on Windows to see if executable has been installed on PATH.
3968

40-
The split of high-level user-oriented python script and lower-level C++ program has the benifits:
41-
+ to ease the debugging of mixed python and C++ programming
42-
+ to ease the parallel programming, since Python has the GIL problem
69+
NOTE: if installed using deb/rpm on Ubuntu or Fedora while the user has anaconda activated, the user will not be able to use the c-extension module `ppp`. For example, on Ubuntu the ppp module `ppp.cpython-36m-x86_64-linux-gnu.so` is installed to `/usr/lib/python3/dist-packages/`. In that case, `python3 /usr/bin/geomPipeline.py manifest.json` will start an external process by python to run the pipeline without using the `ppp` module.
4370

71+
On Windows, a batch file may be generated to run the python script "geomPipeline.py" without typing "python path_to/geomPipeline.py".
4472

45-
### Debug your installation
4673

47-
NOTE: if installed using deb/rpm on ubuntu and fedora, while user has anaconda activated, then user should give the full path of system python3 path, as the Linux package of ppp link to system python `/usr/bin/python3`, and install ppp module to system python site. For example, on Ubuntu the ppp module `ppp.cpython-36m-x86_64-linux-gnu.so` is installed to `/usr/lib/python3/dist-packages/`
74+
### Advanced usage (adjust pipeline parameters)
75+
#### Pipeline configuration generation (python script)
76+
77+
`geomPipeline.py` will generate a json configuration based on user input (by default `config.json` in the current folder), then starts the geometry preprocessing pipeline. For example, the imprint action will be organized into a pipeline of several GeometryProcessors, with default parameters written into the `config.json`.
78+
79+
If the output is not ideal, users can edit parameters in the generated `config.json` and re-run the pipeline by `python3 geomPipeline.py config.json`, or equally `pppGeomPipeline path_to_json_config.json`.
4880

49-
To use `geomPipeline.py`
81+
Actually, all the processing computation is done by `pppGeomPipeline` which is an executable compiled from C++ code. This executable only accepts a json configuration file, e.g. `pppGeomPipeline path_to_json_config.json`.
82+
83+
The split of high-level user-oriented python script and lower-level C++ program has the benefits:
84+
+ to ease the debugging of mixed python and C++ programming
85+
+ to ease the parallel programming, since Python has the GIL problem
5086

51-
`/usr/bin/python3 /user/bin/geomPipeline.py manifest.json`

wiki/Packaging.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ First of all, I create release tag, then upload binary package manually (creatin
142142
then run this action to update the package on each push.
143143

144144
```yml
145-
# those release asset filename (is created mannually before running this action)
145+
# those release asset filename (is created manually before running this action)
146146
- name: Upload binary package to release
147147
uses: svenstaro/upload-release-action@v2
148148
with:

wiki/Testing.md

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,13 @@
11
## Testing
2+
3+
### Run all the tests
4+
5+
After an out-of-source build, in the build folder such as `parallel-preprocessor/build/`, run `sh run_all_test.sh`.
6+
7+
NOTE: unit test applications such as `pppGeomTests` must be run in the `build/ppptest` folder for the moment, since test data in `parallel-preprocessor/build/data` are referenced using relative paths.
8+
9+
### Tested platforms
10+
211
Unit tests are written in C++ to test C++ functions and in Python to test the pipeline.
312

413
Unit tests can be triggered by `run_all_test.sh` in the build dir. This `run_all_test.sh` will make `ppptest` subfolder under the build folder if not yet generated by cmake build system, and copy/link necessary test data for all tests.
@@ -25,7 +34,7 @@ Those python tests script implement `unittest` paradigm; python3-pytest will be
2534

2635
### Continuous integration
2736

28-
Gitlab runners ubuntu and fedora setup within UKAEA
37+
Github CI runners ubuntu and fedora will run all the tests.
2938

3039

3140
### Coverage report in HTML

0 commit comments

Comments
 (0)