- from pathlib import Path
- from os.path import join
-
- files = list(Path(".").glob("parameters_*.json"))
-
- # extract the configuration from the parameter files
- # by reading in the json files and extracting the "configuration" value
- # (configuration stores the appendix in the output files)
- # in theory, you could make that identical so parameters_1.json with configuration "1"
- # would produce summary_1.json
import json
- def get_configuration(file):
-     with open(file, 'r') as f:
-         data = json.load(f)
-     # Check if "configuration" key exists, otherwise use the file name
-     if "configuration" in data:
-         return data["configuration"]
-     # Fallback to using the file name if "configuration" is not present
-     # Assuming the file name is in the format "parameters_<configuration>.json"
-     if file.stem.startswith("parameters_"):
-         return file.stem.split("_")[1]
-     # If no configuration is found, raise an error
-     raise ValueError(f"Configuration key not found for file: {file}")
-
- # Create a dictionary of configurations (key is the name of the parameter file)
- # configurations: {Path("parameters_1.json"): "1", ...}
- configurations = {file: get_configuration(file) for file in files if file.is_file()}
+ configfile: "workflow_config.json"

- # Check for duplicate configuration values (the configurations should be unique)
- config_values = list(configurations.values())
- duplicates = set([x for x in config_values if config_values.count(x) > 1])
- if duplicates:
-     raise ValueError(f"Duplicate configuration values found in parameter files: {', '.join(duplicates)}")
+ result_dir = config["result_dir"]
+ configuration_to_parameter_file = config["configuration_to_parameter_file"]
+ configurations = config["configurations"]
+ tools = config["tools"]
+ benchmark = config["benchmark"]

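For orientation, here is a minimal sketch of what the new workflow_config.json could contain. The keys mirror the config[...] reads above; the concrete values (tool list, benchmark name, result directory, configurations "1" and "2" with their parameter files) are assumptions based on the defaults the removed Python code used to compute, not taken from the repository:

    {
        "benchmark": "linear-elastic-plate-with-hole",
        "result_dir": "snakemake_results/linear-elastic-plate-with-hole",
        "tools": ["fenics"],
        "configurations": ["1", "2"],
        "configuration_to_parameter_file": {
            "1": "parameters_1.json",
            "2": "parameters_2.json"
        }
    }

With this layout, configurations is a plain list of configuration names, which is why the expand() calls further down drop the .values() calls.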
- # Reverse mapping for easy lookup by configuration name
- configuration_to_parameter_file = {v: str(k) for k, v in configurations.items()}
-
- tools = ["fenics"]
- benchmark = "linear-elastic-plate-with-hole"
- # results are stored in snakemake_results/linear-elastic-plate-with-hole/fenics
- result_dir = join("snakemake_results", benchmark)

rule all:
    input:
@@ -59,35 +27,30 @@ rule create_mesh:
        python3 {input.script} --input_parameter_file {input.parameters} --output_mesh_file {output.mesh}
        """

- rule run_simulation:
-     input:
-         script = "{tool}/run_simulation.py",
-         parameters = lambda wildcards: configuration_to_parameter_file[wildcards.configuration],
-         mesh = f"{result_dir}/mesh/mesh_{{configuration}}.msh",
-     output:
-         zip = f"{result_dir}/{{tool}}/solution_field_data_{{configuration}}.zip",
-         metrics = f"{result_dir}/{{tool}}/solution_metrics_{{configuration}}.json",
-     conda:
-         "{tool}/environment_simulation.yml",
-     shell:
-         """
-         python3 {input.script} --input_parameter_file {input.parameters} --input_mesh_file {input.mesh} --output_solution_file_zip {output.zip} --output_metrics_file {output.metrics}
-         """
+ # Include tool-specific rules.
+ # They should take at least the mesh file and the parameter file as input
+ # and, for each configuration, output a solution_metrics_{configuration}.json
+ # and a solution_field_data_{configuration}.zip where all the visualization
+ # files (e.g. vtk) are stored.
+ for tool in tools:
+     include: f"{tool}/Snakefile"
+

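The included per-tool Snakefiles are not shown in this diff. A plausible sketch of fenics/Snakefile, assuming it simply takes over the run_simulation rule removed above with the {tool} wildcard fixed to the concrete tool name (file names and the conda environment path are assumptions):

    # hypothetical fenics/Snakefile; mirrors the removed run_simulation rule
    rule run_simulation:
        input:
            script = "fenics/run_simulation.py",
            parameters = lambda wildcards: configuration_to_parameter_file[wildcards.configuration],
            mesh = f"{result_dir}/mesh/mesh_{{configuration}}.msh",
        output:
            zip = f"{result_dir}/fenics/solution_field_data_{{configuration}}.zip",
            metrics = f"{result_dir}/fenics/solution_metrics_{{configuration}}.json",
        conda:
            # conda env paths resolve relative to the Snakefile defining the rule
            "environment_simulation.yml"
        shell:
            """
            python3 {input.script} --input_parameter_file {input.parameters} --input_mesh_file {input.mesh} --output_solution_file_zip {output.zip} --output_metrics_file {output.metrics}
            """

Globals such as result_dir and configuration_to_parameter_file are visible in the sketch because include: shares the main Snakefile's namespace.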
rule summary:
    input:
        # the summary is performed for all configurations saved into a single file
        # (snakemake_results/linear-elastic-plate-with-hole/fenics/summary.json)
-         parameters = expand("{param}", param = [configuration_to_parameter_file[c] for c in configurations.values()]),
-         mesh = expand(f"{result_dir}/mesh/mesh_{{configuration}}.msh", configuration = configurations.values()),
+         parameters = expand("{param}", param = [configuration_to_parameter_file[c] for c in configurations]),
+         mesh = expand(f"{result_dir}/mesh/mesh_{{configuration}}.msh", configuration = configurations),
        metrics = lambda wildcards: expand(
            f"{result_dir}/{{tool}}/solution_metrics_{{configuration}}.json",
-             tool = [wildcards.tool], configuration = configurations.values()
+             tool = [wildcards.tool], configuration = configurations
        ),
        solution_field_data = lambda wildcards: expand(
            f"{result_dir}/{{tool}}/solution_field_data_{{configuration}}.zip",
-             tool = [wildcards.tool], configuration = configurations.values()
-         ),
+             tool = [wildcards.tool], configuration = configurations
+         ),
    output:
        summary_json = f"{result_dir}/{{tool}}/summary.json",
    conda: "environment_postprocessing.yml",
@@ -96,7 +59,7 @@ rule summary:
        from pathlib import Path

        all_summaries = []
-         for idx, config in enumerate(configurations.values()):
+         for idx, config in enumerate(configurations):
            summary = {}
            summary["benchmark"] = benchmark
            with open(input.parameters[idx], "r") as param_file:
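As a closing illustration of the configurations change: with the sample config sketched earlier (the values "1" and "2" are illustrative), the mesh input of rule summary now expands directly over the list, e.g.

    expand(f"{result_dir}/mesh/mesh_{{configuration}}.msh", configuration=configurations)
    # with result_dir = "snakemake_results/linear-elastic-plate-with-hole" and
    # configurations = ["1", "2"], this evaluates to:
    # ["snakemake_results/linear-elastic-plate-with-hole/mesh/mesh_1.msh",
    #  "snakemake_results/linear-elastic-plate-with-hole/mesh/mesh_2.msh"]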