diff --git a/config/config.yaml b/config/config.yaml
index 2f3f027..1a79dae 100644
--- a/config/config.yaml
+++ b/config/config.yaml
@@ -1,7 +1,7 @@
 # name of base pypsa network
 
 scenario:
-  name: "nh"
+  name: "or"
 
 model_options:
   ch4: false # if true, ch4 counts against emission budget. Else, ch4 tracked, but not counted against budget.
@@ -11,8 +11,8 @@ model_options:
 
 # Any additional files needed from pypsa-usa
 pypsa_usa:
-  network: "nh/elec_s20_c1m_ec_lv1.0_3h_E-G.nc"
-  pop_layout: "nh/pop_layout_elec_s20_c1m.csv"
+  network: "or/elec_s30_c3m_ec_lv1.0_3h_E-G.nc"
+  pop_layout: "or/pop_layout_elec_s30_c3m.csv"
 era5_year: 2018
 
 # write out GSA associated metadata with each run
@@ -51,27 +51,27 @@ uncertainity:
   # sobol -> N=2^m (m is a power of 2) -> N ~ 10d
   # lhs -> N = r -> N ~ > 10d
   sample: lhs # (lhs|sobol)
-  replicates: 1100
+  replicates: 1400
   parameters: # naems or groups
+    - capex_wind
    - eff_trn_lpg
     - elec_export_price
     - elec_import_price
-    - elec_import_rec_price
     - elec_trade_flow
+    - ev_policy
     - gwp
     - lpg_cost
     - ng_leakage_downstream
     - ng_marginal_cost_export
     - ng_marginal_cost_import
-    - rps
-
+    - ptc_solar
   results: config/results.csv # results to extract from ua
   plots: config/plots_ua.csv
 
 # Choose a solver
 solver:
-  name: gurobi # (cbc|gurobi|cplex|highs)
-  options: gurobi-default # see solving config
+  name: cplex # (cbc|gurobi|cplex|highs)
+  options: cplex-default # see solving config
diff --git a/config/solving.yaml b/config/solving.yaml
index eb7d280..ce720a3 100644
--- a/config/solving.yaml
+++ b/config/solving.yaml
@@ -55,7 +55,6 @@ solving:
       threads: 8
       Seed: 123
     gurobi-fallback: # Use gurobi defaults
-      name: gurobi
       crossover: 0
       method: 2 # barrier
       BarHomogeneous: 1 # Use homogeneous barrier if standard does not converge
@@ -65,13 +64,10 @@ solving:
       Seed: 123
       threads: 8
     cplex-default:
-      threads: 4
+      threads: 8
       lpmethod: 4 # barrier
       solutiontype: 2 # non basic solution, ie no crossover
-      barrier.convergetol: 1.e-5
-      feasopt.tolerance: 1.e-6
+      barrier.convergetol: 1.e-3
+      feasopt.tolerance: 1.e-3
     cbc-default: {} # Used in CI
     glpk-default: {} # Used in CI
-
-  mem: 30000 #memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
-  walltime: "12:00:00"
\ No newline at end of file
diff --git a/hpc/.bashrc b/hpc/.bashrc
new file mode 100644
index 0000000..1835cc8
--- /dev/null
+++ b/hpc/.bashrc
@@ -0,0 +1,23 @@
+# .bashrc
+
+# Source global definitions
+if [ -f /etc/bashrc ]; then
+    . /etc/bashrc
+fi
+
+# Uncomment the following line if you don't like systemctl's auto-paging feature:
+# export SYSTEMD_PAGER=
+
+# User specific aliases and functions
+. "$HOME/.cargo/env"
+
+# Path for custom modules
+export MODULEPATH=$HOME/modulefiles:$MODULEPATH
+
+# Path for CPLEX executable
+export CPLEX_STUDIO_DIR=/home/trevor23/projects/def-tniet-ab/trevor23/cplex_2212
+export PATH=$CPLEX_STUDIO_DIR/cplex/bin/x86-64_linux:$PATH
+export LD_LIBRARY_PATH=$CPLEX_STUDIO_DIR/cplex/bin/x86-64_linux:$LD_LIBRARY_PATH
+
+# for python location
+export CPLEX_STUDIO_BINARIES=$CPLEX_STUDIO_DIR/cplex/bin/x86-64_linux
diff --git a/hpc/load_env.sh b/hpc/load_env.sh
new file mode 100755
index 0000000..90abda6
--- /dev/null
+++ b/hpc/load_env.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# ------------------------------
+# Load Alliance HPC modules
+# ------------------------------
+module load StdEnv/2023
+module load openmpi/4.1.5
+# module load netcdf-mpi/4.9.2
+module load netcdf
+# module load mpi4py/3.1.6
+module load scipy-stack
+module load arrow
+module load ipopt
+# module load gurobi/11.0.3 # uncomment if using HPC license
+module load mycplex/22.1.2
+# must load python after mpi
+module load python/3.11
+
+
+# ------------------------------
+# Address MPI version issue
+# ------------------------------
+export OMPI_MCA_ess=^pmi
+export OMPI_MCA_plm=isolated
+export OMPI_MCA_pml=ob1
+export OMPI_MCA_rmaps_base_oversubscribe=1
+export OMPI_MCA_btl=^openib
+
+# Prevent ORTE "session_dir" crashes
+export OMPI_MCA_orte_base_help_aggregate=0
+
+# ------------------------------
+# PROJ fix for PyPSA / pyproj
+# ------------------------------
+
+module load proj
+export PROJ_DATA="$EBROOTPROJ/share/proj"
+
+# ------------------------------
+# Activate Python virtual environment
+# ------------------------------
+source ~/envs/gsa/bin/activate
+
+# ------------------------------
+# Set cplex location
+# ------------------------------
+# docplex config --upgrade /project/6060200/trevor23/cplex
+
+# ------------------------------
+# Enable Rust/Cargo (needed for polars / pypsa)
+# ------------------------------
+if [ -f "$HOME/.cargo/env" ]; then
+    source "$HOME/.cargo/env"
+fi
+
+# ------------------------------
+# Optional: environment variables for Gurobi
+# (needed only if Gurobi complains)
+# ------------------------------
+# export GUROBI_HOME=$EBROOTGUROBI
+# export GRB_LICENSE_FILE=$HOME/gurobi.lic
+# export PATH=$GUROBI_HOME/bin:$PATH
+# export LD_LIBRARY_PATH=$GUROBI_HOME/lib:$LD_LIBRARY_PATH
diff --git a/hpc/modulefiles/mycplex/22.1.1 b/hpc/modulefiles/mycplex/22.1.1
new file mode 100644
index 0000000..639bfa7
--- /dev/null
+++ b/hpc/modulefiles/mycplex/22.1.1
@@ -0,0 +1,64 @@
+#%Module1.0####
+##
+## cplex
+##
+proc ModulesHelp { } {
+    global cplexversion
+
+puts stderr "\tIBM ILOG cplex "
+    puts stderr "\tThis module provides configuration for cplex, concert, cpoptimizer and opl"
+}
+
+module-whatis "IBM ILOG cplex (cplex, concert, cpoptimizer, opl). This version doesn't ask for a licence file."
+
+# for Tcl script use only
+set cplexversion 2211
+set studio_root /project/6060200/trevor23/cplex
+set cplexroot $studio_root/cplex
+set concertroot $studio_root/concert
+set oplroot $studio_root/opl
+set cpoptimizerroot $studio_root/cpoptimizer
+
+
+set cplexbin x86-64_linux
+set cplexlib $cplexbin/static_pic
+set concertbin x86-64_linux
+set concertlib $concertbin/static_pic
+set oplbin x86-64_linux
+set opllib $oplbin/static_pic
+set cpoptimizerbin x86-64_linux
+set cpoptimizerlib $cpoptimizerbin/static_pic
+
+
+prepend-path PATH $cplexroot/bin/$cplexbin
+prepend-path PATH $oplroot/bin/$oplbin
+prepend-path PATH $cpoptimizerroot/bin/$cpoptimizerbin
+
+prepend-path CPATH $cplexroot/include
+prepend-path CPATH $concertroot/include
+prepend-path CPATH $oplroot/include
+prepend-path CPATH $cpoptimizerroot/include
+
+prepend-path -d " " CPATH_EXPANDED -I$cplexroot/include
+prepend-path -d " " CPATH_EXPANDED -I$concertroot/include
+prepend-path -d " " CPATH_EXPANDED -I$oplroot/include
+prepend-path -d " " CPATH_EXPANDED -I$cpoptimizerroot/include
+
+prepend-path LIBRARY_PATH $cplexroot/lib/$cplexlib
+prepend-path LIBRARY_PATH $concertroot/lib/$concertlib
+prepend-path LIBRARY_PATH $oplroot/lib/$opllib
+prepend-path LIBRARY_PATH $oplroot/bin/x86-64_linux/
+prepend-path LIBRARY_PATH $cpoptimizerroot/lib/$cpoptimizerlib
+
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$cplexroot/lib/$cplexlib
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$concertroot/lib/$concertlib
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$oplroot/lib/$opllib
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$oplroot/bin/x86-64_linux/
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$cpoptimizerroot/lib/$cpoptimizerlib
+
+prepend-path LD_LIBRARY_PATH $cplexroot/bin/$cplexbin
+prepend-path LD_LIBRARY_PATH $oplroot/bin/$oplbin
+
+prepend-path CLASSPATH $cplexroot/lib/cplex.jar
+prepend-path MATLABPATH $cplexroot/matlab
+prepend-path STUDIO_ROOT $studio_root
diff --git a/hpc/modulefiles/mycplex/22.1.2 b/hpc/modulefiles/mycplex/22.1.2
new file mode 100644
index 0000000..7dfa2b2
--- /dev/null
+++ b/hpc/modulefiles/mycplex/22.1.2
@@ -0,0 +1,64 @@
+#%Module1.0####
+##
+## cplex
+##
+proc ModulesHelp { } {
+    global cplexversion
+
+puts stderr "\tIBM ILOG cplex "
+    puts stderr "\tThis module provides configuration for cplex, concert, cpoptimizer and opl"
+}
+
+module-whatis "IBM ILOG cplex (cplex, concert, cpoptimizer, opl). This version doesn't ask for a licence file."
+
+# for Tcl script use only
+set cplexversion 2212
+set studio_root /home/trevor23/projects/def-tniet-ab/trevor23/cplex_2212
+set cplexroot $studio_root/cplex
+set concertroot $studio_root/concert
+set oplroot $studio_root/opl
+set cpoptimizerroot $studio_root/cpoptimizer
+
+
+set cplexbin x86-64_linux
+set cplexlib $cplexbin/static_pic
+set concertbin x86-64_linux
+set concertlib $concertbin/static_pic
+set oplbin x86-64_linux
+set opllib $oplbin/static_pic
+set cpoptimizerbin x86-64_linux
+set cpoptimizerlib $cpoptimizerbin/static_pic
+
+
+prepend-path PATH $cplexroot/bin/$cplexbin
+prepend-path PATH $oplroot/bin/$oplbin
+prepend-path PATH $cpoptimizerroot/bin/$cpoptimizerbin
+
+prepend-path CPATH $cplexroot/include
+prepend-path CPATH $concertroot/include
+prepend-path CPATH $oplroot/include
+prepend-path CPATH $cpoptimizerroot/include
+
+prepend-path -d " " CPATH_EXPANDED -I$cplexroot/include
+prepend-path -d " " CPATH_EXPANDED -I$concertroot/include
+prepend-path -d " " CPATH_EXPANDED -I$oplroot/include
+prepend-path -d " " CPATH_EXPANDED -I$cpoptimizerroot/include
+
+prepend-path LIBRARY_PATH $cplexroot/lib/$cplexlib
+prepend-path LIBRARY_PATH $concertroot/lib/$concertlib
+prepend-path LIBRARY_PATH $oplroot/lib/$opllib
+prepend-path LIBRARY_PATH $oplroot/bin/x86-64_linux/
+prepend-path LIBRARY_PATH $cpoptimizerroot/lib/$cpoptimizerlib
+
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$cplexroot/lib/$cplexlib
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$concertroot/lib/$concertlib
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$oplroot/lib/$opllib
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$oplroot/bin/x86-64_linux/
+prepend-path -d " " LIBRARY_PATH_EXPANDED -L$cpoptimizerroot/lib/$cpoptimizerlib
+
+prepend-path LD_LIBRARY_PATH $cplexroot/bin/$cplexbin
+prepend-path LD_LIBRARY_PATH $oplroot/bin/$oplbin
+
+prepend-path CLASSPATH $cplexroot/lib/cplex.jar
+prepend-path MATLABPATH $cplexroot/matlab
+prepend-path STUDIO_ROOT $studio_root
diff --git a/hpc/requirements.txt b/hpc/requirements.txt
new file mode 100644
index 0000000..343dbec
--- /dev/null
+++ b/hpc/requirements.txt
@@ -0,0 +1,147 @@
+appdirs==1.4.4+computecanada
+argparse_dataclass==2.0.0+computecanada
+asttokens==3.0.0+computecanada
+attrs==25.4.0+computecanada
+blosc2 @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/blosc2/blosc2-2.2.9
+Bottleneck==1.4.2+computecanada
+cached-property @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/cachedproperty/cached-property-1.5.2
+cachetools==6.2.2
+certifi==2024.6.2
+cftime @ file:///tmp/ebuser/avx512/netCDF/4.9.2/GCC-12.3.0/cftime/cftime-1.6.2
+charset_normalizer==3.4.4+computecanada
+click==8.3.1+computecanada
+cloudpickle==3.1.2+computecanada
+comm==0.2.2+computecanada
+conda_inject==1.3.2+computecanada
+configargparse==1.7.1+computecanada
+connection_pool==0.0.3+computecanada
+contourpy==1.3.1+computecanada
+cplex==22.1.2.0
+cycler==0.12.1+computecanada
+dask==2025.11.0
+datrie==0.8.2+computecanada
+debugpy==1.8.12+computecanada
+decorator==5.1.1+computecanada
+deprecation==2.1.0+computecanada
+dill==0.4.0+computecanada
+docplex==2.30.251+computecanada
+docutils==0.22.3
+dpath==2.2.0+computecanada
+duckdb==1.2.0+computecanada
+executing==2.2.0+computecanada
+fastjsonschema==2.21.2+computecanada
+fonttools==4.55.8+computecanada
+fsspec==2025.12.0
+geopandas==1.1.1+computecanada
+gitdb==4.0.12+computecanada
+gitpython==3.1.45+computecanada
+google-api-core==2.28.1
+google-auth==2.43.0
+google-cloud-core==2.5.0
+google-cloud-storage==3.6.0
+google-resumable-media==2.8.0
+google_crc32c==1.6.0+computecanada
+googleapis-common-protos==1.72.0
+graphviz==0.21+computecanada
+gurobipy==12.0.0+computecanada
+h5py @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/h5py/h5py-3.10.0
+highspy==1.11.0+computecanada
+humanfriendly==10.0+computecanada
+idna==3.11+computecanada
+immutables==0.20+computecanada
+importlib_metadata==8.7.0+computecanada
+ipykernel==6.29.5+computecanada
+ipython==8.32.0+computecanada
+ipywidgets==8.1.5+computecanada
+jedi==0.19.2+computecanada
+jinja2==3.1.6+computecanada
+jsonschema==4.25.1+computecanada
+jsonschema_specifications==2025.9.1+computecanada
+jupyter_client==8.6.3+computecanada
+jupyter_core==5.7.2+computecanada
+jupyterlab_widgets==3.0.13+computecanada
+kiwisolver==1.4.8+computecanada
+linopy==0.5.5+computecanada
+locket==1.0.0+computecanada
+MarkupSafe==3.0.2+computecanada
+matplotlib==3.10.0+computecanada
+matplotlib_inline==0.1.7+computecanada
+mpmath==1.3.0+computecanada
+msgpack==1.1.2+computecanada
+multiprocess==0.70.18+computecanada
+nbformat==5.10.4+computecanada
+ndindex @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/ndindex/ndindex-1.7
+nest_asyncio==1.6.0+computecanada
+netCDF4 @ file:///tmp/ebuser/avx512/netCDF/4.9.2/GCC-12.3.0/netCDF4/netCDF4-1.6.4
+networkx==3.6+computecanada
+nose==1.3.7+computecanada
+numexpr @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/numexpr/numexpr-2.8.7
+numpy==1.26.4+computecanada
+packaging==24.2+computecanada
+pandas==2.2.3+computecanada
+parso==0.8.4+computecanada
+partd==1.4.2+computecanada
+pexpect==4.9.0+computecanada
+pillow==11.1.0+computecanada
+platformdirs==3.10.0+computecanada
+polars==1.35.2
+polars-runtime-32==1.35.2
+prompt_toolkit==3.0.50+computecanada
+proto_plus==1.26.1+computecanada
+protobuf==6.33.1
+psutil==6.1.1+computecanada
+ptyprocess==0.7.0+computecanada
+PuLP==2.8.0+computecanada
+pure_eval==0.2.3+computecanada
+py-cpuinfo @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/pycpuinfo/py-cpuinfo-9.0.0
+pyarrow @ file:///tmp/ebuser/avx512/Arrow/21.0.0/GCCcore-13.3-gentoo/arrow/python
+pyasn1==0.6.1+computecanada
+pyasn1_modules==0.4.2+computecanada
+pygments==2.19.1+computecanada
+pyogrio==0.10.0+computecanada
+pyparsing==3.2.1+computecanada
+pyproj==3.6.1+computecanada
+pypsa==0.30.2
+python_dateutil==2.9.0.post0+computecanada
+pytz==2025.1+computecanada
+PyYAML==6.0.2+computecanada
+pyzmq==26.2.1+computecanada
+referencing==0.37.0+computecanada
+requests==2.32.5+computecanada
+reretry==0.11.8+computecanada
+rpds_py==0.27.0+computecanada
+rsa==4.9.1+computecanada
+salib==1.5.1+computecanada
+scipy==1.15.1+computecanada
+seaborn==0.13.2+computecanada
+shapely==2.0.7+computecanada
+six==1.17.0+computecanada
+smart_open==7.5.0
+smmap==5.0.2+computecanada
+snakemake==8.20.5
+snakemake-executor-plugin-slurm==2.0.1
+snakemake-executor-plugin-slurm-jobstep==0.3.0
+snakemake-interface-common==1.22.0
+snakemake-interface-executor-plugins==9.3.9
+snakemake-interface-report-plugins==1.3.0
+snakemake-interface-storage-plugins==3.5.0
+stack_data==0.6.3+computecanada
+sympy==1.13.3+computecanada
+tables @ file:///tmp/ebuser/avx512/HDF5/1.14.2/GCC-12.3.0/tables/tables-3.9.1
+tabulate==0.9.0+computecanada
+throttler==1.2.2+computecanada
+toolz==1.1.0+computecanada
+toposort==1.10+computecanada
+tornado==6.4.2+computecanada
+tqdm==4.67.1+computecanada
+traitlets==5.14.3+computecanada
+typing_extensions==4.12.2+computecanada
+tzdata==2025.1+computecanada
+urllib3==2.5.0+computecanada
+validators==0.35.0+computecanada
+wcwidth==0.2.13+computecanada
+widgetsnbextension==4.0.13+computecanada
+wrapt==1.17.3+computecanada
+xarray==2025.4.0
+yte==1.9.4
+zipp==3.23.0+computecanada
diff --git a/pyproject.toml b/pyproject.toml
index 57722bc..860886a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,4 +21,5 @@ dependencies = [
     "salib>=1.5.1",
     "seaborn>=0.13.2",
     "snakemake==8.20.5",
+    "xarray==2025.4.0",
 ]
diff --git a/uv.lock b/uv.lock
index c1a13ce..391a99f 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.12"
 
 [[package]]
@@ -469,9 +469,9 @@ source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "dash" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/49/8d/0f641e7c7878ac65b4bb78a2c7cb707db036f82da13fd61948adec44d5aa/dash_bootstrap_components-2.0.3.tar.gz", hash = "sha256:5c161b04a6e7ed19a7d54e42f070c29fd6c385d5a7797e7a82999aa2fc15b1de", size = 115466 }
+sdist = { url = "https://files.pythonhosted.org/packages/49/8d/0f641e7c7878ac65b4bb78a2c7cb707db036f82da13fd61948adec44d5aa/dash_bootstrap_components-2.0.3.tar.gz", hash = "sha256:5c161b04a6e7ed19a7d54e42f070c29fd6c385d5a7797e7a82999aa2fc15b1de", size = 115466, upload-time = "2025-05-22T22:30:18.02Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/f7/f6/b4652aacfbc8d684c9ca8efc5178860a50b54abf82cd1960013c59f8258f/dash_bootstrap_components-2.0.3-py3-none-any.whl", hash = "sha256:82754d3d001ad5482b8a82b496c7bf98a1c68d2669d607a89dda7ec627304af5", size = 203706 },
+    { url = "https://files.pythonhosted.org/packages/f7/f6/b4652aacfbc8d684c9ca8efc5178860a50b54abf82cd1960013c59f8258f/dash_bootstrap_components-2.0.3-py3-none-any.whl", hash = "sha256:82754d3d001ad5482b8a82b496c7bf98a1c68d2669d607a89dda7ec627304af5", size = 203706, upload-time = "2025-05-22T22:30:16.304Z" },
 ]
@@ -1602,6 +1602,11 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/71/96/d5d8859a6dac29f8ebc815ff8e75770bd513db9f08d7a711e21ae562a948/netCDF4-1.7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30d20e56b9ba2c48884eb89c91b63e6c0612b4927881707e34402719153ef17f", size = 9378149, upload-time = "2024-10-22T19:01:04.924Z" },
     { url = "https://files.pythonhosted.org/packages/d1/80/b9c19f1bb4ac6c5fa6f94a4f278bc68a778473d1814a86a375d7cffa193a/netCDF4-1.7.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d6bfd38ba0bde04d56f06c1554714a2ea9dab75811c89450dc3ec57a9d36b80", size = 9254471, upload-time = "2024-10-22T19:01:07.041Z" },
     { url = "https://files.pythonhosted.org/packages/66/b5/e04550fd53de57001dbd5a87242da7ff784c80790adc48897977b6ccf891/netCDF4-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:5c5fbee6134ee1246c397e1508e5297d825aa19221fdf3fa8dc9727ad824d7a5", size = 6990521, upload-time = "2024-10-23T15:02:27.549Z" },
+    { url = "https://files.pythonhosted.org/packages/84/0a/182bb4fe5639699ba39d558b553b8e6f04fbfea6cf78404c0f21ef149bf7/netcdf4-1.7.2-cp311-abi3-macosx_13_0_x86_64.whl", hash = "sha256:7e81c3c47f2772eab0b93fba8bb05b17b58dce17720e1bed25e9d76551deecd0", size = 2751391, upload-time = "2025-10-13T18:32:22.749Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/1f/54ac27c791360f7452ca27ed1cb2917946bbe1ea4337c590a5abcef6332d/netcdf4-1.7.2-cp311-abi3-macosx_14_0_arm64.whl", hash = "sha256:cb2791dba37fc98fd1ac4e236c97822909f54efbcdf7f1415c9777810e0a28f4", size = 2387513, upload-time = "2025-10-13T18:32:27.499Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/5e/9bf3008a9e45c08f4c9fedce4d6f722ef5d970f56a9c5eb375a200dd2b66/netcdf4-1.7.2-cp311-abi3-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf11480f6b8a5b246818ffff6b4d90481e51f8b9555b41af0c372eb0aaf8b65f", size = 9621674, upload-time = "2025-10-13T18:32:29.193Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/75/46871e85f2bbfb1efe229623d25d7c9daa17e2e968d5235572b2c8bb53e8/netcdf4-1.7.2-cp311-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ccc05328a8ff31921b539821791aeb20b054879f3fdf6d1d505bf6422824fec", size = 9453759, upload-time = "2025-10-13T18:32:31.136Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/10/c52f12297965938d9b9be666ea1f9d8340c2aea31d6909d90aa650847248/netcdf4-1.7.2-cp311-abi3-win_amd64.whl", hash = "sha256:999bfc4acebf400ed724d5e7329e2e768accc7ee1fa1d82d505da782f730301b", size = 7148514, upload-time = "2025-10-13T18:32:33.121Z" },
 ]
@@ -2081,6 +2086,7 @@ dependencies = [
     { name = "salib" },
     { name = "seaborn" },
     { name = "snakemake" },
+    { name = "xarray" },
 ]
 
 [package.metadata]
@@ -2101,6 +2107,7 @@ requires-dist = [
     { name = "salib", specifier = ">=1.5.1" },
     { name = "seaborn", specifier = ">=0.13.2" },
     { name = "snakemake", specifier = "==8.20.5" },
+    { name = "xarray", specifier = "==2025.4.0" },
 ]
 
 [[package]]
diff --git a/workflow/Snakefile b/workflow/Snakefile
index 4293f2c..b22a803 100644
--- a/workflow/Snakefile
+++ b/workflow/Snakefile
@@ -8,7 +8,7 @@ configfile: "config/config.yaml"
 configfile: "config/api.yaml"
 configfile: "config/solving.yaml"
 
-localrules: all, gsa, ua, clean_all, clean_gsa, clean_ua, clean_generate
+localrules: all, gsa, ua, clean_all, clean_gsa, clean_ua, clean_generate, test_solve
 
 wildcard_constraints:
     scenario="[a-zA-Z0-9_]+",
@@ -243,4 +243,4 @@ rule clean_generate:
 
 rule make_dag:
     shell:
-        "snakemake ua --dag | dot -Tpng > dag.png"
\ No newline at end of file
+        "snakemake ua --dag | dot -Tpng > dag.png"
diff --git a/workflow/profiles/default/config.yaml b/workflow/profiles/default/config.yaml
index 1052bac..a7662b9 100644
--- a/workflow/profiles/default/config.yaml
+++ b/workflow/profiles/default/config.yaml
@@ -1,5 +1,5 @@
-executor: local
-jobs: 999
+# executor: local
+jobs: 100
 local-cores: 32 # Allow 4 jobs in parallel (4 * 8 threads = 32 cores)
 cores: 10
 latency-wait: 5
@@ -7,6 +7,6 @@ use-envmodules: True
 use-conda: False
 # quiet: all
 default-resources:
-  - runtime=10
-  - mem_mb_per_cpu=16000
-  - disk_mb=1000
\ No newline at end of file
+  runtime: 5
+#  mem_mb_per_cpu: 16000
+  disk_mb: 2000
diff --git a/workflow/profiles/slurm/config.yaml b/workflow/profiles/slurm/config.yaml
new file mode 100644
index 0000000..08b20e9
--- /dev/null
+++ b/workflow/profiles/slurm/config.yaml
@@ -0,0 +1,34 @@
+executor: "slurm"
+
+default-resources:
+  mem_mb: 4000
+  runtime: 10
+  disk_mb: 2000
+  threads: 1
+  slurm_account: '"def-tniet-ab_cpu"'
+  # slurm_partition: '"cpubase_bycore_b1"' # auto-assigned by cluster
+  slurm_partition: '"default"'
+  cores: 2
+  mem_mb_per_cpu: 8000
+
+set-threads:
+  solve_network: 2
+  test_solve_network: 2
+
+set-resources:
+  solve_network:
+    mem_mb_per_cpu: 12000
+    runtime: 20
+  test_solve_network:
+    mem_mb_per_cpu: 12000
+    runtime: 20
+
+
+jobname: "{rule}.{wildcards}.{jobid}"
+
+jobs: 100
+keep-going: True
+rerun-incomplete: True
+latency-wait: 15 # wait 15 seconds for files
+use-envmodules: False
+use-conda: False
diff --git a/workflow/rules/generate.smk b/workflow/rules/generate.smk
index 37603e1..6969207 100644
--- a/workflow/rules/generate.smk
+++ b/workflow/rules/generate.smk
@@ -11,10 +11,10 @@ rule generate_tct_data:
         tct_aeo = "results/{scenario}/generated/tct_aeo.csv",
         tct_gsa = "results/{scenario}/generated/tct_gsa.csv",
     resources:
-        mem_mb=100,
-        runtime=3
-    group:
-        "generate_data"
+        mem_mb_per_cpu=1000,
+        runtime=5
+    # group:
+    #     "generate_data"
     script:
         "../scripts/process_tct.py"
 
@@ -26,10 +26,10 @@ rule retrieve_co2L_data:
         co2_2005 = "resources/emissions/co2_2005.csv",
         co2_2030 = "resources/emissions/co2_2005_50pct.csv",
     resources:
-        mem_mb=200,
-        runtime=3
-    group:
-        "generate_data"
+        mem_mb_per_cpu=1000,
+        runtime=5
+    # group:
+    #     "generate_data"
     script:
         "../scripts/retrieve_co2L.py"
 
@@ -45,10 +45,10 @@ rule generate_co2L_data:
     output:
         co2_gsa = "results/{scenario}/generated/co2L_gsa.csv"
     resources:
-        mem_mb=200,
-        runtime=3
-    group:
-        "generate_data"
+        mem_mb_per_cpu=1000,
+        runtime=5
+    # group:
+    #     "generate_data"
     script:
         "../scripts/process_co2L.py"
 
@@ -61,10 +61,10 @@ rule append_generated_parameters:
    output:
        csv=expand("results/{{scenario}}/generated/{param_f}", param_f = config["gsa"]["parameters"])
    resources:
-        mem_mb=200,
-        runtime=3
-    group:
-        "generate_data"
+        mem_mb_per_cpu=1000,
+        runtime=5
+    # group:
+    #     "generate_data"
     run:
         import pandas as pd
         dfs = []
@@ -100,10 +100,10 @@ rule sanitize_parameters:
     benchmark:
         "benchmarks/sanitize_parameters/{scenario}.txt"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 250),
-        runtime=1
-    group:
-        "generate_data"
+        mem_mb_per_cpu=1000,
+        runtime=5
+    # group:
+    #     "generate_data"
     script:
         "../scripts/sanitize_params.py"
 
@@ -118,14 +118,14 @@ rule sanitize_results:
     output:
         results="results/{scenario}/{mode}/results.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 300),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     benchmark:
         "benchmarks/sanitize_results/{scenario}_{mode}.txt"
     log:
        "logs/sanitize_results/{scenario}_{mode}.log"
-    group:
-        "generate_data"
+    # group:
+    #     "generate_data"
     script:
         "../scripts/sanitize_results.py"
-    
\ No newline at end of file
+
diff --git a/workflow/rules/prepare.smk b/workflow/rules/prepare.smk
index 96ad2d4..38973ff 100644
--- a/workflow/rules/prepare.smk
+++ b/workflow/rules/prepare.smk
@@ -5,8 +5,8 @@ rule copy_network:
     output:
         n = temp("results/{scenario}/copy.nc")
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 100),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     group:
         "prepare_data"
     benchmark:
@@ -21,8 +21,8 @@ rule copy_pop_layout:
     output:
         csv = "results/{scenario}/constraints/pop_layout.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 100),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     group:
         "prepare_data"
     benchmark:
@@ -40,8 +40,8 @@ rule process_reeds_policy:
     output:
         policy = "results/{scenario}/constraints/{policy}.csv",
     resources:
-        mem_mb=100,
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     group:
         "prepare_data"
     benchmark:
@@ -57,8 +57,8 @@ rule copy_tct_data:
     output:
         csv="results/{scenario}/constraints/tct.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 100),
-        runtime=1
+        runtime=5,
+        mem_mb_per_cpu=1000
     group:
         "prepare_data"
     run:
@@ -76,8 +76,8 @@ rule copy_ev_policy_data:
     output:
         csv="results/{scenario}/constraints/ev_policy.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 100),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     group:
         "prepare_data"
     shell:
@@ -98,8 +98,8 @@ rule retrieve_natural_gas_data:
         "benchmarks/retrieve_ng/benchmark.txt"
     retries: 3
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 100),
-        runtime=2
+        mem_mb_per_cpu=1000,
+        runtime=5
     group:
         "prepare_data"
     script:
@@ -117,8 +117,8 @@ rule process_natural_gas:
         ng_domestic = "results/{scenario}/constraints/ng_domestic.csv",
         ng_international = "results/{scenario}/constraints/ng_international.csv",
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 300),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     benchmark:
         "benchmarks/process_ng/{scenario}.txt"
     log:
@@ -147,8 +147,8 @@ rule process_interchange_data:
         capacities = "results/{scenario}/constraints/import_export_capacity.csv",
         costs = "results/{scenario}/constraints/import_export_costs.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 5000),
-        runtime=3
+        mem_mb_per_cpu=1000,
+        runtime=5
     benchmark:
         "benchmarks/process_interchanges/{scenario}.txt"
     log:
@@ -168,8 +168,8 @@ rule add_import_export_to_network:
     output:
         network = "results/{scenario}/base.nc",
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 500),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     group:
         "prepare_data"
     script:
@@ -186,8 +186,8 @@ rule prepare_static_values:
     output:
         parameters="results/{scenario}/ua/set_values.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 300),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     benchmark:
         "benchmarks/prepare_set_values/{scenario}.txt"
     log:
@@ -208,8 +208,8 @@ rule prepare_ua_params:
     output:
         parameters="results/{scenario}/ua/parameters.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 300),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     benchmark:
         "benchmarks/prepare_ua_params/{scenario}.txt"
     log:
@@ -231,8 +231,8 @@ rule sanitize_ua_plot_params:
     output:
         plots="results/{scenario}/ua/plots.csv"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 300),
-        runtime=1
+        mem_mb_per_cpu=1000,
+        runtime=5
     benchmark:
         "benchmarks/prepare_ua_params/{scenario}.txt"
     log:
@@ -240,4 +240,4 @@ rule sanitize_ua_plot_params:
     group:
         "prepare_data"
     script:
-        "../scripts/sanitize_ua_plot_params.py"
\ No newline at end of file
+        "../scripts/sanitize_ua_plot_params.py"
diff --git a/workflow/rules/results.smk b/workflow/rules/results.smk
index 9beacc9..8f2a642 100644
--- a/workflow/rules/results.smk
+++ b/workflow/rules/results.smk
@@ -77,11 +77,12 @@ rule extract_results:
     log:
         "logs/extract_results/{scenario}_{mode}_{run}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 250),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 4000),
+        runtime=5
     benchmark:
         "benchmarks/extract_gsa_results/{scenario}_{mode}_{run}.txt"
     group:
+        # "extract_results"
         "solve_{scenario}_{mode}_{run}"
     script:
         "../scripts/extract_results.py"
@@ -102,12 +103,12 @@ rule combine_results:
     log:
         "logs/combine_results/{scenario}_{mode}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 250),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/combine_results/{scenario}_{mode}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     run:
         import pandas as pd
         data = [pd.read_csv(str(x)) for x in input.results]
@@ -134,12 +135,12 @@ rule parse_gsa_results:
     log:
         "logs/parse_results/{scenario}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 200),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/parse_results/{scenario}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     run:
         import pandas as pd
         from pathlib import Path
@@ -168,12 +169,12 @@ rule calculate_SA:
     log:
         "logs/calculate_sa/{scenario}_{sa_result}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 200),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/calculate_sa/{scenario}_{sa_result}.txt"
     group:
-        "results"
+        "calculate_sa_{scenario}_{sa_result}"
     script:
         "../scripts/calculate_sa.py"
 
@@ -186,12 +187,12 @@ rule combine_sa_results:
     log:
         "logs/combine_sa_results/{scenario}_gsa.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 200),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/combine_sa_results/{scenario}_gsa.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     run:
         import pandas as pd
         from pathlib import Path
@@ -219,12 +220,12 @@ rule calculate_rankings:
     log:
         "logs/calculate_rankings/{scenario}_gsa.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 200),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/calculate_rankings/{scenario}_gsa.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     script:
         "../scripts/calculate_rankings.py"
 
@@ -241,12 +242,12 @@ rule heatmap:
     log:
         "logs/create_heatmap/{scenario}_{plot}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 500),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/create_heatmap/{scenario}_{plot}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     script:
         "../scripts/plot_gsa_heatmap.py"
 
@@ -263,12 +264,12 @@ rule barplot:
     log:
         "logs/create_barplot/{scenario}_{plot}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 500),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/create_barplot/{scenario}_{plot}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     script:
         "../scripts/plot_gsa_barplot.py"
 
@@ -292,12 +293,12 @@ rule parse_ua_results:
     log:
         "logs/parse_results/{scenario}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 200),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/parse_results/{scenario}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     run:
         import pandas as pd
         from pathlib import Path
@@ -321,12 +322,12 @@ rule plot_ua_scatter:
     log:
         "logs/plot_ua/{scenario}_{plot}.log"
    resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 500),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/plot_ua/{scenario}_{plot}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     script:
         "../scripts/plot_ua_scatter.py"
 
@@ -344,11 +345,11 @@ rule plot_ua_barplots:
     log:
         "logs/plot_ua/{scenario}_{plot}.log"
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 500),
-        runtime=1
+        mem_mb_per_cpu=lambda wc, input: max(1.25 * input.size_mb, 2000),
+        runtime=5
     benchmark:
         "benchmarks/plot_ua/{scenario}_{plot}.txt"
-    group:
-        "results"
+    #group:
+    #    "results"
     script:
-        "../scripts/plot_ua_barplot.py"
\ No newline at end of file
+        "../scripts/plot_ua_barplot.py"
diff --git a/workflow/rules/sample.smk b/workflow/rules/sample.smk
index 4a1cf14..86330c7 100644
--- a/workflow/rules/sample.smk
+++ b/workflow/rules/sample.smk
@@ -40,7 +40,7 @@ rule create_sample:
         sample_file = "results/{scenario}/{mode}/sample.csv",
     resources:
         mem_mb=lambda wc, input: max(1.25 * input.size_mb, 250),
-        runtime=1
+        runtime=5
     log:
         "logs/create_sample/{scenario}_{mode}.log"
     benchmark:
@@ -81,9 +81,10 @@ rule apply_gsa_sample_to_network:
         n = temp(expand("results/{{scenario}}/{{mode}}/modelruns/{run}/n.nc", run=get_gsa_modelruns())),
         scaled_sample = "results/{scenario}/{mode}/sample_scaled.csv",
         meta_constriant = expand("results/{{scenario}}/{{mode}}/modelruns/{run}/constraints.csv", run=get_gsa_modelruns())
+    threads: 2
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 600),
-        runtime=1
+        mem_mb_per_cpu=8000,
+        runtime=20
     benchmark:
         "benchmarks/apply_sample/{scenario}_{mode}.txt"
     group:
@@ -119,9 +120,10 @@ rule apply_ua_sample_to_network:
         n = temp(expand("results/{{scenario}}/{{mode}}/modelruns/{run}/n.nc", run=get_ua_modelruns())),
         scaled_sample = "results/{scenario}/{mode}/sample_scaled.csv",
         meta_constriant = expand("results/{{scenario}}/{{mode}}/modelruns/{run}/constraints.csv", run=get_ua_modelruns())
+    threads: 2
     resources:
-        mem_mb=lambda wc, input: max(1.25 * input.size_mb, 600),
-        runtime=1
+        mem_mb_per_cpu=8000,
+        runtime=20
     benchmark:
         "benchmarks/apply_sample/{scenario}_{mode}.txt"
     group:
@@ -159,7 +161,7 @@ rule test_apply_gsa_sample_to_network:
         meta_constriant = "results/{scenario}/{mode}/modelruns/testing/0/constraints.csv",
     resources:
         mem_mb=lambda wc, input: max(1.25 * input.size_mb, 600),
-        runtime=1
+        runtime=3
     benchmark:
         "benchmarks/apply_sample/{scenario}_{mode}.txt"
     group:
@@ -167,4 +169,4 @@ rule test_apply_gsa_sample_to_network:
     log:
         "logs/apply_sample/{scenario}_{mode}.log"
     script:
-        "../scripts/apply_sample.py"
\ No newline at end of file
+        "../scripts/apply_sample.py"
diff --git a/workflow/rules/solve.smk b/workflow/rules/solve.smk
index 3020ca1..f121c7a 100644
--- a/workflow/rules/solve.smk
+++ b/workflow/rules/solve.smk
@@ -32,10 +32,11 @@ rule solve_network:
         import_export_flows_f = "results/{scenario}/constraints/import_export_flows.csv",
     output:
         network = temp("results/{scenario}/{mode}/modelruns/{run}/network.nc") if not config['metadata']['networks'] else "results/{scenario}/{mode}/modelruns/{run}/network.nc",
-    threads: 12
-    resources:
-        mem_mb=2000,
-        runtime=2
+    # specify in profile
+    # threads: lambda wildcards: config["solving"]["solver_options"][config["solver"]["name"]]["threads"]
+    # resources:
+    #     mem_mb_per_cpu=1500,
+    #     runtime=5
     benchmark:
         "benchmarks/solve/{scenario}_{mode}_{run}.txt"
     log:
@@ -68,10 +69,11 @@ rule test_solve_network:
         import_export_flows_f = "results/{scenario}/constraints/import_export_flows.csv",
     output:
         network = temp("results/{scenario}/{mode}/modelruns/testing/0/network.nc") if not config['metadata']['networks'] else "results/{scenario}/{mode}/modelruns/testing/0/network.nc",
-    threads: 12
-    resources:
-        mem_mb=2000,
-        runtime=2
+    # specify in profile
+    # threads: 2
+    # resources:
+    #     mem_mb_per_cpu=16000,
+    #     runtime=10
     benchmark:
         "benchmarks/solve/{scenario}_{mode}_testing.txt"
     log:
@@ -80,4 +82,4 @@
     group:
         "solve_{scenario}_{mode}_testing"
     script:
-        "../scripts/solve.py"
\ No newline at end of file
+        "../scripts/solve.py"