|
| 1 | +from io import StringIO |
1 | 2 | from itertools import chain, permutations
|
2 | 3 | from pathlib import Path
|
3 | 4 | from unittest.mock import patch
|
4 | 5 |
|
| 6 | +import duckdb |
5 | 7 | import numpy as np
|
6 | 8 | import toml
|
7 | 9 | import xarray as xr
|
@@ -861,40 +863,110 @@ def default_new_input(tmp_path):
|
861 | 863 | from muse.examples import copy_model
|
862 | 864 |
|
863 | 865 | copy_model("default_new_input", tmp_path)
|
864 |
| - return tmp_path |
| 866 | + return tmp_path / "model" |
865 | 867 |
|
866 | 868 |
|
867 |
| -@mark.xfail |
868 |
| -def test_read_new_global_commodities(default_new_input): |
869 |
| - from muse.new_input.readers import read_inputs |
@fixture
def con():
    """Provide a fresh in-memory DuckDB connection for each test."""
    return duckdb.connect(database=":memory:")
870 | 872 |
|
871 |
| - all_data = read_inputs(default_new_input) |
872 |
| - data = all_data["global_commodities"] |
| 873 | + |
@fixture
def populate_regions(default_new_input, con):
    """Read regions.csv from the default model into *con* and return the result."""
    from muse.new_input.readers import read_regions_csv

    regions_path = default_new_input / "regions.csv"
    with open(regions_path) as regions_file:
        return read_regions_csv(regions_file, con)
| 880 | + |
| 881 | + |
@fixture
def populate_commodities(default_new_input, con):
    """Read commodities.csv from the default model into *con* and return the result."""
    from muse.new_input.readers import read_commodities_csv

    commodities_path = default_new_input / "commodities.csv"
    with open(commodities_path) as commodities_file:
        return read_commodities_csv(commodities_file, con)
| 888 | + |
| 889 | + |
@fixture
def populate_demand(default_new_input, con, populate_regions, populate_commodities):
    """Read demand.csv into *con* and return the result.

    Depends on the regions and commodities fixtures so the tables they
    populate exist first (demand rows reference both — see the foreign-key
    constraint tests below).
    """
    from muse.new_input.readers import read_demand_csv

    demand_path = default_new_input / "demand.csv"
    with open(demand_path) as demand_file:
        return read_demand_csv(demand_file, con)
| 896 | + |
| 897 | + |
def test_read_regions(populate_regions):
    """The default model defines exactly one region, R1."""
    # np.all collapses the element-wise comparison to a single bool; a bare
    # `assert array == array` only works by accident for a 1-element result
    # and is inconsistent with the np.all(...) style of the demand tests.
    assert np.all(populate_regions["name"] == np.array(["R1"]))
| 900 | + |
| 901 | + |
def test_read_new_global_commodities(populate_commodities):
    """Commodity names, types and units match the default model's CSV."""
    data = populate_commodities
    expected = {
        "name": ["electricity", "gas", "heat", "wind", "CO2f"],
        "type": ["energy"] * 5,
        "unit": ["PJ"] * 4 + ["kt"],
    }
    for column, values in expected.items():
        assert list(data[column]) == values
| 907 | + |
| 908 | + |
def test_calculate_global_commodities(populate_commodities):
    """The commodities table converts to an xr.Dataset indexed by commodity."""
    from muse.new_input.readers import calculate_global_commodities

    dataset = calculate_global_commodities(populate_commodities)

    assert isinstance(dataset, xr.Dataset)
    assert set(dataset.dims) == {"commodity"}

    # All variables carry string data.
    assert all(
        np.issubdtype(dtype, np.dtype("str")) for dtype in dataset.dtypes.values()
    )

    # Coordinates and data variables round-trip the table's columns unchanged.
    assert list(dataset.coords["commodity"].values) == list(
        populate_commodities["name"]
    )
    for column in ("type", "unit"):
        assert list(dataset.data_vars[column].values) == list(
            populate_commodities[column]
        )
| 922 | + |
| 923 | + |
def test_read_new_global_commodities_type_constraint(con):
    """An unrecognised commodity type must be rejected with a constraint error."""
    from muse.new_input.readers import read_commodities_csv

    # The input comes from an in-memory buffer, so the `default_new_input`
    # fixture (which copies the whole example model to disk) was an unused
    # dependency and has been dropped.
    csv = StringIO("name,type,unit\nfoo,invalid,bar\n")
    with raises(duckdb.ConstraintException):
        read_commodities_csv(csv, con)
880 | 930 |
|
881 |
| - assert list(data.coords["commodity"].values) == [ |
882 |
| - "electricity", |
883 |
| - "gas", |
884 |
| - "heat", |
885 |
| - "wind", |
886 |
| - "CO2f", |
887 |
| - ] |
888 |
| - assert list(data.data_vars["type"].values) == ["energy"] * 5 |
889 |
| - assert list(data.data_vars["unit"].values) == ["PJ"] * 4 + ["kt"] |
| 931 | + |
def test_new_read_demand_csv(populate_demand):
    """Demand rows from the default model are read back verbatim."""
    expected = {
        "year": np.array([2020, 2050]),
        "commodity": np.array(["heat", "heat"]),
        "region": np.array(["R1", "R1"]),
        "demand": np.array([10, 30]),
    }
    for column, values in expected.items():
        assert np.all(populate_demand[column] == values)
| 938 | + |
| 939 | + |
def test_new_read_demand_csv_commodity_constraint(
    con, populate_commodities, populate_regions
):
    """A demand row naming an unknown commodity must fail the foreign key."""
    from muse.new_input.readers import read_demand_csv

    # `default_new_input` is already required transitively by the populate_*
    # fixtures, so the direct dependency was redundant and has been dropped.
    csv = StringIO("year,commodity_name,region,demand\n2020,invalid,R1,0\n")
    with raises(duckdb.ConstraintException, match=".*foreign key.*"):
        read_demand_csv(csv, con)
| 948 | + |
| 949 | + |
def test_new_read_demand_csv_region_constraint(
    con, populate_commodities, populate_regions
):
    """A demand row naming an unknown region must fail the foreign key."""
    from muse.new_input.readers import read_demand_csv

    # `default_new_input` is already required transitively by the populate_*
    # fixtures, so the direct dependency was redundant and has been dropped.
    csv = StringIO("year,commodity_name,region,demand\n2020,heat,invalid,0\n")
    with raises(duckdb.ConstraintException, match=".*foreign key.*"):
        read_demand_csv(csv, con)
890 | 958 |
|
891 | 959 |
|
892 | 960 | @mark.xfail
|
893 |
| -def test_read_demand(default_new_input): |
894 |
| - from muse.new_input.readers import read_inputs |
| 961 | +def test_demand_dataset(default_new_input): |
| 962 | + import duckdb |
| 963 | + from muse.new_input.readers import read_commodities, read_demand, read_regions |
895 | 964 |
|
896 |
| - all_data = read_inputs(default_new_input) |
897 |
| - data = all_data["demand"] |
| 965 | + con = duckdb.connect(":memory:") |
| 966 | + |
| 967 | + read_regions(default_new_input, con) |
| 968 | + read_commodities(default_new_input, con) |
| 969 | + data = read_demand(default_new_input, con) |
898 | 970 |
|
899 | 971 | assert isinstance(data, xr.DataArray)
|
900 | 972 | assert data.dtype == np.float64
|
|
0 commit comments