# -*- coding: utf-8 -*-

"""Testing the flows.

This file is part of project oemof (github.com/oemof/oemof). It's copyrighted
by the contributors recorded in the version control history of the file,
available from its original location oemof/tests/test_components.py

SPDX-License-Identifier: MIT
"""

import pytest

from oemof.solph import NonConvex
from oemof.solph.flows import Flow


def test_custom_properties():
    flow1 = Flow(custom_properties={"prop": 1})
    assert flow1.custom_properties["prop"] == 1

    # --- BEGIN: The following code can be removed for versions >= v0.7 ---
    # The deprecated custom_attributes argument still works but emits a warning.
    with pytest.warns(
        FutureWarning,
        match="For backward compatibility,",
    ):
        flow2 = Flow(custom_attributes={"attribute": 1})
    assert flow2.attribute == 1
    assert flow2.custom_properties["attribute"] == 1

    # Setting both the new and the deprecated argument at once is rejected.
    with pytest.raises(
        AttributeError,
        match="Both options cannot be set at the same time.",
    ):
        Flow(
            custom_attributes={"attribute": 1},
            custom_properties={"prop": 1},
        )
    # --- END ---


def test_source_with_full_load_time_max():
    Flow(nominal_capacity=1, full_load_time_max=2)


def test_nonconvex_positive_gradient_error():
    """Testing nonconvex positive gradient error."""
    msg = (
        "You specified a positive gradient in your nonconvex "
        "option. This cannot be combined with a positive or a "
        "negative gradient for a standard flow!"
    )

    with pytest.raises(ValueError, match=msg):
        Flow(
            nonconvex=NonConvex(
                positive_gradient_limit=0.03,
            ),
            positive_gradient_limit=0.03,
        )


def test_non_convex_negative_gradient_error():
    """Testing non-convex negative gradient error."""
    msg = (
        "You specified a negative gradient in your nonconvex "
        "option. This cannot be combined with a positive or a "
        "negative gradient for a standard flow!"
    )

    with pytest.raises(ValueError, match=msg):
        Flow(
            nonconvex=NonConvex(
                negative_gradient_limit=0.03,
            ),
            positive_gradient_limit=0.03,
        )


def test_fix_sequence():
    flow = Flow(nominal_capacity=4, fix=[0.3, 0.2, 0.7])

    assert flow.fix[0] == 0.3
    assert flow.fix[1] == 0.2
    assert flow.fix[2] == 0.7


def test_fix_sequence_non_nominal():
    """Attribute fix needs nominal_capacity."""
    with pytest.raises(AttributeError):
        Flow(fix=[0.3, 0.2, 0.7])