storage.py

  1. from __future__ import annotations
  2. import re
  3. import copy
  4. from datetime import datetime, timedelta
  5. from typing import Type
  6. import pandas as pd
  7. import numpy as np
  8. from flask import current_app
  9. from flexmeasures import Sensor
  10. from flexmeasures.data.models.planning import (
  11. FlowCommitment,
  12. Scheduler,
  13. SchedulerOutputType,
  14. StockCommitment,
  15. )
  16. from flexmeasures.data.models.planning.linear_optimization import device_scheduler
  17. from flexmeasures.data.models.planning.utils import (
  18. add_tiny_price_slope,
  19. ensure_prices_are_not_empty,
  20. initialize_index,
  21. initialize_series,
  22. initialize_df,
  23. get_power_values,
  24. fallback_charging_policy,
  25. get_continuous_series_sensor_or_quantity,
  26. )
  27. from flexmeasures.data.models.planning.exceptions import InfeasibleProblemException
  28. from flexmeasures.data.schemas.scheduling.storage import StorageFlexModelSchema
  29. from flexmeasures.data.schemas.scheduling import (
  30. FlexContextSchema,
  31. MultiSensorFlexModelSchema,
  32. )
  33. from flexmeasures.utils.calculations import (
  34. integrate_time_series,
  35. )
  36. from flexmeasures.utils.time_utils import get_max_planning_horizon
  37. from flexmeasures.utils.coding_utils import deprecated
  38. from flexmeasures.utils.time_utils import determine_minimum_resampling_resolution
  39. from flexmeasures.utils.unit_utils import ur, convert_units
  40. class MetaStorageScheduler(Scheduler):
  41. """This class defines the constraints of a schedule for a storage device from the
  42. flex-model, flex-context, and sensor and asset attributes"""
  43. __version__ = None
  44. __author__ = "Seita"
  45. COLUMNS = [
  46. "equals",
  47. "max",
  48. "min",
  49. "efficiency",
  50. "derivative equals",
  51. "derivative max",
  52. "derivative min",
  53. "derivative down efficiency",
  54. "derivative up efficiency",
  55. "stock delta",
  56. ]
  57. def compute_schedule(self) -> pd.Series | None:
  58. """Schedule a battery or Charge Point based directly on the latest beliefs regarding market prices within the specified time window.
  59. For the resulting consumption schedule, consumption is defined as positive values.
  60. Deprecated method in v0.14. As an alternative, use MetaStorageScheduler.compute().
  61. """
  62. return self.compute()
  63. def _prepare(self, skip_validation: bool = False) -> tuple: # noqa: C901
  64. """This function prepares the required data to compute the schedule:
  65. - price data
66. - device constraints
  67. - ems constraints
  68. :param skip_validation: If True, skip validation of constraints specified in the data.
  69. :returns: Input data for the scheduler
  70. """
  71. if not self.config_deserialized:
  72. self.deserialize_config()
  73. start = self.start
  74. end = self.end
  75. resolution = self.resolution
  76. belief_time = self.belief_time
  77. # List the asset and sensor(s) being scheduled
  78. if self.asset is not None:
  79. sensors = [flex_model_d["sensor"] for flex_model_d in self.flex_model]
  80. resolution = determine_minimum_resampling_resolution(
  81. [s.event_resolution for s in sensors]
  82. )
  83. asset = self.asset
  84. else:
  85. # For backwards compatibility with the single asset scheduler
  86. sensors = [self.sensor]
  87. asset = self.sensor.generic_asset
  88. # For backwards compatibility with the single asset scheduler
  89. flex_model = self.flex_model
  90. if not isinstance(flex_model, list):
  91. flex_model = [flex_model]
  92. # total number of flexible devices D described in the flex-model
  93. num_flexible_devices = len(flex_model)
  94. soc_at_start = [flex_model_d.get("soc_at_start") for flex_model_d in flex_model]
  95. soc_targets = [flex_model_d.get("soc_targets") for flex_model_d in flex_model]
  96. soc_min = [flex_model_d.get("soc_min") for flex_model_d in flex_model]
  97. soc_max = [flex_model_d.get("soc_max") for flex_model_d in flex_model]
  98. soc_minima = [flex_model_d.get("soc_minima") for flex_model_d in flex_model]
  99. soc_maxima = [flex_model_d.get("soc_maxima") for flex_model_d in flex_model]
  100. storage_efficiency = [
  101. flex_model_d.get("storage_efficiency") for flex_model_d in flex_model
  102. ]
  103. prefer_charging_sooner = [
  104. flex_model_d.get("prefer_charging_sooner") for flex_model_d in flex_model
  105. ]
  106. prefer_curtailing_later = [
  107. flex_model_d.get("prefer_curtailing_later") for flex_model_d in flex_model
  108. ]
  109. soc_gain = [flex_model_d.get("soc_gain") for flex_model_d in flex_model]
  110. soc_usage = [flex_model_d.get("soc_usage") for flex_model_d in flex_model]
  111. consumption_capacity = [
  112. flex_model_d.get("consumption_capacity") for flex_model_d in flex_model
  113. ]
  114. production_capacity = [
  115. flex_model_d.get("production_capacity") for flex_model_d in flex_model
  116. ]
  117. charging_efficiency = [
  118. flex_model_d.get("charging_efficiency") for flex_model_d in flex_model
  119. ]
  120. discharging_efficiency = [
  121. flex_model_d.get("discharging_efficiency") for flex_model_d in flex_model
  122. ]
  123. # Get info from flex-context
  124. consumption_price_sensor = self.flex_context.get("consumption_price_sensor")
  125. production_price_sensor = self.flex_context.get("production_price_sensor")
  126. consumption_price = self.flex_context.get(
  127. "consumption_price", consumption_price_sensor
  128. )
  129. production_price = self.flex_context.get(
  130. "production_price", production_price_sensor
  131. )
  132. # fallback to using the consumption price, for backwards compatibility
  133. if production_price is None:
  134. production_price = consumption_price
  135. inflexible_device_sensors = self.flex_context.get(
  136. "inflexible_device_sensors", []
  137. )
  138. # Fetch the device's power capacity (required Sensor attribute)
  139. power_capacity_in_mw = self._get_device_power_capacity(flex_model, sensors)
  140. # Check for known prices or price forecasts
  141. up_deviation_prices = get_continuous_series_sensor_or_quantity(
  142. variable_quantity=consumption_price,
  143. actuator=asset,
  144. unit=FlexContextSchema()
  145. .declared_fields["consumption_price"]
  146. ._get_unit(consumption_price),
  147. query_window=(start, end),
  148. resolution=resolution,
  149. beliefs_before=belief_time,
  150. fill_sides=True,
  151. ).to_frame(name="event_value")
  152. ensure_prices_are_not_empty(up_deviation_prices, consumption_price)
  153. down_deviation_prices = get_continuous_series_sensor_or_quantity(
  154. variable_quantity=production_price,
  155. actuator=asset,
  156. unit=FlexContextSchema()
  157. .declared_fields["production_price"]
  158. ._get_unit(production_price),
  159. query_window=(start, end),
  160. resolution=resolution,
  161. beliefs_before=belief_time,
  162. fill_sides=True,
  163. ).to_frame(name="event_value")
  164. ensure_prices_are_not_empty(down_deviation_prices, production_price)
  165. start = pd.Timestamp(start).tz_convert("UTC")
  166. end = pd.Timestamp(end).tz_convert("UTC")
  167. # Add tiny price slope to prefer charging now rather than later, and discharging later rather than now.
  168. # We penalise future consumption and reward future production with at most 1 per thousand times the energy price spread.
  169. # todo: move to flow or stock commitment per device
  170. if any(prefer_charging_sooner):
  171. up_deviation_prices = add_tiny_price_slope(
  172. up_deviation_prices, "event_value"
  173. )
  174. down_deviation_prices = add_tiny_price_slope(
  175. down_deviation_prices, "event_value"
  176. )
  177. # Create Series with EMS capacities
  178. ems_power_capacity_in_mw = get_continuous_series_sensor_or_quantity(
  179. variable_quantity=self.flex_context.get("ems_power_capacity_in_mw"),
  180. actuator=asset,
  181. unit="MW",
  182. query_window=(start, end),
  183. resolution=resolution,
  184. beliefs_before=belief_time,
  185. resolve_overlaps="min",
  186. )
  187. ems_consumption_capacity = get_continuous_series_sensor_or_quantity(
  188. variable_quantity=self.flex_context.get("ems_consumption_capacity_in_mw"),
  189. actuator=asset,
  190. unit="MW",
  191. query_window=(start, end),
  192. resolution=resolution,
  193. beliefs_before=belief_time,
  194. max_value=ems_power_capacity_in_mw,
  195. resolve_overlaps="min",
  196. )
  197. ems_production_capacity = -1 * get_continuous_series_sensor_or_quantity(
  198. variable_quantity=self.flex_context.get("ems_production_capacity_in_mw"),
  199. actuator=asset,
  200. unit="MW",
  201. query_window=(start, end),
  202. resolution=resolution,
  203. beliefs_before=belief_time,
  204. max_value=ems_power_capacity_in_mw,
  205. resolve_overlaps="min",
  206. )
  207. # Set up commitments to optimise for
  208. commitments = []
  209. index = initialize_index(start, end, resolution)
  210. commitment_quantities = initialize_series(0, start, end, resolution)
  211. # Convert energy prices to EUR/(deviation of commitment, which is in MW)
  212. commitment_upwards_deviation_price = (
  213. up_deviation_prices.loc[start : end - resolution]["event_value"]
  214. * resolution
  215. / pd.Timedelta("1h")
  216. )
  217. commitment_downwards_deviation_price = (
  218. down_deviation_prices.loc[start : end - resolution]["event_value"]
  219. * resolution
  220. / pd.Timedelta("1h")
  221. )
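# For example, at a 15-minute resolution a price of 60 EUR/MWh translates to 60 * (0.25 h) = 15 EUR
# per MW of deviation sustained during one time slot.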
  222. # Set up commitments DataFrame
  223. commitment = FlowCommitment(
  224. name="energy",
  225. quantity=commitment_quantities,
  226. upwards_deviation_price=commitment_upwards_deviation_price,
  227. downwards_deviation_price=commitment_downwards_deviation_price,
  228. index=index,
  229. )
  230. commitments.append(commitment)
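# Since the committed quantity is zero, any consumption (upwards deviation) is settled at the
# consumption price and any production (downwards deviation) at the production price.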
  231. # Set up peak commitments
  232. if self.flex_context.get("ems_peak_consumption_price") is not None:
  233. ems_peak_consumption = get_continuous_series_sensor_or_quantity(
  234. variable_quantity=self.flex_context.get("ems_peak_consumption_in_mw"),
  235. actuator=asset,
  236. unit="MW",
  237. query_window=(start, end),
  238. resolution=resolution,
  239. beliefs_before=belief_time,
  240. max_value=np.inf, # np.nan -> np.inf to ignore commitment if no quantity is given
  241. fill_sides=True,
  242. )
  243. ems_peak_consumption_price = self.flex_context.get(
  244. "ems_peak_consumption_price"
  245. )
  246. ems_peak_consumption_price = get_continuous_series_sensor_or_quantity(
  247. variable_quantity=ems_peak_consumption_price,
  248. actuator=asset,
  249. unit=FlexContextSchema()
  250. .declared_fields["ems_peak_consumption_price"]
  251. ._get_unit(ems_peak_consumption_price),
  252. query_window=(start, end),
  253. resolution=resolution,
  254. beliefs_before=belief_time,
  255. fill_sides=True,
  256. )
  257. # Set up commitments DataFrame
  258. commitment = FlowCommitment(
  259. name="consumption peak",
  260. quantity=ems_peak_consumption,
  261. # positive price because breaching in the upwards (consumption) direction is penalized
  262. upwards_deviation_price=ems_peak_consumption_price,
  263. _type="any",
  264. index=index,
  265. )
  266. commitments.append(commitment)
  267. if self.flex_context.get("ems_peak_production_price") is not None:
  268. ems_peak_production = get_continuous_series_sensor_or_quantity(
  269. variable_quantity=self.flex_context.get("ems_peak_production_in_mw"),
  270. actuator=asset,
  271. unit="MW",
  272. query_window=(start, end),
  273. resolution=resolution,
  274. beliefs_before=belief_time,
  275. max_value=np.inf, # np.nan -> np.inf to ignore commitment if no quantity is given
  276. fill_sides=True,
  277. )
  278. ems_peak_production_price = self.flex_context.get(
  279. "ems_peak_production_price"
  280. )
  281. ems_peak_production_price = get_continuous_series_sensor_or_quantity(
  282. variable_quantity=ems_peak_production_price,
  283. actuator=asset,
  284. unit=FlexContextSchema()
  285. .declared_fields["ems_peak_production_price"]
  286. ._get_unit(ems_peak_production_price),
  287. query_window=(start, end),
  288. resolution=resolution,
  289. beliefs_before=belief_time,
  290. fill_sides=True,
  291. )
  292. # Set up commitments DataFrame
  293. commitment = FlowCommitment(
  294. name="production peak",
  295. quantity=-ems_peak_production, # production is negative quantity
  296. # negative price because peaking in the downwards (production) direction is penalized
  297. downwards_deviation_price=-ems_peak_production_price,
  298. _type="any",
  299. index=index,
  300. )
  301. commitments.append(commitment)
  302. # Set up capacity breach commitments and EMS capacity constraints
  303. ems_consumption_breach_price = self.flex_context.get(
  304. "ems_consumption_breach_price"
  305. )
  306. ems_production_breach_price = self.flex_context.get(
  307. "ems_production_breach_price"
  308. )
  309. ems_constraints = initialize_df(
  310. StorageScheduler.COLUMNS, start, end, resolution
  311. )
  312. if ems_consumption_breach_price is not None:
  313. # Convert to Series
  314. any_ems_consumption_breach_price = get_continuous_series_sensor_or_quantity(
  315. variable_quantity=ems_consumption_breach_price,
  316. actuator=asset,
  317. unit=FlexContextSchema()
  318. .declared_fields["ems_consumption_breach_price"]
  319. ._get_unit(ems_consumption_breach_price),
  320. query_window=(start, end),
  321. resolution=resolution,
  322. beliefs_before=belief_time,
  323. fill_sides=True,
  324. )
  325. all_ems_consumption_breach_price = get_continuous_series_sensor_or_quantity(
  326. variable_quantity=ems_consumption_breach_price,
  327. actuator=asset,
  328. unit=FlexContextSchema()
  329. .declared_fields["ems_consumption_breach_price"]
  330. ._get_unit(ems_consumption_breach_price)
  331. + "*h", # from EUR/MWh to EUR/MW/resolution
  332. query_window=(start, end),
  333. resolution=resolution,
  334. beliefs_before=belief_time,
  335. fill_sides=True,
  336. )
  337. # Set up commitments DataFrame to penalize any breach
  338. commitment = FlowCommitment(
  339. name="any consumption breach",
  340. quantity=ems_consumption_capacity,
  341. # positive price because breaching in the upwards (consumption) direction is penalized
  342. upwards_deviation_price=any_ems_consumption_breach_price,
  343. _type="any",
  344. index=index,
  345. )
  346. commitments.append(commitment)
  347. # Set up commitments DataFrame to penalize each breach
  348. commitment = FlowCommitment(
  349. name="all consumption breaches",
  350. quantity=ems_consumption_capacity,
  351. # positive price because breaching in the upwards (consumption) direction is penalized
  352. upwards_deviation_price=all_ems_consumption_breach_price,
  353. index=index,
  354. )
  355. commitments.append(commitment)
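# Pricing pattern used here and below: the "all ... breaches" price is converted with "*h" so it
# applies per time slot of breached capacity, whereas the "any ... breach" price is kept in its
# original unit, consistent with a single penalty on the breach level over the horizon.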
  356. # Take the physical capacity as a hard constraint
  357. ems_constraints["derivative max"] = ems_power_capacity_in_mw
  358. else:
  359. # Take the contracted capacity as a hard constraint
  360. ems_constraints["derivative max"] = ems_consumption_capacity
  361. if ems_production_breach_price is not None:
  362. # Convert to Series
  363. any_ems_production_breach_price = get_continuous_series_sensor_or_quantity(
  364. variable_quantity=ems_production_breach_price,
  365. actuator=asset,
  366. unit=FlexContextSchema()
  367. .declared_fields["ems_production_breach_price"]
  368. ._get_unit(ems_production_breach_price),
  369. query_window=(start, end),
  370. resolution=resolution,
  371. beliefs_before=belief_time,
  372. fill_sides=True,
  373. )
  374. all_ems_production_breach_price = get_continuous_series_sensor_or_quantity(
  375. variable_quantity=ems_production_breach_price,
  376. actuator=asset,
  377. unit=FlexContextSchema()
  378. .declared_fields["ems_production_breach_price"]
  379. ._get_unit(ems_production_breach_price)
  380. + "*h", # from EUR/MWh to EUR/MW/resolution
  381. query_window=(start, end),
  382. resolution=resolution,
  383. beliefs_before=belief_time,
  384. fill_sides=True,
  385. )
  386. # Set up commitments DataFrame to penalize any breach
  387. commitment = FlowCommitment(
  388. name="any production breach",
  389. quantity=ems_production_capacity,
  390. # negative price because breaching in the downwards (production) direction is penalized
  391. downwards_deviation_price=-any_ems_production_breach_price,
  392. _type="any",
  393. index=index,
  394. )
  395. commitments.append(commitment)
  396. # Set up commitments DataFrame to penalize each breach
  397. commitment = FlowCommitment(
  398. name="all production breaches",
  399. quantity=ems_production_capacity,
  400. # negative price because breaching in the downwards (production) direction is penalized
  401. downwards_deviation_price=-all_ems_production_breach_price,
  402. index=index,
  403. )
  404. commitments.append(commitment)
  405. # Take the physical capacity as a hard constraint
  406. ems_constraints["derivative min"] = -ems_power_capacity_in_mw
  407. else:
  408. # Take the contracted capacity as a hard constraint
  409. ems_constraints["derivative min"] = ems_production_capacity
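# In short: when a breach price is given, the contracted consumption/production capacity is enforced
# softly through the breach commitments above and only the physical EMS power capacity remains a
# hard bound; without a breach price, the contracted capacity itself is the hard bound.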
  410. # Flow commitments per device
  411. # Add tiny price slope to prefer curtailing later rather than now.
  412. # The price slope is half of the slope to prefer charging sooner
  413. for d, prefer_curtailing_later_d in enumerate(prefer_curtailing_later):
  414. if prefer_curtailing_later_d:
  415. tiny_price_slope = (
  416. add_tiny_price_slope(up_deviation_prices, "event_value")
  417. - up_deviation_prices
  418. )
  419. tiny_price_slope *= 0.5
  420. commitment = FlowCommitment(
  421. name=f"prefer curtailing device {d} later",
  422. # Prefer curtailing consumption later by penalizing later consumption
  423. upwards_deviation_price=tiny_price_slope,
  424. # Prefer curtailing production later by penalizing later production
  425. downwards_deviation_price=-tiny_price_slope,
  426. index=index,
  427. device=d,
  428. )
  429. commitments.append(commitment)
  430. # Set up device constraints: scheduled flexible devices for this EMS (from index 0 to D-1), plus the forecasted inflexible devices (at indices D to n).
  431. device_constraints = [
  432. initialize_df(StorageScheduler.COLUMNS, start, end, resolution)
  433. for i in range(num_flexible_devices + len(inflexible_device_sensors))
  434. ]
  435. for i, inflexible_sensor in enumerate(inflexible_device_sensors):
  436. device_constraints[i + num_flexible_devices]["derivative equals"] = (
  437. get_power_values(
  438. query_window=(start, end),
  439. resolution=resolution,
  440. beliefs_before=belief_time,
  441. sensor=inflexible_sensor,
  442. )
  443. )
  444. # Create the device constraints for all the flexible devices
  445. for d in range(num_flexible_devices):
  446. sensor_d = sensors[d]
  447. # fetch SOC constraints from sensors
  448. if isinstance(soc_targets[d], Sensor):
  449. soc_targets[d] = get_continuous_series_sensor_or_quantity(
  450. variable_quantity=soc_targets[d],
  451. actuator=sensor_d,
  452. unit="MWh",
  453. query_window=(start + resolution, end + resolution),
  454. resolution=resolution,
  455. beliefs_before=belief_time,
  456. as_instantaneous_events=True,
  457. resolve_overlaps="first",
  458. )
  459. # todo: check flex-model for soc_minima_breach_price and soc_maxima_breach_price fields; if these are defined, create a StockCommitment using both prices (if only 1 price is given, still create the commitment, but only penalize one direction)
  460. if isinstance(soc_minima[d], Sensor):
  461. soc_minima[d] = get_continuous_series_sensor_or_quantity(
  462. variable_quantity=soc_minima[d],
  463. actuator=sensor_d,
  464. unit="MWh",
  465. query_window=(start + resolution, end + resolution),
  466. resolution=resolution,
  467. beliefs_before=belief_time,
  468. as_instantaneous_events=True,
  469. resolve_overlaps="max",
  470. )
  471. if (
  472. self.flex_context.get("soc_minima_breach_price") is not None
  473. and soc_minima[d] is not None
  474. ):
  475. soc_minima_breach_price = self.flex_context["soc_minima_breach_price"]
  476. any_soc_minima_breach_price = get_continuous_series_sensor_or_quantity(
  477. variable_quantity=soc_minima_breach_price,
  478. actuator=asset,
  479. unit=FlexContextSchema()
  480. .declared_fields["soc_minima_breach_price"]
  481. ._get_unit(soc_minima_breach_price),
  482. query_window=(start + resolution, end + resolution),
  483. resolution=resolution,
  484. beliefs_before=belief_time,
  485. fallback_attribute="soc-minima-breach-price",
  486. fill_sides=True,
  487. ).shift(-1, freq=resolution)
  488. all_soc_minima_breach_price = get_continuous_series_sensor_or_quantity(
  489. variable_quantity=soc_minima_breach_price,
  490. actuator=asset,
  491. unit=FlexContextSchema()
  492. .declared_fields["soc_minima_breach_price"]
  493. ._get_unit(soc_minima_breach_price)
  494. + "*h", # from EUR/MWh² to EUR/MWh/resolution
  495. query_window=(start + resolution, end + resolution),
  496. resolution=resolution,
  497. beliefs_before=belief_time,
  498. fallback_attribute="soc-minima-breach-price",
  499. fill_sides=True,
  500. ).shift(-1, freq=resolution)
  501. # Set up commitments DataFrame
  502. # soc_minima_d is a temp variable because add_storage_constraints can't deal with Series yet
  503. soc_minima_d = get_continuous_series_sensor_or_quantity(
  504. variable_quantity=soc_minima[d],
  505. actuator=sensor_d,
  506. unit="MWh",
  507. query_window=(start + resolution, end + resolution),
  508. resolution=resolution,
  509. beliefs_before=belief_time,
  510. as_instantaneous_events=True,
  511. resolve_overlaps="max",
  512. )
  513. # shift soc minima by one resolution (they define a state at a certain time,
  514. # while the commitment defines what the total stock should be at the end of a time slot,
  515. # where the time slot is indexed by its starting time)
  516. soc_minima_d = soc_minima_d.shift(-1, freq=resolution) * (
  517. timedelta(hours=1) / resolution
  518. ) - soc_at_start[d] * (timedelta(hours=1) / resolution)
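# The factor (1 hour / resolution) converts energy in MWh into the scheduler's stock unit (in which
# power in MW sums directly over time slots); subtracting the rescaled soc_at_start expresses the
# minima relative to the initial state of charge.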
  519. commitment = StockCommitment(
  520. name="any soc minima",
  521. quantity=soc_minima_d,
  522. # negative price because breaching in the downwards (shortage) direction is penalized
  523. downwards_deviation_price=-any_soc_minima_breach_price,
  524. index=index,
  525. _type="any",
  526. device=d,
  527. )
  528. commitments.append(commitment)
  529. commitment = StockCommitment(
  530. name="all soc minima",
  531. quantity=soc_minima_d,
  532. # negative price because breaching in the downwards (shortage) direction is penalized
  533. downwards_deviation_price=-all_soc_minima_breach_price,
  534. index=index,
  535. device=d,
  536. )
  537. commitments.append(commitment)
  538. # soc-minima will become a soft constraint (modelled as stock commitments), so remove hard constraint
  539. soc_minima[d] = None
  540. if isinstance(soc_maxima[d], Sensor):
  541. soc_maxima[d] = get_continuous_series_sensor_or_quantity(
  542. variable_quantity=soc_maxima[d],
  543. actuator=sensor_d,
  544. unit="MWh",
  545. query_window=(start + resolution, end + resolution),
  546. resolution=resolution,
  547. beliefs_before=belief_time,
  548. as_instantaneous_events=True,
  549. resolve_overlaps="min",
  550. )
  551. if (
  552. self.flex_context.get("soc_maxima_breach_price") is not None
  553. and soc_maxima[d] is not None
  554. ):
  555. soc_maxima_breach_price = self.flex_context["soc_maxima_breach_price"]
  556. any_soc_maxima_breach_price = get_continuous_series_sensor_or_quantity(
  557. variable_quantity=soc_maxima_breach_price,
  558. actuator=asset,
  559. unit=FlexContextSchema()
  560. .declared_fields["soc_maxima_breach_price"]
  561. ._get_unit(soc_maxima_breach_price),
  562. query_window=(start + resolution, end + resolution),
  563. resolution=resolution,
  564. beliefs_before=belief_time,
  565. fallback_attribute="soc-maxima-breach-price",
  566. fill_sides=True,
  567. ).shift(-1, freq=resolution)
  568. all_soc_maxima_breach_price = get_continuous_series_sensor_or_quantity(
  569. variable_quantity=soc_maxima_breach_price,
  570. actuator=asset,
  571. unit=FlexContextSchema()
  572. .declared_fields["soc_maxima_breach_price"]
  573. ._get_unit(soc_maxima_breach_price)
  574. + "*h", # from EUR/MWh² to EUR/MWh/resolution
  575. query_window=(start + resolution, end + resolution),
  576. resolution=resolution,
  577. beliefs_before=belief_time,
  578. fallback_attribute="soc-maxima-breach-price",
  579. fill_sides=True,
  580. ).shift(-1, freq=resolution)
  581. # Set up commitments DataFrame
  582. # soc_maxima_d is a temp variable because add_storage_constraints can't deal with Series yet
  583. soc_maxima_d = get_continuous_series_sensor_or_quantity(
  584. variable_quantity=soc_maxima[d],
  585. actuator=sensor_d,
  586. unit="MWh",
  587. query_window=(start + resolution, end + resolution),
  588. resolution=resolution,
  589. beliefs_before=belief_time,
  590. as_instantaneous_events=True,
  591. resolve_overlaps="min",
  592. )
  593. # shift soc maxima by one resolution (they define a state at a certain time,
  594. # while the commitment defines what the total stock should be at the end of a time slot,
  595. # where the time slot is indexed by its starting time)
  596. soc_maxima_d = soc_maxima_d.shift(-1, freq=resolution) * (
  597. timedelta(hours=1) / resolution
  598. ) - soc_at_start[d] * (timedelta(hours=1) / resolution)
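# Same unit conversion and shift relative to soc_at_start as for the soc minima above.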
  599. commitment = StockCommitment(
  600. name="any soc maxima",
  601. quantity=soc_maxima_d,
  602. # positive price because breaching in the upwards (surplus) direction is penalized
  603. upwards_deviation_price=any_soc_maxima_breach_price,
  604. index=index,
  605. _type="any",
  606. device=d,
  607. )
  608. commitments.append(commitment)
  609. commitment = StockCommitment(
  610. name="all soc maxima",
  611. quantity=soc_maxima_d,
  612. # positive price because breaching in the upwards (surplus) direction is penalized
  613. upwards_deviation_price=all_soc_maxima_breach_price,
  614. index=index,
  615. device=d,
  616. )
  617. commitments.append(commitment)
  618. # soc-maxima will become a soft constraint (modelled as stock commitments), so remove hard constraint
  619. soc_maxima[d] = None
  620. if soc_at_start[d] is not None:
  621. device_constraints[d] = add_storage_constraints(
  622. start,
  623. end,
  624. resolution,
  625. soc_at_start[d],
  626. soc_targets[d],
  627. soc_maxima[d],
  628. soc_minima[d],
  629. soc_max[d],
  630. soc_min[d],
  631. )
  632. power_capacity_in_mw[d] = get_continuous_series_sensor_or_quantity(
  633. variable_quantity=power_capacity_in_mw[d],
  634. actuator=sensor_d,
  635. unit="MW",
  636. query_window=(start, end),
  637. resolution=resolution,
  638. beliefs_before=belief_time,
  639. min_value=0, # capacities are positive by definition
  640. resolve_overlaps="min",
  641. )
  642. device_constraints[d]["derivative max"] = power_capacity_in_mw[d]
  643. device_constraints[d]["derivative min"] = -power_capacity_in_mw[d]
  644. if sensor_d.get_attribute("is_strictly_non_positive"):
  645. device_constraints[d]["derivative min"] = 0
  646. else:
  647. production_capacity_d = get_continuous_series_sensor_or_quantity(
  648. variable_quantity=production_capacity[d],
  649. actuator=sensor_d,
  650. unit="MW",
  651. query_window=(start, end),
  652. resolution=resolution,
  653. beliefs_before=belief_time,
  654. fallback_attribute="production_capacity",
  655. max_value=power_capacity_in_mw[d],
  656. min_value=0, # capacities are positive by definition
  657. resolve_overlaps="min",
  658. )
  659. if (
  660. self.flex_context.get("production_breach_price") is not None
  661. and production_capacity[d] is not None
  662. ):
663. # production-capacity will become a soft constraint
  664. production_breach_price = self.flex_context[
  665. "production_breach_price"
  666. ]
  667. any_production_breach_price = (
  668. get_continuous_series_sensor_or_quantity(
  669. variable_quantity=production_breach_price,
  670. actuator=asset,
  671. unit=FlexContextSchema()
  672. .declared_fields["production_breach_price"]
  673. ._get_unit(production_breach_price),
  674. query_window=(start, end),
  675. resolution=resolution,
  676. beliefs_before=belief_time,
  677. fallback_attribute="production-breach-price",
  678. fill_sides=True,
  679. )
  680. )
  681. all_production_breach_price = (
  682. get_continuous_series_sensor_or_quantity(
  683. variable_quantity=production_breach_price,
  684. actuator=asset,
  685. unit=FlexContextSchema()
  686. .declared_fields["production_breach_price"]
  687. ._get_unit(production_breach_price)
  688. + "*h", # from EUR/MWh to EUR/MW/resolution
  689. query_window=(start, end),
  690. resolution=resolution,
  691. beliefs_before=belief_time,
  692. fallback_attribute="production-breach-price",
  693. fill_sides=True,
  694. )
  695. )
  696. # Set up commitments DataFrame
  697. commitment = FlowCommitment(
  698. name=f"any production breach device {d}",
  699. quantity=-production_capacity_d,
  700. # negative price because breaching in the downwards (production) direction is penalized
  701. downwards_deviation_price=-any_production_breach_price,
  702. index=index,
  703. _type="any",
  704. device=d,
  705. )
  706. commitments.append(commitment)
  707. commitment = FlowCommitment(
  708. name=f"all production breaches device {d}",
  709. quantity=-production_capacity_d,
  710. # negative price because breaching in the downwards (production) direction is penalized
  711. downwards_deviation_price=-all_production_breach_price,
  712. index=index,
  713. device=d,
  714. )
  715. commitments.append(commitment)
  716. else:
717. # production-capacity will become a hard constraint
  718. device_constraints[d]["derivative min"] = -production_capacity_d
  719. if sensor_d.get_attribute("is_strictly_non_negative"):
  720. device_constraints[d]["derivative max"] = 0
  721. else:
  722. consumption_capacity_d = get_continuous_series_sensor_or_quantity(
  723. variable_quantity=consumption_capacity[d],
  724. actuator=sensor_d,
  725. unit="MW",
  726. query_window=(start, end),
  727. resolution=resolution,
  728. beliefs_before=belief_time,
  729. fallback_attribute="consumption_capacity",
  730. min_value=0, # capacities are positive by definition
  731. max_value=power_capacity_in_mw[d],
  732. resolve_overlaps="min",
  733. )
  734. if (
  735. self.flex_context.get("consumption_breach_price") is not None
  736. and consumption_capacity[d] is not None
  737. ):
  738. # consumption-capacity will become a soft constraint
  739. consumption_breach_price = self.flex_context[
  740. "consumption_breach_price"
  741. ]
  742. any_consumption_breach_price = (
  743. get_continuous_series_sensor_or_quantity(
  744. variable_quantity=consumption_breach_price,
  745. actuator=asset,
  746. unit=FlexContextSchema()
  747. .declared_fields["consumption_breach_price"]
  748. ._get_unit(consumption_breach_price),
  749. query_window=(start, end),
  750. resolution=resolution,
  751. beliefs_before=belief_time,
  752. fallback_attribute="consumption-breach-price",
  753. fill_sides=True,
  754. )
  755. )
  756. all_consumption_breach_price = (
  757. get_continuous_series_sensor_or_quantity(
  758. variable_quantity=consumption_breach_price,
  759. actuator=asset,
  760. unit=FlexContextSchema()
  761. .declared_fields["consumption_breach_price"]
  762. ._get_unit(consumption_breach_price)
  763. + "*h", # from EUR/MWh to EUR/MW/resolution
  764. query_window=(start, end),
  765. resolution=resolution,
  766. beliefs_before=belief_time,
  767. fallback_attribute="consumption-breach-price",
  768. fill_sides=True,
  769. )
  770. )
  771. # Set up commitments DataFrame
  772. commitment = FlowCommitment(
  773. name=f"any consumption breach device {d}",
  774. quantity=consumption_capacity_d,
  775. upwards_deviation_price=any_consumption_breach_price,
  776. index=index,
  777. _type="any",
  778. device=d,
  779. )
  780. commitments.append(commitment)
  781. commitment = FlowCommitment(
  782. name=f"all consumption breaches device {d}",
  783. quantity=consumption_capacity_d,
  784. upwards_deviation_price=all_consumption_breach_price,
  785. index=index,
  786. device=d,
  787. )
  788. commitments.append(commitment)
  789. else:
  790. # consumption-capacity will become a hard constraint
  791. device_constraints[d]["derivative max"] = consumption_capacity_d
  792. all_stock_delta = []
  793. for is_usage, soc_delta in zip([False, True], [soc_gain[d], soc_usage[d]]):
  794. if soc_delta is None:
  795. # Try to get fallback
  796. soc_delta = [None]
  797. for component in soc_delta:
  798. stock_delta_series = get_continuous_series_sensor_or_quantity(
  799. variable_quantity=component,
  800. actuator=sensor_d,
  801. unit="MW",
  802. query_window=(start, end),
  803. resolution=resolution,
  804. beliefs_before=belief_time,
  805. fallback_attribute="soc-usage" if is_usage else "soc-gain",
  806. )
  807. # example: 4 MW sustained over 15 minutes gives 1 MWh
  808. stock_delta_series *= resolution / timedelta(
  809. hours=1
  810. ) # MW -> MWh / resolution
  811. if is_usage:
  812. stock_delta_series *= -1
  813. all_stock_delta.append(stock_delta_series)
  814. if len(all_stock_delta) > 0:
  815. all_stock_delta = pd.concat(all_stock_delta, axis=1)
  816. device_constraints[d]["stock delta"] = all_stock_delta.sum(1)
  817. device_constraints[d]["stock delta"] *= timedelta(hours=1) / resolution
  818. # Apply round-trip efficiency evenly to charging and discharging
  819. charging_efficiency[d] = get_continuous_series_sensor_or_quantity(
  820. variable_quantity=charging_efficiency[d],
  821. actuator=sensor_d,
  822. unit="dimensionless",
  823. query_window=(start, end),
  824. resolution=resolution,
  825. beliefs_before=belief_time,
  826. fallback_attribute="charging-efficiency",
  827. ).fillna(1)
  828. discharging_efficiency[d] = get_continuous_series_sensor_or_quantity(
  829. variable_quantity=discharging_efficiency[d],
  830. actuator=sensor_d,
  831. unit="dimensionless",
  832. query_window=(start, end),
  833. resolution=resolution,
  834. beliefs_before=belief_time,
  835. fallback_attribute="discharging-efficiency",
  836. ).fillna(1)
  837. roundtrip_efficiency = flex_model[d].get(
  838. "roundtrip_efficiency",
  839. sensor_d.get_attribute("roundtrip_efficiency", 1),
  840. )
  841. # if roundtrip efficiency is provided in the flex-model or defined as an asset attribute
  842. if "roundtrip_efficiency" in flex_model[d] or sensor_d.has_attribute(
  843. "roundtrip-efficiency"
  844. ):
  845. charging_efficiency[d] = roundtrip_efficiency**0.5
  846. discharging_efficiency[d] = roundtrip_efficiency**0.5
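# Splitting the round-trip efficiency evenly gives each direction its square root,
# e.g. a 0.81 round-trip efficiency yields 0.9 charging and 0.9 discharging efficiency.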
  847. device_constraints[d]["derivative down efficiency"] = (
  848. discharging_efficiency[d]
  849. )
  850. device_constraints[d]["derivative up efficiency"] = charging_efficiency[d]
  851. # Apply storage efficiency (accounts for losses over time)
  852. if isinstance(storage_efficiency[d], ur.Quantity) or isinstance(
  853. storage_efficiency[d], Sensor
  854. ):
  855. device_constraints[d]["efficiency"] = (
  856. get_continuous_series_sensor_or_quantity(
  857. variable_quantity=storage_efficiency[d],
  858. actuator=sensor_d,
  859. unit="dimensionless",
  860. query_window=(start, end),
  861. resolution=resolution,
  862. beliefs_before=belief_time,
  863. fallback_attribute="storage_efficiency", # this should become storage-efficiency
  864. max_value=1,
  865. )
  866. .fillna(1.0)
  867. .clip(lower=0.0, upper=1.0)
  868. )
  869. elif storage_efficiency[d] is not None:
  870. device_constraints[d]["efficiency"] = storage_efficiency[d]
  871. # check that storage constraints are fulfilled
  872. if not skip_validation:
  873. constraint_violations = validate_storage_constraints(
  874. constraints=device_constraints[d],
  875. soc_at_start=soc_at_start[d],
  876. soc_min=soc_min[d],
  877. soc_max=soc_max[d],
  878. resolution=resolution,
  879. )
  880. if len(constraint_violations) > 0:
  881. # TODO: include hints from constraint_violations into the error message
  882. message = create_constraint_violations_message(
  883. constraint_violations
  884. )
  885. raise ValueError(
  886. "The input data yields an infeasible problem. Constraint validation has found the following issues:\n"
  887. + message
  888. )
  889. return (
  890. sensors,
  891. start,
  892. end,
  893. resolution,
  894. soc_at_start,
  895. device_constraints,
  896. ems_constraints,
  897. commitments,
  898. )
  899. def persist_flex_model(self):
  900. """Store new soc info as GenericAsset attributes
  901. This method should become obsolete when all SoC information is recorded on a sensor, instead.
  902. """
  903. if self.sensor is not None:
  904. self.sensor.generic_asset.set_attribute(
  905. "soc_datetime", self.start.isoformat()
  906. )
  907. self.sensor.generic_asset.set_attribute(
  908. "soc_in_mwh", self.flex_model["soc_at_start"]
  909. )
  910. def deserialize_flex_config(self):
  911. """
  912. Deserialize storage flex model and the flex context against schemas.
  913. Before that, we fill in values from wider context, if possible.
  914. Mostly, we allow several fields to come from sensor attributes.
  915. TODO: this work could maybe go to the schema as a pre-load hook (if we pass in the sensor to schema initialization)
916. Note: before we apply the flex config schemas, we use the flex config identifiers with hyphens
917. (this is how they are represented externally, e.g. by the API); after deserialization,
918. we use the internal schema names (with underscores).
  919. """
  920. if self.flex_model is None:
  921. self.flex_model = {}
  922. # self.flex_context overrides db_flex_context (from the asset and its ancestors)
  923. if self.asset is not None:
  924. asset = self.asset
  925. else:
  926. asset = self.sensor.generic_asset
  927. db_flex_context = asset.get_flex_context()
  928. self.flex_context = FlexContextSchema().load(
  929. {**db_flex_context, **self.flex_context}
  930. )
  931. if isinstance(self.flex_model, dict):
  932. # Check state of charge.
  933. # Preferably, a starting soc is given.
  934. # Otherwise, we try to retrieve the current state of charge from the asset (if that is the valid one at the start).
  935. # If that doesn't work, we set the starting soc to 0 (some assets don't use the concept of a state of charge,
  936. # and without soc targets and limits the starting soc doesn't matter).
  937. if (
  938. "soc-at-start" not in self.flex_model
  939. or self.flex_model["soc-at-start"] is None
  940. ):
  941. if (
  942. self.start == self.sensor.get_attribute("soc_datetime")
  943. and self.sensor.get_attribute("soc_in_mwh") is not None
  944. ):
  945. self.flex_model["soc-at-start"] = self.sensor.get_attribute(
  946. "soc_in_mwh"
  947. )
  948. else:
  949. self.flex_model["soc-at-start"] = 0
  950. self.ensure_soc_min_max()
  951. # Now it's time to check if our flex configuration holds up to schemas
  952. self.flex_model = StorageFlexModelSchema(
  953. start=self.start,
  954. sensor=self.sensor,
  955. default_soc_unit=self.flex_model.get("soc-unit"),
  956. ).load(self.flex_model)
  957. # Extend schedule period in case a target exceeds its end
  958. self.possibly_extend_end(soc_targets=self.flex_model.get("soc_targets"))
  959. elif isinstance(self.flex_model, list):
  960. # todo: ensure_soc_min_max in case the device is a storage (see line 847)
  961. self.flex_model = MultiSensorFlexModelSchema(many=True).load(
  962. self.flex_model
  963. )
  964. for d, sensor_flex_model in enumerate(self.flex_model):
  965. self.flex_model[d] = StorageFlexModelSchema(
  966. start=self.start, sensor=sensor_flex_model["sensor"]
  967. ).load(sensor_flex_model["sensor_flex_model"])
  968. self.flex_model[d]["sensor"] = sensor_flex_model["sensor"]
  969. # Extend schedule period in case a target exceeds its end
  970. self.possibly_extend_end(
  971. soc_targets=sensor_flex_model.get("soc_targets"),
  972. sensor=sensor_flex_model["sensor"],
  973. )
  974. else:
  975. raise TypeError(
  976. f"Unsupported type of flex-model: '{type(self.flex_model)}'"
  977. )
  978. return self.flex_model
  979. def possibly_extend_end(self, soc_targets, sensor: Sensor = None):
  980. """Extend schedule period in case a target exceeds its end.
  981. The schedule's duration is possibly limited by the server config setting 'FLEXMEASURES_MAX_PLANNING_HORIZON'.
  982. todo: when deserialize_flex_config becomes a single schema for the whole scheduler,
  983. this function would become a class method with a @post_load decorator.
  984. """
  985. if sensor is None:
  986. sensor = self.sensor
  987. if soc_targets and not isinstance(soc_targets, Sensor):
  988. max_target_datetime = max([soc_target["end"] for soc_target in soc_targets])
  989. if max_target_datetime > self.end:
  990. max_server_horizon = get_max_planning_horizon(sensor.event_resolution)
  991. if max_server_horizon:
  992. self.end = min(max_target_datetime, self.start + max_server_horizon)
  993. else:
  994. self.end = max_target_datetime
  995. def get_min_max_targets(
  996. self, deserialized_names: bool = True
  997. ) -> tuple[float | None, float | None]:
  998. min_target = None
  999. max_target = None
  1000. soc_targets_label = "soc_targets" if deserialized_names else "soc-targets"
  1001. # if the SOC targets are defined as a Sensor, we don't get min max values
  1002. if isinstance(self.flex_model.get(soc_targets_label), dict):
  1003. return None, None
  1004. if (
  1005. soc_targets_label in self.flex_model
  1006. and len(self.flex_model[soc_targets_label]) > 0
  1007. ):
  1008. min_target = min(
  1009. [target["value"] for target in self.flex_model[soc_targets_label]]
  1010. )
  1011. max_target = max(
  1012. [target["value"] for target in self.flex_model[soc_targets_label]]
  1013. )
  1014. return min_target, max_target
  1015. def get_min_max_soc_on_sensor(
  1016. self, adjust_unit: bool = False, deserialized_names: bool = True
  1017. ) -> tuple[float | None, float | None]:
  1018. soc_min_sensor: float | None = self.sensor.get_attribute("min_soc_in_mwh")
  1019. soc_max_sensor: float | None = self.sensor.get_attribute("max_soc_in_mwh")
  1020. soc_unit_label = "soc_unit" if deserialized_names else "soc-unit"
  1021. if adjust_unit:
  1022. if soc_min_sensor and self.flex_model.get(soc_unit_label) == "kWh":
  1023. soc_min_sensor *= 1000 # later steps assume soc data is kWh
  1024. if soc_max_sensor and self.flex_model.get(soc_unit_label) == "kWh":
  1025. soc_max_sensor *= 1000
  1026. return soc_min_sensor, soc_max_sensor
  1027. def ensure_soc_min_max(self):
  1028. """
  1029. Make sure we have min and max SOC.
  1030. If not passed directly, then get default from sensor or targets.
  1031. """
  1032. _, max_target = self.get_min_max_targets(deserialized_names=False)
  1033. soc_min_sensor, soc_max_sensor = self.get_min_max_soc_on_sensor(
  1034. adjust_unit=True, deserialized_names=False
  1035. )
  1036. if "soc-min" not in self.flex_model or self.flex_model["soc-min"] is None:
  1037. # Default is 0 - can't drain the storage by more than it contains
  1038. self.flex_model["soc-min"] = soc_min_sensor if soc_min_sensor else 0
  1039. if "soc-max" not in self.flex_model or self.flex_model["soc-max"] is None:
  1040. self.flex_model["soc-max"] = soc_max_sensor
  1041. # Lacking information about the battery's nominal capacity, we use the highest target value as the maximum state of charge
  1042. if self.flex_model["soc-max"] is None:
  1043. if max_target:
  1044. self.flex_model["soc-max"] = max_target
  1045. else:
  1046. raise ValueError(
  1047. "Need maximal permitted state of charge, please specify soc-max or some soc-targets."
  1048. )
  1049. def _get_device_power_capacity(
  1050. self, flex_model: list[dict], sensors: list[Sensor]
  1051. ) -> list[ur.Quantity]:
  1052. """The device power capacity for each device must be known for the optimization problem to stay bounded.
  1053. We search for the power capacity in the following order:
  1054. 1. Look for the power_capacity_in_mw field in the deserialized flex-model.
  1055. 2. Look for the capacity_in_mw attribute of the sensor.
  1056. 3. Look for the capacity_in_mw attribute of the asset (sensor.get_attribute does this internally).
  1057. 4. Look for the power-capacity attribute of the sensor.
  1058. 5. Look for the power-capacity attribute of the asset.
  1059. 6. Look for the site-power-capacity attribute of the asset.
  1060. """
  1061. power_capacities = []
  1062. for flex_model_d, sensor in zip(flex_model, sensors):
  1063. # 1, 2 and 3
  1064. power_capacity_in_mw = flex_model_d.get(
  1065. "power_capacity_in_mw",
  1066. sensor.get_attribute("capacity_in_mw"),
  1067. )
  1068. if power_capacity_in_mw is not None:
  1069. power_capacities.append(
  1070. self._ensure_variable_quantity(power_capacity_in_mw, "MW")
  1071. )
  1072. continue
  1073. # 4 and 5
  1074. power_capacity = sensor.get_attribute("power-capacity")
  1075. if power_capacity is not None:
  1076. power_capacities.append(
  1077. self._ensure_variable_quantity(power_capacity, "MW")
  1078. )
  1079. continue
  1080. # 6
  1081. site_power_capacity = sensor.generic_asset.get_attribute(
  1082. "site-power-capacity"
  1083. )
  1084. if site_power_capacity is not None:
  1085. current_app.logger.warning(
  1086. f"Missing 'power-capacity' or 'capacity_in_mw' attribute on power sensor {sensor.id}. Using site-power-capacity instead."
  1087. )
  1088. power_capacities.append(
  1089. self._ensure_variable_quantity(site_power_capacity, "MW")
  1090. )
  1091. continue
  1092. raise ValueError(
  1093. "Power capacity is not defined in the sensor attributes or the flex-model."
  1094. )
  1095. return power_capacities
  1096. def _ensure_variable_quantity(
  1097. self, value: str | int | float | ur.Quantity, unit: str
  1098. ) -> Sensor | list[dict] | ur.Quantity:
  1099. if isinstance(value, str):
  1100. q = ur.Quantity(value).to(unit)
  1101. elif isinstance(value, (float, int)):
  1102. q = ur.Quantity(f"{value} {unit}")
  1103. elif isinstance(value, (Sensor, list, ur.Quantity)):
  1104. q = value
  1105. else:
  1106. raise TypeError(
  1107. f"Unsupported type '{type(value)}' to describe Quantity. Value: {value}"
  1108. )
  1109. return q
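# For example, _ensure_variable_quantity("0.5 MW", "MW") and _ensure_variable_quantity(500, "kW")
# both return a pint Quantity, while a Sensor, a list of dicts or an existing Quantity is passed
# through unchanged.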
  1110. class StorageFallbackScheduler(MetaStorageScheduler):
  1111. __version__ = "2"
  1112. __author__ = "Seita"
  1113. def compute(self, skip_validation: bool = False) -> SchedulerOutputType:
  1114. """Schedule a battery or Charge Point by just starting to charge, discharge, or do neither,
  1115. depending on the first target state of charge and the capabilities of the Charge Point.
  1116. For the resulting consumption schedule, consumption is defined as positive values.
  1117. Note that this ignores any cause of the infeasibility.
  1118. :param skip_validation: If True, skip validation of constraints specified in the data.
  1119. :returns: The computed schedule.
  1120. """
  1121. (
  1122. sensors,
  1123. start,
  1124. end,
  1125. resolution,
  1126. soc_at_start,
  1127. device_constraints,
  1128. ems_constraints,
  1129. commitments,
  1130. ) = self._prepare(skip_validation=skip_validation)
  1131. # Fallback policy if the problem was unsolvable
  1132. storage_schedule = {
  1133. sensor: fallback_charging_policy(
  1134. sensor, device_constraints[d], start, end, resolution
  1135. )
  1136. for d, sensor in enumerate(sensors)
  1137. }
  1138. # Convert each device schedule to the unit of the device's power sensor
  1139. storage_schedule = {
  1140. sensor: convert_units(storage_schedule[sensor], "MW", sensor.unit)
  1141. for sensor in sensors
  1142. }
  1143. # Round schedule
  1144. if self.round_to_decimals:
  1145. storage_schedule = {
  1146. sensor: storage_schedule[sensor].round(self.round_to_decimals)
  1147. for sensor in sensors
  1148. }
  1149. if self.return_multiple:
  1150. return [
  1151. {
  1152. "name": "storage_schedule",
  1153. "sensor": sensor,
  1154. "data": storage_schedule[sensor],
  1155. }
  1156. for sensor in sensors
  1157. ]
  1158. else:
  1159. return storage_schedule[sensors[0]]
  1160. class StorageScheduler(MetaStorageScheduler):
  1161. __version__ = "5"
  1162. __author__ = "Seita"
  1163. fallback_scheduler_class: Type[Scheduler] = StorageFallbackScheduler
  1164. def compute(self, skip_validation: bool = False) -> SchedulerOutputType:
  1165. """Schedule a battery or Charge Point based directly on the latest beliefs regarding market prices within the specified time window.
  1166. For the resulting consumption schedule, consumption is defined as positive values.
  1167. :param skip_validation: If True, skip validation of constraints specified in the data.
  1168. :returns: The computed schedule.
  1169. """
  1170. (
  1171. sensors,
  1172. start,
  1173. end,
  1174. resolution,
  1175. soc_at_start,
  1176. device_constraints,
  1177. ems_constraints,
  1178. commitments,
  1179. ) = self._prepare(skip_validation=skip_validation)
  1180. ems_schedule, expected_costs, scheduler_results, model = device_scheduler(
  1181. device_constraints=device_constraints,
  1182. ems_constraints=ems_constraints,
  1183. commitments=commitments,
  1184. initial_stock=[
  1185. (
  1186. soc_at_start_d * (timedelta(hours=1) / resolution)
  1187. if soc_at_start_d is not None
  1188. else 0
  1189. )
  1190. for soc_at_start_d in soc_at_start
  1191. ],
  1192. )
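# Note: initial_stock is expressed in the scheduler's stock unit (MWh times 1 hour / resolution),
# matching the rescaling applied to the soc minima/maxima commitments in _prepare.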
  1193. if scheduler_results.solver.termination_condition == "infeasible":
  1194. raise InfeasibleProblemException()
  1195. # Obtain the storage schedule from all device schedules within the EMS
  1196. storage_schedule = {sensor: ems_schedule[d] for d, sensor in enumerate(sensors)}
  1197. # Convert each device schedule to the unit of the device's power sensor
  1198. storage_schedule = {
  1199. sensor: convert_units(storage_schedule[sensor], "MW", sensor.unit)
  1200. for sensor in sensors
  1201. }
  1202. flex_model = self.flex_model
  1203. if not isinstance(self.flex_model, list):
  1204. flex_model["sensor"] = sensors[0]
  1205. flex_model = [flex_model]
  1206. soc_schedule = {
  1207. flex_model_d["state_of_charge"]: convert_units(
  1208. integrate_time_series(
  1209. series=ems_schedule[d],
  1210. initial_stock=soc_at_start[d],
  1211. stock_delta=device_constraints[d]["stock delta"]
  1212. * resolution
  1213. / timedelta(hours=1),
  1214. up_efficiency=device_constraints[d]["derivative up efficiency"],
  1215. down_efficiency=device_constraints[d]["derivative down efficiency"],
  1216. storage_efficiency=device_constraints[d]["efficiency"].fillna(1),
  1217. ),
  1218. from_unit="MWh",
  1219. to_unit=flex_model_d["state_of_charge"].unit,
  1220. )
  1221. for d, flex_model_d in enumerate(flex_model)
  1222. if isinstance(flex_model_d.get("state_of_charge", None), Sensor)
  1223. }
  1224. # Resample each device schedule to the resolution of the device's power sensor
  1225. if self.resolution is None:
  1226. storage_schedule = {
  1227. sensor: storage_schedule[sensor]
  1228. .resample(sensor.event_resolution)
  1229. .mean()
  1230. for sensor in sensors
  1231. }
  1232. # Round schedule
  1233. if self.round_to_decimals:
  1234. storage_schedule = {
  1235. sensor: storage_schedule[sensor].round(self.round_to_decimals)
  1236. for sensor in sensors
  1237. }
  1238. soc_schedule = {
  1239. sensor: soc_schedule[sensor].round(self.round_to_decimals)
  1240. for sensor in soc_schedule.keys()
  1241. }
  1242. if self.return_multiple:
  1243. storage_schedules = [
  1244. {
  1245. "name": "storage_schedule",
  1246. "sensor": sensor,
  1247. "data": storage_schedule[sensor],
  1248. "unit": sensor.unit,
  1249. }
  1250. for sensor in sensors
  1251. ]
  1252. commitment_costs = [
  1253. {
  1254. "name": "commitment_costs",
  1255. "data": {
  1256. c.name: costs
  1257. for c, costs in zip(
  1258. commitments, model.commitment_costs.values()
  1259. )
  1260. },
  1261. "unit": self.flex_context["shared_currency_unit"],
  1262. },
  1263. ]
  1264. soc_schedules = [
  1265. {
  1266. "name": "state_of_charge",
  1267. "data": soc,
  1268. "sensor": sensor,
  1269. "unit": sensor.unit,
  1270. }
  1271. for sensor, soc in soc_schedule.items()
  1272. ]
  1273. return storage_schedules + commitment_costs + soc_schedules
  1274. else:
  1275. return storage_schedule[sensors[0]]
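
# Worked example for the initial_stock conversion above (hypothetical numbers): a
# soc_at_start of 2.5 MWh at a 15-minute resolution enters the device_scheduler as
# 2.5 * (1 h / 0.25 h) = 10, i.e. (roughly speaking) the state of charge is rescaled
# to be commensurate with the per-slot power values the scheduler accumulates.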


def create_constraint_violations_message(constraint_violations: list) -> str:
    """Create a human-readable message with the constraint_violations.

    :param constraint_violations: list with the constraint violations
    :return: human-readable message
    """
    message = ""
    for c in constraint_violations:
        message += f"t={c['dt']} | {c['violation']}\n"

    # Remove the trailing newline character
    if len(message) > 1:
        message = message[:-1]

    return message
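
# Illustrative example (hypothetical violation record; real records are produced by
# validate_constraint below):
#
#   >>> create_constraint_violations_message(
#   ...     [{"dt": "2010-01-01 05:00", "violation": "min(t) [5.0] <= max(t) [4.0]"}]
#   ... )
#   't=2010-01-01 05:00 | min(t) [5.0] <= max(t) [4.0]'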


def build_device_soc_values(
    soc_values: list[dict[str, datetime | float]] | pd.Series,
    soc_at_start: float,
    start_of_schedule: datetime,
    end_of_schedule: datetime,
    resolution: timedelta,
) -> pd.Series:
    """
    Utility function to create a Pandas series from SOC values we got from the flex-model.

    Should set NaN anywhere where there is no target.

    SOC values should be indexed by their due date. For example, for quarter-hourly targets from 5 to 6 AM:
    >>> df = pd.Series(data=[1, 1.5, 2, 2.5, 3], index=pd.date_range(pd.Timestamp("2010-01-01T05"), pd.Timestamp("2010-01-01T06"), freq=pd.Timedelta("PT15M"), inclusive="both"))
    >>> print(df)
    2010-01-01 05:00:00    1.0
    2010-01-01 05:15:00    1.5
    2010-01-01 05:30:00    2.0
    2010-01-01 05:45:00    2.5
    2010-01-01 06:00:00    3.0
    Freq: 15min, dtype: float64

    TODO: this function could become the deserialization method of a new TimedEventSchema (targets, plural), which wraps TimedEventSchema.
    """
    if isinstance(soc_values, pd.Series):  # some tests prepare it this way
        device_values = soc_values
    else:
        device_values = initialize_series(
            np.nan,
            start=start_of_schedule,
            end=end_of_schedule,
            resolution=resolution,
            inclusive="right",  # note that target values are indexed by their due date (i.e. inclusive="right")
        )
        max_server_horizon = get_max_planning_horizon(resolution)
        disregarded_periods: list[tuple[datetime, datetime]] = []
        for soc_value in soc_values:
            soc = soc_value["value"]
            # convert timezone, otherwise DST would be problematic
            soc_constraint_start = soc_value["start"].astimezone(
                device_values.index.tzinfo
            )
            soc_constraint_end = soc_value["end"].astimezone(device_values.index.tzinfo)
            if soc_constraint_end > end_of_schedule:
                # Skip too-far-into-the-future target
                disregarded_periods += [(soc_constraint_start, soc_constraint_end)]
                if soc_constraint_start <= end_of_schedule:
                    device_values.loc[soc_constraint_start:end_of_schedule] = soc
                continue
            device_values.loc[soc_constraint_start:soc_constraint_end] = soc

        if not disregarded_periods:
            pass
        elif len(disregarded_periods) == 1:
            soc_constraint_start, soc_constraint_end = disregarded_periods[0]
            if soc_constraint_start == soc_constraint_end:
                current_app.logger.warning(
                    f"Disregarding target datetime {soc_constraint_end}, because it exceeds {end_of_schedule}. Maximum scheduling horizon is {max_server_horizon}."
                )
            else:
                current_app.logger.warning(
                    f"Disregarding target datetimes that exceed {end_of_schedule} (within the window {soc_constraint_start} until {soc_constraint_end}). Maximum scheduling horizon is {max_server_horizon}."
                )
        else:
            soc_constraint_starts, soc_constraint_ends = zip(*disregarded_periods)
            current_app.logger.warning(
                f"Disregarding target datetimes that exceed {end_of_schedule} (within the window {min(soc_constraint_starts)} until {max(soc_constraint_ends)} spanning {len(disregarded_periods)} targets). Maximum scheduling horizon is {max_server_horizon}."
            )

    # soc_values are at the end of each time slot, while prices are indexed by the start of each time slot
    device_values = device_values[start_of_schedule + resolution : end_of_schedule]
    device_values = device_values.tz_convert("UTC")

    # shift "equals" constraint for target SOC by one resolution (the target defines a state at a certain time,
    # while the "equals" constraint defines what the total stock should be at the end of a time slot,
    # where the time slot is indexed by its starting time)
    device_values = device_values.shift(-1, freq=resolution).values * (
        timedelta(hours=1) / resolution
    ) - soc_at_start * (timedelta(hours=1) / resolution)

    return device_values
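
# Worked example for the final rescaling above (hypothetical numbers): with
# soc_at_start = 1 MWh, a 15-minute resolution and a target of 2 MWh, the resulting
# "equals" value is (2 - 1) * (1 h / 0.25 h) = 4, i.e. targets are expressed as a stock
# change relative to the start, in the per-slot units the device_scheduler accumulates.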


def add_storage_constraints(
    start: datetime,
    end: datetime,
    resolution: timedelta,
    soc_at_start: float,
    soc_targets: list[dict[str, datetime | float]] | pd.Series | None,
    soc_maxima: list[dict[str, datetime | float]] | pd.Series | None,
    soc_minima: list[dict[str, datetime | float]] | pd.Series | None,
    soc_max: float,
    soc_min: float,
) -> pd.DataFrame:
    """Collect all constraints for a given storage device in a DataFrame that the device_scheduler can interpret.

    :param start: Start of the schedule.
    :param end: End of the schedule.
    :param resolution: Timedelta used to resample the constraints to the resolution of the schedule.
    :param soc_at_start: State of charge at the start time.
    :param soc_targets: Exact targets for the state of charge at each time.
    :param soc_maxima: Maximum state of charge at each time.
    :param soc_minima: Minimum state of charge at each time.
    :param soc_max: Maximum state of charge at all times.
    :param soc_min: Minimum state of charge at all times.
    :returns: Constraints (StorageScheduler.COLUMNS) for a storage device, at each time step (index).
              See device_scheduler for possible column names.
    """
    # create empty storage device constraints dataframe
    storage_device_constraints = initialize_df(
        StorageScheduler.COLUMNS, start, end, resolution
    )

    if soc_targets is not None:
        # make an equality series with the SOC targets set in the flex model
        # storage_device_constraints refers to the flexible device we are scheduling
        storage_device_constraints["equals"] = build_device_soc_values(
            soc_targets, soc_at_start, start, end, resolution
        )

    soc_min_change = (soc_min - soc_at_start) * timedelta(hours=1) / resolution
    soc_max_change = (soc_max - soc_at_start) * timedelta(hours=1) / resolution

    if soc_minima is not None:
        storage_device_constraints["min"] = build_device_soc_values(
            soc_minima,
            soc_at_start,
            start,
            end,
            resolution,
        )
    storage_device_constraints["min"] = storage_device_constraints["min"].fillna(
        soc_min_change
    )

    if soc_maxima is not None:
        storage_device_constraints["max"] = build_device_soc_values(
            soc_maxima,
            soc_at_start,
            start,
            end,
            resolution,
        )
    storage_device_constraints["max"] = storage_device_constraints["max"].fillna(
        soc_max_change
    )

    # limiting max and min to be in the range [soc_min, soc_max]
    storage_device_constraints["min"] = storage_device_constraints["min"].clip(
        lower=soc_min_change, upper=soc_max_change
    )
    storage_device_constraints["max"] = storage_device_constraints["max"].clip(
        lower=soc_min_change, upper=soc_max_change
    )

    return storage_device_constraints
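
# Worked example for the global bounds above (hypothetical numbers): with
# soc_at_start = 1 MWh, soc_min = 0 MWh, soc_max = 5 MWh and a 15-minute resolution,
#   soc_min_change = (0 - 1) * 4 = -4  and  soc_max_change = (5 - 1) * 4 = 16,
# so per-slot "min"/"max" values are filled with and clipped to the range [-4, 16].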


def validate_storage_constraints(
    constraints: pd.DataFrame,
    soc_at_start: float,
    soc_min: float,
    soc_max: float,
    resolution: timedelta,
) -> list[dict]:
    """Check that the storage constraints are fulfilled, e.g. min <= equals <= max.

    A. Global validation
        A.1) min >= soc_min
        A.2) max <= soc_max
    B. Validation in the same time frame
        B.1) min <= max
        B.2) min <= equals
        B.3) equals <= max
    C. Validation in different time frames
        C.1) equals(t) - equals(t-1) <= derivative_max(t)
        C.2) derivative_min(t) <= equals(t) - equals(t-1)
        C.3) min(t) - max(t-1) <= derivative_max(t)
        C.4) max(t) - min(t-1) >= derivative_min(t)
        C.5) equals(t) - max(t-1) <= derivative_max(t)
        C.6) derivative_min(t) <= equals(t) - min(t-1)

    :param constraints: dataframe containing the constraints of a storage device
    :param soc_at_start: State of charge at the start time.
    :param soc_min: Minimum state of charge at all times.
    :param soc_max: Maximum state of charge at all times.
    :param resolution: Constant duration between the starts of consecutive time steps.
    :returns: List of constraint violations, specifying their time, constraint and violation.
    """
    # get a copy of the constraints to make sure the dataframe doesn't get updated
    _constraints = constraints.copy()
    _constraints = _constraints.rename(
        columns={
            columns_name: columns_name.replace(" ", "_")
            + "(t)"  # replace spaces with underscores and add the time index
            for columns_name in _constraints.columns
        }
    )

    constraint_violations = []

    ########################
    # A. Global validation #
    ########################

    # 1) min >= soc_min
    soc_min = (soc_min - soc_at_start) * timedelta(hours=1) / resolution
    _constraints["soc_min(t)"] = soc_min
    constraint_violations += validate_constraint(
        _constraints, "soc_min(t)", "<=", "min(t)"
    )

    # 2) max <= soc_max
    soc_max = (soc_max - soc_at_start) * timedelta(hours=1) / resolution
    _constraints["soc_max(t)"] = soc_max
    constraint_violations += validate_constraint(
        _constraints, "max(t)", "<=", "soc_max(t)"
    )

    ########################################
    # B. Validation in the same time frame #
    ########################################

    # 1) min <= max
    constraint_violations += validate_constraint(_constraints, "min(t)", "<=", "max(t)")

    # 2) min <= equals
    constraint_violations += validate_constraint(
        _constraints, "min(t)", "<=", "equals(t)"
    )

    # 3) equals <= max
    constraint_violations += validate_constraint(
        _constraints, "equals(t)", "<=", "max(t)"
    )

    ##########################################
    # C. Validation in different time frames #
    ##########################################
    _constraints["factor_w_wh(t)"] = resolution / timedelta(hours=1)
    _constraints["min(t-1)"] = prepend_series(_constraints["min(t)"], soc_min)
    _constraints["equals(t-1)"] = prepend_series(
        _constraints["equals(t)"], soc_at_start
    )
    _constraints["max(t-1)"] = prepend_series(_constraints["max(t)"], soc_max)

    # 1) equals(t) - equals(t-1) <= derivative_max(t)
    constraint_violations += validate_constraint(
        _constraints,
        "equals(t) - equals(t-1)",
        "<=",
        "derivative_max(t) * factor_w_wh(t)",
    )

    # 2) derivative_min(t) <= equals(t) - equals(t-1)
    constraint_violations += validate_constraint(
        _constraints,
        "derivative_min(t) * factor_w_wh(t)",
        "<=",
        "equals(t) - equals(t-1)",
    )

    # 3) min(t) - max(t-1) <= derivative_max(t)
    constraint_violations += validate_constraint(
        _constraints, "min(t) - max(t-1)", "<=", "derivative_max(t) * factor_w_wh(t)"
    )

    # 4) max(t) - min(t-1) >= derivative_min(t)
    constraint_violations += validate_constraint(
        _constraints, "derivative_min(t) * factor_w_wh(t)", "<=", "max(t) - min(t-1)"
    )

    # 5) equals(t) - max(t-1) <= derivative_max(t)
    constraint_violations += validate_constraint(
        _constraints, "equals(t) - max(t-1)", "<=", "derivative_max(t) * factor_w_wh(t)"
    )

    # 6) derivative_min(t) <= equals(t) - min(t-1)
    constraint_violations += validate_constraint(
        _constraints, "derivative_min(t) * factor_w_wh(t)", "<=", "equals(t) - min(t-1)"
    )

    return constraint_violations
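
# Usage sketch (illustrative; mirrors how a data-preparation step could use these helpers,
# with hypothetical bounds):
#
#   violations = validate_storage_constraints(
#       constraints, soc_at_start=1.0, soc_min=0.0, soc_max=5.0, resolution=timedelta(minutes=15)
#   )
#   if violations:
#       raise ValueError(create_constraint_violations_message(violations))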


def get_pattern_match_word(word: str) -> str:
    """Get a regex pattern to match a word.

    The conditions to delimit a word are:
      - start of line
      - whitespace
      - end of line
      - word boundary
      - arithmetic operations

    :return: regex expression
    """
    # Delimiters: start/end of line, whitespace, a word boundary, an arithmetic operator (+, -, *, /) or a backslash.
    regex = r"(^|\s|$|\b|\+|\-|\*|\/|\\)"
    return regex + re.escape(word) + regex
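
# Illustrative example (hypothetical expressions): "max" matches as a standalone word,
# but not as part of a longer identifier:
#
#   >>> bool(re.search(get_pattern_match_word("max"), "min(t) - max(t-1)"))
#   True
#   >>> bool(re.search(get_pattern_match_word("max"), "maximum(t)"))
#   False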


def sanitize_expression(expression: str, columns: list) -> tuple[str, list]:
    """Wrap column names in backticks, so that arbitrary column names (e.g. with spaces) can be used in pd.eval expressions.

    :param expression: expression to sanitize
    :param columns: list with the names of the columns of the input data for the expression.
    :return: sanitized expression and columns (variables) used in the expression
    """
    _expression = copy.copy(expression)
    columns_involved = []

    for column in columns:
        if re.search(get_pattern_match_word(column), _expression):
            columns_involved.append(column)
        _expression = re.sub(get_pattern_match_word(column), f"`{column}`", _expression)

    return _expression, columns_involved
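
# Illustrative example (hypothetical expression and column names, in the style this module
# uses after renaming columns with a "(t)" suffix):
#
#   >>> _, involved = sanitize_expression("equals(t) - max(t-1)", ["equals(t)", "max(t-1)", "min(t)"])
#   >>> involved
#   ['equals(t)', 'max(t-1)']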


def validate_constraint(
    constraints_df: pd.DataFrame,
    lhs_expression: str,
    inequality: str,
    rhs_expression: str,
    round_to_decimals: int | None = 6,
) -> list[dict]:
    """Validate the feasibility of a given set of constraints.

    :param constraints_df: DataFrame with the constraints
    :param lhs_expression: left-hand side of the inequality expression following pd.eval format.
                           There is no need to use the backtick syntax (`column`) to reference a column;
                           just use the column name.
    :param inequality: inequality operator, one of ('<=', '<', '>=', '>', '==', '!=').
    :param rhs_expression: right-hand side of the inequality expression following pd.eval format.
                           There is no need to use the backtick syntax (`column`) to reference a column;
                           just use the column name.
    :param round_to_decimals: Number of decimals to round off to before validating constraints.
    :return: List of constraint violations, specifying their time, constraint and violation.
    """
    constraint_expression = f"{lhs_expression} {inequality} {rhs_expression}"
    constraints_df_columns = list(constraints_df.columns)

    # Wrap column names in backticks, so pd.eval can handle names with special characters
    lhs_expression, columns_lhs = sanitize_expression(
        lhs_expression, constraints_df_columns
    )
    rhs_expression, columns_rhs = sanitize_expression(
        rhs_expression, constraints_df_columns
    )
    columns_involved = columns_lhs + columns_rhs

    lhs = constraints_df.fillna(0).eval(lhs_expression).round(round_to_decimals)
    rhs = constraints_df.fillna(0).eval(rhs_expression).round(round_to_decimals)

    condition = None
    inequality = inequality.strip()

    if inequality == "<=":
        condition = lhs <= rhs
    elif inequality == "<":
        condition = lhs < rhs
    elif inequality == ">=":
        condition = lhs >= rhs
    elif inequality == ">":
        condition = lhs > rhs
    elif inequality == "==":
        condition = lhs == rhs
    elif inequality == "!=":
        condition = lhs != rhs
    else:
        raise ValueError(f"Inequality `{inequality}` not supported.")

    # Only report violations for rows where all involved columns have values
    time_condition_fails = constraints_df.index[
        ~condition & ~constraints_df[columns_involved].isna().any(axis=1)
    ]

    constraint_violations = []
    for dt in time_condition_fails:
        value_replaced = copy.copy(constraint_expression)

        for column in constraints_df.columns:
            value_replaced = re.sub(
                get_pattern_match_word(column),
                f"{column} [{constraints_df.loc[dt, column]}] ",
                value_replaced,
            )

        constraint_violations.append(
            dict(
                dt=dt.to_pydatetime(),
                condition=constraint_expression,
                violation=value_replaced,
            )
        )

    return constraint_violations
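
# Illustrative doctest-style example (hypothetical data): a single time step where
# min(t) exceeds max(t) is reported as a violation (with the offending values substituted
# into the "violation" string):
#
#   >>> df = pd.DataFrame(
#   ...     {"min(t)": [1.0, 5.0], "max(t)": [2.0, 4.0]},
#   ...     index=pd.date_range("2010-01-01T05:00", periods=2, freq="15min"),
#   ... )
#   >>> len(validate_constraint(df, "min(t)", "<=", "max(t)"))
#   1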


def prepend_series(series: pd.Series, value) -> pd.Series:
    """Prepend a value to a time series.

    The result is shifted by one period, so that each timestamp carries the value of the
    previous time slot, with `value` filling the first slot.

    :param series: series containing the timed values
    :param value: value to place in the first position
    """
    series = series.copy()
    # insert `value` one resolution before the first index; pandas appends the new entry
    # positionally, so we sort the index afterwards to restore the time ordering
    series[series.index[0] - series.index.freq] = value
    series = series.sort_index()
    return series.shift(1)
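
# Illustrative example (hypothetical values): each timestamp ends up carrying the previous
# slot's value, with the prepended value filling the first original slot:
#
#   >>> s = pd.Series([1, 2, 3], index=pd.date_range("2010-01-01T05:00", periods=3, freq="15min"))
#   >>> prepend_series(s, 0).tolist()
#   [nan, 0.0, 1.0, 2.0]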


####################
# TO BE DEPRECATED #
####################
@deprecated(build_device_soc_values, "0.14")
def build_device_soc_targets(
    targets: list[dict[str, datetime | float]] | pd.Series,
    soc_at_start: float,
    start_of_schedule: datetime,
    end_of_schedule: datetime,
    resolution: timedelta,
) -> pd.Series:
    return build_device_soc_values(
        targets, soc_at_start, start_of_schedule, end_of_schedule, resolution
    )


StorageScheduler.compute_schedule = deprecated(StorageScheduler.compute, "0.14")(
    StorageScheduler.compute_schedule
)