# ad98460751d9_remove_obsolete_tables.py
"""Remove obsolete tables

Revision ID: ad98460751d9
Revises: 5a9473a817cb
Create Date: 2023-11-30 10:31:46.125670

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.exc import ProgrammingError
import click

from flexmeasures.data.config import db

# revision identifiers, used by Alembic.
revision = "ad98460751d9"
down_revision = "5a9473a817cb"
branch_labels = None
depends_on = None
  17. def upgrade():
  18. tables = [
  19. "price",
  20. "power",
  21. "market",
  22. "market_type",
  23. "weather",
  24. "asset",
  25. "weather_sensor",
  26. ]
  27. # check for existing data
  28. tables_with_data = []
  29. inspect = sa.inspect(db.engine)
  30. for table in tables:
  31. try:
  32. if inspect.has_table(table):
  33. result = db.session.execute(
  34. sa.text(f"SELECT 1 FROM {table};")
  35. ).scalar_one_or_none()
  36. if result:
  37. tables_with_data.append(table)
  38. except ProgrammingError as exception:
  39. print(exception)
  40. db.session.close() # https://stackoverflow.com/a/26346280/13775459
  41. if tables_with_data:
  42. click.confirm(
  43. f"The following tables still have data and will be dropped by this upgrade: {tables_with_data}. Use `flexmeasures db-ops dump` to create a backup. Are you sure you want to upgrade the database?: ",
  44. abort=True,
  45. )
  46. # drop indexes
  47. with op.batch_alter_table("power", schema=None) as batch_op:
  48. batch_op.drop_index("power_datetime_idx", if_exists=True)
  49. batch_op.drop_index("power_sensor_id_idx", if_exists=True)
  50. with op.batch_alter_table("asset_type", schema=None) as batch_op:
  51. batch_op.drop_index("asset_type_can_curtail_idx", if_exists=True)
  52. batch_op.drop_index("asset_type_can_shift_idx", if_exists=True)
  53. with op.batch_alter_table("weather", schema=None) as batch_op:
  54. batch_op.drop_index("weather_datetime_idx", if_exists=True)
  55. batch_op.drop_index("weather_sensor_id_idx", if_exists=True)
  56. with op.batch_alter_table("price", schema=None) as batch_op:
  57. batch_op.drop_index("price_datetime_idx", if_exists=True)
  58. batch_op.drop_index("price_sensor_id_idx", if_exists=True)
  59. # drop tables
  60. for table in tables:
  61. if inspect.has_table(table):
  62. op.drop_table(table)
def downgrade():
    """Recreate the dropped tables and their indexes.

    This restores the pre-upgrade schema (columns, constraints, defaults)
    of the obsolete tables, but not their data — restoring data requires a
    backup made before upgrading.
    """
    # Power time series: one row per (datetime, sensor, horizon, source).
    op.create_table(
        "power",
        sa.Column(
            "datetime",
            postgresql.TIMESTAMP(timezone=True),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("sensor_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column(
            "value",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "horizon", postgresql.INTERVAL(), autoincrement=False, nullable=False
        ),
        sa.Column("data_source_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["data_source_id"],
            ["data_source.id"],
            name="power_data_source_data_sources_fkey",
        ),
        sa.ForeignKeyConstraint(
            ["sensor_id"],
            ["sensor.id"],
            name="power_sensor_id_sensor_fkey",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint(
            "datetime", "sensor_id", "horizon", "data_source_id", name="power_pkey"
        ),
    )
    # Lookup table of asset types and their (seasonality/flexibility) flags.
    op.create_table(
        "asset_type",
        sa.Column("name", sa.VARCHAR(length=80), autoincrement=False, nullable=False),
        sa.Column("is_consumer", sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.Column("is_producer", sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.Column("can_curtail", sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.Column("can_shift", sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.Column(
            "daily_seasonality", sa.BOOLEAN(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "weekly_seasonality", sa.BOOLEAN(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "yearly_seasonality", sa.BOOLEAN(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "display_name",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "hover_label", sa.VARCHAR(length=80), autoincrement=False, nullable=True
        ),
        sa.PrimaryKeyConstraint("name", name="asset_type_pkey"),
        sa.UniqueConstraint("display_name", name="asset_type_display_name_key"),
        postgresql_ignore_search_path=False,
    )
    # Lookup table of weather sensor types (referenced by weather_sensor).
    op.create_table(
        "weather_sensor_type",
        sa.Column("name", sa.VARCHAR(length=80), autoincrement=False, nullable=False),
        sa.Column(
            "display_name",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=True,
        ),
        sa.PrimaryKeyConstraint("name", name="weather_sensor_type_pkey"),
        sa.UniqueConstraint(
            "display_name", name="weather_sensor_type_display_name_key"
        ),
        postgresql_ignore_search_path=False,
    )
    # Weather sensors; id doubles as a foreign key into the sensor table.
    op.create_table(
        "weather_sensor",
        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column("name", sa.VARCHAR(length=80), autoincrement=False, nullable=True),
        sa.Column(
            "weather_sensor_type_name",
            sa.VARCHAR(length=80),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "latitude",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "longitude",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "unit",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "display_name",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "event_resolution",
            postgresql.INTERVAL(),
            server_default=sa.text("'00:00:00'::interval"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "knowledge_horizon_fnc",
            sa.VARCHAR(length=80),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "knowledge_horizon_par",
            postgresql.JSON(astext_type=sa.Text()),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "timezone", sa.VARCHAR(length=80), autoincrement=False, nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["id"], ["sensor.id"], name="weather_sensor_id_sensor_fkey"
        ),
        sa.ForeignKeyConstraint(
            ["weather_sensor_type_name"],
            ["weather_sensor_type.name"],
            # NOTE: truncated constraint name kept as-is; it must match the
            # historical name in existing databases.
            name="weather_sensor_weather_sensor_type_name_weather_sensor__1390",
        ),
        sa.PrimaryKeyConstraint("id", name="weather_sensor_pkey"),
        sa.UniqueConstraint("name", name="weather_sensor_name_key"),
        sa.UniqueConstraint(
            "weather_sensor_type_name",
            "latitude",
            "longitude",
            name="weather_sensor_type_name_latitude_longitude_key",
        ),
    )
    # Weather time series: one row per (datetime, sensor, horizon, source).
    op.create_table(
        "weather",
        sa.Column("sensor_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column(
            "datetime",
            postgresql.TIMESTAMP(timezone=True),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "value",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "horizon", postgresql.INTERVAL(), autoincrement=False, nullable=False
        ),
        sa.Column("data_source_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["data_source_id"],
            ["data_source.id"],
            name="weather_data_source_data_sources_fkey",
        ),
        sa.ForeignKeyConstraint(
            ["sensor_id"], ["sensor.id"], name="weather_sensor_id_sensor_fkey"
        ),
        sa.PrimaryKeyConstraint(
            "datetime", "sensor_id", "horizon", "data_source_id", name="weather_pkey"
        ),
    )
    # Lookup table of market types (referenced by market).
    op.create_table(
        "market_type",
        sa.Column("name", sa.VARCHAR(length=80), autoincrement=False, nullable=False),
        sa.Column(
            "daily_seasonality", sa.BOOLEAN(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "weekly_seasonality", sa.BOOLEAN(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "yearly_seasonality", sa.BOOLEAN(), autoincrement=False, nullable=False
        ),
        sa.Column(
            "display_name",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=True,
        ),
        sa.PrimaryKeyConstraint("name", name="market_type_pkey"),
        sa.UniqueConstraint("display_name", name="market_type_display_name_key"),
        postgresql_ignore_search_path=False,
    )
    # Markets; id doubles as a foreign key into the sensor table.
    op.create_table(
        "market",
        sa.Column(
            "id",
            sa.INTEGER(),
            # Keep the explicit sequence default so existing id values and
            # the sequence state survive a downgrade.
            server_default=sa.text("nextval('market_id_seq'::regclass)"),
            autoincrement=True,
            nullable=False,
        ),
        sa.Column("name", sa.VARCHAR(length=80), autoincrement=False, nullable=True),
        sa.Column(
            "market_type_name",
            sa.VARCHAR(length=80),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "display_name",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "unit",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "event_resolution",
            postgresql.INTERVAL(),
            server_default=sa.text("'00:00:00'::interval"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "knowledge_horizon_fnc",
            sa.VARCHAR(length=80),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "knowledge_horizon_par",
            postgresql.JSON(astext_type=sa.Text()),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "timezone", sa.VARCHAR(length=80), autoincrement=False, nullable=False
        ),
        sa.ForeignKeyConstraint(["id"], ["sensor.id"], name="market_id_sensor_fkey"),
        sa.ForeignKeyConstraint(
            ["market_type_name"],
            ["market_type.name"],
            name="market_market_type_name_market_type_fkey",
        ),
        sa.PrimaryKeyConstraint("id", name="market_pkey"),
        sa.UniqueConstraint("display_name", name="market_display_name_key"),
        sa.UniqueConstraint("name", name="market_name_key"),
        postgresql_ignore_search_path=False,
    )
    # Price time series: one row per (datetime, sensor, horizon, source).
    op.create_table(
        "price",
        sa.Column(
            "datetime",
            postgresql.TIMESTAMP(timezone=True),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("sensor_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.Column(
            "value",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "horizon", postgresql.INTERVAL(), autoincrement=False, nullable=False
        ),
        sa.Column("data_source_id", sa.INTEGER(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["data_source_id"],
            ["data_source.id"],
            name="price_data_source_data_sources_fkey",
        ),
        sa.ForeignKeyConstraint(
            ["sensor_id"], ["sensor.id"], name="price_sensor_id_sensor_fkey"
        ),
        sa.PrimaryKeyConstraint(
            "datetime", "sensor_id", "horizon", "data_source_id", name="price_pkey"
        ),
    )
    # Assets; id doubles as a foreign key into the sensor table. Created
    # last among the tables because it references market and asset_type.
    op.create_table(
        "asset",
        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column(
            "asset_type_name",
            sa.VARCHAR(length=80),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("name", sa.VARCHAR(length=80), autoincrement=False, nullable=True),
        sa.Column(
            "display_name", sa.VARCHAR(length=80), autoincrement=False, nullable=True
        ),
        sa.Column(
            "capacity_in_mw",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "latitude",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "longitude",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("owner_id", sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column(
            "min_soc_in_mwh",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "max_soc_in_mwh",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "soc_in_mwh",
            postgresql.DOUBLE_PRECISION(precision=53),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "soc_datetime",
            postgresql.TIMESTAMP(timezone=True),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column("soc_udi_event_id", sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column(
            "unit",
            sa.VARCHAR(length=80),
            server_default=sa.text("''::character varying"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("market_id", sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column(
            "event_resolution",
            postgresql.INTERVAL(),
            server_default=sa.text("'00:00:00'::interval"),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "knowledge_horizon_fnc",
            sa.VARCHAR(length=80),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "knowledge_horizon_par",
            postgresql.JSON(astext_type=sa.Text()),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column(
            "timezone", sa.VARCHAR(length=80), autoincrement=False, nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["asset_type_name"],
            ["asset_type.name"],
            name="asset_asset_type_name_asset_type_fkey",
        ),
        sa.ForeignKeyConstraint(
            ["id"], ["sensor.id"], name="asset_id_sensor_fkey", ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["market_id"], ["market.id"], name="asset_market_id_market_fkey"
        ),
        sa.ForeignKeyConstraint(
            ["owner_id"],
            ["fm_user.id"],
            # Historical constraint name (predates the bvp -> flexmeasures
            # rename); kept as-is to match existing databases.
            name="asset_owner_id_bvp_users_fkey",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="asset_pkey"),
        sa.UniqueConstraint("display_name", name="asset_display_name_key"),
        sa.UniqueConstraint("name", name="asset_name_key"),
    )
    # Recreate the indexes that the upgrade dropped.
    with op.batch_alter_table("power", schema=None) as batch_op:
        batch_op.create_index("power_sensor_id_idx", ["sensor_id"], unique=False)
        batch_op.create_index("power_datetime_idx", ["datetime"], unique=False)

    with op.batch_alter_table("asset_type", schema=None) as batch_op:
        batch_op.create_index("asset_type_can_shift_idx", ["can_shift"], unique=False)
        batch_op.create_index(
            "asset_type_can_curtail_idx", ["can_curtail"], unique=False
        )

    with op.batch_alter_table("weather", schema=None) as batch_op:
        batch_op.create_index("weather_sensor_id_idx", ["sensor_id"], unique=False)
        batch_op.create_index("weather_datetime_idx", ["datetime"], unique=False)

    with op.batch_alter_table("price", schema=None) as batch_op:
        batch_op.create_index("price_sensor_id_idx", ["sensor_id"], unique=False)
        batch_op.create_index("price_datetime_idx", ["datetime"], unique=False)