-
-
Notifications
You must be signed in to change notification settings - Fork 521
/
test_db_upgrades.py
2496 lines (2295 loc) · 135 KB
/
test_db_upgrades.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import json
import os
import shutil
from contextlib import ExitStack, contextmanager, suppress
from pathlib import Path
from unittest.mock import patch
import pytest
from pysqlcipher3 import dbapi2 as sqlcipher
from rotkehlchen.chain.evm.accounting.structures import TxEventSettings
from rotkehlchen.constants.misc import DEFAULT_SQL_VM_INSTRUCTIONS_CB
from rotkehlchen.data_handler import DataHandler
from rotkehlchen.db.checks import sanity_check_impl
from rotkehlchen.db.constants import HISTORY_MAPPING_KEY_STATE, HISTORY_MAPPING_STATE_CUSTOMIZED
from rotkehlchen.db.dbhandler import DBHandler
from rotkehlchen.db.drivers.gevent import DBConnection, DBConnectionType
from rotkehlchen.db.schema import DB_SCRIPT_CREATE_TABLES
from rotkehlchen.db.settings import ROTKEHLCHEN_DB_VERSION
from rotkehlchen.db.upgrade_manager import (
MIN_SUPPORTED_USER_DB_VERSION,
UPGRADES_LIST,
DBUpgradeProgressHandler,
)
from rotkehlchen.db.upgrades.v37_v38 import DEFAULT_POLYGON_NODES_AT_V38
from rotkehlchen.db.upgrades.v39_v40 import PREFIX
from rotkehlchen.db.utils import table_exists
from rotkehlchen.errors.api import RotkehlchenPermissionError
from rotkehlchen.errors.misc import DBUpgradeError
from rotkehlchen.oracles.structures import CurrentPriceOracle
from rotkehlchen.tests.utils.database import (
_use_prepared_db,
mock_db_schema_sanity_check,
mock_dbhandler_sync_globaldb_assets,
mock_dbhandler_update_owned_assets,
)
from rotkehlchen.types import Location, deserialize_evm_tx_hash
from rotkehlchen.user_messages import MessagesAggregator
from rotkehlchen.utils.hexbytes import HexBytes
from rotkehlchen.utils.misc import ts_now
def make_serialized_event_identifier(location: Location, raw_event_identifier: bytes) -> str:
    """Create a serialized event identifier using the logic at the moment of the v32->v33 upgrade

    Kraken events and rotki-generated events keep their raw identifier decoded
    as-is; everything else is serialized as a 0x-prefixed hex string.
    """
    if location == Location.KRAKEN or raw_event_identifier.startswith(b'rotki_events'):
        return raw_event_identifier.decode()

    hex_repr = raw_event_identifier.hex()
    return hex_repr if hex_repr.startswith('0x') else f'0x{hex_repr}'
def assert_tx_hash_is_bytes(
        old: list,
        new: list,
        tx_hash_index: int,
        is_history_event: bool = False,
) -> None:
    """Verify a tx-hash column was migrated from string to bytes.

    For each paired row it checks that the `tx_hash_index` entry of the `old`
    row is a string while the one of the `new` row is bytes, and that after
    serializing the bytes entry back to its string form both rows are equal.
    """
    for old_row, new_row in zip(old, new, strict=True):
        assert isinstance(new_row[tx_hash_index], bytes)
        assert isinstance(old_row[tx_hash_index], str)
        old_row, new_row = list(old_row), list(new_row)
        if is_history_event is True:
            # history events serialize the identifier depending on location
            new_row[tx_hash_index] = make_serialized_event_identifier(
                location=Location.deserialize_from_db(new_row[4]),
                raw_event_identifier=new_row[1],
            )
        else:
            new_row[tx_hash_index] = deserialize_evm_tx_hash(new_row[tx_hash_index]).hex()  # pylint: disable=no-member
        assert old_row == new_row
@contextmanager
def target_patch(target_version: int):
    """Context manager forcing the DB upgrade machinery to stop at target_version.

    Patches ROTKEHLCHEN_DB_VERSION in both the upgrade manager and the DB
    handler to target_version, and trims UPGRADES_LIST so that only upgrades
    starting from a version below target_version remain.
    """
    manager_version_patch = patch(
        'rotkehlchen.db.upgrade_manager.ROTKEHLCHEN_DB_VERSION',
        new=target_version,
    )
    handler_version_patch = patch(
        'rotkehlchen.db.dbhandler.ROTKEHLCHEN_DB_VERSION',
        new=target_version,
    )
    trimmed_upgrades = [
        entry for entry in UPGRADES_LIST if entry.from_version < target_version
    ]
    upgrades_patch = patch(
        'rotkehlchen.db.upgrade_manager.UPGRADES_LIST',
        new=trimmed_upgrades,
    )
    with manager_version_patch, handler_version_patch, upgrades_patch:
        yield (manager_version_patch, handler_version_patch, upgrades_patch)
def _init_db_with_target_version(
        target_version: int,
        user_data_dir: Path,
        msg_aggregator: MessagesAggregator,
        resume_from_backup: bool,
) -> DBHandler:
    """Open the user DB at user_data_dir, letting upgrades run only up to target_version.

    Table creation after init is disabled (empty DB_SCRIPT_CREATE_TABLES) and
    the schema sanity check is mocked out so the prepared legacy DB stays as-is.
    """
    skip_table_creation = patch(
        'rotkehlchen.db.dbhandler.DB_SCRIPT_CREATE_TABLES',
        new='',
    )
    with ExitStack() as stack:
        stack.enter_context(target_patch(target_version=target_version))
        stack.enter_context(mock_db_schema_sanity_check())
        stack.enter_context(skip_table_creation)
        if target_version <= 25:
            # older DBs also need the asset-related handler logic mocked out
            stack.enter_context(mock_dbhandler_update_owned_assets())
            stack.enter_context(mock_dbhandler_sync_globaldb_assets())
        return DBHandler(
            user_data_dir=user_data_dir,
            password='123',
            msg_aggregator=msg_aggregator,
            initial_settings=None,
            sql_vm_instructions_cb=DEFAULT_SQL_VM_INSTRUCTIONS_CB,
            resume_from_backup=resume_from_backup,
        )
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_26_to_27(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 26 to version 27.

    - Recreates balancer events, uniswap events, amm_swaps. Deletes balancer pools
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v26_rotkehlchen.db')
    db_v26 = _init_db_with_target_version(
        target_version=26,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    # Verify the fixture data is present before the upgrade
    cursor = db_v26.conn.cursor()
    pre_upgrade_counts = (
        ('SELECT COUNT(*) from used_query_ranges WHERE name LIKE "uniswap%";', 2),
        ('SELECT COUNT(*) from used_query_ranges WHERE name LIKE "balancer%";', 2),
        ('SELECT COUNT(*) from used_query_ranges;', 6),
        ('SELECT COUNT(*) from amm_swaps;', 2),
        ('SELECT COUNT(*) from balancer_pools;', 1),
        ('SELECT COUNT(*) from balancer_events;', 1),
    )
    for query, expected_count in pre_upgrade_counts:
        assert cursor.execute(query).fetchone()[0] == expected_count
    db_v26.logout()
    # Migrate to v27
    db = _init_db_with_target_version(
        target_version=27,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    # After the upgrade the amm/balancer data and their query ranges are gone
    cursor = db.conn.cursor()
    post_upgrade_counts = (
        ('SELECT COUNT(*) from used_query_ranges;', 2),
        ('SELECT COUNT(*) from amm_swaps;', 0),
        ('SELECT COUNT(*) from balancer_events;', 0),
    )
    for query, expected_count in post_upgrade_counts:
        assert cursor.execute(query).fetchone()[0] == expected_count
    # Finally also make sure that we have updated to the target version
    with db.conn.read_ctx() as cursor:
        assert db.get_setting(cursor, 'version') == 27
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_27_to_28(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 27 to version 28.

    - Adds a new column 'version' to the 'yearn_vaults_events' table
    - Delete aave events
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v27_rotkehlchen.db')
    db_v27 = _init_db_with_target_version(
        target_version=27,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    # Verify the fixture data is present before the upgrade
    cursor = db_v27.conn.cursor()
    assert cursor.execute('SELECT COUNT(*) FROM aave_events;').fetchone()[0] == 1
    assert cursor.execute('SELECT COUNT(*) from yearn_vaults_events;').fetchone()[0] == 1
    db_v27.logout()
    # Migrate to v28
    db = _init_db_with_target_version(
        target_version=28,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db.conn.cursor()
    # The new 'version' column must now exist in yearn_vaults_events
    version_column_count = cursor.execute(
        'SELECT COUNT(*) FROM pragma_table_info("yearn_vaults_events") '
        'WHERE name="version"',
    ).fetchone()[0]
    assert version_column_count == 1
    # The single yearn event survived and got version set to 1
    assert cursor.execute('SELECT count(*) from yearn_vaults_events;').fetchone()[0] == 1
    assert cursor.execute('SELECT version from yearn_vaults_events;').fetchone()[0] == 1
    # Check that aave_events got deleted
    assert cursor.execute('SELECT COUNT(*) FROM aave_events;').fetchone()[0] == 0
    # Finally also make sure that we have updated to the target version
    with db.conn.read_ctx() as cursor:
        assert db.get_setting(cursor, 'version') == 28
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_28_to_29(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 28 to version 29.

    - Updates the primary key of blockchain accounts to take into account chain type
    - Removes internal ethereum transactions (the fixture rows with nonce -1)
    - Renames the uniswap_events table to amm_events
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v28_rotkehlchen.db')
    db_v28 = _init_db_with_target_version(
        target_version=28,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db_v28.conn.cursor()
    # Fixture data that should exist before the upgrade and survive it unchanged
    expected_accounts = [
        ('AVAX', '0x84D34f4f83a87596Cd3FB6887cFf8F17Bf5A7B83', ''),
        ('BTC', '39bAC3Mr6RfT2V3Z8qShD7mA9JT1Phmvap', ''),
        ('ETH', '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', 'Address with internal txs'),
        ('ETH', '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', 'address that sends'),
        ('ETH', '0x902CAe163C2B222285035aAEB9A25e6BA02Fa27B', 'address that receives'),
    ]
    expected_xpubs = [('xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk', '/m/0/0', 'label1')]  # noqa: E501
    expected_xpub_mappings = [
        ('39bAC3Mr6RfT2V3Z8qShD7mA9JT1Phmvap', 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk', '/m/0/0', 0, 1),  # noqa: E501
    ]
    # Checks before migration
    assert cursor.execute('SELECT * FROM blockchain_accounts;').fetchall() == expected_accounts
    assert cursor.execute('SELECT * FROM xpubs;').fetchall() == expected_xpubs
    assert cursor.execute('SELECT * FROM xpub_mappings;').fetchall() == expected_xpub_mappings
    # Both normal transactions and internal ones (nonce == -1) exist pre-upgrade
    expected_transactions_before = [
        ('0xbf5a8870576098c23fb2736ad4832db401a04a52000e6064294711acddb1dac5', 12690344, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 0),  # noqa: E501
        ('0x67884a2e791e3d93f5dc1a57019f3ac612693c7fe70fbb74d5e6e61b307cf5d9', 12691582, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 1),  # noqa: E501
        ('0xfb90c1e3f50016d95c8b6d37b7f85aa9a9d4e6f2da0caf9abaa2e25395e53e57', 12719998, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 2),  # noqa: E501
        ('0x49783d098aa710e05d6fc37f5aa28cf4743c0c8cee57a65f2d66fd7a869e06ac', 12773277, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 3),  # noqa: E501
        ('0x5107cd6fa8c62435ab223134a3eecb94c2feea3084962421c73416329bc5ac49', 12817184, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 4),  # noqa: E501
        ('0xe4d7486814e50aa11af7e8034bb1d48fa04cde6051fb557324a5001e36426f31', 12856423, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 5),  # noqa: E501
        ('0x7c293ef356957263f164c16fe38730cd7bd6b1ffef0b2f5cc21851a9b43ca5cf', 12895650, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 6),  # noqa: E501
        ('0xc8141b1260613f1281ea375402be9291f2330b240cf16bcdb0dc1dfe70a02722', 12931687, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 7),  # noqa: E501
        ('0x841e78e62ad89c5de0100d27acce3593b52f0a2880366144ddfffc622e99c559', 12978963, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 8),  # noqa: E501
        ('0x3617e90dae481ebd5f906d2f325940ba334a1a37f966f9945926fb36d518a5a3', 13030277, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 9),  # noqa: E501
        ('0xff76780c18b8e8b1cae08af25ffe571d9862eb3587b0e8a705effadb3c9dfce2', 13047542, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 10),  # noqa: E501
        ('0x8342dbd4b0befe23f5d0da0ef5e5aeb52c323fb939e242de8a8afeaae16e0f1c', 13087523, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 11),  # noqa: E501
        ('0x07c51405f1046d85c6bb534fd8e8b9822935d33f7c96cf840611f719f4de8b52', 13145936, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 12),  # noqa: E501
        ('0xf88c83f6c5725fbeff1969dc1b26c1d29c1435d457d092c14bee349ecc04781a', 12689445, '0x2602669a92fCCF44e5319fF51B0F453aAb9Db021', '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', -1),  # noqa: E501
        ('0x548d45b53482ad22e7d8f0ae81a8b52e99d9582f3dad38bdf1fadc7911c99201', 13086898, '0x2602669a92fCCF44e5319fF51B0F453aAb9Db021', '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', -1),  # noqa: E501
        ('0xa32075632ea2fa55c52dfe6ab361b4b9bf33ece75e78ae5f113aba8a91c20a28', 13145936, '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', '0x902CAe163C2B222285035aAEB9A25e6BA02Fa27B', 1),  # noqa: E501
        ('0x53d2205f3f4e4d4f083878253c1b6c1cf9476fb70a53f97255425837cf472b9f', 12096043, '0x85b931A32a0725Be14285B66f1a22178c672d69B', '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', 650479),  # noqa: E501
        ('0x04fb485b37b0a6107613ac6a9df403037ef41c523e562559bbfaa773f23c0ff8', 12096288, '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', '0x3E66B66Fd1d0b02fDa6C811Da9E0547970DB2f21', 0),  # noqa: E501
    ]
    transactions_before_query = cursor.execute('SELECT tx_hash, block_number, from_address, to_address, nonce from ethereum_transactions;')  # noqa: E501
    # tx_hash is stored as bytes at v28 -- re-serialize for comparison
    transactions_before = [(
        '0x' + entry[0].hex(),
        entry[1],
        entry[2],
        entry[3],
        entry[4],
    ) for entry in transactions_before_query]
    assert transactions_before == expected_transactions_before
    db_v28.logout()
    # Migrate to v29
    db = _init_db_with_target_version(
        target_version=29,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db.conn.cursor()
    # check same data is there
    assert cursor.execute('SELECT * FROM blockchain_accounts;').fetchall() == expected_accounts
    assert cursor.execute('SELECT * FROM xpubs;').fetchall() == expected_xpubs
    assert cursor.execute(
        'SELECT address, xpub, derivation_path, account_index, derived_index FROM xpub_mappings;',
    ).fetchall() == expected_xpub_mappings
    # Check transactions are migrated and internal ones removed
    expected_transactions_after = [
        ('0xbf5a8870576098c23fb2736ad4832db401a04a52000e6064294711acddb1dac5', 12690344, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 0),  # noqa: E501
        ('0x67884a2e791e3d93f5dc1a57019f3ac612693c7fe70fbb74d5e6e61b307cf5d9', 12691582, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 1),  # noqa: E501
        ('0xfb90c1e3f50016d95c8b6d37b7f85aa9a9d4e6f2da0caf9abaa2e25395e53e57', 12719998, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 2),  # noqa: E501
        ('0x49783d098aa710e05d6fc37f5aa28cf4743c0c8cee57a65f2d66fd7a869e06ac', 12773277, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 3),  # noqa: E501
        ('0x5107cd6fa8c62435ab223134a3eecb94c2feea3084962421c73416329bc5ac49', 12817184, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 4),  # noqa: E501
        ('0xe4d7486814e50aa11af7e8034bb1d48fa04cde6051fb557324a5001e36426f31', 12856423, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 5),  # noqa: E501
        ('0x7c293ef356957263f164c16fe38730cd7bd6b1ffef0b2f5cc21851a9b43ca5cf', 12895650, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 6),  # noqa: E501
        ('0xc8141b1260613f1281ea375402be9291f2330b240cf16bcdb0dc1dfe70a02722', 12931687, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 7),  # noqa: E501
        ('0x841e78e62ad89c5de0100d27acce3593b52f0a2880366144ddfffc622e99c559', 12978963, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 8),  # noqa: E501
        ('0x3617e90dae481ebd5f906d2f325940ba334a1a37f966f9945926fb36d518a5a3', 13030277, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 9),  # noqa: E501
        ('0xff76780c18b8e8b1cae08af25ffe571d9862eb3587b0e8a705effadb3c9dfce2', 13047542, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 10),  # noqa: E501
        ('0x8342dbd4b0befe23f5d0da0ef5e5aeb52c323fb939e242de8a8afeaae16e0f1c', 13087523, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 11),  # noqa: E501
        ('0x07c51405f1046d85c6bb534fd8e8b9822935d33f7c96cf840611f719f4de8b52', 13145936, '0xb99Db59b12d43465848B11478AccBe491F4c6A4E', '0x9B9647431632AF44be02ddd22477Ed94d14AacAa', 12),  # noqa: E501
        ('0xa32075632ea2fa55c52dfe6ab361b4b9bf33ece75e78ae5f113aba8a91c20a28', 13145936, '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', '0x902CAe163C2B222285035aAEB9A25e6BA02Fa27B', 1),  # noqa: E501
        ('0x53d2205f3f4e4d4f083878253c1b6c1cf9476fb70a53f97255425837cf472b9f', 12096043, '0x85b931A32a0725Be14285B66f1a22178c672d69B', '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', 650479),  # noqa: E501
        ('0x04fb485b37b0a6107613ac6a9df403037ef41c523e562559bbfaa773f23c0ff8', 12096288, '0xbd96cDCc6Ae1ffB73ace84E16601E1CF909D5749', '0x3E66B66Fd1d0b02fDa6C811Da9E0547970DB2f21', 0),  # noqa: E501
    ]
    transactions_after_query = cursor.execute('SELECT tx_hash, block_number, from_address, to_address, nonce from ethereum_transactions;')  # noqa: E501
    transactions_after = [(
        '0x' + entry[0].hex(),
        entry[1],
        entry[2],
        entry[3],
        entry[4],
    ) for entry in transactions_after_query]
    assert transactions_after == expected_transactions_after
    # check that uniswap_events table was renamed
    assert table_exists(cursor, 'uniswap_events') is False
    assert table_exists(cursor, 'amm_events') is True
    # Finally also make sure that we have updated to the target version
    with db.conn.read_ctx() as cursor:
        assert db.get_setting(cursor, 'version') == 29
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_29_to_30(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 29 to version 30.

    Existing manually tracked balances must keep being treated as assets
    (category 'A') and not turn into liabilities after the upgrade.
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v29_rotkehlchen.db')
    db = _init_db_with_target_version(
        target_version=30,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    # Make sure that we have updated to the target version
    with db.conn.read_ctx() as cursor:
        assert db.get_setting(cursor, 'version') == 30
    # Check that existing balances are not considered as liabilities after migration
    cursor = db.conn.cursor()
    category_row = cursor.execute('SELECT category FROM manually_tracked_balances;').fetchone()
    assert category_row == ('A',)
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_30_to_31(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 30 to version 31.
    Also checks that this code upgrade works even if the DB is affected by
    https://github.com/rotki/rotki/issues/3744 and does not have a version
    setting set. Checks that the version is detected as at least v30 by missing
    the eth2_validators table.
    - Upgrades the ETH2 tables
    - Deletes ignored ethereum transactions ids
    - Deletes kraken trades and used query ranges
    """
    msg_aggregator = MessagesAggregator()
    # Check we have data in the eth2 tables before the DB upgrade
    _use_prepared_db(user_data_dir, 'v30_rotkehlchen.db')
    db_v30 = _init_db_with_target_version(
        target_version=30,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db_v30.conn.cursor()
    result = cursor.execute('SELECT COUNT(*) FROM eth2_deposits;')
    assert result.fetchone()[0] == 1
    result = cursor.execute('SELECT COUNT(*) FROM eth2_daily_staking_details;')
    assert result.fetchone()[0] == 356
    # 'C' entries are ignored ethereum transaction ids, 'A'/'B' are other action types
    result = cursor.execute('SELECT * FROM ignored_actions;')
    assert result.fetchall() == [('C', '0x1'), ('C', '0x2'), ('A', '0x3'), ('B', '0x4')]
    result = cursor.execute('SELECT COUNT(*) FROM trades;')
    assert result.fetchone()[0] == 2
    result = cursor.execute('SELECT COUNT(*) FROM asset_movements;')
    assert result.fetchone()[0] == 1
    result = cursor.execute('SELECT * FROM used_query_ranges;')
    assert result.fetchall() == [
        ('ethtxs_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
        ('eth2_deposits_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 1602667372, 1637575118),
        ('kraken_trades_kraken1', 0, 1634850532),
        ('kraken_asset_movements_kraken1', 0, 1634850532),
    ]
    db_v30.logout()
    # Copy the prepared v30 DB in again, so the upgrade runs on a fresh copy
    _use_prepared_db(user_data_dir, 'v30_rotkehlchen.db')
    db = _init_db_with_target_version(
        target_version=31,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db.conn.cursor()
    # Finally also make sure that we have updated to the target version
    assert db.get_setting(cursor, 'version') == 31
    cursor = db.conn.cursor()
    # Check that the new table is created
    assert table_exists(cursor, 'eth2_validators') is True
    # ETH2 deposit/staking data was wiped so it can be re-queried
    result = cursor.execute('SELECT COUNT(*) FROM eth2_deposits;')
    assert result.fetchone()[0] == 0
    result = cursor.execute('SELECT COUNT(*) FROM eth2_daily_staking_details;')
    assert result.fetchone()[0] == 0
    # The ignored ethereum transaction ids ('C' entries) were deleted
    result = cursor.execute('SELECT * FROM ignored_actions;')
    assert result.fetchall() == [('A', '0x3'), ('B', '0x4')]
    # Kraken trades and their query range were deleted; asset movements remain
    result = cursor.execute('SELECT COUNT(*) FROM trades;')
    assert result.fetchone()[0] == 0
    result = cursor.execute('SELECT COUNT(*) FROM asset_movements;')
    assert result.fetchone()[0] == 1
    result = cursor.execute('SELECT * FROM used_query_ranges;')
    assert result.fetchall() == [
        ('ethtxs_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
        ('eth2_deposits_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 1602667372, 1637575118),
        ('kraken_asset_movements_kraken1', 0, 1634850532),
    ]
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_31_to_32(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 31 to version 32.
    - Check that subtype is correctly updated
    - Check that gitcoin data is properly deleted
    - Check that trades with fee missing, sets fee_currency to NULL and vice versa
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v31_rotkehlchen.db')
    db_v31 = _init_db_with_target_version(
        target_version=31,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db_v31.conn.cursor()
    # At v31 history_events has no identifier column yet -- use rowid
    result = cursor.execute('SELECT rowid from history_events')
    old_ids = {row[0] for row in result}
    assert len(old_ids) == 19
    cursor.execute(
        'SELECT subtype from history_events',
    )
    # Old-style staking subtypes that the upgrade is expected to rename
    subtypes = [row[0] for row in cursor]
    assert set(subtypes) == {
        'staking deposit asset',
        'staking receive asset',
        None,
        'fee',
        'staking remove asset',
    }
    # check used query ranges
    result = cursor.execute('SELECT * from used_query_ranges').fetchall()
    assert result == [
        ('ethtxs_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
        ('eth2_deposits_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 1602667372, 1637575118),
        ('kraken_asset_movements_kraken1', 0, 1634850532),
        ('gitcoingrants_0x4362BBa5a26b07db048Bc2603f843E21Ac22D75E', 1, 2),
    ]
    # Check gitcoin ledger actions are there
    result = cursor.execute('SELECT * from ledger_actions').fetchall()
    assert result == [
        (1, 1, 'A', 'A', '1', 'ETH', None, None, None, None),
        (2, 2, 'A', '^', '1', 'ETH', None, None, None, None),
        (3, 3, 'A', '^', '1', 'ETH', None, None, None, None),
    ]
    result = cursor.execute('SELECT * from ledger_actions_gitcoin_data').fetchall()
    assert result == [(2, '0x1', 1, 1, 'A'), (3, '0x2', 1, 1, 'B')]
    # Check that the other gitcoin tables exist at this point
    for name in ('gitcoin_tx_type', 'ledger_actions_gitcoin_data', 'gitcoin_grant_metadata'):
        assert table_exists(cursor, name)
    manual_balance_before = cursor.execute(
        'SELECT asset, label, amount, location, category FROM '
        'manually_tracked_balances;',
    ).fetchall()
    # check that the trades with invalid fee/fee_currency are present at this point
    trades_before = cursor.execute('SELECT * FROM trades WHERE id != ? AND id != ?', ('foo1', 'foo2')).fetchall()  # noqa: E501
    assert trades_before == [
        ('1111111', 1595640208, 'external', 'ETH', 'USD', 'buy', '1.5541', '22.1', '3.4', 'USD', None, None),  # noqa: E501
        ('1111112', 1595640208, 'external', 'ETH', 'USD', 'buy', '1.5541', '22.1', '3.4', None, None, None),  # noqa: E501
        ('1111113', 1595640208, 'external', 'ETH', 'USD', 'buy', '1.5541', '22.1', None, 'USD', None, None),  # noqa: E501
    ]
    # Check that there are invalid pairs of (event_identifier, sequence_index)
    base_entries_query = 'SELECT * from history_events WHERE event_identifier="KRAKEN-REMOTE-ID3"'
    result = cursor.execute(base_entries_query).fetchall()
    assert len(result) == 5
    assert len([row[2] for row in result]) == 5
    assert len({row[2] for row in result}) == 4  # duplicate sequence_index present
    assert len([True for event in result if event[-1] is not None]) == 2
    base_entries_query = 'SELECT * from history_events WHERE event_identifier="KRAKEN-REMOTE-ID4"'
    result = cursor.execute(base_entries_query).fetchall()
    assert len(result) == 5
    assert len([row[2] for row in result]) == 5
    assert len({row[2] for row in result}) == 3  # duplicate sequence_index present
    # check that user_credential_mappings with setting_name=PAIRS are present
    selected_binance_markets_before = cursor.execute('SELECT * from user_credentials_mappings WHERE setting_name="PAIRS"').fetchall()  # noqa: E501
    assert selected_binance_markets_before == [
        ('binance', 'E', 'PAIRS', 'pro'),
        ('binanceus', 'S', 'PAIRS', 'abc'),
    ]
    # Tag mappings still reference labels before the upgrade
    tag_mappings_before = cursor.execute('SELECT object_reference, tag_name FROM tag_mappings').fetchall()  # noqa: E501
    assert tag_mappings_before == [
        ('LABEL1', 'TAG1'),
        ('LABEL2', 'TAG2'),
    ]
    # Check that we have old staking events
    expected_timestamp = 16099501664486
    cursor.execute('SELECT COUNT(*) FROM history_events WHERE subtype="staking remove asset" AND type="unstaking"')  # noqa: E501
    assert cursor.fetchone() == (1,)
    cursor.execute('SELECT COUNT(*), timestamp FROM history_events WHERE subtype="staking receive asset" AND type="unstaking"')  # noqa: E501
    assert cursor.fetchone() == (1, expected_timestamp)
    db_v31.logout()
    # Execute upgrade
    db = _init_db_with_target_version(
        target_version=32,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db.conn.cursor()
    # Staking subtypes were renamed to the new naming scheme
    cursor.execute('SELECT subtype FROM history_events')
    subtypes = {row[0] for row in cursor}
    assert subtypes == {'deposit asset', 'receive wrapped', 'reward', 'fee', None, 'remove asset'}
    result = cursor.execute('SELECT identifier FROM history_events ORDER BY identifier')
    assert [x[0] for x in result] == list(range(1, 20)), 'identifier column should be added'
    # check used query range got delete and rest are intact
    result = cursor.execute('SELECT * from used_query_ranges').fetchall()
    assert result == [
        ('ethtxs_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
        ('eth2_deposits_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 1602667372, 1637575118),
        ('kraken_asset_movements_kraken1', 0, 1634850532),
    ]
    # Check that the non-gitcoin ledger action is still there
    result = cursor.execute('SELECT * from ledger_actions').fetchall()
    assert result == [(1, 1, 'A', 'A', '1', 'ETH', None, None, None, None)]
    # Check that all gitcoin tables are deleted
    for name in ('gitcoin_tx_type', 'ledger_actions_gitcoin_data', 'gitcoin_grant_metadata'):
        assert table_exists(cursor, name) is False
    manual_balance_after = cursor.execute(
        'SELECT asset, label, amount, location, category FROM '
        'manually_tracked_balances;',
    ).fetchall()
    manual_balance_expected = [
        ('1CR', 'LABEL1', '34.5', 'A', 'B'),
        ('2GIVE', 'LABEL2', '0.3', 'B', 'B'),
        ('1CR', 'LABEL3', '3', 'A', 'A'),
    ]
    assert manual_balance_expected == manual_balance_before == manual_balance_after
    # Manual balances gained an autoincrementing id column
    manual_balance_ids = cursor.execute('SELECT id FROM manually_tracked_balances;').fetchall()
    assert [x[0] for x in manual_balance_ids] == [1, 2, 3]
    # Check that trades with fee missing sets fee_currency to NULL and vice versa
    trades_expected = cursor.execute('SELECT * FROM trades WHERE id != ? AND id != ?', ('foo1', 'foo2')).fetchall()  # noqa: E501
    assert trades_expected == [
        ('1111111', 1595640208, 'external', 'ETH', 'USD', 'buy', '1.5541', '22.1', '3.4', 'USD', None, None),  # noqa: E501
        ('1111112', 1595640208, 'external', 'ETH', 'USD', 'buy', '1.5541', '22.1', None, None, None, None),  # noqa: E501
        ('1111113', 1595640208, 'external', 'ETH', 'USD', 'buy', '1.5541', '22.1', None, None, None, None),  # noqa: E501
    ]
    # Check that sequence indeces are unique for the same event identifier
    base_entries_query = 'SELECT * from history_events WHERE event_identifier="KRAKEN-REMOTE-ID3"'
    result = cursor.execute(base_entries_query).fetchall()
    assert len(result) == 5
    assert len([row[2] for row in result]) == 5
    assert len({row[2] for row in result}) == 5
    base_entries_query = 'SELECT * from history_events WHERE event_identifier="KRAKEN-REMOTE-ID4"'
    result = cursor.execute(base_entries_query).fetchall()
    assert len(result) == 5
    assert len([row[2] for row in result]) == 5
    assert len({row[2] for row in result}) == 5
    # Sanity-check that the (new at v32) ens_mappings table accepts inserts
    ens_names_test_data = ('0xASDF123', 'TEST_ENS_NAME', 1)
    cursor.execute('INSERT INTO ens_mappings(address, ens_name, last_update) VALUES(?, ?, ?)', ens_names_test_data)  # noqa: E501
    data_in_db = cursor.execute('SELECT address, ens_name, last_update FROM ens_mappings').fetchone()  # noqa: E501
    assert data_in_db == ens_names_test_data
    # Check that selected binance markets settings_name changed to the updated one.
    selected_binance_markets_after = cursor.execute('SELECT * from user_credentials_mappings WHERE setting_name="binance_selected_trade_pairs"').fetchall()  # noqa: E501
    assert selected_binance_markets_after == [
        ('binance', 'E', 'binance_selected_trade_pairs', 'pro'),
        ('binanceus', 'S', 'binance_selected_trade_pairs', 'abc'),
    ]
    # Tag mappings now reference numeric ids instead of labels
    tag_mappings_after = cursor.execute('SELECT object_reference, tag_name FROM tag_mappings').fetchall()  # noqa: E501
    assert tag_mappings_after == [
        ('1', 'TAG1'),
        ('2', 'TAG2'),
    ]
    # Check that staking events have been updated
    cursor.execute('SELECT COUNT(*) FROM history_events WHERE subtype="staking remove asset" AND type="unstaking"')  # noqa: E501
    assert cursor.fetchone() == (0,)
    cursor.execute('SELECT COUNT(*) FROM history_events WHERE subtype="staking receive asset" AND type="unstaking"')  # noqa: E501
    assert cursor.fetchone() == (0,)
    # Timestamps were scaled down by a factor of 10 (ms resolution fix)
    cursor.execute('SELECT COUNT(*), timestamp FROM history_events WHERE subtype="remove asset" AND type="staking"')  # noqa: E501
    assert cursor.fetchone() == (1, expected_timestamp // 10)
    cursor.execute('SELECT COUNT(*), timestamp FROM history_events WHERE subtype="remove asset" AND type="staking"')  # noqa: E501
    assert cursor.fetchone() == (1, expected_timestamp // 10)
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_32_to_33(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 32 to version 33.

    The upgrade is expected to:
    - Turn ``xpub_mappings.blockchain`` from a generated column into a real,
      insertable one, with existing rows populated as ``BTC``.
    - Replace empty blockchain account labels with NULL.
    - Convert transaction hashes stored as hex strings into bytes in the
      aave/adex/balancer/yearn/amm event tables, the combined trades view
      and ``history_events``, without losing any rows.
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v32_rotkehlchen.db')
    db_v32 = _init_db_with_target_version(
        target_version=32,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db_v32.conn.cursor()
    # check that you cannot add blockchain column in xpub_mappings
    # (in v32 it is a generated column, so INSERTs naming it must fail)
    with pytest.raises(sqlcipher.OperationalError) as exc_info:  # pylint: disable=no-member
        cursor.execute(
            'INSERT INTO xpub_mappings(address, xpub, derivation_path, account_index, derived_index, blockchain) '  # noqa: E501
            'VALUES ("1234", "abcd", "d", 3, 6, "BCH");',
        )
    assert 'cannot INSERT into generated column "blockchain"' in str(exc_info)
    # the first pre-existing row of xpub_mappings, in column order:
    # (address, xpub, derivation_path, account_index, derived_index, blockchain)
    xpub_mapping_data = (
        '1LZypJUwJJRdfdndwvDmtAjrVYaHko136r',
        'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk',
        'm',
        0,
        0,
        'BTC',
    )
    old_xpub_mappings = cursor.execute('SELECT * FROM xpub_mappings').fetchall()
    assert len(old_xpub_mappings) == 2
    assert old_xpub_mappings[0] == xpub_mapping_data
    # test that previous xpubs are what is expected
    old_xpubs = cursor.execute('SELECT * FROM xpubs').fetchall()
    assert len(old_xpubs) == 2
    # before the upgrade the label (column index 2) is an empty string
    blockchain_account_label_initial = cursor.execute('SELECT * FROM blockchain_accounts WHERE account="0x45E6CA515E840A4e9E02A3062F99216951825eB2"').fetchone()[2]  # noqa: E501
    assert blockchain_account_label_initial == ''
    # get tables with tx_hash as string.
    old_aave_events = cursor.execute('SELECT * FROM aave_events').fetchall()
    assert len(old_aave_events) == 4
    old_adex_events = cursor.execute('SELECT * FROM adex_events').fetchall()
    assert len(old_adex_events) == 2
    old_balancer_events = cursor.execute('SELECT * FROM balancer_events').fetchall()
    assert len(old_balancer_events) == 2
    old_yearn_vaults_events = cursor.execute('SELECT * FROM yearn_vaults_events').fetchall()
    assert len(old_yearn_vaults_events) == 2
    old_amm_events = cursor.execute('SELECT * FROM amm_events').fetchall()
    assert len(old_amm_events) == 2
    old_amm_swaps = cursor.execute('SELECT * FROM amm_swaps').fetchall()
    assert len(old_amm_swaps) == 2
    old_combined_trades_views = cursor.execute('SELECT * FROM combined_trades_view;').fetchall()
    assert len(old_combined_trades_views) == 7
    # get history events
    old_history_events = cursor.execute('SELECT * FROM history_events').fetchall()
    assert len(old_history_events) == 5
    db_v32.logout()
    # Execute upgrade
    db = _init_db_with_target_version(
        target_version=33,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    cursor = db.conn.cursor()
    # check that xpubs mappings were not altered.
    new_xpub_mappings = cursor.execute('SELECT * FROM xpub_mappings').fetchall()
    assert new_xpub_mappings == old_xpub_mappings
    # check that you can now add blockchain column in xpub_mappings
    address = '1MKSdDCtBSXiE49vik8xUG2pTgTGGh5pqe'
    cursor.execute(
        'INSERT INTO xpub_mappings(address, xpub, derivation_path, account_index, derived_index, blockchain) '  # noqa: E501
        'VALUES (?, ?, ?, ?, ?, ?);',
        (address, xpub_mapping_data[1], 'm', 0, 1, 'BTC'),
    )
    all_xpubs_mappings = cursor.execute('SELECT * FROM xpub_mappings').fetchall()
    assert len(all_xpubs_mappings) == 3
    # check that previous xpubs blockchain columns are set to BTC
    new_xpubs = cursor.execute('SELECT * FROM xpubs').fetchall()
    assert len(new_xpubs) == len(old_xpubs)
    for xpub in new_xpubs:
        assert xpub[3] == 'BTC'
    # the upgrade turns the empty-string label into NULL
    blockchain_account_label_upgraded = cursor.execute('SELECT * FROM blockchain_accounts WHERE account="0x45E6CA515E840A4e9E02A3062F99216951825eB2"').fetchone()[2]  # noqa: E501
    assert blockchain_account_label_upgraded is None
    # check that forcing bytes for tx hashes did not break anything.
    # Row counts must match the pre-upgrade counts exactly (no data loss).
    new_aave_events = cursor.execute('SELECT * FROM aave_events').fetchall()
    assert len(new_aave_events) == 4
    new_adex_events = cursor.execute('SELECT * FROM adex_events').fetchall()
    assert len(new_adex_events) == 2
    new_balancer_events = cursor.execute('SELECT * FROM balancer_events').fetchall()
    assert len(new_balancer_events) == 2
    new_yearn_vaults_events = cursor.execute('SELECT * FROM yearn_vaults_events').fetchall()
    assert len(new_yearn_vaults_events) == 2
    new_amm_events = cursor.execute('SELECT * FROM amm_events').fetchall()
    assert len(new_amm_events) == 2
    new_amm_swaps = cursor.execute('SELECT * FROM amm_swaps').fetchall()
    assert len(new_amm_swaps) == 2
    new_combined_trades_views = cursor.execute('SELECT * FROM combined_trades_view;').fetchall()
    assert len(new_combined_trades_views) == 7
    new_history_events = cursor.execute('SELECT * FROM history_events').fetchall()
    assert len(new_history_events) == 5
    # tx_hash_index is the column position of the hash in each table
    assert_tx_hash_is_bytes(old=old_aave_events, new=new_aave_events, tx_hash_index=4)
    assert_tx_hash_is_bytes(old=old_adex_events, new=new_adex_events, tx_hash_index=0)
    assert_tx_hash_is_bytes(old=old_balancer_events, new=new_balancer_events, tx_hash_index=0)
    assert_tx_hash_is_bytes(old=old_yearn_vaults_events, new=new_yearn_vaults_events, tx_hash_index=12)  # noqa: E501
    assert_tx_hash_is_bytes(old=old_amm_events, new=new_amm_events, tx_hash_index=0)
    assert_tx_hash_is_bytes(old=old_amm_swaps, new=new_amm_swaps, tx_hash_index=0)
    # not all combined_trades_views have tx hash.
    assert_tx_hash_is_bytes(old=old_combined_trades_views[:1], new=new_combined_trades_views[:1], tx_hash_index=10)  # noqa: E501
    assert_tx_hash_is_bytes(old=old_history_events, new=new_history_events, tx_hash_index=1, is_history_event=True)  # noqa: E501
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_33_to_34(user_data_dir):  # pylint: disable=unused-argument
    """Test upgrading the DB from version 33 to version 34.

    The upgrade changes combined_trades_view so the link field comes back
    as a valid string instead of a blob.
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v33_rotkehlchen.db')

    def last_trade_link(database):
        # the link field is column index 10 of the view; take the latest trade
        with database.conn.read_ctx() as cursor:
            cursor.execute('SELECT * FROM combined_trades_view ORDER BY time ASC')
            return cursor.fetchall()[-1][10]

    expected_hash = '0xb1fcf4aef6af87a061ca03e92c4eb8039efe600d501ba288a8bae90f78c91db5'
    old_db = _init_db_with_target_version(
        target_version=33,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    # before the upgrade the link is a blob holding the raw hash bytes
    link_before = last_trade_link(old_db)
    assert isinstance(link_before, bytes)
    assert HexBytes(link_before).hex() == expected_hash
    # Execute upgrade
    upgraded_db = _init_db_with_target_version(
        target_version=34,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
        resume_from_backup=False,
    )
    # afterwards the same field is the hex string itself
    link_after = last_trade_link(upgraded_db)
    assert isinstance(link_after, str)
    assert link_after == expected_hash
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_upgrade_db_34_to_35(user_data_dir): # pylint: disable=unused-argument
"""Test upgrading the DB from version 34 to version 35.
- Check that expected information for the changes in timestamps exists and is correct
"""
msg_aggregator = MessagesAggregator()
_use_prepared_db(user_data_dir, 'v34_rotkehlchen.db')
# Make sure that assets from the globaldb at version 3 are not copied in the test database
with patch('rotkehlchen.db.dbhandler.DBHandler.sync_globaldb_assets', return_value=lambda *args: None): # noqa: E501
db_v34 = _init_db_with_target_version(
target_version=34,
user_data_dir=user_data_dir,
msg_aggregator=msg_aggregator,
resume_from_backup=False,
)
upgraded_tables = (
'timed_balances',
'timed_location_data',
'trades',
'asset_movements',
)
expected_timestamps = (
[(1658564495,)],
[(1637574520,)],
[(1595640208,), (1595640208,), (1595640208,)],
[(1,)],
)
expected_old_ignored_assets_ids = [
('_ceth_0x4Fabb145d64652a948d72533023f6E7A623C7C53',),
('_ceth_0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48',),
('_ceth_0xB8c77482e45F1F44dE1745F52C74426C631bDD52',),
('_ceth_0xdAC17F958D2ee523a2206206994597C13D831ec7',),
]
with db_v34.conn.read_ctx() as cursor:
for table_name, expected_result in zip(upgraded_tables, expected_timestamps, strict=True):
cursor.execute(f'SELECT time from {table_name}')
assert cursor.fetchall() == expected_result
used_ranges = cursor.execute('SELECT * from used_query_ranges').fetchall()
assert used_ranges == [
('ethtxs_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
('eth2_deposits_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 1602667372, 1637575118),
('kraken_asset_movements_kraken1', 0, 1634850532),
('uniswap_trades_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
('sushiswap_trades_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
('balancer_trades_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118)]
# check that amm_swaps and combined trades view exist
assert table_exists(cursor, 'amm_swaps') is True
assert cursor.execute(
'SELECT COUNT(*) FROM sqlite_master WHERE type="view" AND name=?', ('combined_trades_view',), # noqa: E501
).fetchone()[0] == 1
with pytest.raises(sqlcipher.OperationalError) as exc_info: # pylint: disable=no-member
cursor.execute('SELECT blockchain FROM web3_nodes;')
assert "no such column: blockchain'" in str(exc_info)
# check that ignored assets are present in the previous format.
old_ignored_assets_ids = cursor.execute('SELECT value FROM multisettings WHERE name="ignored_asset";').fetchall() # noqa: E501
assert old_ignored_assets_ids == expected_old_ignored_assets_ids
cursor.execute('SELECT tokens_list from ethereum_accounts_details WHERE account="0x45E6CA515E840A4e9E02A3062F99216951825eB2"') # noqa: E501
tokens = json.loads(cursor.fetchone()[0])
assert tokens['tokens'] == ['_ceth_0x0bc529c00C6401aEF6D220BE8C6Ea1667F6Ad93e']
# check that history events contain a transaction with an old style token identifier
old_tx_assets_ids = cursor.execute('SELECT DISTINCT asset FROM history_events;').fetchall()
asset_missing_from_assets_table = '_ceth_0xb3d608c31ACa7a1c7D6DAcec5978E5493181b67A'
assert old_tx_assets_ids == [
('ETH',),
('AVAX',),
('BTC',),
('_ceth_0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5',),
(asset_missing_from_assets_table,),
]
# check that _ceth_0xb3d608c31ACa7a1c7D6DAcec5978E5493181b67A is not in assets table
# essentially reproduce: https://github.com/rotki/rotki/issues/5052
asset_in_table = cursor.execute(
'SELECT COUNT(*) FROM assets WHERE identifier=?', (asset_missing_from_assets_table,),
).fetchone()[0]
assert asset_in_table == 0, 'asset should be in history event but not in assets table'
# Check that oracles exist in the test db
oracles_before_upgrade = cursor.execute(
'SELECT value FROM settings WHERE name="current_price_oracles"',
).fetchone()[0]
assert oracles_before_upgrade == '["cryptocompare", "coingecko", "uniswapv2", "uniswapv3", "saddle"]' # noqa: E501
oracles_after_upgrade = cursor.execute(
'SELECT value FROM settings WHERE name="historical_price_oracles"',
).fetchone()
assert oracles_after_upgrade is None
# check that asset movement exist with previous format
result = [(
'_ceth_0x0bc529c00C6401aEF6D220BE8C6Ea1667F6Ad93e',
'_ceth_0x6810e776880C02933D47DB1b9fc05908e5386b96',
)]
assert cursor.execute('SELECT asset, fee_asset from asset_movements').fetchall() == result
# test that there is 3 evm tx mappings before the upgrade
expected_evm_tx_mappings = [
(b'\nP\xd7\x05}\xaf>y|\x03\x83\x89V\xd9\x90\xb4#\x8e\xfc\x02\xc1\x96STD\xe0\xccP6\x08\x1fF', 'ETH', 'decoded'), # noqa: E501
(b'\x0b\xbd\xa0\x0fMA\xe9\xcbs\x0e\x8cT*\x04B\xcb\x08R\x84\x16\xee;\xd5,\x1c\xca\xf7\xadd\x94\x03n', 'ETH', 'decoded'), # noqa: E501
(b"\x0c\x04\x82\x92Z\xf0\x97\xedM\x85\xec\x06\x8f\xed\xc3\xdaMev<\xc82WO'6\x92\xc5\xe88wV", 'ETH', 'customized'), # noqa: E501
]
assert cursor.execute('SELECT * from evm_tx_mappings').fetchall() == expected_evm_tx_mappings # noqa: E501
cursor.execute('SELECT COUNT(*) from assets WHERE identifier=?', ('BIFI',))
assert cursor.fetchone() == (1,)
# check that assets are updated correctly in the user db
cursor.execute(
'SELECT COUNT(*) from manually_tracked_balances WHERE asset=?',
('BIFI',),
)
assert cursor.fetchone() == (1,)
xpub1 = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk' # noqa: E501
xpub2 = 'zpub6quTRdxqWmerHdiWVKZdLMp9FY641F1F171gfT2RS4D1FyHnutwFSMiab58Nbsdu4fXBaFwpy5xyGnKZ8d6xn2j4r4yNmQ3Yp3yDDxQUo3q' # noqa: E501
def try_insert_mapping(cur):
# try to insert a new entry with values (except blockchain) duplicating another entry
cur.execute(
'INSERT INTO xpub_mappings VALUES (?, ?, ?, ?, ?, ?)',
(
'1LZypJUwJJRdfdndwvDmtAjrVYaHko136r',
xpub1,
'm', 0, 0, 'BCH',
),
)
# it should fail before the upgrade
with db_v34.conn.write_ctx() as write_cursor, pytest.raises(sqlcipher.IntegrityError): # pylint: disable=no-member
try_insert_mapping(write_cursor)
# Migrate the database
db_v35 = _init_db_with_target_version(
target_version=35,
user_data_dir=user_data_dir,
msg_aggregator=msg_aggregator,
resume_from_backup=False,
)
# it should not fail after upgrade since we added `blockchain` to primary key
with db_v35.conn.write_ctx() as write_cursor:
try_insert_mapping(write_cursor)
expected_xpubs_mappings = [
('1LZypJUwJJRdfdndwvDmtAjrVYaHko136r', xpub1, 'm', 0, 0, 'BTC'),
('bc1qc3qcxs025ka9l6qn0q5cyvmnpwrqw2z49qwrx5', xpub2, 'm/0', 0, 0, 'BTC'),
('1LZypJUwJJRdfdndwvDmtAjrVYaHko136r', xpub1, 'm', 0, 0, 'BCH'),
]
expected_new_ignored_assets_ids = [
('eip155:1/erc20:0x4Fabb145d64652a948d72533023f6E7A623C7C53',),
('eip155:1/erc20:0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48',),
('eip155:1/erc20:0xB8c77482e45F1F44dE1745F52C74426C631bDD52',),
('eip155:1/erc20:0xdAC17F958D2ee523a2206206994597C13D831ec7',),
]
with db_v35.conn.read_ctx() as cursor:
for table_name, expected_result in zip(upgraded_tables, expected_timestamps, strict=True):
cursor.execute(f'SELECT timestamp from {table_name}')
assert cursor.fetchall() == expected_result
cursor.execute('SELECT blockchain from web3_nodes LIMIT 1')
assert cursor.fetchall() == [('ETH',)]
# Check that data is correct
xpub_mappings_in_db = cursor.execute('SELECT * FROM xpub_mappings').fetchall()
assert xpub_mappings_in_db == expected_xpubs_mappings
# amm swap ranges should be cleared
used_ranges = cursor.execute('SELECT * from used_query_ranges').fetchall()
assert used_ranges == [
('ethtxs_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 0, 1637575118),
('eth2_deposits_0x45E6CA515E840A4e9E02A3062F99216951825eB2', 1602667372, 1637575118),
('kraken_asset_movements_kraken1', 0, 1634850532),
]
# check that amm_swaps and combined trades view are deleted
assert table_exists(cursor, 'amm_swaps') is False
assert cursor.execute(
'SELECT COUNT(*) FROM sqlite_master WHERE type="view" AND name=?', ('combined_trades_view',), # noqa: E501
).fetchone()[0] == 0
cursor.execute(
'SELECT COUNT(*) from assets WHERE identifier=?',
('eip155:56/erc20:0xCa3F508B8e4Dd382eE878A314789373D80A5190A',),
)
assert cursor.fetchone() == (1,)
cursor.execute('SELECT COUNT(*) from assets WHERE identifier=?', ('BIFI',))
assert cursor.fetchone() == (0,)
cursor.execute(
'SELECT COUNT(*) from manually_tracked_balances WHERE asset=?',
('eip155:56/erc20:0xCa3F508B8e4Dd382eE878A314789373D80A5190A',),
)
assert cursor.fetchone() == (1,)
# check that ignored assets are now in the current CAIP format.
new_ignored_assets_ids = cursor.execute('SELECT value FROM multisettings WHERE name="ignored_asset";').fetchall() # noqa: E501
for new_ignored_assets_id in expected_new_ignored_assets_ids:
assert new_ignored_assets_id in new_ignored_assets_ids
fixed_asset_id = 'eip155:1/erc20:0xb3d608c31ACa7a1c7D6DAcec5978E5493181b67A'
# check that history events contain a transaction with new style token identifier
new_tx_assets_ids = cursor.execute('SELECT DISTINCT asset FROM history_events;').fetchall()
assert new_tx_assets_ids == [
('ETH',),
('AVAX',),
('BTC',),
('eip155:1/erc20:0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5',),
(fixed_asset_id,),
]
asset_in_table = cursor.execute(
'SELECT COUNT(*) FROM assets WHERE identifier=?', (fixed_asset_id,),
).fetchone()[0]
assert asset_in_table == 1, 'asset should now be in both history events and asset table'
# Check that token_list for accounts has been correctly upgraded
cursor.execute('SELECT value from accounts_details WHERE account="0x45E6CA515E840A4e9E02A3062F99216951825eB2" AND blockchain="eth" AND key="tokens"') # noqa: E501