1
   2
   3
   4
   5
   6
   7
   8
   9
  10
  11
  12
  13
  14
  15
  16
  17
  18
  19
  20
  21
  22
  23
  24
  25
  26
  27
  28
  29
  30
  31
  32
  33
  34
  35
  36
  37
  38
  39
  40
  41
  42
  43
  44
  45
  46
  47
  48
  49
  50
  51
  52
  53
  54
  55
  56
  57
  58
  59
  60
  61
  62
  63
  64
  65
  66
  67
  68
  69
  70
  71
  72
  73
  74
  75
  76
  77
  78
  79
  80
  81
  82
  83
  84
  85
  86
  87
  88
  89
  90
  91
  92
  93
  94
  95
  96
  97
  98
  99
 100
 101
 102
 103
 104
 105
 106
 107
 108
 109
 110
 111
 112
 113
 114
 115
 116
 117
 118
 119
 120
 121
 122
 123
 124
 125
 126
 127
 128
 129
 130
 131
 132
 133
 134
 135
 136
 137
 138
 139
 140
 141
 142
 143
 144
 145
 146
 147
 148
 149
 150
 151
 152
 153
 154
 155
 156
 157
 158
 159
 160
 161
 162
 163
 164
 165
 166
 167
 168
 169
 170
 171
 172
 173
 174
 175
 176
 177
 178
 179
 180
 181
 182
 183
 184
 185
 186
 187
 188
 189
 190
 191
 192
 193
 194
 195
 196
 197
 198
 199
 200
 201
 202
 203
 204
 205
 206
 207
 208
 209
 210
 211
 212
 213
 214
 215
 216
 217
 218
 219
 220
 221
 222
 223
 224
 225
 226
 227
 228
 229
 230
 231
 232
 233
 234
 235
 236
 237
 238
 239
 240
 241
 242
 243
 244
 245
 246
 247
 248
 249
 250
 251
 252
 253
 254
 255
 256
 257
 258
 259
 260
 261
 262
 263
 264
 265
 266
 267
 268
 269
 270
 271
 272
 273
 274
 275
 276
 277
 278
 279
 280
 281
 282
 283
 284
 285
 286
 287
 288
 289
 290
 291
 292
 293
 294
 295
 296
 297
 298
 299
 300
 301
 302
 303
 304
 305
 306
 307
 308
 309
 310
 311
 312
 313
 314
 315
 316
 317
 318
 319
 320
 321
 322
 323
 324
 325
 326
 327
 328
 329
 330
 331
 332
 333
 334
 335
 336
 337
 338
 339
 340
 341
 342
 343
 344
 345
 346
 347
 348
 349
 350
 351
 352
 353
 354
 355
 356
 357
 358
 359
 360
 361
 362
 363
 364
 365
 366
 367
 368
 369
 370
 371
 372
 373
 374
 375
 376
 377
 378
 379
 380
 381
 382
 383
 384
 385
 386
 387
 388
 389
 390
 391
 392
 393
 394
 395
 396
 397
 398
 399
 400
 401
 402
 403
 404
 405
 406
 407
 408
 409
 410
 411
 412
 413
 414
 415
 416
 417
 418
 419
 420
 421
 422
 423
 424
 425
 426
 427
 428
 429
 430
 431
 432
 433
 434
 435
 436
 437
 438
 439
 440
 441
 442
 443
 444
 445
 446
 447
 448
 449
 450
 451
 452
 453
 454
 455
 456
 457
 458
 459
 460
 461
 462
 463
 464
 465
 466
 467
 468
 469
 470
 471
 472
 473
 474
 475
 476
 477
 478
 479
 480
 481
 482
 483
 484
 485
 486
 487
 488
 489
 490
 491
 492
 493
 494
 495
 496
 497
 498
 499
 500
 501
 502
 503
 504
 505
 506
 507
 508
 509
 510
 511
 512
 513
 514
 515
 516
 517
 518
 519
 520
 521
 522
 523
 524
 525
 526
 527
 528
 529
 530
 531
 532
 533
 534
 535
 536
 537
 538
 539
 540
 541
 542
 543
 544
 545
 546
 547
 548
 549
 550
 551
 552
 553
 554
 555
 556
 557
 558
 559
 560
 561
 562
 563
 564
 565
 566
 567
 568
 569
 570
 571
 572
 573
 574
 575
 576
 577
 578
 579
 580
 581
 582
 583
 584
 585
 586
 587
 588
 589
 590
 591
 592
 593
 594
 595
 596
 597
 598
 599
 600
 601
 602
 603
 604
 605
 606
 607
 608
 609
 610
 611
 612
 613
 614
 615
 616
 617
 618
 619
 620
 621
 622
 623
 624
 625
 626
 627
 628
 629
 630
 631
 632
 633
 634
 635
 636
 637
 638
 639
 640
 641
 642
 643
 644
 645
 646
 647
 648
 649
 650
 651
 652
 653
 654
 655
 656
 657
 658
 659
 660
 661
 662
 663
 664
 665
 666
 667
 668
 669
 670
 671
 672
 673
 674
 675
 676
 677
 678
 679
 680
 681
 682
 683
 684
 685
 686
 687
 688
 689
 690
 691
 692
 693
 694
 695
 696
 697
 698
 699
 700
 701
 702
 703
 704
 705
 706
 707
 708
 709
 710
 711
 712
 713
 714
 715
 716
 717
 718
 719
 720
 721
 722
 723
 724
 725
 726
 727
 728
 729
 730
 731
 732
 733
 734
 735
 736
 737
 738
 739
 740
 741
 742
 743
 744
 745
 746
 747
 748
 749
 750
 751
 752
 753
 754
 755
 756
 757
 758
 759
 760
 761
 762
 763
 764
 765
 766
 767
 768
 769
 770
 771
 772
 773
 774
 775
 776
 777
 778
 779
 780
 781
 782
 783
 784
 785
 786
 787
 788
 789
 790
 791
 792
 793
 794
 795
 796
 797
 798
 799
 800
 801
 802
 803
 804
 805
 806
 807
 808
 809
 810
 811
 812
 813
 814
 815
 816
 817
 818
 819
 820
 821
 822
 823
 824
 825
 826
 827
 828
 829
 830
 831
 832
 833
 834
 835
 836
 837
 838
 839
 840
 841
 842
 843
 844
 845
 846
 847
 848
 849
 850
 851
 852
 853
 854
 855
 856
 857
 858
 859
 860
 861
 862
 863
 864
 865
 866
 867
 868
 869
 870
 871
 872
 873
 874
 875
 876
 877
 878
 879
 880
 881
 882
 883
 884
 885
 886
 887
 888
 889
 890
 891
 892
 893
 894
 895
 896
 897
 898
 899
 900
 901
 902
 903
 904
 905
 906
 907
 908
 909
 910
 911
 912
 913
 914
 915
 916
 917
 918
 919
 920
 921
 922
 923
 924
 925
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
use cstr_core::CStr;
use num_traits::ToPrimitive;

use bad64_sys::*;

/// A register
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, FromPrimitive, ToPrimitive)]
#[repr(u32)]
#[allow(non_camel_case_types)]
pub enum Reg {
    W0 = Register_REG_W0 as u32,
    W1 = Register_REG_W1 as u32,
    W2 = Register_REG_W2 as u32,
    W3 = Register_REG_W3 as u32,
    W4 = Register_REG_W4 as u32,
    W5 = Register_REG_W5 as u32,
    W6 = Register_REG_W6 as u32,
    W7 = Register_REG_W7 as u32,
    W8 = Register_REG_W8 as u32,
    W9 = Register_REG_W9 as u32,
    W10 = Register_REG_W10 as u32,
    W11 = Register_REG_W11 as u32,
    W12 = Register_REG_W12 as u32,
    W13 = Register_REG_W13 as u32,
    W14 = Register_REG_W14 as u32,
    W15 = Register_REG_W15 as u32,
    W16 = Register_REG_W16 as u32,
    W17 = Register_REG_W17 as u32,
    W18 = Register_REG_W18 as u32,
    W19 = Register_REG_W19 as u32,
    W20 = Register_REG_W20 as u32,
    W21 = Register_REG_W21 as u32,
    W22 = Register_REG_W22 as u32,
    W23 = Register_REG_W23 as u32,
    W24 = Register_REG_W24 as u32,
    W25 = Register_REG_W25 as u32,
    W26 = Register_REG_W26 as u32,
    W27 = Register_REG_W27 as u32,
    W28 = Register_REG_W28 as u32,
    W29 = Register_REG_W29 as u32,
    W30 = Register_REG_W30 as u32,
    WZR = Register_REG_WZR as u32,
    WSP = Register_REG_WSP as u32,
    X0 = Register_REG_X0 as u32,
    X1 = Register_REG_X1 as u32,
    X2 = Register_REG_X2 as u32,
    X3 = Register_REG_X3 as u32,
    X4 = Register_REG_X4 as u32,
    X5 = Register_REG_X5 as u32,
    X6 = Register_REG_X6 as u32,
    X7 = Register_REG_X7 as u32,
    X8 = Register_REG_X8 as u32,
    X9 = Register_REG_X9 as u32,
    X10 = Register_REG_X10 as u32,
    X11 = Register_REG_X11 as u32,
    X12 = Register_REG_X12 as u32,
    X13 = Register_REG_X13 as u32,
    X14 = Register_REG_X14 as u32,
    X15 = Register_REG_X15 as u32,
    X16 = Register_REG_X16 as u32,
    X17 = Register_REG_X17 as u32,
    X18 = Register_REG_X18 as u32,
    X19 = Register_REG_X19 as u32,
    X20 = Register_REG_X20 as u32,
    X21 = Register_REG_X21 as u32,
    X22 = Register_REG_X22 as u32,
    X23 = Register_REG_X23 as u32,
    X24 = Register_REG_X24 as u32,
    X25 = Register_REG_X25 as u32,
    X26 = Register_REG_X26 as u32,
    X27 = Register_REG_X27 as u32,
    X28 = Register_REG_X28 as u32,
    X29 = Register_REG_X29 as u32,
    X30 = Register_REG_X30 as u32,
    XZR = Register_REG_XZR as u32,
    SP = Register_REG_SP as u32,
    V0 = Register_REG_V0 as u32,
    V1 = Register_REG_V1 as u32,
    V2 = Register_REG_V2 as u32,
    V3 = Register_REG_V3 as u32,
    V4 = Register_REG_V4 as u32,
    V5 = Register_REG_V5 as u32,
    V6 = Register_REG_V6 as u32,
    V7 = Register_REG_V7 as u32,
    V8 = Register_REG_V8 as u32,
    V9 = Register_REG_V9 as u32,
    V10 = Register_REG_V10 as u32,
    V11 = Register_REG_V11 as u32,
    V12 = Register_REG_V12 as u32,
    V13 = Register_REG_V13 as u32,
    V14 = Register_REG_V14 as u32,
    V15 = Register_REG_V15 as u32,
    V16 = Register_REG_V16 as u32,
    V17 = Register_REG_V17 as u32,
    V18 = Register_REG_V18 as u32,
    V19 = Register_REG_V19 as u32,
    V20 = Register_REG_V20 as u32,
    V21 = Register_REG_V21 as u32,
    V22 = Register_REG_V22 as u32,
    V23 = Register_REG_V23 as u32,
    V24 = Register_REG_V24 as u32,
    V25 = Register_REG_V25 as u32,
    V26 = Register_REG_V26 as u32,
    V27 = Register_REG_V27 as u32,
    V28 = Register_REG_V28 as u32,
    V29 = Register_REG_V29 as u32,
    V30 = Register_REG_V30 as u32,
    VZR = Register_REG_VZR as u32,
    V31 = Register_REG_V31 as u32,
    B0 = Register_REG_B0 as u32,
    B1 = Register_REG_B1 as u32,
    B2 = Register_REG_B2 as u32,
    B3 = Register_REG_B3 as u32,
    B4 = Register_REG_B4 as u32,
    B5 = Register_REG_B5 as u32,
    B6 = Register_REG_B6 as u32,
    B7 = Register_REG_B7 as u32,
    B8 = Register_REG_B8 as u32,
    B9 = Register_REG_B9 as u32,
    B10 = Register_REG_B10 as u32,
    B11 = Register_REG_B11 as u32,
    B12 = Register_REG_B12 as u32,
    B13 = Register_REG_B13 as u32,
    B14 = Register_REG_B14 as u32,
    B15 = Register_REG_B15 as u32,
    B16 = Register_REG_B16 as u32,
    B17 = Register_REG_B17 as u32,
    B18 = Register_REG_B18 as u32,
    B19 = Register_REG_B19 as u32,
    B20 = Register_REG_B20 as u32,
    B21 = Register_REG_B21 as u32,
    B22 = Register_REG_B22 as u32,
    B23 = Register_REG_B23 as u32,
    B24 = Register_REG_B24 as u32,
    B25 = Register_REG_B25 as u32,
    B26 = Register_REG_B26 as u32,
    B27 = Register_REG_B27 as u32,
    B28 = Register_REG_B28 as u32,
    B29 = Register_REG_B29 as u32,
    B30 = Register_REG_B30 as u32,
    BZR = Register_REG_BZR as u32,
    B31 = Register_REG_B31 as u32,
    H0 = Register_REG_H0 as u32,
    H1 = Register_REG_H1 as u32,
    H2 = Register_REG_H2 as u32,
    H3 = Register_REG_H3 as u32,
    H4 = Register_REG_H4 as u32,
    H5 = Register_REG_H5 as u32,
    H6 = Register_REG_H6 as u32,
    H7 = Register_REG_H7 as u32,
    H8 = Register_REG_H8 as u32,
    H9 = Register_REG_H9 as u32,
    H10 = Register_REG_H10 as u32,
    H11 = Register_REG_H11 as u32,
    H12 = Register_REG_H12 as u32,
    H13 = Register_REG_H13 as u32,
    H14 = Register_REG_H14 as u32,
    H15 = Register_REG_H15 as u32,
    H16 = Register_REG_H16 as u32,
    H17 = Register_REG_H17 as u32,
    H18 = Register_REG_H18 as u32,
    H19 = Register_REG_H19 as u32,
    H20 = Register_REG_H20 as u32,
    H21 = Register_REG_H21 as u32,
    H22 = Register_REG_H22 as u32,
    H23 = Register_REG_H23 as u32,
    H24 = Register_REG_H24 as u32,
    H25 = Register_REG_H25 as u32,
    H26 = Register_REG_H26 as u32,
    H27 = Register_REG_H27 as u32,
    H28 = Register_REG_H28 as u32,
    H29 = Register_REG_H29 as u32,
    H30 = Register_REG_H30 as u32,
    HZR = Register_REG_HZR as u32,
    H31 = Register_REG_H31 as u32,
    S0 = Register_REG_S0 as u32,
    S1 = Register_REG_S1 as u32,
    S2 = Register_REG_S2 as u32,
    S3 = Register_REG_S3 as u32,
    S4 = Register_REG_S4 as u32,
    S5 = Register_REG_S5 as u32,
    S6 = Register_REG_S6 as u32,
    S7 = Register_REG_S7 as u32,
    S8 = Register_REG_S8 as u32,
    S9 = Register_REG_S9 as u32,
    S10 = Register_REG_S10 as u32,
    S11 = Register_REG_S11 as u32,
    S12 = Register_REG_S12 as u32,
    S13 = Register_REG_S13 as u32,
    S14 = Register_REG_S14 as u32,
    S15 = Register_REG_S15 as u32,
    S16 = Register_REG_S16 as u32,
    S17 = Register_REG_S17 as u32,
    S18 = Register_REG_S18 as u32,
    S19 = Register_REG_S19 as u32,
    S20 = Register_REG_S20 as u32,
    S21 = Register_REG_S21 as u32,
    S22 = Register_REG_S22 as u32,
    S23 = Register_REG_S23 as u32,
    S24 = Register_REG_S24 as u32,
    S25 = Register_REG_S25 as u32,
    S26 = Register_REG_S26 as u32,
    S27 = Register_REG_S27 as u32,
    S28 = Register_REG_S28 as u32,
    S29 = Register_REG_S29 as u32,
    S30 = Register_REG_S30 as u32,
    SZR = Register_REG_SZR as u32,
    S31 = Register_REG_S31 as u32,
    D0 = Register_REG_D0 as u32,
    D1 = Register_REG_D1 as u32,
    D2 = Register_REG_D2 as u32,
    D3 = Register_REG_D3 as u32,
    D4 = Register_REG_D4 as u32,
    D5 = Register_REG_D5 as u32,
    D6 = Register_REG_D6 as u32,
    D7 = Register_REG_D7 as u32,
    D8 = Register_REG_D8 as u32,
    D9 = Register_REG_D9 as u32,
    D10 = Register_REG_D10 as u32,
    D11 = Register_REG_D11 as u32,
    D12 = Register_REG_D12 as u32,
    D13 = Register_REG_D13 as u32,
    D14 = Register_REG_D14 as u32,
    D15 = Register_REG_D15 as u32,
    D16 = Register_REG_D16 as u32,
    D17 = Register_REG_D17 as u32,
    D18 = Register_REG_D18 as u32,
    D19 = Register_REG_D19 as u32,
    D20 = Register_REG_D20 as u32,
    D21 = Register_REG_D21 as u32,
    D22 = Register_REG_D22 as u32,
    D23 = Register_REG_D23 as u32,
    D24 = Register_REG_D24 as u32,
    D25 = Register_REG_D25 as u32,
    D26 = Register_REG_D26 as u32,
    D27 = Register_REG_D27 as u32,
    D28 = Register_REG_D28 as u32,
    D29 = Register_REG_D29 as u32,
    D30 = Register_REG_D30 as u32,
    DZR = Register_REG_DZR as u32,
    D31 = Register_REG_D31 as u32,
    Q0 = Register_REG_Q0 as u32,
    Q1 = Register_REG_Q1 as u32,
    Q2 = Register_REG_Q2 as u32,
    Q3 = Register_REG_Q3 as u32,
    Q4 = Register_REG_Q4 as u32,
    Q5 = Register_REG_Q5 as u32,
    Q6 = Register_REG_Q6 as u32,
    Q7 = Register_REG_Q7 as u32,
    Q8 = Register_REG_Q8 as u32,
    Q9 = Register_REG_Q9 as u32,
    Q10 = Register_REG_Q10 as u32,
    Q11 = Register_REG_Q11 as u32,
    Q12 = Register_REG_Q12 as u32,
    Q13 = Register_REG_Q13 as u32,
    Q14 = Register_REG_Q14 as u32,
    Q15 = Register_REG_Q15 as u32,
    Q16 = Register_REG_Q16 as u32,
    Q17 = Register_REG_Q17 as u32,
    Q18 = Register_REG_Q18 as u32,
    Q19 = Register_REG_Q19 as u32,
    Q20 = Register_REG_Q20 as u32,
    Q21 = Register_REG_Q21 as u32,
    Q22 = Register_REG_Q22 as u32,
    Q23 = Register_REG_Q23 as u32,
    Q24 = Register_REG_Q24 as u32,
    Q25 = Register_REG_Q25 as u32,
    Q26 = Register_REG_Q26 as u32,
    Q27 = Register_REG_Q27 as u32,
    Q28 = Register_REG_Q28 as u32,
    Q29 = Register_REG_Q29 as u32,
    Q30 = Register_REG_Q30 as u32,
    QZR = Register_REG_QZR as u32,
    Q31 = Register_REG_Q31 as u32,
    V0_B0 = Register_REG_V0_B0 as u32,
    V0_B1 = Register_REG_V0_B1 as u32,
    V0_B2 = Register_REG_V0_B2 as u32,
    V0_B3 = Register_REG_V0_B3 as u32,
    V0_B4 = Register_REG_V0_B4 as u32,
    V0_B5 = Register_REG_V0_B5 as u32,
    V0_B6 = Register_REG_V0_B6 as u32,
    V0_B7 = Register_REG_V0_B7 as u32,
    V0_B8 = Register_REG_V0_B8 as u32,
    V0_B9 = Register_REG_V0_B9 as u32,
    V0_B10 = Register_REG_V0_B10 as u32,
    V0_B11 = Register_REG_V0_B11 as u32,
    V0_B12 = Register_REG_V0_B12 as u32,
    V0_B13 = Register_REG_V0_B13 as u32,
    V0_B14 = Register_REG_V0_B14 as u32,
    V0_B15 = Register_REG_V0_B15 as u32,
    V1_B0 = Register_REG_V1_B0 as u32,
    V1_B1 = Register_REG_V1_B1 as u32,
    V1_B2 = Register_REG_V1_B2 as u32,
    V1_B3 = Register_REG_V1_B3 as u32,
    V1_B4 = Register_REG_V1_B4 as u32,
    V1_B5 = Register_REG_V1_B5 as u32,
    V1_B6 = Register_REG_V1_B6 as u32,
    V1_B7 = Register_REG_V1_B7 as u32,
    V1_B8 = Register_REG_V1_B8 as u32,
    V1_B9 = Register_REG_V1_B9 as u32,
    V1_B10 = Register_REG_V1_B10 as u32,
    V1_B11 = Register_REG_V1_B11 as u32,
    V1_B12 = Register_REG_V1_B12 as u32,
    V1_B13 = Register_REG_V1_B13 as u32,
    V1_B14 = Register_REG_V1_B14 as u32,
    V1_B15 = Register_REG_V1_B15 as u32,
    V2_B0 = Register_REG_V2_B0 as u32,
    V2_B1 = Register_REG_V2_B1 as u32,
    V2_B2 = Register_REG_V2_B2 as u32,
    V2_B3 = Register_REG_V2_B3 as u32,
    V2_B4 = Register_REG_V2_B4 as u32,
    V2_B5 = Register_REG_V2_B5 as u32,
    V2_B6 = Register_REG_V2_B6 as u32,
    V2_B7 = Register_REG_V2_B7 as u32,
    V2_B8 = Register_REG_V2_B8 as u32,
    V2_B9 = Register_REG_V2_B9 as u32,
    V2_B10 = Register_REG_V2_B10 as u32,
    V2_B11 = Register_REG_V2_B11 as u32,
    V2_B12 = Register_REG_V2_B12 as u32,
    V2_B13 = Register_REG_V2_B13 as u32,
    V2_B14 = Register_REG_V2_B14 as u32,
    V2_B15 = Register_REG_V2_B15 as u32,
    V3_B0 = Register_REG_V3_B0 as u32,
    V3_B1 = Register_REG_V3_B1 as u32,
    V3_B2 = Register_REG_V3_B2 as u32,
    V3_B3 = Register_REG_V3_B3 as u32,
    V3_B4 = Register_REG_V3_B4 as u32,
    V3_B5 = Register_REG_V3_B5 as u32,
    V3_B6 = Register_REG_V3_B6 as u32,
    V3_B7 = Register_REG_V3_B7 as u32,
    V3_B8 = Register_REG_V3_B8 as u32,
    V3_B9 = Register_REG_V3_B9 as u32,
    V3_B10 = Register_REG_V3_B10 as u32,
    V3_B11 = Register_REG_V3_B11 as u32,
    V3_B12 = Register_REG_V3_B12 as u32,
    V3_B13 = Register_REG_V3_B13 as u32,
    V3_B14 = Register_REG_V3_B14 as u32,
    V3_B15 = Register_REG_V3_B15 as u32,
    V4_B0 = Register_REG_V4_B0 as u32,
    V4_B1 = Register_REG_V4_B1 as u32,
    V4_B2 = Register_REG_V4_B2 as u32,
    V4_B3 = Register_REG_V4_B3 as u32,
    V4_B4 = Register_REG_V4_B4 as u32,
    V4_B5 = Register_REG_V4_B5 as u32,
    V4_B6 = Register_REG_V4_B6 as u32,
    V4_B7 = Register_REG_V4_B7 as u32,
    V4_B8 = Register_REG_V4_B8 as u32,
    V4_B9 = Register_REG_V4_B9 as u32,
    V4_B10 = Register_REG_V4_B10 as u32,
    V4_B11 = Register_REG_V4_B11 as u32,
    V4_B12 = Register_REG_V4_B12 as u32,
    V4_B13 = Register_REG_V4_B13 as u32,
    V4_B14 = Register_REG_V4_B14 as u32,
    V4_B15 = Register_REG_V4_B15 as u32,
    V5_B0 = Register_REG_V5_B0 as u32,
    V5_B1 = Register_REG_V5_B1 as u32,
    V5_B2 = Register_REG_V5_B2 as u32,
    V5_B3 = Register_REG_V5_B3 as u32,
    V5_B4 = Register_REG_V5_B4 as u32,
    V5_B5 = Register_REG_V5_B5 as u32,
    V5_B6 = Register_REG_V5_B6 as u32,
    V5_B7 = Register_REG_V5_B7 as u32,
    V5_B8 = Register_REG_V5_B8 as u32,
    V5_B9 = Register_REG_V5_B9 as u32,
    V5_B10 = Register_REG_V5_B10 as u32,
    V5_B11 = Register_REG_V5_B11 as u32,
    V5_B12 = Register_REG_V5_B12 as u32,
    V5_B13 = Register_REG_V5_B13 as u32,
    V5_B14 = Register_REG_V5_B14 as u32,
    V5_B15 = Register_REG_V5_B15 as u32,
    V6_B0 = Register_REG_V6_B0 as u32,
    V6_B1 = Register_REG_V6_B1 as u32,
    V6_B2 = Register_REG_V6_B2 as u32,
    V6_B3 = Register_REG_V6_B3 as u32,
    V6_B4 = Register_REG_V6_B4 as u32,
    V6_B5 = Register_REG_V6_B5 as u32,
    V6_B6 = Register_REG_V6_B6 as u32,
    V6_B7 = Register_REG_V6_B7 as u32,
    V6_B8 = Register_REG_V6_B8 as u32,
    V6_B9 = Register_REG_V6_B9 as u32,
    V6_B10 = Register_REG_V6_B10 as u32,
    V6_B11 = Register_REG_V6_B11 as u32,
    V6_B12 = Register_REG_V6_B12 as u32,
    V6_B13 = Register_REG_V6_B13 as u32,
    V6_B14 = Register_REG_V6_B14 as u32,
    V6_B15 = Register_REG_V6_B15 as u32,
    V7_B0 = Register_REG_V7_B0 as u32,
    V7_B1 = Register_REG_V7_B1 as u32,
    V7_B2 = Register_REG_V7_B2 as u32,
    V7_B3 = Register_REG_V7_B3 as u32,
    V7_B4 = Register_REG_V7_B4 as u32,
    V7_B5 = Register_REG_V7_B5 as u32,
    V7_B6 = Register_REG_V7_B6 as u32,
    V7_B7 = Register_REG_V7_B7 as u32,
    V7_B8 = Register_REG_V7_B8 as u32,
    V7_B9 = Register_REG_V7_B9 as u32,
    V7_B10 = Register_REG_V7_B10 as u32,
    V7_B11 = Register_REG_V7_B11 as u32,
    V7_B12 = Register_REG_V7_B12 as u32,
    V7_B13 = Register_REG_V7_B13 as u32,
    V7_B14 = Register_REG_V7_B14 as u32,
    V7_B15 = Register_REG_V7_B15 as u32,
    V8_B0 = Register_REG_V8_B0 as u32,
    V8_B1 = Register_REG_V8_B1 as u32,
    V8_B2 = Register_REG_V8_B2 as u32,
    V8_B3 = Register_REG_V8_B3 as u32,
    V8_B4 = Register_REG_V8_B4 as u32,
    V8_B5 = Register_REG_V8_B5 as u32,
    V8_B6 = Register_REG_V8_B6 as u32,
    V8_B7 = Register_REG_V8_B7 as u32,
    V8_B8 = Register_REG_V8_B8 as u32,
    V8_B9 = Register_REG_V8_B9 as u32,
    V8_B10 = Register_REG_V8_B10 as u32,
    V8_B11 = Register_REG_V8_B11 as u32,
    V8_B12 = Register_REG_V8_B12 as u32,
    V8_B13 = Register_REG_V8_B13 as u32,
    V8_B14 = Register_REG_V8_B14 as u32,
    V8_B15 = Register_REG_V8_B15 as u32,
    V9_B0 = Register_REG_V9_B0 as u32,
    V9_B1 = Register_REG_V9_B1 as u32,
    V9_B2 = Register_REG_V9_B2 as u32,
    V9_B3 = Register_REG_V9_B3 as u32,
    V9_B4 = Register_REG_V9_B4 as u32,
    V9_B5 = Register_REG_V9_B5 as u32,
    V9_B6 = Register_REG_V9_B6 as u32,
    V9_B7 = Register_REG_V9_B7 as u32,
    V9_B8 = Register_REG_V9_B8 as u32,
    V9_B9 = Register_REG_V9_B9 as u32,
    V9_B10 = Register_REG_V9_B10 as u32,
    V9_B11 = Register_REG_V9_B11 as u32,
    V9_B12 = Register_REG_V9_B12 as u32,
    V9_B13 = Register_REG_V9_B13 as u32,
    V9_B14 = Register_REG_V9_B14 as u32,
    V9_B15 = Register_REG_V9_B15 as u32,
    V10_B0 = Register_REG_V10_B0 as u32,
    V10_B1 = Register_REG_V10_B1 as u32,
    V10_B2 = Register_REG_V10_B2 as u32,
    V10_B3 = Register_REG_V10_B3 as u32,
    V10_B4 = Register_REG_V10_B4 as u32,
    V10_B5 = Register_REG_V10_B5 as u32,
    V10_B6 = Register_REG_V10_B6 as u32,
    V10_B7 = Register_REG_V10_B7 as u32,
    V10_B8 = Register_REG_V10_B8 as u32,
    V10_B9 = Register_REG_V10_B9 as u32,
    V10_B10 = Register_REG_V10_B10 as u32,
    V10_B11 = Register_REG_V10_B11 as u32,
    V10_B12 = Register_REG_V10_B12 as u32,
    V10_B13 = Register_REG_V10_B13 as u32,
    V10_B14 = Register_REG_V10_B14 as u32,
    V10_B15 = Register_REG_V10_B15 as u32,
    V11_B0 = Register_REG_V11_B0 as u32,
    V11_B1 = Register_REG_V11_B1 as u32,
    V11_B2 = Register_REG_V11_B2 as u32,
    V11_B3 = Register_REG_V11_B3 as u32,
    V11_B4 = Register_REG_V11_B4 as u32,
    V11_B5 = Register_REG_V11_B5 as u32,
    V11_B6 = Register_REG_V11_B6 as u32,
    V11_B7 = Register_REG_V11_B7 as u32,
    V11_B8 = Register_REG_V11_B8 as u32,
    V11_B9 = Register_REG_V11_B9 as u32,
    V11_B10 = Register_REG_V11_B10 as u32,
    V11_B11 = Register_REG_V11_B11 as u32,
    V11_B12 = Register_REG_V11_B12 as u32,
    V11_B13 = Register_REG_V11_B13 as u32,
    V11_B14 = Register_REG_V11_B14 as u32,
    V11_B15 = Register_REG_V11_B15 as u32,
    V12_B0 = Register_REG_V12_B0 as u32,
    V12_B1 = Register_REG_V12_B1 as u32,
    V12_B2 = Register_REG_V12_B2 as u32,
    V12_B3 = Register_REG_V12_B3 as u32,
    V12_B4 = Register_REG_V12_B4 as u32,
    V12_B5 = Register_REG_V12_B5 as u32,
    V12_B6 = Register_REG_V12_B6 as u32,
    V12_B7 = Register_REG_V12_B7 as u32,
    V12_B8 = Register_REG_V12_B8 as u32,
    V12_B9 = Register_REG_V12_B9 as u32,
    V12_B10 = Register_REG_V12_B10 as u32,
    V12_B11 = Register_REG_V12_B11 as u32,
    V12_B12 = Register_REG_V12_B12 as u32,
    V12_B13 = Register_REG_V12_B13 as u32,
    V12_B14 = Register_REG_V12_B14 as u32,
    V12_B15 = Register_REG_V12_B15 as u32,
    V13_B0 = Register_REG_V13_B0 as u32,
    V13_B1 = Register_REG_V13_B1 as u32,
    V13_B2 = Register_REG_V13_B2 as u32,
    V13_B3 = Register_REG_V13_B3 as u32,
    V13_B4 = Register_REG_V13_B4 as u32,
    V13_B5 = Register_REG_V13_B5 as u32,
    V13_B6 = Register_REG_V13_B6 as u32,
    V13_B7 = Register_REG_V13_B7 as u32,
    V13_B8 = Register_REG_V13_B8 as u32,
    V13_B9 = Register_REG_V13_B9 as u32,
    V13_B10 = Register_REG_V13_B10 as u32,
    V13_B11 = Register_REG_V13_B11 as u32,
    V13_B12 = Register_REG_V13_B12 as u32,
    V13_B13 = Register_REG_V13_B13 as u32,
    V13_B14 = Register_REG_V13_B14 as u32,
    V13_B15 = Register_REG_V13_B15 as u32,
    V14_B0 = Register_REG_V14_B0 as u32,
    V14_B1 = Register_REG_V14_B1 as u32,
    V14_B2 = Register_REG_V14_B2 as u32,
    V14_B3 = Register_REG_V14_B3 as u32,
    V14_B4 = Register_REG_V14_B4 as u32,
    V14_B5 = Register_REG_V14_B5 as u32,
    V14_B6 = Register_REG_V14_B6 as u32,
    V14_B7 = Register_REG_V14_B7 as u32,
    V14_B8 = Register_REG_V14_B8 as u32,
    V14_B9 = Register_REG_V14_B9 as u32,
    V14_B10 = Register_REG_V14_B10 as u32,
    V14_B11 = Register_REG_V14_B11 as u32,
    V14_B12 = Register_REG_V14_B12 as u32,
    V14_B13 = Register_REG_V14_B13 as u32,
    V14_B14 = Register_REG_V14_B14 as u32,
    V14_B15 = Register_REG_V14_B15 as u32,
    V15_B0 = Register_REG_V15_B0 as u32,
    V15_B1 = Register_REG_V15_B1 as u32,
    V15_B2 = Register_REG_V15_B2 as u32,
    V15_B3 = Register_REG_V15_B3 as u32,
    V15_B4 = Register_REG_V15_B4 as u32,
    V15_B5 = Register_REG_V15_B5 as u32,
    V15_B6 = Register_REG_V15_B6 as u32,
    V15_B7 = Register_REG_V15_B7 as u32,
    V15_B8 = Register_REG_V15_B8 as u32,
    V15_B9 = Register_REG_V15_B9 as u32,
    V15_B10 = Register_REG_V15_B10 as u32,
    V15_B11 = Register_REG_V15_B11 as u32,
    V15_B12 = Register_REG_V15_B12 as u32,
    V15_B13 = Register_REG_V15_B13 as u32,
    V15_B14 = Register_REG_V15_B14 as u32,
    V15_B15 = Register_REG_V15_B15 as u32,
    V16_B0 = Register_REG_V16_B0 as u32,
    V16_B1 = Register_REG_V16_B1 as u32,
    V16_B2 = Register_REG_V16_B2 as u32,
    V16_B3 = Register_REG_V16_B3 as u32,
    V16_B4 = Register_REG_V16_B4 as u32,
    V16_B5 = Register_REG_V16_B5 as u32,
    V16_B6 = Register_REG_V16_B6 as u32,
    V16_B7 = Register_REG_V16_B7 as u32,
    V16_B8 = Register_REG_V16_B8 as u32,
    V16_B9 = Register_REG_V16_B9 as u32,
    V16_B10 = Register_REG_V16_B10 as u32,
    V16_B11 = Register_REG_V16_B11 as u32,
    V16_B12 = Register_REG_V16_B12 as u32,
    V16_B13 = Register_REG_V16_B13 as u32,
    V16_B14 = Register_REG_V16_B14 as u32,
    V16_B15 = Register_REG_V16_B15 as u32,
    V17_B0 = Register_REG_V17_B0 as u32,
    V17_B1 = Register_REG_V17_B1 as u32,
    V17_B2 = Register_REG_V17_B2 as u32,
    V17_B3 = Register_REG_V17_B3 as u32,
    V17_B4 = Register_REG_V17_B4 as u32,
    V17_B5 = Register_REG_V17_B5 as u32,
    V17_B6 = Register_REG_V17_B6 as u32,
    V17_B7 = Register_REG_V17_B7 as u32,
    V17_B8 = Register_REG_V17_B8 as u32,
    V17_B9 = Register_REG_V17_B9 as u32,
    V17_B10 = Register_REG_V17_B10 as u32,
    V17_B11 = Register_REG_V17_B11 as u32,
    V17_B12 = Register_REG_V17_B12 as u32,
    V17_B13 = Register_REG_V17_B13 as u32,
    V17_B14 = Register_REG_V17_B14 as u32,
    V17_B15 = Register_REG_V17_B15 as u32,
    V18_B0 = Register_REG_V18_B0 as u32,
    V18_B1 = Register_REG_V18_B1 as u32,
    V18_B2 = Register_REG_V18_B2 as u32,
    V18_B3 = Register_REG_V18_B3 as u32,
    V18_B4 = Register_REG_V18_B4 as u32,
    V18_B5 = Register_REG_V18_B5 as u32,
    V18_B6 = Register_REG_V18_B6 as u32,
    V18_B7 = Register_REG_V18_B7 as u32,
    V18_B8 = Register_REG_V18_B8 as u32,
    V18_B9 = Register_REG_V18_B9 as u32,
    V18_B10 = Register_REG_V18_B10 as u32,
    V18_B11 = Register_REG_V18_B11 as u32,
    V18_B12 = Register_REG_V18_B12 as u32,
    V18_B13 = Register_REG_V18_B13 as u32,
    V18_B14 = Register_REG_V18_B14 as u32,
    V18_B15 = Register_REG_V18_B15 as u32,
    V19_B0 = Register_REG_V19_B0 as u32,
    V19_B1 = Register_REG_V19_B1 as u32,
    V19_B2 = Register_REG_V19_B2 as u32,
    V19_B3 = Register_REG_V19_B3 as u32,
    V19_B4 = Register_REG_V19_B4 as u32,
    V19_B5 = Register_REG_V19_B5 as u32,
    V19_B6 = Register_REG_V19_B6 as u32,
    V19_B7 = Register_REG_V19_B7 as u32,
    V19_B8 = Register_REG_V19_B8 as u32,
    V19_B9 = Register_REG_V19_B9 as u32,
    V19_B10 = Register_REG_V19_B10 as u32,
    V19_B11 = Register_REG_V19_B11 as u32,
    V19_B12 = Register_REG_V19_B12 as u32,
    V19_B13 = Register_REG_V19_B13 as u32,
    V19_B14 = Register_REG_V19_B14 as u32,
    V19_B15 = Register_REG_V19_B15 as u32,
    V20_B0 = Register_REG_V20_B0 as u32,
    V20_B1 = Register_REG_V20_B1 as u32,
    V20_B2 = Register_REG_V20_B2 as u32,
    V20_B3 = Register_REG_V20_B3 as u32,
    V20_B4 = Register_REG_V20_B4 as u32,
    V20_B5 = Register_REG_V20_B5 as u32,
    V20_B6 = Register_REG_V20_B6 as u32,
    V20_B7 = Register_REG_V20_B7 as u32,
    V20_B8 = Register_REG_V20_B8 as u32,
    V20_B9 = Register_REG_V20_B9 as u32,
    V20_B10 = Register_REG_V20_B10 as u32,
    V20_B11 = Register_REG_V20_B11 as u32,
    V20_B12 = Register_REG_V20_B12 as u32,
    V20_B13 = Register_REG_V20_B13 as u32,
    V20_B14 = Register_REG_V20_B14 as u32,
    V20_B15 = Register_REG_V20_B15 as u32,
    V21_B0 = Register_REG_V21_B0 as u32,
    V21_B1 = Register_REG_V21_B1 as u32,
    V21_B2 = Register_REG_V21_B2 as u32,
    V21_B3 = Register_REG_V21_B3 as u32,
    V21_B4 = Register_REG_V21_B4 as u32,
    V21_B5 = Register_REG_V21_B5 as u32,
    V21_B6 = Register_REG_V21_B6 as u32,
    V21_B7 = Register_REG_V21_B7 as u32,
    V21_B8 = Register_REG_V21_B8 as u32,
    V21_B9 = Register_REG_V21_B9 as u32,
    V21_B10 = Register_REG_V21_B10 as u32,
    V21_B11 = Register_REG_V21_B11 as u32,
    V21_B12 = Register_REG_V21_B12 as u32,
    V21_B13 = Register_REG_V21_B13 as u32,
    V21_B14 = Register_REG_V21_B14 as u32,
    V21_B15 = Register_REG_V21_B15 as u32,
    V22_B0 = Register_REG_V22_B0 as u32,
    V22_B1 = Register_REG_V22_B1 as u32,
    V22_B2 = Register_REG_V22_B2 as u32,
    V22_B3 = Register_REG_V22_B3 as u32,
    V22_B4 = Register_REG_V22_B4 as u32,
    V22_B5 = Register_REG_V22_B5 as u32,
    V22_B6 = Register_REG_V22_B6 as u32,
    V22_B7 = Register_REG_V22_B7 as u32,
    V22_B8 = Register_REG_V22_B8 as u32,
    V22_B9 = Register_REG_V22_B9 as u32,
    V22_B10 = Register_REG_V22_B10 as u32,
    V22_B11 = Register_REG_V22_B11 as u32,
    V22_B12 = Register_REG_V22_B12 as u32,
    V22_B13 = Register_REG_V22_B13 as u32,
    V22_B14 = Register_REG_V22_B14 as u32,
    V22_B15 = Register_REG_V22_B15 as u32,
    V23_B0 = Register_REG_V23_B0 as u32,
    V23_B1 = Register_REG_V23_B1 as u32,
    V23_B2 = Register_REG_V23_B2 as u32,
    V23_B3 = Register_REG_V23_B3 as u32,
    V23_B4 = Register_REG_V23_B4 as u32,
    V23_B5 = Register_REG_V23_B5 as u32,
    V23_B6 = Register_REG_V23_B6 as u32,
    V23_B7 = Register_REG_V23_B7 as u32,
    V23_B8 = Register_REG_V23_B8 as u32,
    V23_B9 = Register_REG_V23_B9 as u32,
    V23_B10 = Register_REG_V23_B10 as u32,
    V23_B11 = Register_REG_V23_B11 as u32,
    V23_B12 = Register_REG_V23_B12 as u32,
    V23_B13 = Register_REG_V23_B13 as u32,
    V23_B14 = Register_REG_V23_B14 as u32,
    V23_B15 = Register_REG_V23_B15 as u32,
    V24_B0 = Register_REG_V24_B0 as u32,
    V24_B1 = Register_REG_V24_B1 as u32,
    V24_B2 = Register_REG_V24_B2 as u32,
    V24_B3 = Register_REG_V24_B3 as u32,
    V24_B4 = Register_REG_V24_B4 as u32,
    V24_B5 = Register_REG_V24_B5 as u32,
    V24_B6 = Register_REG_V24_B6 as u32,
    V24_B7 = Register_REG_V24_B7 as u32,
    V24_B8 = Register_REG_V24_B8 as u32,
    V24_B9 = Register_REG_V24_B9 as u32,
    V24_B10 = Register_REG_V24_B10 as u32,
    V24_B11 = Register_REG_V24_B11 as u32,
    V24_B12 = Register_REG_V24_B12 as u32,
    V24_B13 = Register_REG_V24_B13 as u32,
    V24_B14 = Register_REG_V24_B14 as u32,
    V24_B15 = Register_REG_V24_B15 as u32,
    V25_B0 = Register_REG_V25_B0 as u32,
    V25_B1 = Register_REG_V25_B1 as u32,
    V25_B2 = Register_REG_V25_B2 as u32,
    V25_B3 = Register_REG_V25_B3 as u32,
    V25_B4 = Register_REG_V25_B4 as u32,
    V25_B5 = Register_REG_V25_B5 as u32,
    V25_B6 = Register_REG_V25_B6 as u32,
    V25_B7 = Register_REG_V25_B7 as u32,
    V25_B8 = Register_REG_V25_B8 as u32,
    V25_B9 = Register_REG_V25_B9 as u32,
    V25_B10 = Register_REG_V25_B10 as u32,
    V25_B11 = Register_REG_V25_B11 as u32,
    V25_B12 = Register_REG_V25_B12 as u32,
    V25_B13 = Register_REG_V25_B13 as u32,
    V25_B14 = Register_REG_V25_B14 as u32,
    V25_B15 = Register_REG_V25_B15 as u32,
    V26_B0 = Register_REG_V26_B0 as u32,
    V26_B1 = Register_REG_V26_B1 as u32,
    V26_B2 = Register_REG_V26_B2 as u32,
    V26_B3 = Register_REG_V26_B3 as u32,
    V26_B4 = Register_REG_V26_B4 as u32,
    V26_B5 = Register_REG_V26_B5 as u32,
    V26_B6 = Register_REG_V26_B6 as u32,
    V26_B7 = Register_REG_V26_B7 as u32,
    V26_B8 = Register_REG_V26_B8 as u32,
    V26_B9 = Register_REG_V26_B9 as u32,
    V26_B10 = Register_REG_V26_B10 as u32,
    V26_B11 = Register_REG_V26_B11 as u32,
    V26_B12 = Register_REG_V26_B12 as u32,
    V26_B13 = Register_REG_V26_B13 as u32,
    V26_B14 = Register_REG_V26_B14 as u32,
    V26_B15 = Register_REG_V26_B15 as u32,
    V27_B0 = Register_REG_V27_B0 as u32,
    V27_B1 = Register_REG_V27_B1 as u32,
    V27_B2 = Register_REG_V27_B2 as u32,
    V27_B3 = Register_REG_V27_B3 as u32,
    V27_B4 = Register_REG_V27_B4 as u32,
    V27_B5 = Register_REG_V27_B5 as u32,
    V27_B6 = Register_REG_V27_B6 as u32,
    V27_B7 = Register_REG_V27_B7 as u32,
    V27_B8 = Register_REG_V27_B8 as u32,
    V27_B9 = Register_REG_V27_B9 as u32,
    V27_B10 = Register_REG_V27_B10 as u32,
    V27_B11 = Register_REG_V27_B11 as u32,
    V27_B12 = Register_REG_V27_B12 as u32,
    V27_B13 = Register_REG_V27_B13 as u32,
    V27_B14 = Register_REG_V27_B14 as u32,
    V27_B15 = Register_REG_V27_B15 as u32,
    V28_B0 = Register_REG_V28_B0 as u32,
    V28_B1 = Register_REG_V28_B1 as u32,
    V28_B2 = Register_REG_V28_B2 as u32,
    V28_B3 = Register_REG_V28_B3 as u32,
    V28_B4 = Register_REG_V28_B4 as u32,
    V28_B5 = Register_REG_V28_B5 as u32,
    V28_B6 = Register_REG_V28_B6 as u32,
    V28_B7 = Register_REG_V28_B7 as u32,
    V28_B8 = Register_REG_V28_B8 as u32,
    V28_B9 = Register_REG_V28_B9 as u32,
    V28_B10 = Register_REG_V28_B10 as u32,
    V28_B11 = Register_REG_V28_B11 as u32,
    V28_B12 = Register_REG_V28_B12 as u32,
    V28_B13 = Register_REG_V28_B13 as u32,
    V28_B14 = Register_REG_V28_B14 as u32,
    V28_B15 = Register_REG_V28_B15 as u32,
    V29_B0 = Register_REG_V29_B0 as u32,
    V29_B1 = Register_REG_V29_B1 as u32,
    V29_B2 = Register_REG_V29_B2 as u32,
    V29_B3 = Register_REG_V29_B3 as u32,
    V29_B4 = Register_REG_V29_B4 as u32,
    V29_B5 = Register_REG_V29_B5 as u32,
    V29_B6 = Register_REG_V29_B6 as u32,
    V29_B7 = Register_REG_V29_B7 as u32,
    V29_B8 = Register_REG_V29_B8 as u32,
    V29_B9 = Register_REG_V29_B9 as u32,
    V29_B10 = Register_REG_V29_B10 as u32,
    V29_B11 = Register_REG_V29_B11 as u32,
    V29_B12 = Register_REG_V29_B12 as u32,
    V29_B13 = Register_REG_V29_B13 as u32,
    V29_B14 = Register_REG_V29_B14 as u32,
    V29_B15 = Register_REG_V29_B15 as u32,
    V30_B0 = Register_REG_V30_B0 as u32,
    V30_B1 = Register_REG_V30_B1 as u32,
    V30_B2 = Register_REG_V30_B2 as u32,
    V30_B3 = Register_REG_V30_B3 as u32,
    V30_B4 = Register_REG_V30_B4 as u32,
    V30_B5 = Register_REG_V30_B5 as u32,
    V30_B6 = Register_REG_V30_B6 as u32,
    V30_B7 = Register_REG_V30_B7 as u32,
    V30_B8 = Register_REG_V30_B8 as u32,
    V30_B9 = Register_REG_V30_B9 as u32,
    V30_B10 = Register_REG_V30_B10 as u32,
    V30_B11 = Register_REG_V30_B11 as u32,
    V30_B12 = Register_REG_V30_B12 as u32,
    V30_B13 = Register_REG_V30_B13 as u32,
    V30_B14 = Register_REG_V30_B14 as u32,
    V30_B15 = Register_REG_V30_B15 as u32,
    V31_B0 = Register_REG_V31_B0 as u32,
    V31_B1 = Register_REG_V31_B1 as u32,
    V31_B2 = Register_REG_V31_B2 as u32,
    V31_B3 = Register_REG_V31_B3 as u32,
    V31_B4 = Register_REG_V31_B4 as u32,
    V31_B5 = Register_REG_V31_B5 as u32,
    V31_B6 = Register_REG_V31_B6 as u32,
    V31_B7 = Register_REG_V31_B7 as u32,
    V31_B8 = Register_REG_V31_B8 as u32,
    V31_B9 = Register_REG_V31_B9 as u32,
    V31_B10 = Register_REG_V31_B10 as u32,
    V31_B11 = Register_REG_V31_B11 as u32,
    V31_B12 = Register_REG_V31_B12 as u32,
    V31_B13 = Register_REG_V31_B13 as u32,
    V31_B14 = Register_REG_V31_B14 as u32,
    V31_B15 = Register_REG_V31_B15 as u32,
    V0_H0 = Register_REG_V0_H0 as u32,
    V0_H1 = Register_REG_V0_H1 as u32,
    V0_H2 = Register_REG_V0_H2 as u32,
    V0_H3 = Register_REG_V0_H3 as u32,
    V0_H4 = Register_REG_V0_H4 as u32,
    V0_H5 = Register_REG_V0_H5 as u32,
    V0_H6 = Register_REG_V0_H6 as u32,
    V0_H7 = Register_REG_V0_H7 as u32,
    V1_H0 = Register_REG_V1_H0 as u32,
    V1_H1 = Register_REG_V1_H1 as u32,
    V1_H2 = Register_REG_V1_H2 as u32,
    V1_H3 = Register_REG_V1_H3 as u32,
    V1_H4 = Register_REG_V1_H4 as u32,
    V1_H5 = Register_REG_V1_H5 as u32,
    V1_H6 = Register_REG_V1_H6 as u32,
    V1_H7 = Register_REG_V1_H7 as u32,
    V2_H0 = Register_REG_V2_H0 as u32,
    V2_H1 = Register_REG_V2_H1 as u32,
    V2_H2 = Register_REG_V2_H2 as u32,
    V2_H3 = Register_REG_V2_H3 as u32,
    V2_H4 = Register_REG_V2_H4 as u32,
    V2_H5 = Register_REG_V2_H5 as u32,
    V2_H6 = Register_REG_V2_H6 as u32,
    V2_H7 = Register_REG_V2_H7 as u32,
    V3_H0 = Register_REG_V3_H0 as u32,
    V3_H1 = Register_REG_V3_H1 as u32,
    V3_H2 = Register_REG_V3_H2 as u32,
    V3_H3 = Register_REG_V3_H3 as u32,
    V3_H4 = Register_REG_V3_H4 as u32,
    V3_H5 = Register_REG_V3_H5 as u32,
    V3_H6 = Register_REG_V3_H6 as u32,
    V3_H7 = Register_REG_V3_H7 as u32,
    V4_H0 = Register_REG_V4_H0 as u32,
    V4_H1 = Register_REG_V4_H1 as u32,
    V4_H2 = Register_REG_V4_H2 as u32,
    V4_H3 = Register_REG_V4_H3 as u32,
    V4_H4 = Register_REG_V4_H4 as u32,
    V4_H5 = Register_REG_V4_H5 as u32,
    V4_H6 = Register_REG_V4_H6 as u32,
    V4_H7 = Register_REG_V4_H7 as u32,
    V5_H0 = Register_REG_V5_H0 as u32,
    V5_H1 = Register_REG_V5_H1 as u32,
    V5_H2 = Register_REG_V5_H2 as u32,
    V5_H3 = Register_REG_V5_H3 as u32,
    V5_H4 = Register_REG_V5_H4 as u32,
    V5_H5 = Register_REG_V5_H5 as u32,
    V5_H6 = Register_REG_V5_H6 as u32,
    V5_H7 = Register_REG_V5_H7 as u32,
    V6_H0 = Register_REG_V6_H0 as u32,
    V6_H1 = Register_REG_V6_H1 as u32,
    V6_H2 = Register_REG_V6_H2 as u32,
    V6_H3 = Register_REG_V6_H3 as u32,
    V6_H4 = Register_REG_V6_H4 as u32,
    V6_H5 = Register_REG_V6_H5 as u32,
    V6_H6 = Register_REG_V6_H6 as u32,
    V6_H7 = Register_REG_V6_H7 as u32,
    V7_H0 = Register_REG_V7_H0 as u32,
    V7_H1 = Register_REG_V7_H1 as u32,
    V7_H2 = Register_REG_V7_H2 as u32,
    V7_H3 = Register_REG_V7_H3 as u32,
    V7_H4 = Register_REG_V7_H4 as u32,
    V7_H5 = Register_REG_V7_H5 as u32,
    V7_H6 = Register_REG_V7_H6 as u32,
    V7_H7 = Register_REG_V7_H7 as u32,
    V8_H0 = Register_REG_V8_H0 as u32,
    V8_H1 = Register_REG_V8_H1 as u32,
    V8_H2 = Register_REG_V8_H2 as u32,
    V8_H3 = Register_REG_V8_H3 as u32,
    V8_H4 = Register_REG_V8_H4 as u32,
    V8_H5 = Register_REG_V8_H5 as u32,
    V8_H6 = Register_REG_V8_H6 as u32,
    V8_H7 = Register_REG_V8_H7 as u32,
    V9_H0 = Register_REG_V9_H0 as u32,
    V9_H1 = Register_REG_V9_H1 as u32,
    V9_H2 = Register_REG_V9_H2 as u32,
    V9_H3 = Register_REG_V9_H3 as u32,
    V9_H4 = Register_REG_V9_H4 as u32,
    V9_H5 = Register_REG_V9_H5 as u32,
    V9_H6 = Register_REG_V9_H6 as u32,
    V9_H7 = Register_REG_V9_H7 as u32,
    V10_H0 = Register_REG_V10_H0 as u32,
    V10_H1 = Register_REG_V10_H1 as u32,
    V10_H2 = Register_REG_V10_H2 as u32,
    V10_H3 = Register_REG_V10_H3 as u32,
    V10_H4 = Register_REG_V10_H4 as u32,
    V10_H5 = Register_REG_V10_H5 as u32,
    V10_H6 = Register_REG_V10_H6 as u32,
    V10_H7 = Register_REG_V10_H7 as u32,
    V11_H0 = Register_REG_V11_H0 as u32,
    V11_H1 = Register_REG_V11_H1 as u32,
    V11_H2 = Register_REG_V11_H2 as u32,
    V11_H3 = Register_REG_V11_H3 as u32,
    V11_H4 = Register_REG_V11_H4 as u32,
    V11_H5 = Register_REG_V11_H5 as u32,
    V11_H6 = Register_REG_V11_H6 as u32,
    V11_H7 = Register_REG_V11_H7 as u32,
    V12_H0 = Register_REG_V12_H0 as u32,
    V12_H1 = Register_REG_V12_H1 as u32,
    V12_H2 = Register_REG_V12_H2 as u32,
    V12_H3 = Register_REG_V12_H3 as u32,
    V12_H4 = Register_REG_V12_H4 as u32,
    V12_H5 = Register_REG_V12_H5 as u32,
    V12_H6 = Register_REG_V12_H6 as u32,
    V12_H7 = Register_REG_V12_H7 as u32,
    V13_H0 = Register_REG_V13_H0 as u32,
    V13_H1 = Register_REG_V13_H1 as u32,
    V13_H2 = Register_REG_V13_H2 as u32,
    V13_H3 = Register_REG_V13_H3 as u32,
    V13_H4 = Register_REG_V13_H4 as u32,
    V13_H5 = Register_REG_V13_H5 as u32,
    V13_H6 = Register_REG_V13_H6 as u32,
    V13_H7 = Register_REG_V13_H7 as u32,
    V14_H0 = Register_REG_V14_H0 as u32,
    V14_H1 = Register_REG_V14_H1 as u32,
    V14_H2 = Register_REG_V14_H2 as u32,
    V14_H3 = Register_REG_V14_H3 as u32,
    V14_H4 = Register_REG_V14_H4 as u32,
    V14_H5 = Register_REG_V14_H5 as u32,
    V14_H6 = Register_REG_V14_H6 as u32,
    V14_H7 = Register_REG_V14_H7 as u32,
    V15_H0 = Register_REG_V15_H0 as u32,
    V15_H1 = Register_REG_V15_H1 as u32,
    V15_H2 = Register_REG_V15_H2 as u32,
    V15_H3 = Register_REG_V15_H3 as u32,
    V15_H4 = Register_REG_V15_H4 as u32,
    V15_H5 = Register_REG_V15_H5 as u32,
    V15_H6 = Register_REG_V15_H6 as u32,
    V15_H7 = Register_REG_V15_H7 as u32,
    V16_H0 = Register_REG_V16_H0 as u32,
    V16_H1 = Register_REG_V16_H1 as u32,
    V16_H2 = Register_REG_V16_H2 as u32,
    V16_H3 = Register_REG_V16_H3 as u32,
    V16_H4 = Register_REG_V16_H4 as u32,
    V16_H5 = Register_REG_V16_H5 as u32,
    V16_H6 = Register_REG_V16_H6 as u32,
    V16_H7 = Register_REG_V16_H7 as u32,
    V17_H0 = Register_REG_V17_H0 as u32,
    V17_H1 = Register_REG_V17_H1 as u32,
    V17_H2 = Register_REG_V17_H2 as u32,
    V17_H3 = Register_REG_V17_H3 as u32,
    V17_H4 = Register_REG_V17_H4 as u32,
    V17_H5 = Register_REG_V17_H5 as u32,
    V17_H6 = Register_REG_V17_H6 as u32,
    V17_H7 = Register_REG_V17_H7 as u32,
    V18_H0 = Register_REG_V18_H0 as u32,
    V18_H1 = Register_REG_V18_H1 as u32,
    V18_H2 = Register_REG_V18_H2 as u32,
    V18_H3 = Register_REG_V18_H3 as u32,
    V18_H4 = Register_REG_V18_H4 as u32,
    V18_H5 = Register_REG_V18_H5 as u32,
    V18_H6 = Register_REG_V18_H6 as u32,
    V18_H7 = Register_REG_V18_H7 as u32,
    V19_H0 = Register_REG_V19_H0 as u32,
    V19_H1 = Register_REG_V19_H1 as u32,
    V19_H2 = Register_REG_V19_H2 as u32,
    V19_H3 = Register_REG_V19_H3 as u32,
    V19_H4 = Register_REG_V19_H4 as u32,
    V19_H5 = Register_REG_V19_H5 as u32,
    V19_H6 = Register_REG_V19_H6 as u32,
    V19_H7 = Register_REG_V19_H7 as u32,
    V20_H0 = Register_REG_V20_H0 as u32,
    V20_H1 = Register_REG_V20_H1 as u32,
    V20_H2 = Register_REG_V20_H2 as u32,
    V20_H3 = Register_REG_V20_H3 as u32,
    V20_H4 = Register_REG_V20_H4 as u32,
    V20_H5 = Register_REG_V20_H5 as u32,
    V20_H6 = Register_REG_V20_H6 as u32,
    V20_H7 = Register_REG_V20_H7 as u32,
    V21_H0 = Register_REG_V21_H0 as u32,
    V21_H1 = Register_REG_V21_H1 as u32,
    V21_H2 = Register_REG_V21_H2 as u32,
    V21_H3 = Register_REG_V21_H3 as u32,
    V21_H4 = Register_REG_V21_H4 as u32,
    V21_H5 = Register_REG_V21_H5 as u32,
    V21_H6 = Register_REG_V21_H6 as u32,
    V21_H7 = Register_REG_V21_H7 as u32,
    V22_H0 = Register_REG_V22_H0 as u32,
    V22_H1 = Register_REG_V22_H1 as u32,
    V22_H2 = Register_REG_V22_H2 as u32,
    V22_H3 = Register_REG_V22_H3 as u32,
    V22_H4 = Register_REG_V22_H4 as u32,
    V22_H5 = Register_REG_V22_H5 as u32,
    V22_H6 = Register_REG_V22_H6 as u32,
    V22_H7 = Register_REG_V22_H7 as u32,
    V23_H0 = Register_REG_V23_H0 as u32,
    V23_H1 = Register_REG_V23_H1 as u32,
    V23_H2 = Register_REG_V23_H2 as u32,
    V23_H3 = Register_REG_V23_H3 as u32,
    V23_H4 = Register_REG_V23_H4 as u32,
    V23_H5 = Register_REG_V23_H5 as u32,
    V23_H6 = Register_REG_V23_H6 as u32,
    V23_H7 = Register_REG_V23_H7 as u32,
    V24_H0 = Register_REG_V24_H0 as u32,
    V24_H1 = Register_REG_V24_H1 as u32,
    V24_H2 = Register_REG_V24_H2 as u32,
    V24_H3 = Register_REG_V24_H3 as u32,
    V24_H4 = Register_REG_V24_H4 as u32,
    V24_H5 = Register_REG_V24_H5 as u32,
    V24_H6 = Register_REG_V24_H6 as u32,
    V24_H7 = Register_REG_V24_H7 as u32,
    V25_H0 = Register_REG_V25_H0 as u32,
    V25_H1 = Register_REG_V25_H1 as u32,
    V25_H2 = Register_REG_V25_H2 as u32,
    V25_H3 = Register_REG_V25_H3 as u32,
    V25_H4 = Register_REG_V25_H4 as u32,
    V25_H5 = Register_REG_V25_H5 as u32,
    V25_H6 = Register_REG_V25_H6 as u32,
    V25_H7 = Register_REG_V25_H7 as u32,
    V26_H0 = Register_REG_V26_H0 as u32,
    V26_H1 = Register_REG_V26_H1 as u32,
    V26_H2 = Register_REG_V26_H2 as u32,
    V26_H3 = Register_REG_V26_H3 as u32,
    V26_H4 = Register_REG_V26_H4 as u32,
    V26_H5 = Register_REG_V26_H5 as u32,
    V26_H6 = Register_REG_V26_H6 as u32,
    V26_H7 = Register_REG_V26_H7 as u32,
    V27_H0 = Register_REG_V27_H0 as u32,
    V27_H1 = Register_REG_V27_H1 as u32,
    V27_H2 = Register_REG_V27_H2 as u32,
    V27_H3 = Register_REG_V27_H3 as u32,
    V27_H4 = Register_REG_V27_H4 as u32,
    V27_H5 = Register_REG_V27_H5 as u32,
    V27_H6 = Register_REG_V27_H6 as u32,
    V27_H7 = Register_REG_V27_H7 as u32,
    V28_H0 = Register_REG_V28_H0 as u32,
    V28_H1 = Register_REG_V28_H1 as u32,
    V28_H2 = Register_REG_V28_H2 as u32,
    V28_H3 = Register_REG_V28_H3 as u32,
    V28_H4 = Register_REG_V28_H4 as u32,
    V28_H5 = Register_REG_V28_H5 as u32,
    V28_H6 = Register_REG_V28_H6 as u32,
    V28_H7 = Register_REG_V28_H7 as u32,
    V29_H0 = Register_REG_V29_H0 as u32,
    V29_H1 = Register_REG_V29_H1 as u32,
    V29_H2 = Register_REG_V29_H2 as u32,
    V29_H3 = Register_REG_V29_H3 as u32,
    V29_H4 = Register_REG_V29_H4 as u32,
    V29_H5 = Register_REG_V29_H5 as u32,
    V29_H6 = Register_REG_V29_H6 as u32,
    V29_H7 = Register_REG_V29_H7 as u32,
    V30_H0 = Register_REG_V30_H0 as u32,
    V30_H1 = Register_REG_V30_H1 as u32,
    V30_H2 = Register_REG_V30_H2 as u32,
    V30_H3 = Register_REG_V30_H3 as u32,
    V30_H4 = Register_REG_V30_H4 as u32,
    V30_H5 = Register_REG_V30_H5 as u32,
    V30_H6 = Register_REG_V30_H6 as u32,
    V30_H7 = Register_REG_V30_H7 as u32,
    V31_H0 = Register_REG_V31_H0 as u32,
    V31_H1 = Register_REG_V31_H1 as u32,
    V31_H2 = Register_REG_V31_H2 as u32,
    V31_H3 = Register_REG_V31_H3 as u32,
    V31_H4 = Register_REG_V31_H4 as u32,
    V31_H5 = Register_REG_V31_H5 as u32,
    V31_H6 = Register_REG_V31_H6 as u32,
    V31_H7 = Register_REG_V31_H7 as u32,
    V0_S0 = Register_REG_V0_S0 as u32,
    V0_S1 = Register_REG_V0_S1 as u32,
    V0_S2 = Register_REG_V0_S2 as u32,
    V0_S3 = Register_REG_V0_S3 as u32,
    V1_S0 = Register_REG_V1_S0 as u32,
    V1_S1 = Register_REG_V1_S1 as u32,
    V1_S2 = Register_REG_V1_S2 as u32,
    V1_S3 = Register_REG_V1_S3 as u32,
    V2_S0 = Register_REG_V2_S0 as u32,
    V2_S1 = Register_REG_V2_S1 as u32,
    V2_S2 = Register_REG_V2_S2 as u32,
    V2_S3 = Register_REG_V2_S3 as u32,
    V3_S0 = Register_REG_V3_S0 as u32,
    V3_S1 = Register_REG_V3_S1 as u32,
    V3_S2 = Register_REG_V3_S2 as u32,
    V3_S3 = Register_REG_V3_S3 as u32,
    V4_S0 = Register_REG_V4_S0 as u32,
    V4_S1 = Register_REG_V4_S1 as u32,
    V4_S2 = Register_REG_V4_S2 as u32,
    V4_S3 = Register_REG_V4_S3 as u32,
    V5_S0 = Register_REG_V5_S0 as u32,
    V5_S1 = Register_REG_V5_S1 as u32,
    V5_S2 = Register_REG_V5_S2 as u32,
    V5_S3 = Register_REG_V5_S3 as u32,
    V6_S0 = Register_REG_V6_S0 as u32,
    V6_S1 = Register_REG_V6_S1 as u32,
    V6_S2 = Register_REG_V6_S2 as u32,
    V6_S3 = Register_REG_V6_S3 as u32,
    V7_S0 = Register_REG_V7_S0 as u32,
    V7_S1 = Register_REG_V7_S1 as u32,
    V7_S2 = Register_REG_V7_S2 as u32,
    V7_S3 = Register_REG_V7_S3 as u32,
    V8_S0 = Register_REG_V8_S0 as u32,
    V8_S1 = Register_REG_V8_S1 as u32,
    V8_S2 = Register_REG_V8_S2 as u32,
    V8_S3 = Register_REG_V8_S3 as u32,
    V9_S0 = Register_REG_V9_S0 as u32,
    V9_S1 = Register_REG_V9_S1 as u32,
    V9_S2 = Register_REG_V9_S2 as u32,
    V9_S3 = Register_REG_V9_S3 as u32,
    V10_S0 = Register_REG_V10_S0 as u32,
    V10_S1 = Register_REG_V10_S1 as u32,
    V10_S2 = Register_REG_V10_S2 as u32,
    V10_S3 = Register_REG_V10_S3 as u32,
    V11_S0 = Register_REG_V11_S0 as u32,
    V11_S1 = Register_REG_V11_S1 as u32,
    V11_S2 = Register_REG_V11_S2 as u32,
    V11_S3 = Register_REG_V11_S3 as u32,
    V12_S0 = Register_REG_V12_S0 as u32,
    V12_S1 = Register_REG_V12_S1 as u32,
    V12_S2 = Register_REG_V12_S2 as u32,
    V12_S3 = Register_REG_V12_S3 as u32,
    V13_S0 = Register_REG_V13_S0 as u32,
    V13_S1 = Register_REG_V13_S1 as u32,
    V13_S2 = Register_REG_V13_S2 as u32,
    V13_S3 = Register_REG_V13_S3 as u32,
    V14_S0 = Register_REG_V14_S0 as u32,
    V14_S1 = Register_REG_V14_S1 as u32,
    V14_S2 = Register_REG_V14_S2 as u32,
    V14_S3 = Register_REG_V14_S3 as u32,
    V15_S0 = Register_REG_V15_S0 as u32,
    V15_S1 = Register_REG_V15_S1 as u32,
    V15_S2 = Register_REG_V15_S2 as u32,
    V15_S3 = Register_REG_V15_S3 as u32,
    V16_S0 = Register_REG_V16_S0 as u32,
    V16_S1 = Register_REG_V16_S1 as u32,
    V16_S2 = Register_REG_V16_S2 as u32,
    V16_S3 = Register_REG_V16_S3 as u32,
    V17_S0 = Register_REG_V17_S0 as u32,
    V17_S1 = Register_REG_V17_S1 as u32,
    V17_S2 = Register_REG_V17_S2 as u32,
    V17_S3 = Register_REG_V17_S3 as u32,
    V18_S0 = Register_REG_V18_S0 as u32,
    V18_S1 = Register_REG_V18_S1 as u32,
    V18_S2 = Register_REG_V18_S2 as u32,
    V18_S3 = Register_REG_V18_S3 as u32,
    V19_S0 = Register_REG_V19_S0 as u32,
    V19_S1 = Register_REG_V19_S1 as u32,
    V19_S2 = Register_REG_V19_S2 as u32,
    V19_S3 = Register_REG_V19_S3 as u32,
    V20_S0 = Register_REG_V20_S0 as u32,
    V20_S1 = Register_REG_V20_S1 as u32,
    V20_S2 = Register_REG_V20_S2 as u32,
    V20_S3 = Register_REG_V20_S3 as u32,
    V21_S0 = Register_REG_V21_S0 as u32,
    V21_S1 = Register_REG_V21_S1 as u32,
    V21_S2 = Register_REG_V21_S2 as u32,
    V21_S3 = Register_REG_V21_S3 as u32,
    V22_S0 = Register_REG_V22_S0 as u32,
    V22_S1 = Register_REG_V22_S1 as u32,
    V22_S2 = Register_REG_V22_S2 as u32,
    V22_S3 = Register_REG_V22_S3 as u32,
    V23_S0 = Register_REG_V23_S0 as u32,
    V23_S1 = Register_REG_V23_S1 as u32,
    V23_S2 = Register_REG_V23_S2 as u32,
    V23_S3 = Register_REG_V23_S3 as u32,
    V24_S0 = Register_REG_V24_S0 as u32,
    V24_S1 = Register_REG_V24_S1 as u32,
    V24_S2 = Register_REG_V24_S2 as u32,
    V24_S3 = Register_REG_V24_S3 as u32,
    V25_S0 = Register_REG_V25_S0 as u32,
    V25_S1 = Register_REG_V25_S1 as u32,
    V25_S2 = Register_REG_V25_S2 as u32,
    V25_S3 = Register_REG_V25_S3 as u32,
    V26_S0 = Register_REG_V26_S0 as u32,
    V26_S1 = Register_REG_V26_S1 as u32,
    V26_S2 = Register_REG_V26_S2 as u32,
    V26_S3 = Register_REG_V26_S3 as u32,
    V27_S0 = Register_REG_V27_S0 as u32,
    V27_S1 = Register_REG_V27_S1 as u32,
    V27_S2 = Register_REG_V27_S2 as u32,
    V27_S3 = Register_REG_V27_S3 as u32,
    V28_S0 = Register_REG_V28_S0 as u32,
    V28_S1 = Register_REG_V28_S1 as u32,
    V28_S2 = Register_REG_V28_S2 as u32,
    V28_S3 = Register_REG_V28_S3 as u32,
    V29_S0 = Register_REG_V29_S0 as u32,
    V29_S1 = Register_REG_V29_S1 as u32,
    V29_S2 = Register_REG_V29_S2 as u32,
    V29_S3 = Register_REG_V29_S3 as u32,
    V30_S0 = Register_REG_V30_S0 as u32,
    V30_S1 = Register_REG_V30_S1 as u32,
    V30_S2 = Register_REG_V30_S2 as u32,
    V30_S3 = Register_REG_V30_S3 as u32,
    V31_S0 = Register_REG_V31_S0 as u32,
    V31_S1 = Register_REG_V31_S1 as u32,
    V31_S2 = Register_REG_V31_S2 as u32,
    V31_S3 = Register_REG_V31_S3 as u32,
    V0_D0 = Register_REG_V0_D0 as u32,
    V0_D1 = Register_REG_V0_D1 as u32,
    V1_D0 = Register_REG_V1_D0 as u32,
    V1_D1 = Register_REG_V1_D1 as u32,
    V2_D0 = Register_REG_V2_D0 as u32,
    V2_D1 = Register_REG_V2_D1 as u32,
    V3_D0 = Register_REG_V3_D0 as u32,
    V3_D1 = Register_REG_V3_D1 as u32,
    V4_D0 = Register_REG_V4_D0 as u32,
    V4_D1 = Register_REG_V4_D1 as u32,
    V5_D0 = Register_REG_V5_D0 as u32,
    V5_D1 = Register_REG_V5_D1 as u32,
    V6_D0 = Register_REG_V6_D0 as u32,
    V6_D1 = Register_REG_V6_D1 as u32,
    V7_D0 = Register_REG_V7_D0 as u32,
    V7_D1 = Register_REG_V7_D1 as u32,
    V8_D0 = Register_REG_V8_D0 as u32,
    V8_D1 = Register_REG_V8_D1 as u32,
    V9_D0 = Register_REG_V9_D0 as u32,
    V9_D1 = Register_REG_V9_D1 as u32,
    V10_D0 = Register_REG_V10_D0 as u32,
    V10_D1 = Register_REG_V10_D1 as u32,
    V11_D0 = Register_REG_V11_D0 as u32,
    V11_D1 = Register_REG_V11_D1 as u32,
    V12_D0 = Register_REG_V12_D0 as u32,
    V12_D1 = Register_REG_V12_D1 as u32,
    V13_D0 = Register_REG_V13_D0 as u32,
    V13_D1 = Register_REG_V13_D1 as u32,
    V14_D0 = Register_REG_V14_D0 as u32,
    V14_D1 = Register_REG_V14_D1 as u32,
    V15_D0 = Register_REG_V15_D0 as u32,
    V15_D1 = Register_REG_V15_D1 as u32,
    V16_D0 = Register_REG_V16_D0 as u32,
    V16_D1 = Register_REG_V16_D1 as u32,
    V17_D0 = Register_REG_V17_D0 as u32,
    V17_D1 = Register_REG_V17_D1 as u32,
    V18_D0 = Register_REG_V18_D0 as u32,
    V18_D1 = Register_REG_V18_D1 as u32,
    V19_D0 = Register_REG_V19_D0 as u32,
    V19_D1 = Register_REG_V19_D1 as u32,
    V20_D0 = Register_REG_V20_D0 as u32,
    V20_D1 = Register_REG_V20_D1 as u32,
    V21_D0 = Register_REG_V21_D0 as u32,
    V21_D1 = Register_REG_V21_D1 as u32,
    V22_D0 = Register_REG_V22_D0 as u32,
    V22_D1 = Register_REG_V22_D1 as u32,
    V23_D0 = Register_REG_V23_D0 as u32,
    V23_D1 = Register_REG_V23_D1 as u32,
    V24_D0 = Register_REG_V24_D0 as u32,
    V24_D1 = Register_REG_V24_D1 as u32,
    V25_D0 = Register_REG_V25_D0 as u32,
    V25_D1 = Register_REG_V25_D1 as u32,
    V26_D0 = Register_REG_V26_D0 as u32,
    V26_D1 = Register_REG_V26_D1 as u32,
    V27_D0 = Register_REG_V27_D0 as u32,
    V27_D1 = Register_REG_V27_D1 as u32,
    V28_D0 = Register_REG_V28_D0 as u32,
    V28_D1 = Register_REG_V28_D1 as u32,
    V29_D0 = Register_REG_V29_D0 as u32,
    V29_D1 = Register_REG_V29_D1 as u32,
    V30_D0 = Register_REG_V30_D0 as u32,
    V30_D1 = Register_REG_V30_D1 as u32,
    V31_D0 = Register_REG_V31_D0 as u32,
    V31_D1 = Register_REG_V31_D1 as u32,
    Z0 = Register_REG_Z0 as u32,
    Z1 = Register_REG_Z1 as u32,
    Z2 = Register_REG_Z2 as u32,
    Z3 = Register_REG_Z3 as u32,
    Z4 = Register_REG_Z4 as u32,
    Z5 = Register_REG_Z5 as u32,
    Z6 = Register_REG_Z6 as u32,
    Z7 = Register_REG_Z7 as u32,
    Z8 = Register_REG_Z8 as u32,
    Z9 = Register_REG_Z9 as u32,
    Z10 = Register_REG_Z10 as u32,
    Z11 = Register_REG_Z11 as u32,
    Z12 = Register_REG_Z12 as u32,
    Z13 = Register_REG_Z13 as u32,
    Z14 = Register_REG_Z14 as u32,
    Z15 = Register_REG_Z15 as u32,
    Z16 = Register_REG_Z16 as u32,
    Z17 = Register_REG_Z17 as u32,
    Z18 = Register_REG_Z18 as u32,
    Z19 = Register_REG_Z19 as u32,
    Z20 = Register_REG_Z20 as u32,
    Z21 = Register_REG_Z21 as u32,
    Z22 = Register_REG_Z22 as u32,
    Z23 = Register_REG_Z23 as u32,
    Z24 = Register_REG_Z24 as u32,
    Z25 = Register_REG_Z25 as u32,
    Z26 = Register_REG_Z26 as u32,
    Z27 = Register_REG_Z27 as u32,
    Z28 = Register_REG_Z28 as u32,
    Z29 = Register_REG_Z29 as u32,
    Z30 = Register_REG_Z30 as u32,
    ZZR = Register_REG_ZZR as u32,
    Z31 = Register_REG_Z31 as u32,
    P0 = Register_REG_P0 as u32,
    P1 = Register_REG_P1 as u32,
    P2 = Register_REG_P2 as u32,
    P3 = Register_REG_P3 as u32,
    P4 = Register_REG_P4 as u32,
    P5 = Register_REG_P5 as u32,
    P6 = Register_REG_P6 as u32,
    P7 = Register_REG_P7 as u32,
    P8 = Register_REG_P8 as u32,
    P9 = Register_REG_P9 as u32,
    P10 = Register_REG_P10 as u32,
    P11 = Register_REG_P11 as u32,
    P12 = Register_REG_P12 as u32,
    P13 = Register_REG_P13 as u32,
    P14 = Register_REG_P14 as u32,
    P15 = Register_REG_P15 as u32,
    P16 = Register_REG_P16 as u32,
    P17 = Register_REG_P17 as u32,
    P18 = Register_REG_P18 as u32,
    P19 = Register_REG_P19 as u32,
    P20 = Register_REG_P20 as u32,
    P21 = Register_REG_P21 as u32,
    P22 = Register_REG_P22 as u32,
    P23 = Register_REG_P23 as u32,
    P24 = Register_REG_P24 as u32,
    P25 = Register_REG_P25 as u32,
    P26 = Register_REG_P26 as u32,
    P27 = Register_REG_P27 as u32,
    P28 = Register_REG_P28 as u32,
    P29 = Register_REG_P29 as u32,
    P30 = Register_REG_P30 as u32,
    P31 = Register_REG_P31 as u32,
    PF0 = Register_REG_PF0 as u32,
    PF1 = Register_REG_PF1 as u32,
    PF2 = Register_REG_PF2 as u32,
    PF3 = Register_REG_PF3 as u32,
    PF4 = Register_REG_PF4 as u32,
    PF5 = Register_REG_PF5 as u32,
    PF6 = Register_REG_PF6 as u32,
    PF7 = Register_REG_PF7 as u32,
    PF8 = Register_REG_PF8 as u32,
    PF9 = Register_REG_PF9 as u32,
    PF10 = Register_REG_PF10 as u32,
    PF11 = Register_REG_PF11 as u32,
    PF12 = Register_REG_PF12 as u32,
    PF13 = Register_REG_PF13 as u32,
    PF14 = Register_REG_PF14 as u32,
    PF15 = Register_REG_PF15 as u32,
    PF16 = Register_REG_PF16 as u32,
    PF17 = Register_REG_PF17 as u32,
    PF18 = Register_REG_PF18 as u32,
    PF19 = Register_REG_PF19 as u32,
    PF20 = Register_REG_PF20 as u32,
    PF21 = Register_REG_PF21 as u32,
    PF22 = Register_REG_PF22 as u32,
    PF23 = Register_REG_PF23 as u32,
    PF24 = Register_REG_PF24 as u32,
    PF25 = Register_REG_PF25 as u32,
    PF26 = Register_REG_PF26 as u32,
    PF27 = Register_REG_PF27 as u32,
    PF28 = Register_REG_PF28 as u32,
    PF29 = Register_REG_PF29 as u32,
    PF30 = Register_REG_PF30 as u32,
    PF31 = Register_REG_PF31 as u32,
}

// Compile-time guard: the Rust `Reg` enum must stay in sync with the C
// `Register` enum from bad64_sys. If upstream adds a register after PF31,
// REG_END moves past `REG_PF31 + 1` and this assertion fails the build,
// flagging that new variants need to be mirrored here.
const_assert_eq!(Register_REG_END, Register_REG_PF31 + 1);

impl Reg {
    /// Returns the raw register id in the representation expected by
    /// `bad64_sys` on the current target.
    ///
    /// The C `enum Register` has a platform-dependent underlying type
    /// (signed `int` under the Windows ABI, unsigned elsewhere), so the
    /// generated bindings take `i32` on Windows and `u32` on other
    /// targets. Centralizing the cast here avoids duplicating every
    /// method body under `#[cfg]`.
    #[cfg(target_os = "windows")]
    fn sys_id(&self) -> i32 {
        // Every variant's discriminant comes from the C enum, so the
        // conversion cannot fail.
        self.to_i32().unwrap()
    }

    /// See the Windows variant above; non-Windows bindings use `u32`.
    #[cfg(not(target_os = "windows"))]
    fn sys_id(&self) -> u32 {
        // Every variant's discriminant comes from the C enum, so the
        // conversion cannot fail.
        self.to_u32().unwrap()
    }

    /// Returns the register name
    ///
    /// # Examples
    /// ```
    /// use bad64::Reg;
    ///
    /// assert_eq!(Reg::X0.name(), "x0");
    /// ```
    pub fn name(&self) -> &'static str {
        // NOTE(review): returning `&'static str` assumes
        // `get_register_name` points into a static string table in the C
        // library — consistent with how the original code used it, but
        // worth confirming against the bad64_sys headers.
        unsafe { CStr::from_ptr(bad64_sys::get_register_name(self.sys_id())) }
            .to_str()
            .expect("register names from bad64_sys are valid UTF-8")
    }

    /// Get register size
    ///
    /// # Examples
    /// ```
    /// use bad64::Reg;
    ///
    /// assert_eq!(Reg::X0.size(), 8);
    /// assert_eq!(Reg::V0.size(), 16);
    /// ```
    ///
    /// ```
    /// use bad64::{decode, Operand, Reg};
    ///
    /// // add x0, x1, #0x41  - "\x20\x04\x01\x91"
    /// let decoded = decode(0x91010420, 0x1000).unwrap();
    ///
    /// let op = decoded.operand(0).unwrap();
    ///
    /// assert_eq!(op, Operand::Reg { reg: Reg::X0, shift: None, arrspec: None });
    ///
    /// match op {
    ///     Operand::Reg { reg: r, .. } => assert_eq!(r.size(), 8),
    ///     _ => assert!(false),
    /// };
    /// ```
    pub fn size(&self) -> usize {
        // Size in bytes as reported by the C library.
        unsafe { bad64_sys::get_register_size(self.sys_id()) as usize }
    }
}