/****************************************************************
DEFINES
****************************************************************/
.equ MAP_LAST, 12
rstatus .req R4 @ format : 0xff800000
reg_d_bank .req R4 @ format : 0x000000ll
reg_a .req R5 @ format : 0xhhll0000 or 0xll000000
reg_d .req R6 @ format : 0xhhll0000
reg_p_bank .req R6 @ format : 0x000000ll
reg_x .req R7 @ format : 0xhhll0000 or 0xll000000
reg_s .req R8 @ format : 0x0000hhll
reg_y .req R9 @ format : 0xhhll0000 or 0xll000000
rpc .req R10 @ 32bits address
reg_cycles .req R11 @ 32bits counter
regpcbase .req R12 @ 32bits address
rscratch	.req R0		@ format : 0xhhll0000 for data and calculations, or the return value of S9xGetByte/S9xGetWord
regopcode .req R0 @ format : 0x000000ll
rscratch2	.req R1		@ format : 0x0000hhll, scratch value for calculations
rscratch3 .req R2 @
rscratch4 .req R3 @ ??????
@ used for SBC opcode
rscratch9 .req R10 @ ??????
reg_cpu_var .req R14
@ not allocated to emulator state :
@ R13 = stack pointer (used as a plain stack by the macros below)
@ R15 = pc (sic!)
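@ Note that R4 is double-booked (rstatus / reg_d_bank), as is R6 (reg_d / reg_p_bank) :
@ LOAD_REGS and SAVE_REGS below pack both values into the one physical register
@ (data bank in the low byte of R4, program bank in the low byte of R6).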
/*
.equ Carry 1
.equ Zero 2
.equ IRQ 4
.equ Decimal 8
.equ IndexFlag 16
.equ MemoryFlag 32
.equ Overflow 64
.equ Negative 128
.equ Emulation 256*/
.equ STATUS_SHIFTER, 24
.equ MASK_APU_EXECUTING,(1<<(STATUS_SHIFTER-4))
.equ MASK_SHUTDOWN, (1<<(STATUS_SHIFTER-3))
.equ MASK_BRANCHSKIP, (1<<(STATUS_SHIFTER-2))
.equ MASK_EMUL, (1<<(STATUS_SHIFTER-1))
.equ MASK_SHIFTER_CARRY, (STATUS_SHIFTER+1)
.equ MASK_CARRY, (1<<(STATUS_SHIFTER)) @ 0
.equ MASK_ZERO, (2<<(STATUS_SHIFTER)) @ 1
.equ MASK_IRQ, (4<<(STATUS_SHIFTER)) @ 2
.equ MASK_DECIMAL, (8<<(STATUS_SHIFTER)) @ 3
.equ MASK_INDEX, (16<<(STATUS_SHIFTER)) @ 4 @ 1
.equ MASK_MEM, (32<<(STATUS_SHIFTER)) @ 5 @ 2
.equ MASK_OVERFLOW, (64<<(STATUS_SHIFTER)) @ 6 @ 4
.equ MASK_NEG, (128<<(STATUS_SHIFTER))@ 7 @ 8
.equ ONE_CYCLE, 6
.equ SLOW_ONE_CYCLE, 8
.equ NOSA1, 1
.equ NMI_FLAG, (1 << 7)
.equ IRQ_PENDING_FLAG, (1 << 11)
.equ SCAN_KEYS_FLAG, (1 << 4)
.equ MEMMAP_BLOCK_SIZE, (0x1000)
.equ MEMMAP_SHIFT, 12
.equ MEMMAP_MASK, (0xFFF)
.equ MEMSPEED_MASK, (0xF0000)
.equ MEMSPEED_SHIFT, 16
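@ Resulting layout of rstatus (R4), as implied by the masks above and by LOAD_REGS :
@   bits 31..24 : 65c816 P flags (N V M X D I Z C), i.e. P << STATUS_SHIFTER
@   bit  23     : emulation mode              (MASK_EMUL)
@   bit  22     : branch-skip pending         (MASK_BRANCHSKIP)
@   bit  21     : shutdown / WaitAddress set  (MASK_SHUTDOWN)
@   bit  20     : APU executing               (MASK_APU_EXECUTING)
@   bits 19..16 : cached CPU.MemSpeed         (MEMSPEED_MASK / MEMSPEED_SHIFT)
@   bits  7..0  : data bank (reg_d_bank shares R4 with rstatus)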
/****************************************************************
MACROS
****************************************************************/
@ #include "os9x_65c816_mac_gen.h"
/*****************************************************************/
/* Offset in SCPUState structure */
/*****************************************************************/
.equ Flags_ofs, 0
.equ BranchSkip_ofs, 4
.equ NMIActive_ofs, 5
.equ IRQActive_ofs, 6
.equ WaitingForInterrupt_ofs, 7
.equ RPB_ofs, 8
.equ RDB_ofs, 9
.equ RP_ofs, 10
.equ RA_ofs, 12
.equ RAH_ofs, 13
.equ RD_ofs, 14
.equ RX_ofs, 16
.equ RS_ofs, 18
.equ RY_ofs, 20
@.equ RPC_ofs, 22
.equ PC_ofs, 24
.equ Cycles_ofs, 28
.equ PCBase_ofs, 32
.equ PCAtOpcodeStart_ofs, 36
.equ WaitAddress_ofs, 40
.equ WaitCounter_ofs, 44
.equ NextEvent_ofs, 48
.equ V_Counter_ofs, 52
.equ MemSpeed_ofs, 56
.equ MemSpeedx2_ofs, 60
.equ FastROMSpeed_ofs, 64
.equ AutoSaveTimer_ofs, 68
.equ NMITriggerPoint_ofs, 72
.equ NMICycleCount_ofs, 76
.equ IRQCycleCount_ofs, 80
.equ InDMA_ofs, 84
.equ WhichEvent, 85
.equ SRAMModified_ofs, 86
.equ BRKTriggered_ofs, 87
.equ asm_OPTABLE_ofs, 88
.equ TriedInterleavedMode2_ofs, 92
.equ Map_ofs, 96
.equ WriteMap_ofs, 100
.equ MemorySpeed_ofs, 104
.equ BlockIsRAM_ofs, 108
.equ SRAM, 112
.equ BWRAM, 116
.equ SRAMMask, 120
.equ APUExecuting_ofs, 124
.equ PALMOS_R9_ofs, 132
.equ PALMOS_R10_ofs, 136
@ notaz
.equ APU_Cycles, 140
.equ DSPGet_ofs, 144
.equ DSPSet_ofs, 148
.equ rstatus_ofs, 152
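@ The offsets above (and the CMemory offsets below) are hard-coded mirrors of the
@ C-side structures ; they must stay in sync with the corresponding C headers, or
@ every load/store in this file will silently hit the wrong field.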
/*****************************************************************/
/* Offset in CMemory structure */
/*****************************************************************/
.equ _sram, 12
.equ _bwram, 16
.equ _fillram, 20
.equ _c4ram, 24
/*****************************************************************/
/* prepare */
.macro PREPARE_C_CALL
STMFD R13!,{R12,R14}
.endm
.macro PREPARE_C_CALL_R0
STMFD R13!,{R0,R12,R14}
.endm
.macro PREPARE_C_CALL_R0R1
STMFD R13!,{R0,R1,R12,R14}
.endm
.macro PREPARE_C_CALL_LIGHT
STMFD R13!,{R14}
.endm
.macro PREPARE_C_CALL_LIGHTR12
STMFD R13!,{R12,R14}
.endm
/* restore */
.macro RESTORE_C_CALL
LDMFD R13!,{R12,R14}
.endm
.macro RESTORE_C_CALL_R0
LDMFD R13!,{R0,R12,R14}
.endm
.macro RESTORE_C_CALL_R1
LDMFD R13!,{R1,R12,R14}
.endm
.macro RESTORE_C_CALL_LIGHT
LDMFD R13!,{R14}
.endm
.macro RESTORE_C_CALL_LIGHTR12
LDMFD R13!,{R12,R14}
.endm
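@ The PREPARE/RESTORE pairs bracket calls into C code : they spill R12 (regpcbase)
@ and/or R14 (reg_cpu_var), which the ARM C calling convention is free to clobber,
@ and the _R0/_R0R1 variants also preserve scratch values that must survive the call.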
@ --------------
.macro LOAD_REGS
@ notaz
add r0,reg_cpu_var,#8
ldmia r0,{r1,reg_a,reg_x,reg_y,rpc,reg_cycles,regpcbase}
@ rstatus (P) & reg_d_bank
mov reg_d_bank,r1,lsl #16
mov reg_d_bank,reg_d_bank,lsr #24
mov r0,r1,lsr #16
orrs rstatus, rstatus, r0,lsl #STATUS_SHIFTER @ 24
@ if Carry set, then EMULATION bit was set
orrcs rstatus,rstatus,#MASK_EMUL
@ reg_d & reg_p_bank
mov reg_d,reg_a,lsr #16
mov reg_d,reg_d,lsl #8
orr reg_d,reg_d,r1,lsl #24
mov reg_d,reg_d,ror #24 @ 0xdddd00pb
@ reg_x, reg_s
mov reg_s,reg_x,lsr #16
@ Shift X,Y & A according to the current mode (INDEX, MEMORY bits)
tst rstatus,#MASK_INDEX
movne reg_x,reg_x,lsl #24
movne reg_y,reg_y,lsl #24
moveq reg_x,reg_x,lsl #16
moveq reg_y,reg_y,lsl #16
tst rstatus,#MASK_MEM
movne reg_a,reg_a,lsl #24
moveq reg_a,reg_a,lsl #16
@-- load MemSpeed into rstatus
ldr r0, [reg_cpu_var, #MemSpeed_ofs]
bic rstatus, rstatus, #MEMSPEED_MASK
orr rstatus, rstatus, r0, lsl #MEMSPEED_SHIFT
@-- load BranchSkip into rstatus
ldrb r0, [reg_cpu_var, #BranchSkip_ofs]
movs r0, r0
biceq rstatus, rstatus, #MASK_BRANCHSKIP
orrne rstatus, rstatus, #MASK_BRANCHSKIP
@-- load Shutdown
ldr r0, [reg_cpu_var, #WaitAddress_ofs]
movs r0, r0
biceq rstatus, rstatus, #MASK_SHUTDOWN
orrne rstatus, rstatus, #MASK_SHUTDOWN
/*
@ reg_d & reg_p_bank share the same register
LDRB reg_p_bank,[reg_cpu_var,#RPB_ofs]
LDRH rscratch,[reg_cpu_var,#RD_ofs]
ORR reg_d,reg_d,rscratch, LSL #16
@ rstatus & reg_d_bank share the same register
LDRB reg_d_bank,[reg_cpu_var,#RDB_ofs]
LDRH rscratch,[reg_cpu_var,#RP_ofs]
ORRS rstatus, rstatus, rscratch,LSL #STATUS_SHIFTER @ 24
@ if Carry set, then EMULATION bit was set
ORRCS rstatus,rstatus,#MASK_EMUL
@
LDRH reg_a,[reg_cpu_var,#RA_ofs]
LDRH reg_x,[reg_cpu_var,#RX_ofs]
LDRH reg_y,[reg_cpu_var,#RY_ofs]
LDRH reg_s,[reg_cpu_var,#RS_ofs]
@ Shift X,Y & A according to the current mode (INDEX, MEMORY bits)
TST rstatus,#MASK_INDEX
MOVNE reg_x,reg_x,LSL #24
MOVNE reg_y,reg_y,LSL #24
MOVEQ reg_x,reg_x,LSL #16
MOVEQ reg_y,reg_y,LSL #16
TST rstatus,#MASK_MEM
MOVNE reg_a,reg_a,LSL #24
MOVEQ reg_a,reg_a,LSL #16
LDR regpcbase,[reg_cpu_var,#PCBase_ofs]
LDR rpc,[reg_cpu_var,#PC_ofs]
LDR reg_cycles,[reg_cpu_var,#Cycles_ofs]
*/
.endm
.macro SAVE_REGS
@-- Save Shutdown flag
tst rstatus, #MASK_SHUTDOWN
moveq r0, #0
movne r0, #1
str r0, [reg_cpu_var, #WaitAddress_ofs]
@-- Save BranchSkip flag
tst rstatus, #MASK_BRANCHSKIP
moveq r0, #0
movne r0, #1
strb r0, [reg_cpu_var, #BranchSkip_ofs]
@-- Save MemSpeed2x for compatibility with other cores
ldr r0, [reg_cpu_var, #MemSpeed_ofs]
mov r0, r0, lsl #1
str r0, [reg_cpu_var, #MemSpeedx2_ofs]
@ notaz
@ reg_p_bank, reg_d_bank and rstatus
mov r1, rstatus, lsr #16
orr r1, r1, reg_p_bank, lsl #24
movs r1, r1, lsr #8
orrcs r1, r1, #0x100 @ EMULATION bit
orr r1, r1, reg_d_bank, lsl #24
mov r1, r1, ror #16
@ reg_a, reg_d
tst rstatus,#MASK_MEM
ldrneh r0, [reg_cpu_var,#RA_ofs]
bicne r0, r0,#0xFF
orrne reg_a, r0, reg_a,lsr #24
moveq reg_a, reg_a, lsr #16
mov reg_d, reg_d, lsr #16
orr reg_a, reg_a, reg_d, lsl #16
@ Shift X&Y according to the current mode (INDEX, MEMORY bits)
tst rstatus,#MASK_INDEX
movne reg_x,reg_x,LSR #24
movne reg_y,reg_y,LSR #24
moveq reg_x,reg_x,LSR #16
moveq reg_y,reg_y,LSR #16
@ reg_x, reg_s
orr reg_x, reg_x, reg_s, lsl #16
@ store
add r0,reg_cpu_var,#8
stmia r0,{r1,reg_a,reg_x,reg_y,rpc,reg_cycles,regpcbase}
/*
@ reg_d & reg_p_bank is same register
STRB reg_p_bank,[reg_cpu_var,#RPB_ofs]
MOV rscratch,reg_d, LSR #16
STRH rscratch,[reg_cpu_var,#RD_ofs]
@ rstatus & reg_d_bank is same register
STRB reg_d_bank,[reg_cpu_var,#RDB_ofs]
MOVS rscratch, rstatus, LSR #STATUS_SHIFTER
ORRCS rscratch,rscratch,#0x100 @ EMULATION bit
STRH rscratch,[reg_cpu_var,#RP_ofs]
@
@ Shift X,Y & A according to the current mode (INDEX, MEMORY bits)
TST rstatus,#MASK_INDEX
MOVNE rscratch,reg_x,LSR #24
MOVNE rscratch2,reg_y,LSR #24
MOVEQ rscratch,reg_x,LSR #16
MOVEQ rscratch2,reg_y,LSR #16
STRH rscratch,[reg_cpu_var,#RX_ofs]
STRH rscratch2,[reg_cpu_var,#RY_ofs]
TST rstatus,#MASK_MEM
LDRNEH rscratch,[reg_cpu_var,#RA_ofs]
BICNE rscratch,rscratch,#0xFF
ORRNE rscratch,rscratch,reg_a,LSR #24
MOVEQ rscratch,reg_a,LSR #16
STRH rscratch,[reg_cpu_var,#RA_ofs]
STRH reg_s,[reg_cpu_var,#RS_ofs]
STR regpcbase,[reg_cpu_var,#PCBase_ofs]
STR rpc,[reg_cpu_var,#PC_ofs]
STR reg_cycles,[reg_cpu_var,#Cycles_ofs]
*/
.endm
/*****************************************************************/
.macro ADD1CYCLE
add reg_cycles,reg_cycles, #ONE_CYCLE
.endm
.macro ADD1CYCLENE
addne reg_cycles,reg_cycles, #ONE_CYCLE
.endm
.macro ADD1CYCLEEQ
addeq reg_cycles,reg_cycles, #ONE_CYCLE
.endm
.macro ADD2CYCLE
add reg_cycles,reg_cycles, #(ONE_CYCLE*2)
.endm
.macro ADD2CYCLENE
addne reg_cycles,reg_cycles, #(ONE_CYCLE*2)
.endm
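@ Cycle accounting : ONE_CYCLE (6) and SLOW_ONE_CYCLE (8) are master-clock ticks.
@ The *MEM variants also charge the per-access memory speed that LOAD_REGS caches
@ in bits 16..19 of rstatus : "LSR #MEMSPEED_SHIFT" adds one access,
@ "LSR #(MEMSPEED_SHIFT - 1)" adds two.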
.macro ADD2CYCLE2MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #(ONE_CYCLE*2)
@add reg_cycles, reg_cycles, rscratch, LSL #1
add reg_cycles, reg_cycles, rscratch, LSR #(MEMSPEED_SHIFT - 1)
.endm
.macro ADD2CYCLE1MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #(ONE_CYCLE*2)
@add reg_cycles, reg_cycles, rscratch
add reg_cycles, reg_cycles, rscratch, LSR #MEMSPEED_SHIFT
.endm
.macro ADD3CYCLE
add reg_cycles,reg_cycles, #(ONE_CYCLE*3)
.endm
.macro ADD1CYCLE1MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #ONE_CYCLE
@add reg_cycles, reg_cycles, rscratch
add reg_cycles, reg_cycles, rscratch, LSR #MEMSPEED_SHIFT
.endm
.macro ADD1CYCLE2MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #ONE_CYCLE
@add reg_cycles, reg_cycles, rscratch, LSL #1
add reg_cycles, reg_cycles, rscratch, LSR #(MEMSPEED_SHIFT - 1)
.endm
.macro ADD1MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
@add reg_cycles, reg_cycles, rscratch
add reg_cycles, reg_cycles, rscratch, LSR #MEMSPEED_SHIFT
.endm
.macro ADD2MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
@add reg_cycles, reg_cycles, rscratch, LSL #1
add reg_cycles, reg_cycles, rscratch, LSR #(MEMSPEED_SHIFT - 1)
.endm
.macro ADD3MEM
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
@add reg_cycles, reg_cycles, rscratch
add reg_cycles, reg_cycles, rscratch, LSR #MEMSPEED_SHIFT
@add reg_cycles, reg_cycles, rscratch, LSL #1
add reg_cycles, reg_cycles, rscratch, LSR #(MEMSPEED_SHIFT - 1)
.endm
/**************/
/*****************************************************************/
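@ *X variants : identical to the macros above, plus \x extra ticks scaled by NOSA1
@ (currently 1 ; presumably meant to become 0 in an SA-1-aware build).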
.macro ADD1CYCLENEX x
addne reg_cycles,reg_cycles, #(ONE_CYCLE+(NOSA1*\x))
.endm
.macro ADD1CYCLEEQX x
addeq reg_cycles,reg_cycles, #(ONE_CYCLE+(NOSA1*\x))
.endm
.macro ADD2CYCLENEX x
addne reg_cycles,reg_cycles, #(ONE_CYCLE*2+(NOSA1*\x))
.endm
.macro ADD1CYCLEX x
add reg_cycles,reg_cycles, #(ONE_CYCLE+(NOSA1*\x))
.endm
.macro ADD2CYCLEX x
add reg_cycles,reg_cycles, #(ONE_CYCLE*2+(NOSA1*\x))
.endm
.macro ADD2CYCLE2MEMX x
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #(ONE_CYCLE*2+(NOSA1*\x))
@add reg_cycles, reg_cycles, rscratch, LSL #1
add reg_cycles, reg_cycles, rscratch, LSR #(MEMSPEED_SHIFT - 1)
.endm
.macro ADD2CYCLE1MEMX x
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #(ONE_CYCLE*2+(NOSA1*\x))
@add reg_cycles, reg_cycles, rscratch
add reg_cycles, reg_cycles, rscratch, LSR #MEMSPEED_SHIFT
.endm
.macro ADD3CYCLEX x
add reg_cycles,reg_cycles, #(ONE_CYCLE*3+(NOSA1*\x))
.endm
.macro ADD1CYCLE1MEMX x
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #(ONE_CYCLE+(NOSA1*\x))
@add reg_cycles, reg_cycles, rscratch
add reg_cycles, reg_cycles, rscratch, LSR #MEMSPEED_SHIFT
.endm
.macro ADD1CYCLE2MEMX x
@ldr rscratch,[reg_cpu_var,#MemSpeed_ofs]
and rscratch, rstatus, #MEMSPEED_MASK
add reg_cycles,reg_cycles, #(ONE_CYCLE+(NOSA1*\x))
@add reg_cycles, reg_cycles, rscratch, LSL #1
add reg_cycles, reg_cycles, rscratch, LSR #(MEMSPEED_SHIFT - 1)
.endm
/**************/
.macro ClearDecimal
BIC rstatus,rstatus,#MASK_DECIMAL
.endm
.macro SetDecimal
ORR rstatus,rstatus,#MASK_DECIMAL
.endm
.macro SetIRQ
ORR rstatus,rstatus,#MASK_IRQ
.endm
.macro ClearIRQ
BIC rstatus,rstatus,#MASK_IRQ
.endm
.macro CPUShutdown
@ if (Settings.Shutdown && CPU.PC == CPU.WaitAddress)
tst rstatus, #MASK_SHUTDOWN
beq 5431f
LDR rscratch,[reg_cpu_var,#WaitAddress_ofs]
CMP rpc,rscratch
BNE 5431f
@ if (CPU.WaitCounter == 0 && !(CPU.Flags & (IRQ_PENDING_FLAG | NMI_FLAG)))
LDR rscratch,[reg_cpu_var,#Flags_ofs]
LDR rscratch2,[reg_cpu_var,#WaitCounter_ofs]
TST rscratch,#(IRQ_PENDING_FLAG|NMI_FLAG)
@BNE 5432f
@MOVS rscratch2,rscratch2
MOVEQS rscratch2,rscratch2
BNE 5432f
@ CPU.WaitAddress = NULL;
@MOV rscratch,#0
@STR rscratch,[reg_cpu_var,#WaitAddress_ofs]
bic rstatus, rstatus, #MASK_SHUTDOWN
@ if (Settings.SA1)
@ S9xSA1ExecuteDuringSleep (); : TODO
@ CPU.Cycles = CPU.NextEvent;
LDR reg_cycles,[reg_cpu_var,#NextEvent_ofs]
@LDR r0,[reg_cpu_var,#APUExecuting_ofs]
@MOVS r0,r0
@BEQ 5431f
@ if (IAPU.APUExecuting)
/* {
ICPU.CPUExecuting = FALSE;
do
{
APU_EXECUTE1();
} while (APU.Cycles < CPU.NextEvent);
ICPU.CPUExecuting = TRUE;
}
*/
asmAPU_EXECUTE2
B 5431f
@.pool
5432:
/* else
if (CPU.WaitCounter >= 2)
CPU.WaitCounter = 1;
else
CPU.WaitCounter--;
*/
CMP rscratch2,#1
MOVHI rscratch2,#1
@SUBLS rscratch2,rscratch2,#1
MOVLS rscratch2,#0
STR rscratch2,[reg_cpu_var,#WaitCounter_ofs]
5431:
.endm
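@ BranchCheck0/1/2 implement the branch-skip shortcut used by the branch opcodes :
@ when MASK_BRANCHSKIP is set it is cleared and, if the current opcode offset
@ (rpc - regpcbase) is already past OpAddress (passed in rscratch), control jumps
@ to local label 1111, which the calling opcode must define as its skip/exit point.
@ The three variants are identical at present.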
.macro BranchCheck0
/* in : rscratch = OpAddress */
/* destroys rscratch2 */
tst rstatus, #MASK_BRANCHSKIP
@beq 1110f
bicne rstatus, rstatus, #MASK_BRANCHSKIP
subne rscratch2, rpc, regpcbase
cmpne rscratch2, rscratch
bhi 1111f
@LDRB rscratch2,[reg_cpu_var,#BranchSkip_ofs]
@MOVS rscratch2,rscratch2
@BEQ 1110f
@MOV rscratch2,#0
@STRB rscratch2,[reg_cpu_var,#BranchSkip_ofs]
@SUB rscratch2,rpc,regpcbase
@ if( CPU.PC - CPU.PCBase > OpAddress) return;
@CMP rscratch2,rscratch
@BHI 1111f
1110:
.endm
.macro BranchCheck1
/* in : rscratch = OpAddress */
/* destroys rscratch2 */
tst rstatus, #MASK_BRANCHSKIP
@beq 1110f
bicne rstatus, rstatus, #MASK_BRANCHSKIP
subne rscratch2, rpc, regpcbase
cmpne rscratch2, rscratch
bhi 1111f
@LDRB rscratch2,[reg_cpu_var,#BranchSkip_ofs]
@MOVS rscratch2,rscratch2
@BEQ 1110f
@MOV rscratch2,#0
@STRB rscratch2,[reg_cpu_var,#BranchSkip_ofs]
@SUB rscratch2,rpc,regpcbase
@ if( CPU.PC - CPU.PCBase > OpAddress) return;
@CMP rscratch2,rscratch
@BHI 1111f
1110:
.endm
.macro BranchCheck2
/* in : rscratch = OpAddress */
/* destroys rscratch2 */
tst rstatus, #MASK_BRANCHSKIP
@beq 1110f
bicne rstatus, rstatus, #MASK_BRANCHSKIP
subne rscratch2, rpc, regpcbase
cmpne rscratch2, rscratch
bhi 1111f
@LDRB rscratch2,[reg_cpu_var,#BranchSkip_ofs]
@MOVS rscratch2,rscratch2
@BEQ 1110f
@MOV rscratch2,#0
@STRB rscratch2,[reg_cpu_var,#BranchSkip_ofs]
@SUB rscratch2,rpc,regpcbase
@ if( CPU.PC - CPU.PCBase > OpAddress) return;
@CMP rscratch2,rscratch
@BHI 1111f
1110:
.endm
.macro S9xSetPCBase
@ in : rscratch (0x00hhmmll)
@PREPARE_C_CALL
@BL asm_S9xSetPCBase
@RESTORE_C_CALL
@LDR rpc,[reg_cpu_var,#PC_ofs]
@LDR regpcbase,[reg_cpu_var,#PCBase_ofs]
mov r3, pc @ r3 = address just past the B below (pc reads as '.' + 8)
b asmS9xSetPCBase
@ asmS9xSetPCBase comes back here, presumably via r3
.endm
.macro S9xFixCycles
TST rstatus,#MASK_EMUL
LDRNE rscratch, = jumptable1 @ Mode 0 : M=1,X=1
BNE 991111f
@ EMULATION=0
TST rstatus,#MASK_MEM
BEQ 991112f
@ MEMORY=1
TST rstatus,#MASK_INDEX
@ INDEX=1 @ Mode 0 : M=1,X=1
LDRNE rscratch, = jumptable1
@ INDEX=0 @ Mode 1 : M=1,X=0
LDREQ rscratch, = jumptable2
B 991111f
991112: @ MEMORY=0
TST rstatus,#MASK_INDEX
@ INDEX=1 @ Mode 3 : M=0,X=1
LDRNE rscratch, = jumptable4
@ INDEX=0 @ Mode 2 : M=0,X=0
LDREQ rscratch, = jumptable3
991111:
STR rscratch,[reg_cpu_var,#asm_OPTABLE_ofs]
.endm
/*
.macro S9xOpcode_NMI
SAVE_REGS
PREPARE_C_CALL_LIGHT
BL asm_S9xOpcode_NMI
RESTORE_C_CALL_LIGHT
LOAD_REGS
.endm
.macro S9xOpcode_IRQ
SAVE_REGS
PREPARE_C_CALL_LIGHT
BL asm_S9xOpcode_IRQ
RESTORE_C_CALL_LIGHT
LOAD_REGS
.endm
*/
@-->
.macro S9xDoHBlankProcessing
SAVE_REGS
PREPARE_C_CALL_LIGHT
@ BL asm_S9xDoHBlankProcessing
BL S9xDoHBlankProcessing @ call the C routine directly instead of the asm_ wrapper
RESTORE_C_CALL_LIGHT
LOAD_REGS
.endm
/********************************/
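@ EXEC_OP : record the opcode start PC, fetch the opcode byte at rpc (post-
@ incremented), charge one memory access from the MemSpeed field cached in
@ rstatus, then jump through the 256-entry handler table that S9xFixCycles
@ selected for the current M/X/E mode.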
.macro EXEC_OP
LDR R1,[reg_cpu_var,#asm_OPTABLE_ofs]
STR rpc,[reg_cpu_var,#PCAtOpcodeStart_ofs]
@ADD1MEM
@ldr r3, [reg_cpu_var, #MemSpeed_ofs]
ldrb r0, [rpc], #1
@add reg_cycles, reg_cycles, r3
and r3, rstatus, #MEMSPEED_MASK
add reg_cycles, reg_cycles, r3, lsr #MEMSPEED_SHIFT
@LDRB R0, [rpc], #1
LDR PC, [R1,R0, LSL #2]
.endm
.macro NEXTOPCODE
LDR rscratch,[reg_cpu_var,#NextEvent_ofs]
CMP reg_cycles,rscratch
BLT mainLoop
S9xDoHBlankProcessing
B mainLoop
.pool
.endm
@ #include "os9x_65c816_mac_mem.h"
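@ Memory-access helpers. Every S9xGet*/S9xSet* macro below uses the same calling
@ trick : "STMFD R13!,{PC}" pushes a return address just past the following B
@ (storing PC yields the STMFD's own address +8 or +12 depending on the ARM core,
@ which is why a MOV R0,R0 no-op follows the B, so either landing point is safe),
@ and the asmS9xGetByte/asmS9xGetWord/asmS9xSetByte/asmS9xSetWord routines are
@ expected to return by popping that address ; any trailing instructions then
@ massage R0 into the register format noted in each macro's header.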
.macro S9xGetWord
@ in : rscratch (0x00hhmmll)
@ out : rscratch (0xhhll0000)
STMFD R13!,{PC} @ Push return address
B asmS9xGetWord
MOV R0,R0
MOV R0, R0, LSL #16
.endm
.macro S9xGetWordLow
@ in : rscratch (0x00hhmmll)
@ out : rscratch (0x0000hhll)
STMFD R13!,{PC} @ Push return address
B asmS9xGetWord
MOV R0,R0
.endm
.macro S9xGetWordRegStatus reg
@ in : rscratch (0x00hhmmll)
@ out : reg (0xhhll0000)
@ flags have to be updated with read value
STMFD R13!,{PC} @ Push return address
B asmS9xGetWord
MOV R0,R0
MOVS \reg, R0, LSL #16
.endm
.macro S9xGetWordRegNS reg
@ in : rscratch (0x00hhmmll)
@ out : reg (0xhhll0000)
@ DOES NOT DESTROY rscratch (R0)
STMFD R13!,{R0}
STMFD R13!,{PC} @ Push return address
B asmS9xGetWord
MOV R0,R0
MOV \reg, R0, LSL #16
LDMFD R13!,{R0}
.endm
.macro S9xGetWordLowRegNS reg
@ in : rscratch (0x00hhmmll)
@ out : reg (0x0000hhll)
@ DOES NOT DESTROY rscratch (R0)
STMFD R13!,{R0}
STMFD R13!,{PC} @ Push return address
B asmS9xGetWord
MOV R0,R0
MOV \reg, R0
LDMFD R13!,{R0}
.endm
.macro S9xGetByte
@ in : rscratch (0x00hhmmll)
@ out : rscratch (0xll000000)
STMFD R13!,{PC} @ Push return address
B asmS9xGetByte
MOV R0,R0
MOV R0, R0, LSL #24
.endm
.macro S9xGetByteLow
@ in : rscratch (0x00hhmmll)
@ out : rscratch (0x000000ll)
STMFD R13!,{PC}
B asmS9xGetByte
MOV R0,R0
.endm
.macro S9xGetByteRegStatus reg
@ in : rscratch (0x00hhmmll)
@ out : reg (0xll000000)
@ flags have to be updated with read value
STMFD R13!,{PC} @ Push return address
B asmS9xGetByte
MOV R0,R0
MOVS \reg, R0, LSL #24
.endm
.macro S9xGetByteRegNS reg
@ in : rscratch (0x00hhmmll)
@ out : reg (0xll000000)
@ DOES NOT DESTROY rscratch (R0)
STMFD R13!,{R0}
STMFD R13!,{PC} @ Push return address
B asmS9xGetByte
MOV R0,R0
MOVS \reg, R0, LSL #24
LDMFD R13!,{R0}
.endm
.macro S9xGetByteLowRegNS reg
@ in : rscratch (0x00hhmmll)
@ out : reg (0x000000ll)
@ DOES NOT DESTROY rscratch (R0)
STMFD R13!,{R0}
STMFD R13!,{PC} @ Push return address
B asmS9xGetByte
MOV R0,R0
MOVS \reg, R0
LDMFD R13!,{R0}
.endm
.macro S9xSetWord regValue
@ in : regValue (0xhhll0000)
@ in : rscratch=address (0x00hhmmll)
MOV R1,\regValue, LSR #16
STMFD R13!,{PC} @ Push return address
B asmS9xSetWord
MOV R0,R0
.endm
.macro S9xSetWordZero
@ in : rscratch=address (0x00hhmmll)
MOV R1,#0
STMFD R13!,{PC} @ Push return address
B asmS9xSetWord
MOV R0,R0
.endm
.macro S9xSetWordLow regValue
@ in : regValue (0x0000hhll)
@ in : rscratch=address (0x00hhmmll)
MOV R1,\regValue
STMFD R13!,{PC} @ Push return address
B asmS9xSetWord
MOV R0,R0
.endm
.macro S9xSetByte regValue
@ in : regValue (0xll000000)
@ in : rscratch=address (0x00hhmmll)
MOV R1,\regValue, LSR #24
STMFD R13!,{PC} @ Push return address
B asmS9xSetByte
MOV R0,R0
.endm
.macro S9xSetByteZero
@ in : rscratch=address (0x00hhmmll)
MOV R1,#0
STMFD R13!,{PC} @ Push return address
B asmS9xSetByte
MOV R0,R0
.endm
.macro S9xSetByteLow regValue
@ in : regValue (0x000000ll)
@ in : rscratch=address (0x00hhmmll)
MOV R1,\regValue
STMFD R13!,{PC} @ Push return address
B asmS9xSetByte
MOV R0,R0
.endm
.macro doMainLoop
@ save registers
STMFD R13!,{R4-R11,LR}
@ init pointer to CPUvar structure
MOV reg_cpu_var,R0
@ init registers
LOAD_REGS
@ get cpu mode from flag and init jump table
S9xFixCycles
mainLoop:
@ APU Execute
asmAPU_EXECUTE
@ Test Flags
LDR rscratch,[reg_cpu_var,#Flags_ofs]
MOVS rscratch,rscratch
BNE CPUFlags_set @ If flags => check for irq/nmi/scan_keys...
EXEC_OP @ Execute next opcode
CPUFlags_set: @ Check flags (!=0)
TST rscratch,#NMI_FLAG @ Check NMI
BEQ CPUFlagsNMI_FLAG_cleared
LDR rscratch2,[reg_cpu_var,#NMICycleCount_ofs]
SUBS rscratch2,rscratch2,#1
STR rscratch2,[reg_cpu_var,#NMICycleCount_ofs]
BNE CPUFlagsNMI_FLAG_cleared
BIC rscratch,rscratch,#NMI_FLAG
STR rscratch,[reg_cpu_var,#Flags_ofs]
LDRB rscratch2,[reg_cpu_var,#WaitingForInterrupt_ofs]
MOVS rscratch2,rscratch2
BEQ NotCPUaitingForInterruptNMI
MOV rscratch2,#0
ADD rpc,rpc,#1
STRB rscratch2,[reg_cpu_var,#WaitingForInterrupt_ofs]
NotCPUaitingForInterruptNMI:
S9xOpcode_NMI
LDR rscratch,[reg_cpu_var,#Flags_ofs]
CPUFlagsNMI_FLAG_cleared:
TST rscratch,#IRQ_PENDING_FLAG @ Check IRQ_PENDING_FLAG
BEQ CPUFlagsIRQ_PENDING_FLAG_cleared
LDR rscratch2,[reg_cpu_var,#IRQCycleCount_ofs]
MOVS rscratch2,rscratch2
BNE CPUIRQCycleCount_NotZero
LDRB rscratch2,[reg_cpu_var,#WaitingForInterrupt_ofs]
MOVS rscratch2,rscratch2
BEQ NotCPUaitingForInterruptIRQ
MOV rscratch2,#0
ADD rpc,rpc,#1
STRB rscratch2,[reg_cpu_var,#WaitingForInterrupt_ofs]
NotCPUaitingForInterruptIRQ:
LDRB rscratch2,[reg_cpu_var,#IRQActive_ofs]
MOVS rscratch2,rscratch2
BEQ CPUIRQActive_cleared
TST rstatus,#MASK_IRQ
BNE CPUFlagsIRQ_PENDING_FLAG_cleared
S9xOpcode_IRQ
LDR rscratch,[reg_cpu_var,#Flags_ofs]
B CPUFlagsIRQ_PENDING_FLAG_cleared
CPUIRQActive_cleared:
BIC rscratch,rscratch,#IRQ_PENDING_FLAG
STR rscratch,[reg_cpu_var,#Flags_ofs]
B CPUFlagsIRQ_PENDING_FLAG_cleared
CPUIRQCycleCount_NotZero:
SUB rscratch2,rscratch2,#1
STR rscratch2,[reg_cpu_var,#IRQCycleCount_ofs]
CPUFlagsIRQ_PENDING_FLAG_cleared:
TST rscratch,#SCAN_KEYS_FLAG @ Check SCAN_KEYS_FLAG
BNE endmainLoop
EXEC_OP @ Execute next opcode
endmainLoop:
/*Registers.PC = CPU.PC - CPU.PCBase;
S9xPackStatus ();
APURegisters.PC = IAPU.PC - IAPU.RAM;
S9xAPUPackStatus ();
if (CPU.Flags & SCAN_KEYS_FLAG)
{
S9xSyncSpeed ();
CPU.Flags &= ~SCAN_KEYS_FLAG;
} */
/********end*/
SAVE_REGS
LDMFD R13!,{R4-R11,pc}
/*MOV PC,LR*/
/*bx lr*/
.endm
.macro doTestOpcode
@ save registers
STMFD R13!,{R4-R11,LR}
@ init pointer to CPUvar structure
MOV reg_cpu_var,R0
@ init registers
LOAD_REGS
@ get cpu mode from flag and init jump table
S9xFixCycles
EXEC_OP
.endm