author     notaz    2013-03-31 18:50:21 +0300
committer  notaz    2013-04-01 01:19:19 +0300
commit     b1f89e6f247c9b11c745cc1a7201cce5fb4fe08f (patch)
tree       2414c92ec96e6a4b54364aeece92550f26fb51be
parent     4ae83961a97c97cf6bf7e775cc08337b292b9853 (diff)
drc: rework linkage_arm for better assembler compatibility
-rw-r--r--  libpcsxcore/new_dynarec/linkage_arm.S     | 345
-rw-r--r--  libpcsxcore/new_dynarec/linkage_offsets.h |  38
2 files changed, 173 insertions(+), 210 deletions(-)
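
The gist of the rework: instead of defining each dynarec variable by hand (next_interupt = dynarec_local + 64, followed by .type/.size for every symbol) and computing operand offsets as assembler symbol differences (#reg_cop0+48-dynarec_local), the offsets now live in a plain C-preprocessor header, linkage_offsets.h, and the per-variable boilerplate is generated by a DRC_VAR macro. Presumably the symbol-difference expressions in instruction operands are what other assemblers handle poorly (the commit title cites assembler compatibility), while the LO_* constants are plain numbers. As a rough sketch, one macro invocation expands to the same declarations the old code spelled out by hand, assuming ESYM() (from arm_features.h, not shown here) is a no-op outside Mach-O:

    /* DRC_VAR(next_interupt, 4), with LO_next_interupt defined as 64: */
    next_interupt = dynarec_local + LO_next_interupt  /* = dynarec_local + 64, as before */
            .global next_interupt
            .type   next_interupt, %object
            .size   next_interupt, 4
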
diff --git a/libpcsxcore/new_dynarec/linkage_arm.S b/libpcsxcore/new_dynarec/linkage_arm.S
index 5a76f8e..e31b9b4 100644
--- a/libpcsxcore/new_dynarec/linkage_arm.S
+++ b/libpcsxcore/new_dynarec/linkage_arm.S
@@ -1,7 +1,7 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* linkage_arm.s for PCSX *
* Copyright (C) 2009-2011 Ari64 *
- * Copyright (C) 2010-2011 Gražvydas "notaz" Ignotas *
+ * Copyright (C) 2010-2013 Gražvydas "notaz" Ignotas *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
@@ -20,151 +20,76 @@
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
#include "arm_features.h"
-
-
- .global dynarec_local
- .global reg
- .global hi
- .global lo
- .global reg_cop0
- .global reg_cop2d
- .global reg_cop2c
- .global FCR0
- .global FCR31
- .global next_interupt
- .global cycle_count
- .global last_count
- .global pending_exception
- .global pcaddr
- .global stop
- .global invc_ptr
- .global address
- .global branch_target
- .global PC
- .global mini_ht
- .global restore_candidate
- /* psx */
- .global psxRegs
- .global mem_rtab
- .global mem_wtab
- .global psxH_ptr
- .global zeromem_ptr
- .global inv_code_start
- .global inv_code_end
- .global rcnts
+#include "linkage_offsets.h"
+
+
+#ifdef __MACH__
+#define dynarec_local ESYM(dynarec_local)
+#define add_link ESYM(add_link)
+#define new_recompile_block ESYM(new_recompile_block)
+#define get_addr ESYM(get_addr)
+#define get_addr_ht ESYM(get_addr_ht)
+#define clean_blocks ESYM(clean_blocks)
+#define gen_interupt ESYM(gen_interupt)
+#define psxException ESYM(psxException)
+#define execI ESYM(execI)
+#define invalidate_addr ESYM(invalidate_addr)
+#endif
.bss
.align 4
+ .global dynarec_local
.type dynarec_local, %object
- .size dynarec_local, dynarec_local_end-dynarec_local
+ .size dynarec_local, LO_dynarec_local_size
dynarec_local:
- .space dynarec_local_end-dynarec_local
-next_interupt = dynarec_local + 64
- .type next_interupt, %object
- .size next_interupt, 4
-cycle_count = next_interupt + 4
- .type cycle_count, %object
- .size cycle_count, 4
-last_count = cycle_count + 4
- .type last_count, %object
- .size last_count, 4
-pending_exception = last_count + 4
- .type pending_exception, %object
- .size pending_exception, 4
-stop = pending_exception + 4
- .type stop, %object
- .size stop, 4
-invc_ptr = stop + 4
- .type invc_ptr, %object
- .size invc_ptr, 4
-address = invc_ptr + 4
- .type address, %object
- .size address, 4
-psxRegs = address + 4
+ .space LO_dynarec_local_size
+
+#define DRC_VAR_(name, vname, size_) \
+ vname = dynarec_local + LO_##name; \
+ .global vname; \
+ .type vname, %object; \
+ .size vname, size_
+
+#define DRC_VAR(name, size_) \
+ DRC_VAR_(name, ESYM(name), size_)
+
+DRC_VAR(next_interupt, 4)
+DRC_VAR(cycle_count, 4)
+DRC_VAR(last_count, 4)
+DRC_VAR(pending_exception, 4)
+DRC_VAR(stop, 4)
+DRC_VAR(invc_ptr, 4)
+DRC_VAR(address, 4)
+DRC_VAR(psxRegs, LO_psxRegs_end - LO_psxRegs)
/* psxRegs */
- .type psxRegs, %object
- .size psxRegs, psxRegs_end-psxRegs
-reg = psxRegs
- .type reg, %object
- .size reg, 128
-lo = reg + 128
- .type lo, %object
- .size lo, 4
-hi = lo + 4
- .type hi, %object
- .size hi, 4
-reg_cop0 = hi + 4
- .type reg_cop0, %object
- .size reg_cop0, 128
-reg_cop2d = reg_cop0 + 128
- .type reg_cop2d, %object
- .size reg_cop2d, 128
-reg_cop2c = reg_cop2d + 128
- .type reg_cop2c, %object
- .size reg_cop2c, 128
-PC = reg_cop2c + 128
-pcaddr = PC
- .type PC, %object
- .size PC, 4
-code = PC + 4
- .type code, %object
- .size code, 4
-cycle = code + 4
- .type cycle, %object
- .size cycle, 4
-interrupt = cycle + 4
- .type interrupt, %object
- .size interrupt, 4
-intCycle = interrupt + 4
- .type intCycle, %object
- .size intCycle, 256
-psxRegs_end = intCycle + 256
-
-rcnts = psxRegs_end
- .type rcnts, %object
- .size rcnts, 7*4*4
-rcnts_end = rcnts + 7*4*4
-
-mem_rtab = rcnts_end
- .type mem_rtab, %object
- .size mem_rtab, 4
-mem_wtab = mem_rtab + 4
- .type mem_wtab, %object
- .size mem_wtab, 4
-psxH_ptr = mem_wtab + 4
- .type psxH_ptr, %object
- .size psxH_ptr, 4
-zeromem_ptr = psxH_ptr + 4
- .type zeromem_ptr, %object
- .size zeromem_ptr, 4
-inv_code_start = zeromem_ptr + 4
- .type inv_code_start, %object
- .size inv_code_start, 4
-inv_code_end = inv_code_start + 4
- .type inv_code_end, %object
- .size inv_code_end, 4
-branch_target = inv_code_end + 4
- .type branch_target, %object
- .size branch_target, 4
-align0 = branch_target + 4 /* unused/alignment */
- .type align0, %object
- .size align0, 16
-mini_ht = align0 + 16
- .type mini_ht, %object
- .size mini_ht, 256
-restore_candidate = mini_ht + 256
- .type restore_candidate, %object
- .size restore_candidate, 512
-dynarec_local_end = restore_candidate + 512
+DRC_VAR(reg, 128)
+DRC_VAR(lo, 4)
+DRC_VAR(hi, 4)
+DRC_VAR(reg_cop0, 128)
+DRC_VAR(reg_cop2d, 128)
+DRC_VAR(reg_cop2c, 128)
+DRC_VAR(pcaddr, 4)
+@DRC_VAR(code, 4)
+@DRC_VAR(cycle, 4)
+@DRC_VAR(interrupt, 4)
+@DRC_VAR(intCycle, 256)
+
+DRC_VAR(rcnts, 7*4*4)
+DRC_VAR(mem_rtab, 4)
+DRC_VAR(mem_wtab, 4)
+DRC_VAR(psxH_ptr, 4)
+DRC_VAR(zeromem_ptr, 4)
+DRC_VAR(inv_code_start, 4)
+DRC_VAR(inv_code_end, 4)
+DRC_VAR(branch_target, 4)
+@DRC_VAR(align0, 16) /* unused/alignment */
+DRC_VAR(mini_ht, 256)
+DRC_VAR(restore_candidate, 512)
/* unused */
-FCR0 = align0
- .type FCR0, %object
- .size FCR0, 4
-FCR31 = align0
- .type FCR31, %object
- .size FCR31, 4
+DRC_VAR(FCR0, 4)
+DRC_VAR(FCR31, 4)
#ifndef HAVE_ARMV5
.macro blx rd
@@ -314,21 +239,21 @@ FUNCTION(exec_pagefault):
/* r0 = instruction pointer */
/* r1 = fault address */
/* r2 = cause */
- ldr r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
+ ldr r3, [fp, #LO_reg_cop0+48] /* Status */
mvn r6, #0xF000000F
- ldr r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
+ ldr r4, [fp, #LO_reg_cop0+16] /* Context */
bic r6, r6, #0x0F800000
- str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
+ str r0, [fp, #LO_reg_cop0+56] /* EPC */
orr r3, r3, #2
- str r1, [fp, #reg_cop0+32-dynarec_local] /* BadVAddr */
+ str r1, [fp, #LO_reg_cop0+32] /* BadVAddr */
bic r4, r4, r6
- str r3, [fp, #reg_cop0+48-dynarec_local] /* Status */
+ str r3, [fp, #LO_reg_cop0+48] /* Status */
and r5, r6, r1, lsr #9
- str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
+ str r2, [fp, #LO_reg_cop0+52] /* Cause */
and r1, r1, r6, lsl #9
- str r1, [fp, #reg_cop0+40-dynarec_local] /* EntryHi */
+ str r1, [fp, #LO_reg_cop0+40] /* EntryHi */
orr r4, r4, r5
- str r4, [fp, #reg_cop0+16-dynarec_local] /* Context */
+ str r4, [fp, #LO_reg_cop0+16] /* Context */
mov r0, #0x80000000
bl get_addr_ht
mov pc, r0
@@ -433,16 +358,16 @@ FUNCTION(jump_vaddr):
ldr r2, [r1, #8]
teq r2, r0
ldreq pc, [r1, #12]
- str r10, [fp, #cycle_count-dynarec_local]
+ str r10, [fp, #LO_cycle_count]
bl get_addr
- ldr r10, [fp, #cycle_count-dynarec_local]
+ ldr r10, [fp, #LO_cycle_count]
mov pc, r0
.size jump_vaddr, .-jump_vaddr
.align 2
FUNCTION(verify_code_ds):
- str r8, [fp, #branch_target-dynarec_local]
+ str r8, [fp, #LO_branch_target]
FUNCTION(verify_code_vm):
FUNCTION(verify_code):
/* r1 = source */
@@ -472,7 +397,7 @@ FUNCTION(verify_code):
.D3:
teqeq r4, r5
.D4:
- ldr r8, [fp, #branch_target-dynarec_local]
+ ldr r8, [fp, #LO_branch_target]
moveq pc, lr
.D5:
bl get_addr
@@ -482,15 +407,15 @@ FUNCTION(verify_code):
.align 2
FUNCTION(cc_interrupt):
- ldr r0, [fp, #last_count-dynarec_local]
+ ldr r0, [fp, #LO_last_count]
mov r1, #0
mov r2, #0x1fc
add r10, r0, r10
- str r1, [fp, #pending_exception-dynarec_local]
+ str r1, [fp, #LO_pending_exception]
and r2, r2, r10, lsr #17
- add r3, fp, #restore_candidate-dynarec_local
- str r10, [fp, #cycle-dynarec_local] /* PCSX cycles */
-@@ str r10, [fp, #reg_cop0+36-dynarec_local] /* Count */
+ add r3, fp, #LO_restore_candidate
+ str r10, [fp, #LO_cycle] /* PCSX cycles */
+@@ str r10, [fp, #LO_reg_cop0+36] /* Count */
ldr r4, [r2, r3]
mov r10, lr
tst r4, r4
@@ -498,18 +423,18 @@ FUNCTION(cc_interrupt):
.E1:
bl gen_interupt
mov lr, r10
- ldr r10, [fp, #cycle-dynarec_local]
- ldr r0, [fp, #next_interupt-dynarec_local]
- ldr r1, [fp, #pending_exception-dynarec_local]
- ldr r2, [fp, #stop-dynarec_local]
- str r0, [fp, #last_count-dynarec_local]
+ ldr r10, [fp, #LO_cycle]
+ ldr r0, [fp, #LO_next_interupt]
+ ldr r1, [fp, #LO_pending_exception]
+ ldr r2, [fp, #LO_stop]
+ str r0, [fp, #LO_last_count]
sub r10, r10, r0
tst r2, r2
ldmnefd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
tst r1, r1
moveq pc, lr
.E2:
- ldr r0, [fp, #pcaddr-dynarec_local]
+ ldr r0, [fp, #LO_pcaddr]
bl get_addr_ht
mov pc, r0
.E4:
@@ -528,7 +453,7 @@ FUNCTION(cc_interrupt):
.align 2
FUNCTION(do_interrupt):
- ldr r0, [fp, #pcaddr-dynarec_local]
+ ldr r0, [fp, #LO_pcaddr]
bl get_addr_ht
add r10, r10, #2
mov pc, r0
@@ -538,13 +463,13 @@ FUNCTION(do_interrupt):
FUNCTION(fp_exception):
mov r2, #0x10000000
.E7:
- ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
+ ldr r1, [fp, #LO_reg_cop0+48] /* Status */
mov r3, #0x80000000
- str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
+ str r0, [fp, #LO_reg_cop0+56] /* EPC */
orr r1, #2
add r2, r2, #0x2c
- str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
- str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
+ str r1, [fp, #LO_reg_cop0+48] /* Status */
+ str r2, [fp, #LO_reg_cop0+52] /* Cause */
add r0, r3, #0x80
bl get_addr_ht
mov pc, r0
@@ -557,13 +482,13 @@ FUNCTION(fp_exception_ds):
.align 2
FUNCTION(jump_syscall):
- ldr r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
+ ldr r1, [fp, #LO_reg_cop0+48] /* Status */
mov r3, #0x80000000
- str r0, [fp, #reg_cop0+56-dynarec_local] /* EPC */
+ str r0, [fp, #LO_reg_cop0+56] /* EPC */
orr r1, #2
mov r2, #0x20
- str r1, [fp, #reg_cop0+48-dynarec_local] /* Status */
- str r2, [fp, #reg_cop0+52-dynarec_local] /* Cause */
+ str r1, [fp, #LO_reg_cop0+48] /* Status */
+ str r2, [fp, #LO_reg_cop0+52] /* Cause */
add r0, r3, #0x80
bl get_addr_ht
mov pc, r0
@@ -572,52 +497,52 @@ FUNCTION(jump_syscall):
.align 2
FUNCTION(jump_syscall_hle):
- str r0, [fp, #pcaddr-dynarec_local] /* PC must be set to EPC for psxException */
- ldr r2, [fp, #last_count-dynarec_local]
+ str r0, [fp, #LO_pcaddr] /* PC must be set to EPC for psxException */
+ ldr r2, [fp, #LO_last_count]
mov r1, #0 /* in delay slot */
add r2, r2, r10
mov r0, #0x20 /* cause */
- str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
+ str r2, [fp, #LO_cycle] /* PCSX cycle counter */
bl psxException
- /* note: psxException might do recorsive recompiler call from it's HLE code,
+ /* note: psxException might do recursive recompiler call from it's HLE code,
* so be ready for this */
pcsx_return:
- ldr r1, [fp, #next_interupt-dynarec_local]
- ldr r10, [fp, #cycle-dynarec_local]
- ldr r0, [fp, #pcaddr-dynarec_local]
+ ldr r1, [fp, #LO_next_interupt]
+ ldr r10, [fp, #LO_cycle]
+ ldr r0, [fp, #LO_pcaddr]
sub r10, r10, r1
- str r1, [fp, #last_count-dynarec_local]
+ str r1, [fp, #LO_last_count]
bl get_addr_ht
mov pc, r0
.size jump_syscall_hle, .-jump_syscall_hle
.align 2
FUNCTION(jump_hlecall):
- ldr r2, [fp, #last_count-dynarec_local]
- str r0, [fp, #pcaddr-dynarec_local]
+ ldr r2, [fp, #LO_last_count]
+ str r0, [fp, #LO_pcaddr]
add r2, r2, r10
adr lr, pcsx_return
- str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
+ str r2, [fp, #LO_cycle] /* PCSX cycle counter */
bx r1
.size jump_hlecall, .-jump_hlecall
.align 2
FUNCTION(jump_intcall):
- ldr r2, [fp, #last_count-dynarec_local]
- str r0, [fp, #pcaddr-dynarec_local]
+ ldr r2, [fp, #LO_last_count]
+ str r0, [fp, #LO_pcaddr]
add r2, r2, r10
adr lr, pcsx_return
- str r2, [fp, #cycle-dynarec_local] /* PCSX cycle counter */
+ str r2, [fp, #LO_cycle] /* PCSX cycle counter */
b execI
.size jump_hlecall, .-jump_hlecall
.align 2
FUNCTION(new_dyna_leave):
- ldr r0, [fp, #last_count-dynarec_local]
+ ldr r0, [fp, #LO_last_count]
add r12, fp, #28
add r10, r0, r10
- str r10, [fp, #cycle-dynarec_local]
+ str r10, [fp, #LO_cycle]
ldmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, pc}
.size new_dyna_leave, .-new_dyna_leave
@@ -692,9 +617,9 @@ FUNCTION(invalidate_addr_r12):
mov r0, r12
.size invalidate_addr_r12, .-invalidate_addr_r12
.align 2
-FUNCTION(invalidate_addr_call):
- ldr r12, [fp, #inv_code_start-dynarec_local]
- ldr lr, [fp, #inv_code_end-dynarec_local]
+invalidate_addr_call:
+ ldr r12, [fp, #LO_inv_code_start]
+ ldr lr, [fp, #LO_inv_code_end]
cmp r0, r12
cmpcs lr, r0
blcc invalidate_addr
@@ -706,11 +631,11 @@ FUNCTION(new_dyna_start):
/* ip is stored to conform EABI alignment */
stmfd sp!, {r4, r5, r6, r7, r8, r9, sl, fp, ip, lr}
load_varadr fp, dynarec_local
- ldr r0, [fp, #pcaddr-dynarec_local]
+ ldr r0, [fp, #LO_pcaddr]
bl get_addr_ht
- ldr r1, [fp, #next_interupt-dynarec_local]
- ldr r10, [fp, #cycle-dynarec_local]
- str r1, [fp, #last_count-dynarec_local]
+ ldr r1, [fp, #LO_next_interupt]
+ ldr r10, [fp, #LO_cycle]
+ str r1, [fp, #LO_last_count]
sub r10, r10, r1
mov pc, r0
.size new_dyna_start, .-new_dyna_start
@@ -723,7 +648,7 @@ FUNCTION(new_dyna_start):
/* r0 = address, r1 = handler_tab, r2 = cycles */
lsl r3, r0, #20
lsr r3, #(20+\tab_shift)
- ldr r12, [fp, #last_count-dynarec_local]
+ ldr r12, [fp, #LO_last_count]
ldr r1, [r1, r3, lsl #2]
add r2, r2, r12
lsls r1, #1
@@ -734,7 +659,7 @@ FUNCTION(new_dyna_start):
\readop r0, [r1, r3, lsl #\tab_shift]
.endif
movcc pc, lr
- str r2, [fp, #cycle-dynarec_local]
+ str r2, [fp, #LO_cycle]
bx r1
.endm
@@ -755,7 +680,7 @@ FUNCTION(jump_handler_read32):
lsl r12,r0, #20
lsr r12, #(20+\tab_shift)
ldr r3, [r3, r12, lsl #2]
- str r0, [fp, #address-dynarec_local] @ some handlers still need it..
+ str r0, [fp, #LO_address] @ some handlers still need it..
lsls r3, #1
mov r0, r2 @ cycle return in case of direct store
.if \tab_shift == 1
@@ -765,16 +690,16 @@ FUNCTION(jump_handler_read32):
\wrtop r1, [r3, r12, lsl #\tab_shift]
.endif
movcc pc, lr
- ldr r12, [fp, #last_count-dynarec_local]
+ ldr r12, [fp, #LO_last_count]
mov r0, r1
add r2, r2, r12
push {r2, lr}
- str r2, [fp, #cycle-dynarec_local]
+ str r2, [fp, #LO_cycle]
blx r3
- ldr r0, [fp, #next_interupt-dynarec_local]
+ ldr r0, [fp, #LO_next_interupt]
pop {r2, r3}
- str r0, [fp, #last_count-dynarec_local]
+ str r0, [fp, #LO_last_count]
sub r0, r2, r0
bx r3
.endm
@@ -792,23 +717,23 @@ FUNCTION(jump_handler_write32):
FUNCTION(jump_handler_write_h):
/* r0 = address, r1 = data, r2 = cycles, r3 = handler */
- ldr r12, [fp, #last_count-dynarec_local]
- str r0, [fp, #address-dynarec_local] @ some handlers still need it..
+ ldr r12, [fp, #LO_last_count]
+ str r0, [fp, #LO_address] @ some handlers still need it..
add r2, r2, r12
mov r0, r1
push {r2, lr}
- str r2, [fp, #cycle-dynarec_local]
+ str r2, [fp, #LO_cycle]
blx r3
- ldr r0, [fp, #next_interupt-dynarec_local]
+ ldr r0, [fp, #LO_next_interupt]
pop {r2, r3}
- str r0, [fp, #last_count-dynarec_local]
+ str r0, [fp, #LO_last_count]
sub r0, r2, r0
bx r3
FUNCTION(jump_handle_swl):
/* r0 = address, r1 = data, r2 = cycles */
- ldr r3, [fp, #mem_wtab-dynarec_local]
+ ldr r3, [fp, #LO_mem_wtab]
mov r12,r0,lsr #12
ldr r3, [r3, r12, lsl #2]
lsls r3, #1
@@ -843,7 +768,7 @@ FUNCTION(jump_handle_swl):
FUNCTION(jump_handle_swr):
/* r0 = address, r1 = data, r2 = cycles */
- ldr r3, [fp, #mem_wtab-dynarec_local]
+ ldr r3, [fp, #LO_mem_wtab]
mov r12,r0,lsr #12
ldr r3, [r3, r12, lsl #2]
lsls r3, #1
@@ -869,7 +794,7 @@ FUNCTION(jump_handle_swr):
.macro rcntx_read_mode0 num
/* r0 = address, r2 = cycles */
- ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*\num] @ cycleStart
+ ldr r3, [fp, #LO_rcnts+6*4+7*4*\num] @ cycleStart
mov r0, r2, lsl #16
sub r0, r3, lsl #16
lsr r0, #16
@@ -887,7 +812,7 @@ FUNCTION(rcnt2_read_count_m0):
FUNCTION(rcnt0_read_count_m1):
/* r0 = address, r2 = cycles */
- ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*0] @ cycleStart
+ ldr r3, [fp, #LO_rcnts+6*4+7*4*0] @ cycleStart
mov_16 r1, 0x3334
sub r2, r2, r3
mul r0, r1, r2 @ /= 5
@@ -896,7 +821,7 @@ FUNCTION(rcnt0_read_count_m1):
FUNCTION(rcnt1_read_count_m1):
/* r0 = address, r2 = cycles */
- ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*1]
+ ldr r3, [fp, #LO_rcnts+6*4+7*4*1]
mov_24 r1, 0x1e6cde
sub r2, r2, r3
umull r3, r0, r1, r2 @ ~ /= hsync_cycles, max ~0x1e6cdd
@@ -904,7 +829,7 @@ FUNCTION(rcnt1_read_count_m1):
FUNCTION(rcnt2_read_count_m1):
/* r0 = address, r2 = cycles */
- ldr r3, [fp, #rcnts-dynarec_local+6*4+7*4*2]
+ ldr r3, [fp, #LO_rcnts+6*4+7*4*2]
mov r0, r2, lsl #16-3
sub r0, r3, lsl #16-3
lsr r0, #16 @ /= 8
diff --git a/libpcsxcore/new_dynarec/linkage_offsets.h b/libpcsxcore/new_dynarec/linkage_offsets.h
new file mode 100644
index 0000000..55ceb42
--- /dev/null
+++ b/libpcsxcore/new_dynarec/linkage_offsets.h
@@ -0,0 +1,38 @@
+
+#define LO_next_interupt 64
+#define LO_cycle_count (LO_next_interupt + 4)
+#define LO_last_count (LO_cycle_count + 4)
+#define LO_pending_exception (LO_last_count + 4)
+#define LO_stop (LO_pending_exception + 4)
+#define LO_invc_ptr (LO_stop + 4)
+#define LO_address (LO_invc_ptr + 4)
+#define LO_psxRegs (LO_address + 4)
+#define LO_reg (LO_psxRegs)
+#define LO_lo (LO_reg + 128)
+#define LO_hi (LO_lo + 4)
+#define LO_reg_cop0 (LO_hi + 4)
+#define LO_reg_cop2d (LO_reg_cop0 + 128)
+#define LO_reg_cop2c (LO_reg_cop2d + 128)
+#define LO_PC (LO_reg_cop2c + 128)
+#define LO_pcaddr (LO_PC)
+#define LO_code (LO_PC + 4)
+#define LO_cycle (LO_code + 4)
+#define LO_interrupt (LO_cycle + 4)
+#define LO_intCycle (LO_interrupt + 4)
+#define LO_psxRegs_end (LO_intCycle + 256)
+#define LO_rcnts (LO_psxRegs_end)
+#define LO_rcnts_end (LO_rcnts + 7*4*4)
+#define LO_mem_rtab (LO_rcnts_end)
+#define LO_mem_wtab (LO_mem_rtab + 4)
+#define LO_psxH_ptr (LO_mem_wtab + 4)
+#define LO_zeromem_ptr (LO_psxH_ptr + 4)
+#define LO_inv_code_start (LO_zeromem_ptr + 4)
+#define LO_inv_code_end (LO_inv_code_start + 4)
+#define LO_branch_target (LO_inv_code_end + 4)
+#define LO_align0 (LO_branch_target + 4)
+#define LO_mini_ht (LO_align0 + 16)
+#define LO_restore_candidate (LO_mini_ht + 256)
+#define LO_dynarec_local_size (LO_restore_candidate + 512)
+
+#define LO_FCR0 (LO_align0)
+#define LO_FCR31 (LO_align0)
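
For reference, evaluating the chain of definitions above gives the following byte offsets inside dynarec_local (derived values, listed here for convenience; they match the layout the old hand-written assignments used, e.g. next_interupt at dynarec_local + 64):

    LO_next_interupt       =   64
    LO_address             =   88
    LO_psxRegs / LO_reg    =   92    (lo at 220, hi at 224)
    LO_reg_cop0            =  228    (so "#LO_reg_cop0+48" is byte 276)
    LO_reg_cop2d           =  356
    LO_reg_cop2c           =  484
    LO_PC / LO_pcaddr      =  612
    LO_rcnts               =  884
    LO_mem_rtab            =  996
    LO_branch_target       = 1020
    LO_mini_ht             = 1040
    LO_restore_candidate   = 1296
    LO_dynarec_local_size  = 1808    (total reserved by the .space directive)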