@@ -1537,25 +1537,24 @@ static void emit_native_load_subscr(emit_t *emit) {
             switch (vtype_base) {
                 case VTYPE_PTR8: {
                     // pointer to 8-bit memory
-                    // TODO optimise to use thumb ldrb r1, [r2, r3]
+                    #if N_THUMB
+                    if (index_value >= 0 && index_value < 32) {
+                        asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_RV32
+                    if (FIT_SIGNED(index_value, 12)) {
+                        asm_rv32_opcode_lbu(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_XTENSA || N_XTENSAWIN
+                    if (index_value >= 0 && index_value < 256) {
+                        asm_xtensa_op_l8ui(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #endif
                     if (index_value != 0) {
                         // index is non-zero
-                        #if N_THUMB
-                        if (index_value > 0 && index_value < 32) {
-                            asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_RV32
-                        if (FIT_SIGNED(index_value, 12)) {
-                            asm_rv32_opcode_lbu(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_XTENSA || N_XTENSAWIN
-                        if (index_value > 0 && index_value < 256) {
-                            asm_xtensa_op_l8ui(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #endif
                         need_reg_single(emit, reg_index, 0);
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
                         ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
@@ -1566,24 +1565,24 @@ static void emit_native_load_subscr(emit_t *emit) {
                 }
                 case VTYPE_PTR16: {
                     // pointer to 16-bit memory
+                    #if N_THUMB
+                    if (index_value >= 0 && index_value < 32) {
+                        asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_RV32
+                    if (FIT_SIGNED(index_value, 11)) {
+                        asm_rv32_opcode_lhu(emit->as, REG_RET, reg_base, index_value << 1);
+                        break;
+                    }
+                    #elif N_XTENSA || N_XTENSAWIN
+                    if (index_value >= 0 && index_value < 256) {
+                        asm_xtensa_op_l16ui(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #endif
                     if (index_value != 0) {
                         // index is a non-zero immediate
-                        #if N_THUMB
-                        if (index_value > 0 && index_value < 32) {
-                            asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_RV32
-                        if (FIT_SIGNED(index_value, 11)) {
-                            asm_rv32_opcode_lhu(emit->as, REG_RET, reg_base, index_value << 1);
-                            break;
-                        }
-                        #elif N_XTENSA || N_XTENSAWIN
-                        if (index_value > 0 && index_value < 256) {
-                            asm_xtensa_op_l16ui(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #endif
                         need_reg_single(emit, reg_index, 0);
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
                         ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
@@ -1594,24 +1593,24 @@ static void emit_native_load_subscr(emit_t *emit) {
                 }
                 case VTYPE_PTR32: {
                     // pointer to 32-bit memory
+                    #if N_THUMB
+                    if (index_value >= 0 && index_value < 32) {
+                        asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_RV32
+                    if (FIT_SIGNED(index_value, 10)) {
+                        asm_rv32_opcode_lw(emit->as, REG_RET, reg_base, index_value << 2);
+                        break;
+                    }
+                    #elif N_XTENSA || N_XTENSAWIN
+                    if (index_value >= 0 && index_value < 256) {
+                        asm_xtensa_l32i_optimised(emit->as, REG_RET, reg_base, index_value);
+                        break;
+                    }
+                    #endif
                     if (index_value != 0) {
                         // index is a non-zero immediate
-                        #if N_THUMB
-                        if (index_value > 0 && index_value < 32) {
-                            asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_RV32
-                        if (FIT_SIGNED(index_value, 10)) {
-                            asm_rv32_opcode_lw(emit->as, REG_RET, reg_base, index_value << 2);
-                            break;
-                        }
-                        #elif N_XTENSA || N_XTENSAWIN
-                        if (index_value > 0 && index_value < 256) {
-                            asm_xtensa_l32i_optimised(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #endif
                         need_reg_single(emit, reg_index, 0);
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
                         ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
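
Note (not part of the patch): the three load hunks above all follow the same pattern. The per-architecture immediate-offset load is hoisted out of the `if (index_value != 0)` block, the Thumb/Xtensa guards are relaxed from `> 0` to `>= 0`, and the fallback that materialises `reg_base + index` is now only reached when the immediate form does not apply. The sketch below is a standalone, compilable illustration of just the range checks visible in the hunks; the helper names and the `main` driver are hypothetical and are not part of the MicroPython emitter.

/*
 * Sketch of the immediate-offset range checks used by the new fast paths.
 * The constants mirror the hunks above: Thumb takes a 5-bit element-scaled
 * offset, RV32 a signed 12-bit byte offset (hence 12/11/10 bits for the
 * unscaled 8/16/32-bit index), Xtensa an 8-bit element-scaled offset.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

// Analogue of MicroPython's FIT_SIGNED: does `value` fit a signed
// `bits`-bit immediate?
static bool fit_signed(int32_t value, unsigned bits) {
    return value >= -(INT32_C(1) << (bits - 1)) && value < (INT32_C(1) << (bits - 1));
}

// Thumb: 0 <= index < 32 (ldrb/ldrh/ldr with a 5-bit offset field).
static bool thumb_imm_ok(int32_t index) {
    return index >= 0 && index < 32;
}

// RV32: the byte offset index << element_shift must fit 12 signed bits,
// so the unscaled index is checked against 12 - element_shift bits.
static bool rv32_imm_ok(int32_t index, unsigned element_shift) {
    return fit_signed(index, 12 - element_shift);
}

// Xtensa: 0 <= index < 256 (l8ui/l16ui/l32i with an 8-bit offset field).
static bool xtensa_imm_ok(int32_t index) {
    return index >= 0 && index < 256;
}

int main(void) {
    const int32_t tests[] = {-1, 0, 31, 32, 255, 256, 511, 512};
    for (size_t i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
        int32_t index = tests[i];
        printf("index %4ld: thumb=%d rv32(ptr8)=%d rv32(ptr32)=%d xtensa=%d\n",
            (long)index, thumb_imm_ok(index), rv32_imm_ok(index, 0),
            rv32_imm_ok(index, 2), xtensa_imm_ok(index));
    }
    return 0;
}
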
@@ -1811,28 +1810,28 @@ static void emit_native_store_subscr(emit_t *emit) {
                 case VTYPE_PTR8: {
                     // pointer to 8-bit memory
                     // TODO optimise to use thumb strb r1, [r2, r3]
+                    #if N_THUMB
+                    if (index_value >= 0 && index_value < 32) {
+                        asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_RV32
+                    if (FIT_SIGNED(index_value, 12)) {
+                        asm_rv32_opcode_sb(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_XTENSA || N_XTENSAWIN
+                    if (index_value >= 0 && index_value < 256) {
+                        asm_xtensa_op_s8i(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #endif
                     if (index_value != 0) {
                         // index is non-zero
-                        #if N_THUMB
-                        if (index_value > 0 && index_value < 32) {
-                            asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_RV32
-                        if (FIT_SIGNED(index_value, 12)) {
-                            asm_rv32_opcode_sb(emit->as, reg_value, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_XTENSA || N_XTENSAWIN
-                        if (index_value > 0 && index_value < 256) {
-                            asm_xtensa_op_s8i(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #endif
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
                         #if N_ARM
                         asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
-                        return;
+                        break;
                         #endif
                         ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
                         reg_base = reg_index;
@@ -1842,24 +1841,24 @@ static void emit_native_store_subscr(emit_t *emit) {
                 }
                 case VTYPE_PTR16: {
                     // pointer to 16-bit memory
+                    #if N_THUMB
+                    if (index_value >= 0 && index_value < 32) {
+                        asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_RV32
+                    if (FIT_SIGNED(index_value, 11)) {
+                        asm_rv32_opcode_sh(emit->as, reg_value, reg_base, index_value << 1);
+                        break;
+                    }
+                    #elif N_XTENSA || N_XTENSAWIN
+                    if (index_value >= 0 && index_value < 256) {
+                        asm_xtensa_op_s16i(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #endif
                     if (index_value != 0) {
                         // index is a non-zero immediate
-                        #if N_THUMB
-                        if (index_value > 0 && index_value < 32) {
-                            asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_RV32
-                        if (FIT_SIGNED(index_value, 11)) {
-                            asm_rv32_opcode_sh(emit->as, reg_value, reg_base, index_value << 1);
-                            break;
-                        }
-                        #elif N_XTENSA || N_XTENSAWIN
-                        if (index_value > 0 && index_value < 256) {
-                            asm_xtensa_op_s16i(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #endif
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
                         ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
                         reg_base = reg_index;
@@ -1869,27 +1868,28 @@ static void emit_native_store_subscr(emit_t *emit) {
                 }
                 case VTYPE_PTR32: {
                     // pointer to 32-bit memory
+                    #if N_THUMB
+                    if (index_value >= 0 && index_value < 32) {
+                        asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #elif N_RV32
+                    if (FIT_SIGNED(index_value, 10)) {
+                        asm_rv32_opcode_sw(emit->as, reg_value, reg_base, index_value << 2);
+                        break;
+                    }
+                    #elif N_XTENSA || N_XTENSAWIN
+                    if (index_value >= 0 && index_value < 256) {
+                        asm_xtensa_s32i_optimised(emit->as, reg_value, reg_base, index_value);
+                        break;
+                    }
+                    #endif
                     if (index_value != 0) {
                         // index is a non-zero immediate
-                        #if N_THUMB
-                        if (index_value > 0 && index_value < 32) {
-                            asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_RV32
-                        if (FIT_SIGNED(index_value, 10)) {
-                            asm_rv32_opcode_sw(emit->as, reg_value, reg_base, index_value << 2);
-                            break;
-                        }
-                        #elif N_XTENSA || N_XTENSAWIN
-                        if (index_value > 0 && index_value < 256) {
-                            asm_xtensa_s32i_optimised(emit->as, REG_RET, reg_base, index_value);
-                            break;
-                        }
-                        #elif N_ARM
+                        #if N_ARM
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
                         asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
-                        return;
+                        break;
                         #endif
                         ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
                         ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
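
Note (not part of the patch): the store hunks apply the same restructuring as the load hunks and also carry three small fixes that are visible in the diff: the removed Xtensa store paths passed REG_RET as the value operand while the new code passes reg_value, the ARM branches now end the case with break instead of return, and the PTR32 case turns the trailing `#elif N_ARM` into a standalone `#if N_ARM`. The sketch below compiles on its own and only illustrates the control-flow shape of the new store cases; the emit_* helpers are printf stubs, not the real emitter API.

/*
 * Simplified shape of a store case after the change: the immediate-offset
 * fast path runs first (so index 0 can take it), and the fallback builds
 * base + index only when the fast path did not apply.
 */
#include <stdio.h>

static void emit_store_imm_offset(int index) {
    printf("strb value, [base, #%d]\n", index);     // single immediate-offset store
}

static void emit_add_base_index(int index) {
    printf("mov tmp, #%d; add base, tmp\n", index); // materialise base + index
}

static void emit_store_via_base(void) {
    printf("strb value, [base]\n");                 // store through the (adjusted) base
}

// New shape of the VTYPE_PTR8 store case, sketched for a Thumb-like target.
static void store_ptr8(int index_value) {
    if (index_value >= 0 && index_value < 32) {
        emit_store_imm_offset(index_value);
        return; // `break` in the real switch statement
    }
    if (index_value != 0) {
        emit_add_base_index(index_value);
    }
    emit_store_via_base();
}

int main(void) {
    store_ptr8(0);   // now handled by the immediate-offset path
    store_ptr8(7);   // immediate-offset path
    store_ptr8(64);  // out of range: falls back to a computed address
    return 0;
}
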