@@ -1493,27 +1493,50 @@ fn mk_plain_tag(ast.def_id tid) -> @ty.t {
 
 type val_and_ty_fn = fn(@block_ctxt cx, ValueRef v, @ty.t t) -> result;
 
+type val_pair_and_ty_fn =
+    fn(@block_ctxt cx, ValueRef av, ValueRef bv, @ty.t t) -> result;
+
 // Iterates through the elements of a structural type.
 fn iter_structural_ty(@block_ctxt cx,
                       ValueRef v,
                       @ty.t t,
                       val_and_ty_fn f)
     -> result {
+    fn adaptor_fn(val_and_ty_fn f,
+                  @block_ctxt cx,
+                  ValueRef av,
+                  ValueRef bv,
+                  @ty.t t) -> result {
+        ret f(cx, av, t);
+    }
+    be iter_structural_ty_full(cx, v, v, t,
+                               bind adaptor_fn(f, _, _, _, _));
+}
+
+
+fn iter_structural_ty_full(@block_ctxt cx,
+                           ValueRef av,
+                           ValueRef bv,
+                           @ty.t t,
+                           val_pair_and_ty_fn f)
+    -> result {
     let result r = res(cx, C_nil());
 
     fn iter_boxpp(@block_ctxt cx,
-                  ValueRef box_cell,
-                  val_and_ty_fn f) -> result {
-        auto box_ptr = cx.build.Load(box_cell);
+                  ValueRef box_a_cell,
+                  ValueRef box_b_cell,
+                  val_pair_and_ty_fn f) -> result {
+        auto box_a_ptr = cx.build.Load(box_a_cell);
+        auto box_b_ptr = cx.build.Load(box_b_cell);
         auto tnil = plain_ty(ty.ty_nil);
         auto tbox = plain_ty(ty.ty_box(tnil));
 
         auto inner_cx = new_sub_block_ctxt(cx, "iter box");
         auto next_cx = new_sub_block_ctxt(cx, "next");
-        auto null_test = cx.build.IsNull(box_ptr);
+        auto null_test = cx.build.IsNull(box_a_ptr);
         cx.build.CondBr(null_test, next_cx.llbb, inner_cx.llbb);
 
-        auto r = f(inner_cx, box_ptr, tbox);
+        auto r = f(inner_cx, box_a_ptr, box_b_ptr, tbox);
         r.bcx.build.Br(next_cx.llbb);
         ret res(next_cx, r.val);
     }
@@ -1522,19 +1545,23 @@ fn iter_structural_ty(@block_ctxt cx,
         case (ty.ty_tup(?args)) {
             let int i = 0;
             for (@ty.t arg in args) {
-                auto elt = r.bcx.build.GEP(v, vec(C_int(0), C_int(i)));
+                auto elt_a = r.bcx.build.GEP(av, vec(C_int(0), C_int(i)));
+                auto elt_b = r.bcx.build.GEP(bv, vec(C_int(0), C_int(i)));
                 r = f(r.bcx,
-                      load_scalar_or_boxed(r.bcx, elt, arg),
+                      load_scalar_or_boxed(r.bcx, elt_a, arg),
+                      load_scalar_or_boxed(r.bcx, elt_b, arg),
                       arg);
                 i += 1;
             }
         }
         case (ty.ty_rec(?fields)) {
             let int i = 0;
             for (ty.field fld in fields) {
-                auto llfld = r.bcx.build.GEP(v, vec(C_int(0), C_int(i)));
+                auto llfld_a = r.bcx.build.GEP(av, vec(C_int(0), C_int(i)));
+                auto llfld_b = r.bcx.build.GEP(bv, vec(C_int(0), C_int(i)));
                 r = f(r.bcx,
-                      load_scalar_or_boxed(r.bcx, llfld, fld.ty),
+                      load_scalar_or_boxed(r.bcx, llfld_a, fld.ty),
+                      load_scalar_or_boxed(r.bcx, llfld_b, fld.ty),
                       fld.ty);
                 i += 1;
             }
@@ -1545,53 +1572,69 @@ fn iter_structural_ty(@block_ctxt cx,
            auto variants = tag_variants(cx.fcx.ccx, tid);
            auto n_variants = _vec.len[ast.variant](variants);
 
-           auto lldiscrim_ptr = cx.build.GEP(v, vec(C_int(0), C_int(0)));
-           auto llunion_ptr = cx.build.GEP(v, vec(C_int(0), C_int(1)));
-           auto lldiscrim = cx.build.Load(lldiscrim_ptr);
+           auto lldiscrim_a_ptr = cx.build.GEP(av, vec(C_int(0), C_int(0)));
+           auto llunion_a_ptr = cx.build.GEP(av, vec(C_int(0), C_int(1)));
+           auto lldiscrim_a = cx.build.Load(lldiscrim_a_ptr);
+
+           auto lldiscrim_b_ptr = cx.build.GEP(bv, vec(C_int(0), C_int(0)));
+           auto llunion_b_ptr = cx.build.GEP(bv, vec(C_int(0), C_int(1)));
+           auto lldiscrim_b = cx.build.Load(lldiscrim_b_ptr);
 
            auto unr_cx = new_sub_block_ctxt(cx, "tag-iter-unr");
            unr_cx.build.Unreachable();
 
-           auto llswitch = cx.build.Switch(lldiscrim, unr_cx.llbb,
-                                           n_variants);
+           auto llswitch = cx.build.Switch(lldiscrim_a, unr_cx.llbb,
+                                           n_variants);
 
            auto next_cx = new_sub_block_ctxt(cx, "tag-iter-next");
 
            auto i = 0u;
            for (ast.variant variant in variants) {
-               auto variant_cx = new_sub_block_ctxt(cx, "tag-iter-variant-" +
+               auto variant_cx = new_sub_block_ctxt(cx,
+                                                    "tag-iter-variant-" +
                                                     _uint.to_str(i, 10u));
                llvm.LLVMAddCase(llswitch, C_int(i as int), variant_cx.llbb);
 
                if (_vec.len[ast.variant_arg](variant.args) > 0u) {
                    // N-ary variant.
-                   let vec[ValueRef] vals = vec(C_int(0), C_int(1),
-                                                C_int(i as int));
-                   auto llvar = variant_cx.build.GEP(v, vals);
                    auto llvarty = type_of_variant(cx.fcx.ccx, variants.(i));
 
                    auto fn_ty = ty.ann_to_type(variants.(i).ann);
                    alt (fn_ty.struct) {
                        case (ty.ty_fn(_, ?args, _)) {
-                           auto llvarp = variant_cx.build.
-                               TruncOrBitCast(llunion_ptr, T_ptr(llvarty));
+                           auto llvarp_a = variant_cx.build.
+                               TruncOrBitCast(llunion_a_ptr, T_ptr(llvarty));
+
+                           auto llvarp_b = variant_cx.build.
+                               TruncOrBitCast(llunion_b_ptr, T_ptr(llvarty));
 
                            auto ty_params = tag_ty_params(cx.fcx.ccx, tid);
 
                            auto j = 0u;
                            for (ty.arg a in args) {
                                auto v = vec(C_int(0), C_int(j as int));
-                               auto llfldp = variant_cx.build.GEP(llvarp, v);
+
+                               auto llfldp_a =
+                                   variant_cx.build.GEP(llvarp_a, v);
+
+                               auto llfldp_b =
+                                   variant_cx.build.GEP(llvarp_b, v);
 
                                auto ty_subst = ty.substitute_ty_params(
                                    ty_params, tps, a.ty);
 
-                               auto llfld =
+                               auto llfld_a =
                                    load_scalar_or_boxed(variant_cx,
-                                                        llfldp,
+                                                        llfldp_a,
                                                         ty_subst);
 
-                               auto res = f(variant_cx, llfld, ty_subst);
+                               auto llfld_b =
+                                   load_scalar_or_boxed(variant_cx,
+                                                        llfldp_b,
+                                                        ty_subst);
+
+                               auto res = f(variant_cx,
+                                            llfld_a, llfld_b, ty_subst);
                                variant_cx = res.bcx;
                                j += 1u;
                            }
@@ -1611,21 +1654,29 @@ fn iter_structural_ty(@block_ctxt cx,
            ret res(next_cx, C_nil());
        }
        case (ty.ty_fn(_, _, _)) {
-           auto box_cell =
-               cx.build.GEP(v,
+           auto box_cell_a =
+               cx.build.GEP(av,
+                            vec(C_int(0),
+                                C_int(abi.fn_field_box)));
+           auto box_cell_b =
+               cx.build.GEP(bv,
                             vec(C_int(0),
                                 C_int(abi.fn_field_box)));
-           ret iter_boxpp(cx, box_cell, f);
+           ret iter_boxpp(cx, box_cell_a, box_cell_b, f);
        }
        case (ty.ty_obj(_)) {
-           auto box_cell =
-               cx.build.GEP(v,
+           auto box_cell_a =
+               cx.build.GEP(av,
+                            vec(C_int(0),
+                                C_int(abi.obj_field_box)));
+           auto box_cell_b =
+               cx.build.GEP(bv,
                             vec(C_int(0),
                                 C_int(abi.obj_field_box)));
-           ret iter_boxpp(cx, box_cell, f);
+           ret iter_boxpp(cx, box_cell_a, box_cell_b, f);
        }
        case (_) {
-           cx.fcx.ccx.sess.unimpl("type in iter_structural_ty");
+           cx.fcx.ccx.sess.unimpl("type in iter_structural_ty_full");
        }
    }
    ret r;
@@ -1965,21 +2016,47 @@ fn trans_compare(@block_ctxt cx, ast.binop op, @ty.t t,
        ret res(cx, trans_scalar_compare(cx, op, t, lhs, rhs));
 
    } else if (ty.type_is_structural(t)) {
-       auto scx = new_sub_block_ctxt(cx, "structural compare body");
-       auto next = new_sub_block_ctxt(cx, "structural compare completion");
+       auto scx = new_sub_block_ctxt(cx, "structural compare start");
+       auto next = new_sub_block_ctxt(cx, "structural compare end");
        cx.build.Br(scx.llbb);
 
        // Start with the assumption that our predicate holds.
        auto flag = scx.build.Alloca(T_i1());
        scx.build.Store(C_integral(1, T_i1()), flag);
 
-       // Attempt to prove otherwise by inverting the sense of the comparison
-       // on each inner element and bailing if any succeed.
+       // Attempt to prove otherwise by assuming true, comparing each element
+       // and writing 0 + early-exiting if any comparisons fail.
+
+       fn inner(@block_ctxt next_cx,
+                ValueRef flag,
+                ast.binop op,
+                @block_ctxt cx,
+                ValueRef av,
+                ValueRef bv,
+                @ty.t t) -> result {
+           // Compare av op bv.
+           auto cnt_cx = new_sub_block_ctxt(cx, "continue comparison");
+           auto stop_cx = new_sub_block_ctxt(cx, "stop comparison");
+
+           auto r = trans_compare(cx, op, t, av, bv);
+
+           // If true, then carry on; else write 0 to flag, branch to 'next'.
+           r.bcx.build.CondBr(r.val, cnt_cx.llbb, stop_cx.llbb);
+           stop_cx.build.Store(C_integral(0, T_i1()), flag);
+           stop_cx.build.Br(next_cx.llbb);
+
+           ret res(cnt_cx, C_nil());
+       }
+
+       // FIXME: this is wrong for tag types; need to confirm discriminants
+       // are equal before blindly walking over elements.
 
-       // FIXME: finish this.
+       auto r = iter_structural_ty_full(scx, lhs, rhs, t,
+                                        bind inner(next, flag, op,
+                                                   _, _, _, _));
 
-       auto v = scx.build.Load(flag);
-       scx.build.Br(next.llbb);
+       r.bcx.build.Br(next.llbb);
+       auto v = next.build.Load(flag);
        ret res(next, v);
 
    } else {