@@ -2014,8 +2014,7 @@ impure fn trans_args(@block_ctxt cx,
             auto retty = ty.ty_fn_ret(fn_ty);
             auto llretty = type_of(cx.fcx.ccx, retty);
             auto llretslot = cx.build.Alloca(llretty);
-            llretslot = cx.build.PointerCast(llretslot, T_ptr(T_i8()));
-            vs += llretslot;
+            vs += cx.build.PointerCast(llretslot, T_ptr(T_i8()));
             llretslot_opt = some[ValueRef](llretslot);
         }
     }
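The trans_args hunk above fixes an aliasing slip: llretslot used to be overwritten with its own i8* cast, so llretslot_opt handed trans_call an untyped pointer to the return slot. Now only the cast value goes into the argument vector vs, and llretslot keeps its real type for the caller's later load. A minimal modern-Rust sketch of the same caller-allocated-retslot pattern (every name here is illustrative, not from the compiler):

    use std::mem::MaybeUninit;

    // Hypothetical stand-in for a generically compiled callee: it cannot
    // name its concrete return type, so it writes the result through an
    // untyped out-pointer, the analogue of the i8* retslot above.
    unsafe fn callee(out: *mut u8) {
        let typed = out as *mut (i32, i32);
        typed.write((40, 2));
    }

    fn main() {
        // The caller allocates a typed slot (the Alloca(llretty))...
        let mut slot = MaybeUninit::<(i32, i32)>::uninit();
        unsafe {
            // ...passes only the cast pointer onward (the PointerCast
            // that now feeds vs directly)...
            callee(slot.as_mut_ptr() as *mut u8);
            // ...and keeps the typed pointer for its own read, which is
            // why llretslot_opt must capture the uncast llretslot.
            assert_eq!(slot.assume_init(), (40, 2));
        }
    }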
@@ -2318,19 +2317,22 @@ impure fn trans_call(@block_ctxt cx, @ast.expr f,
 
     auto bcx = args_res._0;
     auto real_retval = bcx.build.FastCall(faddr, args_res._1);
-    auto retval;
+    auto retval = real_retval;
+
+    if (ty.type_is_nil(ret_ty)) {
+        retval = C_nil();
+    }
 
     // Check for a generic retslot.
     alt (args_res._2) {
+
         case (some[ValueRef](?llretslot)) {
-            retval = bcx.build.Load(llretslot);
+            retval = load_scalar_or_boxed(bcx, llretslot, ret_ty);
         }
-        case (none[ValueRef]) {
-            retval = real_retval;
 
-            if (ty.type_is_nil(ret_ty)) {
-                retval = C_nil();
-            } else if (ty.type_is_structural(ret_ty)) {
+        case (none[ValueRef]) {
+            if (!(ty.type_is_scalar(ret_ty) ||
+                  ty.type_is_boxed(ret_ty))) {
                 // Structured returns come back as first-class values. This is
                 // nice for LLVM but wrong for us; we treat structured values
                 // by pointer in most of our code here. So spill it to an
@@ -2342,8 +2344,6 @@ impure fn trans_call(@block_ctxt cx, @ast.expr f,
         }
     }
 
-
-
     // Retval doesn't correspond to anything really tangible in the frame, but
     // it's a ref all the same, so we put a note here to drop it when we're
     // done in this scope.
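The some arm's new load_scalar_or_boxed call is the matching policy on the retslot side: register-sized values (scalars and boxed pointers) are loaded out of the slot, while structural values stay behind the pointer. A sketch of that dispatch under assumed types; Ty, Val, and the body are guesses at the shape, not the actual trans code:

    #[derive(Clone, Copy)]
    enum Ty { Scalar, Boxed, Structural }

    enum Val { Imm(u64), Ptr(*mut u64) }

    // Register-sized values are loaded out of the return slot; structural
    // values are handed back as the slot pointer itself.
    unsafe fn load_scalar_or_boxed(slot: *mut u64, t: Ty) -> Val {
        match t {
            Ty::Scalar | Ty::Boxed => Val::Imm(*slot),
            Ty::Structural => Val::Ptr(slot),
        }
    }

    fn main() {
        let mut cell: u64 = 7;
        match unsafe { load_scalar_or_boxed(&mut cell, Ty::Scalar) } {
            Val::Imm(n) => assert_eq!(n, 7),
            Val::Ptr(_) => unreachable!(),
        }
    }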