@@ -40,7 +40,7 @@ use crate::io_extras::{copy, sink};
 use crate::prelude::*;
 use core::{cmp, fmt};
 use core::convert::TryFrom;
-use crate::sync::{RwLock, RwLockReadGuard};
+use crate::sync::{RwLock, RwLockReadGuard, LockTestExt};
 #[cfg(feature = "std")]
 use core::sync::atomic::{AtomicUsize, Ordering};
 use crate::sync::Mutex;
@@ -1327,9 +1327,14 @@ impl<L: Deref> fmt::Display for NetworkGraph<L> where L::Target: Logger {
 impl<L: Deref> Eq for NetworkGraph<L> where L::Target: Logger {}
 impl<L: Deref> PartialEq for NetworkGraph<L> where L::Target: Logger {
 	fn eq(&self, other: &Self) -> bool {
-		self.genesis_hash == other.genesis_hash &&
-			*self.channels.read().unwrap() == *other.channels.read().unwrap() &&
-			*self.nodes.read().unwrap() == *other.nodes.read().unwrap()
+		// For a total lockorder, sort by position in memory and take the inner locks in that order.
+		// (Assumes that we can't move within memory while a lock is held).
+		let ord = ((self as *const _) as usize) < ((other as *const _) as usize);
+		let a = if ord { (&self.channels, &self.nodes) } else { (&other.channels, &other.nodes) };
+		let b = if ord { (&other.channels, &other.nodes) } else { (&self.channels, &self.nodes) };
+		let (channels_a, channels_b) = (a.0.unsafe_well_ordered_double_lock_self(), b.0.unsafe_well_ordered_double_lock_self());
+		let (nodes_a, nodes_b) = (a.1.unsafe_well_ordered_double_lock_self(), b.1.unsafe_well_ordered_double_lock_self());
+		self.genesis_hash.eq(&other.genesis_hash) && channels_a.eq(&channels_b) && nodes_a.eq(&nodes_b)
 	}
 }
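The new `eq` uses LDK's `LockTestExt::unsafe_well_ordered_double_lock_self` helper so the test-time lockorder checker accepts taking both sides' locks. The same address-ordering idea can be sketched with plain `std::sync::RwLock`; the `Graph` struct and its fields below are purely illustrative stand-ins, not the crate's `NetworkGraph`:

```rust
use std::sync::RwLock;

// Hypothetical stand-in for a structure holding two independently-locked maps.
struct Graph {
    channels: RwLock<Vec<u64>>,
    nodes: RwLock<Vec<u64>>,
}

impl PartialEq for Graph {
    fn eq(&self, other: &Self) -> bool {
        // Order the two objects by address so every thread acquires the inner
        // locks in the same global order. Two threads comparing (a, b) and
        // (b, a) concurrently then cannot hit a lock-order inversion, which
        // could otherwise deadlock once a writer is queued between the reads.
        let ord = (self as *const Self as usize) < (other as *const Self as usize);
        let (first, second) = if ord { (self, other) } else { (other, self) };
        let channels_first = first.channels.read().unwrap();
        let channels_second = second.channels.read().unwrap();
        let nodes_first = first.nodes.read().unwrap();
        let nodes_second = second.nodes.read().unwrap();
        *channels_first == *channels_second && *nodes_first == *nodes_second
    }
}

fn main() {
    let a = Graph { channels: RwLock::new(vec![1, 2]), nodes: RwLock::new(vec![3]) };
    let b = Graph { channels: RwLock::new(vec![1, 2]), nodes: RwLock::new(vec![3]) };
    assert!(a == b);
}
```

Because equality is symmetric, it does not matter which operand is locked first, only that every thread agrees on the order; comparing addresses provides that total order as long as neither value can move in memory while its lock is held, which is what the diff's comment calls out.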