@@ -18,7 +18,7 @@
 #include <intrin0.h>

 /* First include the standard intrinsics. */
-#if defined(__i386__) || defined(__x86_64__)
+#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
 #include <x86intrin.h>
 #endif
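A note on why the extra test is needed: these guards imply that an ARM64EC build predefines __x86_64__ (for x64 source compatibility) alongside __arm64ec__, so a bare defined(__x86_64__) check would wrongly pull in x86intrin.h. A minimal sketch of the resulting classification, under that assumption (the messages are illustrative only):

#include <stdio.h>

int main(void) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
  puts("native x86: x86 intrinsic headers are usable");
#elif defined(__arm64ec__)
  puts("ARM64EC: x64-compatible ABI, but no x86 intrinsic headers");
#else
  puts("some other target");
#endif
  return 0;
}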
@@ -166,7 +166,7 @@ unsigned __int32 _xbegin(void);
 void _xend(void);

 /* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
-#ifdef __x86_64__
+#if defined(__x86_64__) && !defined(__arm64ec__)
 void __addgsbyte(unsigned long, unsigned char);
 void __addgsdword(unsigned long, unsigned long);
 void __addgsqword(unsigned long, unsigned __int64);
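For context on what is being gated here: __addgsqword(Offset, Value) adds Value to the 64-bit word at gs:[Offset] (on x64 Windows, GS typically points at the TEB). A hedged C model of the semantics, substituting an ordinary buffer for the segment; gs_base and model_addgsqword are illustrative names, not part of the header:

#include <stdio.h>
#include <string.h>

static unsigned char gs_base[256]; /* stand-in for gs:[0..255] */

/* Models the read-add-write that __addgsqword performs on gs:[off]. */
static void model_addgsqword(unsigned long off, unsigned long long val) {
  unsigned long long tmp;
  memcpy(&tmp, gs_base + off, sizeof tmp); /* load gs:[off] */
  tmp += val;                              /* add */
  memcpy(gs_base + off, &tmp, sizeof tmp); /* store back */
}

int main(void) {
  model_addgsqword(8, 41);
  model_addgsqword(8, 1);
  unsigned long long out;
  memcpy(&out, gs_base + 8, sizeof out);
  printf("%llu\n", out); /* prints: 42 */
  return 0;
}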
@@ -236,7 +236,8 @@ __int64 _mul128(__int64, __int64, __int64 *);
 /*----------------------------------------------------------------------------*\
 |* movs, stos
 \*----------------------------------------------------------------------------*/
-#if defined(__i386__) || defined(__x86_64__)
+
+#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
 static __inline__ void __DEFAULT_FN_ATTRS __movsb(unsigned char *__dst,
                                                   unsigned char const *__src,
                                                   size_t __n) {
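Where the guard above holds, __movsb lowers to a rep movsb byte copy. A small usage sketch; it assumes a toolchain that actually provides the intrinsic (clang with this header, or MSVC via <intrin.h>):

#include <intrin.h>
#include <stdio.h>

int main(void) {
  unsigned char src[8] = {1, 2, 3, 4, 5, 6, 7, 8};
  unsigned char dst[8] = {0};
  __movsb(dst, src, sizeof dst);     /* rep movsb: forward byte-wise copy */
  printf("%d %d\n", dst[0], dst[7]); /* prints: 1 8 */
  return 0;
}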
@@ -305,7 +306,7 @@ static __inline__ void __DEFAULT_FN_ATTRS __stosw(unsigned short *__dst,
                        : "memory");
 }
 #endif
-#ifdef __x86_64__
+#if defined(__x86_64__) && !defined(__arm64ec__)
 static __inline__ void __DEFAULT_FN_ATTRS __movsq(
     unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
   __asm__ __volatile__("rep movsq"
@@ -324,7 +325,7 @@ static __inline__ void __DEFAULT_FN_ATTRS __stosq(unsigned __int64 *__dst,
 /*----------------------------------------------------------------------------*\
 |* Misc
 \*----------------------------------------------------------------------------*/
-#if defined(__i386__) || defined(__x86_64__)
+#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
 static __inline__ void __DEFAULT_FN_ATTRS __halt(void) {
   __asm__ volatile("hlt");
 }
@@ -339,7 +340,7 @@ static __inline__ void __DEFAULT_FN_ATTRS __nop(void) {
 /*----------------------------------------------------------------------------*\
 |* MS AArch64 specific
 \*----------------------------------------------------------------------------*/
-#if defined(__aarch64__)
+#if defined(__aarch64__) || defined(__arm64ec__)
 unsigned __int64 __getReg(int);
 long _InterlockedAdd(long volatile *Addend, long Value);
 __int64 _InterlockedAdd64(__int64 volatile *Addend, __int64 Value);
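One behavioral note worth flagging: _InterlockedAdd returns the resulting value after the addition, unlike x86's _InterlockedExchangeAdd, which returns the prior value. A usage sketch, assuming an MSVC-compatible AArch64 (or, after this patch, ARM64EC) toolchain:

#include <intrin.h>
#include <stdio.h>

int main(void) {
  long counter = 0;
  long now = _InterlockedAdd(&counter, 5); /* atomic add; returns new value */
  printf("%ld %ld\n", now, counter);       /* prints: 5 5 */
  return 0;
}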
@@ -383,7 +384,7 @@ void __cdecl __prefetch(void *);
 /*----------------------------------------------------------------------------*\
 |* Privileged intrinsics
 \*----------------------------------------------------------------------------*/
-#if defined(__i386__) || defined(__x86_64__)
+#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
 static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
 __readmsr(unsigned long __register) {
   // Loads the contents of a 64-bit model specific register (MSR) specified in
@@ -397,7 +398,6 @@ __readmsr(unsigned long __register) {
   __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
   return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
 }
-#endif

 static __inline__ unsigned __LPTRINT_TYPE__ __DEFAULT_FN_ATTRS __readcr3(void) {
   unsigned __LPTRINT_TYPE__ __cr3_val;
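The rdmsr instruction returns the MSR's low 32 bits in EAX and high 32 bits in EDX; the shift-and-or in __readmsr is the standard recombination. The same arithmetic, demonstrated on sample halves so it can run anywhere:

#include <stdio.h>

int main(void) {
  unsigned int edx = 0x12345678u; /* sample high half */
  unsigned int eax = 0x9abcdef0u; /* sample low half */
  unsigned long long msr =
      (((unsigned long long)edx) << 32) | (unsigned long long)eax;
  printf("0x%llx\n", msr); /* prints: 0x123456789abcdef0 */
  return 0;
}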
@@ -413,6 +413,7 @@ static __inline__ void __DEFAULT_FN_ATTRS
 __writecr3(unsigned __INTPTR_TYPE__ __cr3_val) {
   __asm__ ("mov {%0, %%cr3|cr3, %0}" : : "r"(__cr3_val) : "memory");
 }
+#endif

 #ifdef __cplusplus
 }