/****************************************************************************
* *
* cryptlib OS-Specific Defines Header File *
* Copyright Peter Gutmann 1992-2006 *
* *
****************************************************************************/
#ifndef _OSSPEC_DEFINED
#define _OSSPEC_DEFINED
/* To build the static .LIB under Win32, uncomment the following define
   (this is not recommended since the init/shutdown is no longer completely
   thread-safe). In theory it should be possible to detect the build of a
   DLL vs. a LIB with the _DLL define, which is set when the /MD
   (multithreaded DLL) option is used, however VC++ only defines _DLL when
   /MD is used *and* it's linked with the MT DLL runtime. If it's linked
   with the statically linked runtime, _DLL isn't defined, which would
   result in the unsafe LIB version being built as a DLL */
/* #define STATIC_LIB */
/* os_spec.h performs OS and compiler detection that's used by config.h, so
   this file must be included before config.h */
#ifdef _CONFIG_DEFINED
#error "os_spec.h must be included before config.h"
#endif /* _CONFIG_DEFINED */
/****************************************************************************
* *
* OS Detection *
* *
****************************************************************************/
/* Try and figure out if we're running under Windows and Win16/Win32/WinCE.
We have to jump through all sorts of hoops later on, not helped by the
fact that the method of detecting Windows at compile time changes with
different versions of Visual C (it's different for each of VC 2.0, 2.1,
4.0, and 4.1. It actually remains the same after 4.1) */
#if !defined( __WINDOWS__ ) && ( defined( _Windows ) || defined( _WINDOWS ) )
#define __WINDOWS__
#endif /* Win16 */
#if !defined( __WIN32__ ) && ( defined( WIN32 ) || defined( _WIN32 ) )
#ifndef __WINDOWS__
#define __WINDOWS__ /* Win32 or WinCE */
#endif /* __WINDOWS__ */
#ifdef _WIN32_WCE
#define __WINCE__
#else
#define __WIN32__
#endif /* WinCE vs. Win32 */
#endif /* Win32 or WinCE */
#if defined( __WINDOWS__ ) && \
!( defined( __WIN32__ ) || defined( __WINCE__ ) )
#define __WIN16__
#endif /* Windows without Win32 or WinCE */
/* If we're using a DOS compiler and it's not a 32-bit one, record this.
__MSDOS__ is predefined by a number of compilers, so we use __MSDOS16__
for stuff that's 16-bit DOS specific, and __MSDOS32__ for stuff that's
32-bit DOS specific */
#if defined( __MSDOS__ ) && !defined( __MSDOS32__ )
#define __MSDOS16__
#endif /* 16-bit DOS */
#if defined( __WATCOMC__ ) && defined( __DOS__ )
#ifndef __MSDOS__
#define __MSDOS__
#endif /* 16- or 32-bit DOS */
#if defined( __386__ ) && !defined( __MSDOS32__ )
#define __MSDOS32__
#endif /* 32-bit DOS */
#endif /* Watcom C under DOS */
/* Make the Tandem, Macintosh, AS/400, PalmOS, and VMS defines look a bit
more like the usual ANSI defines used to identify the other OS types */
#ifdef __TANDEM
#if defined( _OSS_TARGET )
#define __TANDEM_OSS__
#elif defined( _GUARDIAN_TARGET )
#define __TANDEM_NSK__
#else
#error "Can't determine Tandem OS target type (NSK or OSS)"
#endif /* Tandem OSS vs. NSK */
#endif /* Tandem */
#if defined( __MWERKS__ ) || defined( SYMANTEC_C ) || defined( __MRC__ )
#define __MAC__
#endif /* Macintosh */
#if defined( __OS400__ ) || defined( __ILEC400__ )
#define __AS400__
#endif /* AS/400 */
#ifdef __PALMSOURCE__
#define __PALMOS__
#endif /* Palm OS */
#ifdef __VMS
#define __VMS__
#endif /* VMS */
/* In some cases we're using a DOS or Windows system as a cross-development
   platform; if we are, we add extra defines to turn off some Windows-
   specific features */
#ifdef _SCCTK
#define __IBM4758__
#endif /* IBM 4758 cross-compiled under Windows */
/****************************************************************************
* *
* OS-Specific Compiler Configuration *
* *
****************************************************************************/
/* Visual C++ capabilities have changed somewhat over the years, the
following defines make explicit what we're testing for in a check of
_MSC_VER.
   Visual C++ 1.5                  _MSC_VER = 800
   Visual C++ 2.0                  _MSC_VER = 900
   Visual C++ 4.0                  _MSC_VER = 1000
   Visual C++ 5.0                  _MSC_VER = 1100
   Visual C++ 6.0                  _MSC_VER = 1200
   Visual C++ 7.0 (VC++.NET/2002)  _MSC_VER = 1300
   Visual C++ 7.1 (VC++.NET/2003)  _MSC_VER = 1310
   Visual C++ 8.0 (VC2005)         _MSC_VER = 1400
   Visual C++ 9.0 (VC2008)         _MSC_VER = 1500 */
#ifdef _MSC_VER
#define VC_16BIT( version ) ( version <= 800 )
#define VC_GE_2002( version ) ( version >= 1300 )
#define VC_LT_2005( version ) ( version < 1400 )
#define VC_GE_2005( version ) ( version >= 1400 )
#else
/* These aren't specifically required on non-VC++ systems, but some
   preprocessors get confused if they're used without being defined */
#define VC_16BIT( version ) 0
#define VC_GE_2002( version ) 0
#define VC_LT_2005( version ) 0
#define VC_GE_2005( version ) 0
#endif /* Visual C++ */
/* If we're compiling under VC++ with the maximum level of warnings, turn
off some of the more irritating warnings */
#if defined( _MSC_VER )
#if VC_16BIT( _MSC_VER )
#pragma warning( disable: 4135 )/* Conversion bet.diff.integral types */
#pragma warning( disable: 4761 )/* Integral size mismatch in argument */
#endif /* 16-bit VC++ */
/* Warning level 3 */
/* Note: 4018 means the compiler has to convert the signed value to
   unsigned to perform the comparison; this should be made explicit with
   a cast */
#pragma warning( disable: 4018 ) /* Comparing signed <-> unsigned value */
#pragma warning( disable: 4127 ) /* Conditional is constant: while( TRUE ) */
/* Warning level 4. The function <-> data pointer cast warnings are
   orthogonal and impossible to disable (they override the universal
   'void *' pointer type), the signed/unsigned and size warnings are
   compiler peeves of the same kind as the level 3 warnings (in particular
   the int <-> unsigned char/short warning isn't caused by dangerous
   truncations but by things like depositing a small value contained in
   an int into a byte array), and the struct initialisation warnings are
   standards extensions that the struct STATIC_INIT macros manage for us */
/* Note: Should remove 4244 (warn about truncation/data loss) and possibly
   4389 (variant on 4018) */
#pragma warning( disable: 4054 ) /* Cast from fn.ptr -> generic (data) ptr.*/
#pragma warning( disable: 4055 ) /* Cast from generic (data) ptr. -> fn.ptr.*/
#pragma warning( disable: 4057 ) /* char vs.unsigned char use */
#pragma warning( disable: 4204 ) /* Struct initialised with non-const value */
#pragma warning( disable: 4221 ) /* Struct initialised with addr.of auto.var */
#pragma warning( disable: 4244 ) /* int <-> unsigned char/short */
#pragma warning( disable: 4267 ) /* int <-> size_t */
#pragma warning( disable: 4305 ) /* long <-> size_t */
#pragma warning( disable: 4389 ) /* signed ==/!= unsigned compare */
/* Different versions of VC++ generate extra warnings at level 4 due to
   problems in VC++/Platform SDK headers */
#if VC_LT_2005( _MSC_VER )
#pragma warning( disable: 4201 )/* Nameless struct/union in SQL/networking hdrs*/
#endif /* VC++ 6.0 and 2003 */
#if VC_GE_2005( _MSC_VER )
#pragma warning( disable: 4201 )/* Nameless struct/union */
#pragma warning( disable: 4214 )/* bit field types other than int */
#endif /* VC++ 2005 or newer */
/* Code analysis generates even more warnings. C6011 is particularly
   problematic, it's issued whenever a pointer is dereferenced without
   first checking that it's not NULL */
#if defined( _MSC_VER ) && defined( _PREFAST_ )
#pragma warning( disable: 6011 )/* Dereferencing NULL pointer */
#endif /* VC++ with source analysis enabled */
/* Windows DDK fre builds treat warnings as errors and the DDK headers
have some problems so we have to disable additional warnings */
#ifdef WIN_DDK
#pragma warning( disable: 4242 )/* MS-only bit field type used */
#pragma warning( disable: 4731 )/* Frame pointer modified by inline asm */
#endif /* WIN_DDK */
/* gcc -Wall type warnings. The highest warning level generates large
   numbers of spurious warnings (including ones in VC++ headers), so it's
   best to only enable them for one-off test builds requiring manual
   checking for real errors */
#pragma warning( disable: 4100 ) /* Unreferenced parameter */
#endif /* Visual C++ */
/* VC++ 2005 implements the TR 24731 security extensions but doesn't yet
define __STDC_LIB_EXT1__, so if we detect this version of the compiler we
define it ourselves */
#if defined( _MSC_VER ) && VC_GE_2005( _MSC_VER ) && \
!defined( __STDC_LIB_EXT1__ )
#define __STDC_LIB_EXT1__
#endif /* VC++ 2005 without __STDC_LIB_EXT1__ defined */
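/* Example (an illustrative sketch only, not part of the original header):
   with __STDC_LIB_EXT1__ defined, calling code can select the TR 24731
   bounds-checked functions where they exist and fall back to the classic
   ANSI forms elsewhere. The buffer/source names are hypothetical */
#if 0	/* Illustrative example only */
#ifdef __STDC_LIB_EXT1__
	strcpy_s( buffer, bufferSize, source );	/* Bounds-checked copy */
#else
	strncpy( buffer, source, bufferSize - 1 );
	buffer[ bufferSize - 1 ] = '\0';	/* strncpy() may not terminate */
#endif /* __STDC_LIB_EXT1__ */
#endif /* 0 */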
/* The ability to modify warnings via the project file in BC++ 5.0x is
   completely broken; the only way to do this is via pragmas in the source
   code */
#if defined( __BORLANDC__ ) && ( __BORLANDC__ < 0x550 )
/* Spurious warnings to disable */
#pragma warn -aus /* Assigned but never used. This is
frequently misreported even when
the value is quite obviously used */
#pragma warn -csu /* Comparing signed/unsigned value */
#pragma warn -par /* Parameter is never used */
#pragma warn -sig /* Conversion may lose significant digits */
#pragma warn -ucp /* Signed/unsigned char assignment */
/* Useful warnings to enable */
#pragma warn +amb /* Ambiguous operators need parentheses */
#pragma warn +amp /* Superfluous & with function */
#pragma warn +asm /* Unknown assembler instruction */
#pragma warn +ccc /* Condition is always true/false */
#pragma warn +cln /* Constant is long */
#pragma warn +def /* Use of ident before definition */
#pragma warn +stv /* Structure passed by value */
#endif /* Broken BC++ 5.0x warning handling */
/* All Windows CE functions are Unicode-only, this was an attempt to clean
up the ASCII vs. Unicode kludges in Win32 but unfortunately was made just
before UTF8 took off. Because UTF8 allows everyone to keep using their
old ASCII stuff while being nominally Unicode-aware, it's unlikely that
any new Unicode-only systems will appear in the future, leaving WinCE's
Unicode-only API an orphan. The easiest way to handle this is to convert
all strings to ASCII/8 bit as they come in from the external cryptlib API
and convert them back to Unicode as required when they're passed to WinCE
OS functions. In other words Unicode is treated just like EBCDIC and
pushed out to the edges of cryptlib. This requires the minimum amount of
conversion and special-case handling internally */
#ifdef __WINCE__
#define UNICODE_CHARS
#endif /* WinCE */
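/* Example (an illustrative sketch only, not part of the original header):
   the edge conversion described above maps onto the standard Win32/WinCE
   MultiByteToWideChar()/WideCharToMultiByte() calls, e.g. widening an
   internal 8-bit string immediately before a Unicode-only OS call. The
   variable names are hypothetical */
#if 0	/* Illustrative example only */
	wchar_t wideName[ _MAX_PATH + 1 ];

	/* Convert the internal 8-bit string to Unicode for the OS call */
	if( MultiByteToWideChar( CP_ACP, 0, fileName, -1, wideName,
							 _MAX_PATH ) > 0 )
		hFile = CreateFile( wideName, GENERIC_READ, 0, NULL, OPEN_EXISTING,
							FILE_ATTRIBUTE_NORMAL, NULL );
#endif /* 0 */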
/* If we're compiling on the AS/400, make enums a fixed size rather than
using the variable-length values that IBM compilers default to, and force
strings into a read-only segment (by default they're writeable) */
#ifdef __AS400__
#pragma enumsize( 4 )
#pragma strings( readonly )
#define EBCDIC_CHARS
#endif /* AS/400 */
/* If we're compiling under MVS or VM/CMS, make enums a fixed size rather
than using the variable-length values that IBM compilers default to */
#if defined( __MVS__ ) || defined( __VMCMS__ )
#pragma enum( 4 )
#define USE_ETOA /* Use built-in ASCII <-> EBCDIC conversion */
#define EBCDIC_CHARS
#endif /* __MVS__ || __VMCMS__ */
/* If we're compiling under QNX, make enums a fixed size rather than using
the variable-length values that the Watcom compiler defaults to */
#if defined( __QNX__ ) && defined( __WATCOMC__ )
#pragma enum int
#endif /* QNX and Watcom C */
/* A few rare operations are word-size-dependent, which we detect via
   limits.h */
#include <limits.h>
#if INT_MAX <= 32768L
#define SYSTEM_16BIT
#elif ULONG_MAX > 0xFFFFFFFFUL
#define SYSTEM_64BIT
#else
#define SYSTEM_32BIT
#endif /* 16- vs.32- vs.64-bit system */
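/* Example (an illustrative sketch only, not part of the original header):
   the word-size defines let the few size-dependent operations pick an
   appropriate type, e.g. a full machine word for masking operations. The
   MACHINE_WORD name is hypothetical */
#if 0	/* Illustrative example only */
#if defined( SYSTEM_64BIT )
	typedef unsigned long MACHINE_WORD;	/* 64-bit machine word */
#else
	typedef unsigned int MACHINE_WORD;	/* 16/32-bit machine word */
#endif /* SYSTEM_64BIT */
#endif /* 0 */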
/* Useful data types. Newer compilers provide a 'bool' datatype via
stdbool.h, but in a fit of braindamage generally make this a char instead
of an int. While Microsoft's use of char for BOOLEAN in the early 1980s
with 8/16-bit 8086s and 128K of RAM makes sense, it's a pretty stupid
choice for 32- or 64-bit CPUs because alignment issues mean that it'll
generally still require 32 or 64 bits of storage (except for special
cases like an array of bool), but then the difficulty or even inability
of many CPUs and/or architectures in performing byte-level accesses means
that in order to update a boolean the system has to fetch a full machine
word, mask out the byte data, or/and in the value, and write the word
back out. So 'bool' = 'char' combines most of the worst features of both
char and int. It also leads to really hard-to-find implementation bugs
due to the fact that '(bool) int = true' produces different results to
'*(bool *) intptr = true', something that was resolved years ago in enums
without causing such breakage.
Because of this we avoid the use of bool and just define it to int */
typedef unsigned char BYTE;
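/* A minimal sketch of the int-based boolean described above (illustrative
   only, the exact definition isn't shown in this excerpt). Under Win32
   BOOLEAN is already a predefined type, so it would have to be #define'd
   rather than typedef'd there */
#if 0	/* Illustrative example only */
#if defined( __WIN32__ ) || defined( __WINCE__ )
	#define BOOLEAN		int		/* BOOLEAN is a predefined type in Win32 */
#else
	typedef int			BOOLEAN;
#endif /* Win32 vs. everything else */
#endif /* 0 */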