ConvertUTF.h File Reference
#include <stdint.h>

Macros

#define UNI_REPLACEMENT_CHAR   (UTF32)0x0000FFFD
 
#define UNI_MAX_BMP   (UTF32)0x0000FFFF
 
#define UNI_MAX_UTF16   (UTF32)0x0010FFFF
 
#define UNI_MAX_UTF32   (UTF32)0x7FFFFFFF
 
#define UNI_MAX_LEGAL_UTF32   (UTF32)0x0010FFFF
 

Typedefs

typedef uint32_t UTF32
 
typedef uint16_t UTF16
 
typedef unsigned char UTF8
 
typedef unsigned char Boolean
 

Enumerations

enum  ConversionResult { conversionOK, sourceExhausted, targetExhausted, sourceIllegal }
 
enum  ConversionFlags { strictConversion = 0, lenientConversion }
 

Functions

ConversionResult ConvertUTF8toUTF16 (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags)
 
ConversionResult ConvertUTF16toUTF8 (const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags)
 
ConversionResult ConvertUTF8toUTF32 (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)
 
ConversionResult ConvertUTF32toUTF8 (const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags)
 
ConversionResult ConvertUTF16toUTF32 (const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)
 
ConversionResult ConvertUTF32toUTF16 (const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags)
 
Boolean isLegalUTF8Sequence (const UTF8 *source, const UTF8 *sourceEnd)
 

Macro Definition Documentation

◆ UNI_MAX_BMP

#define UNI_MAX_BMP   (UTF32)0x0000FFFF

◆ UNI_MAX_LEGAL_UTF32

#define UNI_MAX_LEGAL_UTF32   (UTF32)0x0010FFFF

◆ UNI_MAX_UTF16

#define UNI_MAX_UTF16   (UTF32)0x0010FFFF

◆ UNI_MAX_UTF32

#define UNI_MAX_UTF32   (UTF32)0x7FFFFFFF

◆ UNI_REPLACEMENT_CHAR

#define UNI_REPLACEMENT_CHAR   (UTF32)0x0000FFFD
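
These limits are plain UTF32 constants, so they can be used directly in range checks. A minimal sketch of such a check follows; the sanitize_code_point helper and the hard-coded surrogate bounds are illustrative and not part of this header.

#include "ConvertUTF.h"

/* Clamp an arbitrary 32-bit value to a legal Unicode scalar value.
 * Values above UNI_MAX_LEGAL_UTF32 and the UTF-16 surrogate range
 * 0xD800..0xDFFF are not legal scalar values, so UNI_REPLACEMENT_CHAR
 * is substituted for them.  The surrogate bounds are written out here
 * because this header does not expose them as macros. */
static UTF32 sanitize_code_point(UTF32 c) {
    if (c > UNI_MAX_LEGAL_UTF32)
        return UNI_REPLACEMENT_CHAR;
    if (c >= 0xD800 && c <= 0xDFFF)
        return UNI_REPLACEMENT_CHAR;
    return c;
}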

Typedef Documentation

◆ Boolean

typedef unsigned char Boolean

◆ UTF16

typedef uint16_t UTF16

◆ UTF32

typedef uint32_t UTF32

◆ UTF8

typedef unsigned char UTF8

Enumeration Type Documentation

◆ ConversionFlags

Enumerator
strictConversion 
lenientConversion 

◆ ConversionResult

Enumerator
conversionOK 
sourceExhausted 
targetExhausted 
sourceIllegal 
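
The enumerator names follow the reference ConvertUTF implementation: conversionOK means the conversion completed, sourceExhausted that the input ended in the middle of a multi-unit sequence, targetExhausted that the output buffer was too small, and sourceIllegal that the input contained an ill-formed sequence. A minimal sketch mapping results to messages, assuming those meanings:

#include "ConvertUTF.h"

/* Turn a ConversionResult into a diagnostic string.  The wording reflects
 * the usual reading of the enumerator names; consult ConvertUTF.c for the
 * authoritative behavior. */
static const char *conversion_result_str(ConversionResult r) {
    switch (r) {
    case conversionOK:    return "conversion successful";
    case sourceExhausted: return "source ended in the middle of a sequence";
    case targetExhausted: return "target buffer too small";
    case sourceIllegal:   return "source contains an illegal sequence";
    }
    return "unknown result";
}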

Function Documentation

◆ ConvertUTF16toUTF32()

ConversionResult ConvertUTF16toUTF32 (const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)

◆ ConvertUTF16toUTF8()

ConversionResult ConvertUTF16toUTF8 (const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags)
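
A minimal usage sketch, assuming the conventional calling pattern of this API in which sourceStart and targetStart are cursor pointers that the function advances past the last unit consumed and written:

#include <stdio.h>
#include <stdlib.h>
#include "ConvertUTF.h"

int main(void) {
    /* "hello" with an accented e, as UTF-16 code units. */
    const UTF16 src[] = { 0x0068, 0x00E9, 0x006C, 0x006C, 0x006F };
    const size_t srcLen = sizeof(src) / sizeof(src[0]);

    /* A single UTF-16 code unit expands to at most 3 UTF-8 bytes (a
     * surrogate pair, 2 units, expands to 4), so 3 bytes per unit is a
     * safe upper bound; +1 leaves room for a terminating NUL. */
    UTF8 *out = malloc(3 * srcLen + 1);
    if (!out) return 1;

    const UTF16 *srcCursor = src;
    UTF8 *dstCursor = out;
    ConversionResult res = ConvertUTF16toUTF8(&srcCursor, src + srcLen,
                                              &dstCursor, out + 3 * srcLen,
                                              strictConversion);
    if (res == conversionOK) {
        *dstCursor = 0;   /* dstCursor now points just past the last byte written */
        printf("UTF-8: %s (%ld bytes)\n", (char *)out, (long)(dstCursor - out));
    }
    free(out);
    return res == conversionOK ? 0 : 1;
}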

◆ ConvertUTF32toUTF16()

ConversionResult ConvertUTF32toUTF16 (const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags)

◆ ConvertUTF32toUTF8()

ConversionResult ConvertUTF32toUTF8 (const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags)

◆ ConvertUTF8toUTF16()

ConversionResult ConvertUTF8toUTF16 (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags)
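
A minimal sketch of UTF-8 to UTF-16 conversion. The input bytes and the one-code-unit-per-byte sizing rule are illustrative assumptions (a 4-byte UTF-8 sequence yields only two UTF-16 code units, so one unit per input byte is always enough):

#include <stdio.h>
#include "ConvertUTF.h"

int main(void) {
    /* UTF-8 input spelled out as bytes: GREEK SMALL LETTER PI, space, "3.14". */
    const UTF8 src[] = { 0xCF, 0x80, ' ', '3', '.', '1', '4' };
    const size_t srcLen = sizeof(src);

    UTF16 out[sizeof(src)];   /* one UTF-16 unit per UTF-8 byte is a safe bound */

    const UTF8 *srcCursor = src;
    UTF16 *dstCursor = out;
    ConversionResult res = ConvertUTF8toUTF16(&srcCursor, src + srcLen,
                                              &dstCursor, out + srcLen,
                                              strictConversion);

    /* On return both cursors have been advanced: srcCursor points just past
     * the last byte consumed, dstCursor just past the last unit written. */
    if (res == conversionOK)
        printf("wrote %ld UTF-16 code units\n", (long)(dstCursor - out));
    else
        printf("conversion failed, result=%d\n", (int)res);
    return res == conversionOK ? 0 : 1;
}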

◆ ConvertUTF8toUTF32()

ConversionResult ConvertUTF8toUTF32 (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags)
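
The flags parameter is where the two ConversionFlags values differ. Below is a sketch contrasting them on deliberately ill-formed input; the expectation that lenientConversion substitutes UNI_REPLACEMENT_CHAR and continues is taken from the reference implementation and should be verified against ConvertUTF.c:

#include <stdio.h>
#include "ConvertUTF.h"

int main(void) {
    /* 'A', then a lone continuation byte 0x80 (ill-formed UTF-8), then 'B'. */
    const UTF8 bad[] = { 'A', 0x80, 'B' };
    UTF32 out[sizeof(bad)];   /* one code point per input byte is a safe bound */

    const UTF8 *s = bad;
    UTF32 *d = out;
    ConversionResult strictRes = ConvertUTF8toUTF32(&s, bad + sizeof(bad),
                                                    &d, out + sizeof(bad),
                                                    strictConversion);
    printf("strict:  result=%d, code points written=%ld\n",
           (int)strictRes, (long)(d - out));

    s = bad;
    d = out;
    ConversionResult lenientRes = ConvertUTF8toUTF32(&s, bad + sizeof(bad),
                                                     &d, out + sizeof(bad),
                                                     lenientConversion);
    printf("lenient: result=%d, code points written=%ld\n",
           (int)lenientRes, (long)(d - out));
    return 0;
}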

◆ isLegalUTF8Sequence()

Boolean isLegalUTF8Sequence (const UTF8 *source, const UTF8 *sourceEnd)
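
A minimal sketch of validating a single sequence. Assumption: isLegalUTF8Sequence checks the one UTF-8 sequence beginning at source and reports whether it is well formed and complete before sourceEnd; it does not scan an entire buffer.

#include <stdio.h>
#include "ConvertUTF.h"

int main(void) {
    const UTF8 ok[]  = { 0xE2, 0x82, 0xAC };   /* U+20AC EURO SIGN, a complete 3-byte sequence */
    const UTF8 bad[] = { 0xE2, 0x82 };         /* the same sequence, truncated */

    printf("complete sequence:  %d\n", (int)isLegalUTF8Sequence(ok, ok + sizeof(ok)));
    printf("truncated sequence: %d\n", (int)isLegalUTF8Sequence(bad, bad + sizeof(bad)));
    return 0;
}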