/******************************************/
/* integer.c 0.0.0 (1999-Oct-17-Sun)      */
/* Adam M. Costello                       */
/******************************************/

/* Implementation of the interface defined by integer.h 0.0.*. */

/* This is ANSI C code. */


#include "integer.h"

#include <assert.h>
#include <limits.h>


/* Encodes n as two bytes, most significant byte first (big-endian). */

void encode_uint16(unsigned char *bytes, uint16 n)
{
#if UINT16_MAX > 0xffff
  assert(n <= 0xffff);
#endif
  bytes[1] = n & 0xff;
  bytes[0] = n >> 8;
}


/* Encodes n as four bytes, most significant byte first (big-endian). */

void encode_uint32(unsigned char *bytes, uint32 n)
{
#if UINT32_MAX > 0xffffffff
  assert(n <= 0xffffffff);
#endif
  bytes[3] = n & 0xff;  n >>= 8;
  bytes[2] = n & 0xff;  n >>= 8;
  bytes[1] = n & 0xff;
  bytes[0] = n >> 8;
}


/* Decodes two big-endian bytes into a uint16. */

uint16 decode_uint16(const unsigned char *bytes)
{
#if UCHAR_MAX > 0xff
  assert(bytes[0] <= 0xff);
  assert(bytes[1] <= 0xff);
#endif
  return (uint16) bytes[0] << 8 | bytes[1];
}


/* Decodes four big-endian bytes into a uint32. */

uint32 decode_uint32(const unsigned char *bytes)
{
#if UCHAR_MAX > 0xff
  assert(bytes[0] <= 0xff);
  assert(bytes[1] <= 0xff);
  assert(bytes[2] <= 0xff);
  assert(bytes[3] <= 0xff);
#endif
  return (((uint32) bytes[0] << 8 | bytes[1]) << 8 | bytes[2]) << 8 | bytes[3];
}
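
/* A minimal usage sketch, not part of the original file: it assumes the   */
/* uint16/uint32 typedefs come from integer.h, and simply round-trips a    */
/* value through encode_uint32()/decode_uint32().  The guard macro name    */
/* INTEGER_EXAMPLE_MAIN is hypothetical; compile with                      */
/* -DINTEGER_EXAMPLE_MAIN to try it, so it stays out of normal builds.     */

#ifdef INTEGER_EXAMPLE_MAIN

#include <stdio.h>

int main(void)
{
  unsigned char buf[4];
  uint32 n = 0x12345678, m;

  encode_uint32(buf, n);   /* buf now holds 12 34 56 78 (big-endian) */
  m = decode_uint32(buf);

  printf("%02x %02x %02x %02x -> %lx\n",
         buf[0], buf[1], buf[2], buf[3], (unsigned long) m);
  return 0;
}

#endif /* INTEGER_EXAMPLE_MAIN */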