The code below is compiled against ImageMagick with Q32 and HDRI enabled.
The values all round to 1 as a float, but when they are scaled to the maximum QuantumRange, the error leads to an off-by-one error: e.g. the quantum-scaled value for the red channel, created by doing 'PixelSetColor(pixel_wand, "red");', is 4294967296.0 rather than 4294967295.0.

color 'red' 1.000000 bits are: 3ff0000000100000
color 'rgb(255, 0, 0)' 1.000000 bits are: 3fefffffe0200000
color '#ff0000' 1.000000 bits are: 3ff0000000100000
unity 1.000000 bits are: 3ff0000000000000
Version is ImageMagick 6.8.9-7 Q32 x86_64 2014-08-08 http://www.imagemagick.org
Code: Select all
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <malloc.h>
#include <wand/MagickWand.h>
/*
 * Print the raw bytes of an object as hexadecimal, highest-addressed
 * byte first (on a little-endian machine this shows a double in its
 * conventional big-endian bit-pattern order).
 *
 * c: pointer to the object; if NULL, nothing is printed.
 * n: size of the object in bytes.
 */
static void printBits(const void *c, size_t n)
{
const unsigned char *t = c;
if (c == NULL)
return;
while (n > 0) {
--n;
printf("%02x", t[n]);
}
printf("\n");
}
int main(int argc,char **argv) {
MagickWandGenesis();
size_t version_number;
char * versionString;
double unity = 1.0;
double red;
char * color;
int i;
PixelWand *pixel_wand = NULL;
char *colors[] = {"red", "rgb(255, 0, 0)", "#ff0000"};
pixel_wand = NewPixelWand();
for (i=0; i<3 ; i++) {
color = colors[i];
PixelSetColor(pixel_wand, color);
red = PixelGetRed(pixel_wand);
printf("color '%s' %f bits are: ", color, red);
printBits(&red, sizeof red);
}
printf("unity %f bits are: ", unity);
printBits(&unity, sizeof unity);
versionString = (char *)MagickGetVersion(&version_number);
printf("Version is %s \n", versionString);
MagickWandTerminus();
return(0);
}