This is the code for unravel, through which I want to decode a Huffman-coded compressed image using its link table.
#include "mex.h"

void unrav(uint16_T *hx, double *link, double *x, double xsz, int hxsz)
{
    int i = 15;
    int j = 0, k = 0, n = 0;

    while (xsz - k) {
        if (*(link + n) > 0) {                 /* internal node: branch on next bit */
            if ((*(hx + j) >> i) & 0x0001)
                n = *(link + n);
            else
                n = *(link + n) - 1;

            if (i) i--; else { j++; i = 15; }  /* advance to next bit / next code word */

            if (j > hxsz)
                mexErrMsgTxt("Out of code bits ???");
        }
        else {                                  /* leaf node: emit symbol, restart at root */
            *(x + k++) = -*(link + n);
            n = 0;
        }
    }

    if (k == xsz - 1)                           /* flush the final symbol */
        *(x + k++) = -*(link + n);
}

void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    double *link, *x, xsz;
    uint16_T *hx;
    int hxsz;

    if (nrhs != 3)
        mexErrMsgTxt("Three inputs required.");
    else if (nlhs > 1)
        mexErrMSgTxt("Too many output arguments.");

    if (!mxIsDouble(prhs[2]) || mxIsComplex(prhs[2]) ||
        mxGetN(prhs[2]) * mxGetM(prhs[2]) != 1)
        mexErrMsgTxt("Input size must be a scalar.");

    hx   = (uint16_T *) mxGetData(prhs[0]);
    link = (double *)   mxGetData(prhs[1]);
    xsz  = mxGetScalar(prhs[2]);
    hxsz = mxGetM(prhs[0]);

    plhs[0] = mxCreateDoubleMatrix(xsz, 1, mxREAL);
    x = (double *) mxGetData(plhs[0]);

    unrav(hx, link, x, xsz, hxsz);
}
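For reference, here is a minimal standalone sketch of the same decoding idea, with the MEX plumbing removed so it can be built with any plain C compiler. Everything in it is hypothetical illustration data, not taken from my program: the function name unravel_demo, the two-symbol link table, and the 0x6000 code word are made up. The point it shows is how the link table drives decoding: a positive entry marks an internal node whose children are link[n] (bit 1) and link[n] - 1 (bit 0), an entry <= 0 stores the negated symbol value, and bits are consumed MSB-first from 16-bit code words.

#include <stdio.h>

/* Hypothetical, self-contained sketch of the link-table walk; names and data
 * are illustrative only. */
static void unravel_demo(const unsigned short *hx, const double *link,
                         double *x, int xsz, int hxsz)
{
    int i = 15, j = 0, k = 0, n = 0;   /* bit index, word index, output count, tree node */

    while (k < xsz && j < hxsz) {
        if (link[n] > 0) {             /* internal node: branch on the next bit */
            n = ((hx[j] >> i) & 1) ? (int)link[n] : (int)link[n] - 1;
            if (i) i--; else { j++; i = 15; }
        } else {                       /* leaf: emit the stored symbol, restart at root */
            x[k++] = -link[n];
            n = 0;
        }
    }
}

int main(void)
{
    /* Two-symbol tree: from the root (node 0), bit 0 -> node 1 ('A'), bit 1 -> node 2 ('B'). */
    double link[] = { 2.0, -65.0, -66.0 };
    unsigned short hx[] = { 0x6000 };  /* bit pattern 0,1,1,0 packed MSB-first */
    double x[4] = { 0.0 };
    int k;

    unravel_demo(hx, link, x, 4, 1);
    for (k = 0; k < 4; k++)
        putchar((int)x[k]);            /* prints ABBA */
    putchar('\n');
    return 0;
}

Compiling this sketch with a C compiler and running it prints ABBA, which matches decoding the bits 0,1,1,0 against that tiny table.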
This is my unravel program, but when I compile it with mex in MATLAB R2012a, it fails with this error:
Undefined symbols for architecture x86_64:
  "_mexErrMSgTxt", referenced from:
      _mexFunction in unrav.o
ld: symbol(s) not found for architecture x86_64
collect2: ld returned 1 exit status
mex: link of ' "unrav.mexmaci64"' failed.
I am using Mac OS X Lion 10.7.2 and I have Xcode 4.2. I also tried to compile the file directly in Xcode, but it cannot find the mex.h header there.
I believe the program itself is fine, but I don't know what to do. Can anybody help me?