Re: CryptBinaryToString and CryptStringToBinary
"Carl Forsman" wrote:
why this code does not work?
I used the CryptStringToBinary function to save the Base64
string to a binary image file (test.png)
http://www.oniva.com/upload/1356/test.cpp
I don't know why it does not work — it won't even compile. For
starters, you allocate an array of bytes with a size of 1 byte. I
doubt that this array will be sufficient to hold the result
of the conversion. I tried this code and it works for me:
#include <vector>
#include <algorithm>
#include <fstream>
#include <iterator>
int _tmain(int /*argc*/, _TCHAR* /*argv*/[])
{
LPCTSTR pszBase64 = _T("<base64 string goes here>");
// calculate the length of the buffer needed
//
DWORD cbBinary = 0;
BOOL bRet = CryptStringToBinary(
pszBase64,
_tcslen(pszBase64),
CRYPT_STRING_BASE64,
NULL,
&cbBinary,
NULL,
NULL);
if(bRet)
{
vector<BYTE> vecBin(cbBinary);
// decode base64 to binary
//
bRet = CryptStringToBinary(
pszBase64,
_tcslen(pszBase64),
CRYPT_STRING_BASE64,
&vecBin[0],
&cbBinary,
NULL,
NULL);
if(bRet)
{
// save decoded data
//
ofstream ofsPng(_T("test.png"),
ios_base::out | ios_base::binary);
if(!ofsPng.bad())
{
copy(vecBin.begin(), vecBin.end(),
ostream_iterator<BYTE>(ofsPng));
}
}
}
return 0;
}