Chainlink: Contract cannot retrieve large-response data type from external adapter - chainlink

I tried to use the large-response type to fulfil the request, but the value somehow does not show up in my contract. The job runs to completion on my Chainlink node, yet it never changes the status value — it stays 0x. So I wonder: is my contract or my job spec wrong?
This is my contract
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.7;

import "@chainlink/contracts/src/v0.8/ChainlinkClient.sol";

/**
 * @title APIConsumer
 * @notice Requests a large (dynamic bytes) response from a Chainlink node.
 * @dev A bytes answer can only be fulfilled by an Operator.sol deployment;
 *      Oracle.sol cannot deliver dynamic-length data to the callback.
 */
contract APIConsumer is ChainlinkClient {
    using Chainlink for Chainlink.Request;

    bytes public status;          // raw bytes answer written by fulfill()
    string public statusString;   // the same answer reinterpreted as a string

    address private oracle;       // node (operator) contract the request is sent to
    bytes32 private jobId;        // unused; the job id is passed per request instead
    uint256 private fee;          // LINK payment attached to each request

    event RequestFulfilled(bytes32 indexed requestId, bytes indexed data);

    /**
     * Network: Kovan
     * Oracle: 0xc57B33452b4F7BB189bB5AfaE9cc4aBa1f7a4FD8 (Chainlink Devrel Node)
     * Job ID: d5270d1c311941d0b08bead21fea7747
     * Fee: 0.1 LINK
     */
    constructor() {
        setPublicChainlinkToken();
        oracle = 0xDFE5e6C5C624724384b55719b7da79d3EbB60057;
        fee = 1 * 10 ** 18; // (Varies by network and job)
    }

    /**
     * @notice Builds and sends a direct request for tracking data.
     * @param _jobId the job id string from the node's TOML spec (no dashes)
     * @return requestId the id assigned to this Chainlink request
     */
    function requesData(string memory _jobId) public returns (bytes32 requestId)
    {
        Chainlink.Request memory request = buildChainlinkRequest(
            stringToBytes32(_jobId),
            address(this),
            this.fulfill.selector
        );
        // Parameter forwarded to the external adapter via the CBOR payload.
        request.add("trackingNo", "HF123456789DL");
        return sendChainlinkRequestTo(oracle, request, fee);
    }

    /**
     * @notice Receives the response as dynamic-length bytes.
     * @dev recordChainlinkFulfillment verifies the caller is the oracle the
     *      request was sent to and that the request id is pending.
     */
    function fulfill(bytes32 _requestId, bytes memory bytesData)
        public
        recordChainlinkFulfillment(_requestId)
    {
        emit RequestFulfilled(_requestId, bytesData);
        status = bytesData;
        statusString = string(status);
    }

    // function withdrawLink() external {} - Implement a withdraw function to avoid locking your LINK in the contract

    /// @notice Returns the zero-terminated string prefix stored in a bytes32.
    function bytes32ToString(bytes32 _bytes32)
        public
        pure
        returns (string memory)
    {
        // Measure content length: bytes up to the first zero byte (max 32).
        uint8 i = 0;
        while (i < 32 && _bytes32[i] != 0) {
            i++;
        }
        bytes memory bytesArray = new bytes(i);
        // Copy exactly the measured prefix; no need to re-test the terminator.
        for (uint8 j = 0; j < i; j++) {
            bytesArray[j] = _bytes32[j];
        }
        return string(bytesArray);
    }

    /// @notice Packs the first 32 bytes of a string into a bytes32 value.
    /// @dev Strings longer than 32 bytes are silently truncated.
    function stringToBytes32(string memory source)
        public
        pure
        returns (bytes32 result)
    {
        bytes memory tempEmptyStringTest = bytes(source);
        if (tempEmptyStringTest.length == 0) {
            return 0x0;
        }
        assembly {
            // solhint-disable-line no-inline-assembly
            result := mload(add(source, 32))
        }
    }
}
This is my job spec.
type = "directrequest"
schemaVersion = 1
name = "Halffin-Data-EA-Create-Tracking8"
externalJobID = "3f706a6b-efdd-44ac-8167-f880a6ca63ac"
maxTaskDuration = "0s"
contractAddress = "0xDFE5e6C5C624724384b55719b7da79d3EbB60057"
minIncomingConfirmations = 0
observationSource = """
decode_log [type=ethabidecodelog
abi="OracleRequest(bytes32 indexed specId, address requester, bytes32 requestId, uint256 payment, address callbackAddr, bytes4 callbackFunctionId, uint256 cancelExpiration, uint256 dataVersion, bytes data)"
data="$(jobRun.logData)"
topics="$(jobRun.logTopics)"]
decode_cbor [type=cborparse data="$(decode_log.data)"]
fetch [type=bridge name="halffin-data" requestData="{\\"id\\": $(jobSpec.externalJobID), \\"data\\": { \\"trackingNo\\": $(decode_cbor.trackingNo)}}"]
parse [type=jsonparse path="data,tracking,slug" data="$(fetch)"]
encode_data [type=ethabiencode abi="(bytes value)" data="{ \\"value\\": $(parse) }"]
encode_tx [type=ethabiencode
abi="fulfillOracleRequest(bytes32 requestId, uint256 payment, address callbackAddress, bytes4 callbackFunctionId, uint256 expiration, bytes data)"
data="{\\"requestId\\": $(decode_log.requestId), \\"payment\\": $(decode_log.payment), \\"callbackAddress\\": $(decode_log.callbackAddr), \\"callbackFunctionId\\": $(decode_log.callbackFunctionId), \\"expiration\\": $(decode_log.cancelExpiration), \\"data\\": $(encode_data)}"
]
submit_tx [type=ethtx to="0xDFE5e6C5C624724384b55719b7da79d3EbB60057" data="$(encode_tx)"]
decode_log -> decode_cbor -> fetch -> parse -> encode_data -> encode_tx -> submit_tx
"""
These are logs from completed job
fetch
"{\"jobRunID\":\"3f706a6b-efdd-44ac-8167-f880a6ca63ac\",\"data\":{\"tracking\":{\"id\":2,\"slug\":\"halffin-logistics\",\"tracking_number\":\"HF123456789DL\"},\"result\":null},\"result\":null}"
name: halffin-data
requestData: {"id": $(jobSpec.externalJobID), "data": { "trackingNo": $(decode_cbor.trackingNo)}}
parse
"halffin-logistics"
path: data,tracking,slug
data: $(fetch)
encode_data
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001168616c6666696e2d6c6f67697374696373000000000000000000000000000000"
abi: (bytes value)
data: { "value": $(parse) }
encode_tx
"0x728853aa63b008d8b908b2d431b9ea703268ba10e60ab40603941ec91a2955278f219c1e0000000000000000000000000000000000000000000000000de0b6b3a7640000000000000000000000000000136e61cdeae727926aa768574e2f979c724d6cad7c1de7e1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000062586d6800000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001168616c6666696e2d6c6f67697374696373000000000000000000000000000000"
abi: fulfillOracleRequest(bytes32 requestId, uint256 payment, address callbackAddress, bytes4 callbackFunctionId, uint256 expiration, bytes data)
data: {"requestId": $(decode_log.requestId), "payment": $(decode_log.payment), "callbackAddress": $(decode_log.callbackAddr), "callbackFunctionId": $(decode_log.callbackFunctionId), "expiration": $(decode_log.cancelExpiration), "data": $(encode_data)}
submit_tx
"{\"logs\": [], \"root\": \"0x\", \"status\": \"0x0\", \"gasUsed\": \"0x5c49\", \"blockHash\": \"0x5b55db677b2776bb637fdb9ba2077e7db21de8e8beba60fb79e1384ae51f39a8\", \"logsBloom\": \"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\", \"blockNumber\": \"0x1d94cd5\", \"contractAddress\": \"0x0000000000000000000000000000000000000000\", \"transactionHash\": \"0xc5371c5ce1692b39835e44fb61c47d2deeeb509f71e32fccb0c5d42eec3be443\", \"transactionIndex\": \"0x1\", \"cumulativeGasUsed\": \"0x39418\"}"
to: 0xDFE5e6C5C624724384b55719b7da79d3EbB60057
data: $(encode_tx)
In case, you might wonder why I use the large-response data type. I followed this link

The contractAddress specified in the (Get > Large bytes) must be pointed at operator.sol not oracle.sol. (Get > Uint256) is pointed at oracle.sol. Oracle.sol is not meant to handle Large bytes, nor multi-variable Uint256 output.
Here is the code to deploy the current version of operator.sol on remix to then obtain the correct contractAddress to the associated job-spec within the chainlink node GUI.
// SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
import "@chainlink/contracts/src/v0.7/Operator.sol";

Related

Chainlink Node Job: requestData: while interpolating variables in JSON payload: invalid character '_' after top-level value: bad input for task

my fetch statement in my job is causing an error.
Log of the job run:
fetch bridge
requestData: while interpolating variables in JSON payload: invalid character '_' after top-level value: bad input for task
name: graphql
requestData: {"jobRunId": $(jobSpec.externalJobId), "data": {"query": "{ reserve (id: $(decode_cbor.reserveId)) { paramsHistory(orderby: timestamp, orderDirection: desc, where: { timestamp_gt: $(decode_cbor.timestamp) }) { liquidityRate, } } }", "variables": null, "graphqlEndpoint": $(decode_cbor.graphqlEndpoint)}}
TOML Job:
type = "directrequest"
schemaVersion = 1
name = "Get 30-day Average Liquidity Rate 2"
contractAddress = "0x6eFc5873cB4eB9CE5024E9DeBA2139Aab235D84C"
maxTaskDuration = "0s"
observationSource = """
decode_log [type="ethabidecodelog"
abi="OracleRequest(bytes32 indexed specId, address requester, bytes32 requestId, uint256 payment, address callbackAddr, bytes4 callbackFunctionId, uint256 cancelExpiration, uint256 dataVersion, bytes data)"
data="$(jobRun.logData)"
topics="$(jobRun.logTopics)"]
decode_cbor [type="cborparse" data="$(decode_log.data)"]
fetch [type="bridge" name="graphql" requestData="{\\"jobRunId\\": $(jobSpec.externalJobId), \\"data\\": {\\"query\\": \\"
{
reserve (id: $(decode_cbor.reserveId)) {
paramsHistory(orderby: timestamp, orderDirection: desc, where: { timestamp_gt: $(decode_cbor.timestamp) }) {
liquidityRate,
}
}
}\\", \\"variables\\": null, \\"graphqlEndpoint\\": $(decode_cbor.graphqlEndpoint)}}"]
parse [type="jsonparse" path="data,liquidityRate" data="$(fetch)"]
mean [type="mean" values="$(parse)" precision=0]
encode_data [type="ethabiencode" abi="(uint256 value)" data="{ \\"value\\": $(mean) }"]
encode_tx [type="ethabiencode"
abi="fulfillOracleRequest(bytes32 requestId, uint256 payment, address callbackAddress, bytes4 callbackFunctionId, uint256 expiration, bytes32 data)"
data="{\\"requestId\\": $(decode_log.requestId), \\"payment\\": $(decode_log.payment), \\"callbackAddress\\": $(decode_log.callbackAddr), \\"callbackFunctionId\\": $(decode_log.callbackFunctionId), \\"expiration\\": $(decode_log.cancelExpiration), \\"data\\": $(encode_data)}"
]
submit_tx [type="ethtx" to="0x6eFc5873cB4eB9CE5024E9DeBA2139Aab235D84C" data="$(encode_tx)"]
decode_log -> decode_cbor -> fetch -> parse -> mean -> encode_data -> encode_tx -> submit_tx
"""
It seems the requestData part of my fetch statement is hitting an underscore (_) and rejecting it. I am not sure how to escape or format _ within the job.
Any help would be appreciated.

How do I configure the perform upkeep to know which function to call in chainlink keepers?

I want the Chainlink Keeper to call a function based on some paramters, so my checkUpkeep function is as follows:
function checkUpkeep(
bytes calldata checkData
) external view override returns (
bool upkeepNeeded, bytes memory performData
) {
if (getPrice() == roundMoonPrice[coinRound]) {
upkeepNeeded = true;
return (true, performData); // should perform upkeep for the setTime() function
} if (roundWinningIndex[coinRound].length != 0) {
upkeepNeeded = true;
return (true, performData); // should perform upkeep for a withdrawal function
}
How do I configure the perform upkeep to know which function to call? How do I make sure it calls the correct function?
There are a number of ways you can have the keepers call the correct function. One way would be to add information in your performData designating which function to call.
The performUpkeep looks like this:
// Keeper-invoked entry point: inspect performData (or contract state) and
// dispatch to whichever function actually needs to run this round.
function performUpkeep(bytes calldata performData) external override {
// conditional to call one function
// conditional to call a different function
}
1. The Better way
When the Chainlink keepers call checkUpkeep they pass the returned information of performData to the input of performUpkeep. You can encode just about anything to pass as inputs to the performUpkeep function.
For example, you could pass a number to performUpkeep that corresponds with which function to call:
/// @notice Off-chain simulated check. Encodes an action code into
///         performData so performUpkeep knows which function to call:
///         0 => setTime(), 1 => withdraw().
function checkUpkeep(
    bytes calldata checkData
) external view override returns (
    bool upkeepNeeded, bytes memory performData
) {
    if (getPrice() == roundMoonPrice[coinRound]) {
        upkeepNeeded = true;
        // abi.encode so performUpkeep can abi.decode(performData, (uint256))
        performData = abi.encode(uint256(0)); // This is the new line
        return (true, performData);
    }
    if (roundWinningIndex[coinRound].length != 0) {
        upkeepNeeded = true;
        performData = abi.encode(uint256(1)); // This is the new line
        return (true, performData);
    }
    // Neither condition met: no upkeep needed this round.
    return (false, bytes(""));
}
Then, in your performUpkeep:
/// @notice Decodes the action code that checkUpkeep packed into
///         performData and runs the matching function.
function performUpkeep(bytes calldata performData) external override {
    uint256 action = abi.decode(performData, (uint256));
    if (action == 0) {
        setTime();
    } else if (action == 1) {
        withdraw();
    }
}
We encode and decode our performData object. In fact, we can use multiple parameters as well using more clever encoding per the solidity docs.
2. The not as good, but easier to understand way
Otherwise, you could do something with storage variables if the encoding is too tricky. This isn't ideal, because multiple calls of performUpkeep will fight for the storage variable.
For example:
// Storage flag read by performUpkeep to decide which function to run:
// 0 => setTime(), 1 => withdraw().
uint256 public functionToCall = 0;
// NOTE(review): this function is declared `view` yet assigns the
// functionToCall storage variable below — a state write inside a `view`
// function does not compile in Solidity 0.8. Additionally, checkUpkeep is
// presumably executed off-chain via eth_call by the keeper network, in
// which case the write would never be persisted anyway — confirm against
// the Chainlink Keepers documentation before relying on this pattern.
function checkUpkeep(
bytes calldata checkData
) external view override returns (
bool upkeepNeeded, bytes memory performData
) {
if (getPrice() == roundMoonPrice[coinRound]) {
upkeepNeeded = true;
// Select action 0 (setTime) for performUpkeep.
functionToCall = 0;
return (true, performData) ;
} if (roundWinningIndex[coinRound].length != 0) {
upkeepNeeded = true;
// Select action 1 (withdraw) for performUpkeep.
functionToCall = 1;
return (true, performData);
}
Then:
/// @notice Reads the functionToCall storage flag and dispatches to the
///         corresponding action (0 => setTime, 1 => withdraw).
function performUpkeep(bytes calldata performData) external override {
    uint256 selected = functionToCall;
    if (selected == 0) {
        setTime();
    } else if (selected == 1) {
        withdraw();
    }
}

Solidity mapping returning null values

So basically, I create a mapping within a smart contract to store hashes of user data. It's mapped from a user id to the hash itself (a bytes32 value). I use a double sha256 hash and store it in the mapping with the aforementioned id. The function for storing it returns the hash by returning the values at the id in the mapping. This hash is correct, meaning at the very least it's initially stored correctly. However, I have another function that gets the hash from the id and it always returns a null value in the javascript tests. I am wondering if it's a problem with the test or with the contract itself.
pragma solidity ^0.4.24; // abi.encodePacked requires compiler >= 0.4.24

/**
 * Stores double-SHA256 hashes of user data, keyed by a numeric user id.
 */
contract UserStore {
    mapping(uint => bytes32) UserHashes; // user id => double-sha256 hash

    event HashStored (
        uint id,
        bytes32 original,
        bytes32 hash
    );

    // Double SHA-256 of a 32-byte value: sha256(sha256(data)).
    function HashData(bytes32 data) public pure returns (bytes32) {
        return sha256(abi.encodePacked(sha256(abi.encodePacked(data))));
    }

    // Hashes `data` and persists it under `user_id`.
    // FIX: previously declared `view`, which promises no state change —
    // the mapping write was never persisted, so later reads returned 0.
    function StoreHash(uint user_id, bytes32 data) external returns (bytes32) {
        UserHashes[user_id] = HashData(data);
        emit HashStored(user_id, data, UserHashes[user_id]);
        return UserHashes[user_id];
    }

    /*
    Gets the hash from the blockchain (0x0 if nothing stored for u_id).
    */
    function GetHash(uint u_id) view public returns (bytes32) {
        return UserHashes[u_id];
    }
}
Every time I run this test, GetHash returns a 0 value:
// Truffle test for UserStore: stores double-sha256 hashes per user id and
// verifies they can be read back with GetHash.
contract("Storage_Test", function (accounts) {
  const args = {
    user_id: 0,
    data: "This is some security data",
    group_id: 15,
    user_ids: [1, 2, 3, 4, 5],
    num_accounts: 2
  };

  it("Hash Test: Multiple Storage and retrieving", async function () {
    const instance = await UserStore.deployed();

    let expected = args.data;
    for (let i = 1; i < args.num_accounts; i++) {
      const plain = expected;
      // Compute the expected hash locally via the pure helper.
      expected = await instance.HashData.call(plain);
      // .call() only simulates the transaction, so any state change is
      // discarded — send a real transaction to persist the mapping write.
      await instance.StoreHash.sendTransaction(i, plain);
    }

    expected = args.data;
    for (let i = 1; i < args.num_accounts; i++) {
      expected = await instance.HashData.call(expected);
      const stored = await instance.GetHash.call(i);
      assert.equal(stored, expected, "Hash at " + i + " wasn't stored correctly");
    }
  });
});
Change instance.StoreHash.call(...) to instance.StoreHash.sendTransaction(...). call() runs the function locally instead of submitting the transaction. The result is any state change isn’t persisted.

How to stream a video or a file considering request and response range headers?

I am now using FileStreamResult and it works to stream a video, but can't seek it. It always starts again from the beginning.
I was using ByteRangeStreamContent but it seems that it is not available anymore with dnxcore50.
So how to proceed ?
Do I need to manually parse the request range headers and write a custom FileResult that sets the response Content-Range and the rest of the headers and writes the buffer range to the response body, or is there something already implemented that I'm missing?
Here is a naive implementation of a VideoStreamResult. I am using at the moment (the multipart content part is not tested):
/// <summary>
/// A FileStreamResult that honours HTTP Range request headers so clients
/// can seek within a video instead of restarting from byte 0. Single-range
/// requests are answered with 206 Partial Content plus a Content-Range
/// header; multi-range requests produce a multipart/byteranges body.
/// </summary>
public class VideoStreamResult : FileStreamResult
{
    // default buffer size as defined in BufferedStream type
    private const int BufferSize = 0x1000;
    private string MultipartBoundary = "<qwe123>";

    public VideoStreamResult(Stream fileStream, string contentType)
        : base(fileStream, contentType)
    {
    }

    public VideoStreamResult(Stream fileStream, MediaTypeHeaderValue contentType)
        : base(fileStream, contentType)
    {
    }

    // True when the client asked for more than one byte range.
    private bool IsMultipartRequest(RangeHeaderValue range)
    {
        return range != null && range.Ranges != null && range.Ranges.Count > 1;
    }

    // True when the client asked for at least one byte range.
    private bool IsRangeRequest(RangeHeaderValue range)
    {
        return range != null && range.Ranges != null && range.Ranges.Count > 0;
    }

    /// <summary>
    /// Writes the stream to the response, honouring any parsed ranges.
    /// </summary>
    protected async Task WriteVideoAsync(HttpResponse response)
    {
        var bufferingFeature = response.HttpContext.Features.Get<IHttpBufferingFeature>();
        bufferingFeature?.DisableResponseBuffering();

        var length = FileStream.Length;
        var range = response.HttpContext.GetRanges(length);

        if (IsMultipartRequest(range))
        {
            response.ContentType = $"multipart/byteranges; boundary={MultipartBoundary}";
        }
        else
        {
            response.ContentType = ContentType.ToString();
        }
        response.Headers.Add("Accept-Ranges", "bytes");

        if (IsRangeRequest(range))
        {
            response.StatusCode = (int)HttpStatusCode.PartialContent;
            if (!IsMultipartRequest(range))
            {
                // Single range: announce the span and its exact length in the
                // response headers before any body bytes are written.
                var single = range.Ranges.First();
                response.Headers.Add("Content-Range", $"bytes {single.From}-{single.To}/{length}");
                response.ContentLength = (single.To ?? 0) - (single.From ?? 0) + 1;
            }
            foreach (var rangeValue in range.Ranges)
            {
                if (IsMultipartRequest(range)) // I don't know if multipart works
                {
                    // FIX: each part must advertise its own range; the
                    // original used range.Ranges.First() for every part.
                    await response.WriteAsync($"--{MultipartBoundary}");
                    await response.WriteAsync(Environment.NewLine);
                    await response.WriteAsync($"Content-type: {ContentType}");
                    await response.WriteAsync(Environment.NewLine);
                    await response.WriteAsync($"Content-Range: bytes {rangeValue.From}-{rangeValue.To}/{length}");
                    await response.WriteAsync(Environment.NewLine);
                }
                await WriteDataToResponseBody(rangeValue, response);
                if (IsMultipartRequest(range))
                {
                    await response.WriteAsync(Environment.NewLine);
                }
            }
            if (IsMultipartRequest(range))
            {
                await response.WriteAsync($"--{MultipartBoundary}--");
                await response.WriteAsync(Environment.NewLine);
            }
        }
        else
        {
            await FileStream.CopyToAsync(response.Body);
        }
    }

    // Copies the bytes of one inclusive range from FileStream to the body.
    // FIX: no longer assigns response.ContentLength here — for multipart
    // responses this ran once per range, after body output had begun.
    private async Task WriteDataToResponseBody(RangeItemHeaderValue rangeValue, HttpResponse response)
    {
        var startIndex = rangeValue.From ?? 0;
        var endIndex = rangeValue.To ?? 0;

        byte[] buffer = new byte[BufferSize];
        long totalToSend = endIndex - startIndex;
        int count = 0;

        // Ranges are inclusive, hence the +1.
        long bytesRemaining = totalToSend + 1;

        FileStream.Seek(startIndex, SeekOrigin.Begin);
        while (bytesRemaining > 0)
        {
            try
            {
                if (bytesRemaining <= buffer.Length)
                    count = FileStream.Read(buffer, 0, (int)bytesRemaining);
                else
                    count = FileStream.Read(buffer, 0, buffer.Length);

                if (count == 0)
                    return; // end of stream reached before the range was satisfied

                await response.Body.WriteAsync(buffer, 0, count);
                bytesRemaining -= count;
            }
            catch (IndexOutOfRangeException)
            {
                await response.Body.FlushAsync();
                return;
            }
            finally
            {
                await response.Body.FlushAsync();
            }
        }
    }

    public override async Task ExecuteResultAsync(ActionContext context)
    {
        await WriteVideoAsync(context.HttpContext.Response);
    }
}
And parse request headers range:
/// <summary>
/// Parses the request's "Range" header into a RangeHeaderValue whose
/// From/To pairs are absolute, inclusive byte offsets within a resource of
/// <paramref name="contentSize"/> bytes. Returns null when no Range header
/// is present.
/// </summary>
public static RangeHeaderValue GetRanges(this HttpContext context, long contentSize)
{
    RangeHeaderValue rangesResult = null;
    string rangeHeader = context.Request.Headers["Range"];
    if (!string.IsNullOrEmpty(rangeHeader))
    {
        // Supported forms (all offsets inclusive):
        //   Range: bytes=0-1       bytes 0 and 1
        //   Range: bytes=0-500     the first 501 bytes
        //   Range: bytes=400-1000  bytes 400 through 1000 (601 bytes)
        //   Range: bytes=-200      the last 200 bytes
        //   Range: bytes=500-      byte 500 through the end
        //   Range: bytes=0-500,600-1000   multiple comma-separated ranges
        string[] ranges = rangeHeader.Replace("bytes=", string.Empty).Split(",".ToCharArray());
        rangesResult = new RangeHeaderValue();
        for (int i = 0; i < ranges.Length; i++)
        {
            const int START = 0, END = 1;
            long endByte, startByte;
            long parsedValue;

            string[] currentRange = ranges[i].Trim().Split("-".ToCharArray());
            if (currentRange.Length != 2)
            {
                // Malformed entry such as "bytes=5" — previously this threw
                // IndexOutOfRangeException; skip it instead.
                continue;
            }

            if (long.TryParse(currentRange[END], out parsedValue))
                endByte = parsedValue;
            else
                endByte = contentSize - 1; // open-ended "N-": run to the end

            if (long.TryParse(currentRange[START], out parsedValue))
                startByte = parsedValue;
            else
            {
                // Suffix form "-N": no start given, so take the last
                // endByte bytes of the resource.
                startByte = contentSize - endByte;
                endByte = contentSize - 1;
            }
            rangesResult.Ranges.Add(new RangeItemHeaderValue(startByte, endByte));
        }
    }
    return rangesResult;
}
FYI, built-in support for range requests will be present in .NET Core 2.1
https://github.com/aspnet/Mvc/pull/6895

CryptEncrypt fails with NTE_BAD_LEN

I need to encrypt a message with an X.509 RSA public key. I successfully imported a 1024-bit public key in DER format, but my program fails with messages longer than about 110 bytes. I'm calling the CryptEncrypt function with pbData set to NULL, because I need to calculate the size of the output buffer first.
This is the source code:
Plain text version of the same:
// Encrypts a message with an X.509 RSA public key via Windows CryptoAPI.
// Flow: acquire a verify-only RSA context -> decode the DER public key
// blob -> import it as an HCRYPTKEY -> query the key's block length ->
// size-query CryptEncrypt (pbData == NULL) to learn the output buffer size.
// NOTE(review): returns false unconditionally, even when every call
// succeeds — callers cannot detect success.
bool CCrypt::RSAEncrypt() {
HCRYPTPROV hProv = NULL;
HCRYPTKEY hKey = NULL;
// CRYPT_VERIFYCONTEXT: ephemeral context, no persisted key container.
if (CryptAcquireContext(&hProv, NULL, MS_ENHANCED_PROV, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) {
CParam *pubKey = coreData.local.overlay->getItem(3);
// Decode the binary key blob in DER format into a CERT_PUBLIC_KEY_INFO
CERT_PUBLIC_KEY_INFO* publicKeyInfo = NULL;
DWORD publicKeyInfoSize;
if (CryptDecodeObjectEx(
X509_ASN_ENCODING,
X509_PUBLIC_KEY_INFO,
(LPBYTE) pubKey->getVal(),
pubKey->getLength(),
CRYPT_DECODE_ALLOC_FLAG/* | CRYPT_DECODE_NOCOPY_FLAG*/,
NULL, // TODO: Pass a CRYPT_DECODE_PARA to use own heap management to allocate memory
&publicKeyInfo,
&publicKeyInfoSize
)) {
// Import the public using the context
if (CryptImportPublicKeyInfo(
hProv,
X509_ASN_ENCODING,
publicKeyInfo,
&hKey
)) {
// Get the size of a key
DWORD dwBlockLen = NULL;
DWORD dwValLen = sizeof(DWORD);
if (CryptGetKeyParam(hKey, KP_BLOCKLEN, (LPBYTE) &dwBlockLen, &dwValLen, 0)) {
dwBlockLen = (dwBlockLen + 7) / 8; // Transform to bytes length
BYTE msg[] = "Lorem ipsum dolor sit .... [3000+ characters here]";
DWORD dwMsgLen = I(str)->lengthA((LPSTR) msg);
//dwMsgLen = 110;
DBG(C_INFO, "CryptGetKeyParam succeed. dwMsgLen: %d, dwBlockLen: %d", dwMsgLen, dwBlockLen);
// pbData [in, out] set to NULL to calculate actual size of a buffer required
// NOTE(review): RSA encrypts at most ONE block — the key modulus size
// minus the padding overhead (11 bytes for PKCS#1 v1.5; larger for
// CRYPT_OAEP). A 2175-byte message cannot fit in a 128-byte block,
// which is the likely cause of NTE_BAD_LEN here — confirm against the
// CryptEncrypt documentation. Long messages need hybrid encryption:
// RSA-encrypt a symmetric session key, then encrypt the payload with it.
if (CryptEncrypt(hKey, 0, TRUE, CRYPT_OAEP, NULL, &dwMsgLen, 0)) {
DBG(C_INFO, "CryptEncrypt succeed. dwMsgLen: %d", dwMsgLen);
// TODO: Fails here
} else {
DBG(C_ERROR, "CryptEncrypt error.");
}
} else {
DBG(C_ERROR, "CryptGetKeyParam error.");
}
CryptDestroyKey(hKey);
}
// Frees the buffer allocated by CRYPT_DECODE_ALLOC_FLAG.
LocalFree(publicKeyInfo);
}
CWA(advapi32, CryptReleaseContext)(hProv, 0);
}
return false;
}
Output from my debugger:
[16:08:14] TC=1093889010, PID=25484(0x638C), TID=26340(0x66E4), LE=0(0x0), F=CCrypt::RSAEncrypt, FL=d:\c\source\client\../common/Crypt.cpp (62)
INFO: CryptGetKeyParam succeed. dwMsgLen: 2175, dwBlockLen: 128
[16:08:14] TC=1093889010, PID=25484(0x638C), TID=26340(0x66E4), LE=2148073476(0x80090004), F=CCrypt::RSAEncrypt, FL=d:\c\source\client\../common/Crypt.cpp (69)
ERROR: CryptEncrypt error.
As you can see, this function fails at CryptEncrypt with error NTE_BAD_LEN (0x80090004). However when I manually set dwMsgLen to 110 [see line 61], it works correctly. When I set this variable to 120, it fails.
Can someone tell me where the problem can be?

Resources