I am implementing a basic encryption/decryption set of functions in nodejs and I keep getting the following error in the decryption part:
Error: Unsupported state or unable to authenticate data
This is my code so far:
import crypto from 'crypto'
import logger from './logger'
const ALGORITHM = 'aes-256-gcm'
export const encrypt = (keyBuffer, dataBuffer, aadBuffer) => {
// iv stands for "initialization vector"
const iv = Buffer.from(crypto.randomBytes(12), 'utf8')
logger.debug('iv: ', iv)
const encryptor = crypto.createCipheriv(ALGORITHM, keyBuffer, iv)
logger.debug('encryptor: ', encryptor)
logger.debug('dataBuffer: ', dataBuffer)
return Buffer.concat([iv, encryptor.update(dataBuffer, 'utf8'), encryptor.final()])
}
export const decrypt = (keyBuffer, dataBuffer, aadBuffer) => {
const iv = dataBuffer.slice(0, 96)
const decryptor = crypto.createDecipheriv(ALGORITHM, keyBuffer, iv)
return Buffer.concat([decryptor.update(dataBuffer.slice(96), 'utf8'), decryptor.final()])
}
My error happens in the last line of the decrypt function. I am storing the iv as part of the dataBuffer.
Thanks in advance!
I realized I had made a couple of mistakes with the original code that I posted, one of them as #TheGreatContini remarked was the size of the slicing which was being done in bits instead of bytes as it should be. Still, the biggest piece that I was missing was the authTag which always should be included in the decipher function setup.
Here is my working code for anybody interested for future references:
import crypto from 'crypto'
import logger from './logger'
const ALGORITHM = 'aes-256-gcm'
export const encrypt = (keyBuffer, dataBuffer, aadBuffer) => {
// iv stands for "initialization vector"
const iv = crypto.randomBytes(12)
const cipher = crypto.createCipheriv(ALGORITHM, keyBuffer, iv)
const encryptedBuffer = Buffer.concat([cipher.update(dataBuffer), cipher.final()])
const authTag = cipher.getAuthTag()
let bufferLength = Buffer.alloc(1)
bufferLength.writeUInt8(iv.length, 0)
return Buffer.concat([bufferLength, iv, authTag, encryptedBuffer])
}
export const decrypt = (keyBuffer, dataBuffer, aadBuffer) => {
const ivSize = dataBuffer.readUInt8(0)
const iv = dataBuffer.slice(1, ivSize + 1)
// The authTag is by default 16 bytes in AES-GCM
const authTag = dataBuffer.slice(ivSize + 1, ivSize + 17)
const decipher = crypto.createDecipheriv(ALGORITHM, keyBuffer, iv)
decipher.setAuthTag(authTag)
return Buffer.concat([decipher.update(dataBuffer.slice(ivSize + 17)), decipher.final()])
}
Related
I'm trying to execute a smart-contract batch processing function but after a certain point of time I'm getting the following error:
processing response error (body="{\"jsonrpc\":\"2.0\",\"id\":52,\"error\":{\"code\":-32000,\"message\":\"tx fee (4.00 ether) exceeds the configured cap (1.00 ether)\"}}", error={"code":-32000}, requestBody="{\"method\":\"eth_sendRawTransaction\",
I do look for the same and got a link: tx fee (2.11 ether) exceeds the configured cap (1.00 ether), how to bypass default cap? but the answer is specific to local net and geth, I'm using ethers.js for making a call on polygon and avalanche c-chain network, and wanted to know how can I able to change the default configuration in default provider. Below is the code:
async function createTransactionData(arg: any[]) {
const transferABI = ["function batchSafeMint(address toAddress, uint256[] tokenIds, string[] uris)"];
const transferInterface = new ethers.utils.Interface(transferABI);
const transferData = transferInterface.encodeFunctionData(
'batchSafeMint',
[
arg[0],
arg[1],
arg[2]
]
);
return transferData
}
const transferData = await createTransactionData(params)
const provider = new ethers.providers.JsonRpcProvider(network.networkURL)
const gasFeeData = await provider.getFeeData();
const nonce = await provider.getTransactionCount(senderAddress);
const chainId = (await provider.getNetwork()).chainId;
const rawTxn: any = {
to: contractHash,
maxFeePerGas: gasFeeData.maxFeePerGas,
maxPriorityFeePerGas: gasFeeData.maxPriorityFeePerGas,
chainId: chainId,
data: transferData,
gasPrice: null,
nonce: nonce,
value: null,
from: senderAddress,
type: 2
}
rawTxn["gasLimit"] = await provider.estimateGas(rawTxn)
delete rawTxn["from"]
const unsignedTxn = await ethers.utils.resolveProperties(rawTxn);
const keccak256Hash = await getUnsignedTxnKeccak256Hash(unsignedTxn);
const signedTxn = await signer.sign(keccak256Hash); // implemented own signer
const signedTxnRaw = ethers.utils.serializeTransaction(
//#ts-ignore
<UnsignedTransaction>unsignedTxn,
signedTxn
);
const receipt = await provider.sendTransaction(signedTxnRaw);
let result = await provider.waitForTransaction(receipt.hash);
So I have seen at the time of creating a provider there is an option to provide a ConnectionInfo object as well.
// Exported Types
export type ConnectionInfo = {
url: string,
headers?: { [key: string]: string | number }
user?: string,
password?: string,
allowInsecureAuthentication?: boolean,
allowGzip?: boolean,
throttleLimit?: number,
throttleSlotInterval?: number;
throttleCallback?: (attempt: number, url: string) => Promise<boolean>,
skipFetchSetup?: boolean;
errorPassThrough?: boolean;
timeout?: number,
};
By any chance can I use config params to change the default config?
I wanted to use ViteJs. For the purpose, I need to use CryptoJS instead of Crypto. Current code using crypto is working fine.
const crypto = require('crypto');
export function encrypt(plainText, secret) {
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv('aes-256-cbc', secret, iv);
let cipherText;
try {
cipherText = cipher.update(plainText, 'utf8', 'hex');
cipherText += cipher.final('hex');
cipherText = iv.toString('hex') + cipherText
} catch (e) {
cipherText = null;
}
return cipherText;
}
export function decrypt(cipherText, secret) {
const contents = Buffer.from(cipherText, 'hex');
const iv = contents.slice(0, 16);
const textBytes = contents.slice(16);
const decipher = crypto.createDecipheriv('aes-256-cbc', secret, iv);
let decrypted = decipher.update(textBytes, 'hex', 'utf8');
decrypted += decipher.final('utf8');
return decrypted;
}
Here I want to encrypt on JS and decrypt on Golang. Please help converting the following code to use CryptoJS
The CryptoJS code I tried but didn't work on golang
import CryptoJS from 'crypto-js'
export const encrypt = (plainText: string, secret: string) => {
const iv = CryptoJS.lib.WordArray.random(16)
const encrypted = CryptoJS.AES.encrypt(plainText, secret, {
iv,
padding: CryptoJS.pad.Pkcs7,
mode: CryptoJS.mode.CBC,
})
return iv + encrypted.toString()
}
export const decrypt = (cipherText: string, secret: string) => {
const iv = cipherText.slice(0, 32)
const ivEx = CryptoJS.enc.Hex.parse(iv)
cipherText = cipherText.replace(iv, '')
const decrypt = CryptoJS.AES.decrypt(cipherText, secret, {
ivEx,
padding: CryptoJS.pad.Pkcs7,
mode: CryptoJS.mode.CBC,
})
return decrypt.toString(CryptoJS.enc.Utf8)
}
Golang code I used to decrypt
// Decrypt decrypts cipher text string into plain text string
func Decrypt(encrypted string, password string) (string, error) {
key := []byte(password)
cipherText, err := hex.DecodeString(encrypted)
if err != nil {
fmt.Printf("%v", err)
return "", err
}
block, err := aes.NewCipher(key)
if err != nil {
fmt.Printf("%v", err)
return "", err
}
if len(cipherText) < aes.BlockSize {
return "", errors.New("cipherText too short")
}
iv := cipherText[:aes.BlockSize]
cipherText = cipherText[aes.BlockSize:]
if len(cipherText)%aes.BlockSize != 0 {
return "", errors.New("cipherText is not a multiple of the block size")
}
mode := cipher.NewCBCDecrypter(block, iv)
mode.CryptBlocks(cipherText, cipherText)
cipherText, _ = pkcs7.Unpad(cipherText, aes.BlockSize)
return fmt.Sprintf("%s", cipherText), nil
}
I use the script in web
But error in edge browser console
At const key = await .... expect “;”
How to use this script correctly?
I have no chrome browser in another computer and only expect to use default browser edge in window 10
I use for password transit through USB storage
const encoder = new TextEncoder();
const toBase64 = buffer =>
btoa(String.fromCharCode(...new Uint8Array(buffer)));
const PBKDF2 = async (
password, salt, iterations,
length, hash, algorithm = 'AES-CBC') => {
keyMaterial = await window.crypto.subtle.importKey(
'raw',
encoder.encode(password),
{name: 'PBKDF2'},
false,
['deriveKey']
);
return await window.crypto.subtle.deriveKey(
{
name: 'PBKDF2',
salt: encoder.encode(salt),
iterations,
hash
},
keyMaterial,
{ name: algorithm, length },
false, // we don't need to export our key!!!
['encrypt', 'decrypt']
);
}
const salt = window.crypto.getRandomValues(new Uint8Array(16));
const iv = window.crypto.getRandomValues(new Uint8Array(16));
const plain_text = encoder.encode("That is our super secret text");
const key = await PBKDF2('my password', salt, 100000, 256, 'SHA-256');
const encrypted = await window.crypto.subtle.encrypt(
{name: "AES-CBC", iv },
key,
plain_text
);
console.log({
salt: toBase64(salt),
iv: toBase64(iv),
encrypted: toBase64(encrypted),
concatennated: toBase64([
...salt,
...iv,
...new Uint8Array(encrypted)
])
});
In the script, the keyword var is missing in the definition of keyMaterial. Edge then generates the error message Uncaught (in promise) ReferenceError: keyMaterial is not defined at PBKDF2... Apart from that the script is executed on my machine (Edge version 90.0.818.46).
Another problem may be an older Edge version. Edge (meanwhile Chromium-based) supports top-level-awaits as of version 89, in earlier versions the script has to be encapsulated in an asynchronous top-level function (see also here), e.g.
const main = async () => {
const encoder = new TextEncoder();
const toBase64 = buffer =>
btoa(String.fromCharCode(...new Uint8Array(buffer)));
const PBKDF2 = async (
password, salt, iterations,
length, hash, algorithm = 'AES-CBC') => {
var keyMaterial = await window.crypto.subtle.importKey(
'raw',
encoder.encode(password),
{name: 'PBKDF2'},
false,
['deriveKey']
);
return await window.crypto.subtle.deriveKey(
{
name: 'PBKDF2',
salt: encoder.encode(salt),
iterations,
hash
},
keyMaterial,
{ name: algorithm, length },
false, // we don't need to export our key!!!
['encrypt', 'decrypt']
);
}
const salt = window.crypto.getRandomValues(new Uint8Array(16));
const iv = window.crypto.getRandomValues(new Uint8Array(16));
const plain_text = encoder.encode("That is our super secret text");
const key = await PBKDF2('my password', salt, 100000, 256, 'SHA-256');
const encrypted = await window.crypto.subtle.encrypt(
{name: "AES-CBC", iv },
key,
plain_text
);
console.log({
salt: toBase64(salt),
iv: toBase64(iv),
encrypted: toBase64(encrypted),
concatennated: toBase64([
...salt,
...iv,
...new Uint8Array(encrypted)
])
});
}
(async () => {
await main();
})();
Newer Edge versions are based on Chronium, under which the script also runs for earlier versions (tested for v85). I couldn't do a test for Edge versions before the Chromium switch, because I don't have a correspondingly old version. If you use one, you would have to test this in your environment.
I'm working on a simple proof of concept to export and import private and public keys through node.js and browser communication using SubtleCrypto. I've installed the #peculiar/webcrypto package to keep things consistent between the front and backend. The first part is simply the generation and export of the keys, and the second part is my attempt to import the keys. Here is the code:
// GENERATE AND EXPORT KEYS
const { publicKey, privateKey } = await crypto.subtle.generateKey(
{
name: 'RSA-OAEP',
modulusLength: 2048,
publicExponent: new Uint8Array([1, 0, 1]),
hash: 'SHA-256',
},
true,
['encrypt', 'decrypt'],
);
const publicExport = await crypto.subtle.exportKey('spki', publicKey);
const privateExport = await crypto.subtle.exportKey('pkcs8', privateKey);
const pubExportedAsString = ab2str(publicExport);
const pubExportedAsBase64 = btoa(pubExportedAsString);
const publicKeyPem = `${pubExportedAsBase64}`;
const privExportedAsString = ab2str(privateExport);
const privExportedAsBase64 = btoa(privExportedAsString);
const privateKeyPem = `${privExportedAsBase64}`;
// IMPORT KEYS
const pubKeyImportedAsString = atob(publicKeyPem);
const pubKeyImportedAsArrayBuffer = str2ab(pubKeyImportedAsString);
const publicKeyImport = await crypto.subtle.importKey(
'spki', pubKeyImportedAsArrayBuffer, { name: 'RSA-OAEP', hash: 'SHA-256' }, true, ['encrypt']
);
const privateKeyImportedAsString = atob(privateKeyPem);
const privateKeyImportedAsArrayBuffer = str2ab(privateKeyImportedAsString);
const privateKeyImport = await crypto.subtle.importKey(
'pkcs8', privateKeyImportedAsArrayBuffer, { name: 'RSA-OAEP', hash: 'SHA-256' }, true, ['decrypt']
);
// HELPERS
const ab2str = (buffer: ArrayBuffer): string => new TextDecoder().decode(buffer);
const str2ab = (text: string): ArrayBuffer => new TextEncoder().encode(text);
The import of the keys is where this throws an error:
Unhandled error Error: Too big integer
I can't seem to see where the issue is, and why the key encoding and decoding would fail. If anyone has any clues or ideas, that would help out tremendously :)
Figured it out (for anyone in a similar predicament). Turns out the issue was the ab2str and str2ab helper functions, I was misled to believe TextDecoder/TextEncoder would properly handle the conversions for this use case.
Rewriting these to the following solved this issue!
const ab2str = (buffer: ArrayBuffer) =>
String.fromCharCode.apply(null, Array.from(new Uint8Array(buffer)));
const str2ab = (str: string): ArrayBuffer => {
const buffer = new ArrayBuffer(str.length * 2);
const bufferInterface = new Uint8Array(buffer);
Array.from(str)
.forEach((_, index: number) => bufferInterface[index] = str.charCodeAt(index));
return buffer;
}
It's solved finally, you can see my answers below
File Decryption not working in Node when encrypted from php
PHP Code to Encrypt
<?php
$key = "f9036c20bdb656106fd176d260878c63";
$iv = "7152201381f54b46";
exec('openssl enc -aes-256-cbc -K '.$key.' -iv '.$iv.' -in a.txt -out b.txt');
exec('openssl enc -d -aes-256-cbc -K '.$key.' -iv '.$iv.' -in b.txt -out outr.txt');
?>
Decryption works fine in PHP
JS code for Decryption both the below approach is not working
var CryptoJS = require('crypto-js');
var key ="f9036c20bdb656106fd176d260878c63";
var iv1 = "7152201381f54b46";
var text = require('fs').readFileSync('../b.txt');
var bytes = CryptoJS.AES.decrypt(text,key,{iv:iv1, mode: CryptoJS.mode.CBC,padding: CryptoJS.pad.Pkcs7 });
console.log(CryptoJS.enc.Utf8.stringify(bytes));
require('fs').writeFile('../out.txt', CryptoJS.enc.Utf8.stringify(bytes), function (err) {
if (err) {
return console.error(err);
}
});
Also tried with crypto no luck
const crypto = require('crypto');
const fs = require('fs');
var secret = "f9036c20bdb656106fd176d260878c63";
const buf_secret = Buffer.from(secret);
var iv = "7152201381f54b46";
const buf_iv = Buffer.from(iv);
const decipher = crypto.createCipheriv('aes-256-cbc', buf_secret, buf_iv);
decipher.setAutoPadding(true);
fs.readFile('../b.txt', function (err, data) {
if (err) {
return console.log(err);
}
const buf_data = Buffer.from(data);
console.log(buf_data);
let decrypted = decipher.update(buf_data, 'utf8');
decrypted += decipher.final('utf8');
console.log(decrypted);
});
I am sure some padding issue is there, can someone point out what error its having?
It's solved. The problem is PHP openssl accepts key and iv as hex. For openssl256 key length should be 64 and iv length should be 32, but in PHP key length was 32 and iv length was 16 which is for openssl128, so PHP is adding trailing zeros. In JS after adding trailing zeros and considering it as hex its working fine.
const crypto = require('crypto');
const fs = require('fs');
const key_size = 64;
const iv_size = 32;
var secret = "f9036c20bdb656106fd176d260878c63";
secret = pad(secret,key_size,"0"); //pad with trailing zeros
const buf_secret = Buffer.from(secret,'hex');
var iv = "7152201381f54b46";
iv = pad(iv,iv_size,"0");//pad with trailing zeros
const buf_iv = Buffer.from(iv,'hex');
const decipher = crypto.createDecipheriv('aes-256-cbc', buf_secret, buf_iv);
decipher.setAutoPadding(true);
const input = fs.createReadStream('../b.txt');
const output = fs.createWriteStream('../decrypted.txt');
input.pipe(decipher).pipe(output);
//content if you want instead of direct writing
//fs.readFile('../b.txt', function (err, data) {
// if (err) {
// return console.log(err);
// }
// const buf_data = Buffer.from(data);
// console.log(buf_data);
// let decrypted = decipher.update(buf_data, 'utf8');
// decrypted += decipher.final('utf8');
// console.log(decrypted);
//
//});
//for padding trailing zeros
function pad(value, width, padchar) {
while (value.length < width) {
value += padchar;
}
return value;
}