Go encryption differs from Ruby encryption using same key and iv - ruby

I have the following Ruby code:
# Encrypts a short string with AES-256-GCM and prints the base64-encoded
# ciphertext followed by the 16-byte authentication tag.
require 'base64'
require 'openssl'
data = '503666666'
# NOTE(review): this is a 64-character hex string, not 32 raw bytes.
# OpenSSL truncates it to the first 32 bytes when used as the AES-256 key
# (see the answer below) — it is never hex-decoded here.
key = '4768c01c4f598828ef80d9982d95f888fb952c5b12189c002123e87f751e3e82'
# Single quotes: the "\n" is a literal backslash-n, which Base64.decode64
# simply ignores as a non-alphabet character.
nonce = '4eFi6Q3PX1478767\n'
nonce = Base64.decode64(nonce)
c = OpenSSL::Cipher.new('aes-256-gcm')
c.encrypt
c.key = key
c.iv = nonce
# Ciphertext plus the GCM auth tag, concatenated before encoding.
result = c.update(data) + c.final
tag = c.auth_tag
puts Base64.encode64(result + tag) # => J3AVfNG84bz2UuXcfre7LVjSbMpX9XBq6g==\n
that I'm trying to replicate in Golang.
Here's what I have so far:
package main
import (
"fmt"
"crypto/aes"
"crypto/cipher"
"encoding/base64"
"encoding/hex"
)
func main() {
data := []byte("503666666")
// Hex-decode the 64 hex digits into the 32 raw key bytes AES-256 requires.
// (The Ruby code instead passes the hex string itself as the key, which
// OpenSSL truncates to 32 bytes — the cause of the differing outputs.)
key, err := hex.DecodeString(`4768c01c4f598828ef80d9982d95f888fb952c5b12189c002123e87f751e3e82`)
if err != nil {
panic(err)
}
// 16 base64 characters decode to 12 bytes — the standard GCM nonce size.
// The trailing "\n" is ignored by the base64 decoder.
nonceB64 := "4eFi6Q3PX1478767\n"
nonce, err := base64.StdEncoding.DecodeString(nonceB64)
if err != nil {
panic(err)
}
block, err := aes.NewCipher(key)
if err != nil {
panic(err.Error())
}
aesgcm, err := cipher.NewGCM(block)
if err != nil {
panic(err.Error())
}
// Seal appends the 16-byte GCM tag to the ciphertext, matching the Ruby
// code's `result + tag` concatenation.
ciphertext := aesgcm.Seal(nil, nonce, data, nil)
fmt.Printf("%s\n", base64.StdEncoding.EncodeToString(ciphertext))
}
However the outcome from the Go version is:
+S52HGbLV1xp+GnF0v8VNOqc5J2GY2+SqA==
vs.
J3AVfNG84bz2UuXcfre7LVjSbMpX9XBq6g==\n
Why am I getting different results?
Thanks,

The AES 256 cipher requires a 32 byte key. The Ruby code is setting the key to a 64 byte string consisting of hexadecimal digits. OpenSSL is truncating the string to 32 bytes before use (change key to '4768c01c4f598828ef80d9982d95f888' in the Ruby code and you'll get the same output).
The Go code however is hex decoding the key before use, converting the 64 hexadecimal digits to the 32 bytes required for the key.
If you want to change the Go code so that it matches the Ruby result, then you'll need to truncate the key and remove the hex decoding step:
key := []byte("4768c01c4f598828ef80d9982d95f888")
However, I'd argue that the key handling in the Go version of the code is better. If you want to change the Ruby version to match the Go version, you can hex decode the key before use:
key = [key].pack('H*')

Related

How I can decode aes-256-cfb

How I can decode aes-256-cfb?
I have file encoded by aes-256-cfb, when I use openssl command
openssl enc -d -aes-256-cfb -salt -pbkdf2 -pass file:encpass -out x.txz -in encpkg
this file is decrypted without any problem, but when I try to decrypt this file with Go, I always get an incorrect file. I don't know what my problem is and I hope to find help.
my code:
package main
import (
"crypto/aes"
"crypto/cipher"
"crypto/sha256"
"log"
"os"
)
func main() {
// Read the file produced by `openssl enc -aes-256-cfb -salt -pbkdf2`.
fiencpkg, err := os.ReadFile("encpkg")
if err != nil {
log.Println(err)
os.Exit(1)
}
// Read the password file; its raw bytes are the passphrase.
fiencpass, err := os.ReadFile("encpass")
if err != nil {
log.Println(err)
os.Exit(1)
}
// NOTE(review): SHA-256 of the password is NOT how `openssl enc -pbkdf2`
// derives the key — OpenSSL uses PBKDF2 with a random salt stored in the
// file header, which is why this decryption yields garbage (see answer).
keyb := sha256.Sum256(fiencpass)
block, err := aes.NewCipher(keyb[:])
if err != nil {
panic(err)
}
if len(fiencpkg) < aes.BlockSize {
panic("data too short")
}
// NOTE(review): OpenSSL's salted format is "Salted__" + 8-byte salt +
// ciphertext, so the first 16 bytes are not an IV either.
iv := fiencpkg[:aes.BlockSize]
decdata := fiencpkg[aes.BlockSize:]
stream := cipher.NewCFBDecrypter(block, iv)
// decdata aliases fiencpkg[aes.BlockSize:], so this decrypts in place.
stream.XORKeyStream(decdata, fiencpkg[aes.BlockSize:])
os.WriteFile("x_go.txz", decdata, 0777)
}
The -pbkdf2 option in the OpenSSL statement causes the PBKDF2 key derivation to be used:
During encryption, a random 8 bytes salt is generated, and together with the password, the key and IV are determined using this key derivation.
Since the salt is needed for decryption, the OpenSSL statement concatenates salt and ciphertext and indicates this with the prefix Salted__.
Thus, during decryption, salt and ciphertext must first be separated:
salt := fiencpkg[8:16]
ciphertext := fiencpkg[16:]
Then key and IV can be derived via PBKDF2 using e.g. the pbkdf2 package:
keyIv := pbkdf2.Key(fiencpass, salt, 10000, 48, sha256.New)
key := keyIv[0:32]
iv := keyIv[32:48]
Note the OpenSSL default values 10000 and SHA256 for iteration count and digest. Since the encryption was done with AES-256-CFB 48 bytes have to be generated (32 bytes for the key, 16 bytes for the IV).
After determining key and IV, decryption can be performed as usual.
Full code:
package main
import (
"crypto/aes"
"crypto/cipher"
"crypto/sha256"
"log"
"os"
"golang.org/x/crypto/pbkdf2"
)
func main() {
	// Read the OpenSSL-encrypted file. Its layout is:
	//   bytes 0..7  : the ASCII magic "Salted__"
	//   bytes 8..15 : the random 8-byte PBKDF2 salt
	//   bytes 16..  : the actual ciphertext
	fiencpkg, err := os.ReadFile("encpkg")
	if err != nil {
		log.Println(err)
		os.Exit(1)
	}
	// Guard against truncated or corrupt input before slicing, and verify
	// the OpenSSL "Salted__" header so wrong input fails loudly instead of
	// panicking with an index-out-of-range.
	if len(fiencpkg) < 16 || string(fiencpkg[:8]) != "Salted__" {
		log.Println("input is not an OpenSSL salted file")
		os.Exit(1)
	}
	salt := fiencpkg[8:16]
	ciphertext := fiencpkg[16:]
	// The password file's raw bytes are the PBKDF2 password.
	fiencpass, err := os.ReadFile("encpass")
	if err != nil {
		log.Println(err)
		os.Exit(1)
	}
	// Derive 48 bytes with OpenSSL's defaults (10000 iterations, SHA-256):
	// the first 32 are the AES-256 key, the last 16 the CFB IV.
	keyIv := pbkdf2.Key(fiencpass, salt, 10000, 48, sha256.New)
	key := keyIv[0:32]
	iv := keyIv[32:48]
	block, err := aes.NewCipher(key)
	if err != nil {
		panic(err)
	}
	// CFB is a stream mode: decryption is an in-place XOR with the keystream.
	stream := cipher.NewCFBDecrypter(block, iv)
	stream.XORKeyStream(ciphertext, ciphertext)
	// Report write failures rather than silently discarding them.
	if err := os.WriteFile("x_go.txz", ciphertext, 0777); err != nil {
		log.Println(err)
		os.Exit(1)
	}
}

Implementing Ethereum personal_sign (EIP-191) from go-ethereum gives different signature from ethers.js

I am attempting to generate a personal_sign in Golang like it is implemented in ethers.js. There is a similar question, but that one ended up using the regular sign instead of the personal_sign implementation.
Ethers
// keccak256 hash of the data
let dataHash = ethers.utils.keccak256(
ethers.utils.toUtf8Bytes(JSON.stringify(dataToSign))
);
//0x8d218fc37d2fd952b2d115046b786b787e44d105cccf156882a2e74ad993ee13
let signature = await wallet.signMessage(dataHash); // 0x469b07327fc41a2d85b7e69bcf4a9184098835c47cc7575375e3a306c3718ae35702af84f3a62aafeb8aab6a455d761274263d79e7fc99fbedfeaf759d8dc9361c
Golang:
// signHash applies the EIP-191 personal-message prefix to data and returns
// the Keccak-256 digest. Note it hashes the raw bytes; ethers.js
// signMessage, when handed a hex string, hashes the UTF-8 bytes of that
// string instead — the source of the mismatch discussed in this question.
func signHash(data []byte) common.Hash {
msg := fmt.Sprintf("\x19Ethereum Signed Message:\n%d%s", len(data), data)
return crypto.Keccak256Hash([]byte(msg))
}
privateKey, err := crypto.HexToECDSA(hexPrivateKey)
if err != nil {
log.Fatal(err)
}
dataHash := crypto.Keccak256Hash(dataToSign) //0x8d218fc37d2fd952b2d115046b786b787e44d105cccf156882a2e74ad993ee13
signHash := signHash(dataHash.Bytes())
signatureBytes, err := crypto.Sign(signHash.Bytes(), privateKey)
if err != nil {
log.Fatal(err)
}
// signatureBytes 0xec56178d3dca77c3cee7aed83cdca2ffa2bec8ef1685ce5103cfa72c27beb61313d91b9ad9b9a644b0edf6352cb69f2f8acd25297e3c64cd060646242e0455ea00
As you can see the hash is the same, but the signature is different:
0x469b07327fc41a2d85b7e69bcf4a9184098835c47cc7575375e3a306c3718ae35702af84f3a62aafeb8aab6a455d761274263d79e7fc99fbedfeaf759d8dc9361c Ethers
0xec56178d3dca77c3cee7aed83cdca2ffa2bec8ef1685ce5103cfa72c27beb61313d91b9ad9b9a644b0edf6352cb69f2f8acd25297e3c64cd060646242e0455ea00 Golang
Looking at the source code of Ethers.js I can't find anything different aside how the padding is managed.
Edit
Check the approved answer
signHash(data []byte) common.Hash {
hexData := hexutil.Encode(data)
msg := fmt.Sprintf("\x19Ethereum Signed Message:\n%d%s", len(hexData), hexData)
return crypto.Keccak256Hash([]byte(msg))
}
There is a bug in the JavaScript code.
From the documentation of signer.signMessage() (see the Note section), it appears that a string is UTF8 encoded and binary data must be passed as TypedArray or Array.
The Keccak hash is returned hex encoded, i.e. as string, and is therefore UTF8 encoded, which is incorrect. Instead, it must be converted to a TypedArray. For this purpose the library provides the function ethers.utils.arrayify().
The following JavaScript is based on the posted code, but performs the required hex decoding:
(async () => {
  // Demo key and payload.
  let privateKey = "0x8da4ef21b864d2cc526dbdb2a120bd2874c36c9d0a1fb7f8c63d7f7a8b41de8f";
  let dataToSign = {"data1":"value1","data2":"value2"};
  // keccak256 of the UTF-8 JSON; the result is a 0x-prefixed hex *string*.
  let dataHash = ethers.utils.keccak256(
    ethers.utils.toUtf8Bytes(JSON.stringify(dataToSign))
  );
  // Convert the hex string to raw bytes so signMessage signs the 32-byte
  // digest rather than the UTF-8 bytes of its hex encoding.
  // (Declared with `let` — the original leaked an implicit global.)
  let dataHashBin = ethers.utils.arrayify(dataHash)
  let wallet = new ethers.Wallet(privateKey);
  let signature = await wallet.signMessage(dataHashBin);
  document.getElementById("signature").innerHTML = signature; // 0xfcc3e9431c139b5f943591af78c280b939595ce9df66210b7b8bb69565bdd2af7081a8acc0cbb5ea55bd0d673b176797966a5180c11ac297b7e6344c5822e66d1c
})();
<script src="https://cdn.ethers.io/lib/ethers-5.0.umd.min.js" type="text/javascript"></script>
<p style="font-family:'Courier New', monospace;" id="signature"></p>
which produces the following signature:
0xfcc3e9431c139b5f943591af78c280b939595ce9df66210b7b8bb69565bdd2af7081a8acc0cbb5ea55bd0d673b176797966a5180c11ac297b7e6344c5822e66d1c
The Go code below is based on the unmodified posted Go code, but using key and data from the JavaScript code for a comparison:
package main
import (
"fmt"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"encoding/hex"
"encoding/json"
"log"
)
// signHash returns the Keccak-256 hash of data prefixed with the EIP-191
// "\x19Ethereum Signed Message:\n<decimal byte length>" header.
func signHash(data []byte) common.Hash {
msg := fmt.Sprintf("\x19Ethereum Signed Message:\n%d%s", len(data), data)
return crypto.Keccak256Hash([]byte(msg))
}
func main() {
hexPrivateKey := "8da4ef21b864d2cc526dbdb2a120bd2874c36c9d0a1fb7f8c63d7f7a8b41de8f"
dataMap := map[string]string{"data1":"value1","data2":"value2"}
// Marshal the same JSON payload the JavaScript example signs.
dataToSign, _ := json.Marshal(dataMap)
privateKey, err := crypto.HexToECDSA(hexPrivateKey)
if err != nil {
log.Fatal(err)
}
// Keccak-256 of the JSON bytes.
dataHash := crypto.Keccak256Hash(dataToSign) //0x8d218fc37d2fd952b2d115046b786b787e44d105cccf156882a2e74ad993ee13
// Apply the EIP-191 prefix, then sign the resulting digest.
signHash := signHash(dataHash.Bytes())
signatureBytes, err := crypto.Sign(signHash.Bytes(), privateKey)
if err != nil {
log.Fatal(err)
}
// NOTE(review): crypto.Sign returns r|s|recoveryID while ethers returns
// r|s|v with v = 27 + recoveryID, so the last byte differs (explained in
// the answer text that follows).
fmt.Println("0x" + hex.EncodeToString(signatureBytes))
}
The Go Code gives the following signature:
0xfcc3e9431c139b5f943591af78c280b939595ce9df66210b7b8bb69565bdd2af7081a8acc0cbb5ea55bd0d673b176797966a5180c11ac297b7e6344c5822e66d01
Both signatures match except for the last byte.
The JavaScript code returns the signature in the format r|s|v (see here). v is one byte in size and is just the value in which both signatures differ.
It is v = 27 + rid where rid is the recovery ID. The recovery ID has values between 0 and 3, so v has values between 27 and 30 or 0x1b and 0x1e (see here).
The Go code, on the other hand, returns the recovery ID in the last byte instead of v. So that the signature of the Go code matches that of the JavaScript code in the last byte as well, the recovery ID must be replaced by v:
signatureBytes[64] += 27
fmt.Println("0x" + hex.EncodeToString(signatureBytes))

How to handle invalid keys in NewCBCDecrypter example?

I found an example for the usage of CBC decrypter:
https://golang.org/pkg/crypto/cipher/#NewCBCDecrypter
package main
import (
"crypto/aes"
"crypto/cipher"
"encoding/hex"
"fmt"
)
func main() {
// Example key and ciphertext (from the crypto/cipher documentation); the
// IV is prepended to the ciphertext.
key, _ := hex.DecodeString("6368616e676520746869732070617373")
ciphertext, _ := hex.DecodeString("73c86d43a9d700a253a96c85b0f6b03ac9792e0e757f869cca306bd3cba1c62b")
block, err := aes.NewCipher(key)
if err != nil {
panic(err)
}
if len(ciphertext) < aes.BlockSize {
panic("ciphertext too short")
}
// The IV occupies the first block; it need not be secret.
iv := ciphertext[:aes.BlockSize]
ciphertext = ciphertext[aes.BlockSize:]
// CBC always operates on whole blocks.
if len(ciphertext)%aes.BlockSize != 0 {
panic("ciphertext is not a multiple of the block size")
}
mode := cipher.NewCBCDecrypter(block, iv)
// Decrypt in place. With a wrong key this still "succeeds" and simply
// yields garbage bytes — CBC itself cannot detect a bad key (see answer).
mode.CryptBlocks(ciphertext, ciphertext)
fmt.Printf("%s\n", ciphertext)
}
It works fine. But if I modify the key to make it invalid then it returns some strange characters instead of throwing an error.
How could I make it more programmer friendly by throwing an error if the key is invalid?
How could I make it more programmer friendly by throwing an error if the key is invalid?
You cannot. That simply isn't how this stuff works. You supply a key and this key is used to decipher the input. There is no "correct" or "wrong" key here, there is just a key here.
You might be able to inspect the deciphered output: If you know the output always starts with some magic prefix or is a well-formed XML or anything you can check reliably after deciphering you can fail. But there is really nothing intrinsic to a key being wrong or right.

Go lang 3DES partially decrypted the encrypted string

While performing decryption using 3DES, the given encrypted text is not fully decrypted. I am not sure where it went wrong; please help me fix the decryption error.
The code is available at the Go Playground for inspection and running.
package main
import (
"crypto/des"
"encoding/hex"
"fmt"
)
func main() {
// Triple DES requires a 24-byte key (8 bytes per DES stage).
key := "mysecretPasswordkeySiz24"
plainText := "https://8gwifi.org"
ct := EncryptTripleDES([]byte(key),plainText)
fmt.Printf("Original Text: %s\n",plainText)
fmt.Printf("3DES Encrypted Text: %s\n", ct)
DecryptTripleDES([]byte(key),ct)
}
// EncryptTripleDES hex-encodes the result of running the raw block cipher
// over the plaintext. BUG (discussed in the answer below): cipher.Block's
// Encrypt processes only a single 8-byte DES block, so just the first
// 8 bytes of plaintext are encrypted; the rest of `out` stays zeroed.
func EncryptTripleDES(key []byte, plaintext string) string {
c,err := des.NewTripleDESCipher(key)
if err != nil {
// NOTE(review): fmt.Errorf's return value is discarded here; the error
// is only surfaced by the panic below.
fmt.Errorf("NewTripleDESCipher(%d bytes) = %s", len(key), err)
panic(err)
}
out := make([]byte, len(plaintext))
c.Encrypt(out, []byte(plaintext))
return hex.EncodeToString(out)
}
// DecryptTripleDES hex-decodes ct and runs a single raw block decryption.
// As with encryption, cipher.Block's Decrypt handles only the first
// 8-byte block, so only the first 8 bytes of the ciphertext are recovered.
func DecryptTripleDES(key []byte, ct string) {
// NOTE(review): the hex.DecodeString error is silently discarded.
ciphertext, _ := hex.DecodeString(ct)
c, err := des.NewTripleDESCipher([]byte(key))
if err != nil {
// NOTE(review): fmt.Errorf's result is discarded; only the panic reports.
fmt.Errorf("NewTripleDESCipher(%d bytes) = %s", len(key), err)
panic(err)
}
plain := make([]byte, len(ciphertext))
c.Decrypt(plain, ciphertext)
s := string(plain[:])
fmt.Printf("3DES Decrypyed Text: %s\n", s)
}
The output
Original Text: https://8gwifi.org
3DES Encrypted Text: a6e5215154bf86d000000000000000000000
3DES Decrypyed Text: https://
the given encrypted text is not fully decrypted
The encrypted text you gave is fully decrypted. The problem is not (yet) the decryption but your encryption. As documented des.NewTripleDESCipher returns a cipher.Block and cipher.Block.Encrypt encrypts as documented only the first block of the input data. Given that DES has a block size of 8 byte only the first 8 byte of the input data are encrypted, i.e. https://.
This means in order to encrypt all data you must encrypt all blocks. Similar you need to decrypt all blocks when decrypting - but cipher.Block.Decrypt also decrypts only a single block.
Apart from that DES is broken, so don't use it for something serious.

equivalent salt and hash in golang

Here's an example of salting and hashing a given password in python.
# Python 2 example (str.encode('hex') / str.decode('hex') do not exist in
# Python 3): hash a password with scrypt using a random salt.
import scrypt
import os
# Length of salt
PW_SALT_BYTES = 32
# Length of scrypt hash of passwords
PW_HASH_BYTES = 64
# test password
password = "hello"
# 32 random bytes, hex-encoded to 64 hex characters.
salt = os.urandom(PW_SALT_BYTES).encode('hex')
# hash(password, salt, N=1 << 14, r=8, p=1, buflen=64)
# NOTE(review): the salt is hex-decoded back to raw bytes before hashing,
# so the hex round-trip here is redundant.
hashed_password = scrypt.hash(str(password), salt.decode('hex'), buflen=PW_HASH_BYTES).encode('hex')
print(hashed_password)
Which would give us a hashed and salted string in return:
4d1da45b401961fccb10e094ecd70ec79510f05483ca293d300bbd0024e35866ca39fe09fbc15f83a359431021a1ed9644f7d2b871b357e37a186300877edb18
How would I implement this in golang?
Rather than using scrypt, a great library for securely hashing passwords with random salts in Golang is golang.org/x/crypto/bcrypt, as mentioned in the following answer:
Bcrypt password hashing in Golang (compatible with Node.js)?
A couple benefits of using bcrypt instead of scrypt:
The salt is automatically (and randomly) generated upon hashing a password, so that you don't have to worry about salt generation.
When storing hashed passwords in a database, you no longer have to worry about storing the salt for each password hash as well.
The syntax is simplified for hashing and checking passwords.
The hash produced by bcrypt includes the bcrypt version, cost, salt and cipher, not only the cipher.
Here's an example of using bcrypt taken from the above answer:
package main
import (
"golang.org/x/crypto/bcrypt"
"fmt"
)
func main() {
password := []byte("MyDarkSecret")
// Hashing the password with the default cost of 10. bcrypt generates a
// random salt internally and embeds version, cost and salt in the output,
// so nothing besides the hash itself needs to be stored.
hashedPassword, err := bcrypt.GenerateFromPassword(password, bcrypt.DefaultCost)
if err != nil {
panic(err)
}
fmt.Println(string(hashedPassword))
// Comparing the password with the hash
err = bcrypt.CompareHashAndPassword(hashedPassword, password)
fmt.Println(err) // nil means it is a match
}
Go doesn't have scrypt in the standard library but there is an "official" implementation in the go.crypto repo.
import (
"crypto/rand"
"fmt"
"io"
"log"
"code.google.com/p/go.crypto/scrypt"
)
const (
PW_SALT_BYTES = 32
PW_HASH_BYTES = 64
password = "hello"
)
func main() {
// Generate a random 32-byte salt.
salt := make([]byte, PW_SALT_BYTES)
_, err := io.ReadFull(rand.Reader, salt)
if err != nil {
log.Fatal(err)
}
// scrypt.Key(password, salt, N, r, p, keyLen) — N=1<<14, r=8, p=1 match
// the Python example's defaults. NOTE(review): the code.google.com import
// path is defunct; the same package now lives at golang.org/x/crypto/scrypt
// (as the next answer notes).
hash, err := scrypt.Key([]byte(password), salt, 1<<14, 8, 1, PW_HASH_BYTES)
if err != nil {
log.Fatal(err)
}
// Print the 64-byte hash hex-encoded.
fmt.Printf("%x\n", hash)
}
It looks like now Go has scrypt in official library. Its subrepository x/crypto among many other crypto functions has an scrypt.
Here is an example of how you can use it:
package main
import (
"golang.org/x/crypto/scrypt"
"fmt"
)
func main(){
// Fixed salt for demonstration only — use a random salt in real code.
salt := []byte("asdfasdf")
// Parameters: N=16384, r=8, p=1, 32-byte derived key.
dk, err := scrypt.Key([]byte("some password"), salt, 16384, 8, 1, 32)
fmt.Println(dk)
fmt.Println(err)
}
Here's a complete hashing utilities funcs I wrote based on RFC 2898 / PKCS #5 v2.0.
Hash can be used to hash passwords — straightforwardly, something like Hash("hello") —
while Verify can be used to check a raw password against a hash: it hashes the raw string and compares the result with the actual hash.
package common
import (
"crypto/rand"
"crypto/sha1"
"encoding/base64"
"errors"
"fmt"
"golang.org/x/crypto/pbkdf2"
"io"
"strconv"
"strings"
)
const (
SALT_BYTE_SIZE = 24
HASH_BYTE_SIZE = 24
PBKDF2_ITERATIONS = 1000
)
// Hash derives a PBKDF2-SHA1 hash of password with a fresh random salt and
// returns it encoded as "iterations:base64(salt):base64(hash)".
// NOTE(review): 1000 iterations and SHA-1 are low by current standards;
// consider raising the count and switching the digest.
func Hash(password string) (string, error) {
salt := make([]byte, SALT_BYTE_SIZE)
if _, err := io.ReadFull(rand.Reader, salt); err != nil {
fmt.Print("Err generating random salt")
return "", errors.New("Err generating random salt")
}
//todo: enhance: randomize itrs as well
hbts := pbkdf2.Key([]byte(password), salt, PBKDF2_ITERATIONS, HASH_BYTE_SIZE, sha1.New)
//hbtstr := fmt.Sprintf("%x", hbts)
// Encode iteration count, salt and derived key into one storable string.
return fmt.Sprintf("%v:%v:%v",
PBKDF2_ITERATIONS,
base64.StdEncoding.EncodeToString(salt),
base64.StdEncoding.EncodeToString(hbts)), nil
}
// Verify re-hashes raw with the parameters embedded in hash (formatted as
// "iterations:base64(salt):base64(hash)", as produced by Hash) and reports
// whether they match, using a constant-time comparison.
func Verify(raw, hash string) (bool, error) {
	hparts := strings.Split(hash, ":")
	// Fix: a malformed hash with fewer than three fields previously caused
	// an index-out-of-range panic below; reject it explicitly instead.
	if len(hparts) != 3 {
		fmt.Printf("wrong hash %v", hash)
		return false, errors.New("wrong hash, expected iterations:salt:hash")
	}
	itr, err := strconv.Atoi(hparts[0])
	if err != nil {
		fmt.Printf("wrong hash %v", hash)
		return false, errors.New("wrong hash, iteration is invalid")
	}
	salt, err := base64.StdEncoding.DecodeString(hparts[1])
	if err != nil {
		fmt.Print("wrong hash, salt error:", err)
		return false, errors.New("wrong hash, salt error:" + err.Error())
	}
	hsh, err := base64.StdEncoding.DecodeString(hparts[2])
	if err != nil {
		fmt.Print("wrong hash, hash error:", err)
		return false, errors.New("wrong hash, hash error:" + err.Error())
	}
	// Re-derive with the same iteration count and output length, then
	// compare in constant time.
	rhash := pbkdf2.Key([]byte(raw), salt, itr, len(hsh), sha1.New)
	return equal(rhash, hsh), nil
}
// equal reports whether h1 and h2 hold identical bytes, comparing in
// constant time so the position of the first mismatch is not leaked.
func equal(h1, h2 []byte) bool {
	mismatch := uint32(len(h1)) ^ uint32(len(h2))
	shorter := h1
	if len(h2) < len(shorter) {
		shorter = h2
	}
	for i := range shorter {
		mismatch |= uint32(h1[i] ^ h2[i])
	}
	return mismatch == 0
}
Here's unit test that would help you figuring out how to call such funcs
package common
import (
"github.com/stretchr/testify/assert"
"testing"
)
// TestHash checks that hashing succeeds and yields a non-empty encoded hash.
func TestHash(t *testing.T) {
hash, err := Hash("hello")
assert.Nil(t, err)
assert.NotEmpty(t, hash)
}
// TestVerify checks the round trip: a password hashed by Hash must be
// accepted by Verify.
func TestVerify(t *testing.T) {
hash, err := Hash("hello")
assert.Nil(t, err)
assert.NotEmpty(t, hash)
ok, err := Verify("hello", hash)
assert.Nil(t, err)
assert.True(t, ok)
}

Resources