From 54d33bdb63bb7808733e5001c65babd66697f18e Mon Sep 17 00:00:00 2001
From: Sokolov Yura
Date: Wed, 8 Jul 2020 20:29:10 +0300
Subject: [PATCH 1/8] refactor timestamp generation

---
 securecookie.go | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/securecookie.go b/securecookie.go
index b718ce9..048cc1f 100644
--- a/securecookie.go
+++ b/securecookie.go
@@ -278,7 +278,7 @@ func (s *SecureCookie) Encode(name string, value interface{}) (string, error) {
 	}
 	b = encode(b)
 	// 3. Create MAC for "name|date|value". Extra pipe to be used later.
-	b = []byte(fmt.Sprintf("%s|%d|%s|", name, s.timestamp(), b))
+	b = []byte(fmt.Sprintf("%s|%d|%s|", name, timestamp(), b))
 	mac := createMac(hmac.New(s.hashFunc, s.hashKey), b[:len(b)-1])
 	// Append mac, remove name.
 	b = append(b, mac...)[len(name)+1:]
@@ -332,7 +332,7 @@ func (s *SecureCookie) Decode(name, value string, dst interface{}) error {
 	if t1, err = strconv.ParseInt(string(parts[0]), 10, 64); err != nil {
 		return errTimestampInvalid
 	}
-	t2 := s.timestamp()
+	t2 := timestamp()
 	if s.minAge != 0 && t1 > t2-s.minAge {
 		return errTimestampTooNew
 	}
@@ -357,15 +357,20 @@ func (s *SecureCookie) Decode(name, value string, dst interface{}) error {
 	return nil
 }
 
+var faketsnano int64
+
+func timestampNano() int64 {
+	if faketsnano != 0 {
+		return faketsnano
+	}
+	return time.Now().UnixNano()
+}
+
 // timestamp returns the current timestamp, in seconds.
 //
-// For testing purposes, the function that generates the timestamp can be
-// overridden. If not set, it will return time.Now().UTC().Unix().
-func (s *SecureCookie) timestamp() int64 {
-	if s.timeFunc == nil {
-		return time.Now().UTC().Unix()
-	}
-	return s.timeFunc()
+// For testing purposes, the faketsnano variable can be overridden.
+func timestamp() int64 {
+	return timestampNano() / 1000000000
 }
 
 // Authentication -------------------------------------------------------------

From 406f3ed40300397a1b86e0826f425e4e82b43a00 Mon Sep 17 00:00:00 2001
From: Sokolov Yura
Date: Wed, 8 Jul 2020 21:07:49 +0300
Subject: [PATCH 2/8] add compact cookie encoding

Use Blake2s exclusively as a MAC and ChaCha20 as a stream cipher.
Blake2s is always faster than SHA-2 and can safely be used as a MAC
without the HMAC construction (and is therefore much faster than
HMAC-SHA256). ChaCha20 is a bit slower than AES-CTR, but it causes
fewer allocations, and it is faster when AES hardware acceleration is
unavailable (App Engine, for example).
---
 compact.go      | 185 ++++++++++++++++++++++++++++++++++++++++++++++++
 go.mod          |   2 +
 securecookie.go |  46 +++++++++---
 3 files changed, 222 insertions(+), 11 deletions(-)
 create mode 100644 compact.go

diff --git a/compact.go b/compact.go
new file mode 100644
index 0000000..7bc4c4c
--- /dev/null
+++ b/compact.go
@@ -0,0 +1,185 @@
+package securecookie
+
+import (
+	"crypto/subtle"
+	"encoding/base64"
+	"encoding/binary"
+	"hash"
+	"sync"
+
+	"golang.org/x/crypto/blake2s"
+	"golang.org/x/crypto/chacha20"
+)
+
+const (
+	nameMaxLen = 127
+	keyLen     = 32
+	macLen     = 15
+	timeLen    = 8
+	versionLen = 1
+	version    = 0
+)
+
+func (s *SecureCookie) prepareCompactKeys() {
+	// Initialize for compact encoding even if genCompact is not set, to
+	// allow a two-step migration.
+ s.compactHashKey = blake2s.Sum256(s.hashKey) + bl, _ := blake2s.New256(s.compactHashKey[:]) + _, _ = bl.Write(s.blockKey) + copy(s.compactBlockKey[:], bl.Sum(nil)) +} + +func (s *SecureCookie) encodeCompact(name string, serialized []byte) (string, error) { + if len(name) > nameMaxLen { + return "", errNameTooLong + } + + // Check length + encodedLen := base64.URLEncoding.EncodedLen(len(serialized) + macLen + timeLen + versionLen) + if s.maxLength != 0 && encodedLen > s.maxLength { + return "", errEncodedValueTooLong + } + + // form message + r := make([]byte, versionLen+macLen+timeLen+len(serialized)) + r[0] = version + m := r[versionLen:] + tag, body := m[:macLen], m[macLen:] + binary.LittleEndian.PutUint64(body, uint64(timeShift(timestampNano()))) + copy(body[timeLen:], serialized) + + // Mac + s.compactMac(version, name, body, tag) + + // Encrypt (if needed) + s.compactXorStream(tag, body) + + // Encode + return base64.RawURLEncoding.EncodeToString(r), nil +} + +func (s *SecureCookie) decodeCompact(name string, encoded string, dest interface{}) error { + if len(name) > nameMaxLen { + return errNameTooLong + } + + decoded, err := base64.RawURLEncoding.DecodeString(encoded) + if err != nil { + return cookieError{cause: err, typ: decodeError, msg: "base64 decode failed"} + } + + if len(encoded) < macLen+timeLen+versionLen { + return errValueToDecodeTooShort + } + + // Decompose + if decoded[0] != version { + // there is only version currently + return errVersionDoesntMatch + } + + m := decoded[versionLen:] + tag, body := m[:macLen], m[macLen:] + + // Decrypt (if need) + s.compactXorStream(tag, body) + + // Check time + ts := int64(binary.LittleEndian.Uint64(body)) + now := timeShift(timestampNano()) + if s.maxAge > 0 && ts+secsShift(s.maxAge) < now { + return errTimestampExpired + } + if s.minAge > 0 && ts+secsShift(s.minAge) > now { + return errTimestampExpired + } + if !timeValid(ts) { + // We are checking bytes we explicitely leaved as zero as preliminary + // MAC check. We could do it because ChaCha20 has no known plaintext + // issues. + return ErrMacInvalid + } + + // Verify + var mac [macLen]byte + s.compactMac(version, name, body, mac[:]) + if subtle.ConstantTimeCompare(mac[:], tag) == 0 { + return ErrMacInvalid + } + + // Deserialize + if err := s.sz.Deserialize(body[timeLen:], dest); err != nil { + return cookieError{cause: err, typ: decodeError} + } + + return nil +} + +var macPool = sync.Pool{New: func() interface{} { + hsh, _ := blake2s.New256(nil) + return &macbuf{Hash: hsh} +}} + +type macbuf struct { + hash.Hash + buf [3 + nameMaxLen]byte + sum [32]byte +} + +func (m *macbuf) Reset() { + m.Hash.Reset() + m.buf = [3 + nameMaxLen]byte{} + m.sum = [32]byte{} +} + +func (s *SecureCookie) compactMac(version byte, name string, body, mac []byte) { + enc := macPool.Get().(*macbuf) + + // While it is not "recommended" way to mix key in, it is still valid + // because 1) Blake2b is not susceptible to length-extention attack, 2) + // "recommended" way does almost same, just stores key length in other place + // (it mixes length into constan iv itself). 
+ enc.buf[0] = version + // name should not be longer than 127 bytes to fallback to varint in a future + enc.buf[1] = byte(len(name)) + enc.buf[2] = keyLen + copy(enc.buf[3:], name) + + _, _ = enc.Write(enc.buf[:3+len(name)]) + _, _ = enc.Write(s.hashKey[:]) + _, _ = enc.Write(body) + + copy(mac, enc.Sum(enc.sum[:0])) + + enc.Reset() + macPool.Put(enc) +} + +func (s *SecureCookie) compactXorStream(tag, body []byte) { + if len(s.blockKey) == 0 { // no blockKey - no encryption + return + } + key := s.compactBlockKey + // Mix remaining tag bytes into key. + // We may do it because ChaCha20 has no related keys issues. + key[29] ^= tag[12] + key[30] ^= tag[13] + key[31] ^= tag[14] + stream, err := chacha20.NewUnauthenticatedCipher(key[:], tag[:12]) + if err != nil { + panic("stream initialization failed") + } + stream.XORKeyStream(body, body) +} + +func timeShift(t int64) int64 { + return t >> 16 +} + +func timeValid(t int64) bool { + return (t >> (64 - 16)) == 0 +} + +func secsShift(t int64) int64 { + return (t * 1000000000) >> 16 +} diff --git a/go.mod b/go.mod index db69e44..60b7f26 100644 --- a/go.mod +++ b/go.mod @@ -1 +1,3 @@ module github.com/gorilla/securecookie + +require golang.org/x/crypto v0.0.0-20200707235045-ab33eee955e0 diff --git a/securecookie.go b/securecookie.go index 048cc1f..ab43351 100644 --- a/securecookie.go +++ b/securecookie.go @@ -89,20 +89,24 @@ func (e cookieError) Error() string { } var ( - errGeneratingIV = cookieError{typ: internalError, msg: "failed to generate random iv"} + errGeneratingIV = cookieError{typ: internalError, msg: "failed to generate random iv"} + errGeneratingMAC = cookieError{typ: internalError, msg: "failed to generate mac"} errNoCodecs = cookieError{typ: usageError, msg: "no codecs provided"} errHashKeyNotSet = cookieError{typ: usageError, msg: "hash key is not set"} errBlockKeyNotSet = cookieError{typ: usageError, msg: "block key is not set"} errEncodedValueTooLong = cookieError{typ: usageError, msg: "the value is too long"} - errValueToDecodeTooLong = cookieError{typ: decodeError, msg: "the value is too long"} - errTimestampInvalid = cookieError{typ: decodeError, msg: "invalid timestamp"} - errTimestampTooNew = cookieError{typ: decodeError, msg: "timestamp is too new"} - errTimestampExpired = cookieError{typ: decodeError, msg: "expired timestamp"} - errDecryptionFailed = cookieError{typ: decodeError, msg: "the value could not be decrypted"} - errValueNotByte = cookieError{typ: decodeError, msg: "value not a []byte."} - errValueNotBytePtr = cookieError{typ: decodeError, msg: "value not a pointer to []byte."} + errValueToDecodeTooLong = cookieError{typ: decodeError, msg: "the value is too long"} + errTimestampInvalid = cookieError{typ: decodeError, msg: "invalid timestamp"} + errTimestampTooNew = cookieError{typ: decodeError, msg: "timestamp is too new"} + errTimestampExpired = cookieError{typ: decodeError, msg: "expired timestamp"} + errDecryptionFailed = cookieError{typ: decodeError, msg: "the value could not be decrypted"} + errValueNotByte = cookieError{typ: decodeError, msg: "value not a []byte."} + errValueNotBytePtr = cookieError{typ: decodeError, msg: "value not a pointer to []byte."} + errNameTooLong = cookieError{typ: usageError, msg: "cookie name is too long"} + errValueToDecodeTooShort = cookieError{typ: decodeError, msg: "the value is too short"} + errVersionDoesntMatch = cookieError{typ: decodeError, msg: "value version unknown"} // ErrMacInvalid indicates that cookie decoding failed because the HMAC // could not be extracted and 
verified. Direct use of this error
@@ -147,6 +151,7 @@ func New(hashKey, blockKey []byte) *SecureCookie {
 	if blockKey != nil {
 		s.BlockFunc(aes.NewCipher)
 	}
+	s.prepareCompactKeys()
 	return s
 }
 
@@ -162,9 +167,10 @@ type SecureCookie struct {
 	minAge    int64
 	err       error
 	sz        Serializer
-	// For testing purposes, the function that returns the current timestamp.
-	// If not set, it will use time.Now().UTC().Unix().
-	timeFunc func() int64
+
+	compactHashKey  [keyLen]byte
+	compactBlockKey [keyLen]byte
+	genCompact      bool
 }
 
 // Serializer provides an interface for providing custom serializers for cookie
@@ -244,6 +250,16 @@ func (s *SecureCookie) SetSerializer(sz Serializer) *SecureCookie {
 	return s
 }
 
+// Compact sets the generation mode.
+//
+// If set to true, the compact encoding is used when encoding cookies.
+// Note that it uses Blake2s as the hash function and ChaCha20 as the cipher
+// exclusively, and the hash key and block key are derived with Blake2s.
+func (s *SecureCookie) Compact(c bool) *SecureCookie {
+	s.genCompact = c
+	return s
+}
+
 // Encode encodes a cookie value.
 //
 // It serializes, optionally encrypts, signs with a message authentication code,
@@ -270,6 +286,11 @@ func (s *SecureCookie) Encode(name string, value interface{}) (string, error) {
 	if b, err = s.sz.Serialize(value); err != nil {
 		return "", cookieError{cause: err, typ: usageError}
 	}
+
+	if s.genCompact {
+		return s.encodeCompact(name, b)
+	}
+
 	// 2. Encrypt (optional).
 	if s.block != nil {
 		if b, err = encrypt(s.block, b); err != nil {
@@ -312,6 +333,9 @@ func (s *SecureCookie) Decode(name, value string, dst interface{}) error {
 	if s.maxLength != 0 && len(value) > s.maxLength {
 		return errValueToDecodeTooLong
 	}
+	if len(value) > 0 && value[0] == 'A' { // leading 'A' means the first decoded byte is < 0x04, i.e. a compact version byte
+		return s.decodeCompact(name, value, dst)
+	}
 	// 2. Decode from base64.
 	b, err := decode([]byte(value))
 	if err != nil {

From d410dc285fb7e36557fd7589ef23f72bf44dd4e3 Mon Sep 17 00:00:00 2001
From: Sokolov Yura
Date: Thu, 9 Jul 2020 14:51:17 +0300
Subject: [PATCH 3/8] add tests and benchmark for compact encoding

BenchmarkLegacyEncode-8     239144    4504 ns/op    3445 B/op    21 allocs/op
BenchmarkLegacyEncode-8     261038    4499 ns/op    3442 B/op    21 allocs/op
BenchmarkCompactEncode-8    933446    1304 ns/op     721 B/op     4 allocs/op
BenchmarkCompactEncode-8    927328    1331 ns/op     721 B/op     4 allocs/op
BenchmarkLegacyDecode-8     298273    3923 ns/op    2312 B/op    17 allocs/op
BenchmarkLegacyDecode-8     298401    3944 ns/op    2367 B/op    17 allocs/op
BenchmarkCompactDecode-8    806112    1359 ns/op     465 B/op     3 allocs/op
BenchmarkCompactDecode-8    905617    1348 ns/op     471 B/op     3 allocs/op
---
 securecookie_test.go | 67 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 67 insertions(+)

diff --git a/securecookie_test.go b/securecookie_test.go
index c32ff33..c703ffb 100644
--- a/securecookie_test.go
+++ b/securecookie_test.go
@@ -10,6 +10,7 @@ import (
 	"crypto/sha256"
 	"encoding/base64"
 	"fmt"
+	"math/rand"
 	"reflect"
 	"strings"
 	"testing"
@@ -35,7 +36,13 @@ func TestSecureCookie(t *testing.T) {
 		"baz": 128,
 	}
 
+	rng := rand.New(rand.NewSource(1))
+
 	for i := 0; i < 50; i++ {
+		t.Log("i=", i)
+		s1.Compact(i&1 != 0)
+		s2.Compact(i&2 != 0)
+
 		// Running this multiple times to check if any special character
 		// breaks encoding/decoding.
 		encoded, err1 := s1.Encode("sid", value)
 		if err1 != nil {
@@ -71,6 +78,20 @@ func TestSecureCookie(t *testing.T) {
 		if err4.IsInternal() {
 			t.Fatalf("Expected IsInternal() == false, got: %#v", err4)
 		}
+
+		// check that decoding works regardless of the generation mode
+		s1.Compact(!s1.genCompact)
+		dst3 := make(map[string]interface{})
+		err5 := s1.Decode("sid", encoded, &dst3)
+		if err5 != nil {
+			t.Fatalf("%v: %v", err5, encoded)
+		}
+		if !reflect.DeepEqual(dst3, value) {
+			t.Fatalf("Expected %v, got %v.", value, dst3)
+		}
+
+		value["foo"] = "bar" + string([]rune{rune(rng.Intn(1024) + 20)})
+		value["bas"] = rng.Intn(1000000)
 	}
 }
 
@@ -306,3 +327,49 @@ func TestCustomType(t *testing.T) {
 		t.Fatalf("Expected %#v, got %#v", src, dst)
 	}
 }
+
+const N = 250
+
+func benchmarkEncode(b *testing.B, compact bool) {
+	hk := make([]byte, 32)
+	bk := make([]byte, 32)
+	buf := make([]byte, N)
+	rand.Read(hk)
+	rand.Read(bk)
+	rand.Read(buf)
+	sec := New(hk, bk)
+	sec.SetSerializer(NopEncoder{})
+	sec.Compact(compact)
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		v := buf[:rand.Intn(N-N/4)+N/4]
+		_, _ = sec.Encode("session", v)
+	}
+}
+
+func benchmarkDecode(b *testing.B, compact bool) {
+	hk := make([]byte, 32)
+	bk := make([]byte, 32)
+	buf := make([]byte, N)
+	rand.Read(hk)
+	rand.Read(bk)
+	rand.Read(buf)
+	sec := New(hk, bk)
+	sec.SetSerializer(NopEncoder{})
+	sec.Compact(compact)
+	vals := make([]string, 128)
+	for i := range vals {
+		v := buf[:rand.Intn(N-N/4)+N/4]
+		vals[i], _ = sec.Encode("session", v)
+	}
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		var v []byte
+		_ = sec.Decode("session", vals[i&127], &v)
+	}
+}
+
+func BenchmarkLegacyEncode(b *testing.B)  { benchmarkEncode(b, false) }
+func BenchmarkCompactEncode(b *testing.B) { benchmarkEncode(b, true) }
+func BenchmarkLegacyDecode(b *testing.B)  { benchmarkDecode(b, false) }
+func BenchmarkCompactDecode(b *testing.B) { benchmarkDecode(b, true) }

From f278b8514e46be671dc24618f66142ccb23d5ed1 Mon Sep 17 00:00:00 2001
From: Sokolov Yura
Date: Thu, 9 Jul 2020 15:42:08 +0300
Subject: [PATCH 4/8] remove go 1.7, 1.8 from circleci

---
 .circleci/config.yml | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index c2f13d4..659ae4d 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -41,17 +41,6 @@ jobs:
     docker:
       - image: circleci/golang:1.9
 
-  "1.8":
-    <<: *test
-    docker:
-      - image: circleci/golang:1.8
-
-  "1.7":
-    <<: *test
-    docker:
-      - image: circleci/golang:1.7
-
-
 workflows:
   version: 2
   build:
@@ -61,5 +50,3 @@ workflows:
           - "1.11"
           - "1.10"
          - "1.9"
-          - "1.8"
-          - "1.7"

From f83c21031222205597bfb92923820f5265d1d5b3 Mon Sep 17 00:00:00 2001
From: Sokolov Yura
Date: Thu, 9 Jul 2020 15:51:34 +0300
Subject: [PATCH 5/8] Document compact encoding.

---
 README.md | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/README.md b/README.md
index f416daa..58e0f9f 100644
--- a/README.md
+++ b/README.md
@@ -77,6 +77,24 @@ registered first using gob.Register(). For basic types this is not needed;
 it works out of the box. An optional JSON encoder that uses `encoding/json` is
 available for types compatible with JSON.
 
+### Compact Encoding
+
+The original encoding adds quite a bit of overhead to the encoded value, so a
+compact encoding is available to reduce cookie length. To simplify migration,
+the same SecureCookie instance can decode both encodings while generating
+whichever one you choose. The original encoding is generated by default, so
+you can safely update this library without any code changes.
+ +```go +var s = securecookie.New(hashKey, blockKey) +s.Compact(true) // enable generation of compact encoding. +s.Compact(false) // disable generation of compact encoding. It is default. +``` + +Not that algorithms are fixed with compact encoding: ChaCha20 is used for +stream cipher and Blake2s is used as a MAC and key expansion (to meet ChaCha20 +requirements for key length). + ### Key Rotation Rotating keys is an important part of any security strategy. The `EncodeMulti` and `DecodeMulti` functions allow for multiple keys to be rotated in and out. From b5523ec7299b14489fb3d38191c6a1c81d5c0b47 Mon Sep 17 00:00:00 2001 From: Sokolov Yura Date: Sun, 12 Jul 2020 12:29:30 +0300 Subject: [PATCH 6/8] less opinionated scheme simplify use of Blake2s and ChaCha: - use keyed Blake2s instance - use 192bit nonce with XChaCha20 (add 8 byte 'version+timestamp' header to mac) - get rid of cookie name length restriction (yes, allocate []byte(name)) --- compact.go | 150 ++++++++++++++++++++---------------------------- securecookie.go | 6 +- 2 files changed, 65 insertions(+), 91 deletions(-) diff --git a/compact.go b/compact.go index 7bc4c4c..6268b7c 100644 --- a/compact.go +++ b/compact.go @@ -5,181 +5,155 @@ import ( "encoding/base64" "encoding/binary" "hash" - "sync" "golang.org/x/crypto/blake2s" "golang.org/x/crypto/chacha20" ) const ( - nameMaxLen = 127 - keyLen = 32 - macLen = 15 - timeLen = 8 - versionLen = 1 - version = 0 + nameMaxLen = 127 + keyLen = 32 + macLen = 16 + headerLen = 8 + macHeaderLen = macLen + headerLen + version = 1 ) -func (s *SecureCookie) prepareCompactKeys() { +func (s *SecureCookie) prepareCompact() { // initialize for compact encoding even if no genCompact set to allow // two step migration. - s.compactHashKey = blake2s.Sum256(s.hashKey) - bl, _ := blake2s.New256(s.compactHashKey[:]) + hashKey := blake2s.Sum256(s.hashKey) + + bl, _ := blake2s.New256(hashKey[:]) _, _ = bl.Write(s.blockKey) copy(s.compactBlockKey[:], bl.Sum(nil)) -} -func (s *SecureCookie) encodeCompact(name string, serialized []byte) (string, error) { - if len(name) > nameMaxLen { - return "", errNameTooLong + s.macPool.New = func() interface{} { + hsh, _ := blake2s.New128(hashKey[:]) + return &macbuf{Hash: hsh} } +} +func (s *SecureCookie) encodeCompact(name string, serialized []byte) (string, error) { // Check length - encodedLen := base64.URLEncoding.EncodedLen(len(serialized) + macLen + timeLen + versionLen) + encodedLen := base64.URLEncoding.EncodedLen(len(serialized) + macLen + headerLen) if s.maxLength != 0 && encodedLen > s.maxLength { return "", errEncodedValueTooLong } // form message - r := make([]byte, versionLen+macLen+timeLen+len(serialized)) - r[0] = version - m := r[versionLen:] - tag, body := m[:macLen], m[macLen:] - binary.LittleEndian.PutUint64(body, uint64(timeShift(timestampNano()))) - copy(body[timeLen:], serialized) + r := make([]byte, headerLen+macLen+len(serialized)) + macHeader, body := r[:macHeaderLen], r[macHeaderLen:] + copy(body, serialized) + + header, mac := macHeader[:headerLen], macHeader[headerLen:] + binary.BigEndian.PutUint64(header, uint64(timeShift(timestampNano()))) + header[0] = version // it is made free in timestamp // Mac - s.compactMac(version, name, body, tag) + s.compactMac(header, name, body, mac) // Encrypt (if needed) - s.compactXorStream(tag, body) + s.compactXorStream(macHeader, body) // Encode return base64.RawURLEncoding.EncodeToString(r), nil } func (s *SecureCookie) decodeCompact(name string, encoded string, dest interface{}) error { - if len(name) > 
nameMaxLen { - return errNameTooLong - } - decoded, err := base64.RawURLEncoding.DecodeString(encoded) if err != nil { return cookieError{cause: err, typ: decodeError, msg: "base64 decode failed"} } - if len(encoded) < macLen+timeLen+versionLen { + if len(encoded) < macHeaderLen { return errValueToDecodeTooShort } + macHeader, body := decoded[:macHeaderLen], decoded[macHeaderLen:] + header, mac := macHeader[:headerLen], macHeader[headerLen:] + // Decompose - if decoded[0] != version { + if header[0] != version { // there is only version currently return errVersionDoesntMatch } - m := decoded[versionLen:] - tag, body := m[:macLen], m[macLen:] - - // Decrypt (if need) - s.compactXorStream(tag, body) - // Check time - ts := int64(binary.LittleEndian.Uint64(body)) - now := timeShift(timestampNano()) - if s.maxAge > 0 && ts+secsShift(s.maxAge) < now { + ts := timeUnshift(int64(binary.BigEndian.Uint64(header))) + now := timestampNano() + if s.maxAge > 0 && ts+secs2nano(s.maxAge) < now { return errTimestampExpired } - if s.minAge > 0 && ts+secsShift(s.minAge) > now { + if s.minAge > 0 && ts+secs2nano(s.minAge) > now { return errTimestampExpired } - if !timeValid(ts) { - // We are checking bytes we explicitely leaved as zero as preliminary - // MAC check. We could do it because ChaCha20 has no known plaintext - // issues. - return ErrMacInvalid - } - // Verify - var mac [macLen]byte - s.compactMac(version, name, body, mac[:]) - if subtle.ConstantTimeCompare(mac[:], tag) == 0 { + // Decrypt (if need) + s.compactXorStream(macHeader, body) + + // Check MAC + var macCheck [macLen]byte + s.compactMac(header, name, body, macCheck[:]) + if subtle.ConstantTimeCompare(mac, macCheck[:]) == 0 { return ErrMacInvalid } // Deserialize - if err := s.sz.Deserialize(body[timeLen:], dest); err != nil { + if err := s.sz.Deserialize(body, dest); err != nil { return cookieError{cause: err, typ: decodeError} } return nil } -var macPool = sync.Pool{New: func() interface{} { - hsh, _ := blake2s.New256(nil) - return &macbuf{Hash: hsh} -}} - type macbuf struct { hash.Hash - buf [3 + nameMaxLen]byte - sum [32]byte + nameLen [4]byte + sum [16]byte } func (m *macbuf) Reset() { m.Hash.Reset() - m.buf = [3 + nameMaxLen]byte{} - m.sum = [32]byte{} + m.sum = [16]byte{} } -func (s *SecureCookie) compactMac(version byte, name string, body, mac []byte) { - enc := macPool.Get().(*macbuf) - - // While it is not "recommended" way to mix key in, it is still valid - // because 1) Blake2b is not susceptible to length-extention attack, 2) - // "recommended" way does almost same, just stores key length in other place - // (it mixes length into constan iv itself). 
- enc.buf[0] = version - // name should not be longer than 127 bytes to fallback to varint in a future - enc.buf[1] = byte(len(name)) - enc.buf[2] = keyLen - copy(enc.buf[3:], name) - - _, _ = enc.Write(enc.buf[:3+len(name)]) - _, _ = enc.Write(s.hashKey[:]) +func (s *SecureCookie) compactMac(header []byte, name string, body, mac []byte) { + enc := s.macPool.Get().(*macbuf) + + binary.BigEndian.PutUint32(enc.nameLen[:], uint32(len(name))) + _, _ = enc.Write(header) + _, _ = enc.Write(enc.nameLen[:]) + _, _ = enc.Write([]byte(name)) _, _ = enc.Write(body) copy(mac, enc.Sum(enc.sum[:0])) enc.Reset() - macPool.Put(enc) + s.macPool.Put(enc) } -func (s *SecureCookie) compactXorStream(tag, body []byte) { +func (s *SecureCookie) compactXorStream(nonce, body []byte) { if len(s.blockKey) == 0 { // no blockKey - no encryption return } - key := s.compactBlockKey - // Mix remaining tag bytes into key. - // We may do it because ChaCha20 has no related keys issues. - key[29] ^= tag[12] - key[30] ^= tag[13] - key[31] ^= tag[14] - stream, err := chacha20.NewUnauthenticatedCipher(key[:], tag[:12]) + stream, err := chacha20.NewUnauthenticatedCipher(s.compactBlockKey[:], nonce) if err != nil { panic("stream initialization failed") } stream.XORKeyStream(body, body) } +// timeShift ensures high byte is zero to use it for version func timeShift(t int64) int64 { - return t >> 16 + return t >> 8 } -func timeValid(t int64) bool { - return (t >> (64 - 16)) == 0 +// timeUnshift restores timestamp to nanoseconds + clears high byte +func timeUnshift(t int64) int64 { + return t << 8 } -func secsShift(t int64) int64 { - return (t * 1000000000) >> 16 +func secs2nano(t int64) int64 { + return t * 1000000000 } diff --git a/securecookie.go b/securecookie.go index ab43351..c1090f9 100644 --- a/securecookie.go +++ b/securecookie.go @@ -20,6 +20,7 @@ import ( "io" "strconv" "strings" + "sync" "time" ) @@ -104,7 +105,6 @@ var ( errDecryptionFailed = cookieError{typ: decodeError, msg: "the value could not be decrypted"} errValueNotByte = cookieError{typ: decodeError, msg: "value not a []byte."} errValueNotBytePtr = cookieError{typ: decodeError, msg: "value not a pointer to []byte."} - errNameTooLong = cookieError{typ: usageError, msg: "cookie name is too long"} errValueToDecodeTooShort = cookieError{typ: decodeError, msg: "the value is too short"} errVersionDoesntMatch = cookieError{typ: decodeError, msg: "value version unknown"} @@ -151,7 +151,7 @@ func New(hashKey, blockKey []byte) *SecureCookie { if blockKey != nil { s.BlockFunc(aes.NewCipher) } - s.prepareCompactKeys() + s.prepareCompact() return s } @@ -168,8 +168,8 @@ type SecureCookie struct { err error sz Serializer - compactHashKey [keyLen]byte compactBlockKey [keyLen]byte + macPool sync.Pool genCompact bool } From 0820f4f6a85433c37ceace9022cc32b19281f96a Mon Sep 17 00:00:00 2001 From: Sokolov Yura Date: Sun, 12 Jul 2020 13:13:09 +0300 Subject: [PATCH 7/8] get rid of blake2s, use hmac-sha256 instead --- README.md | 2 +- compact.go | 22 +++++++++++----------- securecookie.go | 2 +- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 58e0f9f..2054941 100644 --- a/README.md +++ b/README.md @@ -92,7 +92,7 @@ s.Compact(false) // disable generation of compact encoding. It is default. 
``` Not that algorithms are fixed with compact encoding: ChaCha20 is used for -stream cipher and Blake2s is used as a MAC and key expansion (to meet ChaCha20 +stream cipher and HMAC-SHA256 is used as a MAC and key expansion (to meet ChaCha20 requirements for key length). ### Key Rotation diff --git a/compact.go b/compact.go index 6268b7c..62113e0 100644 --- a/compact.go +++ b/compact.go @@ -1,12 +1,14 @@ package securecookie import ( + "crypto/hmac" + "crypto/sha256" "crypto/subtle" "encoding/base64" "encoding/binary" "hash" + "sync" - "golang.org/x/crypto/blake2s" "golang.org/x/crypto/chacha20" ) @@ -20,17 +22,15 @@ const ( ) func (s *SecureCookie) prepareCompact() { - // initialize for compact encoding even if no genCompact set to allow - // two step migration. - hashKey := blake2s.Sum256(s.hashKey) - - bl, _ := blake2s.New256(hashKey[:]) + bl := hmac.New(sha256.New, s.hashKey) _, _ = bl.Write(s.blockKey) copy(s.compactBlockKey[:], bl.Sum(nil)) - s.macPool.New = func() interface{} { - hsh, _ := blake2s.New128(hashKey[:]) - return &macbuf{Hash: hsh} + s.macPool = &sync.Pool{ + New: func() interface{} { + hsh := hmac.New(sha256.New, s.hashKey) + return &macbuf{Hash: hsh} + }, } } @@ -110,12 +110,12 @@ func (s *SecureCookie) decodeCompact(name string, encoded string, dest interface type macbuf struct { hash.Hash nameLen [4]byte - sum [16]byte + sum [32]byte } func (m *macbuf) Reset() { m.Hash.Reset() - m.sum = [16]byte{} + m.sum = [32]byte{} } func (s *SecureCookie) compactMac(header []byte, name string, body, mac []byte) { diff --git a/securecookie.go b/securecookie.go index c1090f9..e6d309a 100644 --- a/securecookie.go +++ b/securecookie.go @@ -169,7 +169,7 @@ type SecureCookie struct { sz Serializer compactBlockKey [keyLen]byte - macPool sync.Pool + macPool *sync.Pool genCompact bool } From cf607a7391b44381e8b416596844f6d7035e6b49 Mon Sep 17 00:00:00 2001 From: Sokolov Yura Date: Sun, 12 Jul 2020 13:26:07 +0300 Subject: [PATCH 8/8] clearer compact header manipulations --- compact.go | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/compact.go b/compact.go index 62113e0..1511f6e 100644 --- a/compact.go +++ b/compact.go @@ -47,8 +47,7 @@ func (s *SecureCookie) encodeCompact(name string, serialized []byte) (string, er copy(body, serialized) header, mac := macHeader[:headerLen], macHeader[headerLen:] - binary.BigEndian.PutUint64(header, uint64(timeShift(timestampNano()))) - header[0] = version // it is made free in timestamp + composeHeader(version, timestampNano(), header) // Mac s.compactMac(header, name, body, mac) @@ -74,13 +73,13 @@ func (s *SecureCookie) decodeCompact(name string, encoded string, dest interface header, mac := macHeader[:headerLen], macHeader[headerLen:] // Decompose - if header[0] != version { + v, ts := decomposeHeader(header) + if v != version { // there is only version currently return errVersionDoesntMatch } // Check time - ts := timeUnshift(int64(binary.BigEndian.Uint64(header))) now := timestampNano() if s.maxAge > 0 && ts+secs2nano(s.maxAge) < now { return errTimestampExpired @@ -144,14 +143,17 @@ func (s *SecureCookie) compactXorStream(nonce, body []byte) { stream.XORKeyStream(body, body) } -// timeShift ensures high byte is zero to use it for version -func timeShift(t int64) int64 { - return t >> 8 +func composeHeader(v byte, t int64, header []byte) { + ut := uint64(t) >> 8 // clear highest octet for version + binary.BigEndian.PutUint64(header, ut) + header[0] = v } -// timeUnshift restores timestamp to nanoseconds + 
clears high byte -func timeUnshift(t int64) int64 { - return t << 8 +func decomposeHeader(header []byte) (v byte, t int64) { + v = header[0] + ut := binary.BigEndian.Uint64(header) + t = int64(ut << 8) + return } func secs2nano(t int64) int64 {
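
A minimal end-to-end sketch of the API added by this series, for reviewers who want to try it out. It assumes all eight patches are applied (Compact is the method introduced in PATCH 2/8); GenerateRandomKey is the existing upstream helper.

```go
package main

import (
	"fmt"
	"log"

	"github.com/gorilla/securecookie"
)

func main() {
	// 32-byte keys, as recommended by the README.
	hashKey := securecookie.GenerateRandomKey(32)
	blockKey := securecookie.GenerateRandomKey(32)

	// Compact(true) switches generation to the compact encoding; the same
	// instance still decodes legacy cookies, which is what enables the
	// two-step migration mentioned in PATCH 2/8.
	s := securecookie.New(hashKey, blockKey).Compact(true)

	value := map[string]string{"user": "gopher"}

	encoded, err := s.Encode("session", value)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("cookie value:", encoded)

	decoded := map[string]string{}
	if err := s.Decode("session", encoded, &decoded); err != nil {
		log.Fatal(err)
	}
	fmt.Println("decoded user:", decoded["user"])
}
```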
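As a reading aid for PATCH 6/8 through 8/8: the compact value is laid out as [8-byte header | 16-byte MAC | body], the header carries the version in its top octet and a truncated nanosecond timestamp in the remaining seven, and header plus MAC double as the 24-byte XChaCha20 nonce. The sketch below mirrors the composeHeader/decomposeHeader helpers from PATCH 8/8 in a standalone program; it is illustrative only and not part of the series.

```go
package main

import (
	"encoding/binary"
	"fmt"
	"time"
)

const version = 1

// composeHeader packs the version into the top octet and the nanosecond
// timestamp, shifted right by 8 bits, into the remaining seven octets.
func composeHeader(v byte, t int64, header []byte) {
	ut := uint64(t) >> 8 // the top octet becomes zero, freeing it for the version
	binary.BigEndian.PutUint64(header, ut)
	header[0] = v
}

// decomposeHeader reverses composeHeader; the shift discards the low 8 bits
// of the timestamp, so the restored value is accurate to within 256ns.
func decomposeHeader(header []byte) (byte, int64) {
	v := header[0]
	ut := binary.BigEndian.Uint64(header)
	return v, int64(ut << 8) // the version octet is shifted out of the value
}

func main() {
	var header [8]byte
	now := time.Now().UnixNano()

	composeHeader(version, now, header[:])
	v, ts := decomposeHeader(header[:])

	fmt.Printf("version=%d drift=%dns\n", v, now-ts) // drift is always below 256ns
}
```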
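PATCH 1/8 replaces the injectable timeFunc with the package-level faketsnano variable, so clock control in tests now looks roughly like the sketch below. This is an assumed, hypothetical test (TestCompactExpiry is not part of the series), and since faketsnano is unexported it only works from a test file inside the securecookie package itself.

```go
package securecookie

import (
	"testing"
	"time"
)

// Freeze the clock via the unexported faketsnano variable (white-box test).
func TestCompactExpiry(t *testing.T) {
	s := New([]byte("hash-key-of-32-bytes-for-testing"), nil).Compact(true).MaxAge(60)

	faketsnano = time.Date(2020, 7, 8, 0, 0, 0, 0, time.UTC).UnixNano()
	defer func() { faketsnano = 0 }() // restore the real clock afterwards

	encoded, err := s.Encode("sid", "value")
	if err != nil {
		t.Fatal(err)
	}

	// Jump two minutes ahead; with MaxAge(60) decoding must now report expiry.
	faketsnano += 2 * time.Minute.Nanoseconds()

	var out string
	if err := s.Decode("sid", encoded, &out); err == nil {
		t.Fatal("expected an expired-timestamp error")
	}
}
```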