Reduce base58 dependencies: use a reverse lookup table and direct 64-bit math
This commit is contained in:
parent
71018b5b4d
commit
7b24ed2d11
136
base58.go
136
base58.go
|
@ -1,56 +1,56 @@
|
|||
package moneroutil
|
||||
|
||||
import (
|
||||
"lukechampine.com/uint128"
|
||||
"encoding/binary"
|
||||
)
|
||||
|
||||
// BASE58 is the Monero base58 alphabet: 58 ASCII characters, excluding the
// visually ambiguous 0, O, I and l.
const BASE58 = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"

// REVERSEBASE58 is a 256-entry reverse lookup table: indexing it with an
// ASCII byte yields that character's digit value in BASE58, while bytes that
// are not part of the alphabet map to 0xff. It replaces the previous
// map[uint8]uint8 lookup with a constant-time string index.
const REVERSEBASE58 = // 0     1     2     3     4     5     6     7     8     9     a     b     c     d     e     f
	/* 00 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* 10 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* 20 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* 30 */ "\xff\x00\x01\x02\x03\x04\x05\x06\x07\x08\xff\xff\xff\xff\xff\xff" +
	/* 40 */ "\xff\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\xff\x11\x12\x13\x14\x15\xff" +
	/* 50 */ "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\xff\xff\xff\xff\xff" +
	/* 60 */ "\xff\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\xff\x2c\x2d\x2e" +
	/* 70 */ "\x2f\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\xff\xff\xff\xff\xff" +
	/* 80 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* 90 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* a0 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* b0 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* c0 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* d0 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* e0 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" +
	/* f0 */ "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
|
||||
func encodeChunk(raw []byte, buf []byte) []byte {
|
||||
intToDecode := binary.BigEndian.Uint64(raw[:])
|
||||
|
||||
'J': 17, 'K': 18, 'L': 19, 'M': 20, 'N': 21,
|
||||
|
||||
'P': 22, 'Q': 23, 'R': 24, 'S': 25, 'T': 26, 'U': 27, 'V': 28, 'W': 29, 'X': 30, 'Y': 31, 'Z': 32,
|
||||
|
||||
'a': 33, 'b': 34, 'c': 35, 'd': 36, 'e': 37, 'f': 38, 'g': 39, 'h': 40, 'i': 41, 'j': 42, 'k': 43,
|
||||
|
||||
'm': 44, 'n': 45, 'o': 46, 'p': 47, 'q': 48, 'r': 49, 's': 50, 't': 51, 'u': 52, 'v': 53, 'w': 54, 'x': 55, 'y': 56, 'z': 57,
|
||||
}
|
||||
|
||||
func slowBase58Lookup(c uint8) uint8 {
|
||||
return base58Lookup[c]
|
||||
}
|
||||
|
||||
func fastBase58Lookup(c uint8) uint8 {
|
||||
if c >= '1' && c <= '9' {
|
||||
return c - '1'
|
||||
} else if c >= 'A' && c <= 'H' {
|
||||
return c - 'A' + 9
|
||||
} else if c >= 'J' && c <= 'N' {
|
||||
return c - 'J' + 17
|
||||
} else if c >= 'P' && c <= 'Z' {
|
||||
return c - 'P' + 22
|
||||
} else if c >= 'a' && c <= 'k' {
|
||||
return c - 'a' + 33
|
||||
} else if c >= 'm' && c <= 'z' {
|
||||
return c - 'm' + 44
|
||||
for intToDecode > 0 {
|
||||
buf = append(buf, BASE58[intToDecode%58])
|
||||
intToDecode /= 58
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func encodeChunk(raw []byte, padding int, buf []byte) []byte {
|
||||
var data [16]byte
|
||||
copy(data[16-len(raw):], raw)
|
||||
remainder := uint128.FromBytesBE(data[:])
|
||||
var current uint64
|
||||
for remainder.Cmp64(0) > 0 {
|
||||
remainder, current = remainder.QuoRem64(58)
|
||||
buf = append(buf, BASE58[current])
|
||||
for len(buf) < 11 {
|
||||
buf = append(buf, '1')
|
||||
}
|
||||
for len(buf) < padding {
|
||||
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
|
||||
buf[i], buf[j] = buf[j], buf[i]
|
||||
}
|
||||
return buf
|
||||
}
|
||||
func encodeChunkTail(raw []byte, buf []byte) []byte {
|
||||
var data [8]byte
|
||||
copy(data[8-len(raw):], raw)
|
||||
|
||||
intToDecode := binary.BigEndian.Uint64(data[:])
|
||||
|
||||
for intToDecode > 0 {
|
||||
buf = append(buf, BASE58[intToDecode%58])
|
||||
intToDecode /= 58
|
||||
}
|
||||
for len(buf) < 7 {
|
||||
buf = append(buf, '1')
|
||||
}
|
||||
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
|
||||
|
@ -60,36 +60,38 @@ func encodeChunk(raw []byte, padding int, buf []byte) []byte {
|
|||
}
|
||||
|
||||
func decodeChunk(buf []byte, encoded string) []byte {
|
||||
var bigResult uint128.Uint128
|
||||
currentMultiplier := uint128.From64(1)
|
||||
var intResult uint64
|
||||
currentMultiplier := uint64(1)
|
||||
for i := len(encoded) - 1; i >= 0; i-- {
|
||||
bigResult = bigResult.Add(currentMultiplier.Mul64(uint64(fastBase58Lookup(encoded[i]))))
|
||||
currentMultiplier = currentMultiplier.Mul64(58)
|
||||
intResult += currentMultiplier * uint64(REVERSEBASE58[encoded[i]])
|
||||
// this can overflow, but only on the last iteration when i == 0 when all data is valid
|
||||
currentMultiplier *= 58
|
||||
}
|
||||
var result [16]byte
|
||||
bigResult.ReverseBytes().PutBytes(result[:])
|
||||
|
||||
var result [8]byte
|
||||
binary.BigEndian.PutUint64(result[:], intResult)
|
||||
switch len(encoded) {
|
||||
case 0:
|
||||
return append(buf, result[8+8:]...)
|
||||
case 2:
|
||||
return append(buf, result[8+7:]...)
|
||||
case 3:
|
||||
return append(buf, result[8+6:]...)
|
||||
case 5:
|
||||
return append(buf, result[8+5:]...)
|
||||
case 6:
|
||||
return append(buf, result[8+4:]...)
|
||||
case 7:
|
||||
return append(buf, result[8+3:]...)
|
||||
case 9:
|
||||
return append(buf, result[8+2:]...)
|
||||
case 10:
|
||||
return append(buf, result[8+1:]...)
|
||||
case 11:
|
||||
return append(buf, result[8:]...)
|
||||
case 2:
|
||||
return append(buf, result[7:]...)
|
||||
case 3:
|
||||
return append(buf, result[6:]...)
|
||||
case 5:
|
||||
return append(buf, result[5:]...)
|
||||
case 6:
|
||||
return append(buf, result[4:]...)
|
||||
case 7:
|
||||
return append(buf, result[3:]...)
|
||||
case 9:
|
||||
return append(buf, result[2:]...)
|
||||
case 10:
|
||||
return append(buf, result[1:]...)
|
||||
case 11:
|
||||
return append(buf, result[:]...)
|
||||
default:
|
||||
}
|
||||
return buf
|
||||
return nil
|
||||
}
|
||||
|
||||
func EncodeMoneroBase58(data ...[]byte) string {
|
||||
|
@ -104,10 +106,10 @@ func EncodeMoneroBase58(data ...[]byte) string {
|
|||
length := len(combined)
|
||||
rounds := length / 8
|
||||
for i := 0; i < rounds; i++ {
|
||||
result = append(result, encodeChunk(combined[i*8:(i+1)*8], 11, buf[:0])...)
|
||||
result = append(result, encodeChunk(combined[i*8:(i+1)*8], buf[:0])...)
|
||||
}
|
||||
if length%8 > 0 {
|
||||
result = append(result, encodeChunk(combined[rounds*8:], 7, buf[:0])...)
|
||||
result = append(result, encodeChunkTail(combined[rounds*8:], buf[:0])...)
|
||||
}
|
||||
return string(result)
|
||||
}
|
||||
|
|
16
base58_test.go
Normal file
16
base58_test.go
Normal file
|
@ -0,0 +1,16 @@
|
|||
package moneroutil
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEncodeDecodeMoneroBase58Bounds(t *testing.T) {
|
||||
data := make([]byte, 2048)
|
||||
for i := range data {
|
||||
data[i] = 0xff
|
||||
}
|
||||
if bytes.Compare(DecodeMoneroBase58(EncodeMoneroBase58(data)), data) != 0 {
|
||||
t.Fatal()
|
||||
}
|
||||
}
|
5
go.mod
5
go.mod
|
@ -2,9 +2,6 @@ module git.gammaspectra.live/P2Pool/moneroutil
|
|||
|
||||
go 1.19
|
||||
|
||||
require (
|
||||
golang.org/x/crypto v0.9.0
|
||||
lukechampine.com/uint128 v1.3.0
|
||||
)
|
||||
require golang.org/x/crypto v0.9.0
|
||||
|
||||
require golang.org/x/sys v0.8.0 // indirect
|
||||
|
|
2
go.sum
2
go.sum
|
@ -2,5 +2,3 @@ golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
|
|||
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
|
||||
golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
lukechampine.com/uint128 v1.3.0 h1:cDdUVfRwDUDovz610ABgFD17nXD4/uDgVHl2sC3+sbo=
|
||||
lukechampine.com/uint128 v1.3.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
|
||||
|
|
Reference in a new issue