diff --git a/README.md b/README.md index a19c28e..d81e15e 100644 --- a/README.md +++ b/README.md @@ -55,10 +55,11 @@ Not all libraries have implemented the full range of features yet. The **OpenPAYGO Go library** supports the following features: -| Feature | Status | -| ----------------- | -------------------- | -| OpenPAYGO Token | ✅ (beta) | -| OpenPAYGO Metrics | ❌ (not implemented) | +| Feature | Status | +| -------------------------- | -------------------- | +| OpenPAYGO Token (normal) | ✅ | +| OpenPAYGO Token (extended) | ❌ (not implemented) | +| OpenPAYGO Metrics | ❌ (not implemented) | ## Support diff --git a/token/extended/extended.go b/token/extended/extended.go index ab69fe0..3643e9b 100644 --- a/token/extended/extended.go +++ b/token/extended/extended.go @@ -10,19 +10,19 @@ import ( ) const ( - maxBase = 999999 - maxActivationValue = 999999 - tokenValueOffset = 1000000 + MaxBase = 999999 + MaxActivationValue = 999999 + TokenValueOffset = 1000000 ) -type tokenType uint8 +type TokenType uint8 func getTokenBase(code uint64) uint64 { - return code % tokenValueOffset + return code % TokenValueOffset } func putBaseInToken(token, tokenbase uint64) (uint64, error) { - if tokenbase > maxBase { + if tokenbase > MaxBase { return 0, fmt.Errorf("invalid value") } @@ -32,7 +32,7 @@ func putBaseInToken(token, tokenbase uint64) (uint64, error) { func generateNextToken(lastCode uint64, key []byte) uint32 { conformedToken := make([]byte, 8) - binary.LittleEndian.PutUint64(conformedToken, lastCode) + binary.BigEndian.PutUint64(conformedToken, lastCode) return convertHashToToken(generateHash(key, conformedToken)) } @@ -106,11 +106,11 @@ func convertFrom4DigitsToken(digits string) uint64 { func getBitArrayFromInt(source uint64, nbOfBits int) []byte { bitsArray := make([]byte, (nbOfBits/8)+1) - binary.LittleEndian.PutUint64(bitsArray, source) + binary.BigEndian.PutUint64(bitsArray, source) return bitsArray } func bitArrayToInt(bits []byte) uint64 { - return
binary.LittleEndian.Uint64(bits) + return binary.BigEndian.Uint64(bits) } diff --git a/token/sample_tokens.json b/token/sample_tokens.json new file mode 100644 index 0000000..73a4a39 --- /dev/null +++ b/token/sample_tokens.json @@ -0,0 +1,242 @@ +[ + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 3, + "restricted_digit_set": false, + "token_type": 1, + "value_raw": 1, + "token": "380589011" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 5, + "restricted_digit_set": false, + "token_type": 1, + "value_raw": 2, + "token": "283675012" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 7, + "restricted_digit_set": false, + "token_type": 1, + "value_raw": 5, + "token": "034254015" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 9, + "restricted_digit_set": false, + "token_type": 1, + "value_raw": 995, + "token": "409152005" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 11, + "restricted_digit_set": false, + "token_type": 1, + "value_raw": 998, + "token": "071763008" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 13, + "restricted_digit_set": false, + "token_type": 1, + "value_raw": 999, + "token": "814704009" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 14, + "restricted_digit_set": false, + "token_type": 2, + "value_raw": 1, + "token": "141465011" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": 
"bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 16, + "restricted_digit_set": false, + "token_type": 2, + "value_raw": 2, + "token": "448320012" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 18, + "restricted_digit_set": false, + "token_type": 2, + "value_raw": 5, + "token": "730651015" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 20, + "restricted_digit_set": false, + "token_type": 2, + "value_raw": 995, + "token": "132820005" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 22, + "restricted_digit_set": false, + "token_type": 2, + "value_raw": 998, + "token": "146345008" + }, + { + "serial_number": "TEST220000001", + "starting_code": 516959010, + "key": "bc41ec9530f6dac86b1a29ab82edc5fb", + "token_count": 24, + "restricted_digit_set": false, + "token_type": 2, + "value_raw": 999, + "token": "386863009" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 3, + "restricted_digit_set": true, + "token_type": 1, + "value_raw": 1, + "token": "413441444234331" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 5, + "restricted_digit_set": true, + "token_type": 1, + "value_raw": 2, + "token": "431131331113332" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 7, + "restricted_digit_set": true, + "token_type": 1, + "value_raw": 5, + "token": "423424444232241" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 9, + "restricted_digit_set": true, + "token_type": 1, + 
"value_raw": 995, + "token": "422313413112333" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 11, + "restricted_digit_set": true, + "token_type": 1, + "value_raw": 998, + "token": "231434142221342" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 13, + "restricted_digit_set": true, + "token_type": 1, + "value_raw": 999, + "token": "242313431134143" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 14, + "restricted_digit_set": true, + "token_type": 2, + "value_raw": 1, + "token": "113434333414311" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 16, + "restricted_digit_set": true, + "token_type": 2, + "value_raw": 2, + "token": "414212121322332" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 18, + "restricted_digit_set": true, + "token_type": 2, + "value_raw": 5, + "token": "413424224321241" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 20, + "restricted_digit_set": true, + "token_type": 2, + "value_raw": 995, + "token": "342124322343233" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 22, + "restricted_digit_set": true, + "token_type": 2, + "value_raw": 998, + "token": "211422314241142" + }, + { + "serial_number": "TEST240000002", + "starting_code": 432435255, + "key": "dac86b1a29ab82edc5fbbc41ec9530f6", + "token_count": 24, + "restricted_digit_set": true, + "token_type": 2, + "value_raw": 999, + "token": "331233113332423" + } +] diff --git 
a/token/shared/shared.go b/token/shared/shared.go index e7cbe05..036f69f 100644 --- a/token/shared/shared.go +++ b/token/shared/shared.go @@ -17,33 +17,33 @@ const ( TokenValueOffset = 1000 ) -type tokenType uint8 +type TokenType uint8 const ( - tokenTypeAddTime tokenType = 1 - tokenTypeSetTime = 2 - tokenTypeDisablePayg = 3 - tokenTypeCounterSync = 4 - tokenTypeInvalid = 10 - tokenTypeAlreadyUsed = 11 + TokenTypeAddTime TokenType = 1 + TokenTypeSetTime = 2 + TokenTypeDisablePayg = 3 + TokenTypeCounterSync = 4 + TokenTypeInvalid = 10 + TokenTypeAlreadyUsed = 11 ) -func getTokenBase(code uint64) uint64 { +func GetTokenBase(code uint64) uint64 { return code % TokenValueOffset } -func putBaseInToken(token, tokenbase uint64) (uint64, error) { +func PutBaseInToken(token, tokenbase uint64) (uint64, error) { if tokenbase > MaxBase { return 0, fmt.Errorf("invalid value") } - return token - getTokenBase(token) + tokenbase, nil + return token - GetTokenBase(token) + tokenbase, nil } -func generateNextToken(lastCode uint32, key []byte) uint32 { +func GenerateNextToken(lastCode uint32, key []byte) uint32 { conformedToken := make([]byte, 4) - binary.LittleEndian.PutUint32(conformedToken, lastCode) + binary.BigEndian.PutUint32(conformedToken, lastCode) extendedToken := append(conformedToken, conformedToken...) 
@@ -59,12 +59,12 @@ func generateHash(key []byte, token []byte) uint64 { func convertHashToToken(hash uint64) uint32 { binHash := make([]byte, 8) - binary.LittleEndian.PutUint64(binHash, hash) + binary.BigEndian.PutUint64(binHash, hash) - hiHash := binary.LittleEndian.Uint32(binHash[0:4]) - loHash := binary.LittleEndian.Uint32(binHash[4:8]) + hiHash := binary.BigEndian.Uint32(binHash[0:4]) + loHash := binary.BigEndian.Uint32(binHash[4:8]) - return convertTo29BitsAndHalf(uint64((hiHash ^ loHash))) + return convertTo29BitsAndHalf(uint64(hiHash ^ loHash)) } func loadSecretKeyFromHex(hexKey string) ([]byte, error) { @@ -87,7 +87,6 @@ func GenerateStartingCode(key []byte) uint32 { func convertTo29BitsAndHalf(source uint64) uint32 { //TODO: check this mask value mask := ((uint64(1) << (32 - 2 + 1)) - 1) << 2 - temp := (source & mask) >> 2 if temp > 999999999 { temp = temp - 73741825 @@ -96,12 +95,12 @@ func convertTo29BitsAndHalf(source uint64) uint32 { return uint32(temp) } -func convertTo4DigitsToken(source uint64) string { +func ConvertTo4DigitsToken(source uint64, nbOfDigits int) string { var restrictedDigitToken strings.Builder - bitArray := getBitArrayFromInt(source, 30) + bitArray := getBitArrayFromInt(source, nbOfDigits*2) - for i := range 15 { + for i := range nbOfDigits { thisArray := bitArray[i*2 : (i*2)+2] restrictedDigitToken.WriteString( fmt.Sprint(bitArrayToInt(thisArray) + 1)) @@ -119,16 +118,27 @@ func convertFrom4DigitsToken(digits string) uint64 { bits = append(bits, tmp...) 
} - return bitArrayToInt(bits) + return uint64(bitArrayToInt(bits)) } func getBitArrayFromInt(source uint64, nbOfBits int) []byte { - bitsArray := make([]byte, (nbOfBits/8)+1) - binary.LittleEndian.PutUint64(bitsArray, source) + + bitsArray := make([]byte, nbOfBits) + for i := 0; i < nbOfBits; i++ { + if (source & (1 << (nbOfBits - 1 - i))) != 0 { + bitsArray[i] = 1 + } else { + bitsArray[i] = 0 + } + } return bitsArray } -func bitArrayToInt(bits []byte) uint64 { - return binary.LittleEndian.Uint64(bits) +func bitArrayToInt(bits []byte) int { + num := 0 + for _, bit := range bits { + num = (num << 1) | int(bit) + } + return num } diff --git a/token/shared/shared_test.go b/token/shared/shared_test.go new file mode 100644 index 0000000..1278a4a --- /dev/null +++ b/token/shared/shared_test.go @@ -0,0 +1,45 @@ +package shared + +import ( + "encoding/hex" + "fmt" + "log" + "testing" +) + +func TestGenerateHash(t *testing.T) { + key := []byte("0123456789ABCDEF") + hash := generateHash(key, []byte("")) + + expectedHash := uint64(3627314469837380007) + if hash != expectedHash { + t.Errorf("Expected hash to be %d, got %d", expectedHash, hash) + } +} + +func TestConvertHashToToken(t *testing.T) { + key, err := hex.DecodeString("bc41ec9530f6dac86b1a29ab82edc5fb") + if err != nil { + log.Fatal(err) + } + hash := generateHash(key, []byte("hello world")) + fmt.Printf("hash: %x\n", hash) + token := convertHashToToken(hash) + expectedToken := uint32(184900559) + if token != expectedToken { + t.Errorf("Expected token to be %d, got %d", expectedToken, token) + } +} + +func TestGenerateNextToken(t *testing.T) { + startingCode := uint32(516959010) + key, err := hex.DecodeString("bc41ec9530f6dac86b1a29ab82edc5fb") + if err != nil { + log.Fatal(err) + } + nextToken := GenerateNextToken(startingCode, key) + expectedNextToken := uint32(117642353) + if nextToken != expectedNextToken { + t.Errorf("Expected next token to be %d, got %d", expectedNextToken, nextToken) + } +} diff --git 
a/token/token.go b/token/token.go index 1765cc0..47e278d 100644 --- a/token/token.go +++ b/token/token.go @@ -1 +1,138 @@ package token + +import ( + "errors" + "fmt" + "github.com/EnAccess/OpenPAYGO-go/token/extended" + "github.com/EnAccess/OpenPAYGO-go/token/shared" + "math" + "strconv" + "strings" +) + +type FinalToken struct { + Token string + Count uint8 +} + +type TokenContext struct { + Key []byte + Count uint8 + Value int + TokenType shared.TokenType + StartCode uint32 + ValueDivider uint8 + RestrictDigitSet bool + ExtendToken bool +} + +func generateToken(tokenContext TokenContext) (FinalToken, error) { + startCode := tokenContext.StartCode + value := tokenContext.Value + valueDivider := tokenContext.ValueDivider + tokenType := tokenContext.TokenType + if valueDivider == 0 { + valueDivider = 1 + } + + if startCode == 0 { + startCode = shared.GenerateStartingCode(tokenContext.Key) + } + + if tokenType == shared.TokenTypeAddTime || tokenType == shared.TokenTypeSetTime { + if value == 0 { + return FinalToken{}, errors.New("token does not have a value") + } + + value = int(math.Round(float64(value * int(valueDivider)))) + var maxValue int + if tokenContext.ExtendToken { + maxValue = extended.MaxActivationValue + } else { + maxValue = shared.MaxActivationValue + } + + if value > maxValue { + return FinalToken{}, errors.New("token value provided is too high") + } + } else if value != 0 { + return FinalToken{}, errors.New("a value is not allowed for this token type") + } else { + if tokenType == shared.TokenTypeDisablePayg { + value = shared.PaygDisableValue + } else if tokenType == shared.TokenTypeCounterSync { + value = shared.CounterSyncValue + } else { + return FinalToken{}, errors.New("the token type provided is not supported") + } + } + + // TODO: add support for extended tokens + return generateStandardToken(TokenContext{ + StartCode: startCode, + Key: tokenContext.Key, + Count: tokenContext.Count, + RestrictDigitSet: tokenContext.RestrictDigitSet, + 
Value: value, + TokenType: tokenType, + }) +} + +func generateStandardToken(tokenContext TokenContext) (FinalToken, error) { + + startBaseCode := shared.GetTokenBase(uint64(tokenContext.StartCode)) + tokenBase := encodeBase(startBaseCode, tokenContext.Value) + curToken, err := shared.PutBaseInToken(uint64(tokenContext.StartCode), tokenBase) + if err != nil { + return FinalToken{}, fmt.Errorf("generating standard token: %s", err.Error()) + } + + newCount := getNewCount(tokenContext.Count, tokenContext.TokenType) + for i := 0; i < int(newCount-1); i++ { + curToken = uint64(shared.GenerateNextToken(uint32(curToken), tokenContext.Key)) + } + finalToken, err := shared.PutBaseInToken(curToken, tokenBase) + if err != nil { + return FinalToken{}, fmt.Errorf("generating standard token: %s", err.Error()) + } + + var token FinalToken + if tokenContext.RestrictDigitSet { + token.Token = shared.ConvertTo4DigitsToken(finalToken, 15) + token.Token = strings.TrimLeft(fmt.Sprintf("%015s", token.Token), " ") + } else { + token.Token = strconv.FormatInt(int64(finalToken), 10) + token.Token = strings.TrimLeft(fmt.Sprintf("%09s", token.Token), " ") + } + token.Count = newCount + return token, nil +} + +func encodeBase(baseCode uint64, value int) uint64 { + if uint64(value)+baseCode > 999 { + return uint64(value) + baseCode - 1000 + } + return uint64(value) + baseCode +} + +func getNewCount(count uint8, mode shared.TokenType) uint8 { + var newCnt uint8 + currCountOdd := count % 2 + + if mode == shared.TokenTypeSetTime || + mode == shared.TokenTypeDisablePayg || + mode == shared.TokenTypeCounterSync { + if currCountOdd != 0 { + newCnt = count + 2 + } else { + newCnt = count + 1 + } + } else { + if currCountOdd != 0 { + newCnt = count + 1 + } else { + newCnt = count + 2 + } + } + return newCnt +} diff --git a/token/token_test.go b/token/token_test.go index 941b99c..4d7d996 100644 --- a/token/token_test.go +++ b/token/token_test.go @@ -1 +1,86 @@ -package token_test +package token + 
+import ( + "encoding/hex" + "encoding/json" + "fmt" + "github.com/EnAccess/OpenPAYGO-go/token/shared" + "io" + "os" + "testing" +) + +type TTokenContext struct { + Context TokenContext + Token string +} + +type TokenJSON struct { + SerialNumber string `json:"serial_number"` + StartingCode uint32 `json:"starting_code"` + Key string `json:"key"` + TokenCount uint8 `json:"token_count"` + RestrictedDigitSet bool `json:"restricted_digit_set"` + TokenType int `json:"token_type"` + ValueRaw int `json:"value_raw"` + Token string `json:"token"` +} + +func getTestTokens() ([]TTokenContext, error) { + file, err := os.Open("sample_tokens.json") + if err != nil { + fmt.Println("Error opening file:", err) + return []TTokenContext{}, nil + } + defer file.Close() + + data, err := io.ReadAll(file) + if err != nil { + fmt.Println("Error reading file:", err) + return []TTokenContext{}, nil + } + + var tokens []TokenJSON + if err := json.Unmarshal(data, &tokens); err != nil { + fmt.Println("Error parsing JSON:", err) + return []TTokenContext{}, nil + } + + var tokenContexts []TTokenContext + for _, t := range tokens { + keyBytes, err := hex.DecodeString(t.Key) + if err != nil { + fmt.Println("Error decoding key:", err) + continue + } + tc := TokenContext{ + Key: keyBytes, + Count: t.TokenCount, + Value: t.ValueRaw, + TokenType: shared.TokenType(t.TokenType), + StartCode: t.StartingCode, + ValueDivider: 1, // You can adjust this as needed + RestrictDigitSet: t.RestrictedDigitSet, + ExtendToken: false, // You can adjust this as needed + } + tokenContexts = append(tokenContexts, TTokenContext{Context: tc, Token: t.Token}) + } + return tokenContexts, nil +} +func TestGenerateToken(t *testing.T) { + tokenCxts, _ := getTestTokens() + + for _, tokenCxt := range tokenCxts { + + token, err := generateToken(tokenCxt.Context) + if err != nil { + t.Errorf("Error generating token: %s", err.Error()) + continue + } + + if token.Token != tokenCxt.Token { + t.Errorf("Error generating token: expected 
%s, got %s", tokenCxt.Token, token.Token) + } + + } +}