// extra_bytes_test.go
1 package lnwire 2 3 import ( 4 "bytes" 5 "math/rand" 6 "reflect" 7 "testing" 8 "testing/quick" 9 10 "github.com/lightningnetwork/lnd/tlv" 11 "github.com/stretchr/testify/require" 12 ) 13 14 var ( 15 tlvType1 tlv.TlvType1 16 tlvType2 tlv.TlvType2 17 tlvType3 tlv.TlvType3 18 ) 19 20 // TestExtraOpaqueDataEncodeDecode tests that we're able to encode/decode 21 // arbitrary payloads. 22 func TestExtraOpaqueDataEncodeDecode(t *testing.T) { 23 t.Parallel() 24 25 type testCase struct { 26 // emptyBytes indicates if we should try to encode empty bytes 27 // or not. 28 emptyBytes bool 29 30 // inputBytes if emptyBytes is false, then we'll read in this 31 // set of bytes instead. 32 inputBytes []byte 33 } 34 35 // We should be able to read in an arbitrary set of bytes as an 36 // ExtraOpaqueData, then encode those new bytes into a new instance. 37 // The final two instances should be identical. 38 scenario := func(test testCase) bool { 39 var ( 40 extraData ExtraOpaqueData 41 b bytes.Buffer 42 ) 43 44 copy(extraData[:], test.inputBytes) 45 46 if err := extraData.Encode(&b); err != nil { 47 t.Fatalf("unable to encode extra data: %v", err) 48 return false 49 } 50 51 var newBytes ExtraOpaqueData 52 if err := newBytes.Decode(&b); err != nil { 53 t.Fatalf("unable to decode extra bytes: %v", err) 54 return false 55 } 56 57 if !bytes.Equal(extraData[:], newBytes[:]) { 58 t.Fatalf("expected %x, got %x", extraData, 59 newBytes) 60 return false 61 } 62 63 return true 64 } 65 66 // We'll make a function to generate random test data. Half of the 67 // time, we'll actually feed in blank bytes. 
68 quickCfg := &quick.Config{ 69 Values: func(v []reflect.Value, r *rand.Rand) { 70 var newTestCase testCase 71 if r.Int31()%2 == 0 { 72 newTestCase.emptyBytes = true 73 } 74 75 if !newTestCase.emptyBytes { 76 numBytes := r.Int31n(1000) 77 newTestCase.inputBytes = make([]byte, numBytes) 78 79 _, err := r.Read(newTestCase.inputBytes) 80 require.NoError(t, err) 81 } 82 83 v[0] = reflect.ValueOf(newTestCase) 84 }, 85 } 86 87 if err := quick.Check(scenario, quickCfg); err != nil { 88 t.Fatalf("encode+decode test failed: %v", err) 89 } 90 } 91 92 // TestExtraOpaqueDataPackUnpackRecords tests that we're able to pack a set of 93 // tlv.Records into a stream, and unpack them on the other side to obtain the 94 // same set of records. 95 func TestExtraOpaqueDataPackUnpackRecords(t *testing.T) { 96 t.Parallel() 97 98 var ( 99 type1 tlv.Type = 1 100 type2 tlv.Type = 2 101 102 channelType1 uint8 = 2 103 channelType2 uint8 104 105 hop1 uint32 = 99 106 hop2 uint32 107 ) 108 testRecordsProducers := []tlv.RecordProducer{ 109 &recordProducer{tlv.MakePrimitiveRecord(type1, &channelType1)}, 110 &recordProducer{tlv.MakePrimitiveRecord(type2, &hop1)}, 111 } 112 113 // Now that we have our set of sample records and types, we'll encode 114 // them into the passed ExtraOpaqueData instance. 115 var extraBytes ExtraOpaqueData 116 if err := extraBytes.PackRecords(testRecordsProducers...); err != nil { 117 t.Fatalf("unable to pack records: %v", err) 118 } 119 120 // We'll now simulate decoding these types _back_ into records on the 121 // other side. 122 newRecords := []tlv.RecordProducer{ 123 &recordProducer{tlv.MakePrimitiveRecord(type1, &channelType2)}, 124 &recordProducer{tlv.MakePrimitiveRecord(type2, &hop2)}, 125 } 126 typeMap, err := extraBytes.ExtractRecords(newRecords...) 127 require.NoError(t, err, "unable to extract record") 128 129 // We should find that the new backing values have been populated with 130 // the proper value. 
131 switch { 132 case channelType1 != channelType2: 133 t.Fatalf("wrong record for channel type: expected %v, got %v", 134 channelType1, channelType2) 135 136 case hop1 != hop2: 137 t.Fatalf("wrong record for hop: expected %v, got %v", hop1, 138 hop2) 139 } 140 141 // Both types we created above should be found in the type map. 142 if _, ok := typeMap[type1]; !ok { 143 t.Fatalf("type1 not found in typeMap") 144 } 145 if _, ok := typeMap[type2]; !ok { 146 t.Fatalf("type2 not found in typeMap") 147 } 148 } 149 150 // TestPackRecords tests that we're able to pack a set of records into an 151 // ExtraOpaqueData instance, and then extract them back out. Crucially, we'll 152 // ensure that records can be packed in any order, and we'll ensure that the 153 // unpacked records are valid. 154 func TestPackRecords(t *testing.T) { 155 t.Parallel() 156 157 // Create an empty ExtraOpaqueData instance. 158 extraBytes := ExtraOpaqueData{} 159 160 var ( 161 // Record type 1. 162 recordBytes1 = []byte("recordBytes1") 163 tlvRecord1 = tlv.NewPrimitiveRecord[tlv.TlvType1]( 164 recordBytes1, 165 ) 166 167 // Record type 2. 168 recordBytes2 = []byte("recordBytes2") 169 tlvRecord2 = tlv.NewPrimitiveRecord[tlv.TlvType2]( 170 recordBytes2, 171 ) 172 173 // Record type 3. 174 recordBytes3 = []byte("recordBytes3") 175 tlvRecord3 = tlv.NewPrimitiveRecord[tlv.TlvType3]( 176 recordBytes3, 177 ) 178 ) 179 180 // Pack records 1 and 2 into the ExtraOpaqueData instance. 181 err := extraBytes.PackRecords( 182 []tlv.RecordProducer{&tlvRecord1, &tlvRecord2}..., 183 ) 184 require.NoError(t, err) 185 186 // Examine the records that were packed into the ExtraOpaqueData. 
187 extractedRecords, err := extraBytes.ExtractRecords() 188 require.NoError(t, err) 189 190 require.Equal(t, 2, len(extractedRecords)) 191 require.Equal(t, recordBytes1, extractedRecords[tlvType1.TypeVal()]) 192 require.Equal(t, recordBytes2, extractedRecords[tlvType2.TypeVal()]) 193 194 // Pack records 1, 2, and 3 into the ExtraOpaqueData instance. 195 err = extraBytes.PackRecords( 196 []tlv.RecordProducer{&tlvRecord3, &tlvRecord1, &tlvRecord2}..., 197 ) 198 require.NoError(t, err) 199 200 // Examine the records that were packed into the ExtraOpaqueData. 201 extractedRecords, err = extraBytes.ExtractRecords() 202 require.NoError(t, err) 203 204 require.Equal(t, 3, len(extractedRecords)) 205 require.Equal(t, recordBytes1, extractedRecords[tlvType1.TypeVal()]) 206 require.Equal(t, recordBytes2, extractedRecords[tlvType2.TypeVal()]) 207 require.Equal(t, recordBytes3, extractedRecords[tlvType3.TypeVal()]) 208 } 209 210 type dummyRecordProducer struct { 211 typ tlv.Type 212 scratchValue []byte 213 expectedValue []byte 214 } 215 216 func (d *dummyRecordProducer) Record() tlv.Record { 217 return tlv.MakePrimitiveRecord(d.typ, &d.scratchValue) 218 } 219 220 // TestExtraOpaqueData tests that we're able to properly encode/decode an 221 // ExtraOpaqueData instance. 
func TestExtraOpaqueData(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		// name is the subtest name.
		name string

		// types is the input type map handed to NewExtraOpaqueData.
		types tlv.TypeMap

		// expectedData is the expected serialized TLV stream.
		expectedData ExtraOpaqueData

		// expectedTypes is what ExtractRecords (with no producers)
		// should return.
		expectedTypes tlv.TypeMap

		// decoders are record producers used to decode known types
		// back out of the stream.
		decoders []tlv.RecordProducer
	}{
		{
			name:          "empty map",
			expectedTypes: tlv.TypeMap{},
			expectedData:  make([]byte, 0),
		},
		{
			name: "single record",
			types: tlv.TypeMap{
				tlvType1.TypeVal(): []byte{1, 2, 3},
			},
			// Serialized form: type 0x01, length 0x03, value.
			expectedData: ExtraOpaqueData{
				0x01, 0x03, 1, 2, 3,
			},
			expectedTypes: tlv.TypeMap{
				tlvType1.TypeVal(): []byte{1, 2, 3},
			},
			decoders: []tlv.RecordProducer{
				&dummyRecordProducer{
					typ:           tlvType1.TypeVal(),
					expectedValue: []byte{1, 2, 3},
				},
			},
		},
		{
			// The input map is deliberately out of order; the
			// serialized stream must still come out sorted by
			// type (0x01 before 0x02).
			name: "multiple records",
			types: tlv.TypeMap{
				tlvType2.TypeVal(): []byte{4, 5, 6},
				tlvType1.TypeVal(): []byte{1, 2, 3},
			},
			expectedData: ExtraOpaqueData{
				0x01, 0x03, 1, 2, 3,
				0x02, 0x03, 4, 5, 6,
			},
			expectedTypes: tlv.TypeMap{
				tlvType1.TypeVal(): []byte{1, 2, 3},
				tlvType2.TypeVal(): []byte{4, 5, 6},
			},
			decoders: []tlv.RecordProducer{
				&dummyRecordProducer{
					typ:           tlvType1.TypeVal(),
					expectedValue: []byte{1, 2, 3},
				},
				&dummyRecordProducer{
					typ:           tlvType2.TypeVal(),
					expectedValue: []byte{4, 5, 6},
				},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// First, test the constructor.
			opaqueData, err := NewExtraOpaqueData(tc.types)
			require.NoError(t, err)

			require.Equal(t, tc.expectedData, opaqueData)

			// Now encode/decode.
			var b bytes.Buffer
			err = opaqueData.Encode(&b)
			require.NoError(t, err)

			var decoded ExtraOpaqueData
			err = decoded.Decode(&b)
			require.NoError(t, err)

			require.Equal(t, opaqueData, decoded)

			// Now RecordProducers/PackRecords.
			producers, err := opaqueData.RecordProducers()
			require.NoError(t, err)

			var packed ExtraOpaqueData
			err = packed.PackRecords(producers...)
			require.NoError(t, err)

			// PackRecords returns nil vs. an empty slice if there
			// are no records. We need to handle this case
			// separately.
			if len(producers) == 0 {
				// Make sure the packed data is empty.
				require.Empty(t, packed)

				// Now change it to an empty slice for the
				// comparison below.
				packed = make([]byte, 0)
			}
			require.Equal(t, opaqueData, packed)

			// ExtractRecords with an empty set of record producers
			// should return the original type map.
			extracted, err := opaqueData.ExtractRecords()
			require.NoError(t, err)

			require.Equal(t, tc.expectedTypes, extracted)

			if len(tc.decoders) == 0 {
				return
			}

			// ExtractRecords with a set of record producers should
			// only return the types that weren't in the passed-in
			// set of producers.
			extracted, err = opaqueData.ExtractRecords(
				tc.decoders...,
			)
			require.NoError(t, err)

			// Types consumed by a decoder still appear as keys in
			// the map, but with a nil remainder value.
			for parsedType := range tc.expectedTypes {
				remainder, ok := extracted[parsedType]
				require.True(t, ok)
				require.Nil(t, remainder)
			}

			// Each decoder's scratch value should now hold the
			// bytes decoded from the stream.
			for _, dec := range tc.decoders {
				//nolint:forcetypeassert
				dec := dec.(*dummyRecordProducer)
				require.Equal(
					t, dec.expectedValue, dec.scratchValue,
				)
			}
		})
	}
}
func TestExtractAndMerge(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		// name is the subtest name.
		name string

		// knownRecords are producers for TLV types the caller knows
		// how to decode.
		knownRecords []tlv.RecordProducer

		// extraData is a pre-serialized TLV stream of unknown types.
		extraData ExtraOpaqueData

		// customRecords are records in the custom (high) type range.
		customRecords CustomRecords

		// expectedErr, if non-empty, is a substring the returned
		// error must contain.
		expectedErr string

		// expectEncoded is the expected merged+sorted serialization.
		expectEncoded []byte
	}{
		{
			// Type 123 is below the custom-records range, so
			// validation must reject it.
			name: "invalid custom record",
			customRecords: CustomRecords{
				123: []byte("invalid"),
			},
			expectedErr: "custom records validation error",
		},
		{
			name: "empty everything",
		},
		{
			name: "just extra data",
			extraData: ExtraOpaqueData{
				0x01, 0x03, 1, 2, 3,
				0x02, 0x03, 4, 5, 6,
			},
			expectEncoded: []byte{
				0x01, 0x03, 1, 2, 3,
				0x02, 0x03, 4, 5, 6,
			},
		},
		{
			// Known records (types 1, 2) must be merged with the
			// opaque extra data (types 4, 5) in sorted type order.
			name: "extra data with known record",
			extraData: ExtraOpaqueData{
				0x04, 0x03, 4, 4, 4,
				0x05, 0x03, 5, 5, 5,
			},
			knownRecords: []tlv.RecordProducer{
				&dummyRecordProducer{
					typ:           tlvType1.TypeVal(),
					scratchValue:  []byte{1, 2, 3},
					expectedValue: []byte{1, 2, 3},
				},
				&dummyRecordProducer{
					typ:           tlvType2.TypeVal(),
					scratchValue:  []byte{4, 5, 6},
					expectedValue: []byte{4, 5, 6},
				},
			},
			expectEncoded: []byte{
				0x01, 0x03, 1, 2, 3,
				0x02, 0x03, 4, 5, 6,
				0x04, 0x03, 4, 4, 4,
				0x05, 0x03, 5, 5, 5,
			},
		},
		{
			name: "extra data and custom records with known record",
			extraData: ExtraOpaqueData{
				0x04, 0x03, 4, 4, 4,
				0x05, 0x03, 5, 5, 5,
			},
			customRecords: CustomRecords{
				MinCustomRecordsTlvType + 1: []byte{99, 99, 99},
			},
			knownRecords: []tlv.RecordProducer{
				&dummyRecordProducer{
					typ:           tlvType1.TypeVal(),
					scratchValue:  []byte{1, 2, 3},
					expectedValue: []byte{1, 2, 3},
				},
				&dummyRecordProducer{
					typ:           tlvType2.TypeVal(),
					scratchValue:  []byte{4, 5, 6},
					expectedValue: []byte{4, 5, 6},
				},
			},
			// The trailing record is the custom one: the 0xfe...
			// prefix appears to be the multi-byte encoding of type
			// MinCustomRecordsTlvType+1 (65537), then length 3 and
			// value {99, 99, 99} (0x63) — sorted after all lower
			// types. TODO(review): confirm against the BigSize TLV
			// type encoding.
			expectEncoded: []byte{
				0x01, 0x03, 1, 2, 3,
				0x02, 0x03, 4, 5, 6,
				0x04, 0x03, 4, 4, 4,
				0x05, 0x03, 5, 5, 5,
				0xfe, 0x0, 0x1, 0x0, 0x1, 0x3, 0x63, 0x63,
				0x63,
			},
		},
		{
			// The extra data already contains type 1, which
			// collides with the known record of the same type.
			name: "duplicate records",
			extraData: ExtraOpaqueData{
				0x01, 0x03, 4, 4, 4,
				0x05, 0x03, 5, 5, 5,
			},
			customRecords: CustomRecords{
				MinCustomRecordsTlvType + 1: []byte{99, 99, 99},
			},
			knownRecords: []tlv.RecordProducer{
				&dummyRecordProducer{
					typ:           tlvType1.TypeVal(),
					scratchValue:  []byte{1, 2, 3},
					expectedValue: []byte{1, 2, 3},
				},
				&dummyRecordProducer{
					typ:           tlvType2.TypeVal(),
					scratchValue:  []byte{4, 5, 6},
					expectedValue: []byte{4, 5, 6},
				},
			},
			expectedErr: "duplicate record type: 1",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			encoded, err := MergeAndEncode(
				tc.knownRecords, tc.extraData, tc.customRecords,
			)

			if tc.expectedErr != "" {
				require.ErrorContains(t, err, tc.expectedErr)

				return
			}

			require.NoError(t, err)
			require.Equal(t, tc.expectEncoded, encoded)

			// Clear all the scratch values, to make sure they're
			// decoded from the data again.
			for _, dec := range tc.knownRecords {
				//nolint:forcetypeassert
				dec := dec.(*dummyRecordProducer)
				dec.scratchValue = nil
			}

			// Round-trip: the parse side must split the encoded
			// stream back into custom records, known records, and
			// leftover extra data.
			pCR, pKR, pED, err := ParseAndExtractCustomRecords(
				encoded, tc.knownRecords...,
			)
			require.NoError(t, err)

			require.Equal(t, tc.customRecords, pCR)
			require.Equal(t, tc.extraData, pED)

			for _, dec := range tc.knownRecords {
				//nolint:forcetypeassert
				dec := dec.(*dummyRecordProducer)
				require.Equal(
					t, dec.expectedValue, dec.scratchValue,
				)

				require.Contains(t, pKR, dec.typ)
			}
		})
	}
}