Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1import json 

2import os 

3import sys 

4 

5import pytest 

6 

7sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../../../") 

8from sdc_etl_libs.sdc_data_schema.schema_exceptions import (DataSchemaCriticalError, DataSchemaFailedValidation) 

9from sdc_etl_libs.sdc_data_schema.schema_validation import (ResultTypeStatuses, SchemaValidation) 

10import sdc_etl_libs.sdc_data_schema.schema_exceptions as SchemaExceptions 

11 

12 

def test_validation__dataSchemafailedvalidation_raised():
    """
    Simple test to ensure DataSchemaFailedValidation is raised when total errors are more than 0, and not
    raised when total errors are 0.
    """

    # Case 1: at least one error recorded -> finish_validation() must raise.
    sv = SchemaValidation()
    sv.validation_type = "full"
    sv.schema_name = "test-test"
    sv.ttl_errors = 1
    with pytest.raises(SchemaExceptions.DataSchemaFailedValidation):
        sv.finish_validation()

    # Case 2: zero errors -> finish_validation() completes without raising.
    sv.validation_type = "full"
    sv.schema_name = "test-test"
    sv.ttl_errors = 0
    sv.data_schema = {"name": "test", "namespace": "test"}
    sv.finish_validation()

33 

34 

def test_validation__record_increments():
    """
    Simple test to ensure the record attributes do increment +1 when record results with the correct
    ResultTypeStatuses come through.
    """

    sv = SchemaValidation()
    # Three ERROR results and two UPDATE results, interleaved.
    recorded_statuses = [
        ResultTypeStatuses.ERROR,
        ResultTypeStatuses.UPDATE,
        ResultTypeStatuses.ERROR,
        ResultTypeStatuses.ERROR,
        ResultTypeStatuses.UPDATE,
    ]
    for status in recorded_statuses:
        sv.record_result(status, status, "TEST-SECTION", "TEST_MSG")

    assert sv.ttl_errors == 3
    assert sv.ttl_updates == 2

50 

51 

def test_validation__no_issues():
    """
    Ensure that schema passes validation if there are no issues.
    ####################################################################################################
    # key_o_1 = {"required": True, "data_type": str}                                                   #
    # key_o_2 = {"required": ["test|test"], "data_type": str}                                          #
    ####################################################################################################
    """

    ep_schema = json.loads("""
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value"
                }
            },
            "opts": {
            }
        }
    }
    """)

    sv = SchemaValidation()
    sv.validate_schema(ep_schema, validation_type_="endpoint")

    # A fully-compliant endpoint schema must record no errors.
    assert sv.ttl_errors == 0

85 

86 

def test_validation__key_required_and_missing():
    """
    Ensure that if a key is "required" with True in the enum, that a ERROR record is recorded and
    DataSchemaFailedValidation is raised.
    ####################################################################################################
    # key_o_1 = {"required": True, "data_type": str}                                                   #
    # key_o_2 = {"required": ["test|test"], "data_type": str}                                          #
    # key_o_3 = {"required": ["test|test2"], "data_type": "*"}                                         #
    ####################################################################################################
    """

    raw_schema = """
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_4": {
                        "subkey1": "value",
                        "subkey2": "value"
                    }
                }
            },
            "opts": {
            }
        }
    }
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    # "required": True in the enum -> error recorded for the absent key.
    key_o_1_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint:info:access:test_details',
        'note': '"key_o_1" is missing and is mandatory when endpoint is "test|test".'
    }
    assert key_o_1_error in sv.results

    # "required": ["test|test"] matches this endpoint's typing -> error recorded.
    key_o_2_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint:info:access:test_details',
        'note': '"key_o_2" is missing and is mandatory when endpoint is "test|test".'
    }
    assert key_o_2_error in sv.results

    # "required": ["test|test2"] does NOT match the "test|test" typing -> no error recorded.
    key_o_3_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint:info:access:test_details',
        'note': '"key_o_3" is missing and is mandatory when endpoint is "test|test2".'
    }
    assert key_o_3_error not in sv.results

146 

147 

def test_validation__catch_unknown_keys():
    """
    Ensure that unknown keys (or, keys that are not required or optional for a section) are caught
    and result in an ERROR being recorded and DataSchemaFailedValidation being raised.
    ####################################################################################################
    # key_o_5 = {"required": False, "optional": ["test|test2"], "data_type": str}                      #
    ####################################################################################################
    """

    raw_schema = """
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value",
                    "key0": "not-a-key",
                    "key_o_5": "not-required-or-optional"
                }
            },
            "opts": {
            }
        }
    }
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    # "key0" is not in the enum at all -> UNKNOWN_KEY error.
    unknown_key_error = {
        'status': 'ERROR',
        'reason': 'UNKNOWN_KEY',
        'section': 'endpoint:info:access:test_details',
        'note': '"key0" is an unknown key for this section.'
    }
    assert unknown_key_error in sv.results

    # "key_o_5" exists in the enum, but is neither required nor optional for the
    # "test|test" endpoint typing -> NOT_VALID error.
    not_valid_error = {
        'status': 'ERROR',
        'reason': 'NOT_VALID',
        'section': 'endpoint:info:access:test_details',
        'note': '"key_o_5" is not a valid key for this section when endpoint is "test|test".'
    }
    assert not_valid_error in sv.results

199 

200 

def test_validation__key_maps_to_subkey_but_keys_missing():
    """
    Ensure that if a key has a "keys" value, that it maps to to that Enum and validates that section
    based on the new keys. This test specifically tests when the subkeys are missing and an error is expected.
    ####################################################################################################
    # class TestEndpointAccessKeys(Enum):                                                              #
    #     key_o_4 = {"required": False, "data_type": dict, "keys": TestSubKeys}                        #
    #                                                                                                  #
    # class TestSubKeys(Enum):                                                                         #
    #     subkey1 = {"required": True, "data_type": str}                                               #
    #     subkey2 = {"required": True, "data_type": str}                                               #
    ####################################################################################################
    """

    raw_schema = """
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value",
                    "key_o_4": {
                    }
                }
            },
            "opts": {
            }
        }
    }
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    # Both required subkeys of the empty "key_o_4" dict must be flagged as missing.
    subkey1_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint:info:access:test_details:key_o_4',
        'note': '"subkey1" is missing and is mandatory when endpoint is "test|test".'
    }
    assert subkey1_error in sv.results

    subkey2_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint:info:access:test_details:key_o_4',
        'note': '"subkey2" is missing and is mandatory when endpoint is "test|test".'
    }
    assert subkey2_error in sv.results

254 

255 

def test_validation__key_maps_to_subkey_happy_path():
    """
    Ensure that if a key has a "keys" value, that it maps to to that Enum and validates that section
    based on the new keys. This test specifically tests when the subkeys exist and everything is good.
    ####################################################################################################
    # class TestEndpointAccessKeys(Enum):                                                              #
    #     key_o_4 = {"required": False, "data_type": dict, "keys": TestSubKeys}                        #
    #                                                                                                  #
    # class TestSubKeys(Enum):                                                                         #
    #     subkey1 = {"required": True, "data_type": str}                                               #
    #     subkey2 = {"required": True, "data_type": str}                                               #
    ####################################################################################################
    """

    raw_schema = """
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value",
                    "key_o_4": {
                        "subkey1": "value",
                        "subkey2": "value"
                    }
                }
            },
            "opts": {
            }
        }
    }
    """

    sv = SchemaValidation()
    schema = sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    # Both subkeys were supplied, so they should come back intact on the validated schema.
    subsection = schema["info"]["access"]["test_details"]["key_o_4"]
    assert subsection["subkey1"] == "value"
    assert subsection["subkey2"] == "value"

300 

301 

def test_validation__missing_endpoint_typings__info_section():
    """
    Ensure if "type" key is missing in "info" section of an endpoint, that a specific ERROR message is recorded.
    This would be the "type" of destination, e.g. snowflke, s3...
    """

    # Endpoint-level "type" is intentionally absent here.
    raw_schema = """
    {
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value"
                }
            }
        },
        "opts": {
        }
    }
    }
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    expected_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint:info',
        'note': '"type" is missing here, and needs to be provided being validating rest of endpoint.'
    }
    assert expected_error in sv.results

338 

339 

def test_validation__missing_endpoint_typings__parent_section():
    """
    Ensure if "type" key is missing in the parent section of an endpoint, that a specific ERROR message is recorded.
    This would be the "type" of source or sink.

    NOTE: this test was previously (mis)named test_validation__missing_endpoint_typings__info_section,
    which duplicated the test above it — the second definition shadowed the first, so pytest silently
    skipped the info-section test. Renamed to match its docstring ("parent section").
    """

    # The endpoint's top-level "type" (source/sink) is intentionally absent here.
    ep_schema_raw = """
    {
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value"
                }
            },
            "opts": {
            }
        }
    }
    """

    ep_schema = json.loads(ep_schema_raw)
    validation = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        validation.validate_schema(ep_schema, validation_type_="endpoint")

    # The missing typing should be reported against the endpoint's top level.
    assert {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'endpoint',
        'note': '"type" is missing here, and needs to be provided being validating rest of endpoint.'
    } in validation.results

375 

376 

def test_validation__endpoints_section_missing():
    """
    If the "endpoints" section is missing in a full data schema validation, ensure a ERROR is recorded and
    DataSchemaFailedValidation is raised.
    """

    # Minimal full schema with no "endpoints" key at all.
    raw_schema = """
    {"name": "test-name", "namespace": "test-name"}
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="full")

    expected_error = {
        'status': 'ERROR',
        'reason': 'MISSING_KEY',
        'section': 'top_level',
        'note': 'The "endpoints" section appears to be be missing. This is a required section.'
    }
    assert expected_error in sv.results

398 

399 

def test_validation__key_mapping_no_issue():
    """
    Ensure if key mapping is setup correctly in the enums, that a key and it's type properly resolve to the mapped keys
    ####################################################################################################
    # class TestType1Keys(Enum):                                                                       #
    #     subkey4 = {"required": True, "data_type": str}                                               #
    #     subkey5 = {"required": True, "data_type": str}                                               #
    #                                                                                                  #
    # class TestTypeMapping(Enum):                                                                     #
    #     testtype1 = TestType1Keys                                                                    #
    #     testtype2 = TestType2Keys                                                                    #
    #                                                                                                  #
    # class TestEndpointMapFromKeys(Enum):                                                             #
    #     type = {"required": True, "data_type": str, "allowed_values": ["testtype1", "testtype2"]}    #
    #     opts = {"required": True, "data_type": dict, "key_map": TestTypeMapping}                     #
    #                                                                                                  #
    # class TestEndpointOptsKeys(Enum):                                                                #
    #     key10 = {"required": False, "data_type": dict, "keys": TestEndpointMapFromKeys}              #
    ####################################################################################################
    """

    raw_schema = """
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value"
                }
            },
            "opts": {
                "key10": {
                    "type": "testtype1",
                    "opts": {
                        "subkey4": "this",
                        "subkey5": "that"
                    }
                }
            }
        }
    }
    """

    sv = SchemaValidation()
    schema = sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    # "testtype1" resolves through the key map, so validation is clean and the
    # mapped subkeys survive on the validated schema.
    assert sv.ttl_errors == 0
    mapped_opts = schema["info"]["opts"]["key10"]["opts"]
    assert mapped_opts["subkey4"] == "this"
    assert mapped_opts["subkey5"] == "that"

455 

456 

def test_validation__bad_parent_typing():
    """
    Ensure if key mapping is setup correctly in the enums, that a bad typing for the key records an ERROR and
    DataSchemaFailedValidation is raised.
    ####################################################################################################
    # class TestType1Keys(Enum):                                                                       #
    #     subkey4 = {"required": True, "data_type": str}                                               #
    #     subkey5 = {"required": True, "data_type": str}                                               #
    #                                                                                                  #
    # class TestTypeMapping(Enum):                                                                     #
    #     testtype1 = TestType1Keys                                                                    #
    #     testtype2 = TestType2Keys                                                                    #
    #                                                                                                  #
    # class TestEndpointMapFromKeys(Enum):                                                             #
    #     type = {"required": True, "data_type": str, "allowed_values": ["testtype1", "testtype2"]}    #
    #     opts = {"required": True, "data_type": dict, "key_map": TestTypeMapping}                     #
    #                                                                                                  #
    # class TestEndpointOptsKeys(Enum):                                                                #
    #     key10 = {"required": False, "data_type": dict, "keys": TestEndpointMapFromKeys}              #
    ####################################################################################################
    """

    # "not-allowed-type" is outside the allowed_values for key10's "type".
    raw_schema = """
    {
        "type": "test",
        "tag": "main_source",
        "info": {
            "type": "test",
            "access": {
                "type": "test_other",
                "test_details": {
                    "key_o_1": "value",
                    "key_o_2": "value"
                }
            },
            "opts": {
                "key10": {
                    "type": "not-allowed-type",
                    "opts": {
                        "subkey4": "this",
                        "subkey5": "that"
                    }
                }
            }
        }
    }
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="endpoint")

    expected_error = {
        'status': 'ERROR',
        'reason': 'BAD_PARENT_TYPE',
        'section': 'endpoint:info:opts:key10:opts',
        'note': 'Section could not be validated due to bad parent section type. Check allowed values.'
    }
    assert expected_error in sv.results

516 

517 

def test_validation__verify_tag_uniqueness():
    """
    Ensure that if a endpoint tag name is used more than once, an ERROR is recorded and DataSchemaFailedValidation is raised.
    """

    # Both endpoints deliberately share the tag "main_source".
    raw_schema = """
    {
        "namespace": "Test",
        "name": "test",
        "type": "object",
        "country_code": "USA",
        "estimated_row_size": "10b",
        "estimated_row_count": 3000,
        "endpoints": [
            {
                "type": "source",
                "tag": "main_source",
                "info": {
                    "type": "s3"
                }
            },
            {
                "type": "sink",
                "tag": "main_source",
                "info": {
                    "type": "snowflake"
                }
            }
        ]
    }
    """

    sv = SchemaValidation()
    with pytest.raises(DataSchemaFailedValidation):
        sv.validate_schema(json.loads(raw_schema), validation_type_="full")

    expected_error = {
        'status': 'ERROR',
        'reason': 'DUPED_TAG',
        'section': 'top_level:endpoints',
        'note': 'The tag "main_source" is used 2 time(s). It can only be used once per schema.'
    }
    assert expected_error in sv.results

561 

562 

def test_validation__verify_fields_defaultvalue_set():
    """
    Ensure that if a merge key is set where there is no default key a warning is issued
    """

    # "CALL ID" has sf_merge_key=true but no default_value; "SESSION ID" has both.
    raw_schema = """
    {"namespace": "Test",
    "name": "test",
    "type": "object",
    "country_code": "USA",
    "estimated_row_size": "10b",
    "estimated_row_count": 3000,
    "endpoints": [
        {
            "type": "source",
            "tag": "main_source_0",
            "info": {
                "type": "sftp",
                "access": {
                    "host": "xxx.com",
                    "path": "/test/",
                    "port": 22,
                    "credentials": {
                        "type": "aws_secrets",
                        "opts": {
                            "name": "secret"
                        }
                    }
                },
                "opts": {},
                "file_info": {
                    "type": "csv",
                    "opts": {
                        "file_regex": "yourmama",
                        "delimiter": ",",
                        "headers": true,
                        "encoding": "utf-8",
                        "compression_type": null,
                        "format": null,
                        "line_terminator": null
                    }
                }
            }
        },
        {
            "type": "source",
            "tag": "main_source_1",
            "info": {
                "type": "sftp",
                "access": {
                    "host": "xxx.com",
                    "path": "/test/",
                    "port": 22,
                    "credentials": {
                        "type": "aws_secrets",
                        "opts": {
                            "name": "secret"
                        }
                    }
                },
                "opts": {},
                "file_info": {
                    "type": "csv",
                    "opts": {
                        "file_regex": "yourmama",
                        "delimiter": ",",
                        "headers": true,
                        "encoding": "utf-8",
                        "compression_type": null,
                        "format": null,
                        "line_terminator": null
                    }
                }
            }
        }
    ],
    "fields": [
        {
            "name": "CALL ID",
            "type": {
                "type": "long"
            },
            "sf_merge_key": true
        },
        {
            "name": "SESSION ID",
            "type": {
                "type": "string"
            },
            "sf_merge_key": true,
            "default_value": "no_session_id"
        }
    ]
    }
    """

    sv = SchemaValidation()
    sv.validate_schema(json.loads(raw_schema), validation_type_="full")

    # Only the field lacking default_value should draw the warning.
    expected_warning = {
        'note': 'Default_value should be set when sf_merge_key = True',
        'status': 'WARNING',
        'reason': 'MISSING_KEY',
        'section': 'top_level:fields[CALL ID]'
    }
    assert expected_warning in sv.results

668 

669 

def test_validation__verify_fields_uniqueness():
    """
    Ensure a duped field name is flagged as a warning
    """

    # "SES_ID" renames to "SESSION ID", colliding with the existing field name.
    raw_schema = """
    {"namespace": "Test",
    "name": "test",
    "type": "object",
    "country_code": "USA",
    "estimated_row_size": "10b",
    "estimated_row_count": 3000,
    "contains_pii": false,
    "endpoints": [
        {
            "type": "source",
            "tag": "main_source_0",
            "info": {
                "type": "sftp",
                "access": {
                    "host": "xxx.com",
                    "path": "/test/",
                    "port": 22,
                    "credentials": {
                        "type": "aws_secrets",
                        "opts": {
                            "name": "secret"
                        }
                    }
                },
                "opts": {},
                "file_info": {
                    "type": "csv",
                    "opts": {
                        "file_regex": "yourmama",
                        "delimiter": ",",
                        "headers": true,
                        "encoding": "utf-8",
                        "compression_type": null,
                        "format": null,
                        "line_terminator": null
                    }
                }
            }
        },
        {
            "type": "source",
            "tag": "main_source_1",
            "info": {
                "type": "sftp",
                "access": {
                    "host": "xxx.com",
                    "path": "/test/",
                    "port": 22,
                    "credentials": {
                        "type": "aws_secrets",
                        "opts": {
                            "name": "secret"
                        }
                    }
                },
                "opts": {},
                "file_info": {
                    "type": "csv",
                    "opts": {
                        "file_regex": "yourmama",
                        "delimiter": ",",
                        "headers": true,
                        "encoding": "utf-8",
                        "compression_type": null,
                        "format": null,
                        "line_terminator": null
                    }
                }
            }
        }
    ],
    "fields": [
        {
            "name": "CALL ID",
            "type": {
                "type": "long"
            },
            "sf_merge_key": true,
            "default_value": "no_call_id"
        },
        {
            "name": "SESSION ID",
            "type": {
                "type": "string"
            },
            "sf_merge_key": true,
            "default_value": "no_session_id"
        },
        {
            "name": "SES_ID",
            "type": {
                "type": "string"
            },
            "rename": "SESSION ID"

        }
    ]
    }
    """

    sv = SchemaValidation()
    sv.validate_schema(json.loads(raw_schema), validation_type_="full")

    expected_warning = {
        'note': 'Duplicate name in field list',
        'status': 'WARNING',
        'reason': 'DUPLICATE_VALUE',
        'section': 'top_level:fields[SESSION ID]'
    }
    assert expected_warning in sv.results