
Commit bef50bb

Adding Changelog and release date for v1.14 release (#417)
* Adding Changelog and release date for v1.14 release
* Adding deprecated node mapping and a few test refactorings
* Linter fixes
* Fixing all flake8 errors
1 parent 2553222 commit bef50bb

9 files changed (+140, -44 lines)

CHANGELOG.md (+19)

@@ -10,6 +10,25 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p
 
 ### Changed
 
+## [v1.14](https://github.com/bugcrowd/vulnerability-rating-taxonomy/compare/v1.13...v1.14) - 2024-07-09
+### Added
+- Server Security Misconfiguration - Email verification bypass - P5
+- Server Security Misconfiguration - Missing Subresource Integrity - P5
+- Sensitive Data Exposure - Token Leakage via Referer - Password Reset Token - P5
+- Server Security Misconfiguration - Software Package Takeover - VARIES
+- Broken Access Control (BAC) - Privilege Escalation - VARIES
+- Data Biases - Representation Bias - VARIES
+- Data Biases - Pre-existing Bias - VARIES
+- Algorithmic Biases - Processing Bias - VARIES
+- Algorithmic Biases - Aggregation Bias - VARIES
+- Societal Biases - Confirmation Bias - VARIES
+- Societal Biases - Systemic Bias - VARIES
+- Misinterpretation Biases - Context Ignorance - VARIES
+- Developer Biases - Implicit Bias - VARIES
+
+### Removed
+- Broken Authentication and Session Management - Privilege Escalation - VARIES
+
 ## [v1.13](https://github.com/bugcrowd/vulnerability-rating-taxonomy/compare/v1.12...v1.13) - 2024-04-02
 ### Added
 - Physical Security Issues - Bypass of physical access control - VARIES

deprecated-node-mapping.json (+3)

@@ -232,5 +232,8 @@
   },
   "broken_access_control.server_side_request_forgery_ssrf.external": {
     "1.11": "server_security_misconfiguration.server_side_request_forgery_ssrf.external_low_impact"
+  },
+  "broken_authentication_and_session_management.privilege_escalation": {
+    "1.14": "broken_access_control.privilege_escalation"
   }
 }
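
The new entry retires broken_authentication_and_session_management.privilege_escalation and points it at the Broken Access Control node added in v1.14. A minimal sketch of how a consumer might follow this mapping (the resolve_deprecated helper and the inlined snippet are illustrative, not part of the repository):

# Illustrative subset of deprecated-node-mapping.json; the real file is loaded from disk.
DEPRECATED_MAPPING = {
    "broken_authentication_and_session_management.privilege_escalation": {
        "1.14": "broken_access_control.privilege_escalation"
    }
}


def resolve_deprecated(vrt_id, mapping=DEPRECATED_MAPPING):
    """Follow the newest-versioned replacement for a deprecated VRT id, if any."""
    if vrt_id not in mapping:
        return vrt_id
    # Pick the latest VRT version that remapped this id (versions compared numerically).
    latest = max(mapping[vrt_id], key=lambda v: [int(p) for p in v.split('.')])
    return mapping[vrt_id][latest]


print(resolve_deprecated(
    "broken_authentication_and_session_management.privilege_escalation"
))  # -> broken_access_control.privilege_escalation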

lib/artifacts/scw_artifact.py (+5, -3)

@@ -1,8 +1,8 @@
 import json
 import requests
-import utils.utils
 
-BASE_SCW_URL = 'https://integration-api.securecodewarrior.com/api/v1/trial?id=bugcrowd&mappingList=vrt&mappingKey='
+BASE_SCW_URL = 'https://integration-api.securecodewarrior.com\
+/api/v1/trial?id=bugcrowd&mappingList=vrt&mappingKey='
 OUTPUT_FILENAME = 'scw_links.json'
 
 
@@ -23,7 +23,9 @@ def scw_mapping(vrt_id):
 
 
 def join_vrt_id(parent_id, child_id):
-    return '.'.join([parent_id, child_id]) if parent_id is not None else child_id
+    return '.'.join(
+        [parent_id, child_id]
+    ) if parent_id is not None else child_id
 
 
 def generate_urls(vrt, content, parent_id=None):
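
A note on the reflowed BASE_SCW_URL: the trailing backslash is a line continuation inside the string literal, so the two physical lines still parse to the single original URL, provided the second line starts at column 0 as shown (any indentation would leak into the string). A quick illustrative check:

URL = 'https://integration-api.securecodewarrior.com\
/api/v1/trial?id=bugcrowd&mappingList=vrt&mappingKey='

# The backslash-newline pair is removed at parse time, leaving a single-line URL.
assert '\n' not in URL
assert URL.startswith('https://integration-api.securecodewarrior.com/api/v1/trial')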

lib/tests/test_artifact_format.py (+15, -12)

@@ -2,19 +2,22 @@
 import os
 import unittest
 
+
 class TestArtifactFormat(unittest.TestCase):
-    def setUp(self):
-        self.scw_artifact_path = os.path.join(
-            utils.THIRD_PARTY_MAPPING_DIR,
-            utils.SCW_DIR,
-            utils.SCW_FILENAME
-        )
+    def setUp(self):
+        print("\n`---{}---`".format(self._testMethodName))
+        self.scw_artifact_path = os.path.join(
+            utils.THIRD_PARTY_MAPPING_DIR,
+            utils.SCW_DIR,
+            utils.SCW_FILENAME
+        )
+
+    def test_artifact_loads_valid_json(self):
+        self.assertTrue(
+            utils.get_json(self.scw_artifact_path),
+            self.scw_artifact_path + ' is not valid JSON.'
+        )
 
-    def test_artifact_loads_valid_json(self):
-        self.assertTrue(
-            utils.get_json(self.scw_artifact_path),
-            self.scw_artifact_path + ' is not valid JSON.'
-        )
 
 if __name__ == "__main__":
-    unittest.main()
+    unittest.main()

lib/tests/test_deprecated_mapping.py (+27, -7)

@@ -5,9 +5,17 @@
 
 class TestDeprecatedMapping(unittest.TestCase):
     def setUp(self):
+        print("\n`---{}---`".format(self._testMethodName))
         self.vrt_versions = utils.all_versions(utils.VRT_FILENAME)
-        self.last_tagged_version = max([Version.coerce(x) for x in self.vrt_versions.keys() if x != 'current'])
-        self.deprecated_json = utils.get_json(utils.DEPRECATED_MAPPING_FILENAME)
+        self.last_tagged_version = max(
+            [
+                Version.coerce(x) for x in self.vrt_versions.keys()
+                if x != 'current'
+            ]
+        )
+        self.deprecated_json = utils.get_json(
+            utils.DEPRECATED_MAPPING_FILENAME
+        )
 
     def test_old_vrt_ids_have_current_node(self):
         for version, vrt in self.vrt_versions.items():
@@ -16,17 +24,28 @@ def test_old_vrt_ids_have_current_node(self):
             for id_list in utils.all_id_lists(vrt):
                 vrt_id = '.'.join(id_list)
                 if vrt_id in self.deprecated_json:
-                    max_ver = sorted(self.deprecated_json[vrt_id].keys(), key=lambda s: map(int, s.split('.')))[-1]
+                    max_ver = sorted(
+                        self.deprecated_json[vrt_id].keys(),
+                        key=lambda s: map(int, s.split('.'))
+                    )[-1]
                     vrt_id = self.deprecated_json[vrt_id][max_ver]
                     id_list = vrt_id.split('.')
-                self.assertTrue(vrt_id == 'other' or self.check_mapping(id_list),
-                                '%s from v%s has no mapping' % (vrt_id, version))
+                self.assertTrue(
+                    vrt_id == 'other' or self.check_mapping(id_list),
+                    '%s from v%s has no mapping' % (vrt_id, version)
+                )
 
     def test_deprecated_nodes_map_valid_node(self):
         for old_id, mapping in self.deprecated_json.items():
             for new_version, new_id in mapping.items():
-                self.assertTrue(new_id == 'other' or utils.id_valid(self.vrt_version(new_version), new_id.split('.')),
-                                new_id + ' is not valid')
+                self.assertTrue(
+                    new_id == 'other' or utils.id_valid(
+                        self.vrt_version(
+                            new_version
+                        ), new_id.split('.')
+                    ),
+                    new_id + ' is not valid'
+                )
 
     def check_mapping(self, id_list):
         if utils.id_valid(self.vrt_versions['current'], id_list):
@@ -44,5 +63,6 @@ def vrt_version(self, version):
         else:
             self.fail('Unknown version: %s' % version)
 
+
 if __name__ == "__main__":
     unittest.main()
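
One hedged observation on the reflowed sort in test_old_vrt_ids_have_current_node: in Python 3, map() returns an iterator, and iterators are not orderable, so key=lambda s: map(int, s.split('.')) only avoids a TypeError while each deprecated node carries a single version entry (sorted() then never has to compare two keys). A list-based key would stay safe if a node ever gains a second remapping, for example:

versions = ['1.9', '1.14', '1.10']

# Comparing map objects raises TypeError in Python 3:
#   sorted(versions, key=lambda s: map(int, s.split('.')))[-1]
# A list key compares element-wise and orders the versions numerically:
latest = sorted(versions, key=lambda s: [int(p) for p in s.split('.')])[-1]
print(latest)  # -> 1.14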

lib/tests/test_vrt.py (+34, -10)

@@ -5,13 +5,17 @@
 import glob
 import os
 
+
 class TestVrt(unittest.TestCase):
     def setUp(self):
         print("\n`---{}---`".format(self._testMethodName))
         self.vrt = utils.get_json(utils.VRT_FILENAME)
         self.mappings = [
-            { 'filename': f, 'name': os.path.splitext(os.path.basename(f))[0] }
-            for f in glob.glob(utils.MAPPING_DIR + '/**/*.json', recursive=True) if 'schema' not in f
+            {'filename': f, 'name': os.path.splitext(os.path.basename(f))[0]}
+            for f in glob.glob(
+                utils.MAPPING_DIR + '/**/*.json', recursive=True
+            )
+            if 'schema' not in f
         ]
 
     @unittest.skip('need to decide the best way to handle this')
@@ -20,15 +24,20 @@ def test_changelog_updated(self):
         Checks if CHANGELOG.md is being updated with the current commit
         and prompts the user if it isn't
         """
-        p = subprocess.Popen('git diff HEAD --stat --staged CHANGELOG.md | wc -l', shell=True, stdout=subprocess.PIPE)
+        p = subprocess.Popen(
+            'git diff HEAD --stat --staged CHANGELOG.md | wc -l',
+            shell=True, stdout=subprocess.PIPE
+        )
         out, _err = p.communicate()
         self.assertGreater(int(out), 0, 'CHANGELOG.md not updated')
 
     def validate_schema(self, schema_file, data_file):
         schema = utils.get_json(schema_file)
         data = utils.get_json(data_file)
         jsonschema.Draft4Validator.check_schema(schema)
-        error = jsonschema.exceptions.best_match(jsonschema.Draft4Validator(schema).iter_errors(data))
+        error = jsonschema.exceptions.best_match(
+            jsonschema.Draft4Validator(schema).iter_errors(data)
+        )
         if error:
             raise error
 
@@ -41,19 +50,30 @@ def test_mapping_schemas(self):
                 f'{utils.MAPPING_DIR}/**/{mapping["name"]}.schema.json',
                 recursive=True
             )[0]
-            self.assertTrue(os.path.isfile(schema_file), 'Missing schema file for %s mapping' % mapping['name'])
+            self.assertTrue(
+                os.path.isfile(schema_file),
+                'Missing schema file for %s mapping' % mapping['name']
+            )
             self.validate_schema(schema_file, mapping['filename'])
 
     def all_vrt_ids_have_mapping(self, mappping_filename, key):
         mapping = utils.get_json(mappping_filename)
         keyed_mapping = utils.key_by_id(mapping['content'])
-        for vrt_id_list in utils.all_id_lists(self.vrt, include_internal=False):
+        for vrt_id_list in utils.all_id_lists(
+            self.vrt, include_internal=False
+        ):
             result = utils.has_mapping(keyed_mapping, vrt_id_list, key)
             if key == 'cwe' and not result:
-                print('WARNING: no ' + key + ' mapping for ' + '.'.join(vrt_id_list))
+                print('WARNING: no ' + key + ' mapping for ' + '.'.join(
+                    vrt_id_list
+                ))
             else:
-                self.assertTrue(utils.has_mapping(keyed_mapping, vrt_id_list, key),
-                                'no ' + key + ' mapping for ' + '.'.join(vrt_id_list))
+                self.assertTrue(
+                    utils.has_mapping(
+                        keyed_mapping, vrt_id_list, key
+                    ),
+                    'no ' + key + ' mapping for ' + '.'.join(vrt_id_list)
+                )
 
     def test_all_vrt_ids_have_all_mappings(self):
         for mapping in self.mappings:
@@ -63,7 +83,11 @@ def only_map_valid_ids(self, mapping_filename):
         vrt_ids = utils.all_id_lists(self.vrt)
         mapping_ids = utils.all_id_lists(utils.get_json(mapping_filename))
         for id_list in mapping_ids:
-            self.assertIn(id_list, vrt_ids, 'invalid id in ' + mapping_filename + ' - ' + '.'.join(id_list))
+            self.assertIn(
+                id_list,
+                vrt_ids,
+                'invalid id in ' + mapping_filename + ' - ' + '.'.join(id_list)
+            )
 
     def test_only_map_valid_ids(self):
         for mapping in self.mappings:

lib/utils/utils.py (+27, -9)

@@ -10,13 +10,16 @@
 SCW_DIR = 'remediation_training'
 THIRD_PARTY_MAPPING_DIR = 'third-party-mappings'
 
+
 def get_json(filename):
     with open(filename) as f:
         return json.loads(f.read())
 
+
 def all_versions(filename):
     """
-    Find, open and parse all tagged versions of a json file, including the current version
+    Find, open and parse all tagged versions of a json file,
+    including the current version
 
     :param filename: The filename to find
     :return: a dictionary of all the versions, in the form
@@ -41,10 +44,12 @@ def id_valid(vrt, id_list):
     Check if a vrt id is valid
 
     :param vrt: The vrt object
-    :param id_list: The vrt id, split into components, eg ['category', 'subcategory', 'variant']
+    :param id_list: The vrt id, split into components,
+        eg ['category', 'subcategory', 'variant']
     :return: True/False
     """
-    # this is not particularly efficient, but it's more readable than other options so until we need to care...
+    # this is not particularly efficient, but it's more readable than other
+    # options so until we need to care...
     return id_list in all_id_lists(vrt)
 
 
@@ -53,7 +58,8 @@ def has_mapping(mapping, id_list, key):
     Check if a vrt id has a mapping
 
     :param mapping: The mapping object, keyed by id
-    :param id_list: The vrt id, split into components, eg ['category', 'subcategory', 'variant']
+    :param id_list: The vrt id, split into components,
+        eg ['category', 'subcategory', 'variant']
     :param key: The mapping key to look for, eg 'cvss_v3'
     :return: True/False
     """
@@ -72,9 +78,16 @@ def key_by_id(mapping):
     Converts arrays to hashes keyed by the id attribute for easier lookup. So
     [{'id': 'one', 'foo': 'bar'}, {'id': 'two', 'foo': 'baz'}]
     becomes
-    {'one': {'id': 'one', 'foo': 'bar'}, 'two': {'id': 'two', 'foo': 'baz'}}
+    {
+        'one': {'id': 'one', 'foo': 'bar'},
+        'two': {'id': 'two', 'foo': 'baz'}
+    }
     """
-    if isinstance(mapping, list) and isinstance(mapping[0], dict) and 'id' in mapping[0]:
+    if isinstance(
+        mapping, list
+    ) and isinstance(
+        mapping[0], dict
+    ) and 'id' in mapping[0]:
         return {x['id']: key_by_id(x) for x in mapping}
     elif isinstance(mapping, dict):
         return {k: key_by_id(v) for k, v in mapping.items()}
@@ -84,10 +97,12 @@ def key_by_id(mapping):
 
 def all_id_lists(vrt, include_internal=True):
     """
-    Get all valid vrt ids for a given vrt object, including internal nodes by default
+    Get all valid vrt ids for a given vrt object, including internal nodes
+    by default
 
     :param vrt: The vrt object
-    :param include_internal: Whether to include internal nodes or only leaf nodes
+    :param include_internal: Whether to include internal nodes or only
+        leaf nodes
     :return: ids in the form
     [
         ['category'],
@@ -98,7 +113,10 @@ def all_id_lists(vrt, include_internal=True):
     """
     def _all_id_lists(sub_vrt, prefix):
         if isinstance(sub_vrt, list):
-            return [vrt_id for entry in sub_vrt for vrt_id in _all_id_lists(entry, prefix)]
+            return [
+                vrt_id for entry in sub_vrt
+                for vrt_id in _all_id_lists(entry, prefix)
+            ]
         elif isinstance(sub_vrt, dict):
             if 'children' in sub_vrt:
                 new_prefix = prefix + [sub_vrt['id']]
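
The expanded key_by_id docstring example can be exercised directly; a minimal check, assuming lib/ is on the import path so utils.utils resolves the way it does elsewhere in the repo:

from utils.utils import key_by_id

# Input/output pair taken verbatim from the docstring above.
mapping = [{'id': 'one', 'foo': 'bar'}, {'id': 'two', 'foo': 'baz'}]
print(key_by_id(mapping))
# {'one': {'id': 'one', 'foo': 'bar'}, 'two': {'id': 'two', 'foo': 'baz'}}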

lib/validate_artifacts.py (+9, -2)

@@ -5,7 +5,11 @@
 from artifacts import scw_artifact
 
 artifact_json = utils.get_json(scw_artifact.OUTPUT_FILENAME)
-repo_path = os.path.join(utils.THIRD_PARTY_MAPPING_DIR, utils.SCW_DIR, utils.SCW_FILENAME)
+repo_path = os.path.join(
+    utils.THIRD_PARTY_MAPPING_DIR,
+    utils.SCW_DIR,
+    utils.SCW_FILENAME
+)
 print(os.path.abspath(repo_path))
 repo_json = utils.get_json(repo_path)
 
@@ -16,5 +20,8 @@
     print('SCW Document is valid!')
     sys.exit(0)
 else:
-    print('SCW Document is invalid, copy the artifact to the remediation training')
+    print(
+        'SCW Document is invalid, copy the artifact to the remediation\
training'
+    )
     sys.exit(1)
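
A small caveat on the reflowed print: the backslash continuation sits inside the string literal, so the message is spliced together exactly as the next physical line begins, and the space in "remediation training" depends on that line's leading whitespace. Implicit adjacent-string concatenation would keep the wording intact; a possible alternative, not what the commit does:

print(
    'SCW Document is invalid, '
    'copy the artifact to the remediation training'
)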

vulnerability-rating-taxonomy.json (+1, -1)

@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "release_date": "2024-04-02T00:00:00+00:00"
+    "release_date": "2024-07-09T00:00:00+00:00"
   },
   "content": [
     {
