 require 'heimdall_tools/asff_compatible_products/prowler'
 require 'heimdall_tools/asff_compatible_products/securityhub'
 
-
 module HeimdallTools
   DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
 
@@ -40,19 +39,19 @@ class ASFFMapper
     }.freeze
 
     PRODUCT_ARN_MAPPING = {
-      /arn:.+:securityhub:.+:.*:product\/aws\/firewall-manager/ => FirewallManager,
-      /arn:.+:securityhub:.+:.*:product\/aws\/securityhub/ => SecurityHub,
-      /arn:.+:securityhub:.+:.*:product\/prowler\/prowler/ => Prowler
+      %r{arn:.+:securityhub:.+:.*:product/aws/firewall-manager} => FirewallManager,
+      %r{arn:.+:securityhub:.+:.*:product/aws/securityhub} => SecurityHub,
+      %r{arn:.+:securityhub:.+:.*:product/prowler/prowler} => Prowler
     }.freeze
 
     def initialize(asff_json, securityhub_standards_json_array: nil, meta: nil)
       @meta = meta
 
       @supporting_docs = {}
-      @supporting_docs[SecurityHub] = SecurityHub.supporting_docs({standards: securityhub_standards_json_array})
+      @supporting_docs[SecurityHub] = SecurityHub.supporting_docs({ standards: securityhub_standards_json_array })
 
       begin
-        asff_required_keys = %w(AwsAccountId CreatedAt Description GeneratorId Id ProductArn Resources SchemaVersion Severity Title Types UpdatedAt)
+        asff_required_keys = %w{AwsAccountId CreatedAt Description GeneratorId Id ProductArn Resources SchemaVersion Severity Title Types UpdatedAt}
         @report = JSON.parse(asff_json)
         if @report.length == 1 && @report.member?('Findings') && @report['Findings'].each { |finding| asff_required_keys.difference(finding.keys).none? }.all?
           # ideal case that is spec compliant
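
# --- Illustrative sketch, not part of the diff: the Style/RegexpLiteral change
# above swaps /.../ for %r{...} because the ARN patterns are full of forward
# slashes; %r{} needs no \/ escapes. The sample ARN below is made up.
arn = 'arn:aws:securityhub:us-east-1:123456789012:product/aws/securityhub'
arn.match?(/arn:.+:securityhub:.+:.*:product\/aws\/securityhub/) # => true, but noisy
arn.match?(%r{arn:.+:securityhub:.+:.*:product/aws/securityhub}) # => true, same regexp
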
@@ -61,7 +60,7 @@ def initialize(asff_json, securityhub_standards_json_array: nil, meta: nil)
           # individual finding so have to add wrapping array
           @report = { 'Findings' => [@report] }
         else
-          raise "Not a findings file nor an individual finding"
+          raise 'Not a findings file nor an individual finding'
         end
       rescue StandardError => e
         raise "Invalid ASFF file provided:\nException: #{e}"
@@ -79,12 +78,10 @@ def external_product_handler(product, data, func, default)
         keywords = { encode: method(:encode) }
         keywords = keywords.merge(@supporting_docs[PRODUCT_ARN_MAPPING[arn || product]]) if @supporting_docs.member?(PRODUCT_ARN_MAPPING[arn || product])
         PRODUCT_ARN_MAPPING[arn || product].send(func, data, **keywords)
+      elsif default.is_a? Proc
+        default.call
       else
-        if default.is_a? Proc
-          default.call
-        else
-          default
-        end
+        default
       end
     end
 
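
# --- Illustrative sketch, not part of the diff: the Style/IfInsideElse fix above.
# Hoisting the nested branch into an elsif keeps the same semantics: a Proc
# default is called lazily, anything else is returned as-is.
def resolve_default(default)
  if default.is_a? Proc
    default.call
  else
    default
  end
end
resolve_default(42)             # => 42
resolve_default(proc { 6 * 7 }) # => 42, computed only on demand
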
@@ -100,7 +97,7 @@ def impact(finding)
         imp = :INFORMATIONAL
       else
         # severity is required, but can be either 'label' or 'normalized' internally with 'label' being preferred. other values can be in here too such as the original severity rating.
-        default = Proc.new { finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0 }
+        default = proc { finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0 }
         imp = external_product_handler(finding['ProductArn'], finding, :finding_impact, default)
       end
       imp.is_a?(Symbol) ? IMPACT_MAPPING[imp] : imp
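
# --- Illustrative sketch, not part of the diff: Style/Proc. Kernel#proc is the
# idiomatic spelling of Proc.new and builds an identical object. Sample data only:
severity = { 'Normalized' => 70 }
default = proc { severity.key?('Label') ? severity['Label'].to_sym : severity['Normalized'] / 100.0 }
default.call # => 0.7
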
@@ -140,7 +137,7 @@ def subfindings(finding)
 
       subfinding['code_desc'] = external_product_handler(finding['ProductArn'], finding, :subfindings_code_desc, '')
       subfinding['code_desc'] += '; ' unless subfinding['code_desc'].empty?
-      subfinding['code_desc'] += "Resources: [#{finding['Resources'].map { |r| "Type: #{encode(r['Type'])}, Id: #{encode(r['Id'])}#{', Partition: ' + encode(r['Partition']) if r.key?('Partition')}#{', Region: ' + encode(r['Region']) if r.key?('Region')}" }.join(', ')}]"
+      subfinding['code_desc'] += "Resources: [#{finding['Resources'].map { |r| "Type: #{encode(r['Type'])}, Id: #{encode(r['Id'])}#{", Partition: #{encode(r['Partition'])}" if r.key?('Partition')}#{", Region: #{encode(r['Region'])}" if r.key?('Region')}" }.join(', ')}]"
 
       subfinding['start_time'] = finding.key?('LastObservedAt') ? finding['LastObservedAt'] : finding['UpdatedAt']
 
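
# --- Illustrative sketch, not part of the diff: Style/StringConcatenation. The
# rewritten line above replaces '+' inside an interpolated conditional with a
# nested interpolation; output is identical. Sample resource hash only:
r = { 'Id' => 'vol-1', 'Region' => 'us-east-1' }
", Region: " + r['Region'] # => ", Region: us-east-1" (old style)
", Region: #{r['Region']}" # => ", Region: us-east-1" (preferred)
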
@@ -167,7 +164,7 @@ def to_hdf
       item['desc'] = encode(finding['Description'])
 
       item['descriptions'] = []
-      item['descriptions'] << desc_tags(finding['Remediation']['Recommendation'].map { |k, v| encode(v) }.join("\n"), 'fix') if finding.key?('Remediation') && finding['Remediation'].key?('Recommendation')
+      item['descriptions'] << desc_tags(finding['Remediation']['Recommendation'].map { |_k, v| encode(v) }.join("\n"), 'fix') if finding.key?('Remediation') && finding['Remediation'].key?('Recommendation')
 
       item['refs'] = []
       item['refs'] << { url: finding['SourceUrl'] } if finding.key?('SourceUrl')
@@ -203,7 +200,7 @@ def to_hdf
           # add product name to id if any ids are the same across products
           item['id'] = product_groups.filter { |pg| pg != product }.values.any? { |ig| ig.keys.include?(id) } ? "[#{product_name}] #{id}" : id
 
-          item['title'] = "#{product_name}: #{group.map { |d| d['title'] }.uniq.join(";")}"
+          item['title'] = "#{product_name}: #{group.map { |d| d['title'] }.uniq.join(';')}"
 
           item['tags'] = { nist: group.map { |d| d['tags'][:nist] }.flatten.uniq }
 
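
# --- Illustrative sketch, not part of the diff: Style/StringLiterals. Single
# quotes are preferred when no interpolation or escape is needed; ';' and ";"
# are the same string, so join(';') is purely stylistic.
%w{a b c}.join(';') # => "a;b;c"
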
@@ -216,16 +213,16 @@ def to_hdf
           item['refs'] = group.map { |d| d['refs'] }.flatten.compact.reject(&:empty?).uniq
 
           item['source_location'] = NA_HASH
-          item['code'] = JSON.pretty_generate({ "Findings": findings })
+          item['code'] = JSON.pretty_generate({ Findings: findings })
 
           item['results'] = group.map { |d| d['results'] }.flatten.uniq
 
           controls << item
         end
       end
 
-      results = HeimdallDataFormat.new(profile_name: @meta && @meta.key?('name') ? @meta['name'] : 'AWS Security Finding Format',
-                                       title: @meta && @meta.key?('title') ? @meta['title'] : "ASFF findings",
+      results = HeimdallDataFormat.new(profile_name: @meta&.key?('name') ? @meta['name'] : 'AWS Security Finding Format',
+                                       title: @meta&.key?('title') ? @meta['title'] : 'ASFF findings',
                                        controls: controls)
       results.to_hdf
     end
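
# --- Illustrative sketch, not part of the diff: Style/SafeNavigation. `@meta&.key?`
# replaces `@meta && @meta.key?`; when the receiver is nil, &. short-circuits to
# nil instead of raising NoMethodError.
meta = nil
meta&.key?('name')                                                # => nil, no error
meta = { 'name' => 'custom profile' }
meta&.key?('name') ? meta['name'] : 'AWS Security Finding Format' # => "custom profile"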