 # Determine if project sites support HTTPS

 class HardenedSitesDetective < Detective
-  # Field name, must be in lowercase.
-  XCTO = 'x-content-type-options'
-
-  # The sole allowed value for the X-Content-Type-Options header.
-  NOSNIFF = 'nosniff'
-
   # All of the security-hardening headers that need to be present to pass.
   # They're listed in the same order as the criteria text.
+  # We don't list 'x-frame-options' here because there's a CSP alternative.
+  # Technically 'x-content-type-options' must be 'nosniff' but if you can
+  # set it at all, you can set it correctly, so we don't bother to dig further.
   # Field names must be in lowercase here.
-  CHECK =
-    [
-      'content-security-policy', 'strict-transport-security',
-      XCTO, 'x-frame-options'
-    ].freeze
+  REQUIRED_FIELDS = %w[
+    content-security-policy strict-transport-security
+    x-content-type-options
+  ].freeze
   MET =
     {
       value: 'Met', confidence: 3,
@@ -29,8 +25,7 @@ class HardenedSitesDetective < Detective
   UNMET_MISSING =
     {
       value: 'Unmet', confidence: 5,
-      explanation: '// One or more of the required security hardening headers ' \
-        'is missing.'
+      explanation: 'Required security hardening headers missing: '
     }.freeze
   UNMET_NOSNIFF =
     {
@@ -41,16 +36,34 @@ class HardenedSitesDetective < Detective
   INPUTS = %i[repo_url homepage_url].freeze
   OUTPUTS = [:hardened_site_status].freeze

-  # Check the given list of header hashes to make sure that all expected
-  # keys are present.
-  def security_fields_present?(headers_list)
-    result = true
-    headers_list.each do |headers|
-      result &&= CHECK.reduce(true) { |acc, elem| acc & headers.key?(elem) }
+  # Check the given hash of header values to make sure that all expected
+  # keys are present. Return a list of missing fields (preferably empty).
+  def missing_security_fields(headers)
+    result = []
+    REQUIRED_FIELDS.each do |required_item|
+      if !headers.key?(required_item)
+        result.append(required_item.to_s)
+      end
     end
     result
   end
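As a quick check on the new helper's shape, here is a minimal standalone sketch (not the PR's code: the `reject` one-liner is an equivalent formulation, and the headers hash is an invented example using the lowercase keys that `get_headers` guarantees):

```ruby
REQUIRED_FIELDS = %w[
  content-security-policy strict-transport-security x-content-type-options
].freeze

# Keep only the required names absent from the (already lowercased) headers.
def missing_security_fields(headers)
  REQUIRED_FIELDS.reject { |field| headers.key?(field) }
end

puts missing_security_fields(
  'strict-transport-security' => 'max-age=31536000'
).inspect
# => ["content-security-policy", "x-content-type-options"]
```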

+  # Return a list of missing frame-options fields (preferably empty).
+  # Using content-security-policy's frame-ancestors is a valid way to do it.
+  # Instead of parsing content-security-policy,
+  # we just check for the string 'frame-ancestors'. That can be fooled by
+  # weird CSPs, but this isn't expected to be a problem, and people who do
+  # that are just hurting themselves.
+  def missing_frame_options(headers)
+    if headers.key?('x-frame-options') ||
+       (headers.key?('content-security-policy') &&
+        headers['content-security-policy'].include?('frame-ancestors'))
+      []
+    else
+      ['x-frame-options']
+    end
+  end
+
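To make the CSP alternative concrete, here is a hedged standalone sketch (same lowercase-key assumption; `Hash#fetch` with a default stands in for the diff's explicit `key?` guard):

```ruby
def missing_frame_options(headers)
  protected_from_framing =
    headers.key?('x-frame-options') ||
    headers.fetch('content-security-policy', '').include?('frame-ancestors')
  protected_from_framing ? [] : ['x-frame-options']
end

# A CSP frame-ancestors directive passes even without X-Frame-Options.
puts missing_frame_options(
  'content-security-policy' => "default-src 'self'; frame-ancestors 'none'"
).inspect
# => []

# Neither mechanism present: the field is reported as missing.
puts missing_frame_options({}).inspect
# => ["x-frame-options"]
```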
   # Perform GET request, and return either an empty hash (if the GET is
   # unsuccessful) or a hash of the HTTP response header keys and values.
   # Note: in the returned hash all field names are ASCII *lowercase*, so that
@@ -66,36 +79,52 @@ def get_headers(evidence, url)
     results.transform_keys { |k| k.to_s.downcase(:ascii) }
   end
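The lowercase note matters because every lookup above uses literal lowercase keys while HTTP field names are case-insensitive; a tiny illustration of the normalization (sample header only, not from the diff):

```ruby
headers = { 'Content-Security-Policy' => "default-src 'self'" }
puts headers.transform_keys { |k| k.to_s.downcase(:ascii) }.inspect
# => {"content-security-policy"=>"default-src 'self'"}
```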

-  # Inspect the X-Content-Type-Options headers and make sure that they have the
-  # only allowed value.
-  def check_nosniff?(headers_list)
-    result = true
-    headers_list.each do |response_headers|
-      xcto = response_headers[XCTO]
-      result &&= xcto.nil? ? false : xcto.casecmp(NOSNIFF).zero?
+  # Given evidence and a URL, return the list of problems with it.
+  def problems_in_url(evidence, url)
+    headers = get_headers(evidence, url)
+    problems = missing_security_fields(headers)
+    problems += missing_frame_options(headers)
+    if problems.empty?
+      problems
+    else
+      ["#{url}: #{problems.join(', ')}"]
     end
-    result
+  end
+
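For reviewers, the report string each problematic URL collapses into looks like this (URL and field list are illustrative placeholders, not output from the PR):

```ruby
url = 'https://project.example' # hypothetical project site
problems = ['content-security-policy', 'x-frame-options']
puts ["#{url}: #{problems.join(', ')}"].inspect
# => ["https://project.example: content-security-policy, x-frame-options"]
```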
+  # Given evidence and a set of URLs, return the list of problems with the URLs.
+  def problems_in_urls(evidence, urls)
+    all_problems = []
+    urls.each do |url|
+      all_problems += problems_in_url(evidence, url)
+    end
+    all_problems
   end

   # Internal method that does the inspection work for the 'analyze' method.
-  def check_urls(evidence, homepage_url, repo_url)
-    @results = {}
+  # rubocop:disable Metrics/MethodLength
+  def report_on_check_urls(evidence, homepage_url, repo_url)
+    results = {}
     # Only complain if we have *both* a homepage_url AND repo_url.
     # When that isn't true other criteria will catch it first.
     if homepage_url.present? && repo_url.present?
-      homepage_headers = get_headers(evidence, homepage_url)
-      repo_headers = get_headers(evidence, repo_url)
-      hardened = security_fields_present?([homepage_headers, repo_headers])
-      @results[:hardened_site_status] = hardened ? MET : UNMET_MISSING
-      hardened ||= check_nosniff?([homepage_headers, repo_headers])
-      @results[:hardened_site_status] = UNMET_NOSNIFF unless hardened
+      urls = [homepage_url, repo_url].to_set
+      all_problems = problems_in_urls(evidence, urls)
+      results[:hardened_site_status] =
+        if all_problems.empty?
+          MET
+        else
+          answer = UNMET_MISSING.deep_dup # clone but result is not frozen
+          answer[:explanation] += all_problems.join(', ')
+          answer
+        end
     end
-    @results
+    results
   end
+  # rubocop:enable Metrics/MethodLength
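Putting the pieces together, a rough sketch of how the `Unmet` explanation gets assembled when problems are found (plain `dup` is used so the snippet runs without ActiveSupport's `deep_dup`, and the problem string is a placeholder):

```ruby
UNMET_MISSING = {
  value: 'Unmet', confidence: 5,
  explanation: 'Required security hardening headers missing: '
}.freeze

all_problems = ['https://project.example: x-content-type-options'] # placeholder
answer = UNMET_MISSING.dup # the app uses deep_dup; a shallow dup suffices here
answer[:explanation] += all_problems.join(', ')
puts answer[:explanation]
# => Required security hardening headers missing: https://project.example: x-content-type-options
```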

   # Analyze the home page and repository URLs to make sure that security
   # hardening headers are returned in the headers of a GET response.
   def analyze(evidence, current)
-    check_urls(evidence, current[:homepage_url], current[:repo_url])
+    report_on_check_urls(evidence, current[:homepage_url], current[:repo_url])
   end
 end