From b38d34e8266bf8487f9a6ef4868ea676bbf6b31e Mon Sep 17 00:00:00 2001 From: rfrisch Date: Sat, 20 Jan 2018 08:22:43 +0100 Subject: [PATCH 1/6] support for excel (break currrent database) --- ..._generation.rb => docx_xslt_generation.rb} | 8 +- helpers/helper.rb | 2008 ++++++++--------- helpers/xslx_xslt_generation.rb | 396 ++++ model/master.rb | 687 +++--- routes/admin.rb | 851 +++---- routes/master.rb | 232 +- routes/report.rb | 1797 +++++++-------- scripts/first_time.rb | 349 +-- serpico.rb | 44 +- server.rb | 343 ++- views/add_template.haml | 43 +- views/edit_template.haml | 35 +- views/layout.haml | 5 +- views/new_report.haml | 31 +- views/report_edit.haml | 40 +- views/template_list.haml | 83 +- 16 files changed, 3639 insertions(+), 3313 deletions(-) rename helpers/{xslt_generation.rb => docx_xslt_generation.rb} (99%) create mode 100644 helpers/xslx_xslt_generation.rb diff --git a/helpers/xslt_generation.rb b/helpers/docx_xslt_generation.rb similarity index 99% rename from helpers/xslt_generation.rb rename to helpers/docx_xslt_generation.rb index c7107ed2..22b9b38c 100644 --- a/helpers/xslt_generation.rb +++ b/helpers/docx_xslt_generation.rb @@ -16,7 +16,7 @@ def initialize(errorString) end end -def generate_xslt(docx) +def generate_docx_xslt(docx) # hardcoded stuff @top = ' @@ -33,7 +33,7 @@ def generate_xslt(docx) document = "" debug = false - document = read_rels(docx,"word/document.xml") + document = read_from_zip(docx,"word/document.xml") # fix for curly apostrophes document = document.gsub(/‘/,"'") @@ -622,7 +622,7 @@ def white_space(document) return document end -def generate_xslt_components(docx) +def generate_docx_xslt_components(docx) # Initialize the xsl @top = ' get_username) - if user - uname = user.username - else - uname = "unknown user" - end - if settings.logger_out - settings.logger_out.puts "|+| [#{DateTime.now.strftime("%d/%m/%Y %H:%M")}] #{msg} : #{uname}" - else - puts "|+| [#{DateTime.now.strftime("%d/%m/%Y %H:%M")}] #{msg} : #{uname}" - end + user = User.first(username: get_username) + uname = if user + user.username + else + 'unknown user' + end + if settings.logger_out + settings.logger_out.puts "|+| [#{DateTime.now.strftime('%d/%m/%Y %H:%M')}] #{msg} : #{uname}" + else + puts "|+| [#{DateTime.now.strftime('%d/%m/%Y %H:%M')}] #{msg} : #{uname}" + end end # Log a message globally, not attached to a user def server_log(msg) - if settings and settings.logger_out - settings.logger_out.puts "|+| [#{DateTime.now.strftime("%d/%m/%Y %H:%M")}] #{msg} : SERVER_LOG" - else - puts "|+| [#{DateTime.now.strftime("%d/%m/%Y %H:%M")}] #{msg} : SERVER_LOG" - end + if settings && settings.logger_out + settings.logger_out.puts "|+| [#{DateTime.now.strftime('%d/%m/%Y %H:%M')}] #{msg} : SERVER_LOG" + else + puts "|+| [#{DateTime.now.strftime('%d/%m/%Y %H:%M')}] #{msg} : SERVER_LOG" + end end -def docx_modify(rand_file,docx_xml,fil_r) - Zip::File.open(rand_file) do |zipfile| - zipfile.get_output_stream(fil_r) {|f| f.write(docx_xml)} - end +def archive_modify(rand_file, docx_xml, fil_r) + Zip::File.open(rand_file) do |zipfile| + zipfile.get_output_stream(fil_r) { |f| f.write(docx_xml) } + end end def find_headers_footers(docx) - header_footer = [] - - Zip::File.open(docx) do |zip| - i = 1 - while zip.find_entry("word/header#{i}.xml") != nil do - header_footer.push("word/header#{i}.xml") - i = i+1 - end - - i = 1 - while zip.find_entry("word/footer#{i}.xml") != nil do - header_footer.push("word/footer#{i}.xml") - i = i+1 - end - end - return header_footer + header_footer = [] + + 
Zip::File.open(docx) do |zip| + i = 1 + until zip.find_entry("word/header#{i}.xml").nil? + header_footer.push("word/header#{i}.xml") + i += 1 + end + + i = 1 + until zip.find_entry("word/footer#{i}.xml").nil? + header_footer.push("word/footer#{i}.xml") + i += 1 + end + end + header_footer end # Returns xmlText with hyperlinks and a list of References tags def updateHyperlinks(xmlText) - retHash = Hash.new + retHash = {} # Find urls urls = xmlText.scan(/{{.*}}<\/w:t>/) # Resources for tag - retHash["urls"] = [] - retHash["id"] = [] + retHash['urls'] = [] + retHash['id'] = [] i = 25 urls.each do |url| - cleanUrl = url.gsub("{{", "").gsub("}}", "") + cleanUrl = url.gsub('{{', '').gsub('}}', '') # set resourceId and xmlText resourceId = "r:id=\"rId#{i}\"" - xmlText = xmlText.gsub(url,"#{cleanUrl}") + xmlText = xmlText.gsub(url, "#{cleanUrl}") # remove tags - cleanUrl = cleanUrl.gsub("", "") - cleanUrl = cleanUrl.gsub("<\/w:t>", "") + cleanUrl = cleanUrl.gsub('', '') + cleanUrl = cleanUrl.gsub("<\/w:t>", '') # put urls in resources - retHash["urls"].push(cleanUrl) - retHash["id"].push("rId#{i}") - i = i+1 + retHash['urls'].push(cleanUrl) + retHash['id'].push("rId#{i}") + i += 1 end - retHash["xmlText"] = xmlText - return retHash + retHash['xmlText'] = xmlText + retHash end def setHyperlinks(xmlText) urls = xmlText.scan(/http(s).*<\/w:t>/) urls.each do |url| - xmlText = xmlText.gsub(url,"#{url}") + xmlText = xmlText.gsub(url, "#{url}") end - return xmlText + xmlText end -def read_rels(zipfile,fil_r) - content_types = "" - - Zip::File.open(zipfile) do |zipfile| - content_types = zipfile.read(fil_r) - end +def find_excel_worksheets(excel) + worksheets = [] + Zip::File.open(excel) do |zip| + i = 1 + until zip.find_entry("xl/worksheets/sheet#{i}.xml").nil? + worksheets.push("xl/worksheets/sheet#{i}.xml") + i += 1 + end + end + worksheets +end - return content_types +def read_from_zip(zipfile, fil_r) + file = '' + Zip::File.open(zipfile) do |zipfile| + file = zipfile.read(fil_r) + end + file end -def write_rels(zipfile, fil_r, content) +def write_to_zip(zipfile, fil_r, content) Zip::File.open(zipfile) do |zipfile| - zipfile.get_output_stream(fil_r) {|f| f.write(content)} + zipfile.get_output_stream(fil_r) { |f| f.write(content) } end end def zip_attachments(zip_file) Zip::Archive.open(zip_file, Zip::CREATE) do |zipfile| - Dir["../attachments/*" ].each do | name| + Dir['../attachments/*'].each do |name| zipfile.add_file(name) end end @@ -110,281 +120,274 @@ def zip_attachments(zip_file) # this tallies the findings by criticality and sets them as a udv def add_findings_totals(udv, findings, config_options) - critical = 0 - high = 0 - moderate = 0 - low = 0 - informational = 0 - - unless udv - udv = {} - end - - if config_options.has_key?("cvssv2_scoring_override") - @cvssv2_scoring_override = config_options["cvssv2_scoring_override"] - else - @cvssv2_scoring_override = false - end + critical = 0 + high = 0 + moderate = 0 + low = 0 + informational = 0 + + udv ||= {} + + @cvssv2_scoring_override = if config_options.key?('cvssv2_scoring_override') + config_options['cvssv2_scoring_override'] + else + false + end # Query for the findings that match the report_id - if(config_options["dread"]) - findings.each do |finding| - if finding.dread_total >= 40 - critical += 1 - elsif finding.dread_total >= 30 and finding.dread_total < 40 - high += 1 - elsif finding.dread_total >= 20 and finding.dread_total <= 30 - moderate += 1 - elsif finding.dread_total >= 10 and finding.dread_total <= 20 - low += 1 - elsif 
finding.dread_total >= 0 and finding.dread_total <= 10 - informational += 1 - end - end - elsif(config_options["cvss"]) - if(@cvssv2_scoring_override) - findings.each do |finding| - if finding.cvss_total >= 9 - critical += 1 - elsif finding.cvss_total >= 7 - high += 1 - elsif finding.cvss_total >= 4 and finding.cvss_total <= 6.9 - moderate += 1 - elsif finding.cvss_total >= 0.1 and finding.cvss_total <= 3.9 - low += 1 - elsif finding.cvss_total < 0.1 - informational += 1 - end - end - else - findings.each do |finding| - if finding.cvss_total >= 7 - high += 1 - elsif finding.cvss_total >= 4 and finding.cvss_total <= 6.9 - moderate += 1 - elsif finding.cvss_total >= 0 and finding.cvss_total <= 3.9 - low += 1 - end - end - end - elsif(config_options["cvssv3"]) - findings.each do |finding| - if finding.cvss_total >= 9 - critical += 1 - elsif finding.cvss_total >= 7 and finding.cvss_total <= 8.9 - high += 1 - elsif finding.cvss_total >= 4 and finding.cvss_total <= 6.9 - moderate += 1 - elsif finding.cvss_total >= 0.1 and finding.cvss_total <= 3.9 - low += 1 - elsif finding.cvss_total < 0.1 - informational += 1 - end + if config_options['dread'] + findings.each do |finding| + if finding.dread_total >= 40 + critical += 1 + elsif (finding.dread_total >= 30) && (finding.dread_total < 40) + high += 1 + elsif (finding.dread_total >= 20) && (finding.dread_total <= 30) + moderate += 1 + elsif (finding.dread_total >= 10) && (finding.dread_total <= 20) + low += 1 + elsif (finding.dread_total >= 0) && (finding.dread_total <= 10) + informational += 1 + end + end + elsif config_options['cvss'] + if @cvssv2_scoring_override + findings.each do |finding| + if finding.cvss_total >= 9 + critical += 1 + elsif finding.cvss_total >= 7 + high += 1 + elsif (finding.cvss_total >= 4) && (finding.cvss_total <= 6.9) + moderate += 1 + elsif (finding.cvss_total >= 0.1) && (finding.cvss_total <= 3.9) + low += 1 + elsif finding.cvss_total < 0.1 + informational += 1 + end + end + else + findings.each do |finding| + if finding.cvss_total >= 7 + high += 1 + elsif (finding.cvss_total >= 4) && (finding.cvss_total <= 6.9) + moderate += 1 + elsif (finding.cvss_total >= 0) && (finding.cvss_total <= 3.9) + low += 1 + end + end end - else - findings.each do |finding| - if finding.risk == 4 - critical += 1 - elsif finding.risk == 3 - high += 1 - elsif finding.risk == 2 - moderate += 1 - elsif finding.risk == 1 - low += 1 - elsif finding.risk == 0 - informational += 1 - end + elsif config_options['cvssv3'] + findings.each do |finding| + if finding.cvss_total >= 9 + critical += 1 + elsif (finding.cvss_total >= 7) && (finding.cvss_total <= 8.9) + high += 1 + elsif (finding.cvss_total >= 4) && (finding.cvss_total <= 6.9) + moderate += 1 + elsif (finding.cvss_total >= 0.1) && (finding.cvss_total <= 3.9) + low += 1 + elsif finding.cvss_total < 0.1 + informational += 1 + end + end + else + findings.each do |finding| + if finding.risk == 4 + critical += 1 + elsif finding.risk == 3 + high += 1 + elsif finding.risk == 2 + moderate += 1 + elsif finding.risk == 1 + low += 1 + elsif finding.risk == 0 + informational += 1 + end end end - udv["critical_tally"] = critical - udv["high_tally"] = high - udv["moderate_tally"] = moderate - udv["low_tally"] = low - udv["informational_tally"] = informational + udv['critical_tally'] = critical + udv['high_tally'] = high + udv['moderate_tally'] = moderate + udv['low_tally'] = low + udv['informational_tally'] = informational - return udv + udv end - # The helper class exists to do string manipulation and heavy 
lifting def url_escape_hash(hash) - hash.each do |k,v| - v = "" unless v - v = CGI::escapeHTML(v) + hash.each do |k, v| + v ||= '' + v = CGI.escapeHTML(v) if v - # convert bullets - v = v.gsub("*-","") - v = v.gsub("-*","") + # convert bullets + v = v.gsub('*-', '') + v = v.gsub('-*', '') - #convert h4 - v = v.gsub("[==","

") - v = v.gsub("==]","

") + # convert h4 + v = v.gsub('[==', '

') + v = v.gsub('==]', '

') - #convert indent text - v = v.gsub("[--","") - v = v.gsub("--]","") + # convert indent text + v = v.gsub('[--', '') + v = v.gsub('--]', '') - #convert indent text - v = v.gsub("[~~","") - v = v.gsub("~~]","") + # convert indent text + v = v.gsub('[~~', '') + v = v.gsub('~~]', '') end - # replace linebreaks with paragraph xml elements - if v =~ /\r\n/ - new_v = "" - brs = v.split("\r\n") - brs.each do |br| - new_v << "" - new_v << br - new_v << "" - end - - v = new_v - elsif k == "remediation" or k == "overview" or k == "poc" or k == "affected_hosts" or k == "references" - new_v = "#{v}" - v = new_v - end - - hash[k] = v - end - - return hash + # replace linebreaks with paragraph xml elements + if v =~ /\r\n/ + new_v = '' + brs = v.split("\r\n") + brs.each do |br| + new_v << '' + new_v << br + new_v << '' + end + + v = new_v + elsif (k == 'remediation') || (k == 'overview') || (k == 'poc') || (k == 'affected_hosts') || (k == 'references') + new_v = "#{v}" + v = new_v + end + + hash[k] = v + end + + hash end def meta_markup(text) - if not text == nil - new_text = text.gsub(""," ").gsub("","") - new_text = new_text.gsub("","*-").gsub("","-*") - new_text = new_text.gsub("

","[==").gsub("

","==]") - new_text = new_text.gsub("","[[[").gsub("","]]]") - new_text = new_text.gsub("","[--").gsub("","--]") - new_text = new_text.gsub("","[~~").gsub("","~~]") - end + unless text.nil? + new_text = text.gsub('', ' ').gsub('', '') + new_text = new_text.gsub('', '*-').gsub('', '-*') + new_text = new_text.gsub('

', '[==').gsub('

', '==]') + new_text = new_text.gsub('', '[[[').gsub('', ']]]') + new_text = new_text.gsub('', '[--').gsub('', '--]') + new_text = new_text.gsub('', '[~~').gsub('', '~~]') + end end - # URL escaping messes up the inserted XML, this method switches it back to XML elements def meta_markup_unencode(findings_xml, report) - # code tags get added in later - findings_xml = findings_xml.gsub("[[[","") - findings_xml = findings_xml.gsub("]]]","") - - # creates paragraphs - findings_xml = findings_xml.gsub("<paragraph>","") - findings_xml = findings_xml.gsub("</paragraph>","") - # same for the bullets - findings_xml = findings_xml.gsub("<bullet>","") - findings_xml = findings_xml.gsub("</bullet>","") - # same for the h4 - findings_xml = findings_xml.gsub("<h4>","

") - findings_xml = findings_xml.gsub("</h4>","

") - # same for the code markings - findings_xml = findings_xml.gsub("<code>","") - findings_xml = findings_xml.gsub("</code>","") - # same for the indented text - findings_xml = findings_xml.gsub("<indented>","") - findings_xml = findings_xml.gsub("</indented>","") - # same for the indented text - findings_xml = findings_xml.gsub("<italics>","") - findings_xml = findings_xml.gsub("</italics>","") + findings_xml = findings_xml.gsub('[[[', '') + findings_xml = findings_xml.gsub(']]]', '') + + # creates paragraphs + findings_xml = findings_xml.gsub('<paragraph>', '') + findings_xml = findings_xml.gsub('</paragraph>', '') + # same for the bullets + findings_xml = findings_xml.gsub('<bullet>', '') + findings_xml = findings_xml.gsub('</bullet>', '') + # same for the h4 + findings_xml = findings_xml.gsub('<h4>', '

') + findings_xml = findings_xml.gsub('</h4>', '

') + # same for the code markings + findings_xml = findings_xml.gsub('<code>', '') + findings_xml = findings_xml.gsub('</code>', '') + # same for the indented text + findings_xml = findings_xml.gsub('<indented>', '') + findings_xml = findings_xml.gsub('</indented>', '') + # same for the indented text + findings_xml = findings_xml.gsub('<italics>', '') + findings_xml = findings_xml.gsub('</italics>', '') # changes the <> marks - for i in report.instance_variables - report_property = i[1..-1] - findings_xml = findings_xml.gsub("&lt;&lt;#{report_property}&gt;&gt;","#{report.instance_variable_get("@#{report_property}")}") - end + for i in report.instance_variables + report_property = i[1..-1] + findings_xml = findings_xml.gsub("&lt;&lt;#{report_property}&gt;&gt;", report.instance_variable_get("@#{report_property}").to_s) + end - if report and report.user_defined_variables - udv_hash = JSON.parse(report.user_defined_variables) - udv_hash.each do |key,value| - findings_xml = findings_xml.gsub("&lt;&lt;#{key}&gt;&gt;","#{value}") - end + if report && report.user_defined_variables + udv_hash = JSON.parse(report.user_defined_variables) + udv_hash.each do |key, value| + findings_xml = findings_xml.gsub("&lt;&lt;#{key}&gt;&gt;", value.to_s) end + end - #this is for re-upping the comment fields - findings_xml = findings_xml.gsub("<modified>","") - findings_xml = findings_xml.gsub("</modified>","") + # this is for re-upping the comment fields + findings_xml = findings_xml.gsub('<modified>', '') + findings_xml = findings_xml.gsub('</modified>', '') - findings_xml = findings_xml.gsub("<new_finding>","") - findings_xml = findings_xml.gsub("</new_finding>","") + findings_xml = findings_xml.gsub('<new_finding>', '') + findings_xml = findings_xml.gsub('</new_finding>', '') # these are for beautification - findings_xml = findings_xml.gsub("&quot;","\"") - findings_xml = findings_xml.gsub("&","&") - findings_xml = findings_xml.gsub("&lt;","<").gsub("&gt;",">") + findings_xml = findings_xml.gsub('&quot;', '"') + findings_xml = findings_xml.gsub('&', '&') + findings_xml = findings_xml.gsub('&lt;', '<').gsub('&gt;', '>') - return findings_xml + findings_xml end # verify that the markup is sane def mm_verify(hash) - error = "" - - hash.each do |k,text| - text = CGI::escapeHTML(text) - - if text - - if text.include?("*-") - elem = text.split("*-") - elem.shift - elem.each do |bl| - if !text.include?("-*") - error = "Markdown error, missing -* close tag." - end - end - end - - if text.include?("[==") - elem = text.split("[==") - elem.shift - elem.each do |bl| - if !text.include?("==]") - error = "Markdown error, missing ==] close tag." - end - end - end - - if text.include?("[~~") - elem = text.split("[~~") - elem.shift - elem.each do |bl| - if !text.include?("~~]") - error = "Markdown error, missing ~~] close tag." - end - end - end - - if text.include?("[[[") - elem = text.split("[[[") - elem.shift - elem.each do |bl| - if !text.include?("]]]") - error = "Markdown error, missing ]]] close tag." - end - end - end - end - end - return error + error = '' + + hash.each do |_k, text| + text = CGI.escapeHTML(text) + + next unless text + + if text.include?('*-') + elem = text.split('*-') + elem.shift + elem.each do |_bl| + unless text.include?('-*') + error = 'Markdown error, missing -* close tag.' + end + end + end + + if text.include?('[==') + elem = text.split('[==') + elem.shift + elem.each do |_bl| + unless text.include?('==]') + error = 'Markdown error, missing ==] close tag.' 
+ end + end + end + + if text.include?('[~~') + elem = text.split('[~~') + elem.shift + elem.each do |_bl| + unless text.include?('~~]') + error = 'Markdown error, missing ~~] close tag.' + end + end + end + + next unless text.include?('[[[') + elem = text.split('[[[') + elem.shift + elem.each do |_bl| + unless text.include?(']]]') + error = 'Markdown error, missing ]]] close tag.' + end + end + end + error end def compare_text(new_text, orig_text) - if orig_text == nil + if orig_text.nil? # there is no master finding, must be new - t = "" + t = '' t << "#{new_text}" return t - end + end if new_text == orig_text return new_text else - n_t = "" + n_t = '' n_t << "#{new_text}" return n_t @@ -393,740 +396,737 @@ def compare_text(new_text, orig_text) # CVSS helper, there is a lot of hardcoded stuff def cvss(data, is_cvssv3) + # TODO: this needs to be refactored, cvss2 is calculated everytime + unless is_cvssv3 + av = data['av'].downcase + ac = data['ac'].downcase + au = data['au'].downcase + c = data['c'].downcase + i = data['i'].downcase + a = data['a'].downcase + e = data['e'].downcase + rl = data['rl'].downcase + rc = data['rc'].downcase + cdp = data['cdp'].downcase + td = data['td'].downcase + cr = data['cr'].downcase + ir = data['ir'].downcase + ar = data['ar'].downcase + end + + # vector string + c2_vs = 'CVSS:2.0/' - # todo this needs to be refactored, cvss2 is calculated everytime - if not is_cvssv3 - av = data["av"].downcase - ac = data["ac"].downcase - au = data["au"].downcase - c = data["c"].downcase - i = data["i"].downcase - a = data["a"].downcase - e = data["e"].downcase - rl = data["rl"].downcase - rc = data["rc"].downcase - cdp = data["cdp"].downcase - td = data["td"].downcase - cr = data["cr"].downcase - ir = data["ir"].downcase - ar = data["ar"].downcase - end - - # vector string - c2_vs = "CVSS:2.0/" - - # cvssV2 - if ac == "high" - cvss_ac = 0.35 - c2_vs += "AC:H/" - elsif ac == "medium" - cvss_ac = 0.61 - c2_vs += "AC:M/" - else - cvss_ac = 0.71 - c2_vs += "AC:L/" - end - - if au == "none" - cvss_au = 0.704 - c2_vs += "AU:N/" - elsif au == "single" - cvss_au = 0.56 - c2_vs += "AU:S/" - else - cvss_au = 0.45 - c2_vs += "AU:M/" - end - - if av == "local" - cvss_av = 0.395 - c2_vs += "AV:L/" - elsif av == "adjacent network" - cvss_av = 0.646 - c2_vs += "AV:A/" - else - cvss_av = 1 - c2_vs += "AV:N/" - end - - if c == "none" - cvss_c = 0 - c2_vs += "C:N/" - elsif c == "partial" - cvss_c = 0.275 - c2_vs += "C:P/" - else - cvss_c = 0.660 - c2_vs += "C:C" - end - if i == "none" - cvss_i = 00 - c2_vs += "I:N/" - elsif i == "partial" - cvss_i = 0.275 - c2_vs += "I:P/" - else - cvss_i = 0.660 - c2_vs += "I:C/" - end - - if a == "none" - cvss_a = 0 - c2_vs += "A:N/" - elsif a == "partial" - cvss_a = 0.275 - c2_vs += "I:P/" - else - cvss_a = 0.660 - c2_vs += "I:C/" - end - - # temporal score calculations - if e == "unproven exploit exists" - cvss_e = 0.85 - c2_vs += "E:U/" - elsif e == "proof-of-concept code" - cvss_e = 0.90 - c2_vs += "E:POC/" - elsif e == "functional exploit exists" - cvss_e = 0.95 - c2_vs += "E:F/" - else - cvss_e = 1 - c2_vs += "E:H/" - end - - if rl == "official fix" - cvss_rl = 0.87 - c2_vs += "RL:OF/" - elsif rl == "temporary fix" - cvss_rl = 0.90 - c2_vs += "RL:TF/" - elsif rl == "workaround" - cvss_rl = 0.95 - c2_vs += "RL:W/" - else - cvss_rl = 1 - c2_vs += "RL:U/" - end - - if rc == "unconfirmed" - cvss_rc = 0.90 - c2_vs += "RC:UC/" - elsif rc == "uncorroborated" - cvss_rc = 0.95 - c2_vs += "RC:UR/" - else - cvss_rc = 1 - c2_vs += "RC:C/" - end - - 
#environemental - if cdp == "low" - cvss_cdp = 0.1 - c2_vs += "CDP:L/" - elsif cdp == "low-medium" - cvss_cdp = 0.3 - c2_vs += "CDP:LM/" - elsif cdp == "medium-high" - cvss_cdp = 0.4 - c2_vs += "CDP:MH/" - elsif cdp == "high" - cvss_cdp = 0.5 - c2_vs += "CDP:H/" - else - cvss_cdp = 0 - end - - if td == "none" - c2_vs += "TD:N/" - cvss_td = 0 - elsif td == "low" - c2_vs += "TD:L/" - cvss_td = 0.25 - elsif td == "medium" - c2_vs += "TD:M/" - cvss_td = 0.75 - else - c2_vs += "TD:H/" - cvss_td = 1 - end - - if cr == "low" - c2_vs += "CR:L/" - cvss_cr = 0.5 - elsif cr == "high" - c2_vs += "CR:H/" - cvss_cr = 1.51 - else - c2_vs += "CR:M/" - cvss_cr = 1 - end - - if ir == "low" - cvss_ir = 0.5 - c2_vs += "IR:L/" - elsif ir == "high" - cvss_ir = 1.51 - c2_vs += "IR:H/" - else - c2_vs += "IR:M/" - cvss_ir = 1 - end - - if ar == "low" - c2_vs += "AR:L/" - cvss_ar = 0.5 - elsif ar == "high" - c2_vs += "AR:H/" - cvss_ar = 1.51 - else - c2_vs += "AR:M/" - cvss_ar = 1 - end - - cvss_impact = 10.41 * (1 - (1 - cvss_c) * (1 - cvss_i) * (1 - cvss_a)) - cvss_exploitability = 20 * cvss_ac * cvss_au * cvss_av - if cvss_impact == 0 - cvss_impact_f = 0 - else - cvss_impact_f = 1.176 - end - cvss_base = (0.6*cvss_impact + 0.4*cvss_exploitability-1.5)*cvss_impact_f - cvss_temporal = cvss_base * cvss_e * cvss_rl * cvss_rc - cvss_modified_impact = [10, 10.41 * (1 - (1 - cvss_c * cvss_cr) * (1 - cvss_i * cvss_ir) * (1 - cvss_a * cvss_ar))].min - if cvss_modified_impact == 0 - cvss_modified_impact_f = 0 - else - cvss_modified_impact_f = 1.176 - end - cvss_modified_base = (0.6*cvss_modified_impact + 0.4*cvss_exploitability-1.5)*cvss_modified_impact_f - cvss_adjusted_temporal = cvss_modified_base * cvss_e * cvss_rl * cvss_rc - cvss_environmental = (cvss_adjusted_temporal + (10 - cvss_adjusted_temporal) * cvss_cdp) * cvss_td - if cvss_environmental - cvss_total = cvss_environmental - elsif cvss_temporal - cvss_total = cvss_temporal - else - cvss_total = cvss_base - end - - c3_vs = "CVSS3.0:/" - - # cvssV3 - if is_cvssv3 - attack_vector = data["attack_vector"].downcase - attack_complexity = data["attack_complexity"].downcase - privileges_required = data["privileges_required"].downcase - user_interaction = data["user_interaction"].downcase - scope_cvss = data["scope_cvss"].downcase - confidentiality = data["confidentiality"].downcase - integrity = data["integrity"].downcase - availability = data["availability"].downcase - exploit_maturity = data["exploit_maturity"].downcase - remeditation_level = data["remeditation_level"].downcase - report_confidence = data["report_confidence"].downcase - integrity_requirement = data["integrity_requirement"].downcase - availability_requirement = data["availability_requirement"].downcase - confidentiality_requirement = data["confidentiality_requirement"].downcase - mod_attack_vector = data["mod_attack_vector"].downcase - mod_attack_complexity = data["mod_attack_complexity"].downcase - mod_privileges_required = data["mod_privileges_required"].downcase - mod_user_interaction = data["mod_user_interaction"].downcase - mod_scope = data["mod_scope"].downcase - mod_confidentiality = data["mod_confidentiality"].downcase - mod_integrity = data["mod_integrity"].downcase - mod_availability = data["mod_availability"].downcase - - # Calculations taken from here: - # https://gist.github.com/TheCjw/23b1f8b8f1da6ceb011c - # https://www.first.org/cvss/specification-document#i8 - - #Base - if attack_vector == "network" - c3_vs += "AV:N/" - attack_vector_result = 0.85 - elsif attack_vector == "adjacent" - 
c3_vs += "AV:A/" - attack_vector_result = 0.62 - elsif attack_vector == "local" - c3_vs += "AV:L/" - attack_vector_result = 0.55 - elsif attack_vector == "physical" - c3_vs += "AV:P/" - attack_vector_result = 0.2 - end - - if attack_complexity == "high" - c3_vs += "AC:H/" - attack_complexity_result = 0.44 - elsif attack_complexity == "low" - c3_vs += "AC:L/" - attack_complexity_result = 0.77 - end - - if privileges_required == "none" - c3_vs += "PR:N/" - privileges_required_result = 0.85 - elsif privileges_required == "high" - c3_vs += "PR:H/" - if (scope_cvss == "changed" || mod_scope == "changed") - privileges_required_result = 0.50 - else - privileges_required_result = 0.27 - end - elsif privileges_required == "low" - c3_vs += "PR:L/" - if (scope_cvss == "changed" || mod_scope == "changed") - privileges_required_result = 0.68 - else - privileges_required_result = 0.62 - end - end - - if user_interaction == "none" - c3_vs += "UI:N/" - user_interaction_result = 0.85 - elsif user_interaction == "required" - c3_vs += "UI:R/" - user_interaction_result = 0.62 - end - - if scope_cvss == "unchanged" - c3_vs += "S:U/" - scope_cvss_result = 6.42 - else - c3_vs += "S:C/" - scope_cvss_result = 7.52 - end - - if confidentiality == "none" - c3_vs += "C:N/" - confidentiality_result = 0.0 - elsif confidentiality == "high" - c3_vs += "C:H/" - confidentiality_result = 0.56 - elsif confidentiality == "low" - c3_vs += "C:L/" - confidentiality_result = 0.22 - end - - if integrity == "none" - c3_vs += "I:N/" - integrity_result = 0.0 - elsif integrity == "high" - c3_vs += "I:H/" - integrity_result = 0.56 - elsif integrity == "low" - c3_vs += "I:L/" - integrity_result = 0.22 - end - - if availability == "none" - c3_vs += "A:N/" - availability_result = 0.0 - elsif availability == "high" - c3_vs += "A:H/" - availability_result = 0.56 - elsif availability == "low" - c3_vs += "A:L/" - availability_result = 0.22 - end - - #Temporal - if exploit_maturity == "not defined" - c3_vs += "E:X/" - exploit_maturity_result = 1 - elsif exploit_maturity == "high" - c3_vs += "E:H/" - exploit_maturity_result = 1 - elsif exploit_maturity == "functional exploit exists" - c3_vs += "E:F/" - exploit_maturity_result = 0.97 - elsif exploit_maturity == "proof-of-concept code" - c3_vs += "E:P/" - exploit_maturity_result = 0.94 - elsif exploit_maturity == "unproven exploit exists" - c3_vs += "E:U/" - exploit_maturity_result = 0.91 - end - - if remeditation_level == "not defined" - c3_vs += "RL:X/" - remeditation_level_result = 1 - elsif remeditation_level == "unavailable" - c3_vs += "RL:U/" - remeditation_level_result = 1 - elsif remeditation_level == "workaround" - c3_vs += "RL:W/" - remeditation_level_result = 0.97 - elsif remeditation_level == "temporary fix" - c3_vs += "RL:T/" - remeditation_level_result = 0.96 - elsif remeditation_level == "official fix" - c3_vs += "RL:O/" - remeditation_level_result = 0.95 - end - - if report_confidence == "not defined" - c3_vs += "RC:X/" - report_confidence_result = 1 - elsif report_confidence == "confirmed" - c3_vs += "RC:C/" - report_confidence_result = 1 - elsif report_confidence == "reasonable" - report_confidence_result = 0.96 - c3_vs += "RC:R/" - elsif report_confidence == "unknown" - report_confidence_result = 0.92 - c3_vs += "RC:U/" - end - - #Enviromental - if confidentiality_requirement == "not defined" - c3_vs += "CR:X/" - confidentiality_requirement_result = 1 - elsif confidentiality_requirement == "high" - c3_vs += "CR:H/" - confidentiality_requirement_result = 1.5 - elsif 
confidentiality_requirement == "medium" - c3_vs += "CR:M/" - confidentiality_requirement_result = 1 - elsif confidentiality_requirement == "low" - c3_vs += "CR:L/" - confidentiality_requirement_result = 0.5 - end - - if integrity_requirement == "not defined" - c3_vs += "IR:X/" - integrity_requirement_result = 1 - elsif integrity_requirement == "high" - c3_vs += "IR:H/" - integrity_requirement_result = 1.5 - elsif integrity_requirement == "medium" - c3_vs += "IR:M/" - integrity_requirement_result = 1 - elsif integrity_requirement == "low" - c3_vs += "IR:L/" - integrity_requirement_result = 0.5 - end - - if availability_requirement == "not defined" - c3_vs += "AR:X/" - availability_requirement_result = 1 - elsif availability_requirement == "high" - c3_vs += "AR:H/" - availability_requirement_result = 1.5 - elsif availability_requirement == "medium" - c3_vs += "AR:M/" - availability_requirement_result = 1 - elsif availability_requirement == "low" - c3_vs += "AR:L/" - availability_requirement_result = 0.5 - end - - if mod_attack_vector == "network" - c3_vs += "MAV:N/" - mod_attack_vector_result = 0.85 - elsif mod_attack_vector == "adjacent" - c3_vs += "MAV:A/" - mod_attack_vector_result = 0.62 - elsif mod_attack_vector == "local" - c3_vs += "MAV:L/" - mod_attack_vector_result = 0.55 - elsif mod_attack_vector == "physical" - c3_vs += "MAV:P/" - mod_attack_vector_result = 0.2 - elsif mod_attack_vector == "not defined" - c3_vs += "MAV:X/" - mod_attack_vector_result = attack_vector_result - end - - if mod_attack_complexity == "high" - c3_vs += "MAC:H/" - mod_attack_complexity_result = 0.44 - elsif mod_attack_complexity == "low" - c3_vs += "MAC:L/" - mod_attack_complexity_result = 0.77 - elsif mod_attack_complexity == "not defined" - c3_vs += "MAC:X/" - mod_attack_complexity_result = attack_complexity_result - end - - if mod_privileges_required == "none" - c3_vs += "MPR:N/" - mod_privileges_required_result = 0.85 - elsif mod_privileges_required == "low" - c3_vs += "MPR:L/" - if (scope_cvss == "changed" || mod_scope == "changed") - mod_privileges_required_result = 0.68 - else - mod_privileges_required_result = 0.62 - end - elsif mod_privileges_required == "high" - c3_vs += "MPR:H/" - if (scope_cvss == "changed" || mod_scope == "changed") - mod_privileges_required_result = 0.5 - else - mod_privileges_required_result = 0.27 - end - elsif mod_privileges_required == "not defined" - c3_vs += "MPR:X/" - mod_privileges_required_result = privileges_required_result - end - - if mod_user_interaction == "none" - c3_vs += "MUI:N/" - mod_user_interaction_result = 0.85 - elsif mod_user_interaction == "required" - c3_vs += "MUI:R/" - mod_user_interaction_result = 0.62 - elsif mod_user_interaction == "not defined" - c3_vs += "MUI:X/" - mod_user_interaction_result = user_interaction_result - end - - if mod_scope == "unchanged" - c3_vs += "MS:U/" - mod_scope_result = 6.42 - elsif mod_scope == "changed" - c3_vs += "MS:C/" - mod_scope_result = 7.52 - elsif mod_scope == "not defined" - c3_vs += "MS:X/" - mod_scope_result = scope_cvss_result - end - - if mod_confidentiality == "none" - c3_vs += "MC:N/" - mod_confidentiality_result = 0.0 - elsif mod_confidentiality == "high" - c3_vs += "MC:H/" - mod_confidentiality_result = 0.56 - elsif mod_confidentiality == "low" - c3_vs += "MC:L/" - mod_confidentiality_result = 0.22 - elsif mod_confidentiality == "not defined" - c3_vs += "MC:X/" - mod_confidentiality_result = confidentiality_result - end - - if mod_integrity == "none" - c3_vs += "MI:N/" - mod_integrity_result = 0.0 - 
elsif mod_integrity == "high" - c3_vs += "MI:H/" - mod_integrity_result = 0.56 - elsif mod_integrity == "low" - c3_vs += "MI:L/" - mod_integrity_result = 0.22 - elsif mod_integrity == "not defined" - c3_vs += "MI:X/" - mod_integrity_result = integrity_result - end - - if mod_availability == "none" - c3_vs += "MA:N/" - mod_availability_result = 0.0 - elsif mod_availability == "high" - c3_vs += "MA:H/" - mod_availability_result = 0.56 - elsif mod_availability == "low" - c3_vs += "MA:L/" - mod_availability_result = 0.22 - elsif mod_availability == "not defined" - c3_vs += "MA:X/" - mod_availability_result = availability_result - end - - # Base Score - cvss_exploitability = 8.22 * attack_vector_result * attack_complexity_result * privileges_required_result * user_interaction_result #exploitabilitySubScore - cvss_impact_multipler = (1 - ((1 - confidentiality_result) * (1 - integrity_result) * (1 - availability_result))) # ISCbase - - if scope_cvss == "unchanged" - cvss_impact_score = scope_cvss_result * cvss_impact_multipler - elsif scope_cvss == "changed" - cvss_impact_score = scope_cvss_result * (cvss_impact_multipler - 0.029) - 3.25 * ((cvss_impact_multipler - 0.02) ** 15) - end - - if cvss_impact_score <= 0 - cvss_base_score = 0 - end - - if scope_cvss == "unchanged" - if (cvss_exploitability + cvss_impact_score) < 10 - cvss_base_score = (((cvss_exploitability + cvss_impact_score) * 10).ceil) / 10.0 - else - cvss_base_score = 10 - end - elsif scope_cvss == "changed" - if ((cvss_exploitability + cvss_impact_score) * 1.08) < 10 - cvss_base_score = ((((cvss_exploitability + cvss_impact_score) * 1.08) * 10).ceil) / 10.0 - else - cvss_base_score = 10 - end - end - cvss_base_score = ((cvss_base_score * 10).ceil) / 10.0 - - # Temporal Score - cvss_temporal = ((cvss_base_score * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil) / 10.0 - - # Enviromental Score - cvss_mod_exploitability = 8.22 * mod_attack_vector_result * mod_attack_complexity_result * mod_privileges_required_result * mod_user_interaction_result - - if (1 - (1 - mod_confidentiality_result * confidentiality_requirement_result) * (1 - mod_integrity_result * integrity_requirement_result) * (1 - mod_availability_result * availability_requirement_result)) > 0.915 - cvss_mod_impact_multipler = 0.915 - else - cvss_mod_impact_multipler = 1 - (1 - mod_confidentiality_result * confidentiality_requirement_result) * (1 - mod_integrity_result * integrity_requirement_result) * (1 - mod_availability_result * availability_requirement_result) - end - - if mod_scope == "unchanged" - cvss_mod_impact_score = mod_scope_result * cvss_mod_impact_multipler - elsif mod_scope == "changed" - cvss_mod_impact_score = mod_scope_result * (cvss_mod_impact_multipler - 0.029) - 3.25 * ((cvss_mod_impact_multipler - 0.02) ** 15) - elsif mod_scope == "not defined" - if scope_cvss == "unchanged" - cvss_mod_impact_score = scope_cvss_result * cvss_mod_impact_multipler - elsif scope_cvss =="changed" - cvss_mod_impact_score = scope_cvss_result * (cvss_mod_impact_multipler - 0.029) - 3.25 * ((cvss_mod_impact_multipler - 0.02) ** 15) - end - end - - mod_impact_exploit_add = cvss_mod_impact_score + cvss_mod_exploitability - - if cvss_mod_impact_score <= 0 - cvss_environmental = 0 - else - if mod_scope == "not defined" - if scope_cvss == "unchanged" - if mod_impact_exploit_add > 10 - mod_impact_exploit_add = 10 - else - mod_impact_exploit_add = ((mod_impact_exploit_add * 10).ceil) / 10.0 - end - cvss_environmental = 
((mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil) / 10.0 - elsif scope_cvss == "changed" - if (((1.08 * mod_impact_exploit_add * 10).ceil) / 10.0) > 10 - mod_impact_exploit_add = 10 - else - mod_impact_exploit_add = ((1.08 * mod_impact_exploit_add * 10).ceil) / 10.0 - end - cvss_environmental = ((mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil) / 10.0 - end - end - if mod_scope == "unchanged" - if mod_impact_exploit_add > 10 - mod_impact_exploit_add = 10 - else - mod_impact_exploit_add = ((mod_impact_exploit_add * 10).ceil) / 10.0 - end - cvss_environmental = ((mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil) / 10.0 - elsif mod_scope == "changed" - if (((1.08 * mod_impact_exploit_add * 10).ceil) / 10.0) > 10 - mod_impact_exploit_add = 10 - else - mod_impact_exploit_add = ((1.08 * mod_impact_exploit_add * 10).ceil) / 10.0 - end - cvss_environmental = ((mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil) / 10.0 - end - end - end - - data["cvss_base"] = sprintf("%0.1f" % cvss_base) - data["cvss_impact"] = sprintf("%0.1f" % cvss_impact) - data["cvss_exploitability"] = sprintf("%0.1f" % cvss_exploitability) - data["cvss_temporal"] = sprintf("%0.1f" % cvss_temporal) - data["cvss_environmental"] = sprintf("%0.1f" % cvss_environmental) - data["cvss_modified_impact"] = sprintf("%0.1f" % cvss_modified_impact) - - if(is_cvssv3) - data["cvss_base_score"] = sprintf("%0.1f" % cvss_base_score) - data["cvss_impact_score"] = sprintf("%0.1f" % cvss_impact_score) - data["cvss_mod_impact_score"] = sprintf("%0.1f" % cvss_mod_impact_score) - - data["cvss_total"] = sprintf("%0.1f" % cvss_environmental) - else - data["cvss_total"] = sprintf("%0.1f" % cvss_total) - end - - data["c2_vs"] = c2_vs.chop - data["c3_vs"] = c3_vs.chop - - return data + # cvssV2 + if ac == 'high' + cvss_ac = 0.35 + c2_vs += 'AC:H/' + elsif ac == 'medium' + cvss_ac = 0.61 + c2_vs += 'AC:M/' + else + cvss_ac = 0.71 + c2_vs += 'AC:L/' + end + + if au == 'none' + cvss_au = 0.704 + c2_vs += 'AU:N/' + elsif au == 'single' + cvss_au = 0.56 + c2_vs += 'AU:S/' + else + cvss_au = 0.45 + c2_vs += 'AU:M/' + end + + if av == 'local' + cvss_av = 0.395 + c2_vs += 'AV:L/' + elsif av == 'adjacent network' + cvss_av = 0.646 + c2_vs += 'AV:A/' + else + cvss_av = 1 + c2_vs += 'AV:N/' + end + + if c == 'none' + cvss_c = 0 + c2_vs += 'C:N/' + elsif c == 'partial' + cvss_c = 0.275 + c2_vs += 'C:P/' + else + cvss_c = 0.660 + c2_vs += 'C:C' + end + if i == 'none' + cvss_i = 0o0 + c2_vs += 'I:N/' + elsif i == 'partial' + cvss_i = 0.275 + c2_vs += 'I:P/' + else + cvss_i = 0.660 + c2_vs += 'I:C/' + end + + if a == 'none' + cvss_a = 0 + c2_vs += 'A:N/' + elsif a == 'partial' + cvss_a = 0.275 + c2_vs += 'I:P/' + else + cvss_a = 0.660 + c2_vs += 'I:C/' + end + + # temporal score calculations + if e == 'unproven exploit exists' + cvss_e = 0.85 + c2_vs += 'E:U/' + elsif e == 'proof-of-concept code' + cvss_e = 0.90 + c2_vs += 'E:POC/' + elsif e == 'functional exploit exists' + cvss_e = 0.95 + c2_vs += 'E:F/' + else + cvss_e = 1 + c2_vs += 'E:H/' + end + + if rl == 'official fix' + cvss_rl = 0.87 + c2_vs += 'RL:OF/' + elsif rl == 'temporary fix' + cvss_rl = 0.90 + c2_vs += 'RL:TF/' + elsif rl == 'workaround' + cvss_rl = 0.95 + c2_vs += 'RL:W/' + else + cvss_rl = 1 + c2_vs += 'RL:U/' + end + + if rc == 
'unconfirmed' + cvss_rc = 0.90 + c2_vs += 'RC:UC/' + elsif rc == 'uncorroborated' + cvss_rc = 0.95 + c2_vs += 'RC:UR/' + else + cvss_rc = 1 + c2_vs += 'RC:C/' + end + + # environemental + if cdp == 'low' + cvss_cdp = 0.1 + c2_vs += 'CDP:L/' + elsif cdp == 'low-medium' + cvss_cdp = 0.3 + c2_vs += 'CDP:LM/' + elsif cdp == 'medium-high' + cvss_cdp = 0.4 + c2_vs += 'CDP:MH/' + elsif cdp == 'high' + cvss_cdp = 0.5 + c2_vs += 'CDP:H/' + else + cvss_cdp = 0 + end + + if td == 'none' + c2_vs += 'TD:N/' + cvss_td = 0 + elsif td == 'low' + c2_vs += 'TD:L/' + cvss_td = 0.25 + elsif td == 'medium' + c2_vs += 'TD:M/' + cvss_td = 0.75 + else + c2_vs += 'TD:H/' + cvss_td = 1 + end + + if cr == 'low' + c2_vs += 'CR:L/' + cvss_cr = 0.5 + elsif cr == 'high' + c2_vs += 'CR:H/' + cvss_cr = 1.51 + else + c2_vs += 'CR:M/' + cvss_cr = 1 + end + + if ir == 'low' + cvss_ir = 0.5 + c2_vs += 'IR:L/' + elsif ir == 'high' + cvss_ir = 1.51 + c2_vs += 'IR:H/' + else + c2_vs += 'IR:M/' + cvss_ir = 1 + end + + if ar == 'low' + c2_vs += 'AR:L/' + cvss_ar = 0.5 + elsif ar == 'high' + c2_vs += 'AR:H/' + cvss_ar = 1.51 + else + c2_vs += 'AR:M/' + cvss_ar = 1 + end + + cvss_impact = 10.41 * (1 - (1 - cvss_c) * (1 - cvss_i) * (1 - cvss_a)) + cvss_exploitability = 20 * cvss_ac * cvss_au * cvss_av + cvss_impact_f = if cvss_impact == 0 + 0 + else + 1.176 + end + cvss_base = (0.6 * cvss_impact + 0.4 * cvss_exploitability - 1.5) * cvss_impact_f + cvss_temporal = cvss_base * cvss_e * cvss_rl * cvss_rc + cvss_modified_impact = [10, 10.41 * (1 - (1 - cvss_c * cvss_cr) * (1 - cvss_i * cvss_ir) * (1 - cvss_a * cvss_ar))].min + cvss_modified_impact_f = if cvss_modified_impact == 0 + 0 + else + 1.176 + end + cvss_modified_base = (0.6 * cvss_modified_impact + 0.4 * cvss_exploitability - 1.5) * cvss_modified_impact_f + cvss_adjusted_temporal = cvss_modified_base * cvss_e * cvss_rl * cvss_rc + cvss_environmental = (cvss_adjusted_temporal + (10 - cvss_adjusted_temporal) * cvss_cdp) * cvss_td + cvss_total = if cvss_environmental + cvss_environmental + elsif cvss_temporal + cvss_temporal + else + cvss_base + end + + c3_vs = 'CVSS3.0:/' + + # cvssV3 + if is_cvssv3 + attack_vector = data['attack_vector'].downcase + attack_complexity = data['attack_complexity'].downcase + privileges_required = data['privileges_required'].downcase + user_interaction = data['user_interaction'].downcase + scope_cvss = data['scope_cvss'].downcase + confidentiality = data['confidentiality'].downcase + integrity = data['integrity'].downcase + availability = data['availability'].downcase + exploit_maturity = data['exploit_maturity'].downcase + remeditation_level = data['remeditation_level'].downcase + report_confidence = data['report_confidence'].downcase + integrity_requirement = data['integrity_requirement'].downcase + availability_requirement = data['availability_requirement'].downcase + confidentiality_requirement = data['confidentiality_requirement'].downcase + mod_attack_vector = data['mod_attack_vector'].downcase + mod_attack_complexity = data['mod_attack_complexity'].downcase + mod_privileges_required = data['mod_privileges_required'].downcase + mod_user_interaction = data['mod_user_interaction'].downcase + mod_scope = data['mod_scope'].downcase + mod_confidentiality = data['mod_confidentiality'].downcase + mod_integrity = data['mod_integrity'].downcase + mod_availability = data['mod_availability'].downcase + + # Calculations taken from here: + # https://gist.github.com/TheCjw/23b1f8b8f1da6ceb011c + # https://www.first.org/cvss/specification-document#i8 + + # Base 
+ if attack_vector == 'network' + c3_vs += 'AV:N/' + attack_vector_result = 0.85 + elsif attack_vector == 'adjacent' + c3_vs += 'AV:A/' + attack_vector_result = 0.62 + elsif attack_vector == 'local' + c3_vs += 'AV:L/' + attack_vector_result = 0.55 + elsif attack_vector == 'physical' + c3_vs += 'AV:P/' + attack_vector_result = 0.2 + end + + if attack_complexity == 'high' + c3_vs += 'AC:H/' + attack_complexity_result = 0.44 + elsif attack_complexity == 'low' + c3_vs += 'AC:L/' + attack_complexity_result = 0.77 + end + + if privileges_required == 'none' + c3_vs += 'PR:N/' + privileges_required_result = 0.85 + elsif privileges_required == 'high' + c3_vs += 'PR:H/' + if scope_cvss == 'changed' || mod_scope == 'changed' + privileges_required_result = 0.50 + else + privileges_required_result = 0.27 + end + elsif privileges_required == 'low' + c3_vs += 'PR:L/' + if scope_cvss == 'changed' || mod_scope == 'changed' + privileges_required_result = 0.68 + else + privileges_required_result = 0.62 + end + end + + if user_interaction == 'none' + c3_vs += 'UI:N/' + user_interaction_result = 0.85 + elsif user_interaction == 'required' + c3_vs += 'UI:R/' + user_interaction_result = 0.62 + end + + if scope_cvss == 'unchanged' + c3_vs += 'S:U/' + scope_cvss_result = 6.42 + else + c3_vs += 'S:C/' + scope_cvss_result = 7.52 + end + + if confidentiality == 'none' + c3_vs += 'C:N/' + confidentiality_result = 0.0 + elsif confidentiality == 'high' + c3_vs += 'C:H/' + confidentiality_result = 0.56 + elsif confidentiality == 'low' + c3_vs += 'C:L/' + confidentiality_result = 0.22 + end + + if integrity == 'none' + c3_vs += 'I:N/' + integrity_result = 0.0 + elsif integrity == 'high' + c3_vs += 'I:H/' + integrity_result = 0.56 + elsif integrity == 'low' + c3_vs += 'I:L/' + integrity_result = 0.22 + end + + if availability == 'none' + c3_vs += 'A:N/' + availability_result = 0.0 + elsif availability == 'high' + c3_vs += 'A:H/' + availability_result = 0.56 + elsif availability == 'low' + c3_vs += 'A:L/' + availability_result = 0.22 + end + + # Temporal + if exploit_maturity == 'not defined' + c3_vs += 'E:X/' + exploit_maturity_result = 1 + elsif exploit_maturity == 'high' + c3_vs += 'E:H/' + exploit_maturity_result = 1 + elsif exploit_maturity == 'functional exploit exists' + c3_vs += 'E:F/' + exploit_maturity_result = 0.97 + elsif exploit_maturity == 'proof-of-concept code' + c3_vs += 'E:P/' + exploit_maturity_result = 0.94 + elsif exploit_maturity == 'unproven exploit exists' + c3_vs += 'E:U/' + exploit_maturity_result = 0.91 + end + + if remeditation_level == 'not defined' + c3_vs += 'RL:X/' + remeditation_level_result = 1 + elsif remeditation_level == 'unavailable' + c3_vs += 'RL:U/' + remeditation_level_result = 1 + elsif remeditation_level == 'workaround' + c3_vs += 'RL:W/' + remeditation_level_result = 0.97 + elsif remeditation_level == 'temporary fix' + c3_vs += 'RL:T/' + remeditation_level_result = 0.96 + elsif remeditation_level == 'official fix' + c3_vs += 'RL:O/' + remeditation_level_result = 0.95 + end + + if report_confidence == 'not defined' + c3_vs += 'RC:X/' + report_confidence_result = 1 + elsif report_confidence == 'confirmed' + c3_vs += 'RC:C/' + report_confidence_result = 1 + elsif report_confidence == 'reasonable' + report_confidence_result = 0.96 + c3_vs += 'RC:R/' + elsif report_confidence == 'unknown' + report_confidence_result = 0.92 + c3_vs += 'RC:U/' + end + + # Enviromental + if confidentiality_requirement == 'not defined' + c3_vs += 'CR:X/' + confidentiality_requirement_result = 1 + elsif 
confidentiality_requirement == 'high' + c3_vs += 'CR:H/' + confidentiality_requirement_result = 1.5 + elsif confidentiality_requirement == 'medium' + c3_vs += 'CR:M/' + confidentiality_requirement_result = 1 + elsif confidentiality_requirement == 'low' + c3_vs += 'CR:L/' + confidentiality_requirement_result = 0.5 + end + + if integrity_requirement == 'not defined' + c3_vs += 'IR:X/' + integrity_requirement_result = 1 + elsif integrity_requirement == 'high' + c3_vs += 'IR:H/' + integrity_requirement_result = 1.5 + elsif integrity_requirement == 'medium' + c3_vs += 'IR:M/' + integrity_requirement_result = 1 + elsif integrity_requirement == 'low' + c3_vs += 'IR:L/' + integrity_requirement_result = 0.5 + end + + if availability_requirement == 'not defined' + c3_vs += 'AR:X/' + availability_requirement_result = 1 + elsif availability_requirement == 'high' + c3_vs += 'AR:H/' + availability_requirement_result = 1.5 + elsif availability_requirement == 'medium' + c3_vs += 'AR:M/' + availability_requirement_result = 1 + elsif availability_requirement == 'low' + c3_vs += 'AR:L/' + availability_requirement_result = 0.5 + end + + if mod_attack_vector == 'network' + c3_vs += 'MAV:N/' + mod_attack_vector_result = 0.85 + elsif mod_attack_vector == 'adjacent' + c3_vs += 'MAV:A/' + mod_attack_vector_result = 0.62 + elsif mod_attack_vector == 'local' + c3_vs += 'MAV:L/' + mod_attack_vector_result = 0.55 + elsif mod_attack_vector == 'physical' + c3_vs += 'MAV:P/' + mod_attack_vector_result = 0.2 + elsif mod_attack_vector == 'not defined' + c3_vs += 'MAV:X/' + mod_attack_vector_result = attack_vector_result + end + + if mod_attack_complexity == 'high' + c3_vs += 'MAC:H/' + mod_attack_complexity_result = 0.44 + elsif mod_attack_complexity == 'low' + c3_vs += 'MAC:L/' + mod_attack_complexity_result = 0.77 + elsif mod_attack_complexity == 'not defined' + c3_vs += 'MAC:X/' + mod_attack_complexity_result = attack_complexity_result + end + + if mod_privileges_required == 'none' + c3_vs += 'MPR:N/' + mod_privileges_required_result = 0.85 + elsif mod_privileges_required == 'low' + c3_vs += 'MPR:L/' + if scope_cvss == 'changed' || mod_scope == 'changed' + mod_privileges_required_result = 0.68 + else + mod_privileges_required_result = 0.62 + end + elsif mod_privileges_required == 'high' + c3_vs += 'MPR:H/' + if scope_cvss == 'changed' || mod_scope == 'changed' + mod_privileges_required_result = 0.5 + else + mod_privileges_required_result = 0.27 + end + elsif mod_privileges_required == 'not defined' + c3_vs += 'MPR:X/' + mod_privileges_required_result = privileges_required_result + end + + if mod_user_interaction == 'none' + c3_vs += 'MUI:N/' + mod_user_interaction_result = 0.85 + elsif mod_user_interaction == 'required' + c3_vs += 'MUI:R/' + mod_user_interaction_result = 0.62 + elsif mod_user_interaction == 'not defined' + c3_vs += 'MUI:X/' + mod_user_interaction_result = user_interaction_result + end + + if mod_scope == 'unchanged' + c3_vs += 'MS:U/' + mod_scope_result = 6.42 + elsif mod_scope == 'changed' + c3_vs += 'MS:C/' + mod_scope_result = 7.52 + elsif mod_scope == 'not defined' + c3_vs += 'MS:X/' + mod_scope_result = scope_cvss_result + end + + if mod_confidentiality == 'none' + c3_vs += 'MC:N/' + mod_confidentiality_result = 0.0 + elsif mod_confidentiality == 'high' + c3_vs += 'MC:H/' + mod_confidentiality_result = 0.56 + elsif mod_confidentiality == 'low' + c3_vs += 'MC:L/' + mod_confidentiality_result = 0.22 + elsif mod_confidentiality == 'not defined' + c3_vs += 'MC:X/' + mod_confidentiality_result = 
confidentiality_result + end + + if mod_integrity == 'none' + c3_vs += 'MI:N/' + mod_integrity_result = 0.0 + elsif mod_integrity == 'high' + c3_vs += 'MI:H/' + mod_integrity_result = 0.56 + elsif mod_integrity == 'low' + c3_vs += 'MI:L/' + mod_integrity_result = 0.22 + elsif mod_integrity == 'not defined' + c3_vs += 'MI:X/' + mod_integrity_result = integrity_result + end + + if mod_availability == 'none' + c3_vs += 'MA:N/' + mod_availability_result = 0.0 + elsif mod_availability == 'high' + c3_vs += 'MA:H/' + mod_availability_result = 0.56 + elsif mod_availability == 'low' + c3_vs += 'MA:L/' + mod_availability_result = 0.22 + elsif mod_availability == 'not defined' + c3_vs += 'MA:X/' + mod_availability_result = availability_result + end + + # Base Score + cvss_exploitability = 8.22 * attack_vector_result * attack_complexity_result * privileges_required_result * user_interaction_result # exploitabilitySubScore + cvss_impact_multipler = (1 - ((1 - confidentiality_result) * (1 - integrity_result) * (1 - availability_result))) # ISCbase + + if scope_cvss == 'unchanged' + cvss_impact_score = scope_cvss_result * cvss_impact_multipler + elsif scope_cvss == 'changed' + cvss_impact_score = scope_cvss_result * (cvss_impact_multipler - 0.029) - 3.25 * ((cvss_impact_multipler - 0.02)**15) + end + + cvss_base_score = 0 if cvss_impact_score <= 0 + + if scope_cvss == 'unchanged' + cvss_base_score = if (cvss_exploitability + cvss_impact_score) < 10 + ((cvss_exploitability + cvss_impact_score) * 10).ceil / 10.0 + else + 10 + end + elsif scope_cvss == 'changed' + if ((cvss_exploitability + cvss_impact_score) * 1.08) < 10 + cvss_base_score = (((cvss_exploitability + cvss_impact_score) * 1.08) * 10).ceil / 10.0 + else + cvss_base_score = 10 + end + end + cvss_base_score = (cvss_base_score * 10).ceil / 10.0 + + # Temporal Score + cvss_temporal = (cvss_base_score * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil / 10.0 + + # Enviromental Score + cvss_mod_exploitability = 8.22 * mod_attack_vector_result * mod_attack_complexity_result * mod_privileges_required_result * mod_user_interaction_result + + if (1 - (1 - mod_confidentiality_result * confidentiality_requirement_result) * (1 - mod_integrity_result * integrity_requirement_result) * (1 - mod_availability_result * availability_requirement_result)) > 0.915 + cvss_mod_impact_multipler = 0.915 + else + cvss_mod_impact_multipler = 1 - (1 - mod_confidentiality_result * confidentiality_requirement_result) * (1 - mod_integrity_result * integrity_requirement_result) * (1 - mod_availability_result * availability_requirement_result) + end + + if mod_scope == 'unchanged' + cvss_mod_impact_score = mod_scope_result * cvss_mod_impact_multipler + elsif mod_scope == 'changed' + cvss_mod_impact_score = mod_scope_result * (cvss_mod_impact_multipler - 0.029) - 3.25 * ((cvss_mod_impact_multipler - 0.02)**15) + elsif mod_scope == 'not defined' + if scope_cvss == 'unchanged' + cvss_mod_impact_score = scope_cvss_result * cvss_mod_impact_multipler + elsif scope_cvss == 'changed' + cvss_mod_impact_score = scope_cvss_result * (cvss_mod_impact_multipler - 0.029) - 3.25 * ((cvss_mod_impact_multipler - 0.02)**15) + end + end + + mod_impact_exploit_add = cvss_mod_impact_score + cvss_mod_exploitability + + if cvss_mod_impact_score <= 0 + cvss_environmental = 0 + else + if mod_scope == 'not defined' + if scope_cvss == 'unchanged' + mod_impact_exploit_add = if mod_impact_exploit_add > 10 + 10 + else + (mod_impact_exploit_add * 10).ceil / 10.0 + end 
+ cvss_environmental = (mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil / 10.0 + elsif scope_cvss == 'changed' + if ((1.08 * mod_impact_exploit_add * 10).ceil / 10.0) > 10 + mod_impact_exploit_add = 10 + else + mod_impact_exploit_add = (1.08 * mod_impact_exploit_add * 10).ceil / 10.0 + end + cvss_environmental = (mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil / 10.0 + end + end + if mod_scope == 'unchanged' + mod_impact_exploit_add = if mod_impact_exploit_add > 10 + 10 + else + (mod_impact_exploit_add * 10).ceil / 10.0 + end + cvss_environmental = (mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil / 10.0 + elsif mod_scope == 'changed' + if ((1.08 * mod_impact_exploit_add * 10).ceil / 10.0) > 10 + mod_impact_exploit_add = 10 + else + mod_impact_exploit_add = (1.08 * mod_impact_exploit_add * 10).ceil / 10.0 + end + cvss_environmental = (mod_impact_exploit_add * exploit_maturity_result * remeditation_level_result * report_confidence_result * 10).ceil / 10.0 + end + end + end + + data['cvss_base'] = sprintf(format('%0.1f', cvss_base)) + data['cvss_impact'] = sprintf(format('%0.1f', cvss_impact)) + data['cvss_exploitability'] = sprintf(format('%0.1f', cvss_exploitability)) + data['cvss_temporal'] = sprintf(format('%0.1f', cvss_temporal)) + data['cvss_environmental'] = sprintf(format('%0.1f', cvss_environmental)) + data['cvss_modified_impact'] = sprintf(format('%0.1f', cvss_modified_impact)) + + if is_cvssv3 + data['cvss_base_score'] = sprintf(format('%0.1f', cvss_base_score)) + data['cvss_impact_score'] = sprintf(format('%0.1f', cvss_impact_score)) + data['cvss_mod_impact_score'] = sprintf(format('%0.1f', cvss_mod_impact_score)) + + data['cvss_total'] = sprintf(format('%0.1f', cvss_environmental)) + else + data['cvss_total'] = sprintf(format('%0.1f', cvss_total)) + end + + data['c2_vs'] = c2_vs.chop + data['c3_vs'] = c3_vs.chop + + data end # there are three scoring types; risk, dread and cvss # this sets a score for all three in case the user switches later def convert_score(finding) - if(finding.cvss_total == nil) - puts "|!| No CVSS score exists" - finding.cvss_total = 0 - end - if(finding.dread_total == nil) - puts "|!| No DREAD score exists" - finding.dread_total = 0 - end - if(finding.risk == nil) - puts "|!| No RISK score exists" - finding.risk = 0 - end - return finding + if finding.cvss_total.nil? + puts '|!| No CVSS score exists' + finding.cvss_total = 0 + end + if finding.dread_total.nil? + puts '|!| No DREAD score exists' + finding.dread_total = 0 + end + if finding.risk.nil? 
+ puts '|!| No RISK score exists' + finding.risk = 0 + end + finding end # Get the type of scoring from the report and set the view variables, pull findings def get_scoring_findings(report) - if(report.scoring.downcase == "dread") - findings = Findings.all(:report_id => report.id, :order => [:dread_total.desc]) - dread = true - cvss = false - cvss3 = false - risk = false - riskmatrix = false - elsif(report.scoring.downcase == "cvss") - findings = Findings.all(:report_id => report.id, :order => [:cvss_total.desc]) - dread = false - cvss = true - cvss3 = false - risk = false - riskmatrix = false - elsif(report.scoring.downcase == "cvssv3") - findings = Findings.all(:report_id => report.id, :order => [:cvss_total.desc]) - dread = false - cvss = false - cvss3 = true - risk = false - riskmatrix = false - elsif(report.scoring.downcase == "riskmatrix") - findings = Findings.all(:report_id => report.id, :order => [:risk.desc]) - dread = false - cvss = false - cvss3 = false - risk = false - riskmatrix = true - else - findings = Findings.all(:report_id => report.id, :order => [:risk.desc]) - dread = false - cvss = false - cvss3 = false - risk = true - riskmatrix = false - end + if report.scoring.casecmp('dread').zero? + findings = Findings.all(report_id: report.id, order: [:dread_total.desc]) + dread = true + cvss = false + cvss3 = false + risk = false + riskmatrix = false + elsif report.scoring.casecmp('cvss').zero? + findings = Findings.all(report_id: report.id, order: [:cvss_total.desc]) + dread = false + cvss = true + cvss3 = false + risk = false + riskmatrix = false + elsif report.scoring.casecmp('cvssv3').zero? + findings = Findings.all(report_id: report.id, order: [:cvss_total.desc]) + dread = false + cvss = false + cvss3 = true + risk = false + riskmatrix = false + elsif report.scoring.casecmp('riskmatrix').zero? + findings = Findings.all(report_id: report.id, order: [:risk.desc]) + dread = false + cvss = false + cvss3 = false + risk = false + riskmatrix = true + else + findings = Findings.all(report_id: report.id, order: [:risk.desc]) + dread = false + cvss = false + cvss3 = false + risk = true + riskmatrix = false + end - return findings,dread,cvss,cvss3,risk,riskmatrix + [findings, dread, cvss, cvss3, risk, riskmatrix] end # Get the global configuration scoring algorithm and set at the report level def set_scoring(config_options) - if(config_options["dread"]) - return "dread" - elsif(config_options["cvss"]) - return "cvss" - elsif(config_options["cvssv3"]) - return "cvssv3" - elsif(config_options["riskmatrix"]) - return "riskmatrix" - end + if config_options['dread'] + return 'dread' + elsif config_options['cvss'] + return 'cvss' + elsif config_options['cvssv3'] + return 'cvssv3' + elsif config_options['riskmatrix'] + return 'riskmatrix' + end - return "risk" + 'risk' end diff --git a/helpers/xslx_xslt_generation.rb b/helpers/xslx_xslt_generation.rb new file mode 100644 index 00000000..d42d7bd9 --- /dev/null +++ b/helpers/xslx_xslt_generation.rb @@ -0,0 +1,396 @@ +# encoding: ASCII-8BIT +require 'rubygems' +require './model/master.rb' +require 'cgi' +require './helpers/helper' + +# This does the heavy lifting for taking a report template and creating the resulting XSLT template. +# It needs a lot of love but it works for now. + +# This is a custom error class to be thrown if the template fails to parse correctly. 
+class ReportingError < RuntimeError
+  attr_reader :errorString
+
+  def initialize(errorString)
+    @errorString = errorString
+  end
+end
+
+def generate_excel_xslt(excel)
+  xslts_components = {}
+
+  # Initialize the xsl
+  @top = '
+
+
+
+
+
+            progid="Excel.Sheet"
+  '
+  @bottom = ''
+
+  document = ''
+
+  ###### SHAREDSTRINGS PART #############################################################
+
+  # Excel saves all of its user-defined strings in a file called sharedStrings.xml.
+  # UDVs and serpico variables are substituted only in this file.
+
+  document = read_from_zip(excel, 'xl/sharedStrings.xml')
+  # replace {} for the sake of XSL
+  document = document.gsub('{', '{{').gsub('}', '}}')
+
+  # metacharacter pairing verification
+  worksheets = find_excel_worksheets(excel)
+  shared_strings_noko = Nokogiri::XML(document)
+  worksheets.each do |sheet|
+    ws = read_from_zip(excel, sheet)
+    # parse the worksheet so its metacharacter pairing can be verified
+    sheet_noko = Nokogiri::XML(ws)
+    verify_paired_metacharacters(['æ', '∞', '§', 'π', 'Ω', '√'], sheet_noko, shared_strings_noko)
+  end
+
+  ###########################
+
+  # Ω - used as a normal substitution variable
+
+  # let's pull out variables
+  metacharacter_splitted_strings = document.split('Ω')
+
+  count = 0
+  metacharacter_splitted_strings.each do |value_between_metachar|
+    if count.even?
+      count += 1
+      next
+    end
+
+    # we replace ΩvalueΩ with the corresponding xsl code
+    metacharacter_splitted_strings[count] = ""
+
+    count += 1
+  end
+
+  # remove all the Ω and put the document back together
+  document = metacharacter_splitted_strings.join('')
+
+  ###########################
+
+  # § - used as a user-defined variable substitution variable
+
+  # let's pull out variables
+  metacharacter_splitted_strings = document.split('§')
+
+  count = 0
+  metacharacter_splitted_strings.each do |value_between_metachar|
+    if count.even?
+      count += 1
+      next
+    end
+
+    # now, we replace the metachar with the real deal
+    #
+    #
+    #
+    metacharacter_splitted_strings[count] = ""
+    count += 1
+  end
+
+  # remove all the § and put the document back together
+  document = metacharacter_splitted_strings.join('')
+
+  ###########################
+  # √ - string comparison
+
+  # For example, '√ short_company_name:::serpico testing √' is read as "compare short_company_name to 'serpico testing' (case-insensitive) and return the result as true or false; ..."
+
+  metacharacter_splitted_strings = document.split('√')
+
+  count = 0
+  metacharacter_splitted_strings.each do |metachar|
+    if count.even?
+      count += 1
+      next
+    end
+
+    left = metachar.split(':::').first.strip
+    left = if left =~ /:/
+             'report/udv/' + left.delete(':')
+           elsif left =~ /\+/
+             left.delete('+')
+           else
+             'report/reports/' + left
+           end
+    right = metachar.split(':::').last.strip
+
+    metacharacter_splitted_strings[count] = "translate(#{left},$up,$low)=translate('#{right}',$up,$low)"
+
+    count += 1
+  end
+  document = metacharacter_splitted_strings.join('')
+
+  ###########################
+
+  # π - a replacement variable which takes a full xpath
+
+  metacharacter_splitted_strings = document.split('π')
+
+  count = 0
+  metacharacter_splitted_strings.each do |metachar|
+    if count.even?
+ count += 1 + next + end + + metacharacter_splitted_strings[count] = "" + count += 1 + end + + # remove all the π and put the document back together + document = metacharacter_splitted_strings.join('') + + # we will need this during the worksheets xslt generation + shared_strings_noko = Nokogiri::XML(document) + + ############################################################################################ + ###### WORKSHEETS PART + + worksheets = find_excel_worksheets(excel) + worksheets.each do |sheet| + document = read_from_zip(excel, sheet) + # replace {} for the sake of XSL + document = document.gsub('{', '{{').gsub('}', '}}') + sheet_noko = Nokogiri::XML(document) + verify_paired_metacharacters(['æ', '∞', '§', 'π', 'Ω', '√'], sheet_noko, shared_strings_noko) + + # æ - for each loop for table rows only + # ::: - is used for if statements within the row + # For example, 'æ findings:::X > 1 æ' is read as "for each finding with X greater than 1 create a new table row" + + + # for every cell that has a shared string... ( contains the id of the shared string in excel) + sheet_noko.xpath('//xmlns:worksheet/xmlns:sheetData/xmlns:row/xmlns:c[xmlns:v]').each do |c| + # We get the shared string value of the current cell + shared_string_value = get_shared_string_value(c, shared_strings_noko) + if shared_string_value.include?('æ') + + xpath_between_ae_characters = shared_string_value.split('æ')[1] + # we extract the iffies + if shared_string_value.include?(':::') + iffies_xpath = xpath_between_ae_characters.split(':::').drop(1) + # we delete the remaining 'æ' in the condition + iffies_xpath[-1] = iffies_xpath[-1].tr('æ', '') + xpath_between_ae_characters = xpath_between_ae_characters.split(':::')[0] + end + # we find the row in which the æ were found + parent_row = c.xpath('parent::xmlns:row') + # we create the nested xslt conditions. 
+        # This returns a Nokogiri node for the deepest nested xslt condition
+        # nested_iffies_xslt = create_nested_xslt_conditions(iffies_xpath, sheet_noko, parent = nil )
+        # //xml:msub[count(descendant::xml:msub) = 0]
+        # parent_row.children.first.add_previous_sibling(nested_iffies_xslt.at_xpath('ancestor::*[last()]'))
+        # raise ReportingError, parent_row.to_xml
+
+        # we add a for loop just above the row in which the æ were found
+        parent_row.wrap("")
+        # we add the ::: conditions
+        if iffies_xpath
+          iffies_xpath.each do |xpath|
+            parent_row.wrap("")
+          end
+        end
+        # now that the loop is ready, we replace each cell that contains the ∞ of the current row with the xpath between the ∞
+        cells_on_same_row_as_metacharacters = parent_row.xpath('xmlns:c[xmlns:v]')
+        cells_on_same_row_as_metacharacters.each do |c_on_same_row|
+          c_on_same_row_shared_string = get_shared_string_value(c_on_same_row, shared_strings_noko)
+          # final_cell_value will be used to construct the final xslt string to inject in the cell
+          final_cell_value_splitted = []
+          # if there's at least one metacharacter pair
+          if c_on_same_row_shared_string.include?('∞')
+            splitted = c_on_same_row_shared_string.split('∞')
+            final_cell_value = replace_value_between_metacharaters(splitted, '')
+            # now that we have transformed the ∞ metacharacters into xslt,
+            # we inject it in the cell where we found the "∞" metacharacters
+            modify_shared_string_value(c_on_same_row, final_cell_value, shared_strings_noko)
+            shared_string_to_inline_string(c_on_same_row, final_cell_value, sheet_noko)
+          end
+        end
+      end
+      ###################################################
+
+      # † - if variable
+      # For example, '† DREAD_SCORE > 1 †' is read as "if the DREAD_SCORE is greater than 1 then ..."
+
+      if shared_string_value.include?('†')
+        shared_string_value = get_shared_string_value(c, shared_strings_noko)
+
+        values_between_if_characters = shared_string_value.split('†')
+        final_cell_value = replace_value_between_metacharaters(values_between_if_characters, '')
+        modify_shared_string_value(c, final_cell_value, shared_strings_noko)
+        # now that we have transformed the † metacharacters into xslt,
+        # we inject it in the cell where we found the "†" metacharacters
+        shared_string_to_inline_string(c, final_cell_value, sheet_noko)
+      end
+      ############################################################
+      # ¥ - ends an if statement
+
+      next unless shared_string_value.include?('¥')
+      shared_string_value = get_shared_string_value(c, shared_strings_noko)
+      final_cell_value = shared_string_value.gsub('¥', '')
+      modify_shared_string_value(c, final_cell_value, shared_strings_noko)
+      # now that we have removed the ¥ metacharacters,
+      # we inject the result in the cell where we found the "¥" metacharacters
+      shared_string_to_inline_string(c, final_cell_value, sheet_noko)
+      #######################################################
+    end
+
+    # we clean the metacharacters in the sheet
+    sheet_noko = clean_sheet('æ', sheet_noko)
+    xslts_components[sheet] = @top + sheet_noko.to_xml + @bottom
+  end
+  # we clean the shared strings so that they are not left in the final excel
+  shared_strings_noko = clean_shared_strings('æ', shared_strings_noko)
+  # because we cleaned the shared strings, we need to update them in the returned xslts_components
+  xslts_components['xl/sharedStrings.xml'] = @top + shared_strings_noko.to_xml + @bottom
+  i = 1
+  # debug
+  xslts_components.each do |_sheet, component|
+    i += 1
+    File.open("/mnt/Kali_Shared/excel_xslt_#{i}", 'w') { |file| file.write(component) }
+  end
+  # return the xslts
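+  # A minimal usage sketch (hypothetical caller and template path, for illustration only):
+  #   components = generate_excel_xslt('./templates/findings.xlsx')
+  #   components.each do |zip_path, xslt|
+  #     # zip_path is 'xl/sharedStrings.xml' or 'xl/worksheets/sheetN.xml',
+  #     # xslt is the generated stylesheet text for that part of the workbook
+  #   end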
+  xslts_components
+end
+
+# returns the shared string value of a sheet cell
+def get_shared_string_value(sheet_cell, shared_strings_noko)
+  # ...we take the id of the shared string contained by the cell
+  shared_string_id = sheet_cell.at_xpath('xmlns:v').content
+  # ...we look up the corresponding value in the shared strings file
+  shared_string_value = shared_strings_noko.at_xpath("/xmlns:sst/xmlns:si[#{shared_string_id.to_i + 1}]/xmlns:t").content.to_s.force_encoding('ASCII-8BIT')
+end
+
+# modify the shared string value of a sheet cell
+def modify_shared_string_value(sheet_cell, string, shared_strings_noko)
+  # ...we take the id of the shared string contained by the cell
+  shared_string_id = sheet_cell.at_xpath('xmlns:v').content
+  # ...we look up the corresponding value in the shared strings file
+  shared_strings_noko.at_xpath("/xmlns:sst/xmlns:si[#{shared_string_id.to_i + 1}]/xmlns:t").content = string
+  # raise ReportingError, shared_strings_noko.at_xpath("/xmlns:sst/xmlns:si[#{shared_string_id.to_i + 1}]/xmlns:t").content
+end
+
+# verify that the given metacharacters are paired in the cells they are found in.
+def verify_paired_metacharacters(metachars, sheet_noko, shared_strings_noko)
+  metachars.each do |metachar|
+    sheet_noko.xpath('//xmlns:worksheet/xmlns:sheetData/xmlns:row/xmlns:c[xmlns:v]').each do |sheet_cell|
+      # ...we take the id of the shared string contained by the cell
+      shared_string_value = get_shared_string_value(sheet_cell, shared_strings_noko)
+      next unless shared_string_value.include?(metachar)
+      # raise ReportingError, shared_string_value
+      if shared_string_value.count(metachar).odd?
+        rox_and_column_index = sheet_cell['r']
+        raise ReportingError, "Uneven number of #{metachar} in cell #{rox_and_column_index}. This is usually caused by a mismatch in a variable."
+      end
+    end
+  end
+end
+
+# transform a cell with a shared string value into a cell with an inline string value
+# "string" var is used as the new value
+# if the cell is already an inline string, the string simply replaces the existing
+# inline string value
+# inline string cells look like this:
+#   <c r="A1" t="inlineStr">
+#     <is>
+#       <t>This is an inline string example</t>
+#     </is>
+#   </c>
+def shared_string_to_inline_string(c, string, sheet_noko)
+  # we remove the v tag that is used as an index to the shared string
+  # v_tag = c.at_xpath('xmlns:v')
+  # v_tag.remove
+  # we indicate that the cell value is an inline string
+  if c['t'] != 'inlineStr'
+    c['t'] = 'inlineStr'
+    cell_descendants = c.xpath('descendant::*')
+    # we create the needed tags and add the inline string value
+    is_tag = Nokogiri::XML::Node.new 'is', sheet_noko
+    t_tag = Nokogiri::XML.fragment("<t>#{string}</t>")
+    # we add the created tags to the cell xml
+    is_tag << t_tag
+    c << is_tag
+  else
+    c.at_xpath('xmlns:is/xmlns:t').inner_html = string
+    # raise ReportingError, string
+  end
+  c
+end
+
+# this function will delete the metacharacters from the shared strings
+# For example, "sdsdfdsf æsome_xpathæ" will become "sdsdfdsf".
+# It returns the noko shared string document with the cleaned strings
+def clean_shared_strings(metachar, shared_strings_noko)
+  shared_strings_noko.xpath("//xmlns:t[contains(.,'#{metachar}')]").each do |shared_string|
+    shared_string_value = shared_string.content.to_s.force_encoding('ASCII-8BIT')
+    splitted = shared_string_value.split(metachar)
+    count = 0
+    cleaned_shared_string = []
+    splitted.each do |_value_between_metachar|
+      cleaned_shared_string << splitted[count] if count.even?
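+      # (even indices are the text outside each metacharacter pair; the odd ones,
+      # the payload between the metacharacters, are dropped)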
+      count += 1
+    end
+    # raise ReportingError, cleaned_shared_string.join.inspect
+    shared_string.content = cleaned_shared_string.join
+  end
+  shared_strings_noko
+end
+
+# clean the given sheet of the given metachar. Returns the cleaned sheet
+def clean_sheet(metachar, sheet_noko)
+  sheet_noko.xpath("//xmlns:t[contains(.,'#{metachar}')]").each do |inline_string|
+    # inline_string_descendants = inline_string.xpath('descendant::*')
+    inline_string_value = inline_string.inner_html.to_s.force_encoding('ASCII-8BIT')
+    splitted = inline_string_value.split(metachar)
+    count = 0
+    cleaned_inline_string = []
+    splitted.each do |_value_between_metachar|
+      cleaned_inline_string << splitted[count] if count.even?
+      count += 1
+    end
+    inline_string.inner_html = cleaned_inline_string.join
+    # inline_string << inline_string_descendants
+    # raise ReportingError, cleaned_inline_string.inspect
+  end
+  sheet_noko
+end
+
+# not used in the end, but could be useful. Recursively create nested conditions from an array of xpaths used as conditions
+def create_nested_xslt_conditions(iffies, sheet_noko, parent)
+  if iffies.length == 1
+    xslt_if_node = Nokogiri::XML::Node.new 'xsl:if', sheet_noko
+    xslt_if_node['test'] = CGI.escapeHTML(iffies[0].downcase).gsub('&', '&').to_s
+    parent << xslt_if_node
+    return xslt_if_node
+  end
+  xslt_if_node = Nokogiri::XML::Node.new 'xsl:if', sheet_noko
+  xslt_if_node['test'] = CGI.escapeHTML(iffies[0].downcase).gsub('&', '&').to_s
+  parent << xslt_if_node unless parent.nil?
+  next_iffies = iffies.drop(1)
+  create_nested_xslt_conditions(next_iffies, sheet_noko, xslt_if_node)
+end
+
+def replace_value_between_metacharaters(splitted, xslt_beginning, xslt_ending)
+  count = 0
+  final_value_splitted = []
+  splitted.each do |value_between_metachars|
+    if count.odd?
+ # we replace the value between metacharacters by the corresponding xsl code + final_value_splitted.push("#{xslt_beginning}#{CGI.escapeHTML(value_between_metachars.downcase).gsub('&', '&')}#{xslt_ending}") + else + final_value_splitted.push(value_between_metachars) + end + count += 1 + end + final_value_splitted.join +end diff --git a/model/master.rb b/model/master.rb index 97449c97..9495166e 100644 --- a/model/master.rb +++ b/model/master.rb @@ -6,430 +6,417 @@ # Initialize the Master DB DataMapper.setup(:default, "sqlite://#{Dir.pwd}/db/master.db") - class TemplateFindings - include DataMapper::Resource - - property :id, Serial - property :title, String, :required => true, :length => 200 - property :damage, Integer, :required => false - property :reproducability, Integer, :required => false - property :exploitability, Integer, :required => false - property :affected_users, Integer, :required => false - property :discoverability, Integer, :required => false - property :dread_total, Integer, :required => false - property :effort, String, :required => false - property :type, String, :required => false, :length=>200 - property :overview, String, :length => 20000, :required => false - property :poc, String, :length => 20000, :required => false - property :remediation, String, :length => 20000, :required => false - property :references, String, :length => 20000, :required => false - property :approved, Boolean, :required => false, :default => true - property :risk, Integer, :required => false - property :affected_hosts, String, :length => 20000, :required => false - # CVSSv2 - property :av, String, :required => false - property :ac, String, :required => false - property :au, String, :required => false - property :c, String, :required => false - property :i, String, :required => false - property :a, String, :required => false - property :e, String, :required => false - property :rl, String, :required => false - property :rc, String, :required => false - property :cdp, String, :required => false - property :td, String, :required => false - property :cr, String, :required => false - property :ir, String, :required => false - property :ar, String, :required => false - property :cvss_base, Float, :required => false - property :cvss_impact, Float, :required => false - property :cvss_exploitability, Float, :required => false - property :cvss_temporal, Float, :required => false - property :cvss_environmental, Float, :required => false - property :cvss_modified_impact, Float, :required => false - property :cvss_total, Float, :required => false - property :ease, String, :required => false - property :c2_vs, String, :length => 300, :required => false - - #CVSSv3 - property :attack_vector, String, :required => false - property :attack_complexity, String, :required => false - property :privileges_required, String, :required => false - property :user_interaction, String, :required => false - property :scope_cvss, String, :required => false - property :confidentiality, String, :required => false - property :integrity, String, :required => false - property :availability, String, :required => false - property :exploit_maturity, String, :required => false - property :remeditation_level, String, :required => false - property :report_confidence, String, :required => false - property :confidentiality_requirement, String, :required => false - property :integrity_requirement, String, :required => false - property :availability_requirement, String, :required => false - property :mod_attack_vector, String, :required 
=> false - property :mod_attack_complexity, String, :required => false - property :mod_privileges_required, String, :required => false - property :mod_user_interaction, String, :required => false - property :mod_scope, String, :required => false - property :mod_confidentiality, String, :required => false - property :mod_integrity, String, :required => false - property :mod_availability, String, :required => false - property :cvss_base_score, Float, :required => false - property :cvss_impact_score, Float, :required => false - property :cvss_mod_impact_score, Float, :required => false - property :c3_vs, String, :length => 300, :required => false - - - # Risk Matrix - property :severity, String, :required => false - property :likelihood, String, :required => false - property :severity_rationale, String, :length => 20000, :required => false - property :likelihood_rationale, String, :length => 20000, :required => false + include DataMapper::Resource + property :id, Serial + property :title, String, required: true, length: 200 + property :damage, Integer, required: false + property :reproducability, Integer, required: false + property :exploitability, Integer, required: false + property :affected_users, Integer, required: false + property :discoverability, Integer, required: false + property :dread_total, Integer, required: false + property :effort, String, required: false + property :type, String, required: false, length: 200 + property :overview, String, length: 20_000, required: false + property :poc, String, length: 20_000, required: false + property :remediation, String, length: 20_000, required: false + property :references, String, length: 20_000, required: false + property :approved, Boolean, required: false, default: true + property :risk, Integer, required: false + property :affected_hosts, String, length: 20_000, required: false + # CVSSv2 + property :av, String, required: false + property :ac, String, required: false + property :au, String, required: false + property :c, String, required: false + property :i, String, required: false + property :a, String, required: false + property :e, String, required: false + property :rl, String, required: false + property :rc, String, required: false + property :cdp, String, required: false + property :td, String, required: false + property :cr, String, required: false + property :ir, String, required: false + property :ar, String, required: false + property :cvss_base, Float, required: false + property :cvss_impact, Float, required: false + property :cvss_exploitability, Float, required: false + property :cvss_temporal, Float, required: false + property :cvss_environmental, Float, required: false + property :cvss_modified_impact, Float, required: false + property :cvss_total, Float, required: false + property :ease, String, required: false + property :c2_vs, String, length: 300, required: false + + # CVSSv3 + property :attack_vector, String, required: false + property :attack_complexity, String, required: false + property :privileges_required, String, required: false + property :user_interaction, String, required: false + property :scope_cvss, String, required: false + property :confidentiality, String, required: false + property :integrity, String, required: false + property :availability, String, required: false + property :exploit_maturity, String, required: false + property :remeditation_level, String, required: false + property :report_confidence, String, required: false + property :confidentiality_requirement, String, required: false + 
property :integrity_requirement, String, required: false + property :availability_requirement, String, required: false + property :mod_attack_vector, String, required: false + property :mod_attack_complexity, String, required: false + property :mod_privileges_required, String, required: false + property :mod_user_interaction, String, required: false + property :mod_scope, String, required: false + property :mod_confidentiality, String, required: false + property :mod_integrity, String, required: false + property :mod_availability, String, required: false + property :cvss_base_score, Float, required: false + property :cvss_impact_score, Float, required: false + property :cvss_mod_impact_score, Float, required: false + property :c3_vs, String, length: 300, required: false + + # Risk Matrix + property :severity, String, required: false + property :likelihood, String, required: false + property :severity_rationale, String, length: 20_000, required: false + property :likelihood_rationale, String, length: 20_000, required: false end class Findings - include DataMapper::Resource - - property :id, Serial - property :finding_number, Integer, :required => false - property :report_id, Integer, :required => true - property :master_id, Integer, :required => false - property :finding_modified, Boolean, :required => false - property :title, String, :required => true, :length => 200 - property :damage, Integer, :required => false - property :reproducability, Integer, :required => false - property :exploitability, Integer, :required => false - property :affected_users, Integer, :required => false - property :discoverability, Integer, :required => false - property :effort, String, :required => false - property :type, String, :required => false, :length=>200 - property :dread_total, Integer, :required => false - property :overview, String, :length => 20000, :required => false - property :poc, String, :length => 20000, :required => false - property :remediation, String, :length => 20000, :required => false - property :notes, String, :length => 1000000, :required => false - property :assessment_type, String, :required => false - property :references, String, :length => 20000, :required => false - property :risk, Integer, :required => false - property :affected_hosts, String, :length => 1000000, :required => false - property :presentation_points, String, :length => 100000, :required => false - property :presentation_rem_points, String, :length => 100000, :required => false - - #CVSSv2 - property :av, String, :required => false - property :ac, String, :required => false - property :au, String, :required => false - property :c, String, :required => false - property :i, String, :required => false - property :a, String, :required => false - property :e, String, :required => false - property :rl, String, :required => false - property :rc, String, :required => false - property :cdp, String, :required => false - property :td, String, :required => false - property :cr, String, :required => false - property :ir, String, :required => false - property :ar, String, :required => false - property :cvss_base, Float, :required => false - property :cvss_impact, Float, :required => false - property :cvss_exploitability, Float, :required => false - property :cvss_temporal, Float, :required => false - property :cvss_environmental, Float, :required => false - property :cvss_modified_impact, Float, :required => false - property :cvss_total, Float, :required => false - property :ease, String, :required => false - property :c2_vs, 
String, :length => 300, :required => false - - # CVSSv3 - property :attack_vector, String, :required => false - property :attack_complexity, String, :required => false - property :privileges_required, String, :required => false - property :user_interaction, String, :required => false - property :scope_cvss, String, :required => false - property :confidentiality, String, :required => false - property :integrity, String, :required => false - property :availability, String, :required => false - property :exploit_maturity, String, :required => false - property :remeditation_level, String, :required => false - property :report_confidence, String, :required => false - property :confidentiality_requirement, String, :required => false - property :integrity_requirement, String, :required => false - property :availability_requirement, String, :required => false - property :mod_attack_vector, String, :required => false - property :mod_attack_complexity, String, :required => false - property :mod_privileges_required, String, :required => false - property :mod_user_interaction, String, :required => false - property :mod_scope, String, :required => false - property :mod_confidentiality, String, :required => false - property :mod_integrity, String, :required => false - property :mod_availability, String, :required => false - property :cvss_base_score, Float, :required => false - property :cvss_impact_score, Float, :required => false - property :cvss_mod_impact_score, Float, :required => false - property :c3_vs, String, :length => 300, :required => false - - # Risk Matrix - property :severity, String, :required => false - property :likelihood, String, :required => false - property :severity_rationale, String, :length => 20000, :required => false - property :likelihood_rationale, String, :length => 20000, :required => false + include DataMapper::Resource + property :id, Serial + property :finding_number, Integer, required: false + property :report_id, Integer, required: true + property :master_id, Integer, required: false + property :finding_modified, Boolean, required: false + property :title, String, required: true, length: 200 + property :damage, Integer, required: false + property :reproducability, Integer, required: false + property :exploitability, Integer, required: false + property :affected_users, Integer, required: false + property :discoverability, Integer, required: false + property :effort, String, required: false + property :type, String, required: false, length: 200 + property :dread_total, Integer, required: false + property :overview, String, length: 20_000, required: false + property :poc, String, length: 20_000, required: false + property :remediation, String, length: 20_000, required: false + property :notes, String, length: 1_000_000, required: false + property :assessment_type, String, required: false + property :references, String, length: 20_000, required: false + property :risk, Integer, required: false + property :affected_hosts, String, length: 1_000_000, required: false + property :presentation_points, String, length: 100_000, required: false + property :presentation_rem_points, String, length: 100_000, required: false + + # CVSSv2 + property :av, String, required: false + property :ac, String, required: false + property :au, String, required: false + property :c, String, required: false + property :i, String, required: false + property :a, String, required: false + property :e, String, required: false + property :rl, String, required: false + property :rc, String, required: 
false + property :cdp, String, required: false + property :td, String, required: false + property :cr, String, required: false + property :ir, String, required: false + property :ar, String, required: false + property :cvss_base, Float, required: false + property :cvss_impact, Float, required: false + property :cvss_exploitability, Float, required: false + property :cvss_temporal, Float, required: false + property :cvss_environmental, Float, required: false + property :cvss_modified_impact, Float, required: false + property :cvss_total, Float, required: false + property :ease, String, required: false + property :c2_vs, String, length: 300, required: false + + # CVSSv3 + property :attack_vector, String, required: false + property :attack_complexity, String, required: false + property :privileges_required, String, required: false + property :user_interaction, String, required: false + property :scope_cvss, String, required: false + property :confidentiality, String, required: false + property :integrity, String, required: false + property :availability, String, required: false + property :exploit_maturity, String, required: false + property :remeditation_level, String, required: false + property :report_confidence, String, required: false + property :confidentiality_requirement, String, required: false + property :integrity_requirement, String, required: false + property :availability_requirement, String, required: false + property :mod_attack_vector, String, required: false + property :mod_attack_complexity, String, required: false + property :mod_privileges_required, String, required: false + property :mod_user_interaction, String, required: false + property :mod_scope, String, required: false + property :mod_confidentiality, String, required: false + property :mod_integrity, String, required: false + property :mod_availability, String, required: false + property :cvss_base_score, Float, required: false + property :cvss_impact_score, Float, required: false + property :cvss_mod_impact_score, Float, required: false + property :c3_vs, String, length: 300, required: false + + # Risk Matrix + property :severity, String, required: false + property :likelihood, String, required: false + property :severity_rationale, String, length: 20_000, required: false + property :likelihood_rationale, String, length: 20_000, required: false end class TemplateReports - include DataMapper::Resource - - property :id, Serial - property :consultant_name, String, :required => false, :length => 200 - property :consultant_company, String, :required => false, :length => 200 - property :consultant_phone, String - property :consultant_email, String, :required => false, :length => 200 - property :contact_name, String, :required => false, :length => 200 - property :contact_phone, String - property :contact_email, String - property :contact_city, String - property :contact_address, String - property :contact_zip, String - property :full_company_name, String, :required => true, :length => 200 - property :short_company_name, String, :required => true, :length => 200 - property :company_website, String - + include DataMapper::Resource + property :id, Serial + property :consultant_name, String, required: false, length: 200 + property :consultant_company, String, required: false, length: 200 + property :consultant_phone, String + property :consultant_email, String, required: false, length: 200 + property :contact_name, String, required: false, length: 200 + property :contact_phone, String + property :contact_email, String + 
property :contact_city, String + property :contact_address, String + property :contact_zip, String + property :full_company_name, String, required: true, length: 200 + property :short_company_name, String, required: true, length: 200 + property :company_website, String end class User - include DataMapper::Resource - - property :id, Serial - property :username, String, :key => true, :length => (3..40), :required => true - property :hashed_password, String - property :salt, String - property :type, String - property :plugin, Boolean, :required => false, :default => false - property :auth_type, String, :required => false - property :created_at, DateTime, :default => DateTime.now - property :consultant_name, String, :required => false - property :consultant_company, String, :required => false - property :consultant_phone, String, :required => false - property :consultant_email, String, :required => false - property :consultant_title, String, :required => false - - attr_accessor :password - validates_presence_of :username - - def password=(pass) - @password = pass - self.salt = rand(36**12).to_s(36) unless self.salt - self.hashed_password = User.encrypt(@password, self.salt) - end - - def self.encrypt(pass, salt) - return Digest::SHA1.hexdigest(pass + salt) - end + include DataMapper::Resource - def self.authenticate(username, pass) - user = User.first(:username => username) - if user - return user.username if User.encrypt(pass, user.salt) == user.hashed_password - end + property :id, Serial + property :username, String, key: true, length: (3..40), required: true + property :hashed_password, String + property :salt, String + property :type, String + property :plugin, Boolean, required: false, default: false + property :auth_type, String, required: false + property :created_at, DateTime, default: DateTime.now + property :consultant_name, String, required: false + property :consultant_company, String, required: false + property :consultant_phone, String, required: false + property :consultant_email, String, required: false + property :consultant_title, String, required: false + + attr_accessor :password + validates_presence_of :username + + def password=(pass) + @password = pass + self.salt = rand(36**12).to_s(36) unless salt + self.hashed_password = User.encrypt(@password, salt) + end + + def self.encrypt(pass, salt) + Digest::SHA1.hexdigest(pass + salt) + end + + def self.authenticate(username, pass) + user = User.first(username: username) + if user + return user.username if User.encrypt(pass, user.salt) == user.hashed_password end - + end end class Sessions - include DataMapper::Resource - - property :id, Serial - property :session_key, String, :length => 128 - property :username, String, :length => (3..40), :required => true - - def self.is_valid?(session_key) - sessions = Sessions.first(:session_key => session_key) - return true if sessions - end + include DataMapper::Resource - def self.type(session_key) - sess = Sessions.first(:session_key => session_key) + property :id, Serial + property :session_key, String, length: 128 + property :username, String, length: (3..40), required: true - if sess - return User.first(:username => sess.username).type - end - end + def self.is_valid?(session_key) + sessions = Sessions.first(session_key: session_key) + return true if sessions + end - def self.get_username(session_key) - sess = Sessions.first(:session_key => session_key) + def self.type(session_key) + sess = Sessions.first(session_key: session_key) - if sess - return sess.username - end - end + 
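+    # resolve the session back to its user and return that user's type (role)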
return User.first(username: sess.username).type if sess + end - def self.is_plugin?(session_key) - sess = Sessions.first(:session_key => session_key) + def self.get_username(session_key) + sess = Sessions.first(session_key: session_key) - if sess - return User.first(:username => sess.username).plugin - end - end + return sess.username if sess + end + def self.is_plugin?(session_key) + sess = Sessions.first(session_key: session_key) + return User.first(username: sess.username).plugin if sess + end end # For a metasploit connector eventually class RemoteEndpoints - include DataMapper::Resource - - property :id, Serial - property :ip, String - property :port, String - property :type, String - property :report_id, Integer - property :workspace, String - property :user, String - property :pass, String + include DataMapper::Resource + property :id, Serial + property :ip, String + property :port, String + property :type, String + property :report_id, Integer + property :workspace, String + property :user, String + property :pass, String end class VulnMappings include DataMapper::Resource property :id, Serial - property :templatefindings_id, String, :required => true - property :msf_ref, String, :required => true - #property :type, String, :required => true - + property :templatefindings_id, String, required: true + property :msf_ref, String, required: true + # property :type, String, :required => true end class NessusMapping - include DataMapper::Resource + include DataMapper::Resource - property :id, Serial - property :templatefindings_id, String, :required => true - property :pluginid, String, :required => true + property :id, Serial + property :templatefindings_id, String, required: true + property :pluginid, String, required: true end class BurpMapping - include DataMapper::Resource + include DataMapper::Resource - property :id, Serial - property :templatefindings_id, String, :required => true - property :pluginid, String, :required => true + property :id, Serial + property :templatefindings_id, String, required: true + property :pluginid, String, required: true end class Reports - include DataMapper::Resource - - property :id, Serial - property :date, String, :length => 20 - property :report_type, String, :length => 200 - property :report_name, String, :length => 200 - property :assessment_type, String, :length => 200 - property :consultant_name, String, :length => 200 - property :consultant_company, String, :length => 200 - property :consultant_phone, String - property :consultant_title, String, :length => 200 - property :consultant_email, String, :length => 200 - property :contact_name, String, :length => 200 - property :contact_phone, String - property :contact_title, String, :length => 200 - property :contact_email, String, :length => 200 - property :contact_city, String - property :contact_address, String, :length => 200 - property :contact_state, String - property :contact_zip, String - property :full_company_name, String, :length => 200 - property :short_company_name, String, :length => 200 - property :company_website, String, :length => 200 - property :assessment_start_date, String, :length => 200 - property :assessment_end_date, String, :length => 200 - property :owner, String, :length => 200 - property :authors, CommaSeparatedList, :required => false, :lazy => false - property :user_defined_variables, String, :length => 10000 - property :scoring, String, :length => 100 - + include DataMapper::Resource + property :id, Serial + property :date, String, length: 20 + property 
:associated_docx_template, String, length: 200 + property :associated_excel_template, String, length: 200 + property :report_name, String, length: 200 + property :assessment_type, String, length: 200 + property :consultant_name, String, length: 200 + property :consultant_company, String, length: 200 + property :consultant_phone, String + property :consultant_title, String, length: 200 + property :consultant_email, String, length: 200 + property :contact_name, String, length: 200 + property :contact_phone, String + property :contact_title, String, length: 200 + property :contact_email, String, length: 200 + property :contact_city, String + property :contact_address, String, length: 200 + property :contact_state, String + property :contact_zip, String + property :full_company_name, String, length: 200 + property :short_company_name, String, length: 200 + property :company_website, String, length: 200 + property :assessment_start_date, String, length: 200 + property :assessment_end_date, String, length: 200 + property :owner, String, length: 200 + property :authors, CommaSeparatedList, required: false, lazy: false + #10_000 was not enough for heavy udv usage + property :user_defined_variables, String, length: 1_000_000 + property :scoring, String, length: 100 end class Attachments - include DataMapper::Resource - - property :id, Serial - property :filename, String, :length => 400 - property :filename_location, String, :length => 400 - property :report_id, String, :length => 30 - property :description, String, :length => 500 - property :caption, String, :length => 500 + include DataMapper::Resource + property :id, Serial + property :filename, String, length: 400 + property :filename_location, String, length: 400 + property :report_id, String, length: 30 + property :description, String, length: 500 + property :caption, String, length: 500 end class Charts - include DataMapper::Resource - - property :id, Serial - property :location, String, :length => 400 - property :report_id, String, :length => 30 - property :type, String, :length => 500 + include DataMapper::Resource + property :id, Serial + property :location, String, length: 400 + property :report_id, String, length: 30 + property :type, String, length: 500 end class Hosts - include DataMapper::Resource - - property :id, Serial - property :ip, String - property :port, String + include DataMapper::Resource + property :id, Serial + property :ip, String + property :port, String end class UserDefinedObjectTemplates - include DataMapper::Resource - - property :id, Serial - property :type, String, :length => 300 - property :udo_properties, String, :length => 10000 + include DataMapper::Resource + property :id, Serial + property :type, String, length: 300 + property :udo_properties, Json, length: 10_000 end class UserDefinedObjects - include DataMapper::Resource - - property :id, Serial - property :report_id, Integer, :required => true - property :template_id, Integer, :required => true - property :type, String, :length => 300 - property :udo_properties, String, :length => 10000 + include DataMapper::Resource + property :id, Serial + property :report_id, Integer, required: true + property :template_id, Integer, required: true + property :type, String, length: 300 + property :udo_properties, Json, length: 10_000 end -class Xslt - include DataMapper::Resource - - property :id, Serial - property :docx_location, String, :length => 400 - property :description, String, :length => 400 - property :xslt_location, String, :length => 400 - property 
:report_type, String, :length => 400 - property :finding_template, Boolean, :required => false, :default => false - property :status_template, Boolean, :required => false, :default => false - property :screenshot_names, String, :length => 10000 - has n, :components, 'Xslt_component', - :parent_key => [ :id ], - :child_key => [ :xslt_id ] +class ExcelXslts + include DataMapper::Resource + property :id, Serial + property :excel_location, FilePath, length: 400 + property :template_title, String, length: 400 + property :template_type, String, length: 400 + property :description, String, length: 400 + #hashmap, key = path in excel, value = path of corresponding xslt in serpico files + property :xslt_sheet_locations, Json, length: 4000 + property :xslt_shared_strings_location, String, length: 400 end -class Xslt_component - include DataMapper::Resource - - property :id, Serial - property :xslt_location, String, :length => 400 - property :name, String, :length => 400 +class DocxXslts + include DataMapper::Resource + property :id, Serial + property :docx_location, FilePath, length: 400 + property :description, String, length: 400 + property :xslt_location, String, length: 400 + property :template_title, String, length: 400 + property :template_type, String, length: 400 + property :screenshot_names, String, length: 10_000 + has n, :components, 'DocxXsltComponents', + parent_key: [:id], + child_key: [:docx_xslt_id] +end - belongs_to :xslt, 'Xslt', - :parent_key => [ :id ], - :child_key => [ :xslt_id ], - :required => true +# each docx xslt file is linked to its header/footer components +class DocxXsltComponents + include DataMapper::Resource + property :id, Serial + property :xslt_location, FilePath, length: 400 + property :name, String, length: 400 + belongs_to :docx_xslt, 'DocxXslts', + parent_key: [:id], + child_key: [:docx_xslt_id], + required: true end DataMapper.finalize diff --git a/routes/admin.rb b/routes/admin.rb index aebe4f6b..cc3456b7 100644 --- a/routes/admin.rb +++ b/routes/admin.rb @@ -4,8 +4,8 @@ config_options = JSON.parse(File.read('./config.json')) # set the report_assessment_types for <1.2 versions of Serpico -unless config_options["report_assessment_types"] - config_options["report_assessment_types"] = ["Network Internal","External","Web application","Physical","Social engineering","Configuration audit"] +unless config_options['report_assessment_types'] + config_options['report_assessment_types'] = ['Network Internal', 'External', 'Web application', 'Physical', 'Social engineering', 'Configuration audit'] end ###### @@ -13,71 +13,71 @@ ###### get '/admin/' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @admin = true - haml :admin, :encode_html => true + haml :admin, encode_html: true end get '/admin/add_user' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @admin = true - haml :add_user, :encode_html => true + haml :add_user, encode_html: true end # serve a copy of the code get '/admin/pull' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? - if File.exists?("./export.zip") - send_file "./export.zip", :filename => "export.zip", :type => 'Application/octet-stream' - else - "No copy of the code available. Run scripts/make_export.sh." - end + if File.exist?('./export.zip') + send_file './export.zip', filename: 'export.zip', type: 'Application/octet-stream' + else + 'No copy of the code available. 
Run scripts/make_export.sh.' + end end -#create DB backup +# create DB backup get '/admin/dbbackup' do - redirect to("/no_access") if not is_administrator? - bdate = Time.now() - filename = "./tmp/master" + "-" + (bdate.strftime("%Y%m%d%H%M%S") +".bak") - FileUtils::copy_file("./db/master.db", filename) - if not File.zero?(filename) - send_file filename, :filename => "#{filename}", :type => 'Application/octet-stream' + redirect to('/no_access') unless is_administrator? + bdate = Time.now + filename = './tmp/master' + '-' + (bdate.strftime('%Y%m%d%H%M%S') + '.bak') + FileUtils.copy_file('./db/master.db', filename) + if !File.zero?(filename) + send_file filename, filename: filename.to_s, type: 'Application/octet-stream' else - "No copy of the database is available. Please try again." - sleep(5) - redirect to("/admin/") - end + 'No copy of the database is available. Please try again.' + sleep(5) + redirect to('/admin/') + end end -#create backup of all attachments +# create backup of all attachments get '/admin/attacments_backup' do - bdate = Time.now() - zip_file = "./tmp/Attachments" + "-" + (bdate.strftime("%Y%m%d%H%M%S") +".zip") + bdate = Time.now + zip_file = './tmp/Attachments' + '-' + (bdate.strftime('%Y%m%d%H%M%S') + '.zip') Zip::File.open(zip_file, Zip::File::CREATE) do |zipfile| - Dir["./attachments/*" ].each do | name| - zipfile.add(name.split("/").last,name) + Dir['./attachments/*'].each do |name| + zipfile.add(name.split('/').last, name) end end - send_file zip_file, :type => 'zip', :filename => zip_file - #File.delete(rand_zip) should the temp file be deleted? + send_file zip_file, type: 'zip', filename: zip_file + # File.delete(rand_zip) should the temp file be deleted? end # Create a new user post '/admin/add_user' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? - user = User.first(:username => params[:username]) + user = User.first(username: params[:username]) if user - if params[:password] and params[:password].size > 1 + if params[:password] && (params[:password].size > 1) # we have to hardcode the input params to prevent param pollution - user.update(:type => params[:type], :auth_type => params[:auth_type], :password => params[:password]) + user.update(type: params[:type], auth_type: params[:auth_type], password: params[:password]) else # we have to hardcode the params to prevent param pollution - user.update(:type => params[:type], :auth_type => params[:auth_type]) + user.update(type: params[:type], auth_type: params[:auth_type]) end else user = User.new @@ -93,25 +93,25 @@ end get '/admin/list_user' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @admin = true @users = User.all @plugin = is_plugin? - haml :list_user, :encode_html => true + haml :list_user, encode_html: true end get '/admin/edit_user/:id' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? - @user = User.first(:id => params[:id]) - haml :add_user, :encode_html => true + @user = User.first(id: params[:id]) + haml :add_user, encode_html: true end get '/admin/delete/:id' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? - @user = User.first(:id => params[:id]) + @user = User.first(id: params[:id]) @user.destroy if @user serpico_log("User #{@user.username} deleted") @@ -120,44 +120,36 @@ end get '/admin/add_user/:id' do - if not is_administrator? + unless is_administrator? 
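+    # non-admins are only allowed through if get_report grants them access to this report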
id = params[:id] - unless get_report(id) - redirect to("/no_access") - end + redirect to('/no_access') unless get_report(id) end - @users = User.all(:order => [:username.asc]) - @report = Reports.first(:id => params[:id]) + @users = User.all(order: [:username.asc]) + @report = Reports.first(id: params[:id]) - if is_administrator? - @admin = true - end + @admin = true if is_administrator? - haml :add_user_report, :encode_html => true + haml :add_user_report, encode_html: true end post '/admin/add_user/:id' do - if not is_administrator? + unless is_administrator? id = params[:id] - unless get_report(id) - redirect to("/no_access") - end + redirect to('/no_access') unless get_report(id) end - report = Reports.first(:id => params[:id]) + report = Reports.first(id: params[:id]) - if report == nil - return "No Such Report" - end + return 'No Such Report' if report.nil? authors = report.authors - if authors - authors = authors.push(params[:author]) - else - authors = ["#{params[:author]}"] - end + authors = if authors + authors.push(params[:author]) + else + [params[:author].to_s] + end report.authors = authors report.save @@ -166,24 +158,18 @@ end get '/admin/del_user_report/:id/:author' do - if not is_administrator? + unless is_administrator? id = params[:id] - unless get_report(id) - redirect to("/no_access") - end + redirect to('/no_access') unless get_report(id) end - report = Reports.first(:id => params[:id]) + report = Reports.first(id: params[:id]) - if report == nil - return "No Such Report" - end + return 'No Such Report' if report.nil? authors = report.authors - if authors - authors = authors - ["#{params[:author]}"] - end + authors -= [params[:author].to_s] if authors report.authors = authors report.save @@ -192,517 +178,592 @@ end get '/admin/config' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @config = config_options - if config_options["cvss"] - @scoring = "cvss" - elsif config_options["cvssv3"] - @scoring = "cvssv3" - elsif config_options["dread"] - @scoring = "dread" - elsif config_options["riskmatrix"] - @scoring = "riskmatrix" - else - @scoring = "default" - end - - haml :config, :encode_html => true + @scoring = if config_options['cvss'] + 'cvss' + elsif config_options['cvssv3'] + 'cvssv3' + elsif config_options['dread'] + 'dread' + elsif config_options['riskmatrix'] + 'riskmatrix' + else + 'default' + end + + haml :config, encode_html: true end post '/admin/config' do - redirect to("/no_access") if not is_administrator? - - ft = params["finding_types"].split(",") - udv = params["user_defined_variables"].split(",") - rat = params["report_assessment_types"].split(",") - - if params["effort"] - config_options["effort"] = params["effort"].split(",") - end - - config_options["finding_types"] = ft - config_options["user_defined_variables"] = udv - config_options["port"] = params["port"] - config_options["report_assessment_types"] = rat - config_options["use_ssl"] = params["use_ssl"] ? true : false - config_options["bind_address"] = params["bind_address"] - config_options["ldap"] = params["ldap"] ? true : false - config_options["ldap_domain"] = params["ldap_domain"] - config_options["ldap_dc"] = params["ldap_dc"] - config_options["burpmap"] = params["burpmap"] ? true : false - config_options["nessusmap"] = params["nessusmap"] ? true : false - config_options["vulnmap"] = params["vulnmap"] ? true : false - config_options["logo"] = params["logo"] - config_options["auto_import"] = params["auto_import"] ? 
true : false - config_options["chart"] = params["chart"] ? true : false - config_options["threshold"] = params["threshold"] - config_options["show_exceptions"] = params["show_exceptions"] ? true : false - config_options["cvssv2_scoring_override"] = params["cvssv2_scoring_override"] ? true : false - - if params["risk_scoring"] == "CVSSv2" - config_options["dread"] = false - config_options["cvss"] = true - config_options["cvssv3"] = false - config_options["riskmatrix"] = false - elsif params["risk_scoring"] == "CVSSv3" - config_options["dread"] = false - config_options["cvss"] = false - config_options["cvssv3"] = true - config_options["riskmatrix"] = false - elsif params["risk_scoring"] == "DREAD" - config_options["dread"] = true - config_options["cvss"] = false - config_options["cvssv3"] = false - config_options["riskmatrix"] = false - elsif params["risk_scoring"] == "RISKMATRIX" - config_options["dread"] = false - config_options["cvss"] = false - config_options["cvssv3"] = false - config_options["riskmatrix"] = true + redirect to('/no_access') unless is_administrator? + + ft = params['finding_types'].split(',') + udv = params['user_defined_variables'].split(',') + rat = params['report_assessment_types'].split(',') + + config_options['effort'] = params['effort'].split(',') if params['effort'] + + config_options['finding_types'] = ft + config_options['user_defined_variables'] = udv + config_options['port'] = params['port'] + config_options['report_assessment_types'] = rat + config_options['use_ssl'] = params['use_ssl'] ? true : false + config_options['bind_address'] = params['bind_address'] + config_options['ldap'] = params['ldap'] ? true : false + config_options['ldap_domain'] = params['ldap_domain'] + config_options['ldap_dc'] = params['ldap_dc'] + config_options['burpmap'] = params['burpmap'] ? true : false + config_options['nessusmap'] = params['nessusmap'] ? true : false + config_options['vulnmap'] = params['vulnmap'] ? true : false + config_options['logo'] = params['logo'] + config_options['auto_import'] = params['auto_import'] ? true : false + config_options['chart'] = params['chart'] ? true : false + config_options['threshold'] = params['threshold'] + config_options['show_exceptions'] = params['show_exceptions'] ? true : false + config_options['cvssv2_scoring_override'] = params['cvssv2_scoring_override'] ? 
true : false + + if params['risk_scoring'] == 'CVSSv2' + config_options['dread'] = false + config_options['cvss'] = true + config_options['cvssv3'] = false + config_options['riskmatrix'] = false + elsif params['risk_scoring'] == 'CVSSv3' + config_options['dread'] = false + config_options['cvss'] = false + config_options['cvssv3'] = true + config_options['riskmatrix'] = false + elsif params['risk_scoring'] == 'DREAD' + config_options['dread'] = true + config_options['cvss'] = false + config_options['cvssv3'] = false + config_options['riskmatrix'] = false + elsif params['risk_scoring'] == 'RISKMATRIX' + config_options['dread'] = false + config_options['cvss'] = false + config_options['cvssv3'] = false + config_options['riskmatrix'] = true else - config_options["dread"] = false - config_options["cvss"] = false - config_options["cvssv3"] = false - config_options["riskmatrix"] = false + config_options['dread'] = false + config_options['cvss'] = false + config_options['cvssv3'] = false + config_options['riskmatrix'] = false end - File.open("./config.json","w") do |f| + File.open('./config.json', 'w') do |f| f.write(JSON.pretty_generate(config_options)) end - redirect to("/admin/config") + redirect to('/admin/config') end # get plugins available get '/admin/plugins' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @plugins = [] - Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib| + Dir[File.join(File.dirname(__FILE__), '../plugins/**/', '*.json')].each do |lib| @plugins.push(JSON.parse(File.open(lib).read)) - } + end @admin = true if is_administrator? @plugin = true if is_plugin? - haml :plugins, :encode_html => true + haml :plugins, encode_html: true end # enable plugins post '/admin/plugins' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @plugins = [] - Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib| + Dir[File.join(File.dirname(__FILE__), '../plugins/**/', '*.json')].each do |lib| @plugins.push(JSON.parse(File.open(lib).read)) - } + end @plugins.each do |plug| - if params[plug["name"]] - plug["enabled"] = true - File.open("./plugins/#{plug['name']}/plugin.json","w") do |f| + if params[plug['name']] + plug['enabled'] = true + File.open("./plugins/#{plug['name']}/plugin.json", 'w') do |f| f.write(JSON.pretty_generate(plug)) end else - plug["enabled"] = false - File.open("./plugins/#{plug['name']}/plugin.json","w") do |f| + plug['enabled'] = false + File.open("./plugins/#{plug['name']}/plugin.json", 'w') do |f| f.write(JSON.pretty_generate(plug)) end end end - redirect to("/admin/plugins") + redirect to('/admin/plugins') end # upload plugin zip post '/admin/plugin_upload' do - redirect to("/no_access") if not is_administrator? - redirect to("/no_access") if not is_plugin? + redirect to('/no_access') unless is_administrator? + redirect to('/no_access') unless is_plugin? # take each zip in turn - params['files'].map{ |upf| + params['files'].map do |upf| # We use a random filename rand_file = "./tmp/#{rand(36**36).to_s(36)}" # reject if the file is above a certain limit - if upf[:tempfile].size > 100000000 - return "File too large. 100MB limit" - end + return 'File too large. 
100MB limit' if upf[:tempfile].size > 100_000_000 # unzip the plugin and write it to the fs, writing the OS is possible but so is RCE - File.open(rand_file, 'wb') {|f| f.write(upf[:tempfile].read) } + File.open(rand_file, 'wb') { |f| f.write(upf[:tempfile].read) } # find the config.json file - config = "" + config = '' Zip::File.open(rand_file) do |zipfile| # read the config file zipfile.each do |entry| - if entry.name == "plugin.json" + if entry.name == 'plugin.json' configj = entry.get_input_stream.read config = JSON.parse(configj) end end end - if config == "" - return "plugin.json does not exist in zip." - end + return 'plugin.json does not exist in zip.' if config == '' Zip::File.open(rand_file) do |zipfile| # read the config file zipfile.each do |entry| # Extract to file/directory/symlink - fn = "./plugins/#{config['name']}/"+entry.name + fn = "./plugins/#{config['name']}/" + entry.name # create the directory if dne - dirj = fn.split("/") + dirj = fn.split('/') dirj.pop - unless File.directory?(dirj.join("/")) - FileUtils.mkdir_p(dirj.join("/")) - end + FileUtils.mkdir_p(dirj.join('/')) unless File.directory?(dirj.join('/')) - next if fn[-1] == "/" + next if fn[-1] == '/' # Read into memory content = entry.get_input_stream.read - File.open(fn, 'a') {|f| + File.open(fn, 'a') do |f| f.write(content) - } - + end end end - } - redirect to("/admin/plugins") + end + redirect to('/admin/plugins') end # Manage Templated Reports get '/admin/templates' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @admin = true - # Query for all Findings - @templates = Xslt.all(:order => [:report_type.asc]) - - haml :template_list, :encode_html => true + # Query for all templates + @docx_templates = DocxXslts.all(order: [:template_title.asc]) + @excel_templates = ExcelXslts.all(order: [:template_title.asc]) + haml :template_list, encode_html: true end # Manage Templated Reports get '/admin/templates/add' do - redirect to("/no_access") if not is_administrator? + redirect to('/no_access') unless is_administrator? @admin = true - haml :add_template, :encode_html => true + haml :add_template, encode_html: true end # Manage Templated Reports -get '/admin/templates/:id/download' do - redirect to("/no_access") if not is_administrator? - +get '/admin/templates/:id/download/:template_type' do + redirect to('/no_access') unless is_administrator? @admin = true - - xslt = Xslt.first(:id => params[:id]) - - send_file xslt.docx_location, :type => 'docx', :filename => "#{xslt.report_type}.docx" + if params[:template_type].casecmp('word').zero? + xslt = DocxXslts.first(id: params[:id]) + else + xslt = ExcelXslts.first(id: params[:id]) + end + send_file xslt.docx_location, type: 'docx', filename: "#{xslt.report_type}.docx" end -get '/admin/delete/templates/:id' do - redirect to("/no_access") if not is_administrator? +get '/admin/delete/templates/:id/:template_type' do + redirect to('/no_access') unless is_administrator? + if params[:template_type].casecmp('word').zero? 
+ @xslt = DocxXslts.first(id: params[:id]) - @xslt = Xslt.first(:id => params[:id]) - - if @xslt - @xslt.components.destroy - @xslt.destroy - if File.file?(@xslt.xslt_location) - File.delete(@xslt.xslt_location) - end - if File.file?(@xslt.docx_location) - File.delete(@xslt.docx_location) + if @xslt + @xslt.components.destroy + @xslt.destroy + File.delete(@xslt.xslt_location) if File.file?(@xslt.xslt_location) + File.delete(@xslt.docx_location) if File.file?(@xslt.docx_location) + end + else + @xslt = ExcelXslts.first(id: params[:id]) + if @xslt + @xslt.destroy + File.delete(@xslt.excel_location) if File.file?(@xslt.excel_location) + File.delete(@xslt.xslt_shared_strings_location) if File.file?(@xslt.xslt_shared_strings_location) + # TODO: delete worksheets temp files for excel end - end + end redirect to('/admin/templates') end - -# Manage Templated Reports +# Manage Templates post '/admin/templates/add' do - redirect to("/no_access") if not is_administrator? - + redirect to('/no_access') unless is_administrator? @admin = true - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - - redirect to("/admin/templates/add") unless params[:file] + redirect to('/admin/templates/add') unless params[:file] # reject if the file is above a certain limit - if params[:file][:tempfile].size > 100000000 - return "File too large. 10MB limit" - end - - docx = "./templates/#{rand(36**36).to_s(36)}.docx" - File.open(docx, 'wb') {|f| f.write(params[:file][:tempfile].read) } - - error = false - detail = "" - begin - xslt = generate_xslt(docx) - xslt_components = generate_xslt_components(docx) - rescue ReportingError => detail - error = true + if params[:file][:tempfile].size > 100_000_000 + return 'File too large. 100MB limit' end + # we initialize the data that are part of both excel and docx + unescaped_data = {} + unescaped_data['description'] = params[:description] + unescaped_data['template_title'] = params[:template_title] + unescaped_data['template_type'] = params[:template_type] + + # TODO : send error message if the template type is word + # but sent file isn't actually word + if params[:template_type].split(' ')[0].casecmp('word').zero? + docx_location = "./templates/#{rand(36**36).to_s(36)}.docx" + File.open(docx_location, 'wb') { |f| f.write(params[:file][:tempfile].read) } + + xslt_file_location = "./templates/docx_#{rand(36**36).to_s(36)}.xslt" + error = false + detail = '' + begin + xslt = generate_docx_xslt(docx_location) + xslt_components = generate_docx_xslt_components(docx_location) + rescue ReportingError => detail + error = true + end + if error + return "The report template you uploaded threw an error when parsing:

#{detail.errorString}" + else + # we save the produced xslt file + File.open(xslt_file_location, 'wb') { |f| f.write(xslt) } + # extract the screenshot names from the file + screenshot_names = xslt.scan(/\[!!(.*?)!!\]/) + # to prevent traversal we hardcode this + unescaped_data['docx_location'] = docx_location.to_s + unescaped_data['xslt_location'] = xslt_file_location.to_s + unescaped_data['screenshot_names'] = screenshot_names.join(',') + data = url_escape_hash(unescaped_data) + # data['finding_template'] = params[:finding_template] ? true : false + # data['status_template'] = params[:status_template] ? true : false + + @template = DocxXslts.first(template_title: data['template_title']) + if @template + @template.update(xslt_location: data['xslt_location'], docx_location: data['docx_location'], description: data['description'], screenshot_names: data['screenshot_names']) + @template.components.destroy + else + @template = DocxXslts.new(data) + @template.save + end + # create a xslt file for each component + list_components_files = [] + xslt_components.each do |component_name, component_xslt| + componentHash = {} + componentHash['xslt_location'] = "./templates/docx_xslt_component_#{rand(36**36).to_s(36)}.xslt" + componentHash['name'] = component_name + componentHash['docx_xslt'] = @template + File.open(componentHash['xslt_location'], 'wb') { |f| f.write(component_xslt) } + list_components_files.push(componentHash) + end - if error - "The report template you uploaded threw an error when parsing:

#{detail.errorString}" - else - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } - #extract the screenshot names from the file - screenshot_names = xslt.scan(/\[!!(.*?)!!\]/) - # delete the file data from the attachment - datax = Hash.new - # to prevent traversal we hardcode this - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = params[:description] - datax["report_type"] = params[:report_type] - datax["screenshot_names"] = screenshot_names.join(",") - data = url_escape_hash(datax) - data["finding_template"] = params[:finding_template] ? true : false - data["status_template"] = params[:status_template] ? true : false - - @template = Xslt.first(:report_type => data["report_type"]) - - if @template - @template.update(:xslt_location => data["xslt_location"], :docx_location => data["docx_location"], :description => data["description"], :screenshot_names => data["screenshot_names"]) - @template.components.destroy - else - @template = Xslt.new(data) - @template.save + # insert components into the db + list_components_files.each do |component| + @component = DocxXsltComponents.new(component) + @component.save + end end - - # create a xslt file for each component - list_components_files = [] - xslt_components.each do |component_name, component_xslt| - componentHash = Hash.new - componentHash['xslt_location'] = "./templates/#{rand(36**36).to_s(36)}.xslt" - componentHash['name'] = component_name - componentHash['xslt'] = @template - File.open(componentHash['xslt_location'], 'wb') {|f| f.write(component_xslt) } - list_components_files.push(componentHash) + # TODO : send error message if the template type is excel + # but sent file isn't actually excel + elsif params[:template_type].split(' ')[0].casecmp('excel').zero? 
+ excel_file_data = params[:file][:tempfile] + # to prevent traversal we hardcode this + xslt_shared_strings_file_location = "./templates/excel_shared_strings_#{rand(36**36).to_s(36)}.xslt" + excel_location = "./templates/#{rand(36**36).to_s(36)}.xlsx" + File.open(excel_location, 'wb') { |f| f.write(excel_file_data.read) } + excel_worksheets = find_excel_worksheets(excel_file_data) + error = false + detail = '' + begin + xslts = generate_excel_xslt(excel_location) + rescue ReportingError => detail + error = true end - - # insert components into the db - list_components_files.each do |component| - @component = Xslt_component.new(component) - @component.save + if error + return "The report template you uploaded threw an error when parsing: #{detail.errorString}" + else + # we save the produced xslt file for shared strings + File.open(xslt_shared_strings_file_location, 'wb') { |f| f.write(xslts['xl/sharedStrings.xml']) } + unescaped_data['xslt_shared_strings_location'] = xslt_shared_strings_file_location + unescaped_data['excel_location'] = excel_location.to_s + + # create a xslt file for each worksheets + worksheets = {} + xslts.each do |document_path, document_xslt| + if document_path =~ /sheet/ + worksheets[document_path] = "./templates/excel_worksheet_#{rand(36**36).to_s(36)}.xslt" + File.open(worksheets[document_path], 'wb') { |f| f.write(document_xslt) } + end + end + data = url_escape_hash(unescaped_data) + data['xslt_sheet_locations'] = worksheets.to_json + @template = ExcelXslts.first(template_title: data['template_title']) + if @template + @template.update(xslt_sheet_locations: worksheets.to_json, xslt_shared_strings_location: data['xslt_shared_strings_location'], excel_location: data['excel_location'], description: data['description']) + else + @template = ExcelXslts.new(data) + @template.save + end end - redirect to("/admin/templates") - - haml :add_template, :encode_html => true end + redirect to('/admin/templates') + haml :add_template, encode_html: true end # Manage Templated Reports -get '/admin/templates/:id/edit' do - redirect to("/no_access") if not is_administrator? +get '/admin/templates/:id/edit/:template_type' do + redirect to('/no_access') unless is_administrator? @admind = true - @template = Xslt.first(:id => params[:id]) + @template = if params[:template_type] == 'word' + DocxXslts.first(id: params[:id]) + else + ExcelXslts.first(id: params[:id]) + end - haml :edit_template, :encode_html => true + haml :edit_template, encode_html: true end -# Manage Templated Reports +# Manage Templates post '/admin/templates/edit' do - redirect to("/no_access") if not is_administrator? - + redirect to('/no_access') unless is_administrator? @admin = true - template = Xslt.first(:id => params[:id]) - - xslt_file = template.xslt_location - redirect to("/admin/templates/#{params[:id]}/edit") unless params[:file] + redirect to('/admin/templates/add') unless params[:file] # reject if the file is above a certain limit - if params[:file][:tempfile].size > 100000000 - return "File too large. 10MB limit" + if params[:file][:tempfile].size > 100_000_000 + return 'File too large. 
100MB limit'
   end
-
-  docx = "./templates/#{rand(36**36).to_s(36)}.docx"
-  File.open(docx, 'wb') {|f| f.write(params[:file][:tempfile].read) }
-
-  error = false
-  detail = ""
-  begin
-    xslt = generate_xslt(docx)
-    xslt_components = generate_xslt_components(docx)
+  # we initialize the data that are part of both excel and docx
+  unescaped_data = {}
+  unescaped_data['description'] = params[:description]
+  unescaped_data['template_type'] = params[:template_type]
+  unescaped_data['old_template_title'] = params[:old_template_title]
+  unescaped_data['new_template_title'] = params[:new_template_title]
+
+  # ################## WORD PART #################################
+
+  # TODO : send error message if the template type is word
+  # but sent file isn't actually word
+  if params[:template_type].split(' ')[0].casecmp('word').zero?
+    docx_location = "./templates/#{rand(36**36).to_s(36)}.docx"
+    File.open(docx_location, 'wb') { |f| f.write(params[:file][:tempfile].read) }
+
+    xslt_file_location = "./templates/docx_#{rand(36**36).to_s(36)}.xslt"
+    error = false
+    detail = ''
+    begin
+      xslt = generate_docx_xslt(docx_location)
+      xslt_components = generate_docx_xslt_components(docx_location)
     rescue ReportingError => detail
       error = true
     end
-
    if error
-    "The report template you uploaded threw an error when parsing:

#{detail.errorString}" + return "The report template you uploaded threw an error when parsing:

#{detail.errorString}" else - - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } - #extract the screenshot names from the file + # we save the produced xslt file + File.open(xslt_file_location, 'wb') { |f| f.write(xslt) } + # extract the screenshot names from the file screenshot_names = xslt.scan(/\[!!(.*?)!!\]/) - # delete the file data from the attachment - datax = Hash.new - # to prevent traversal we hardcode this - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = params[:description] - datax["report_type"] = params[:report_type] - datax["screenshot_names"] = screenshot_names.join(",") - data = url_escape_hash(datax) - data["finding_template"] = params[:finding_template] ? true : false - data["status_template"] = params[:status_template] ? true : false - - @template = Xslt.first(:report_type => data["report_type"]) - - if @template - @template.update(:xslt_location => data["xslt_location"], :docx_location => data["docx_location"], :description => data["description"], :screenshot_names => data["screenshot_names"]) - @template.components.destroy - else - @template = Xslt.new(data) - @template.save - end - - # create a xslt file for each component - list_components_files = [] - xslt_components.each do |component_name, component_xslt| - componentHash = Hash.new - componentHash['xslt_location'] = "./templates/#{rand(36**36).to_s(36)}.xslt" - componentHash['name'] = component_name - componentHash['xslt'] = @template - File.open(componentHash['xslt_location'], 'wb') {|f| f.write(component_xslt) } + # to prevent traversal we hardcode this + unescaped_data['docx_location'] = docx_location.to_s + unescaped_data['xslt_location'] = xslt_file_location.to_s + unescaped_data['screenshot_names'] = screenshot_names.join(',') + data = url_escape_hash(unescaped_data) + # data['finding_template'] = params[:finding_template] ? true : false + # data['status_template'] = params[:status_template] ? true : false + + @template = DocxXslts.first(template_title: data['old_template_title']) + if @template + @template.update(template_title: data['new_template_title'], xslt_location: data['xslt_location'], docx_location: data['docx_location'], description: data['description'], screenshot_names: data['screenshot_names']) + @template.components.destroy + else + return 'No Such Template' + end + + # create a xslt file for each component + list_components_files = [] + xslt_components.each do |component_name, component_xslt| + componentHash = {} + componentHash['xslt_location'] = "./templates/docx_xslt_component_#{rand(36**36).to_s(36)}.xslt" + componentHash['name'] = component_name + componentHash['docx_xslt'] = @template + File.open(componentHash['xslt_location'], 'wb') { |f| f.write(component_xslt) } list_components_files.push(componentHash) - end + end - # insert components into the db + # insert components into the db list_components_files.each do |component| - @component = Xslt_component.new(component) - @component.save + @component = DocxXsltComponents.new(component) + @component.save end - redirect to("/admin/templates") + end + + ################################## EXCEL PART ######################################## + # TODO : send error message if the template type is excel + # but sent file isn't actually excel + elsif params[:template_type].split(' ')[0].casecmp('excel').zero? 
+ excel_file_data = params[:file][:tempfile] + # to prevent traversal we hardcode this + xslt_shared_strings_file_location = "./templates/excel_shared_strings_#{rand(36**36).to_s(36)}.xslt" + excel_location = "./templates/#{rand(36**36).to_s(36)}.xlsx" + File.open(excel_location, 'wb') { |f| f.write(excel_file_data.read) } + excel_worksheets = find_excel_worksheets(excel_file_data) + error = false + detail = '' + begin + xslts = generate_excel_xslt(excel_location) + rescue ReportingError => detail + error = true + end + if error + return "The report template you uploaded threw an error when parsing: #{detail.errorString}" + else + # we save the produced xslt file for shared strings + File.open(xslt_shared_strings_file_location, 'wb') { |f| f.write(xslts['xl/sharedStrings.xml']) } + unescaped_data['xslt_shared_strings_location'] = xslt_shared_strings_file_location + unescaped_data['excel_location'] = excel_location.to_s + + # create a xslt file for each worksheets + worksheets = {} + xslts.each do |document_path, document_xslt| + if document_path =~ /sheet/ + worksheets[document_path] = "./templates/excel_worksheet_#{rand(36**36).to_s(36)}.xslt" + File.open(worksheets[document_path], 'wb') { |f| f.write(document_xslt) } + end + end + data = url_escape_hash(unescaped_data) + @template = ExcelXslts.first(template_title: data['old_template_title']) + + if @template + @template.update(template_title: data['new_template_title'], xslt_sheet_locations: worksheets.to_json, xslt_shared_strings_location: data['xslt_shared_strings_location'], excel_location: data['excel_location'], description: data['description']) + else + return 'No Such Template' + end + end end + redirect to('/admin/templates') + haml :add_template, encode_html: true end # get enabled plugins get '/admin/admin_plugins' do @menu = [] - Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib| + Dir[File.join(File.dirname(__FILE__), '../plugins/**/', '*.json')].each do |lib| pl = JSON.parse(File.open(lib).read) a = {} - if pl["enabled"] and pl["admin_view"] - # add the plugin to the menu - a["name"] = pl["name"] - a["description"] = pl["description"] - a["link"] = pl["link"] - @menu.push(a) - end - } - haml :enabled_plugins, :encode_html => true + next unless pl['enabled'] && pl['admin_view'] + # add the plugin to the menu + a['name'] = pl['name'] + a['description'] = pl['description'] + a['link'] = pl['link'] + @menu.push(a) + end + haml :enabled_plugins, encode_html: true end get '/admin/udo_templates' do - redirect to("/no_access") unless is_administrator? + redirect to('/no_access') unless is_administrator? # delete UDO template part if params[:delete] udo_template = UserDefinedObjectTemplates.get(params[:delete]) - if udo_template == nil - return "UDO Template not found" - end + return 'UDO Template not found' if udo_template.nil? udo_template.destroy end @udos_templates = UserDefinedObjectTemplates.all - haml :user_defined_object_templates, :encode_html => true + haml :user_defined_object_templates, encode_html: true end post '/admin/udo_templates' do - redirect to("/no_access") unless is_administrator? + redirect to('/no_access') unless is_administrator? 
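# --- Editor's illustrative sketch (not part of this patch): the handler below
# turns the submitted "property_*" fields into the JSON stored in the UDO
# template's udo_properties column. Field names and values here are hypothetical. ---
require 'json'

form = { 'object_type' => 'Host', 'property_1' => 'ip', 'property_2' => 'hostname' }
props = {}
form.each { |k, v| props[v] = '' if k =~ /property_/ && !v.to_s.empty? }
props.to_json # => "{\"ip\":\"\",\"hostname\":\"\"}"
# --- end sketch ---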
  data = url_escape_hash(request.POST)

   # Save new UDO template part
-  if data["action"] = "Save"
+  if data['action'] == 'Save'
     new_udo_template = UserDefinedObjectTemplates.new
-    new_udo_template.type = data["object_type"]
+    new_udo_template.type = data['object_type']
     udo_properties = {}
-    #we extract the udo properties from the posted data
+    # we extract the udo properties from the posted data
     data.each do |param, value|
-      if param =~ /property_/
-        if not value.to_s.empty?
-          udo_properties[value] = ""
-        end
+      next unless param =~ /property_/
+      udo_properties[value] = '' unless value.to_s.empty?
     end
-    end
     new_udo_template.udo_properties = udo_properties.to_json
-    new_udo_template.save()
+    new_udo_template.save
   end

   @udos_templates = UserDefinedObjectTemplates.all
-  haml :user_defined_object_templates, :encode_html => true
+  haml :user_defined_object_templates, encode_html: true
 end

-#edit udo template
+# edit udo template
 get '/admin/udo_template/:template_id/edit' do
-  redirect to("/no_access") unless is_administrator?
+  redirect to('/no_access') unless is_administrator?
   @udo_to_edit = UserDefinedObjectTemplates.get(params[:template_id])
-  if @udo_to_edit == nil
-    return "No such UDO Template"
-  end
+  return 'No such UDO Template' if @udo_to_edit.nil?
   @udo_to_edit_properties = JSON.parse(@udo_to_edit.udo_properties)
-  haml :udo_template_edit, :encode_html => true
+  haml :udo_template_edit, encode_html: true
 end

 post '/admin/udo_template/:template_id/edit' do
-  redirect to("/no_access") unless is_administrator?
+  redirect to('/no_access') unless is_administrator?
   data = url_escape_hash(request.POST)
   @udo_to_edit = UserDefinedObjectTemplates.get(params[:template_id])
-  if @udo_to_edit == nil
-    return "No such UDO Template"
-  end
+  return 'No such UDO Template' if @udo_to_edit.nil?
   @udo_to_edit_properties = JSON.parse(@udo_to_edit.udo_properties)
   udo_properties = {}
-  #we extract the udo properties from the posted data
+  # we extract the udo properties from the posted data
   data.each do |param1, value1|
-    if not value1.to_s.empty?
-      #we add the new properties
+    unless value1.to_s.empty?
+ # we add the new properties if param1 =~ /prop_new_\d+/ - id = param1.split("_")[2] + id = param1.split('_')[2] data.each do |param2, value2| - if param2 =~ /default_new_#{id}/ - if not value2 =~ /\/ - udo_properties[value1] = "#{value2}" - else - udo_properties[value1] = value2 - end - end + next unless param2 =~ /default_new_#{id}/ + udo_properties[value1] = if value2 !~ /\/ + "#{value2}" + else + value2 + end end - #we edit the already existing properties + # we edit the already existing properties elsif param1 =~ /prop_/ data.each do |param2, value2| - if param2 =~ /default_#{param1.split("_")[-1]}/ - if not value2 =~ /\/ - udo_properties[value1] = "#{value2}" - else - udo_properties[value1] = value2 - end - end + next unless param2 =~ /default_#{param1.split("_")[-1]}/ + udo_properties[value1] = if value2 !~ /\/ + "#{value2}" + else + value2 + end end end end - end + end @udo_to_edit.udo_properties = udo_properties.to_json - @udo_to_edit.save() - redirect to("/admin/udo_templates") + @udo_to_edit.save + redirect to('/admin/udo_templates') end diff --git a/routes/master.rb b/routes/master.rb index 8c76888b..d5c0012f 100644 --- a/routes/master.rb +++ b/routes/master.rb @@ -10,51 +10,51 @@ # These are the master routes, they control the findings database # List Available Templated Findings get '/master/findings' do - @findings = TemplateFindings.all(:order => [:title.asc]) + @findings = TemplateFindings.all(order: [:title.asc]) @master = true - @dread = config_options["dread"] - @cvss = config_options["cvss"] - @cvssv3 = config_options["cvssv3"] - @riskmatrix = config_options["riskmatrix"] + @dread = config_options['dread'] + @cvss = config_options['cvss'] + @cvssv3 = config_options['cvssv3'] + @riskmatrix = config_options['riskmatrix'] - haml :findings_list, :encode_html => true + haml :findings_list, encode_html: true end # Create a new templated finding get '/master/findings/new' do @master = true - @dread = config_options["dread"] - @cvss = config_options["cvss"] - @cvssv3 = config_options["cvssv3"] - @riskmatrix = config_options["riskmatrix"] - @nessusmap = config_options["nessusmap"] - @vulnmap = config_options["vulnmap"] - - haml :create_finding, :encode_html => true + @dread = config_options['dread'] + @cvss = config_options['cvss'] + @cvssv3 = config_options['cvssv3'] + @riskmatrix = config_options['riskmatrix'] + @nessusmap = config_options['nessusmap'] + @vulnmap = config_options['vulnmap'] + + haml :create_finding, encode_html: true end # Create the finding in the DB post '/master/findings/new' do data = url_escape_hash(request.POST) - if(config_options["dread"]) - data["dread_total"] = data["damage"].to_i + data["reproducability"].to_i + data["exploitability"].to_i + data["affected_users"].to_i + data["discoverability"].to_i + if config_options['dread'] + data['dread_total'] = data['damage'].to_i + data['reproducability'].to_i + data['exploitability'].to_i + data['affected_users'].to_i + data['discoverability'].to_i end - if(config_options["riskmatrix"]) - if data["severity"] == "Low" + if config_options['riskmatrix'] + if data['severity'] == 'Low' severity_val = 0 - elsif data["severity"] == "Medium" + elsif data['severity'] == 'Medium' severity_val = 1 - elsif data["severity"] == "High" + elsif data['severity'] == 'High' severity_val = 2 end - if data["likelihood"] == "Low" + if data['likelihood'] == 'Low' likelihood_val = 0 - elsif data["likelihood"] == "Medium" + elsif data['likelihood'] == 'Medium' likelihood_val = 1 - elsif data["likelihood"] == "High" + elsif 
data['likelihood'] == 'High' likelihood_val = 2 end @@ -62,37 +62,37 @@ end # split out any nessus mapping data - nessusdata = Hash.new() - nessusdata["pluginid"] = data["pluginid"] - data.delete("pluginid") + nessusdata = {} + nessusdata['pluginid'] = data['pluginid'] + data.delete('pluginid') # split out any vuln mapping data - vulnmapdata = Hash.new() - vulnmapdata["msf_ref"] = data["msf_ref"] - data.delete("msf_ref") + vulnmapdata = {} + vulnmapdata['msf_ref'] = data['msf_ref'] + data.delete('msf_ref') @finding = TemplateFindings.new(data) @finding.save # find the id of the newly created finding so we can link mappings to it - @newfinding = TemplateFindings.first(:title => data["title"], :order => [:id.desc], :limit => 1) + @newfinding = TemplateFindings.first(title: data['title'], order: [:id.desc], limit: 1) # save mapping data - if (config_options["nessusmap"] && nessusdata["pluginid"]) - nessusdata["templatefindings_id"] = @finding.id + if config_options['nessusmap'] && nessusdata['pluginid'] + nessusdata['templatefindings_id'] = @finding.id @nessus = NessusMapping.new(nessusdata) @nessus.save end - if (config_options["vulnmap"] && vulnmapdata["msf_ref"]) - vulnmapdata["templatefindings_id"] = @finding.id + if config_options['vulnmap'] && vulnmapdata['msf_ref'] + vulnmapdata['templatefindings_id'] = @finding.id @vulnmappings = VulnMappings.new(vulnmapdata) @vulnmappings.save end - if (config_options["cvss"]) + if config_options['cvss'] data = cvss(data, false) - elsif (config_options["cvssv3"]) + elsif config_options['cvssv3'] data = cvss(data, true) end @@ -102,38 +102,30 @@ # Edit the templated finding get '/master/findings/:id/edit' do @master = true - @dread = config_options["dread"] - @cvss = config_options["cvss"] - @cvssv3 = config_options["cvssv3"] - @riskmatrix = config_options["riskmatrix"] - @nessusmap = config_options["nessusmap"] - @burpmap = config_options["burpmap"] - @vulnmap = config_options["vulnmap"] + @dread = config_options['dread'] + @cvss = config_options['cvss'] + @cvssv3 = config_options['cvssv3'] + @riskmatrix = config_options['riskmatrix'] + @nessusmap = config_options['nessusmap'] + @burpmap = config_options['burpmap'] + @vulnmap = config_options['vulnmap'] # Check for kosher name in report name id = params[:id] # Query for Finding - @finding = TemplateFindings.first(:id => id) - @templates = Xslt.all() + @finding = TemplateFindings.first(id: id) + @templates = DocxXslts.all - if (@nessusmap) - @nessus = NessusMapping.all(:templatefindings_id => id) - end + @nessus = NessusMapping.all(templatefindings_id: id) if @nessusmap - if (@burpmap) - @burp = BurpMapping.all(:templatefindings_id => id) - end + @burp = BurpMapping.all(templatefindings_id: id) if @burpmap - if (@vulnmap) - @vulnmaps = VulnMappings.all(:templatefindings_id => id) - end + @vulnmaps = VulnMappings.all(templatefindings_id: id) if @vulnmap - if @finding == nil - return "No Such Finding" - end + return 'No Such Finding' if @finding.nil? - haml :findings_edit, :encode_html => true + haml :findings_edit, encode_html: true end # Edit a finding @@ -142,45 +134,39 @@ id = params[:id] # Query for all Findings - @finding = TemplateFindings.first(:id => id) + @finding = TemplateFindings.first(id: id) - if @finding == nil - return "No Such Finding" - end + return 'No Such Finding' if @finding.nil? 
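# --- Editor's illustrative sketch (not part of this patch): dread_total, used by
# the finding create/edit handlers around here, is a plain sum of the five
# submitted DREAD ratings. The sample scores are hypothetical. ---
scores = { 'damage' => 8, 'reproducability' => 6, 'exploitability' => 7,
           'affected_users' => 9, 'discoverability' => 5 }
keys = %w[damage reproducability exploitability affected_users discoverability]
dread_total = keys.inject(0) { |sum, k| sum + scores[k].to_i } # => 35
# --- end sketch ---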
data = url_escape_hash(request.POST) - if data["approved"] == "on" - data["approved"] = true - else - data["approved"] = false - end + data['approved'] = data['approved'] == 'on' # to prevent title's from degenerating with >, etc. [issue 237] - data["title"] = data["title"].gsub('&','&') + data['title'] = data['title'].gsub('&', '&') - if(config_options["dread"]) - data["dread_total"] = data["damage"].to_i + data["reproducability"].to_i + data["exploitability"].to_i + data["affected_users"].to_i + data["discoverability"].to_i - elsif(config_options["cvss"]) + if config_options['dread'] + data['dread_total'] = data['damage'].to_i + data['reproducability'].to_i + data['exploitability'].to_i + data['affected_users'].to_i + data['discoverability'].to_i + elsif config_options['cvss'] data = cvss(data, false) - elsif(config_options["cvssv3"]) + elsif config_options['cvssv3'] data = cvss(data, true) end - if(config_options["riskmatrix"]) - if data["severity"] == "Low" + if config_options['riskmatrix'] + if data['severity'] == 'Low' severity_val = 0 - elsif data["severity"] == "Medium" + elsif data['severity'] == 'Medium' severity_val = 1 - elsif data["severity"] == "High" + elsif data['severity'] == 'High' severity_val = 2 end - if data["likelihood"] == "Low" + if data['likelihood'] == 'Low' likelihood_val = 0 - elsif data["likelihood"] == "Medium" + elsif data['likelihood'] == 'Medium' likelihood_val = 1 - elsif data["likelihood"] == "High" + elsif data['likelihood'] == 'High' likelihood_val = 2 end @@ -188,45 +174,45 @@ end # split out any nessus mapping data - nessusdata = Hash.new() - nessusdata["pluginid"] = data["nessus_pluginid"] - data.delete("nessus_pluginid") - nessusdata["templatefindings_id"] = id + nessusdata = {} + nessusdata['pluginid'] = data['nessus_pluginid'] + data.delete('nessus_pluginid') + nessusdata['templatefindings_id'] = id # split out any burp mapping data - burpdata = Hash.new() - burpdata["pluginid"] = data["burp_pluginid"] - data.delete("burp_pluginid") - burpdata["templatefindings_id"] = id + burpdata = {} + burpdata['pluginid'] = data['burp_pluginid'] + data.delete('burp_pluginid') + burpdata['templatefindings_id'] = id # split out any vuln mapping data - vulnmappingdata = Hash.new() - vulnmappingdata["msf_ref"] = data["msf_ref"] - data.delete("msf_ref") - vulnmappingdata["templatefindings_id"] = id + vulnmappingdata = {} + vulnmappingdata['msf_ref'] = data['msf_ref'] + data.delete('msf_ref') + vulnmappingdata['templatefindings_id'] = id # Update the finding with templated finding stuff @finding.update(data) # save nessus mapping data to db - if(config_options["nessusmap"]) + if config_options['nessusmap'] @nessus = NessusMapping.new(nessusdata) @nessus.save end # save burp mapping data to db - if(config_options["burpmap"]) + if config_options['burpmap'] @burp = BurpMapping.new(burpdata) @burp.save end # save vuln mapping data to db - if(config_options["vulnmap"]) + if config_options['vulnmap'] @vulnmappings = VulnMappings.new(vulnmappingdata) @vulnmappings.save end - redirect to("/master/findings") + redirect to('/master/findings') end # Delete a template finding @@ -253,28 +239,26 @@ id = params[:id] # Query for all Findings - @finding = TemplateFindings.first(:id => id) + @finding = TemplateFindings.first(id: id) - if @finding == nil - return "No Such Finding" - end + return 'No Such Finding' if @finding.nil? 
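# --- Editor's illustrative sketch (not part of this patch): the mapping records
# saved above simply tie a scanner plugin ID to a template finding; the IDs below
# are hypothetical. ---
mapping = NessusMapping.new('pluginid' => '10863', 'templatefindings_id' => 42)
mapping.save
# A later Nessus import that sees plugin 10863 on a host will then auto-add
# template finding 42 to the report.
# --- end sketch ---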
## We have to do some hackery here for wordml - findings_xml = "" - findings_xml << "" + findings_xml = '' + findings_xml << '' findings_xml << @finding.to_xml - findings_xml << "" + findings_xml << '' findings_xml = meta_markup_unencode(findings_xml, nil) # this is the master db so we have to do a bait and switch # The other option is creating a master finding specific docx - findings_xml = findings_xml.gsub("","") - findings_xml = findings_xml.gsub(";","") + findings_xml = findings_xml.gsub('', '') + findings_xml = findings_xml.gsub(';', '') - report_xml = "#{findings_xml}" + report_xml = findings_xml.to_s - xslt_elem = Xslt.first(:finding_template => true) + xslt_elem = DocxXslts.first(template_type: "Word - Finding Template") if xslt_elem # Push the finding from XML to XSLT @@ -286,12 +270,12 @@ rand_file = "./tmp/#{rand(36**12).to_s(36)}.docx" # Create a temporary copy of the finding_template - FileUtils::copy_file(xslt_elem.docx_location,rand_file) + FileUtils.copy_file(xslt_elem.docx_location, rand_file) # modify docx - docx_modify(rand_file,docx_xml,'word/document.xml') + archive_modify(rand_file, docx_xml, 'word/document.xml') - send_file rand_file, :type => 'docx', :filename => "#{@finding.title}.docx" + send_file rand_file, type: 'docx', filename: "#{@finding.title}.docx" else "You don't have a Finding Template (did you delete the temp?) -_- ... If you're an admin go to here to add one.'" @@ -300,54 +284,54 @@ # Export a findings database get '/master/export' do - json = "" + json = '' findings = TemplateFindings.all local_filename = "./tmp/#{rand(36**12).to_s(36)}.json" - File.open(local_filename, 'w') {|f| f.write(JSON.pretty_generate(findings)) } + File.open(local_filename, 'w') { |f| f.write(JSON.pretty_generate(findings)) } - send_file local_filename, :type => 'json', :filename => "template_findings.json" + send_file local_filename, type: 'json', filename: 'template_findings.json' end # Import a findings database get '/master/import' do - haml :import_templates + haml :import_templates end # Import a findings database post '/master/import' do - redirect to("/master/import") unless params[:file] + redirect to('/master/import') unless params[:file] # reject if the file is above a certain limit - if params[:file][:tempfile].size > 100000000 - return "File too large. 100MB limit" + if params[:file][:tempfile].size > 100_000_000 + return 'File too large. 100MB limit' end json_file = params[:file][:tempfile].read line = JSON.parse(json_file) line.each do |j| - j["id"] = nil + j['id'] = nil - finding = TemplateFindings.first(:title => j["title"]) + finding = TemplateFindings.first(title: j['title']) if finding - #the finding title already exists in the database - if finding["overview"] == j["overview"] and finding["remediation"] == j["remediation"] + # the finding title already exists in the database + if (finding['overview'] == j['overview']) && (finding['remediation'] == j['remediation']) # the finding already exists, ignore it else # it's a modified finding - j["title"] = "#{j['title']} - [Uploaded Modified Templated Finding]" - params[:approved] !=nil ? j["approved"] = true : j["approved"] = false + j['title'] = "#{j['title']} - [Uploaded Modified Templated Finding]" + j['approved'] = !params[:approved].nil? ? true : false f = TemplateFindings.create(j) f.save end else - params[:approved] != nil ? j["approved"] = true : j["approved"] = false + j['approved'] = !params[:approved].nil? ? 
true : false f = TemplateFindings.first_or_create(j) f.save end end - redirect to("/master/findings") + redirect to('/master/findings') end diff --git a/routes/report.rb b/routes/report.rb index e59fd0c4..a463dce8 100644 --- a/routes/report.rb +++ b/routes/report.rb @@ -7,43 +7,42 @@ config_options = JSON.parse(File.read('./config.json')) # set the report_assessment_types for <1.2 versions of Serpico -unless config_options["report_assessment_types"] - config_options["report_assessment_types"] = ["Network Internal","External","Web application","Physical","Social engineering","Configuration audit"] +unless config_options['report_assessment_types'] + config_options['report_assessment_types'] = ['Network Internal', 'External', 'Web application', 'Physical', 'Social engineering', 'Configuration audit'] end - # List current reports get '/reports/list' do @reports = get_reports @admin = true if is_administrator? - # allow the user to set their logo in the configuration options - @logo = config_options["logo"] + # allow the user to set their logo in the configuration options + @logo = config_options['logo'] - haml :reports_list, :encode_html => true + haml :reports_list, encode_html: true end # Create a report get '/report/new' do - @templates = Xslt.all - @assessment_types = config_options["report_assessment_types"] - haml :new_report, :encode_html => true + @templates = DocxXslts.all + @assessment_types = config_options['report_assessment_types'] + haml :new_report, encode_html: true end # Create a report post '/report/new' do data = url_escape_hash(request.POST) - data["owner"] = get_username - data["date"] = DateTime.now.strftime "%m/%d/%Y" + data['owner'] = get_username + data['date'] = DateTime.now.strftime '%m/%d/%Y' @report = Reports.new(data) @report.scoring = set_scoring(config_options) @report.save # compensate for datamappers oddness - @report1 = get_report(@report.id) + # @report1 = get_report(@report.id) redirect to("/report/#{@report.id}/edit") end @@ -56,22 +55,22 @@ @report = get_report(id) return 'No Such Report' if @report.nil? @screenshot_names_from_findings = {} - #fetching screenshots names in findings + # fetching screenshots names in findings findings = Findings.all(report_id: id) findings.each do |find| next unless find.poc @screenshot_names_from_findings[find.id] = [] - #for each finding, we extract the screenshot name in the poc field. - #screenshot names are like this : [!!screenshotnames.png!!] + # for each finding, we extract the screenshot name in the poc field. + # screenshot names are like this : [!!screenshotnames.png!!] 
find.poc.to_s.split('').each do |pp| next unless pp =~ /\[\!\!/ @screenshot_names_from_findings[find.id] << pp.split('[!!')[1].split('!!]').first end end @attachments = Attachments.all(report_id: id) - #fetching screenshot names in report - xslt = Xslt.first(report_type: @report.report_type) - @screenshot_names_from_report = xslt.screenshot_names + # fetching screenshot names in report + xslt = DocxXslts.first(template_title: @report.associated_docx_template) + @screenshot_names_from_report = xslt.screenshot_names if xslt @screenshot_names_from_findings haml :list_attachments, encode_html: true end @@ -79,51 +78,49 @@ get '/report/:id/export_attachments' do id = params[:id] rand_zip = "./tmp/#{rand(36**12).to_s(36)}.zip" - @attachments = Attachments.all(:report_id => id) + @attachments = Attachments.all(report_id: id) Zip::File.open(rand_zip, Zip::File::CREATE) do |zipfile| - @attachments.each do | attachment| - zipfile.add(attachment.filename_location.gsub("./attachments/",""), attachment.filename_location ) + @attachments.each do |attachment| + zipfile.add(attachment.filename_location.gsub('./attachments/', ''), attachment.filename_location) end end - send_file rand_zip, :type => 'zip', :filename => "attachments.zip" - #File.delete(rand_zip) should the temp file be deleted? + send_file rand_zip, type: 'zip', filename: 'attachments.zip' + # File.delete(rand_zip) should the temp file be deleted? end # Restore Attachments menu get '/report/:id/restore_attachments' do @id = params[:id] @report = get_report(@id) - if @report == nil - return "No Such Report" - end - - haml :restore_attachments, :encode_html => true + return 'No Such Report' if @report.nil? + + haml :restore_attachments, encode_html: true end post '/report/:id/restore_attachments' do - id = params["id"] - #Not sure this is the best way to do this. + id = params['id'] + # Not sure this is the best way to do this. rand_zip = "./tmp/#{rand(36**12).to_s(36)}.zip" - File.open(rand_zip, 'wb') {|f| f.write(params[:file][:tempfile].read) } + File.open(rand_zip, 'wb') { |f| f.write(params[:file][:tempfile].read) } begin Zip::File.open(rand_zip) do |file| n = file.num_files n.times do |i| entry_name = file.get_name(i) file.fopen(entry_name) do |f| - clean_name = f.name.split(".")[0] - File.open("./attachments/#{clean_name}", "wb") do |data| + clean_name = f.name.split('.')[0] + File.open("./attachments/#{clean_name}", 'wb') do |data| data << f.read end end end end rescue - puts "Not a Zip file. Please try again" + puts 'Not a Zip file. Please try again' end - #File.delete(rand_zip) should the temp file be deleted? + # File.delete(rand_zip) should the temp file be deleted? redirect to("/report/#{id}/edit") end @@ -131,12 +128,12 @@ get '/report/:id/import_nessus' do id = params[:id] - @nessusmap = config_options["nessusmap"] + @nessusmap = config_options['nessusmap'] # Query for the first report matching the id @report = get_report(id) - haml :import_nessus, :encode_html => true + haml :import_nessus, encode_html: true end # auto add serpico findings if mapped to nessus ids @@ -144,66 +141,62 @@ type = params[:type] xml = params[:file][:tempfile].read - if (xml =~ /^/ && type == "nessus") + if xml =~ /^/ && type == 'nessus' import_nessus = true - vulns = parse_nessus_xml(xml, config_options["threshold"]) - elsif (xml =~ /^ 1000000 + # if params[:file][:tempfile].size > 1000000 # return "File too large. 
1MB limit" - #end + # end # Check for kosher name in report name id = params[:id] - add_findings = Array.new - dup_findings = Array.new - autoadd_hosts = Hash.new + add_findings = [] + dup_findings = [] + autoadd_hosts = {} # Query for the first report matching the report_name @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? # load all findings - @findings = TemplateFindings.all(:order => [:title.asc]) + @findings = TemplateFindings.all(order: [:title.asc]) # parse nessus xml into hash - #nessus_vulns = parse_nessus_xml(nessus_xml) + # nessus_vulns = parse_nessus_xml(nessus_xml) # determine findings to add from vuln data # host/ip is key, value is array of vuln ids vulns.keys.each do |i| vulns[i].each do |v| - - # if serpico finding id maps to nessus/burp plugin id, add to report + # if serpico finding id maps to nessus/burp plugin id, add to report if import_nessus - @mappings = NessusMapping.all(:pluginid => v) + @mappings = NessusMapping.all(pluginid: v) elsif import_burp - @mappings = BurpMapping.all(:pluginid => v) + @mappings = BurpMapping.all(pluginid: v) end # add affected hosts for each finding - if (@mappings) - @mappings.each do |m| - if autoadd_hosts[m.templatefindings_id] - # only one host/url per finding (regardless of ports and urls). this should change in the future - if not autoadd_hosts[m.templatefindings_id].include?(i) - autoadd_hosts[m.templatefindings_id] << i - end - else - autoadd_hosts[m.templatefindings_id] = [] + next unless @mappings + @mappings.each do |m| + if autoadd_hosts[m.templatefindings_id] + # only one host/url per finding (regardless of ports and urls). this should change in the future + unless autoadd_hosts[m.templatefindings_id].include?(i) autoadd_hosts[m.templatefindings_id] << i end - add_findings << m.templatefindings_id + else + autoadd_hosts[m.templatefindings_id] = [] + autoadd_hosts[m.templatefindings_id] << i end + add_findings << m.templatefindings_id end end end @@ -212,47 +205,44 @@ # create new findings from an import # TODO: This will duplicate if the user already has a nessus id mapped - if config_options["auto_import"] - vulns["findings"].each do |vuln| + if config_options['auto_import'] + vulns['findings'].each do |vuln| vuln.report_id = id vuln.save end end - if add_findings.size == 0 + if add_findings.empty? 
redirect to("/report/#{id}/findings") else @autoadd = true add_findings.each do |finding| # if the finding already exists in the report dont add - currentfindings = Findings.all(:report_id => id) + currentfindings = Findings.all(report_id: id) currentfindings.each do |cf| - if cf.master_id == finding.to_i - if not dup_findings.include?(finding.to_i) - dup_findings << finding.to_i - end - add_findings.delete(finding.to_i) - end + next unless cf.master_id == finding.to_i + dup_findings << finding.to_i unless dup_findings.include?(finding.to_i) + add_findings.delete(finding.to_i) end end @autoadd_hosts = autoadd_hosts @dup_findings = dup_findings.uniq @autoadd_findings = add_findings end - haml :findings_add, :encode_html => true + haml :findings_add, encode_html: true end # upload burp xml files to be processed get '/report/:id/import_burp' do id = params[:id] - @burpmap = config_options["burpmap"] + @burpmap = config_options['burpmap'] # Query for the first report matching the id @report = get_report(id) - haml :import_burp, :encode_html => true + haml :import_burp, encode_html: true end # Upload attachment menu @@ -263,12 +253,10 @@ # Query for the first report matching the id @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - @attachments = Attachments.all(:report_id => id) - haml :upload_attachments, :encode_html => true + @attachments = Attachments.all(report_id: id) + haml :upload_attachments, encode_html: true end post '/report/:id/upload_attachments' do @@ -277,55 +265,51 @@ # Query for the first report matching the id @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - if params[:files] == nil - redirect to("/report/#{id}/upload_attachments?no_file=1") + if params[:files].nil? + redirect to("/report/#{id}/upload_attachments?no_file=1") end - params['files'].map{ |upf| + params['files'].map do |upf| # We use a random filename - rand_file = "./attachments/#{rand(36**36).to_s(36)}" - - # reject if the file is above a certain limit - if upf[:tempfile].size > 100000000 - return "File too large. 100MB limit" - end - - # open up a file handle and write the attachment - File.open(rand_file, 'wb') {|f| f.write(upf[:tempfile].read) } - - # delete the file data from the attachment - datax = Hash.new - # to prevent traversal we hardcode this - datax["filename_location"] = "#{rand_file}" - datax["filename"] = upf[:filename] - datax["description"] = CGI::escapeHTML(upf[:filename]).gsub(" ","_").gsub("/","_").gsub("\\","_").gsub("`","_") - datax["report_id"] = id - datax["caption"] = params[:caption] - data = url_escape_hash(datax) - - @attachment = Attachments.new(data) - @attachment.save - } - redirect to("/report/#{id}/attachments") + rand_file_name = "./attachments/#{rand(36**36).to_s(36)}" + + # reject if the file is above a certain limit + return 'File too large. 
100MB limit' if upf[:tempfile].size > 100_000_000 + + # open up a file handle and write the attachment + File.open(rand_file_name, 'wb') { |f| f.write(upf[:tempfile].read) } + + # delete the file data from the attachment + datax = {} + # to prevent traversal we hardcode this + datax['filename_location'] = rand_file_name.to_s + datax['filename'] = upf[:filename] + datax['description'] = CGI.escapeHTML(upf[:filename]).tr(' ', '_').tr('/', '_').tr('\\', '_').tr('`', '_') + datax['report_id'] = id + #datax['caption'] = params[:caption] + data = url_escape_hash(datax) + + @attachment = Attachments.new(data) + @attachment.save + end + redirect to("/report/#{id}/attachments") end get '/report/:id/export_attachments' do id = params[:id] rand_zip = "./tmp/#{rand(36**12).to_s(36)}.zip" - @attachments = Attachments.all(:report_id => id) + @attachments = Attachments.all(report_id: id) Zip::File.open(rand_zip, Zip::File::CREATE) do |zipfile| - @attachments.each do | attachment| - zipfile.add(attachment.filename_location.gsub("./attachments/",""), attachment.filename_location ) + @attachments.each do |attachment| + zipfile.add(attachment.filename_location.gsub('./attachments/', ''), attachment.filename_location) end end - send_file rand_zip, :type => 'zip', :filename => "attachments.zip" - #File.delete(rand_zip) should the temp file be deleted? + send_file rand_zip, type: 'zip', filename: 'attachments.zip' + # File.delete(rand_zip) should the temp file be deleted? end # display attachment @@ -335,12 +319,10 @@ # Query for the first report matching the id @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - @attachment = Attachments.first(:report_id => id, :id => params[:att_id]) - send_file @attachment.filename_location, :filename => "#{@attachment.filename}" + @attachment = Attachments.first(report_id: id, id: params[:att_id]) + send_file @attachment.filename_location, filename: @attachment.filename.to_s end # Delete an attachment @@ -363,7 +345,6 @@ redirect to("/report/#{id}/attachments") end - # Delete a report get '/report/remove/:id' do id = params[:id] @@ -388,20 +369,17 @@ # Query for the first report matching the report_name @report = get_report(id) - @templates = Xslt.all(:order => [:report_type.asc]) + @docx_templates = DocxXslts.all(order: [:template_title.asc]) + @excel_templates = ExcelXslts.all(order: [:template_title.asc]) @plugin_side_menu = get_plugin_list - @assessment_types = config_options["report_assessment_types"] - @risk_scores = ["Risk","DREAD","CVSS","CVSSv3","RiskMatrix"] + @assessment_types = config_options['report_assessment_types'] + @risk_scores = %w[Risk DREAD CVSS CVSSv3 RiskMatrix] - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - unless @report.scoring - @report.update(:scoring => set_scoring(config_options)) - end + @report.update(scoring: set_scoring(config_options)) unless @report.scoring - haml :report_edit, :encode_html => true + haml :report_edit, encode_html: true end # Edit the Report's main information; Name, Consultant, etc. @@ -411,14 +389,11 @@ # Query for the first report matching the report_name @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? 
- haml :additional_features, :encode_html => true + haml :additional_features, encode_html: true end - # Edit a report post '/report/:id/edit' do id = params[:id] @@ -431,130 +406,110 @@ redirect to("/report/#{id}/edit") end -#Manage user defined objects. User can see all UDOs from here. +# Manage user defined objects. User can see all UDOs from here. get '/report/:id/udo/manage' do - - #if a udo has just been created, we add a message + # if a udo has just been created, we add a message @id = params[:id] @report = get_report(@id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? if params[:created_id] @udo_template_created = UserDefinedObjectTemplates.get(params[:created_id]) end @udos_templates = UserDefinedObjectTemplates.all - haml :user_defined_object_manage, :encode_html => true + haml :user_defined_object_manage, encode_html: true end -#Create new user defined objects. Get => the user chose the udo values +# Create new user defined objects. Get => the user chose the udo values get '/report/:id/udo/:udo_template_id/create' do @id = params[:id] @report = get_report(@id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? @udo_template = UserDefinedObjectTemplates.get(params[:udo_template_id]) - if @udo_template == nil - return "no Such UDO Template" - end + return 'no Such UDO Template' if @udo_template.nil? @udo_template_properties = JSON.parse(@udo_template.udo_properties) - haml :user_defined_object_create, :encode_html => true + haml :user_defined_object_create, encode_html: true end -#Create new user defined objects. Post => the UDO is stored in database +# Create new user defined objects. Post => the UDO is stored in database post '/report/:id/udo/:udo_template_id/create' do data = url_escape_hash(request.POST) id = params[:id] report = get_report(id) - if report == nil - return "No Such Report" - end + return 'No Such Report' if report.nil? udo_template_id = params[:udo_template_id] @udo_template = UserDefinedObjectTemplates.get(udo_template_id) - if @udo_template == nil - return "no Such UDO Template" - end - #we create the udo + return 'no Such UDO Template' if @udo_template.nil? + # we create the udo @udo = UserDefinedObjects.new @udo.type = @udo_template.type @udo.report_id = id @udo.template_id = udo_template_id - #we extract the properties from the posted data + # we extract the properties from the posted data udo_properties = {} data.each do |post_param, post_value| if post_param =~ /param_/ - if not post_value =~ /\/ - udo_properties[post_param.split("_")[1].downcase] = "#{post_value}" + if post_value !~ /\/ + udo_properties[post_param.split('_')[1].downcase] = "#{post_value}" else - udo_properties[post_param.split("_")[1].downcase] = post_value + udo_properties[post_param.split('_')[1].downcase] = post_value end end end @udo.udo_properties = udo_properties.to_json @udo.save - #we go back to the manage panel after creating the udo + # we go back to the manage panel after creating the udo redirect to("/report/#{id}/udo/manage?created_id=#{@udo_template.id}") end -#Edit an UDO. get => user chose the values +# Edit an UDO. get => user chose the values get '/report/:id/udo/:udo_id/edit' do id = params[:id] udo_id = params[:udo_id] - @report = get_report(id) - if @report == nil - return "No Such Report" - end - - #By asking only udos for the report we have access to, we're safe from - #edit by arbitrary user - udos = UserDefinedObjects.all(:report_id => id, :id => udo_id) - if udos.empty? 
- return "No Such UDO for this report" - end + report = get_report(id) + return 'No Such Report' if report.nil? + # By asking only udos for the report we have access to, we're safe from + # edit by arbitrary user + udos = UserDefinedObjects.all(report_id: id, id: udo_id) + return 'No Such UDO for this report' if udos.empty? @udo_to_edit = UserDefinedObjects.new - #there will always be only one udo in this collecion + # there will always be only one udo in this collecion udos.each do |udo_to_edit| @udo_to_edit = udo_to_edit end - #udo template linked to the udo to edit. We want the template in case the admin changed the properties + # udo template linked to the udo to edit. We want the template in case the admin changed the properties @udo_template = UserDefinedObjectTemplates.get(@udo_to_edit.template_id) @udo_template_properties = JSON.parse(@udo_template.udo_properties) @udo_to_edit_properties = JSON.parse(@udo_to_edit.udo_properties) - haml :user_defined_object_edit, :encode_html => true + haml :user_defined_object_edit, encode_html: true end -#Edit an UDO. Post => value stored in db +# Edit an UDO. Post => value stored in db post '/report/:id/udo/:udo_id/edit' do data = url_escape_hash(request.POST) id = params[:id] udo_id = params[:udo_id] report = get_report(id) - if report == nil - return "No Such Report" - end - #By asking only udos for the report we have access to, we're safe from - #edit by arbitrary user - udos = UserDefinedObjects.all(:report_id => id, :id => udo_id) - if udos.empty? - return "No Such UDO for this report" - end + return 'No Such Report' if report.nil? + # By asking only udos for the report we have access to, we're safe from + # edit by arbitrary user + udos = UserDefinedObjects.all(report_id: id, id: udo_id) + return 'No Such UDO for this report' if udos.empty? edited_udo = UserDefinedObjects.new - #there will always be only one udo in this collecion + # there will always be only one udo in this collecion udos.each do |udo_to_edit| - edited_udo = udo_to_edit + edited_udo = udo_to_edit end - #we extract the properties from the posted data + # we extract the properties from the posted data udo_new_properties = {} data.each do |post_param, post_value| if post_param =~ /param_/ - if not post_value =~ /\/ - udo_new_properties[post_param.split("_")[1].downcase] = "#{post_value}" + if post_value !~ /\/ + udo_new_properties[post_param.split('_')[1].downcase] = "#{post_value}" else - udo_new_properties[post_param.split("_")[1].downcase] = post_value + udo_new_properties[post_param.split('_')[1].downcase] = post_value end end end @@ -563,107 +518,91 @@ redirect to("/report/#{id}/udo/manage") end - -#Delete an UDO +# Delete an UDO get '/report/:id/udo/:udo_id/delete' do @id = params[:id] udo_id = params[:udo_id] report = get_report(@id) - if report == nil - return "No Such Report" - end - #By asking only udos for the report we have access to, we're safe from - #delete by arbitrary user - udos = UserDefinedObjects.all(:report_id => @id, :id => udo_id) - if udos.empty? - return "No Such UDO for this report" - end - #there will always be only one udo in this collecion - udos.each do |udo_to_destroy| - udo_to_destroy.destroy - end + return 'No Such Report' if report.nil? + # By asking only udos for the report we have access to, we're safe from + # delete by arbitrary user + udos = UserDefinedObjects.all(report_id: @id, id: udo_id) + return 'No Such UDO for this report' if udos.empty? 
+ # there will always be only one udo in this collecion + udos.each(&:destroy) redirect to("/report/#{@id}/udo/manage") end - - -#Edit user defined variable +# Edit user defined variable get '/report/:id/user_defined_variables' do id = params[:id] @report = get_report(id) - if @report.user_defined_variables + if @report.user_defined_variables @user_variables = JSON.parse(@report.user_defined_variables) # add in the global UDV from config - if config_options["user_defined_variables"].size > 0 and !@user_variables.include?(config_options["user_defined_variables"][0]) - config_options["user_defined_variables"].each do |key,value| - @user_variables.store(key,"") + if !config_options['user_defined_variables'].empty? && !@user_variables.include?(config_options['user_defined_variables'][0]) + config_options['user_defined_variables'].each do |key, _value| + @user_variables.store(key, '') end end - @user_variables.each do |k,v| - if v - @user_variables[k] = meta_markup(v) - end + @user_variables.each do |k, v| + @user_variables[k] = meta_markup(v) if v end else - @user_variables = config_options["user_defined_variables"] + @user_variables = config_options['user_defined_variables'] end - haml :user_defined_variable, :encode_html => true + haml :user_defined_variable, encode_html: true end -#Post user defined variables +# Post user defined variables post '/report/:id/user_defined_variables' do data = url_escape_hash(request.POST) # quick fix for udv not in paragraph when on only one line - data.each do |k,v| - if k =~ /variable_data/ and not v =~ /\/ + data.each do |k, v| + if k =~ /variable_data/ && (v !~ /\/) && !v.empty? data[k] = "#{v}" end end - variable_hash = Hash.new() - data.each do |k,v| - if k =~ /variable_name/ - key = k.split("variable_name_").last.split("_").first - - # remove certain elements from name %&"<> - v = v.gsub("%","_").gsub(""","'").gsub("&","").gsub(">","").gsub("<","") - variable_hash["#{key}%#{v}"] = "DEFAULT" - - end - if k =~ /variable_data/ - key = k.split("variable_data_").last.split("_").first - - variable_hash.each do |k1,v1| - if k1 =~ /%/ - kk = k1.split("%") - if kk.first == key - variable_hash[k1] = v - end - end - end - end - end - - # remove the % and any blank values - q = variable_hash.clone - variable_hash.each do |k,v| - if k =~ /%/ - p k.split("%") - if k.split("%").size == 1 - q.delete(k) - else - q[k.split("%").last] = v - q.delete(k) - end - end - end - variable_hash = q + variable_hash = {} + data.each do |k, v| + if k =~ /variable_name/ + key = k.split('variable_name_').last.split('_').first + + # remove certain elements from name %&"<> + v = v.tr('%', '_').gsub('"', "'").gsub('&', '').gsub('>', '').gsub('<', '') + variable_hash["#{key}%#{v}"] = 'DEFAULT' + + end + next unless k =~ /variable_data/ + key = k.split('variable_data_').last.split('_').first + + variable_hash.each do |k1, _v1| + next unless k1 =~ /%/ + kk = k1.split('%') + variable_hash[k1] = v if kk.first == key + end + end + + # remove the % and any blank values + q = variable_hash.clone + variable_hash.each do |k, v| + next unless k =~ /%/ + p k.split('%') + if k.split('%').size == 1 + q.delete(k) + else + q[k.split('%').last] = v + q.delete(k) + end + end + variable_hash = q id = params[:id] @report = get_report(id) @@ -671,12 +610,11 @@ @report.user_defined_variables = variable_hash.to_json @report.save redirect to("/report/#{id}/user_defined_variables") - end # Findings List Menu get '/report/:id/findings' do - @chart = config_options["chart"] + @chart = config_options['chart'] 
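# A worked, standalone example of how the user-defined-variable handler above pairs
# variable_name_N and variable_data_N form fields: each name is parked under a
# temporary "N%name" key with a 'DEFAULT' placeholder, the matching data field then
# overwrites the placeholder, and the numeric prefix is stripped at the end. The
# form values are hypothetical, and the character filtering the real handler applies
# to names is omitted.
require 'json'

posted = {
  'variable_name_1' => 'client_contact',
  'variable_data_1' => 'Jane Doe',
  'variable_name_2' => 'engagement_id',
  'variable_data_2' => 'ENG-042'
}

variable_hash = {}
posted.each do |k, v|
  if k =~ /variable_name/
    key = k.split('variable_name_').last.split('_').first
    variable_hash["#{key}%#{v}"] = 'DEFAULT'
  elsif k =~ /variable_data/
    key = k.split('variable_data_').last.split('_').first
    variable_hash.each { |k1, _| variable_hash[k1] = v if k1.split('%').first == key }
  end
end

user_defined_variables = variable_hash.each_with_object({}) do |(k, v), h|
  h[k.split('%').last] = v
end

puts user_defined_variables.to_json
# => {"client_contact":"Jane Doe","engagement_id":"ENG-042"}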
@report = true id = params[:id] @@ -684,22 +622,18 @@ @report = get_report(id) @plugin_side_menu = get_plugin_list - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - unless @report.scoring - @report.update(:scoring => set_scoring(config_options)) - end + @report.update(scoring: set_scoring(config_options)) unless @report.scoring - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) - if config_options.has_key?("cvssv2_scoring_override") - @cvssv2_scoring_override = config_options["cvssv2_scoring_override"] - else - @cvssv2_scoring_override = false - end - haml :findings_list, :encode_html => true + @cvssv2_scoring_override = if config_options.key?('cvssv2_scoring_override') + config_options['cvssv2_scoring_override'] + else + false + end + haml :findings_list, encode_html: true end # Generate a status report from the current findings @@ -709,28 +643,26 @@ # Query for the report @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) ## We have to do some hackery here for wordml - findings_xml = "" - findings_xml << "" + findings_xml = '' + findings_xml << '' @findings.each do |finding| ### Let's find the diff between the original and the new overview and remediation - master_finding = TemplateFindings.first(:id => finding.master_id) + master_finding = TemplateFindings.first(id: finding.master_id) findings_xml << finding.to_xml end - findings_xml << "" + findings_xml << '' findings_xml = meta_markup_unencode(findings_xml, @report) - report_xml = "#{findings_xml}" + report_xml = findings_xml.to_s - xslt_elem = Xslt.first(:status_template => true) + xslt_elem = Xslt.first(status_template: true) if xslt_elem @@ -743,48 +675,46 @@ rand_file = "./tmp/#{rand(36**12).to_s(36)}.docx" # Create a temporary copy of the finding_template - FileUtils::copy_file(xslt_elem.docx_location,rand_file) + FileUtils.copy_file(xslt_elem.docx_location, rand_file) ### IMAGE INSERT CODE if docx_xml.to_s =~ /\[!!/ # first we read in the current [Content_Types.xml] - content_types = read_rels(rand_file,"[Content_Types].xml") + content_types = read_archive(rand_file, '[Content_Types].xml') # add the png and jpg handling to end of content types document - if !(content_types =~ /image\/jpg/) - content_types = content_types.sub("","") + if content_types !~ /image\/jpg/ + content_types = content_types.sub('', '') end - if !(content_types =~ /image\/png/) - content_types = content_types.sub("","") + if content_types !~ /image\/png/ + content_types = content_types.sub('', '') end - if !(content_types =~ /image\/jpeg/) - content_types = content_types.sub("","") + if content_types !~ /image\/jpeg/ + content_types = content_types.sub('', '') end - docx_modify(rand_file,content_types,"[Content_Types].xml") + archive_modify(rand_file, content_types, '[Content_Types].xml') # replace all [!! image !!] 
in the document - imgs = docx_xml.to_s.split("[!!") + imgs = docx_xml.to_s.split('[!!') docx = imgs.first imgs.delete_at(0) imgs.each do |image_i| - - name = image_i.split("!!]").first.gsub(" ","") - end_xml = image_i.split("!!]").last + name = image_i.split('!!]').first.delete(' ') + end_xml = image_i.split('!!]').last # search for the image in the attachments - image = Attachments.first(:description => name, :report_id => id) + image = Attachments.first(description: name, report_id: id) # tries to prevent breakage in the case image dne if image docx = image_insert(docx, rand_file, image, end_xml) else - docx = docx.sub(/]*?>((?]).)*\z/m,"") + docx = docx.sub(/]*?>((?]).)*\z/m, '') end_xml = end_xml.sub(/^<\/w:t>.*?<\/w:r>.*?<\/w:p>/m, '') docx << end_xml end - end else @@ -793,15 +723,13 @@ end #### END IMAGE INSERT CODE - docx_modify(rand_file,docx,'word/document.xml') + docx_modify(rand_file, docx, 'word/document.xml') - send_file rand_file, :type => 'docx', :filename => "status.docx" + send_file rand_file, type: 'docx', filename: 'status.docx' else "You don't have a Finding Template (did you delete the temp?) -_- ... If you're an admin go to here to add one." end - - end # Add a finding to the report @@ -812,14 +740,12 @@ # Query for the first report matching the report_name @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? # Query for all Findings - @findings = TemplateFindings.all(:approved => true, :order => [:title.asc]) + @findings = TemplateFindings.all(approved: true, order: [:title.asc]) - haml :findings_add, :encode_html => true + haml :findings_add, encode_html: true end # Add a finding to the report @@ -830,112 +756,104 @@ # Query for the first report matching the report_name @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - hosts = "" + hosts = '' redirect to("/report/#{id}/findings") unless params[:finding] - params[:finding].each do |finding| - templated_finding = TemplateFindings.first(:id => finding.to_i) + params[:finding].each do |finding| + templated_finding = TemplateFindings.first(id: finding.to_i) - templated_finding.id = nil - attr = templated_finding.attributes - attr.delete(:approved) - attr["master_id"] = finding.to_i - @newfinding = Findings.new(attr) - @newfinding.report_id = id + templated_finding.id = nil + attr = templated_finding.attributes + attr.delete(:approved) + attr['master_id'] = finding.to_i + @newfinding = Findings.new(attr) + @newfinding.report_id = id # because of multiple scores we need to make sure all are set # => leave it up to the user to make the calculation if they switch mid report - @newfinding.dread_total = 0 if @newfinding.dread_total == nil - @newfinding.cvss_total = 0 if @newfinding.cvss_total == nil - @newfinding.risk = 0 if @newfinding.risk == nil + @newfinding.dread_total = 0 if @newfinding.dread_total.nil? + @newfinding.cvss_total = 0 if @newfinding.cvss_total.nil? + @newfinding.risk = 0 if @newfinding.risk.nil? 
- @newfinding.save - end + @newfinding.save + end # if we have hosts add them to the findings too params[:finding].each do |number| # if there are hosts to add with a finding they'll have a param syntax of "findingXXX=ip1,ip2,ip3" @findingnum = "finding#{number}" - #TODO: merge with existing hosts (if any) probably should handle this host stuff in the db - finding = Findings.first(:report_id => id, :master_id => number.to_i) + # TODO: merge with existing hosts (if any) probably should handle this host stuff in the db + finding = Findings.first(report_id: id, master_id: number.to_i) - if (params["#{@findingnum}"] != nil) - params["#{@findingnum}"].split(",").each do |ip| - #TODO: this is dirty. also should support different delimeters instead of just newline - hosts << "" + ip.to_s + "" + unless params[@findingnum.to_s].nil? + params[@findingnum.to_s].split(',').each do |ip| + # TODO: this is dirty. also should support different delimeters instead of just newline + hosts << '' + ip.to_s + '' end finding.affected_hosts = hosts - hosts = "" + hosts = '' end finding.save end serpico_log("#{@newfinding.title} added to report #{id}") - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) - haml :findings_list, :encode_html => true + haml :findings_list, encode_html: true end # Create a new finding in the report get '/report/:id/findings/new' do # Query for the first report matching the report_name @report = get_report(params[:id]) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? # attachments autocomplete work - temp_attaches = Attachments.all(:report_id => params[:id]) + temp_attaches = Attachments.all(report_id: params[:id]) @attaches = [] temp_attaches.each do |ta| - next unless ta.description =~ /png/i or ta.description =~ /jpg/i + next unless ta.description =~ /png/i || ta.description =~ /jpg/i @attaches.push(ta.description) end - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) - haml :create_finding, :encode_html => true + haml :create_finding, encode_html: true end # Create the finding in the DB post '/report/:id/findings/new' do error = mm_verify(request.POST) - if error.size > 1 - return error - end + return error if error.size > 1 data = url_escape_hash(request.POST) id = params[:id] @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - if(@report.scoring.downcase == "dread") - data["dread_total"] = data["damage"].to_i + data["reproducability"].to_i + data["exploitability"].to_i + data["affected_users"].to_i + data["discoverability"].to_i - elsif(@report.scoring.downcase == "cvss") + if @report.scoring.casecmp('dread').zero? + data['dread_total'] = data['damage'].to_i + data['reproducability'].to_i + data['exploitability'].to_i + data['affected_users'].to_i + data['discoverability'].to_i + elsif @report.scoring.casecmp('cvss').zero? data = cvss(data, false) - elsif(@report.scoring.downcase == "cvssv3") + elsif @report.scoring.casecmp('cvssv3').zero? 
data = cvss(data, true) end - data["report_id"] = id + data['report_id'] = id @finding = Findings.new(data) @finding.save # because of multiple scores we need to make sure all are set # => leave it up to the user to make the calculation if they switch mid report - @finding.dread_total = 0 if @finding.dread_total == nil - @finding.cvss_total = 0 if @finding.cvss_total == nil - @finding.risk = 0 if @finding.risk == nil + @finding.dread_total = 0 if @finding.dread_total.nil? + @finding.cvss_total = 0 if @finding.cvss_total.nil? + @finding.risk = 0 if @finding.risk.nil? @finding.save # for a parameter_pollution on report_id @@ -949,30 +867,26 @@ # Query for the first report matching the report_name @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? finding_id = params[:finding_id] # Query for all Findings - @finding = Findings.first(:report_id => id, :id => finding_id) + @finding = Findings.first(report_id: id, id: finding_id) - if @finding == nil - return "No Such Finding" - end + return 'No Such Finding' if @finding.nil? # attachments autocomplete work - temp_attaches = Attachments.all(:report_id => id) + temp_attaches = Attachments.all(report_id: id) @attaches = [] temp_attaches.each do |ta| - next unless ta.description =~ /png/i or ta.description =~ /jpg/i + next unless ta.description =~ /png/i || ta.description =~ /jpg/i @attaches.push(ta.description) end - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) - haml :findings_edit, :encode_html => true + haml :findings_edit, encode_html: true end # Edit a finding in the report @@ -983,33 +897,27 @@ # Query for the report @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? finding_id = params[:finding_id] # Query for all Findings - @finding = Findings.first(:report_id => id, :id => finding_id) + @finding = Findings.first(report_id: id, id: finding_id) - if @finding == nil - return "No Such Finding" - end + return 'No Such Finding' if @finding.nil? error = mm_verify(request.POST) - if error.size > 1 - return error - end + return error if error.size > 1 data = url_escape_hash(request.POST) # to prevent title's from degenerating with >, etc. [issue 237] - data["title"] = data["title"].gsub('&','&') + data['title'] = data['title'].gsub('&', '&') - if(@report.scoring.downcase == "dread") - data["dread_total"] = data["damage"].to_i + data["reproducability"].to_i + data["exploitability"].to_i + data["affected_users"].to_i + data["discoverability"].to_i - elsif(@report.scoring.downcase == "cvss") + if @report.scoring.casecmp('dread').zero? + data['dread_total'] = data['damage'].to_i + data['reproducability'].to_i + data['exploitability'].to_i + data['affected_users'].to_i + data['discoverability'].to_i + elsif @report.scoring.casecmp('cvss').zero? data = cvss(data, false) - elsif(@report.scoring.downcase == "cvssv3") + elsif @report.scoring.casecmp('cvssv3').zero? data = cvss(data, true) end @@ -1018,9 +926,9 @@ # because of multiple scores we need to make sure all are set # => leave it up to the user to make the calculation if they switch mid report - @finding.dread_total = 0 if @finding.dread_total == nil - @finding.cvss_total = 0 if @finding.cvss_total == nil - @finding.risk = 0 if @finding.risk == nil + @finding.dread_total = 0 if @finding.dread_total.nil? 
+ @finding.cvss_total = 0 if @finding.cvss_total.nil? + @finding.risk = 0 if @finding.risk.nil? @finding.save redirect to("/report/#{id}/findings") @@ -1034,72 +942,68 @@ # Query for the report @report = get_report(id) - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? finding_id = params[:finding_id] # Query for the finding - @finding = Findings.first(:report_id => id, :id => finding_id) + @finding = Findings.first(report_id: id, id: finding_id) - if @finding == nil - return "No Such Finding" - end + return 'No Such Finding' if @finding.nil? # We can't create a direct copy b/c TemplateFindings doesn't have everything findings does # Check model/master.rb to compare attr = { - :title => @finding.title, - :damage => @finding.damage, - :reproducability => @finding.reproducability, - :exploitability => @finding.exploitability, - :affected_users => @finding.affected_users, - :discoverability => @finding.discoverability, - :dread_total => @finding.dread_total, - :cvss_base => @finding.cvss_base, - :cvss_impact => @finding.cvss_impact, - :cvss_exploitability => @finding.cvss_exploitability, - :cvss_temporal => @finding.cvss_temporal, - :cvss_environmental => @finding.cvss_environmental, - :cvss_modified_impact => @finding.cvss_modified_impact, - :cvss_total => @finding.cvss_total, - :effort => @finding.effort, - :type => @finding.type, - :overview => @finding.overview, - :poc => @finding.poc, - :remediation => @finding.remediation, - :approved => false, - :references => @finding.references, - :risk => @finding.risk, - :attack_vector => @finding.attack_vector, - :attack_complexity => @finding.attack_complexity, - :privileges_required => @finding.privileges_required, - :user_interaction => @finding.user_interaction, - :scope_cvss => @finding.scope_cvss, - :confidentiality => @finding.confidentiality, - :integrity => @finding.integrity, - :availability => @finding.availability, - :exploit_maturity => @finding.exploit_maturity, - :remeditation_level => @finding.remeditation_level, - :report_confidence => @finding.report_confidence, - :confidentiality_requirement => @finding.confidentiality_requirement, - :integrity_requirement => @finding.integrity_requirement, - :availability_requirement => @finding.availability_requirement, - :mod_attack_vector => @finding.mod_attack_vector, - :mod_attack_complexity => @finding.mod_attack_complexity, - :mod_privileges_required => @finding.mod_privileges_required, - :mod_user_interaction => @finding.mod_user_interaction, - :mod_scope => @finding.mod_scope, - :mod_confidentiality => @finding.mod_confidentiality, - :mod_integrity => @finding.mod_integrity, - :mod_availability => @finding.mod_availability, - :cvss_base_score => @finding.cvss_base_score, - :cvss_impact_score => @finding.cvss_impact_score, - :cvss_mod_impact_score => @finding.cvss_mod_impact_score, - :severity => @finding.severity, - :likelihood => @finding.likelihood, - } + title: @finding.title, + damage: @finding.damage, + reproducability: @finding.reproducability, + exploitability: @finding.exploitability, + affected_users: @finding.affected_users, + discoverability: @finding.discoverability, + dread_total: @finding.dread_total, + cvss_base: @finding.cvss_base, + cvss_impact: @finding.cvss_impact, + cvss_exploitability: @finding.cvss_exploitability, + cvss_temporal: @finding.cvss_temporal, + cvss_environmental: @finding.cvss_environmental, + cvss_modified_impact: @finding.cvss_modified_impact, + cvss_total: @finding.cvss_total, + effort: @finding.effort, + 
type: @finding.type, + overview: @finding.overview, + poc: @finding.poc, + remediation: @finding.remediation, + approved: false, + references: @finding.references, + risk: @finding.risk, + attack_vector: @finding.attack_vector, + attack_complexity: @finding.attack_complexity, + privileges_required: @finding.privileges_required, + user_interaction: @finding.user_interaction, + scope_cvss: @finding.scope_cvss, + confidentiality: @finding.confidentiality, + integrity: @finding.integrity, + availability: @finding.availability, + exploit_maturity: @finding.exploit_maturity, + remeditation_level: @finding.remeditation_level, + report_confidence: @finding.report_confidence, + confidentiality_requirement: @finding.confidentiality_requirement, + integrity_requirement: @finding.integrity_requirement, + availability_requirement: @finding.availability_requirement, + mod_attack_vector: @finding.mod_attack_vector, + mod_attack_complexity: @finding.mod_attack_complexity, + mod_privileges_required: @finding.mod_privileges_required, + mod_user_interaction: @finding.mod_user_interaction, + mod_scope: @finding.mod_scope, + mod_confidentiality: @finding.mod_confidentiality, + mod_integrity: @finding.mod_integrity, + mod_availability: @finding.mod_availability, + cvss_base_score: @finding.cvss_base_score, + cvss_impact_score: @finding.cvss_impact_score, + cvss_mod_impact_score: @finding.cvss_mod_impact_score, + severity: @finding.severity, + likelihood: @finding.likelihood + } @new_finding = TemplateFindings.new(attr) @new_finding.save @@ -1123,170 +1027,66 @@ params[:finding_id].split(',').each do |current_id| finding = Findings.first(report_id: id, id: current_id) - return "No Such Finding : #{current_id}" if finding.nil? - # delete the entries - finding.destroy - serpico_log("#{finding.title} deleted from report #{id}") + return "No Such Finding : #{current_id}" if finding.nil? 
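# The hand-built attribute hash above keeps only the columns that TemplateFindings
# actually defines. A compact sketch of the same idea, assuming DataMapper's
# properties/attributes API as used elsewhere in this patch (an illustration of the
# reasoning, not a drop-in replacement):
template_names = TemplateFindings.properties.map(&:name)
attr = @finding.attributes.select { |name, _value| template_names.include?(name) }
attr.delete(:id)
attr[:approved] = false
new_template_finding = TemplateFindings.new(attr)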
+ # delete the entries + finding.destroy + serpico_log("#{finding.title} deleted from report #{id}") end redirect to("/report/#{id}/findings") end -# preview a finding -get '/report/:id/findings/:finding_id/preview' do +# Generate the document +#:template_type is the type of file to be generated +# it can be equal to : finding, report, status, summary +# Word files are : finding, report, status +# Excel files are : summary +get '/report/:id/generate/:template_type' do id = params[:id] - - # Query for the report - @report = get_report(id) - - if @report == nil - return "No Such Report" - end - - # Query for the Finding - @finding = Findings.first(:report_id => id, :id => params[:finding_id]) - - if @finding == nil - return "No Such Finding" - end - - # this flags edited findings - if @finding.master_id - master = TemplateFindings.first(:id => @finding.master_id) - @finding.overview = compare_text(@finding.overview, master.overview) - end - - ## We have to do some hackery here for wordml - findings_xml = "" - findings_xml << "" - findings_xml << @finding.to_xml - findings_xml << "" - - findings_xml = meta_markup_unencode(findings_xml, @report) - - report_xml = "#{findings_xml}" - - xslt_elem = Xslt.first(:finding_template => true) - - if xslt_elem - - # Push the finding from XML to XSLT - xslt = Nokogiri::XSLT(File.read(xslt_elem.xslt_location)) - - docx_xml = xslt.transform(Nokogiri::XML(report_xml)) - - # We use a temporary file with a random name - rand_file = "./tmp/#{rand(36**12).to_s(36)}.docx" - - # Create a temporary copy of the finding_template - FileUtils::copy_file(xslt_elem.docx_location,rand_file) - - ### IMAGE INSERT CODE - if docx_xml.to_s =~ /\[!!/ - # first we read in the current [Content_Types.xml] - content_types = read_rels(rand_file,"[Content_Types].xml") - - # add the png and jpg handling to end of content types document - if !(content_types =~ /image\/jpg/) - content_types = content_types.sub("","") - end - if !(content_types =~ /image\/png/) - content_types = content_types.sub("","") - end - if !(content_types =~ /image\/jpeg/) - content_types = content_types.sub("","") - end - - docx_modify(rand_file,content_types,"[Content_Types].xml") - - # replace all [!! image !!] in the document - imgs = docx_xml.to_s.split("[!!") - docx = imgs.first - imgs.delete_at(0) - - imgs.each do |image_i| - - name = image_i.split("!!]").first.gsub(" ","") - end_xml = image_i.split("!!]").last - - # search for the image in the attachments - image = Attachments.first(:description => name, :report_id => id) - - # tries to prevent breakage in the case image dne - if image - # inserts the image into the doc - docx = image_insert(docx, rand_file, image, end_xml) - else - docx = docx.sub(/]*?>((?]).)*\z/m,"") - end_xml = end_xml.sub(/^<\/w:t>.*?<\/w:r>.*?<\/w:p>/m, '') - docx << end_xml - end - - end - - else - # no images in finding - docx = docx_xml.to_s - end - #### END IMAGE INSERT CODE - - docx_modify(rand_file, docx,'word/document.xml') - - send_file rand_file, :type => 'docx', :filename => "#{@finding.title}.docx" - else - - "You don't have a Finding Template (did you delete the default one?) -_- ... If you're an admin go to here to add one." - + doc_to_generate = params[:template_type] + supported_template_types = %w[finding report status summary] + unless supported_template_types.include? 
doc_to_generate.downcase + return "Wrong Template Type : #{doc_to_generate}" end -end - -# Generate the report -get '/report/:id/generate' do - id = params[:id] - # Query for the report @report = get_report(id) - if @report == nil - return "No Such Report" - end - - unless @report.scoring - @report.update(:scoring => set_scoring(config_options)) - end + return 'No Such Report' if @report.nil? - user = User.first(:username => get_username) + @report.update(scoring: set_scoring(config_options)) unless @report.scoring + user = User.first(username: get_username) + @report.consultant_name = '' + @report.consultant_phone = '' + @report.consultant_email = '' + @report.consultant_title = '' + @report.consultant_company = '' if user @report.consultant_name = user.consultant_name @report.consultant_phone = user.consultant_phone @report.consultant_email = user.consultant_email @report.consultant_title = user.consultant_title @report.consultant_company = user.consultant_company - - else - @report.consultant_name = "" - @report.consultant_phone = "" - @report.consultant_email = "" - @report.consultant_title = "" - @report.consultant_company = "" - end @report.save - - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) - + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) ## We have to do some hackery here for wordml - findings_xml = "" - findings_xml << "" + findings_xml = '' + findings_xml << '' finding_number = 1 + if doc_to_generate == 'finding' + @findings = Findings.all(report_id: id, id: params[:finding_id]) + return "Given Finding ID doesn't exist !" if @findings.empty? + else + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) + end @findings.each do |finding| finding.finding_number = finding_number # This flags new or edited findings if finding.master_id - master = TemplateFindings.first(:id => finding.master_id) + master = TemplateFindings.first(id: finding.master_id) if master finding.overview = compare_text(finding.overview, master.overview) finding.remediation = compare_text(finding.remediation, master.remediation) @@ -1302,7 +1102,7 @@ finding_number += 1 end - findings_xml << "" + findings_xml << '' # Replace the stub elements with real XML elements findings_xml = meta_markup_unencode(findings_xml, @report) @@ -1313,15 +1113,15 @@ udv_hash = JSON.parse(@report.user_defined_variables) end - #adding the udvs to the XML + # adding the udvs to the XML # update udv_hash with findings totals udv_hash = add_findings_totals(udv_hash, @findings, config_options) udv = "\n" - udv_hash.each do |key,value| + udv_hash.each do |key, value| udv << "<#{key}>" - udv << "#{value}" + udv << value.to_s udv << "\n" end udv << "\n" @@ -1331,49 +1131,49 @@ udo_templates = UserDefinedObjectTemplates.all udo_templates.each do |udo_template| # we only add the udos that are linked to the current report, and linked to its respective template - udos = UserDefinedObjects.all(:report_id => @report.id, :template_id => udo_template.id) + udos = UserDefinedObjects.all(report_id: @report.id, template_id: udo_template.id) udos.each do |udo| - udo_xml << "\t<#{udo_template.type.downcase.gsub(" ","_")}>\n" + udo_xml << "\t<#{udo_template.type.downcase.tr(' ', '_')}>\n" properties = JSON.parse(udo.udo_properties) properties.each do |prop, value| - udo_xml << "\t\t<#{prop.downcase.gsub(" ","_")}>" - udo_xml << "#{value}" - udo_xml << "\n" + udo_xml << "\t\t<#{prop.downcase.tr(' ', '_')}>" + udo_xml << value.to_s + udo_xml << 
"\n" end - udo_xml << "\t\n" + udo_xml << "\t\n" end end udo_xml << "\n" - #if msf connection up, we add services and hosts to the xml - services_xml = "" - if (msfsettings = RemoteEndpoints.first(:report_id => @report.id)) + # if msf connection up, we add services and hosts to the xml + services_xml = '' + if (msfsettings = RemoteEndpoints.first(report_id: @report.id)) if (rpc = msfrpc(@report.id)) res = rpc.call('console.create') rpc.call('db.set_workspace', msfsettings.workspace) - #We create the XML from the opened services. onlyup undocumented but it does exist - res = rpc.call('db.services', {:limit => 10000, :only_up => true} ) - msfservices = res["services"] + # We create the XML from the opened services. onlyup undocumented but it does exist + res = rpc.call('db.services', limit: 100_000, only_up: true) + msfservices = res['services'] services_xml_raw = Nokogiri::XML::Builder.new do |xml| xml.services do msfservices.each do |msfservice| xml.service do msfservice.each do |key, value| - xml.send "#{key}_", value + xml.send "#{key}_", value end end end end end services_xml = services_xml_raw.doc.root.to_xml - #we create the XML from the hosts found. - res = rpc.call('db.hosts', {:limit => 10000} ) - msfhosts = res["hosts"] + # we create the XML from the hosts found. + res = rpc.call('db.hosts', limit: 10_000) + msfhosts = res['hosts'] hosts_xml_raw = Nokogiri::XML::Builder.new do |xml| xml.hosts do msfhosts.each do |msfhost| xml.host do msfhost.each do |key, value| - xml.send "#{key}_", value + xml.send "#{key}_", value end end end @@ -1382,101 +1182,184 @@ hosts_xml = hosts_xml_raw.doc.root.to_xml end end - #we bring all xml together + # we bring all xml together report_xml = "#{@report.to_xml}#{udv}#{findings_xml}#{udo_xml}#{services_xml}#{hosts_xml}" + File.open('/mnt/Kali_Shared/report_xml_will.xml', 'w') { |file| file.write(report_xml) } + ####### WORD GENERATION PART ########################### + # This part is used if the generated document is a docx + + if doc_to_generate == 'report' + xslt_elem = DocxXslts.first(template_title: @report.associated_docx_template) + document_name = "#{@report.report_name}.docx" + elsif doc_to_generate == 'status' + xslt_elem = DocxXslts.first(template_type: 'Word - Status Template') + document_name = "#{@report.report_name} - Status.docx" + elsif doc_to_generate == 'finding' + xslt_elem = DocxXslts.first(template_type: 'Word - Finding Template') + document_name = @findings[0].title + end + if doc_to_generate == 'report' || doc_to_generate == 'status' || doc_to_generate == 'finding' + unless xslt_elem + return "#{doc_to_generate} template not found :-). IF you're an admin, create one here!" 
+ end + # Push the finding from XML to XSLT + xslt = Nokogiri::XSLT(File.read(xslt_elem.xslt_location)) - xslt_elem = Xslt.first(:report_type => @report.report_type) - - # Push the finding from XML to XSLT - xslt = Nokogiri::XSLT(File.read(xslt_elem.xslt_location)) - - docx_xml = xslt.transform(Nokogiri::XML(report_xml)) - - # We use a temporary file with a random name - rand_file = "./tmp/#{rand(36**12).to_s(36)}.docx" + docx_xml = xslt.transform(Nokogiri::XML(report_xml)) - # Create a temporary copy of the word doc - FileUtils::copy_file(xslt_elem.docx_location,rand_file) + # We use a temporary file with a random name + rand_file_name = "./tmp/#{rand(36**12).to_s(36)}.docx" - list_components = {} - xslt_elem.components.each do |component| - xslt = Nokogiri::XSLT(File.read(component.xslt_location)) - list_components[component.name] = xslt.transform(Nokogiri::XML(report_xml)) - end - ### IMAGE INSERT CODE - if docx_xml.to_s =~ /\[!!/ - # first we read in the current [Content_Types.xml] - content_types = read_rels(rand_file,"[Content_Types].xml") + # Create a temporary copy of the word doc + FileUtils.copy_file(xslt_elem.docx_location, rand_file_name) - # add the png and jpg handling to end of content types document - if !(content_types =~ /image\/jpg/) - content_types = content_types.sub("","") - end - if !(content_types =~ /image\/png/) - content_types = content_types.sub("","") - end - if !(content_types =~ /image\/jpeg/) - content_types = content_types.sub("","") + list_components = {} + xslt_elem.components.each do |component| + xslt = Nokogiri::XSLT(File.read(component.xslt_location)) + list_components[component.name] = xslt.transform(Nokogiri::XML(report_xml)) end + ### IMAGE INSERT CODE + if docx_xml.to_s =~ /\[!!/ + # first we read in the current [Content_Types.xml] + content_types = read_from_zip(rand_file_name, '[Content_Types].xml') - docx_modify(rand_file,content_types,"[Content_Types].xml") + # add the png and jpg handling to end of content types document + if content_types !~ /image\/jpg/ + content_types = content_types.sub('', '') + end + if content_types !~ /image\/png/ + content_types = content_types.sub('', '') + end + if content_types !~ /image\/jpeg/ + content_types = content_types.sub('', '') + end - # replace all [!! image !!] in the document - imgs = docx_xml.to_s.split("[!!") - docx = imgs.first - imgs.delete_at(0) + archive_modify(rand_file_name, content_types, '[Content_Types].xml') - imgs.each do |image_i| + # replace all [!! image !!] 
in the document + imgs = docx_xml.to_s.split('[!!') + docx = imgs.first + imgs.delete_at(0) - name = image_i.split("!!]").first.gsub(" ","") - end_xml = image_i.split("!!]").last + imgs.each do |image_i| + name = image_i.split('!!]').first.delete(' ') + end_xml = image_i.split('!!]').last - # search for the image in the attachments - image = Attachments.first(:description => name, :report_id => id) + # search for the image in the attachments + image = Attachments.first(report_id: id, conditions: ['lower(description) = ?', name.downcase]) # tries to prevent breakage in the case image dne if image # inserts the image - docx = image_insert(docx, rand_file, image, end_xml) + docx = image_insert(docx, rand_file_name, image, end_xml) else docx << end_xml end + end + else + # no images in finding + docx = docx_xml.to_s + end + #### END IMAGE INSERT CODE + # Get hyperlinks and References + hyperlinks = updateHyperlinks(docx) + # Update _rels directrory + rels_file = read_from_zip(rand_file_name, 'word/_rels/document.xml.rels') + # Noko syntax rels + noko_rels = Nokogiri::XML(rels_file) + urls = hyperlinks['urls'] + id = hyperlinks['id'] + for i in 0..id.length - 1 + url = urls[i] + cid = id[i] + noko_rels.root.first_element_child.after("") end - else - # no images in finding - docx = docx_xml.to_s - end - #### END IMAGE INSERT CODE - - # Get hyperlinks and References - hyperlinks = updateHyperlinks(docx) - # Update _rels directrory - rels_file = read_rels(rand_file, "word/_rels/document.xml.rels") - # Noko syntax rels - noko_rels = Nokogiri::XML(rels_file) - urls = hyperlinks["urls"] - id = hyperlinks["id"] - for i in 0..id.length - 1 - url = urls[i] - cid = id[i] - noko_rels.root.first_element_child.after("") - end - - content_to_write = noko_rels.to_xml(:save_with => Nokogiri::XML::Node::SaveOptions::AS_XML).strip - #Edit Relationships file - write_rels(rand_file, "word/_rels/document.xml.rels", content_to_write) - # Update hyperlinks - docx = hyperlinks["xmlText"] - - docx_modify(rand_file, docx,'word/document.xml') - - list_components.each do |name, xml| - docx_modify(rand_file, xml.to_s,name) - end - - serpico_log("Report generation attempted, Report Name: #{@report.report_name} #{rand_file} #{xslt_elem.xslt_location}") - send_file rand_file, :type => 'docx', :filename => "#{@report.report_name}.docx" + + content_to_write = noko_rels.to_xml(save_with: Nokogiri::XML::Node::SaveOptions::AS_XML).strip + # Edit Relationships file + write_to_zip(rand_file_name, 'word/_rels/document.xml.rels', content_to_write) + # Update hyperlinks + docx = hyperlinks['xmlText'] + # put the created document.xml in the archive that will become the docx + archive_modify(rand_file_name, docx, 'word/document.xml') + + list_components.each do |name, xml| + # put the created header/footer in the archive that will become the docx + archive_modify(rand_file_name, xml.to_s, name) + end + + serpico_log("Report generation attempted, Report Name: #{document_name}") + send_file rand_file_name, type: 'docx', filename: "#{document_name}.docx" + + #################################### EXCEL GENERATION PART ########################### + + elsif doc_to_generate == 'summary' + # xslt_elem = DocxXslts.first(template_title: @report.associated_docx_template) + excel_xslt = ExcelXslts.first(template_title: @report.associated_excel_template) + if excel_xslt.nil? + return "You didn't configure which Excel Template to use :-)... Go to here to fix this." 
+ end + + # hackish stuff to have newlines + report_xml = report_xml.gsub('', 10.chr).gsub('', '') + # hackish stuff to have some kind of bullet transformation + report_xml = report_xml.gsub('', '=>').gsub('', '') + + # This file will store the excel sent to the user + rand_file_name = "./tmp/#{rand(36**12).to_s(36)}.xlsx" + FileUtils.copy_file(excel_xslt.excel_location, rand_file_name) + + ##### sharedstrings generation part + + xslt_to_transform = Nokogiri::XSLT(File.read(excel_xslt.xslt_shared_strings_location)) + shared_strings_xml = xslt_to_transform.transform(Nokogiri::XML(report_xml)) + archive_modify(rand_file_name, shared_strings_xml.to_s, 'xl/sharedStrings.xml') + + ###### worksheets generation part + + worksheets = excel_xslt.xslt_sheet_locations + + # archive_path is something like xl/worksheets/sheetX.xml + # xslt_path is something like ./templates/excel_worksheet_n91g3lav51i2r4l9riw.xslt + j = 0 + worksheets.each do |archive_path, xslt_path| + j += 1 + xslt_to_transform = Nokogiri::XSLT(File.read(xslt_path)) + worksheet_to_repair_xml = xslt_to_transform.transform(Nokogiri::XML(report_xml)) + # the xslt generation produces a broken xlsx file : because it added an arbitrary number of rows the indexes of cells and rows are messed up. + previous_index = '' + # we start reparing at the first indexed row + i = worksheet_to_repair_xml.at_xpath('//xmlns:row')['r'].to_i + worksheet_to_repair_xml.xpath('//xmlns:row').each do |row| + original_index = row['r'] + # if the current row was duplicated because of an xsl loop, we fix the row and cells index + if original_index.to_i <= previous_index.to_i + # we assign the new value for the fixed row + new_row_index = i + row['r'] = new_row_index + row.xpath('xmlns:c').each do |c| + original_cell_index = c['r'] + letter_part = original_cell_index.tr('0-9', '') + new_cell_index = letter_part + i.to_s + # we assign the new value for the fixed cell + c['r'] = new_cell_index + end + i += 1 + previous_index = new_row_index + next + end + i += 1 + previous_index = original_index + end + File.open("/mnt/Kali_Shared/excel_final_xml_#{j}", 'w') { |file| file.write(worksheet_to_repair_xml) } + # we modify the excel with the xslt produced worksheet + archive_modify(rand_file_name, worksheet_to_repair_xml.to_s, archive_path) + end + # we send the resulting file to the user + send_file rand_file_name, type: 'xlsx', filename: "#{@report.report_name} - Summary.xlsx" + + end end # Export a report @@ -1487,23 +1370,23 @@ report = get_report(id) # bail without a report - redirect to("/") unless report + redirect to('/') unless report # add the report - json["report"] = report + json['report'] = report # add the findings - findings = Findings.all(:report_id => id) - json["findings"] = findings + findings = Findings.all(report_id: id) + json['findings'] = findings # add the exports - attachments = Attachments.all(:report_id => id) - json["Attachments"] = attachments + attachments = Attachments.all(report_id: id) + json['Attachments'] = attachments local_filename = "./tmp/#{rand(36**12).to_s(36)}.json" - File.open(local_filename, 'w') {|f| f.write(JSON.pretty_generate(json)) } + File.open(local_filename, 'w') { |f| f.write(JSON.pretty_generate(json)) } - send_file local_filename, :type => 'json', :filename => "exported_report.json" + send_file local_filename, type: 'json', filename: 'exported_report.json' end # Import a report @@ -1513,54 +1396,54 @@ # Import a report post '/report/import' do - redirect to("/report/import") unless params[:file] + redirect 
to('/report/import') unless params[:file] # reject if the file is above a certain limit - if params[:file][:tempfile].size > 100000000 - return "File too large. 100MB limit" + if params[:file][:tempfile].size > 100_000_000 + return 'File too large. 100MB limit' end json_file = params[:file][:tempfile].read line = JSON.parse(json_file) - line["report"]["id"] = nil + line['report']['id'] = nil - f = Reports.create(line["report"]) + f = Reports.create(line['report']) f.save # now add the findings - line["findings"].each do |finding| - finding["id"] = nil - finding["master_id"] = nil - finding["report_id"] = f.id - finding["finding_modified"] = nil + line['findings'].each do |finding| + finding['id'] = nil + finding['master_id'] = nil + finding['report_id'] = f.id + finding['finding_modified'] = nil - finding["dread_total"] = 0 if finding["dread_total"] == nil - finding["cvss_total"] = 0 if finding["cvss_total"] == nil - finding["risk"] = 1 if finding["risk"] == nil + finding['dread_total'] = 0 if finding['dread_total'].nil? + finding['cvss_total'] = 0 if finding['cvss_total'].nil? + finding['risk'] = 1 if finding['risk'].nil? g = Findings.create(finding) g.save end - if line["Attachments"] + if line['Attachments'] # now add the attachments - line["Attachments"].each do |attach| + line['Attachments'].each do |attach| serpico_log("Importing attachments to #{f.id}") - attach["id"] = nil + attach['id'] = nil - attach["filename"] = "Unknown" if attach["filename"] == nil - if attach["filename_location"] =~ /./ - a = attach["filename_location"].split(".").last - loc = "./attachments/" + a.gsub("/attachments/","") - attach["filename_location"] = loc + attach['filename'] = 'Unknown' if attach['filename'].nil? + if attach['filename_location'] =~ /./ + a = attach['filename_location'].split('.').last + loc = './attachments/' + a.gsub('/attachments/', '') + attach['filename_location'] = loc else - loc = "./attachments/" + attach["filename_location"] + loc = './attachments/' + attach['filename_location'] end - attach["filename_location"] = loc + attach['filename_location'] = loc - attach["report_id"] = f.id - attach["description"] = "No description" if attach["description"] == nil + attach['report_id'] = f.id + attach['description'] = 'No description' if attach['description'].nil? 
g = Attachments.create(attach) g.save end @@ -1569,17 +1452,18 @@ # we should redirect to the newly imported report redirect to("/report/#{f.id}/edit") end + get '/report/:id/text_status' do id = params[:id] @report = get_report(id) # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report # add the findings - @findings = Findings.all(:report_id => id) + @findings = Findings.all(report_id: id) - haml :text_status, :encode_html => true + haml :text_status, encode_html: true end # generate an asciidoc version of current findings @@ -1588,53 +1472,53 @@ report = get_report(id) # bail without a report - redirect to("/") unless report + redirect to('/') unless report # add the findings - findings = Findings.all(:report_id => id) + findings = Findings.all(report_id: id) - ascii_doc_ = "" + ascii_doc_ = '' findings.each do |finding| - ascii_doc_ << gen_asciidoc(finding,report.scoring) + ascii_doc_ << gen_asciidoc(finding, report.scoring) end local_filename = "./tmp/#{rand(36**12).to_s(36)}.asd" - File.open(local_filename, 'w') {|f| f.write(ascii_doc_) } + File.open(local_filename, 'w') { |f| f.write(ascii_doc_) } - send_file local_filename, :type => 'txt', :filename => "report_#{id}_findings.asd" + send_file local_filename, type: 'txt', filename: "report_#{id}_findings.asd" end # generate a csv with the current report findings get '/report/:id/csv_export' do id = params[:id] - @report = get_report(id) + @report = get_report(id) - # bail without a report - redirect to("/") unless @report + # bail without a report + redirect to('/') unless @report - # add the findings - @findings = Findings.all(:report_id => id) - csv_doc_ = "Finding Title|Risk Rating|Remediation Effort|Type|Overview|Remediation\n" - @findings.each do |finding| - csv_doc_ << "#{finding.title}|#{finding.risk}|#{finding.effort}|#{finding.type}|#{finding.overview}|#{finding.remediation}\n" - end - # change some text around so the findings actually make sense and don't have a ton of garbage in them - csv_doc_ = csv_doc_.gsub(//, "") - csv_doc_ = csv_doc_.gsub(/<\/paragraph>/, "") - csv_doc_ = csv_doc_.gsub(/\|0\|/, "|Informational|") - csv_doc_ = csv_doc_.gsub(/\|1\|/, "|Low|") - csv_doc_ = csv_doc_.gsub(/\|2\|/, "|Moderate|") - csv_doc_ = csv_doc_.gsub(/\|3\|/, "|High|") - csv_doc_ = csv_doc_.gsub(/\|4\|/, "|Critical|") - local_filename = "./tmp/#{rand(36**12).to_s(36)}.csv" - File.open(local_filename, 'w') {|f| f.write(csv_doc_) } - send_file local_filename, :type => 'txt', :filename => "report_#{id}_findings.csv" + # add the findings + @findings = Findings.all(report_id: id) + csv_doc_ = "Finding Title|Risk Rating|Remediation Effort|Type|Overview|Remediation\n" + @findings.each do |finding| + csv_doc_ << "#{finding.title}|#{finding.risk}|#{finding.effort}|#{finding.type}|#{finding.overview}|#{finding.remediation}\n" + end + # change some text around so the findings actually make sense and don't have a ton of garbage in them + csv_doc_ = csv_doc_.gsub(//, '') + csv_doc_ = csv_doc_.gsub(/<\/paragraph>/, '') + csv_doc_ = csv_doc_.gsub(/\|0\|/, '|Informational|') + csv_doc_ = csv_doc_.gsub(/\|1\|/, '|Low|') + csv_doc_ = csv_doc_.gsub(/\|2\|/, '|Moderate|') + csv_doc_ = csv_doc_.gsub(/\|3\|/, '|High|') + csv_doc_ = csv_doc_.gsub(/\|4\|/, '|Critical|') + local_filename = "./tmp/#{rand(36**12).to_s(36)}.csv" + File.open(local_filename, 'w') { |f| f.write(csv_doc_) } + send_file local_filename, type: 'txt', filename: "report_#{id}_findings.csv" end # generate a presentation of current report get 
'/report/:id/presentation' do # check the user has installed reveal - if !(File.directory?(Dir.pwd+"/public/reveal.js")) + unless File.directory?(Dir.pwd + '/public/reveal.js') return "reveal.js not found in /public/ directory. To install:

1. Go to [INSTALL_DIR]/public/
2. Run 'git clone https://github.com/hakimel/reveal.js.git'
3. Restart Serpico" end @@ -1643,158 +1527,151 @@ @report = get_report(id) # bail without a report - redirect to("/") unless @report - + redirect to('/') unless @report - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) # add images into presentations @images = [] @findings.each do |find| - if find.presentation_points - find.presentation_points.to_s.split("").each do |pp| - a = {} - next unless pp =~ /\[\!\!/ - img = pp.split("[!!")[1].split("!!]").first - a["name"] = img - if Attachments.first( :description => img) - img_p = Attachments.first( :description => img) - else - return "attachment #{img} from vulnerability #{find.title} doesn't exist. Did you mistype something?" - end - a["link"] = "/report/#{id}/attachments/"+img_p.id.to_s - @images.push(a) + next unless find.presentation_points + find.presentation_points.to_s.split('').each do |pp| + a = {} + next unless pp =~ /\[\!\!/ + img = pp.split('[!!')[1].split('!!]').first + a['name'] = img + if Attachments.first(description: img) + img_p = Attachments.first(description: img) + else + return "attachment #{img} from vulnerability #{find.title} doesn't exist. Did you mistype something?" end + a['link'] = "/report/#{id}/attachments/" + img_p.id.to_s + @images.push(a) end end - haml :presentation, :encode_html => true, :layout => false + haml :presentation, encode_html: true, layout: false end # export presentation of current report in html format, inside a zip - get '/report/:id/presentation_export' do - # check the user has installed reveal - if !(File.directory?(Dir.pwd+"/public/reveal.js")) +get '/report/:id/presentation_export' do + # check the user has installed reveal + unless File.directory?(Dir.pwd + '/public/reveal.js') return "reveal.js not found in /public/ directory. To install:

1. Go to [INSTALL_DIR]/public/
2. Run 'git clone https://github.com/hakimel/reveal.js.git'
3. Restart Serpico" sleep(30) - redirect to("/") + redirect to('/') end - id = params[:id] + id = params[:id] - @report = get_report(id) + @report = get_report(id) - # bail without a report - redirect to("/") unless @report + # bail without a report + redirect to('/') unless @report + @findings, @dread, @cvss, @cvssv3, @risk, @riskmatrix = get_scoring_findings(@report) - @findings,@dread,@cvss,@cvssv3,@risk,@riskmatrix = get_scoring_findings(@report) + # add images into presentations + @images = [] + @findings.each do |find| + next unless find.presentation_points + find.presentation_points.to_s.split('').each do |pp| + a = {} + next unless pp =~ /\[\!\!/ + img = pp.split('[!!')[1].split('!!]').first + a['name'] = img + if Attachments.first(description: img) + img_p = Attachments.first(description: img) + else + return "attachment #{img} from vulnerability #{find.title} doesn't exist. Did you mistype something?" + end + a['link'] = "/report/#{id}/attachments/#{img_p.id}" + @images.push(a) + end + end - # add images into presentations - @images = [] - @findings.each do |find| - if find.presentation_points - find.presentation_points.to_s.split("").each do |pp| - a = {} - next unless pp =~ /\[\!\!/ - img = pp.split("[!!")[1].split("!!]").first - a["name"] = img - if Attachments.first( :description => img) - img_p = Attachments.first( :description => img) + # create html file from haml template + template = File.read(Dir.pwd + '/views/presentation.haml') + haml_engine = Haml::Engine.new(template) + output = haml_engine.render(Object.new, :@report => @report, :@findings => @findings, :@dread => @dread, :@cvss => @cvss, :@cvss3 => @cvss3, :@riskmatrix => @riskmatrix, :@images => @images) + rand_file = Dir.pwd + "/tmp/#{rand(36**12).to_s(36)}.html" + newHTML = Nokogiri::HTML(output) + + # Each link inside the HTML file is considered as a dependency that will need to be fixed to a relative local path + dependencies = [] + + # fix href and src based links in the html to relative local URL. This should cover most of the use cases. + newHTML.css('[href]').each do |el| + if el.attribute('href').to_s[1, 6] != 'report' && !(dependencies.include? el.attribute('href').to_s[1..-1]) + dependencies.push(el.attribute('href').to_s[1..-1]) + end + el.set_attribute('href', '.' + el.attribute('href')) + end + + newHTML.css('[src]').each do |el| + if el.attribute('src').to_s[1, 6] != 'report' && !(dependencies.include? el.attribute('src').to_s[1..-1]) + dependencies.push(el.attribute('src').to_s[1..-1]) + end + el.set_attribute('src', '.' + el.attribute('src')) + end + + # *slightly ugly* way to fix links in the HTML that aren't in a href or src (for exemple in javascript) + htmlDoc = newHTML.to_html + # the regex match stuff like '/img/reveal.js/foo/lib.js', "/css/reveal.js/theme/special.css" + link = htmlDoc[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/, 2] + until link.nil? + dependencies.push(link[1..-1]) unless dependencies.include? link[1..-1] + htmlDoc[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/, 2] = ".#{link}" + link = htmlDoc[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/, 2] + end + + # save html with links fixed to a relative local path + File.open(rand_file_name, 'w') do |f| + f.write htmlDoc + end + + rand_zip = Dir.pwd + "/tmp/#{rand(36**12).to_s(36)}.zip" + + # put the presentation and its dependencies (links, images, libraries...) 
in a zip file + Zip.setup do |c| + c.on_exists_proc = true + c.continue_on_exists_proc = true + end + Zip::File.open(rand_zip, Zip::File::CREATE) do |zipfile| + zipfile.add('presentation.html', rand_file_name) + + # put the public directory in the zip file. + list_public_file = Dir.glob(Dir.pwd + '/public/**/*') + list_public_file.each do |file| + # don't add directory or .git files in the zip + if file['.git'].nil? && File.file?(file) + # if file is .js or .css, check if it has dependencies that needs to be fixed to relative local path + if !file[/\.(js|css)$/].nil? + file_content = File.read(file) + until link.nil? + file_content[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/, 2] = ".#{link}" + link = file_content[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/, 2] + end + rand_temp_file = Dir.pwd + "/tmp/#{rand(36**12).to_s(36)}.tmp" + File.open(rand_temp_file, 'w') do |f| + f.write file_content + end + # remove Serpico/public from the file path and put it in the zip + zipfile.add(file[(Dir.pwd + '/public/').length..-1], rand_temp_file) else - return "attachment #{img} from vulnerability #{find.title} doesn't exist. Did you mistype something?" + # remove Serpico/public from the file path and put it in the zip + zipfile.add(file[(Dir.pwd + '/public/').length..-1], file) end - a["link"] = "/report/#{id}/attachments/#{img_p.id}" - @images.push(a) - end - end - end - - # create html file from haml template - template = File.read(Dir.pwd+"/views/presentation.haml") - haml_engine = Haml::Engine.new(template) - output = haml_engine.render(Object.new, {:@report => @report, :@findings => @findings, :@dread => @dread, :@cvss => @cvss, :@cvss3 => @cvss3, :@riskmatrix => @riskmatrix, :@images => @images}) - rand_file = Dir.pwd+"/tmp/#{rand(36**12).to_s(36)}.html" - newHTML = Nokogiri::HTML(output) - - # Each link inside the HTML file is considered as a dependency that will need to be fixed to a relative local path - dependencies = [] - - # fix href and src based links in the html to relative local URL. This should cover most of the use cases. - newHTML.css('[href]').each do |el| - if el.attribute('href').to_s[1, 6] != "report" && !(dependencies.include? el.attribute('href').to_s[1..-1]) - dependencies.push(el.attribute('href').to_s[1..-1]) - end - el.set_attribute('href', '.' + el.attribute('href')) - end - - newHTML.css('[src]').each do |el| - if el.attribute('src').to_s[1, 6] != "report" && !(dependencies.include? el.attribute('src').to_s[1..-1]) - dependencies.push(el.attribute('src').to_s[1..-1]) - end - el.set_attribute('src', '.' + el.attribute('src')) - end - - # *slightly ugly* way to fix links in the HTML that aren't in a href or src (for exemple in javascript) - htmlDoc = newHTML.to_html - # the regex match stuff like '/img/reveal.js/foo/lib.js', "/css/reveal.js/theme/special.css" - link = htmlDoc[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/,2] - while link != nil do - if !dependencies.include? link[1..-1] - dependencies.push(link[1..-1]) - end - htmlDoc[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/,2]= ".#{link}" - link = htmlDoc[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/,2] - end - - # save html with links fixed to a relative local path - File.open(rand_file, 'w') do |f| - f.write htmlDoc - end - - - rand_zip = Dir.pwd+"/tmp/#{rand(36**12).to_s(36)}.zip" - - # put the presentation and its dependencies (links, images, libraries...) 
in a zip file - Zip.setup do |c| - c.on_exists_proc = true - c.continue_on_exists_proc = true - end - Zip::File.open(rand_zip, Zip::File::CREATE) do |zipfile| - zipfile.add("presentation.html", rand_file) - - # put the public directory in the zip file. - list_public_file = Dir.glob(Dir.pwd+"/public/**/*") - list_public_file.each do |file| - # don't add directory or .git files in the zip - if file[".git"] == nil && File.file?(file) - # if file is .js or .css, check if it has dependencies that needs to be fixed to relative local path - if file[/\.(js|css)$/] != nil - file_content = File.read(file) - while link != nil - file_content[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/,2]= ".#{link}" - link = file_content[/(\'|\")(\/(img|js|css|reveal\.js|fonts)\/(\S*\/)*\S*\.\S*)(\'|\")/,2] - end - rand_temp_file = Dir.pwd+"/tmp/#{rand(36**12).to_s(36)}.tmp" - File.open(rand_temp_file, 'w') do |f| - f.write file_content - end - # remove Serpico/public from the file path and put it in the zip - zipfile.add(file[(Dir.pwd+"/public/").length..-1], rand_temp_file) - else - # remove Serpico/public from the file path and put it in the zip - zipfile.add(file[(Dir.pwd+"/public/").length..-1], file) - end - end - end - # put attachements in the zip - @images.each do | images| - img_p = Attachments.first( :description => images["name"]) - zipfile.add("report/#{id}/attachments/#{img_p.id}" , img_p.filename_location) - end - end - - send_file rand_zip, :type => 'zip', :filename => "#{@report.report_name}.zip" + end + end + # put attachements in the zip + @images.each do |images| + img_p = Attachments.first(description: images['name']) + zipfile.add("report/#{id}/attachments/#{img_p.id}", img_p.filename_location) + end + end + + send_file rand_zip, type: 'zip', filename: "#{@report.report_name}.zip" end # set msf rpc settings for report @@ -1803,12 +1680,12 @@ @report = get_report(id) # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report - @vulnmap = config_options["vulnmap"] - @msfsettings = RemoteEndpoints.first(:report_id => id) + @vulnmap = config_options['vulnmap'] + @msfsettings = RemoteEndpoints.first(report_id: id) - haml :msfsettings, :encode_html => true + haml :msfsettings, encode_html: true end # set msf rpc settings for report @@ -1817,25 +1694,23 @@ @report = get_report(id) # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report - if !config_options["vulnmap"] - return "Metasploit integration not enabled" - end + return 'Metasploit integration not enabled' unless config_options['vulnmap'] - msfsettings = RemoteEndpoints.first(:report_id => id) + msfsettings = RemoteEndpoints.first(report_id: id) if msfsettings - msfsettings.update(:ip => params[:ip], :port => params[:port], :workspace => params[:workspace], :user => params[:user], :pass => params[:pass]) + msfsettings.update(ip: params[:ip], port: params[:port], workspace: params[:workspace], user: params[:user], pass: params[:pass]) else msfsettings = RemoteEndpoints.new - msfsettings["report_id"] = @report.id - msfsettings["ip"] = params[:ip] - msfsettings["port"] = params[:port] - msfsettings["type"] = "msfrpc" - msfsettings["workspace"] = params[:workspace] - msfsettings["user"] = params[:user] - msfsettings["pass"] = params[:pass] + msfsettings['report_id'] = @report.id + msfsettings['ip'] = params[:ip] + msfsettings['port'] = params[:port] + msfsettings['type'] = 'msfrpc' + msfsettings['workspace'] = params[:workspace] + msfsettings['user'] = 
params[:user] + msfsettings['pass'] = params[:pass] msfsettings.save end @@ -1846,88 +1721,88 @@ get '/report/:id/hosts' do id = params[:id] @report = get_report(id) - @vulnmap = config_options["vulnmap"] + @vulnmap = config_options['vulnmap'] # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report - msfsettings = RemoteEndpoints.first(:report_id => id) - if !msfsettings + msfsettings = RemoteEndpoints.first(report_id: id) + unless msfsettings return "You need to setup a metasploit RPC connection to use this feature. Do so here" end - #setup msfrpc handler + # setup msfrpc handler rpc = msfrpc(@report.id) if rpc == false - return "ERROR: Connection to metasploit failed. Make sure you have msfprcd running and the settings in Serpico are correct." + return 'ERROR: Connection to metasploit failed. Make sure you have msfprcd running and the settings in Serpico are correct.' end # get hosts from msf db res = rpc.call('console.create') rpc.call('db.set_workspace', msfsettings.workspace) - res = rpc.call('db.hosts', {:limit => 10000}) - @hosts = res["hosts"] + res = rpc.call('db.hosts', limit: 10_000) + @hosts = res['hosts'] - haml :dbhosts, :encode_html => true + haml :dbhosts, encode_html: true end # display services from msf db get '/report/:id/services' do id = params[:id] @report = get_report(id) - @vulnmap = config_options["vulnmap"] + @vulnmap = config_options['vulnmap'] # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report - msfsettings = RemoteEndpoints.first(:report_id => id) - if !msfsettings + msfsettings = RemoteEndpoints.first(report_id: id) + unless msfsettings return "You need to setup a metasploit RPC connection to use this feature. Do so here" end - #setup msfrpc handler + # setup msfrpc handler rpc = msfrpc(@report.id) if rpc == false - return "ERROR: Connection to metasploit failed. Make sure you have msfprcd running and the settings in Serpico are correct." + return 'ERROR: Connection to metasploit failed. Make sure you have msfprcd running and the settings in Serpico are correct.' end # get hosts from msf db res = rpc.call('console.create') rpc.call('db.set_workspace', msfsettings.workspace) - #onlyup undocumented but it does exist - res = rpc.call('db.services', {:limit => 10000, :only_up => true} ) - @services = res["services"] + # onlyup undocumented but it does exist + res = rpc.call('db.services', limit: 10_000, only_up: true) + @services = res['services'] - haml :dbservices, :encode_html => true + haml :dbservices, encode_html: true end # display vulns from msf db get '/report/:id/vulns' do id = params[:id] @report = get_report(id) - @vulnmap = config_options["vulnmap"] + @vulnmap = config_options['vulnmap'] # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report - msfsettings = RemoteEndpoints.first(:report_id => id) - if !msfsettings + msfsettings = RemoteEndpoints.first(report_id: id) + unless msfsettings return "You need to setup a metasploit RPC connection to use this feature. Do so here" end # setup msfrpc handler rpc = msfrpc(@report.id) if rpc == false - return "connection to MSF RPC deamon failed. Make sure you have msfprcd running and the settings in Serpico are correct." + return 'connection to MSF RPC deamon failed. Make sure you have msfprcd running and the settings in Serpico are correct.' 
end # get vulns from msf db res = rpc.call('console.create') rpc.call('db.set_workspace', msfsettings.workspace) - res = rpc.call('db.vulns', {:limit => 10000}) - @vulns = res["vulns"] + res = rpc.call('db.vulns', limit: 10_000) + @vulns = res['vulns'] - haml :dbvulns, :encode_html => true + haml :dbvulns, encode_html: true end # autoadd vulns from msf db @@ -1936,59 +1811,53 @@ @report = get_report(id) # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report - if @report == nil - return "No Such Report" - end + return 'No Such Report' if @report.nil? - if not config_options["vulnmap"] - return "Metasploit integration not enabled." - end + return 'Metasploit integration not enabled.' unless config_options['vulnmap'] - add_findings = Array.new - dup_findings = Array.new - autoadd_hosts = Hash.new + add_findings = [] + dup_findings = [] + autoadd_hosts = {} # load msf settings - msfsettings = RemoteEndpoints.first(:report_id => id) - if !msfsettings + msfsettings = RemoteEndpoints.first(report_id: id) + unless msfsettings return "You need to setup a metasploit RPC connection to use this feature. Do so here" end # setup msfrpc handler rpc = msfrpc(@report.id) if rpc == false - return "connection to MSF RPC deamon failed. Make sure you have msfprcd running and the settings in Serpico are correct." + return 'connection to MSF RPC deamon failed. Make sure you have msfprcd running and the settings in Serpico are correct.' end # determine findings to add from vuln data vulns = get_vulns_from_msf(rpc, msfsettings.workspace) # load all findings - @findings = TemplateFindings.all(:order => [:title.asc]) + @findings = TemplateFindings.all(order: [:title.asc]) # determine findings to add from vuln data # host/ip is key, value is array of vuln ids vulns.keys.each do |i| vulns[i].each do |v| - # if serpico finding id maps to a ref from MSF vuln db, add to report - @mappings = VulnMappings.all(:msf_ref => v) + @mappings = VulnMappings.all(msf_ref: v) # add affected hosts for each finding - if (@mappings) - @mappings.each do |m| - if autoadd_hosts[m.templatefindings_id] - # only one host/url per finding (regardless of ports and urls). this should change in the future - if not autoadd_hosts[m.templatefindings_id].include?(i) - autoadd_hosts[m.templatefindings_id] << i - end - else - autoadd_hosts[m.templatefindings_id] = [] + next unless @mappings + @mappings.each do |m| + if autoadd_hosts[m.templatefindings_id] + # only one host/url per finding (regardless of ports and urls). this should change in the future + unless autoadd_hosts[m.templatefindings_id].include?(i) autoadd_hosts[m.templatefindings_id] << i end - add_findings << m.templatefindings_id + else + autoadd_hosts[m.templatefindings_id] = [] + autoadd_hosts[m.templatefindings_id] << i end + add_findings << m.templatefindings_id end end end @@ -1997,32 +1866,29 @@ # create new findings from an import # TODO: This will duplicate if the user already has a nessus id mapped - if config_options["auto_import"] - p "auto_import function not supported with MSF intergration" + if config_options['auto_import'] + p 'auto_import function not supported with MSF intergration' end - if add_findings.size == 0 + if add_findings.empty? 
redirect to("/report/#{id}/findings") else @autoadd = true add_findings.each do |finding| # if the finding already exists in the report dont add - currentfindings = Findings.all(:report_id => id) + currentfindings = Findings.all(report_id: id) currentfindings.each do |cf| - if cf.master_id == finding.to_i - if not dup_findings.include?(finding.to_i) - dup_findings << finding.to_i - end - add_findings.delete(finding.to_i) - end + next unless cf.master_id == finding.to_i + dup_findings << finding.to_i unless dup_findings.include?(finding.to_i) + add_findings.delete(finding.to_i) end end @autoadd_hosts = autoadd_hosts @dup_findings = dup_findings.uniq @autoadd_findings = add_findings end - haml :findings_add, :encode_html => true + haml :findings_add, encode_html: true end # get enabled plugins @@ -2031,19 +1897,18 @@ @report = get_report(id) # bail without a report - redirect to("/") unless @report + redirect to('/') unless @report @menu = [] - Dir[File.join(File.dirname(__FILE__), "../plugins/**/", "*.json")].each { |lib| + Dir[File.join(File.dirname(__FILE__), '../plugins/**/', '*.json')].each do |lib| pl = JSON.parse(File.open(lib).read) a = {} - if pl["enabled"] and pl["report_view"] - # add the plugin to the menu - a["name"] = pl["name"] - a["description"] = pl["description"] - a["link"] = pl["link"] - @menu.push(a) - end - } - haml :enabled_plugins, :encode_html => true + next unless pl['enabled'] && pl['report_view'] + # add the plugin to the menu + a['name'] = pl['name'] + a['description'] = pl['description'] + a['link'] = pl['link'] + @menu.push(a) + end + haml :enabled_plugins, encode_html: true end diff --git a/scripts/first_time.rb b/scripts/first_time.rb index 0c2d06c3..1fb94087 100644 --- a/scripts/first_time.rb +++ b/scripts/first_time.rb @@ -1,5 +1,6 @@ require './model/master.rb' -require './helpers/xslt_generation' +require './helpers/docx_xslt_generation' +require './helpers/xslx_xslt_generation' require 'openssl' require 'json' @@ -7,205 +8,228 @@ # If there are no users, create a first user if !userx - puts "No users in the database, creating a first user. \n" + puts "No users in the database, creating a first user. \n" - puts "Please enter username (default: administrator): " - username = gets.chomp - username = "administrator" if username == "" + puts 'Please enter username (default: administrator): ' + username = gets.chomp + username = 'administrator' if username == '' - puts "Generating random password and adding the Administrator with username #{username}..." + puts "Generating random password and adding the Administrator with username #{username}..." - password = rand(36**10).to_s(36) + password = rand(36**10).to_s(36) - exists = User.first(:username => username) + exists = User.first(username: username) - if exists - puts "That username already exists. Please use reset_pw.rb to reset a password" - else - user = User.new - user.username = username - user.password = password - user.type = "Administrator" - user.auth_type = "Local" - user.save + if exists + puts 'That username already exists. 
Please use reset_pw.rb to reset a password' + else + user = User.new + user.username = username + user.password = password + user.type = 'Administrator' + user.auth_type = 'Local' + user.save - puts "Please use the following login credentials" - puts "\t \t \t **** #{username} : #{password} ****" + puts 'Please use the following login credentials' + puts "\t \t \t **** #{username} : #{password} ****" - end + end else - puts "Skipping username creation (users exist), please use the create_user.rb script to add a user." + puts 'Skipping username creation (users exist), please use the create_user.rb script to add a user.' end -puts "Would you like to initialize the database with templated findings? (Y/n)" +puts 'Would you like to initialize the database with templated findings? (Y/n)' find_i = gets.chomp -if (find_i == "" or find_i.downcase == "y" or find_i.downcase == "yes") - puts "Importing Templated Findings template_findings.json..." +if (find_i == '') || find_i.casecmp('y').zero? || find_i.casecmp('yes').zero? + puts 'Importing Templated Findings template_findings.json...' - file = File.new('./templates/template_findings.json',"rb") - json = "" - while(line_j = file.gets) - json = json + line_j - end - line = JSON.parse(json) + file = File.new('./templates/template_findings.json', 'rb') + json = '' + while (line_j = file.gets) + json += line_j + end + line = JSON.parse(json) - line.each do |j| - j["id"] = nil + line.each do |j| + j['id'] = nil - finding = TemplateFindings.first(:title => j["title"]) + finding = TemplateFindings.first(title: j['title']) - j["approved"] = true - f = TemplateFindings.first_or_create(j) - f.save - end + j['approved'] = true + f = TemplateFindings.first_or_create(j) + f.save + end else - puts "Skipping templated finding import. Use the UI to import templated findings." + puts 'Skipping templated finding import. Use the UI to import templated findings.' end # add the Default templates into the DB -templates = Xslt.first +templates = DocxXslts.first if !templates - puts "Adding the Default Generic Risk Scoring Report Template" - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - docx = "./templates/Serpico - GenericRiskScoring.docx" - - xslt = generate_xslt(docx) - if xslt =~ /Error file DNE/ - return "ERROR!!!!!!" - end - - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } - - # delete the file data from the attachment - datax = Hash.new - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = "Generic Risk Scoring Report" - datax["report_type"] = "Default Template - Generic Risk Scoring" - report = Xslt.new(datax) - report.save - - puts "Adding the Default DREAD Report Template" - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - docx = "./templates/Serpico - Report.docx" - - xslt = generate_xslt(docx) - if xslt =~ /Error file DNE/ - return "ERROR!!!!!!" 
+ puts 'Adding the Default Excel Generic Summary Template' + excel = './templates/Serpico - Summary Generic.xslx' + + xslt_elements = generate_excel_xslt(excel) + xslt_shared_strings_location = "./templates/#{rand(36**36).to_s(36)}.xslt" + xslt_worksheet = {} + xslt_elements.each do |path_in_excel, xslt_element| + if path_in_excel == 'xl/sharedStrings.xml' + File.open(xslt_shared_strings_location, 'wb') { |f| f.write(xslt_element) } + else + xslt_worksheet_element_location = "./templates/#{rand(36**36).to_s(36)}.xslt" + File.open(xslt_worksheet_element_location, 'wb') { |f| f.write(xslt_element) } + xslt_worksheet[path_in_excel] = xslt_worksheet_element_location + end end - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } - - # delete the file data from the attachment - datax = Hash.new - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = "Default Serpico Report - DREAD Scoring" - datax["report_type"] = "Default Template - DREAD Scoring" - report = Xslt.new(datax) - report.save - - puts "Adding the Default CVSS Report Template" - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - docx = "./templates/CVSS_Template.docx" - - xslt = generate_xslt(docx) - if xslt =~ /Error file DNE/ - return "ERROR!!!!!!" - end + datax = {} + datax['excel_location'] = excel.to_s + datax['xslt_shared_strings_location'] = xslt_shared_strings_location.to_s + datax['xslt_sheet_locations'] = xslt_worksheet.to_json + datax['description'] = 'Excel Generic Scoring Summary' + datax['template_title'] = 'Default Excel Summary - Generic Risk Scoring' + datax['template_type'] = 'Excel - Summary Template' + report = ExcelXslts.new(datax) + report.save - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } - # delete the file data from the attachment - datax = Hash.new - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = "Default CVSS Report" - datax["report_type"] = "Default CVSS Report" - report = Xslt.new(datax) - report.save - puts "Adding the Default CVSSv3 Report Template" - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - docx = "./templates/Default CVSS 3 Report.docx" - xslt = generate_xslt(docx) - if xslt =~ /Error file DNE/ - return "ERROR!!!!!!" - end - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } - # delete the file data from the attachment - datax = Hash.new - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = "Default CVSSv3 Report" - datax["report_type"] = "Default CVSSv3 Report" - report = Xslt.new(datax) - report.save + puts 'Adding the Default Generic Risk Scoring Report Template' + xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" + docx = './templates/Serpico - GenericRiskScoring.docx' + + xslt = generate_docx_xslt(docx) + return 'ERROR!!!!!!' 
if xslt =~ /Error file DNE/ + + # open up a file handle and write the attachment + File.open(xslt_file, 'wb') { |f| f.write(xslt) } - puts "Adding the Serpico Default Finding Template" + # delete the file data from the attachment + datax = {} + datax['docx_location'] = docx.to_s + datax['xslt_location'] = xslt_file.to_s + datax['description'] = 'Generic Risk Scoring Report' + datax['template_title'] = 'Default Template - Generic Risk Scoring' + datax['template_type'] = 'Word - Report Template' + report = DocxXslts.new(datax) + report.save + + puts 'Adding the Default DREAD Report Template' + xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" + docx = './templates/Serpico - Report.docx' - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - docx = "./templates/Serpico - Risk Finding.docx" + xslt = generate_docx_xslt(docx) + return 'ERROR!!!!!!' if xslt =~ /Error file DNE/ - xslt = generate_xslt(docx) - if xslt =~ /Error file DNE/ - return "ERROR!!!!!!" - end + # open up a file handle and write the attachment + File.open(xslt_file, 'wb') { |f| f.write(xslt) } - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } + # delete the file data from the attachment + datax = {} + datax['docx_location'] = docx.to_s + datax['xslt_location'] = xslt_file.to_s + datax['description'] = 'Default Serpico Report - DREAD Scoring' + datax['template_title'] = 'Default Template - DREAD Scoring' + datax['template_type'] = 'Word - Report Template' + report = DocxXslts.new(datax) + report.save - # delete the file data from the attachment - datax = Hash.new - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = "Default Serpico Finding" - datax["report_type"] = "Default Finding" - datax["finding_template"] = true - report = Xslt.new(datax) - report.save + puts 'Adding the Default CVSS Report Template' + xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" + docx = './templates/CVSS_Template.docx' - puts "Adding the Serpico Default Status Template" + xslt = generate_docx_xslt(docx) + return 'ERROR!!!!!!' if xslt =~ /Error file DNE/ - xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" - docx = "./templates/Serpico - Finding.docx" + # open up a file handle and write the attachment + File.open(xslt_file, 'wb') { |f| f.write(xslt) } - xslt = generate_xslt(docx) - if xslt =~ /Error file DNE/ - return "ERROR!!!!!!" - end + # delete the file data from the attachment + datax = {} + datax['docx_location'] = docx.to_s + datax['xslt_location'] = xslt_file.to_s + datax['description'] = 'Default CVSS Report' + datax['template_title'] = 'Default CVSS Report' + datax['template_type'] = 'Word - Report Template' + report = DocxXslts.new(datax) + report.save + + puts 'Adding the Default CVSSv3 Report Template' + xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" + docx = './templates/Default CVSS 3 Report.docx' - # open up a file handle and write the attachment - File.open(xslt_file, 'wb') {|f| f.write(xslt) } + xslt = generate_docx_xslt(docx) + return 'ERROR!!!!!!' 
if xslt =~ /Error file DNE/ - # delete the file data from the attachment - datax = Hash.new - datax["docx_location"] = "#{docx}" - datax["xslt_location"] = "#{xslt_file}" - datax["description"] = "Default Serpico Status" - datax["report_type"] = "Default Status" - datax["status_template"] = true - report = Xslt.new(datax) - report.save + # open up a file handle and write the attachment + File.open(xslt_file, 'wb') { |f| f.write(xslt) } + + # delete the file data from the attachment + datax = {} + datax['docx_location'] = docx.to_s + datax['xslt_location'] = xslt_file.to_s + datax['description'] = 'Default CVSSv3 Report' + datax['template_title'] = 'Default CVSSv3 Report' + datax['template_type'] = 'Word - Report Template' + report = DocxXslts.new(datax) + report.save + + puts 'Adding the Serpico Default Finding Template' + + xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" + docx = './templates/Serpico - Risk Finding.docx' + + xslt = generate_docx_xslt(docx) + return 'ERROR!!!!!!' if xslt =~ /Error file DNE/ + + # open up a file handle and write the attachment + File.open(xslt_file, 'wb') { |f| f.write(xslt) } + + # delete the file data from the attachment + datax = {} + datax['docx_location'] = docx.to_s + datax['xslt_location'] = xslt_file.to_s + datax['description'] = 'Default Serpico Finding' + datax['template_title'] = 'Default Finding' + datax['template_type'] = 'Word - Finding Template' + report = DocxXslts.new(datax) + report.save + + puts 'Adding the Serpico Default Status Template' + + xslt_file = "./templates/#{rand(36**36).to_s(36)}.xslt" + docx = './templates/Serpico - Finding.docx' + + xslt = generate_docx_xslt(docx) + return 'ERROR!!!!!!' if xslt =~ /Error file DNE/ + + # open up a file handle and write the attachment + File.open(xslt_file, 'wb') { |f| f.write(xslt) } + + # delete the file data from the attachment + datax = {} + datax['docx_location'] = docx.to_s + datax['xslt_location'] = xslt_file.to_s + datax['description'] = 'Default Serpico Status' + datax['template_title'] = 'Default Status' + datax['template_type'] = 'Word - Status Template' + report = DocxXslts.new(datax) + report.save else - puts "Skipping XSLT creation, templates exist." + puts 'Skipping XSLT creation, templates exist.' end # create the SSL cert -puts "Creating self-signed SSL certificate, you should really have a legitimate one." +puts 'Creating self-signed SSL certificate, you should really have a legitimate one.' -name = "/C=US/ST=MD/L=MD/O=MD/CN=serpico" +name = '/C=US/ST=MD/L=MD/O=MD/CN=serpico' ca = OpenSSL::X509::Name.parse(name) key = OpenSSL::PKey::RSA.new(1024) @@ -222,24 +246,23 @@ ef.subject_certificate = crt ef.issuer_certificate = crt crt.extensions = [ -ef.create_extension("basicConstraints","CA:TRUE", true), -ef.create_extension("subjectKeyIdentifier", "hash"), + ef.create_extension('basicConstraints', 'CA:TRUE', true), + ef.create_extension('subjectKeyIdentifier', 'hash') ] -crt.add_extension ef.create_extension("authorityKeyIdentifier", -"keyid:always,issuer:always") +crt.add_extension ef.create_extension('authorityKeyIdentifier', + 'keyid:always,issuer:always') crt.sign key, OpenSSL::Digest::SHA1.new -File.open("./cert.pem", "w") do |f| +File.open('./cert.pem', 'w') do |f| f.write crt.to_pem end -File.open("./key.pem", "w") do |f| +File.open('./key.pem', 'w') do |f| f.write key.to_pem end # Copying the default configurations over -puts "Copying configuration settings over." 
-File.open("./config.json", "w") do |f| - f.write File.open("./config.json.defaults", "rb").read +puts 'Copying configuration settings over.' +File.open('./config.json', 'w') do |f| + f.write File.open('./config.json.defaults', 'rb').read end - diff --git a/serpico.rb b/serpico.rb index 4713efae..6101f30c 100644 --- a/serpico.rb +++ b/serpico.rb @@ -1,31 +1,29 @@ -require "bundler/setup" +require 'bundler/setup' require 'webrick/https' require 'openssl' require 'json' -require "./server.rb" +require './server.rb' config_options = JSON.parse(File.read('./config.json')) - ## SSL Settings -ssl_certificate = config_options["ssl_certificate"] -ssl_key = config_options["ssl_key"] -use_ssl = config_options["use_ssl"] -port = config_options["port"] -bind_address = config_options["bind_address"] +ssl_certificate = config_options['ssl_certificate'] +ssl_key = config_options['ssl_key'] +use_ssl = config_options['use_ssl'] +port = config_options['port'] +bind_address = config_options['bind_address'] server_options = { - :Port => port, - :Host => bind_address, + Port: port, + Host: bind_address } -if config_options["show_exceptions"].to_s.downcase == "false" or (not config_options["show_exceptions"]) - puts "|+| [#{DateTime.now.strftime("%d/%m/%Y %H:%M")}] Sending Webrick logging to /dev/null.." +if config_options['show_exceptions'].to_s.casecmp('false').zero? || !(config_options['show_exceptions']) + puts "|+| [#{DateTime.now.strftime('%d/%m/%Y %H:%M')}] Sending Webrick logging to /dev/null.." server_options[:Logger] = WEBrick::Log.new(File.open(File::NULL, 'w')) server_options[:AccessLog] = [] end - -if (use_ssl) then +if use_ssl certificate_content = File.open(ssl_certificate).read key_content = File.open(ssl_key).read server_options[:SSLEnable] = true @@ -33,19 +31,19 @@ server_options[:SSLPrivateKey] = OpenSSL::PKey::RSA.new(key_content) server_options[:SSLVerifyClient] = OpenSSL::SSL::VERIFY_NONE - if(config_options.key?("ssl_ciphers")) - CIPHERS = config_options["ssl_ciphers"] - puts "|+| Ciphers:"+CIPHERS.join(",") + if config_options.key?('ssl_ciphers') + CIPHERS = config_options['ssl_ciphers'] + puts '|+| Ciphers:' + CIPHERS.join(',') else - # SSL Ciphers - CIPHERS = ['ECDHE-RSA-AES128-GCM-SHA256','ECDHE-RSA-AES256-GCM-SHA384', - 'ECDHE-RSA-AES128-CBC-SHA','ECDHE-RSA-AES256-CBC-SHA', - 'AES128-GCM-SHA256','AES256-GCM-SHA384','AES128-SHA256', - 'AES256-SHA256','AES128-SHA','AES256-SHA'] + # SSL Ciphers + CIPHERS = ['ECDHE-RSA-AES128-GCM-SHA256', 'ECDHE-RSA-AES256-GCM-SHA384', + 'ECDHE-RSA-AES128-CBC-SHA', 'ECDHE-RSA-AES256-CBC-SHA', + 'AES128-GCM-SHA256', 'AES256-GCM-SHA384', 'AES128-SHA256', + 'AES256-SHA256', 'AES128-SHA', 'AES256-SHA'].freeze end server_options[:Ciphers] = CIPHERS end -Rack::Handler::WEBrick.run Server, server_options \ No newline at end of file +Rack::Handler::WEBrick.run Server, server_options diff --git a/server.rb b/server.rb index daa9aff6..c525be49 100644 --- a/server.rb +++ b/server.rb @@ -13,18 +13,18 @@ class Server < Sinatra::Application set :config_options, config_options ## Global variables - set :finding_types, config_options["finding_types"] - set :assessment_types, ["External", "Internal", "Internal/External", "Wireless", "Web Application", "DoS"] - set :status, ["EXPLOITED"] - set :show_exceptions, config_options["show_exceptions"] + set :finding_types, config_options['finding_types'] + set :assessment_types, ['External', 'Internal', 'Internal/External', 'Wireless', 'Web Application', 'DoS'] + set :status, ['EXPLOITED'] + set :show_exceptions, 
config_options['show_exceptions'] - if config_options["effort"] - set :effort, config_options["effort"] + if config_options['effort'] + set :effort, config_options['effort'] else - set :effort, ["Quick","Planned","Involved"] + set :effort, %w[Quick Planned Involved] end - if config_options["show_exceptions"].to_s.downcase == "false" or (not config_options["show_exceptions"]) + if config_options['show_exceptions'].to_s.casecmp('false').zero? || !(config_options['show_exceptions']) configure do disable :logging set :set_logging, nil @@ -33,69 +33,69 @@ class Server < Sinatra::Application set :logger_out, nil end - server_log("Using Serpico only logging ..") + server_log('Using Serpico only logging ..') end - #Set Logging - if(config_options["log_file"] != "") - log = File.new(config_options["log_file"], "a+") + # Set Logging + if config_options['log_file'] != '' + log = File.new(config_options['log_file'], 'a+') set :logger_out, log - server_log("Logging set to #{config_options["log_file"]}") + server_log("Logging set to #{config_options['log_file']}") end - #Set Alignment - if(config_options["image_align"] == "") - set :alignment, "center" + # Set Alignment + if config_options['image_align'] == '' + set :alignment, 'center' else - set :alignment, config_options["image_align"] + set :alignment, config_options['image_align'] end # CVSS - set :av, ["Local","Adjacent Network","Network"] - set :ac, ["High","Medium","Low"] - set :au, ["Multiple","Single","None"] - set :c, ["None","Partial","Complete"] - set :i, ["None","Partial","Complete"] - set :a, ["None","Partial","Complete"] - set :e, ["Not Defined","Unproven Exploit Exists","Proof-of-Concept Code","Functional Exploit Exists","High"] - set :rl, ["Not Defined","Official Fix","Temporary Fix","Workaround","Unavailable"] - set :rc, ["Not Defined","Unconfirmed","Uncorroborated","Confirmed"] - set :cdp, ["Not Defined","None","Low","Low-Medium","Medium-High","High"] - set :td, ["Not Defined","None","Low","Medium","High"] - set :cr, ["Not Defined","Low","Medium","High"] - set :ir, ["Not Defined","Low","Medium","High"] - set :ar, ["Not Defined","Low","Medium","High"] + set :av, ['Local', 'Adjacent Network', 'Network'] + set :ac, %w[High Medium Low] + set :au, %w[Multiple Single None] + set :c, %w[None Partial Complete] + set :i, %w[None Partial Complete] + set :a, %w[None Partial Complete] + set :e, ['Not Defined', 'Unproven Exploit Exists', 'Proof-of-Concept Code', 'Functional Exploit Exists', 'High'] + set :rl, ['Not Defined', 'Official Fix', 'Temporary Fix', 'Workaround', 'Unavailable'] + set :rc, ['Not Defined', 'Unconfirmed', 'Uncorroborated', 'Confirmed'] + set :cdp, ['Not Defined', 'None', 'Low', 'Low-Medium', 'Medium-High', 'High'] + set :td, ['Not Defined', 'None', 'Low', 'Medium', 'High'] + set :cr, ['Not Defined', 'Low', 'Medium', 'High'] + set :ir, ['Not Defined', 'Low', 'Medium', 'High'] + set :ar, ['Not Defined', 'Low', 'Medium', 'High'] # CVSSv3 - set :attack_vector, ["Local","Adjacent","Network","Physical"] - set :attack_complexity, ["Low","High"] - set :privileges_required, ["None","Low", "High"] - set :user_interaction, ["None", "Required"] - set :scope_cvss, ["Unchanged", "Changed"] - set :confidentiality, ["None","Low","High"] - set :integrity, ["None","Low","High"] - set :availability, ["None","Low","High"] - set :exploit_maturity, ["Not Defined","Unproven Exploit Exists","Proof-of-Concept Code","Functional Exploit Exists","High"] - set :remeditation_level, ["Not Defined","Official Fix","Temporary Fix","Workaround","Unavailable"] 
- set :report_confidence, ["Not Defined","Unknown","Reasonable","Confirmed"] - set :confidentiality_requirement, ["Not Defined","Low","Medium","High"] - set :integrity_requirement, ["Not Defined","Low","Medium","High"] - set :availability_requirement, ["Not Defined","Low","Medium","High"] - set :mod_attack_vector, ["Not Defined","Local","Adjacent","Network","Physical"] - set :mod_attack_complexity, ["Not Defined","Low","High"] - set :mod_privileges_required, ["Not Defined","None","Low","High"] - set :mod_user_interaction, ["Not Defined","None","Required"] - set :mod_scope, ["Not Defined","Unchanged","Changed"] - set :mod_confidentiality, ["Not Defined","None","Low","High"] - set :mod_integrity, ["Not Defined","None","Low","High"] - set :mod_availability, ["Not Defined","None","Low","High"] - - #Risk Matrix - set :severity, ["Low","Medium","High"] - set :likelihood, ["Low","Medium","High"] - - if config_options["cvssv2_scoring_override"] - if config_options["cvssv2_scoring_override"] == "true" + set :attack_vector, %w[Local Adjacent Network Physical] + set :attack_complexity, %w[Low High] + set :privileges_required, %w[None Low High] + set :user_interaction, %w[None Required] + set :scope_cvss, %w[Unchanged Changed] + set :confidentiality, %w[None Low High] + set :integrity, %w[None Low High] + set :availability, %w[None Low High] + set :exploit_maturity, ['Not Defined', 'Unproven Exploit Exists', 'Proof-of-Concept Code', 'Functional Exploit Exists', 'High'] + set :remeditation_level, ['Not Defined', 'Official Fix', 'Temporary Fix', 'Workaround', 'Unavailable'] + set :report_confidence, ['Not Defined', 'Unknown', 'Reasonable', 'Confirmed'] + set :confidentiality_requirement, ['Not Defined', 'Low', 'Medium', 'High'] + set :integrity_requirement, ['Not Defined', 'Low', 'Medium', 'High'] + set :availability_requirement, ['Not Defined', 'Low', 'Medium', 'High'] + set :mod_attack_vector, ['Not Defined', 'Local', 'Adjacent', 'Network', 'Physical'] + set :mod_attack_complexity, ['Not Defined', 'Low', 'High'] + set :mod_privileges_required, ['Not Defined', 'None', 'Low', 'High'] + set :mod_user_interaction, ['Not Defined', 'None', 'Required'] + set :mod_scope, ['Not Defined', 'Unchanged', 'Changed'] + set :mod_confidentiality, ['Not Defined', 'None', 'Low', 'High'] + set :mod_integrity, ['Not Defined', 'None', 'Low', 'High'] + set :mod_availability, ['Not Defined', 'None', 'Low', 'High'] + + # Risk Matrix + set :severity, %w[Low Medium High] + set :likelihood, %w[Low Medium High] + + if config_options['cvssv2_scoring_override'] + if config_options['cvssv2_scoring_override'] == 'true' set :cvssv2_scoring_override, true end else @@ -103,164 +103,156 @@ class Server < Sinatra::Application end ## LDAP Settings - if config_options["ldap"] == "true" + if config_options['ldap'] == 'true' set :ldap, true else set :ldap, false end - set :domain, config_options["ldap_domain"] - set :dc, config_options["ldap_dc"] + set :domain, config_options['ldap_domain'] + set :dc, config_options['ldap_dc'] enable :sessions set :session_secret, rand(36**12).to_s(36) # load the default stuff - Dir[File.join(File.dirname(__FILE__), "routes", "*.rb")].each { |lib| require lib } - Dir[File.join(File.dirname(__FILE__), "helpers", "*.rb")].each { |lib| require lib } - Dir[File.join(File.dirname(__FILE__), "lib", "*.rb")].each { |lib| require lib } + Dir[File.join(File.dirname(__FILE__), 'routes', '*.rb')].each { |lib| require lib } + Dir[File.join(File.dirname(__FILE__), 'helpers', '*.rb')].each { |lib| require lib } + 
Dir[File.join(File.dirname(__FILE__), 'lib', '*.rb')].each { |lib| require lib } # load plugins last, enables monkey patching - Dir[File.join(File.dirname(__FILE__), "plugins/**/", "*.json")].each { |lib| + Dir[File.join(File.dirname(__FILE__), 'plugins/**/', '*.json')].each do |lib| pl = JSON.parse(File.open(lib).read) - if pl["enabled"] - server_log("Loaded plugin #{pl['name']}") - # load the plugin - Dir[File.join(File.dirname(__FILE__), "plugins/#{pl['name']}/**/", "*.rb")].each{ |xlibx| - require xlibx - } + next unless pl['enabled'] + server_log("Loaded plugin #{pl['name']}") + # load the plugin + Dir[File.join(File.dirname(__FILE__), "plugins/#{pl['name']}/**/", '*.rb')].each do |xlibx| + require xlibx end - } + end end # Helper Functions # msfrpc handler def msfrpc(report_id) - @msfoptions = RemoteEndpoints.first(:report_id => report_id) + @msfoptions = RemoteEndpoints.first(report_id: report_id) opts = { - :host => @msfoptions.ip, - :port => @msfoptions.port, - :user => @msfoptions.user, - :pass => @msfoptions.pass + host: @msfoptions.ip, + port: @msfoptions.port, + user: @msfoptions.user, + pass: @msfoptions.pass } begin rpc = Msf::RPC::Client.new(opts) rescue Exception => log - server_log("[!] MSF CONNECTION FAILED") + server_log('[!] MSF CONNECTION FAILED') rpc = false end - return rpc + rpc end # Return if the user has a valid session or not def valid_session? - return Sessions.is_valid?(session[:session_id]) + Sessions.is_valid?(session[:session_id]) end # Get the current users type def user_type - return Sessions.type(session[:session_id]) + Sessions.type(session[:session_id]) end # Get the current users, username def get_username - return Sessions.get_username(session[:session_id]) + Sessions.get_username(session[:session_id]) end # Check if the user is an administrator def is_administrator? - return true if Sessions.type(session[:session_id]) == "Administrator" + return true if Sessions.type(session[:session_id]) == 'Administrator' end # Check if the user has plugin upload capability def is_plugin? 
- return true if (Sessions.type(session[:session_id]) == "Administrator" and Sessions.is_plugin?(session[:session_id]) == true) + return true if (Sessions.type(session[:session_id]) == 'Administrator') && (Sessions.is_plugin?(session[:session_id]) == true) end # authentication method used by API, returns Session Key -def auth(username,password) - user = User.first(:username => username) +def auth(username, password) + user = User.first(username: username) - if user and user.auth_type == "Local" - usern = User.authenticate(username,password) + if user && (user.auth_type == 'Local') + usern = User.authenticate(username, password) if usern # TODO : This needs an expiration, session fixation - @del_session = Sessions.first(:username => "#{usern}") + @del_session = Sessions.first(username: usern.to_s) @del_session.destroy if @del_session - @curr_session = Sessions.create(:username => "#{usern}",:session_key => "#{session[:session_id]}") + @curr_session = Sessions.create(username: usern.to_s, session_key: session[:session_id].to_s) @curr_session.save return @curr_session.session_key end elsif user if options.ldap - #try AD authentication + # try AD authentication usern = username - if usern == "" or password == "" - return "" - end + return '' if (usern == '') || (password == '') user = "#{options.domain}\\#{username}" - ldap = Net::LDAP.new :host => "#{options.dc}", :port => 636, :encryption => :simple_tls, :auth => {:method => :simple, :username => user, :password => password} + ldap = Net::LDAP.new host: options.dc.to_s, port: 636, encryption: :simple_tls, auth: { method: :simple, username: user, password: password } if ldap.bind # replace the session in the session table - @del_session = Sessions.first(:username => "#{usern}") + @del_session = Sessions.first(username: usern.to_s) @del_session.destroy if @del_session - @curr_session = Sessions.create(:username => "#{usern}",:session_key => "#{session[:session_id]}") + @curr_session = Sessions.create(username: usern.to_s, session_key: session[:session_id].to_s) @curr_session.save return @curr_session.session_key else - server_log("|!| LDAP Authentication failed") + server_log('|!| LDAP Authentication failed') end end end - return "" + '' end - # Grab a specific report def get_report(id) - begin - if is_administrator? - return Reports.first(:id => id) - else - report = Reports.first(:id => id) - if report - authors = report.authors - return report if report.owner == get_username - if authors - return report if authors.include?(get_username) - end + if is_administrator? + return Reports.first(id: id) + else + report = Reports.first(id: id) + if report + authors = report.authors + return report if report.owner == get_username + if authors + return report if authors.include?(get_username) end end - rescue Exception => log - # ignoring this error for now end +rescue Exception => log + # ignoring this error for now end # List out the reports def get_reports - begin - if is_administrator? - return Reports.all( :order => [:id.desc]) - else - reports = Reports.all( :order => [:id.desc]) - reports_array = [] - reports.each do |report| - next unless report and get_username - authors = report.authors - reports_array.push(report) if report.owner == get_username - if authors - reports_array.push(report) if authors.include?(get_username) - end + if is_administrator? 
+ return Reports.all(order: [:id.desc]) + else + reports = Reports.all(order: [:id.desc]) + reports_array = [] + reports.each do |report| + next unless report && get_username + authors = report.authors + reports_array.push(report) if report.owner == get_username + if authors + reports_array.push(report) if authors.include?(get_username) end - return nil unless reports_array - return reports_array end - rescue Exception - return [] + return nil unless reports_array + return reports_array end +rescue Exception + return [] end def image_insert(docx, rand_file, image, end_xml) @@ -268,10 +260,10 @@ def image_insert(docx, rand_file, image, end_xml) p_id = "d#{rand(36**7).to_s(36)}" name = image.description - image_file = File.open(image.filename_location,'rb') - img_data = image_file.read() + image_file = File.open(image.filename_location, 'rb') + img_data = image_file.read - #resize picture to fit into word if it's too big + # resize picture to fit into word if it's too big if jpeg?(img_data) jpeg_dimension = JPEG.new(image.filename_location) width = jpeg_dimension.width @@ -279,35 +271,33 @@ def image_insert(docx, rand_file, image, end_xml) elsif png?(img_data) width = IO.read(image.filename_location)[0x10..0x18].unpack('NN')[0] height = IO.read(image.filename_location)[0x10..0x18].unpack('NN')[1] - #we don't want to break everything if another format is supported + # we don't want to break everything if another format is supported else width = 400 height = 200 end - while width > 710 or height > 790 do #fits nicely into word - width = width - (width/20) - height = height - (height/20) + while (width > 680) || (height > 790) # fits nicely into word + width -= (width / 20) + height -= (height / 20) end image_file.close # Image alignment setting - unless settings.alignment - settings.alignment = "center" - end + settings.alignment = 'center' unless settings.alignment + + imgAlign = case settings.alignment.downcase + when 'Left' + 'left' + when 'Right' + 'right' + when 'Center' + 'center' + else + 'center' + end - case settings.alignment.downcase - when "Left" - imgAlign = "left" - when "Right" - imgAlign = "right" - when "Center" - imgAlign = "center" - else - imgAlign = "center" - end - # insert picture into xml, allow the user to ignore alignment if they want - if settings.alignment == "ignore" + if settings.alignment == 'ignore' docx << "" else docx << "" @@ -318,52 +308,49 @@ def image_insert(docx, rand_file, image, end_xml) exists = false Zip::File.open(rand_file) do |zipfile| - #iterate zipfile to see if it has media dir, this could be better + # iterate zipfile to see if it has media dir, this could be better zipfile.each do |file| - if file.name =~ /word\/media/ - exists = true - end + exists = true if file.name =~ /word\/media/ end if exists - zipfile.get_output_stream("word/media/#{name}") {|f| f.write(img_data)} + zipfile.get_output_stream("word/media/#{name}") { |f| f.write(img_data) } else - zipfile.get_output_stream("word/#{name}") {|f| f.write(img_data)} + zipfile.get_output_stream("word/#{name}") { |f| f.write(img_data) } end end # update document.xml.rels - docu_rels = read_rels(rand_file,"word/_rels/document.xml.rels") + docu_rels = read_from_zip(rand_file, 'word/_rels/document.xml.rels') if exists - docu_rels = docu_rels.sub("","") + docu_rels = docu_rels.sub('', "") else - docu_rels = docu_rels.sub("","") + docu_rels = docu_rels.sub('', "") end - docx_modify(rand_file,docu_rels,"word/_rels/document.xml.rels") + archive_modify(rand_file, docu_rels, 
'word/_rels/document.xml.rels') - return docx + docx end # Check if the user is an administrator -def get_plugins() - return plugins +def get_plugins + plugins end def get_plugin_list menu = [] - Dir[File.join(File.dirname(__FILE__), "plugins/**/", "*.json")].each { |lib| + Dir[File.join(File.dirname(__FILE__), 'plugins/**/', '*.json')].each do |lib| pl = JSON.parse(File.open(lib).read) a = {} - if pl["enabled"] and pl["admin_view"] - # add the plugin to the menu - a["name"] = pl["name"] - a["description"] = pl["description"] - a["link"] = pl["link"] - menu.push(a) - end - } - return menu -end \ No newline at end of file + next unless pl['enabled'] && pl['admin_view'] + # add the plugin to the menu + a['name'] = pl['name'] + a['description'] = pl['description'] + a['link'] = pl['link'] + menu.push(a) + end + menu +end diff --git a/views/add_template.haml b/views/add_template.haml index 24960759..b06c55b5 100644 --- a/views/add_template.haml +++ b/views/add_template.haml @@ -7,35 +7,38 @@ %table %tr %td - %label.col-md-3{ :for => "report_type" } - Report Type   + %label.col-md-3{ :for => "template_title" } + Template Title   %td - %input#report_type{ :type => "text", :name => "report_type", :required => true } + %input#template_title{ :type => "text", :name => "template_title", :required => true } %tr %td - %label.col-md-3{ :for => "description" } - File Description   + %label.col-md-3{ :for => "template_description" } + Template Description   %td - %input#description{ :type => "text", :name => "description" } + %input#template_description{ :type => "text", :name => "template_description" } %tr %td - %label.col-md-3{ :for => "finding_template" } - Finding Template   - %td - %input#finding_template{ :type => "checkbox", :name => "finding_template" } + %label.col-md-3{ :for => "template_type" } + Template Type   + %td + .control-group + .controls + %select{ :name => "template_type" } + %option{ :selected => "selected" } + Word - Report Template + %option + Word - Status Template + %option + Word - Finding Template + %option + Excel - Summary Template %tr %td - %label.col-md-3{ :for => "status_template" } - Status Template   - %td - %input#status_template{ :type => "checkbox", :name => "status_template" } - %tr + %label.col-md-3{ :for => "template_file" } + Template File   %td - %label.col-md-3{ :for => "file" } - DOCX - %td - %input#file{ :type => "file", :name => "file" } - + %input#file{:type => 'file', :name => 'file'} %br %input.btn.btn-default{ :type => "submit", :value => "Add" } %a.btn.btn-default{ :href => "/admin/templates" } diff --git a/views/edit_template.haml b/views/edit_template.haml index af259fa6..beed47cc 100644 --- a/views/edit_template.haml +++ b/views/edit_template.haml @@ -4,13 +4,14 @@ %br %form{ :method => "post", :action => "/admin/templates/edit", :enctype => "multipart/form-data" } + %input{:type => "text", :name => "old_template_title", :required=>true, :value => "#{@template.template_title}", type:"hidden"} %table %tr %td - %label.col-md-3{ :for => "report_type" } - Report Type   + %label.col-md-3{ :for => "template_title" } + Template Description   %td - %input#report_type{ :type => "text", :name => "report_type", :required => true, :value => "#{@template.report_type}" } + %input#new_template_title{ :type => "text", :name => "new_template_title", :required => true, :value => "#{@template.template_title}" } %tr %td %label.col-md-3{ :for => "description" } @@ -19,20 +20,24 @@ %input#description{ :type => "text", :name => "description", :value => 
"#{@template.description}" } %tr %td - %label.col-md-3{ :for => "finding_template" } - Finding Template   - %td - %input#finding_template{ :type => "checkbox", :name => "finding_template" } - %tr - %td - %label.col-md-3{ :for => "status_template" } - Status Template   - %td - %input#status_template{ :type => "checkbox", :name => "status_template" } + %label.col-md-3{ :for => "template_type" } + Template type   + %td + .control-group + .controls + %select{ :name => "template_type" } + %option{ :selected => "selected" } + Word - Report Template + %option + Word - Status Template + %option + Word - Finding Template + %option + Excel - Summary Template %tr %td - %label.col-md-3{ :for => "file" } - DOCX + %label.col-md-3{ :for => "template_file" } + Template File %td %input#file{ :type => "file", :name => "file", :required => true } %input{ :type => "hidden", :name => "id", :value => "#{@template.id}" } diff --git a/views/layout.haml b/views/layout.haml index edf1e87a..b89c3c08 100644 --- a/views/layout.haml +++ b/views/layout.haml @@ -1,4 +1,3 @@ -!!! %html{ :lang => "en" } %head %script{ :src => "/js/jquery-3.1.1.js" } @@ -142,7 +141,9 @@ %li %a{ :href => "/report/#{@report.id}/edit" } Edit Report Information %li - %a{ :href => "/report/#{@report.id}/generate" } Generate Report + %a{ :href => "/report/#{@report.id}/generate/report" } Generate Docx Report + %li + %a{ :href => "/report/#{@report.id}/generate/summary" } Generate Excel Summary %li.nav-header Findings %li %a{ :href => "/report/#{@report.id}/findings" } List Current Report Findings diff --git a/views/new_report.haml b/views/new_report.haml index 63739678..79c7b1f0 100644 --- a/views/new_report.haml +++ b/views/new_report.haml @@ -1,5 +1,5 @@ -.col-md-10 - %form{ :method => "post" } +.span10 + %form{:method => 'post'} %br %h2 Create Report (or Import) %br @@ -10,19 +10,19 @@ %label.col-md-3{ :for => "report_name" } Title   %td - %input#report_name{ :type => "text", :name => "report_name" } + %input{:type => 'text', :name => 'report_name'} %tr %td %label.col-md-3{ :for => "full_company_name" } Full Company Name   %td - %input#full_company_name{ :type => "text", :name => "full_company_name" } + %input{:type => 'text', :name => 'full_company_name'} %tr %td %label.col-md-3{ :for => "short_company_name" } Short Company Name   %td - %input#short_company_name{ :type => "text", :name => "short_company_name" } + %input{:type => 'text', :name => 'short_company_name'} %tr %tr %td @@ -33,23 +33,20 @@ - @assessment_types.each do |assessment_type| %option #{assessment_type} %tr - %td{ :style => "width: 30%" } - %label.col-md-3{ :for => "report_type" } - Report Type - %td{ :style => "width: 70%" } - %select#report_type{ :name => "report_type" } + %td{:style => 'width: 30%'} + Docx Template + %td{:style => 'width: 70%'} + %select{ :name => "associated_docx_template" } - @templates.each do |template| - - if template.finding_template == false and template.status_template == false - - if @report and template.report_type == @report.report_type + - if template.template_type == "Word - Report Template" + - if @report and template.template_title == @report.associated_docx_template %option{ :selected => "selected" } #{type} - else - %option #{template.report_type} - + %option #{template.template_title} %br - %input.btn.btn-default{ :type => "submit", :value => "Save" } - %a.btn.btn-default{ :href => "/" } + %input.btn.btn-default{:type => "submit", :value => "Save" } + %a.btn.btn-default{ :href => "/"} Cancel - - else %h4 Hrm, there don't seem to be any 
report templates. An admin will need to add one -_- diff --git a/views/report_edit.haml b/views/report_edit.haml index 669d7ea2..3373cbbe 100644 --- a/views/report_edit.haml +++ b/views/report_edit.haml @@ -3,7 +3,7 @@ %br %h2 #{@report.report_name} %br - - if @templates.size > 0 + - if @docx_templates.size > 0 %h4      Modify the information that will appear in the report. %br @@ -12,26 +12,36 @@ %tbody %tr %td{ :style => "width: 30%" } - %label.col-md-3{ :for => "report_type" } - Report Type + %label.col-md-3{ :for => "report_type_docx" } + Docx Report Template %td{ :style => "width: 70%" } - %select#report_type{ :name => "report_type" } - - @templates.each do |template| - - if template.finding_template == false and template.status_template == false - - if template.report_type == @report.report_type - %option{ :selected => "selected" } #{template.report_type} + %select#report_type_docx{ :name => "associated_docx_template" } + - @docx_templates.each do |template| + - if template.template_type == "Word - Report Template" + - if @report and template.template_title == @report.associated_docx_template + %option{ :selected => "selected" } #{template.template_title} - else - %option #{template.report_type} + %option #{template.template_title} + %tr + %td{:style => "width: 30%"} + %label.col-md-3{ :for => "report_type_excel" } + Excel Summary Template + %td{:style => "width: 70%"} + %select#report_type_excel{ :name => "associated_excel_template" } + - @excel_templates.each do |template| + - if template.template_type == "Excel - Summary Template" + - if @report and template.template_title == @report.associated_excel_template + %option{ :selected => "selected" } #{template.template_title} + - else + %option #{template.template_title} %tr - %td{ :style => "width: 30%" } - %label.col-md-3{ :for => "report_name" } - Title + %td{:style => "width: 30%"} + Title %td{ :style => "width: 70%" } - %input#report_name{ :type => "text", :style => "width: 90%", :name => "report_name", :value => "#{@report.report_name}" } + %input#report_name{ :type => "text", :style => "width: 90%", :name => "report_name", :value => "#{@report.report_name}"} %tr %td{ :style => "width: 30%" } - %label.col-md-3{ :for => "assessment_type" } - Assessment Type + Assessment Type %td{ :style => "width: 70%" } %select#assessment_type{ :name => "assessment_type" } - @assessment_types.each do |assessment_type| diff --git a/views/template_list.haml b/views/template_list.haml index d1ae1a4a..b0e98b13 100644 --- a/views/template_list.haml +++ b/views/template_list.haml @@ -1,40 +1,49 @@ .col-md-10 %br - %h3 Current Templates - %br - - .table.table-striped - %table{ :style => "width: 80%" } - %tbody - - if @templates - %tr - %td{ :style => "width: 30%" } - %b Report Type - %td{ :style => "width: 30%" } - %b Description - %td{ :style => "width: 30%" } - %b Template Type - %td{ :style => "width: 10%" } - %b - - @templates.each do |template| + %h3 Current Templates + %br + .table.table-striped + %table{ :style => "width: 80%" } + %tbody + - if @docx_templates or @excel_templates %tr - %td{ :style => "width: 30%" } - #{template.report_type} - %td{ :style => "width: 15%" } - #{template.description} - %td{ :style => "width: 15%" } - - if template.finding_template - Finding Template - - elsif template.status_template - Status Template - - else - Report Template - %td{ :style => "width: 60%" } - %a.btn.btn-warning{ :href => "/admin/templates/#{template.id}/edit" } - %i.icon-pencil.icon-white{ :title => "Edit" } - %a.btn.btn-danger{ :href => 
"/admin/delete/templates/#{template.id}" } - %i.icon-trash.icon-white{ :title => "Delete Template" } - %a.btn.btn-info{ :href => "/admin/templates/#{template.id}/download" } - %i.icon-play-circle.icon-white{ :title => "Preview" } - - else - No Templates. Silly goose, how will you create reports? + %th{ :style => 'width: 30%' } + Template title + %th{ :style => 'width: 30%' } + Description + %th{ :style => 'width: 30%' } + Type + %th{ :style => 'width: 10%' } + Actions + - @docx_templates.each do |template| + %tr + %td + #{template.template_title} + %td + #{template.description} + %td + #{template.template_type} + %td + %a.btn.btn-warning{:href => "/admin/templates/#{template.id}/edit/word"} + %i.icon-pencil.icon-white{:title => 'Edit'} + %a.btn.btn-danger{:href => "/admin/delete/templates/#{template.id}/word"} + %i.icon-trash.icon-white{:title => 'Delete Template'} + %a.btn.btn-info{:href => "/admin/templates/#{template.id}/download/word"} + %i.icon-play-circle.icon-white{:title => 'Preview'} + - @excel_templates.each do |template| + %tr + %td + #{template.template_title} + %td + #{template.description} + %td + #{template.template_type} + %td + %a.btn.btn-warning{:href => "/admin/templates/#{template.id}/edit/excel"} + %i.icon-pencil.icon-white{:title => 'Edit'} + %a.btn.btn-danger{:href => "/admin/delete/templates/#{template.id}/excel"} + %i.icon-trash.icon-white{:title => 'Delete Template'} + %a.btn.btn-info{:href => "/admin/templates/#{template.id}/download/excel"} + %i.icon-play-circle.icon-white{:title => 'Preview'} + - else + No Templates. Silly goose, how will you create reports? From b590f4ee8b83d67b5e3297b0bbbbaca03909a52f Mon Sep 17 00:00:00 2001 From: frisch-raphael Date: Tue, 23 Jan 2018 16:56:03 +0100 Subject: [PATCH 2/6] Deleting debugging code --- helpers/xslx_xslt_generation.rb | 6 ------ 1 file changed, 6 deletions(-) diff --git a/helpers/xslx_xslt_generation.rb b/helpers/xslx_xslt_generation.rb index d42d7bd9..7ed1c39e 100644 --- a/helpers/xslx_xslt_generation.rb +++ b/helpers/xslx_xslt_generation.rb @@ -253,12 +253,6 @@ def generate_excel_xslt(excel) shared_strings_noko = clean_shared_strings('æ', shared_strings_noko) # because we cleaned the shared string, we need to update it in the returned xslts_components xslts_components['xl/sharedStrings.xml'] = @top + shared_strings_noko.to_xml + @bottom - i = 1 - # debug - xslts_components.each do |_sheet, component| - i += 1 - File.open("/mnt/Kali_Shared/excel_xslt_#{i}", 'w') { |file| file.write(component) } - end # return the xslts xslts_components end From 0ad8d5bf4e6fd475a8cfce67e2bb5bf81d056d7d Mon Sep 17 00:00:00 2001 From: frisch-raphael Date: Tue, 23 Jan 2018 16:56:26 +0100 Subject: [PATCH 3/6] Deleting debugging code --- routes/admin.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/routes/admin.rb b/routes/admin.rb index cc3456b7..8c33547b 100644 --- a/routes/admin.rb +++ b/routes/admin.rb @@ -567,7 +567,6 @@ if params[:template_type].split(' ')[0].casecmp('word').zero? 
docx_location = "./templates/#{rand(36**36).to_s(36)}.docx" File.open(docx_location, 'wb') { |f| f.write(params[:file][:tempfile].read) } - File.open('/mnt/Kali_Shared/original_xml.xml', 'w') { |file| file.write(params[:file][:tempfile].read) } xslt_file_location = "./templates/docx_#{rand(36**36).to_s(36)}.xslt" error = false From ef12e001867aaa72995e40cf30309604fbac9aaf Mon Sep 17 00:00:00 2001 From: frisch-raphael Date: Tue, 23 Jan 2018 16:56:56 +0100 Subject: [PATCH 4/6] Deleting debuging code --- routes/report.rb | 2 -- 1 file changed, 2 deletions(-) diff --git a/routes/report.rb b/routes/report.rb index a463dce8..24cfd34b 100644 --- a/routes/report.rb +++ b/routes/report.rb @@ -1184,7 +1184,6 @@ end # we bring all xml together report_xml = "#{@report.to_xml}#{udv}#{findings_xml}#{udo_xml}#{services_xml}#{hosts_xml}" - File.open('/mnt/Kali_Shared/report_xml_will.xml', 'w') { |file| file.write(report_xml) } ####### WORD GENERATION PART ########################### # This part is used if the generated document is a docx @@ -1352,7 +1351,6 @@ i += 1 previous_index = original_index end - File.open("/mnt/Kali_Shared/excel_final_xml_#{j}", 'w') { |file| file.write(worksheet_to_repair_xml) } # we modify the excel with the xslt produced worksheet archive_modify(rand_file_name, worksheet_to_repair_xml.to_s, archive_path) end From ff3680e38c17e47f804057a3cfa74549803ae84d Mon Sep 17 00:00:00 2001 From: frisch-raphael Date: Thu, 25 Jan 2018 11:57:49 +0100 Subject: [PATCH 5/6] deleting debug code --- routes/report.rb | 2 -- 1 file changed, 2 deletions(-) diff --git a/routes/report.rb b/routes/report.rb index 24cfd34b..5392cd81 100644 --- a/routes/report.rb +++ b/routes/report.rb @@ -1321,9 +1321,7 @@ # archive_path is something like xl/worksheets/sheetX.xml # xslt_path is something like ./templates/excel_worksheet_n91g3lav51i2r4l9riw.xslt - j = 0 worksheets.each do |archive_path, xslt_path| - j += 1 xslt_to_transform = Nokogiri::XSLT(File.read(xslt_path)) worksheet_to_repair_xml = xslt_to_transform.transform(Nokogiri::XML(report_xml)) # the xslt generation produces a broken xlsx file : because it added an arbitrary number of rows the indexes of cells and rows are messed up. 
From ce02937b2ee4ae897e546606368e1cf80daad837 Mon Sep 17 00:00:00 2001 From: frisch-raphael Date: Fri, 26 Jan 2018 10:49:06 +0100 Subject: [PATCH 6/6] Forgot to add the Excel template --- templates/Summary Generic.xlsx | Bin 0 -> 25371 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 templates/Summary Generic.xlsx diff --git a/templates/Summary Generic.xlsx b/templates/Summary Generic.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..2401f21bbe531c99190b9b279048359965367160 GIT binary patch literal 25371 zcmeFZWmsLyvNj6Af_rdxcemi~!9BPKcL?t8E+M!CcXtTx?(QD^n~^_kS@0KNCh}Ht7+1PsJYKqRWeW!HBB`q&a}x zG^2F!+UF*Fi}W=zfEVhFH}c+RPgFBcoRGZiK6q?a@FZ)}V5u~J6u@#Pkr}EWZ$H#6 z$yC`m-8>zqlgFp9j)TUftayi_GKohP+ouFd&Ay;dVwU4fL2`hJ>2Dacb~j9&D}uP1 z*AON_H-)`5@9a+Ca0Z$oGG!LDzXfh|FfVV@wAE)$COZ?l;rgT%TI3fpu2vcpK8R+` z%P*qGT7@Y!HVP}5h>lF|%L2qw7B;}hzypLx79e0{NczPYy?Ur)?I(DbC{~!=O7S&YW1P z&IjJHzIm^W%}2aDd~G}Y%ky6`2FV%k@Jq4R#wO`QZ=%iP+qn_Sckb4}dChKr(Tlg# zelT#dd8~I5;L#k$g*4=a?cF+NfIjIP=~)e2)~zEcpSD}d*at&H^7PHN)8&f3|2VjR zm{Qm5O;)k8cd}s@+9*q4iy`RK1#6)$gT)g<4H`_{%$9DMddn59u{Vw(xcCh=iN+%N z1v1YX5VS8ZAV6~eCG%^P=!vcX6e&PrLjy8j$JWroj*j;A^M5n`|Kc?NH`7aEq-DG4 zVFFIY9s~Ptr3DXxu)T_nwad2^&JN%%Z z;AVrlBoqmSi>TJAG$8Tb#sPwo)HXrHrewVv(Q*2E`X*Ua!iC(iC6cnZz93t2V3kN@ z@?59_VU$({6B7A5cEAUoWN-CeX|mk zwXOMQYisk@1M;t&0S26s0N($*|C-}Q%>Xged+OWbJMLi|i;2x=1oEjjnDJQE`5ag@>R)x2#*9G(Ard-z2=sYD@bS_} z!QMInE}F<910pU-{i@05id-_@HM6+SX)0VTyWdGDq-k{J3_ zzmIl>4)|%8&?%MY0NGiM55>+Rrh`^8AzIDo0j()MomX~WNKNIZ$}sY?6_`XOS!C9n z09DII%54`uvY*`pOsn#fIE@|CNAfz7J4&5ZJC8PL4vgKfE}xz%+(MRBR~faojac%8!JTcG1 z7(HMq^+WL{{jPT2(;%4w(~yVT`j!`11C5mQ8&FlPlT%izRXbJ6IAS>T$pMl_LtkrE zsjVfIG&U+jTEXGZI)vr3$Pi^P$??0<^S#vFUwN&|zc5&;k+}}KGnlU;(#DviJ{leS z!EhCWjOOE@ZLjTzsO_pwp-$`Kq$+qi*E$j2loCbEe zZo-=?8^HHs4gJmI+a{HX+J{fc{yF>Lt0NlyWCa1z3(D9owW|R&ZjV%VfLQ%!aU`G1 zJ<0$F0*b_Zy?1zvWP1}sOGCQ1Plnghc%-Hkip7TFL3hOmb9wU1+{l9N9Nf*qc=3po z;4pVf-h2qxkcEgT#z|iwi#XC>z*PNSVNV14yCuGNYp(1Q#bRDH#TMO_1k;U}d4u>Q zY%Ynpsu8oOis-F2jz@ai7p}Y!L>Ue}@x{`N;(9n45eump)5R8E?6{P&E-;DSpi0oX zWw4zS|6`|Bj>uXeKmDVRPkz?fwOEi0ik?-kP*TJ|6Tyg5P9zuE%X9Sl0+oW-pm8Ur zI0UMX1qAgSxx4H959PYS+)bKLuXhvbMia-CwSz-o9co-bt`ld6sKVgQi-zr8f0#{O zy3_5I98m$G9@cw)!@Kg0=K=fp4e^S3GiULtsnlw8v;vddt$OZ+#r3+#d-iB{mfgwy zj_C|4ZvJ-W%$NVYHZ$P+x<{xiX3f~^<#k^c_iq#XF0dfw9}!6g)-fA5Ag;g6FQOWB z?nR)kT(t0iOTeO&i-IjaijkI5C1l-J53{PU2Lgi36^rUd48_RELd>-ZBIghbu9D0{ zBwm3b2@|yWu8i^N;7sS4`#P7~rJeZ`Ec)#LM+Vr03nf zA{PRuhV%OVF1(l9?|qoNuc?UClHRdi-*K=k4z>KY8Awmt7KSWd(YbW^qHU@ zuCYjw5=qE2IX^*FyZa}0Bs3?#Y9UO2YxWLBG3)y!@qP1~@7& zrIMTOUvZW-UP*m!7HP+-#Yzoa!`;3_@^LDUN*qV5LIH4?J+u~v+Q;VkW_8WP_=nHP zNb35e!Hxbx`fBgxe{mrd85?)z80wA3?IdQai`Z;KWz~j2m1JV8I@D;b4#~XJRrDvv z9bz{Od^fZ=xRz#`%f=CVfB_U5YzU4LZ6#iY)kNl&_VI8a^n9SJ7CTI^^f3Nwjw9>x3eyMW5=&{n zonie8(5K;M4$?0<4M>%QFEY|n4Mzu}A(VlYwnD~;gT^RYKbh*0#!+#uYcITS5^gcD z8&5iY3)6I8u1g*^3e{a@kcU5yFn~)Z0WLpu&|KL^yrBe_!nGZw7cU_N}VR};6zcYT@h+p zCWjgYD!B4aigArLM(Taq8D2SzFhQ!wOU|jo-BqSY3U! 
z_t-Rio678R+qLs?UHRM8po;WVFML9BS1sZ(wM%EtG1HA!50{1$8OAQ9cx`pQx}laI zw*d~Te;#LAa=0r;fFEXXfJb5I|N3EOXJTk*Z%6kwFutB?VeuNWfaf6ot11gG2;1p- zDylim1#(c^3MaE{vo@Ha>>~?S&<}@BM`IT8YQ!0sUcr%ly!o4ZYg$_?Ar!-g3|CyG zwWk$6gMMhC*)L!C=I^%M7NQ6V8mQ;SUUV1ug1nxCW)xcX!x%Qb1+oJ@jifNZz4I`T zh3X6jpqyhRNP@hx)j=CSg%MlD^!r*hwP3yPglDg!fB z&ZK=1o|3g1jV#ncGlWAa42fiyVRyMMj8lgXNyv$ABI3%?fEywFW7ot_hIN1{~h6kmQxnP;qMhbW&Bilzq za4N7{c~5-WC(wR<8E<|!k?35*F;d3q=5@BDE6aokT7D_Y)p>a?e zk*#+>2_chE8A5}|7I?IV&Tv!*_97h)N~MN>~TzVqMGM>F|x=C(l7#$UmH zu!I6h!yTnJN!ywNA|oRa+!v*a3SHqZPyB@5<=H)75dm2k#k(^v9_&Ita2JX-R#IeU zd7%SQO(#=xO1*dSp!R$e;ep3(^{s70>|)eUoFht?H%E-4U-u3H$r<)9O z4>-^~%b!D+H<16SMxd4#akl_0r}}?G;H^^pL0}lFlODnE)aMl8#%jV9Av#Z6=#26j zIKkk-*Al7nJm1=y*2ibf(m3(SC~tu;Uw(<=Vl6#D;RAj@hBd^jX+3k_^aoHf`wyb0 zVqH=&53UUbBg~%%WtZT+h4a{F#_~}dB`Wq!1rm6ph7xD`AQclT6oH3PxfBj_<_dld ze7z5)kWm)oo<|a53OstuJYIC9w?Erqjh5u2jtl?QKg@`vFDmXEcZ$;%sIb$Ybs*uP zRH6g0@t+kEPUvd289*WVm-Pm=dQPTR#&&-mi~O1XyZ`@TeM`_-@f3vW#4&uWc)v)6 zwRIL6SjXfKt|K-s;p-H#^W64tRhYKP-uUL4TnpM=r42_B`0P2BHns`O)ZKFNi_$jV zA@w;o^scCpdMX15M7$J2hCeb@+GkkUa-tc9nZmSOn4HYTE*Q`A`^$pI=}5`fVG1no z;HUKDZ@pN|I(*BnVv0<*ctp(p`IUeaKOiFqoMINXJZ9*Mx}Z`{XM2$CKN|lPYsq^8 zK#2qxj|Uh5_wD*7dbWV0;dkJ_P5xzgNmPeKH$6hYtJ`<1CXe`qSeJeAkCbL1-tSk# zrW$lgAw1mVp;d&6MlyB1c<|YAdweh8jV6(+)osK@F_P8qukw!^I_yFwFQkp6RSFTL}xkde(W|f#fwRI zj$zuRx_$Q?=%6yes8uh2E!i(`!9^k^kK$Lj+0Trkr;YknIbzRp^KKD4ZvC3Iy7Z(n zRM4lo#mB%05h{&PyyBG>n_qxiynn_5tzunl6yUU61yKKo(~{}+w2X__vQDK(2{R6-l1}*mtR$+zGSMil(WJ%I3rfXmR2$3u;@KxxnP5WH0*s}-a+$WNkF3Kj z;XNaTRaOKVsQneOxM0wwM(nKXY4{vNq&J3=OhAdcL&NAy`2I$^S>DxDqL=zz;2?2* zb!~+E>R62UXKtk`s1+0m$?`e)e9CYi3PP%?+Fr&q*x4Poh-w?MZ{SstczOi4mz1{?42X-T%OVjlAH(7R%!{ zP^c_|Is?Mh2DUtUx5RhbK@n~Thf!ZiG#mn|6Clsp-PW2nXv_e#)u7`*<#bj21`@Q}CCk@jG^i zRCn8r0PG1SxqS!B?q|$F(acVRhmbG%XPJFqYh9ZFR=};)M#Q~0mpCSx>CN@}(9_Y* zQ)IlK$VzJyZ;H^Ys2+lQcU94{%NUN(8hnLL^Un=1B2ezM@nE+wFGcGa{6k`pu*_+r zAQ;T;FN|a-(dkzSrLizk%N1$mTL?@0=4)y75Nrn>4l%&>8grq7CX=&S95(QlOM-xf zCdoSqlP}DgZXarznn1&G|cZX0CEX%0V(3VU5@F0DU zah-qQL4CJiv)NrfnT>k}r0qYWm~$)2CL9+C=p67SKObRhBgX8-8yi`|3aGyP(-8qYA^@=JqGDV~wW z?dbJx*YKn(Z__26t19k{sOZyrm&G|dB<{!z39s_)ZRq0N^uf)Y-g7;y%*^Gs#o@)O zyBfD;PF>(4k`sK@b@`~QSBn-8HwRi9bwcQI=F0xo+WL*f{hdus##5!e!;5@P)#Bdb zOIN*@>Qb3X>h#l1xSbO^QVmNcZU!!=Y~rpb&a=74+6TAi+j-wm8zwJp>f(X*?Yrep zB{t0%-|NTc&lesJH-&tKfiIgn*O5Jsha!(px|fF6*H5lCw_O`HC$#=eL5uTre4~k% zi^q>6TFSSl)06O*564Y+w)>CMvA$kq%J6)vBLNE$IG(qoZSZJ6^}sj3gnOx#dg5$I zr)Pd6usn4Tvvh3IdgO)2wRW8)ZM!pA*3rrcY!cdD)t_K%rP4fa`^kFgRJ{E{ds3^T znL)d={OEbpyac6xombkl8C!JX_;Q;+Jqv%?vrDIOi)pOIW1U!bfw$8(nt0;hs_PX# za`N1qVr`|BU%$1=4nbSBI6d)w{n_p82fF%-*8V)+^JqCi=#DDrlEn*lLV@j%#ob}Y zYXn6cEif`Go)+ZW-Mw5l#TwD4DBG(wt6Vg0=5K+HyVN#=2{4N%TF94<%ev` z=klA0)c4K|&7s>`^?ageeWKPfjb+EO5yG%^_w2YHfNOY`y*)$F`%u>|uM;vFwZZ+p z&L@t>f(S_oc`fEE!M*qjNloJbRt&4qF*1kP{a7>=lf9g^o+pwr`OBSpPEN}$#?yyVF18-eZV7JQyEOU29C9}ArB`TT4AoRD zp%1y~-|12->ffPL_hpnUxgLG!DJ@frTUkci3W2f04!x(vEb_pD32)MEA}P8?YFXsp zK+^ihQZgg{C1Dn5TgIqDt(fD)p5t%Zi*mFrH5LbCVu!Pbxau>v%*PA{fgoS~a>-f2pi)rB-2E|SbT`SpJ& zpbT#q)QjP5fJ-uRPK|WZa430N+Iqagy!7nGKAw5(GO?kmx?S2+k+w@x*5{2tAAp!1 zhx!an#55e)UfC>8r$7eTcRjF3xR7&m#}SzD3XsM{Ga+*?B#fnwf02ZXLgD zTamRP!)RJltMuAhQhTcGKOUj&Jq003vZ^mE6Uz7+;yrG@pRejB+ZUnn(NL~>uXRb| zPVYQnuKRlh+c)77?+SPN1k)cuEa(IeY`L-*!e+1mk?y8MX`fanAbiPMz(_EF_TuAvA2|F8z(NR$%CI^#*xlhdyj-#Ji8xbr?GL8 z>gqKoM~1eNQdY9RkYbf1PTRMqO|K^eNr^3O%h5Xv$@ppI+IEYz3$_Krlgr&ee6};G zjQZe>wXA1Pvdo`Bx({uz2A#i>W$W-tee0&9rOBoLR^-;oXy$)nwWWDw&28G(TXeU* zfOV^-JVM!4?)Y1Wvfe%IvVb?i)9C!RuY%2THRe>DZI`@*m&eaY5?cBG(l^1vDgFZs zj&$YLxkIHvy^8%TRh8_8PR01G?=rRP=|TF|U`ko(K55F9?HtZGVQJA=&4_acTaLA| 
ziUn(|!(}!Kty>xZFRSbBfZ1I5H{oTWSWVa_r|N=zlbB+?y2t~Ed(Sy*{M+Jj_eqH0 z!4v$_{eMZc%zcvx*A)Ayd~&E_Rq`b9+lt4X+&#`>&4X2UtlvvOuUC;Po^;bFZ)UEr zq5LVE9YH9X_`yP#)?~2mcr`pJ4d%TW;uKJHL4F&!eY?@6gS=bp7n$mf2o;QNM?B1);+|9E;3mH!d#V)$d6*atEc zE}`<0s+7+*9fmbUh+c2JL*c@WWUil>an1y(!%rAOvT!?IH-Ud?wm9PxfG*z=aEjFz z=}R*Evx~)7Xp%RndQTVI7e?nE7K^+h3?PyK+>{;s{ERqBFX}yJ=i%TD%*0(w=g_Rh z*r$m>7+;|%*jnSL9Dqphj!rKXT$W(ji5#j$CNX*XA^dPX1k>@38yE$YpN#!y9IS;f z3}RKkxtjy|_2%e^K1Ugi;tO|B`#QsOqW`yv<0w8!2e;;`@p!e4h^aj6#0f?&d6FT* zDZkPyjv4$RKJz?c=IBD|_Vf|_>O=VB#0iMU{OS?>+}sUD0sMshV@g8|L4jl)Z))~3 zh?!#$r*tzCf}3&xEXrl9f(XXf!05v-HH3Hkos5{V*A=B@aKqs3gGRr$M$u0R!E))z)%{dA2o9FU#|lotBkjSnNR}{CY8td z92KFD0540)1cSt1%NW7W`KmFcAk4&@aKl%HKh4+yHsVxeKBkKO(1Xv@hmV^9`WL_J zU;M8;;&1%Q0Hdq{MhO`P@w>j-UHFq;40L{w9UNn4EC$~9xBUOi4~}8=XqIE}aQsKs zCsrvDWB}d}p?MyL6FKNxNw3=aV1WMysqeSVe=h%RL#z8OE5{CccUSCNu)LzA|Hls7pRdk|f89WF9ui<`nXm90p4q}W? zxBYj_^Is`Qf5FI6^Jl@^WrgHF9!bkmkiPs$J(=Bz<7%4s!MzCW-{$kD|h{2&&BoHN;j^2(*msSp%%jMkI zBVLrR5l4Rgc$nNFE8m}&|qjBi%RGSkmU;b zt6D&io=Y`PAmVco(GUYyY6(I`7?*{FhZo|RsegcpBfUm8yQtiSOMS@Zo$VOSf{_12 zZgMYtb)pwg`36uklu0saCnIW%6teN0(no%(^;~6Cz2vXNlimC_sAG)MeunQ}X%(-u z3W0uR&yUFs0gfrTA$Fscrl5Dk(t?cAX@ttCQ0l>`0(ppnU50i3>6C>V2xTP*!||Uy zh2qsFU?#g6R4@=2!|_B6-!1INP%>>m8EaB8Y^hi0CR5C=b&aPNAdEC9ccIgrYkg@a z(>(AX1_ry^qs19PQv_ym!#pK|e1FT#GQf-rzyY8y_2d-sq_aEbAN|}gU*sa0vz8}? zR7vp%n4iV*O+OAXBPKwKotPfLObVqo)8J1|ZDsUsPC|i|hG}BzB|{DHNMpOjZ&`}! zQJGHMW}^qtd`|hvY+=}^7f%5x#wcw>SX)U{F&GC*oJ`7usxd$?caGWFFgFNafpq#I zd0=i$P!TIFyz~*D3Yq?Le;}d{yiOl(AO?U{ppIeU0^&T>NWZdYcYNU+bpX8X4oW;P zSiho3;9*}}W|UM?f>9bfTnU(FvLAAR7@)Ncw=!6DX2upuDV!MGaATj$yuN^=g(F0j zrUcxGvJJVivWcj%(+{bSF{T^3B~T`OJRNuQgOP1Wb;3qItiQ&ji1N*1%>foODptp1 zr{FZ8jEpO9;aj$oS-0DMVFmZg%y=ZHuFA#8tJtiLXC`@7z$m9S2Bf+|pg#Ycs6xs= zSytAuq&6lmS0KieF}5ZiJ2xB^>S7#tV10p-Nvo+U*pe`D3wU>S93Z zUc+w-!74tvb9o+gvYlfo1iKsdm!Ij@+OaA@T2JXCIgJf0(`0=|pb;iVc5yiYN7P{k zj%i)6EfCwA)G2^eUY_}irN1`ZQMN_+O`7?=;qG4>3IH~2c#HS$ghKi*W=E;Izi?@c z$Nm`(h;@QMhvJ{rCnKau;3J31TkTfuqIu(?+kvLR#ou;pkX5#Lg`^ir%ldHwQoc)T zMmtrf!~urY!Xl2j?)YYCtpK{r(Xy#1^9^xN4*50l6|G=0msJ$!7=e(RTq9YXTfhgh z>;=}qJK1k}>wGn28Cz&TU>Ui2(atd*+H2yikfa#~sc6nIr&hsa^V?t?%I0s1S z&WKkdW76tzRrHgB$;$es!wffq4{caJ7V;GC_s`CXON)|b0I1(0->91UZ`A4}=M7g) zY2r81X22VXTJMcS{+k5+Mk4YYjD5Iw%T@ChOm< zNy();19fS>A+i>diRv?Fc~FjeV<<%A;KwbWu2$!lQJ9my4?Z|om(C{1D8Ma$Vpnfv znqHeboGxr&NRLfep+ef5Be zSPb&)rr8eBdG~ca=FV8gR8sFDPgx!_lB{NA@p)+XE47U9jhZCJZNpPWN`NH0q%F)t zP5pY&6-o8Y6LC?mPU7jJdgCMl$obk1kh25wCP&nqM~Uh3%Bl-RKzta*5o)#Lrvz+p)n450FT zMS;)3m&jrrAIdY(+s*L%mVrFOfkmg8uu20EIyG?SleYG%x|M=A z+0y0#wUc+g_su0w8JsDavri@m*0miVMFH~kr{4xV!q>!3o8-WH-QVx>kOKgSyl5gc zcSmxG$kQ*`S1H={)NQ9fM&=7Lj{*L>t>?tmTmuaE9qF&COna}q4YJ!P&Ye6X`)RNM zg&b;cuG3zZ2v)uAGx_89u4Qla0WX2#wVdEc(Yy{%#8Pj%lAFE_eVVVOWY>P=)tQ{r zcdoB!O=XY#Wwt2iYjB)B!ubNUcF+3lJ`S7J;YxWrqjo-O6y9t2x zuN8pg_*>Q=_1^NQ=ij!=+_m6%OAXD)&C7qV;{$5lxy#!=+?mMvcC`TMf3X9SZTmIH zIBa+#M*lMYZ}vB}09e18xNEmzKk~=+H^Tt%{=M|ytoXzJhRE)lMQ83Z00+N9`2TIE z|36DTtt^Mq-wM&}CZOPCkf&Gu0Vrb>5F;9YD=QDs=`Szm^Vd@=ll>b_-81yxL9jD_ zb;xh^HvqW4dhNABZSKz0{tgWQYMB~~w!42k`0rZ$YN)|^4HMS<2(D3#wRYqhm3=_g z0elXK7QitZ=r97PSpd%q9V>B`*g5`=JG1f$K$dR;fEwNH-cQ4s;o$mLOdS2Le{Uu7 zmF3NAe~EbupjYPK`u>sfpIHF)`ZdP>yS)G15|9r62CU3}T9=bC-OESgcHdls*u;9w zv-bGMfaY#g?%E}a!m4&u-pP52(yDIMNBJz7;B=C*04$<5<%3Vg!Bo!NZHl(W!Nuz{ zKx*PN1%M@U^AsIb-P2#}zLw--e%gS-JwA%tQrm!XYT{gc8<2RQH47)xpKmky9LE7~ zC&o);Qtc%mX>1}-fk^xl>N}l)oOY_Z)J^(Kvw<^CnNNNi2iA;5ObWaPk!d#0i~yGm zG%Uck-~Am`p^rIenA@v3uG2nXz6M|Be#M6cJ9U%S;CqsatQpJrz3$l9wZ#ACl?k8B zY;;ARrh2z}3=n+GX9jmD@~7+V$PQ%MAMyY-QQhQk<^Ffe-4XZSr-=Jb)vjmP+o561>sI1>vy?pgmj4+pwa4o_ 
zL+-6DDS-Bm-KyQ=1EY$iky(htyQiKlOShZCbj^%ax5cLEi@WvnHOHE?6)mX%I)%29 z)>e-dE0xGDK8RBD)3u97z1C-LFE$-owiA-k$i-0cW6k3dg3jCN`D#q*7F% zMT325OB&DfK?*VR{nh!t%fqI7RkDYp77J$i?BTRJh9I5i(FkwjsjE}VCoP$X)(6RD zkK4qNCzqpV`}F(e)~LZ{F3$9q*{i(i{dxOe1Y*%^UVwcr@tP!6)5ew`9naTjpI=-C zT2+P|h?y`>+wUtEQV)NIkVw`yaC_OJ9rn%NUv_HUZ9lHqjE!SCU6w5KAilI}@wgNp z_Ub4{4B>9Hwf;0dxxs$k-r+e>q3R1+$BQThZ{r)S8aRW(++mb6Dm7E#vxXL~(h2cKTjrv@!7o=|A?RTS7}~w{->Yj_e`81S)G>Vl+2Gkm!k6<8OUV->8jcV#Q&~_ENb9Ctd!LqUp!~3U zJGYY{fFu#KGS!NL{jfB8-tiou!zxeyBqSapel)lcPboPQTxnW?5U;N?>kAH;Wss2PMZNa*JET(gkbL%T`ytZ-1r$v$t#0D;e1f3H zaTvZR)|B`?4hY^)`bg{e1Qf}WRQN?vci0dGw8h`pL-69_zFM$Jq!bo3%DFSA#U|w` ze4Jr$FoY;z$}Y##zo5FAlU+kr`-urVxjG@s=Z7e*EYkJWk2snf9m{^3r~;3GVRHul zoVmkdVJY%UJ!%&x*4p#Hjxc&j4>&5R=!o&4(edv0O0eCHY3+OnFLA;)`R_RUspO=g zg;Z`CgD;LfeEvt@!^;<0L{lq;7((MSTn*VH^szt$anSRuY<88Gk@!KUj(R{0+v>tG zyF#6t_$x3o;Wje;so7+p8oqY@=$DEmA%F=Bkh3S3_zbU|dvvGQ zHu1Ca}MMdX?LqyQ`Ue@jD@UZDvhN`31Q`g4a+UpLDC`YNu$E9+!pR~ z4T#UKXPVlYJYN>$h?(X_E@I5Q2fD{h&~rGzV3C?tD_`PDl6d_T3huwH!#qGEJrEQJ z+=U!S%z^9m*%>za;ZV`F$R0-_Wb8#2>b*6;;qUunu>JxMFEmHZGnnMFuaG&J7kDlKY+#wiE$ z8oxDZwd$uIH@qDBN>$jSu{LoNTy#^iXB;XUdBzI|0m#Q?iHMk2boQtwP9}A7bwb8OPL@+0Ew%X z+3s}Zo8c#$uZiqjk~fSj%c^0n!Z*WQGv%mbzccQv?%o>U#kr+Q7O7-^$mUwYag(v0 zsvrFeDuuOgr-!9QY6Ny+XDRz=q40W_rfhUYD#8lq%bAUPrj^o{mCgjGdby13_Zj-3 z0SjYgKQdeVoDy1*NJ_tZsVpjdA8&AmN&gYeYHXf=vsTIsnrK*>f9<6#dK+RAj7z3wr4(U$n@t{+eO!Et3F zyB%wP<;EkSj=S4g5@RJN)uecO_uPlQv*}0%cBQ`!du5x~d0*2j``hFw2%Beb%ge`V zR~U!+FrYB03`Fa-{^QCmWaPj_BeZR3%n*L|<$@V1*4QB(6158_8&bxC|e9vv-iw zDOvDJWe)=eK@g)%N{U~2L1;#+s3We~lV_Y6D^5;ia3bZtoR~l?$I_k(IN0AMMQ22gm<)1&yt6rO&4uWo8C+fmsc635yu(hqDG?(eX$v) zOFpLDqSMohFO&t#$n;IvdN6We!}T(PA8$yL51CX}67|b8T@XjM96ofH4UV+SvgPI7 zG`ALA!wl8A)lRdyWfbls523FnXQ)Ll9|a*Pqe>6ScVZ1U=dlYAL|pa{Z`N=^5kQwi z#L3ypl9RZ*UGe?Lb4RW^1hxXuRJ8&E1cdVE+yV4M8X73t+rD;$z8*jJu|qP8A5j8s zqc7lQj`Rf75d>=0zEV=67H-tmklu)MA80Od^i{7^94X?n;%~F`X9TW^qc3JLBUe@K zJdAGh8TX!_>y+kbd?7CtpIbTc6V|oW5UUcKyPmE~7yLi~srcRymbe7ipl{z9g-Bt` z4wkfpxF~|XuNjo)%dE0)Gh>|$^o3~MnkokZSOQQkYNiY+geJ79YTV+2TxT8xby4`E zR@VU6oUv&bV>ktmO^gApX-yc%cLre6r2b&W`Q17TxlmGSrYry0Z) zXRANHio!9YC`7Fed~EMAR`3i?{mAtxezExs)qm&ptLC_rak>TeES)U|XP3%-;l z@ZDmvHXLQCbB1@OHmBA_)ivme%@ciP@%?^byRDHwmB4zQGI&~&IEkrw5p-}{ryrot zX%K#JccR)Ffr-S%L!IXiwplW{@ET@m40eTwU56{Vd4(rTbXEEV>B$g^1($~}LNvn( z*iEY){=pbomdo7=+B#iV@nOQ$*jC=^q8#UR)ohg%+pP(jJqw+S79Jtlgvf>0}h#4M`r`q|c9K-oz~qV_pwxSIAX{tzjOgyNoz1 z?*GGXqEb39Ry-jIj^k6_`VuWffCvvezqF5BHH?>$)4g}yam(lMUf?46A%f41H02+H zPul|tI>6{0w6sUdcIF>bFXgi2PX|+hZ`Olcc!!PZ)Z*+k#UK~>z)GJf((+!)xzlmz zc-n>|lyMJB5gSE*p!#eHy%W?LGl^Q@J&7(KC%S3&5GE}086lUaEzatyiG(FnOzCFL zfRDk!<>aMZnB5Bt)SPA6hdy9En)K2_+G?)zgT==h$7dn!`S7@mPTTQrK8szCUlqh8 z9he$kUX#VKQUT@HZ+DAZd?mzn-u~G61o&d>e1@I{9@1*_H#H?>^VQp*o*6f%Y z%WnnfSQjowx&yL7!Y?K4%sUJLBSV+A_&jifzxe%&Tbi7SAOTVb*TZ4kofk3PxQbZH z5-E0#7@WxBuQl_TfFH%|w{uV+yev>SE-?hcjA^-_$elOWH0EjODx25le4u&!pLEyo z{2e=c5o6N4*A#6Rp}Y3P1|+OLswJQ!E~Km@*60SCnI#xo)c6(267g0`-ej4B2XzLd zod>Ej{G2d^6kMbrYFlzqNW=!}%ZkLsOPb^3njgAq(gc z{Zr)r!t1s7`wgxmwPou?Rs=7+A20mK_pIc#)I$;BbN)1a8Wr}iF=eOKLh}OT3M7T@ z)q5|kTne@*+uvj+^yU}1*$G{^W7;0pme1F&4M)=KD3wTmbzk2$mI=C0$Ap!Wo#>n< zuCyMKOFIiBt=VsA%Wt=4JU>qIb4RcV9ZdRkPXUu^hHnNpjW?n@aZ;{~r4{hV%faav zMsbXf#U08J4uH{*s^!lf!*K=#YcSx;>PE$e&0cQPUJ9oeF=z$V|D>38M2lg}0S2xa zfKsvxeeyn9-bga8K#|CGrTgI0$5;#x_Ve*a70)!vM~R@k=YmD25y;eftvcWIu(O(D zvF~-<73m6b+}Yy9#1J;jdza4K`Y94{C4?O;M~$6StB~hu{Z*A`x=mo%w(%8GI?4J? 
zlOSt2mY{3vX=9Owq=hN<6{~FlSud(Uja?(2`6>gbaEG^j5;_cK@LNu9&I==sT2=le zdKSVPOx=0>%_J?jeZiLWUlQbr9>HZ$(O>2+^I|onx{%*j5mY9OH>|PZ~Tr2Ply%Cs!=1?Rz z`*;&$Ac?{wM3gQ3om+^0@C#B(M!VG%8UC@89v@Xz@z2KuHs7^#R*#mj9@ z<1A2mFql5C|L;uvXwx85*x&V?q zpKqokC_%HLug{M$U0qo+IFBZ^mz>+p`CR&piV=SHuII##u$-_%==di51?YQD*5En2 zB(1ecVH9L3Orv@y<*myi{?`h58Ll+!)S87QmB&{}#QrI~!?1*RKW@)uF%*UFF$k&S7GcU~N0K8ex=BO!N{+Fep z!I(#F*gcl0Hl}&ii4eQGTny3E6c3cp%2ngOgIH5%_ zl{pf=#1`p7mfbywS|H_oCR852(eR*yY9_!AFt8qo<6be%MIS#AFxhf^zZiMer(~#Z z!9WCgTJjsrpU+<`i6HmRdVGIzW;Qf^an#smDsuA~&%b5N;d2RjZj@LT;D$<0Yt6iX z^ao4sy{f9SghJc!3W1g-@%5tS%X;q0ryTln5Bhokf*OZLu5TAz{OB;ot##v_-*%0AMg) zs-*U;FKx{~HB5wql&3yIi`AN_VH3{1T(Lp1NH+81ra5i9m! zLuDA7DTC3QD`@s{2>o}PX{J^cV% zBl*(A4K$BXJXnH@h=snU>ybcfNNRfO} zwqi?FYwNTwFYlv((Pc=OHng8PQFM97r4-VGGUZSSjBHPP`RvIIKl)U5lxJtI)UMWu z-6g-+aD?Rd7z1m8F|jbYYWTVTD-1q8QSPE^Wy^oqB92@toF$-6IhOEoVp+4ERHB2G(&LWQ){?LycOXLxqZKU%dOBu+NZwnj$;MM z8K#m)NLoY(M#LE6hCD+!qcz45(Jx#{G`w*_7+8lCQ8X;nc=ELqb+B)d`MOLkJk*w@MSJ<;26UVp;N zHP<*l+@E>odCuiN&wZcsk>-l9ZrG((aPE~Ho26816zm_;u8NG~muF6VLQPX=lrb51 zO@`UT@8k^D%{mCCh0(I4(rK<>x4&N;|6q1Yr1MO9$L)ygI-}9cOldh#OtpNP;`1?M zoy6JKMtw`TCYG@L)du?F5@r?pDC^tXZ+dKHMO>xET&SHShblP;U}2d-O=_ctL22kO zaMl@)bDM)x(&^4V(mGiQ;gXQY(RE`p<6ekq{l#1e8dAylj5BPA(G~Xnu8_{qgdb&W zQEe)3U*c2NSmp*pwA~L~B}%9Wy&I=#$FKuO*quWCtVr3G28nNlQkl9|c zdZG-3MC@+ClokAWiY(nYl}ZeMVnTz#Hd(v>o0xa)NgvUt_{YhatA#ru-S>GdPCd+zG&3ilKGxbc$X%EUonRRsw2g3UU&N!XW^06|RB?AnOxR7ybNX_pJe zcGG{oBD+p>7k18QXuzADlB-m$_p+A|Wnp99l-27PlCw*UeeaXxX?<~vFY>?BW#eG_ znR9(T3wBkGr5kJ5v7>7agnSfTg}{Zn9k6|&26S9DE(l$B7grBqD_7TF^#(v(_n(9d z*w&>zsskiafVl1W+Fzk_4+d|X2~{wHqi5XvN(e6d+1W}l@M{F~*ezxRln^~c@xC~C2TUk zF2(NjmV4$;M(VuA(3a4751%aL{3Q5Ir!Y9O=*pL>Or=vB26QS_sy)Q%QYqwe77>Mz z5CPFd`sDE7THXd0luJLcXSg0~*HRFFb*$=kSie_Csa<&AS2`?#Vu;qgo}}nVE(y78 zPL7hEM%`-~?NbUpP;UB%2!5V>CnP53>H&*|EpeZ@DAbNMUOB*#-x zoLUbrb3zg;NfOCq22hRODjW|Rm*w&CE<_;+H>kQ=&$BzidN&5HUn!%^WhAv-OJ&?L z;0&!h6^=&;p_a3zd9g7HAJ2+#F z%v{V=sWV)>-r6}eCGlvdmRF~t#nJb(86kvIABsF2$OsK`=}Xu!@N)rbC9lN}{#f3pul2Dv!AO!=I>Ar}O$22tPYeeDqG!;MC@B~}| zzFCg8Eo9;&%wy3?azI-yE5b)PNgw-Oj}y?fL&rVXK*cyz``DvCLlZuGWMUI#8DC$B z$V%XMJQ)D`8KApgE%aXiR3khw_P0n9K5QOCZi2Z9ugiR#V*IZ{R`Qgkx?VHc_r3zuI ze6m?$Kg%)G4zih@qo!#eSTSQVLZjRn?`1NY$bI093-LA2i>G+sJ~v*;|DU0_f@_}H zthfkpvuZFev6*m6Y2MR_0I{bbSCvoR%jU!;vsKZ0bPhbQxoB7ttA5Hn>o5jMJ*qjfGhsB6a7s*Q4F@!xct?gvk*$Nj+xbK7&A%ZqVwO4kR3(QY;J5GJ2Y!_JW|*PDD0pKoA(pe_CT>Nm%Q2(%iOj% z1`Kz86txVn3NAwC5_hJ5&cuktd3kxUkM77xFNF67Hgvs{enD<6Q`;i#gWN{oROPxizt%mgr(!JcAxAN zus7R!`&=>&A6vx9>cq987FZJJ`U95?aOka^_1m1w4;^>BkNlqin;>An1*E-jpr%K2 zFzq2++-?8Md%&Fi{<#}(p;Ld3>HfUd7L~@lx-l^!QsmiP6=*ndC{r|})+Qvdhj@$F=i{TG=fJ_UQ@$6?AVI`G% zr?^bfMUG82nmDqG^a*oY){XYQ2LrpJL~d1)IhuvcHp3q&*X9DFE&GSc4BNH`IAxNM zb7t(1Rr_XO%5=TQyH-Lz!V2#Zmb)p>Ok`;;BWkcOYMZH2c3_p;K^v>*xVW4`#qHB* zVuCKzGX}zTj;t$p4*YaP1B);Vyr_ZKFFA&lBSP8U%E9^932Khf`Xxaj8hty;R*!X> zzDDoRuUbbtbxcXB@^2eGQN@+D8z}l$%*pe0UwdZTq!$~SC1Qh};K2%_s4DR@FGq@y zVx-!szOxWiu%u{thp*wuSxw2C8r+i90!;d;LVBglpKb}&-%EI;9&VYl)u^~c2Va-r zK0QBCbXMSp$1(8=8n~i2#EO1mW(gWAUsNOW-t9yfFLsx664gQSPaZx75d^BdN2}3) zpJ9LAju55mY8(a}HZ1>%1pfF5B=~)6$etnw9C+9|doYj#7%MQh`>_YLp+|DSs!=@2 z9kAN~oPz}LAi#DHo58fhl0;5S}8)0H*9u3&=nO z27(nnc%U%gYdt)y13>-?#)0 z1$UeAp;Aiwq2S&#m;i2O;t6ajdxSsBbZ~*d6mSC#PnlQUqx{nz!nf1F2yjmckKj;0 zg!t280u#V>JUn4m>mUJN)dLd_Z=~_@1O!ET1O(u1H8>i)N5RL(8SZ^PXon&UI^Nr{ zfT4#ka_~q3f-KVmL;B~E2OJL0KzM+<`Tm?7%tyK!L_k-MfPew`#R9G~W{Y1({|AKI B4%7eu literal 0 HcmV?d00001