Diffstat (limited to 'meta/recipes-core/meta/cve-update-db-native.bb')
-rw-r--r--  meta/recipes-core/meta/cve-update-db-native.bb  192
1 file changed, 113 insertions, 79 deletions
diff --git a/meta/recipes-core/meta/cve-update-db-native.bb b/meta/recipes-core/meta/cve-update-db-native.bb
index 2c427a5884..e5822cee58 100644
--- a/meta/recipes-core/meta/cve-update-db-native.bb
+++ b/meta/recipes-core/meta/cve-update-db-native.bb
@@ -12,40 +12,47 @@ deltask do_compile
deltask do_install
deltask do_populate_sysroot
+NVDCVE_URL ?= "https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-"
+
python () {
- if not d.getVar("CVE_CHECK_DB_FILE"):
+ if not bb.data.inherits_class("cve-check", d):
raise bb.parse.SkipRecipe("Skip recipe when cve-check class is not loaded.")
}
-python do_populate_cve_db() {
+python do_fetch() {
"""
Update NVD database with json data feed
"""
-
- import sqlite3, urllib, urllib.parse, shutil, gzip
+ import bb.utils
+ import bb.progress
+ import sqlite3, urllib, urllib.parse, gzip
from datetime import date
- BASE_URL = "https://nvd.nist.gov/feeds/json/cve/1.0/nvdcve-1.0-"
+ bb.utils.export_proxies(d)
+
YEAR_START = 2002
- db_dir = os.path.join(d.getVar("DL_DIR"), 'CVE_CHECK')
- db_file = os.path.join(db_dir, 'nvdcve_1.0.db')
- json_tmpfile = os.path.join(db_dir, 'nvd.json.gz')
- proxy = d.getVar("https_proxy")
+ db_file = d.getVar("CVE_CHECK_DB_FILE")
+ db_dir = os.path.dirname(db_file)
+
+ if os.path.exists("{0}-journal".format(db_file)):
+ # If a journal is present the last update might have been interrupted. In that case,
+ # just wipe any leftovers and force the DB to be recreated.
+ os.remove("{0}-journal".format(db_file))
- if proxy:
- # instantiate an opener but do not install it as the global
- # opener unless if we're really sure it's applicable for all
- # urllib requests
- proxy_handler = urllib.request.ProxyHandler({'https': proxy})
- proxy_opener = urllib.request.build_opener(proxy_handler)
- else:
- proxy_opener = None
+ if os.path.exists(db_file):
+ os.remove(db_file)
- cve_f = open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a')
+ # Don't refresh the database more than once an hour
+ try:
+ import time
+ if time.time() - os.path.getmtime(db_file) < (60*60):
+ bb.debug(2, "Recently updated, skipping")
+ return
+ except OSError:
+ pass
- if not os.path.isdir(db_dir):
- os.mkdir(db_dir)
+ bb.utils.mkdirhier(db_dir)
# Connect to database
conn = sqlite3.connect(db_file)
@@ -53,70 +60,75 @@ python do_populate_cve_db() {
initialize_db(c)
- for year in range(YEAR_START, date.today().year + 1):
- year_url = BASE_URL + str(year)
- meta_url = year_url + ".meta"
- json_url = year_url + ".json.gz"
-
- # Retrieve meta last modified date
-
- response = None
-
- if proxy_opener:
- response = proxy_opener.open(meta_url)
- else:
- req = urllib.request.Request(meta_url)
- response = urllib.request.urlopen(req)
+ with bb.progress.ProgressHandler(d) as ph, open(os.path.join(d.getVar("TMPDIR"), 'cve_check'), 'a') as cve_f:
+ total_years = date.today().year + 1 - YEAR_START
+ for i, year in enumerate(range(YEAR_START, date.today().year + 1)):
+ bb.debug(2, "Updating %d" % year)
+ ph.update((float(i + 1) / total_years) * 100)
+ year_url = (d.getVar('NVDCVE_URL')) + str(year)
+ meta_url = year_url + ".meta"
+ json_url = year_url + ".json.gz"
- if response:
- for l in response.read().decode("utf-8").splitlines():
- key, value = l.split(":", 1)
- if key == "lastModifiedDate":
- last_modified = value
- break
- else:
- bb.warn("Cannot parse CVE metadata, update failed")
- return
-
- # Compare with current db last modified date
- c.execute("select DATE from META where YEAR = ?", (year,))
- meta = c.fetchone()
- if not meta or meta[0] != last_modified:
- # Clear products table entries corresponding to current year
- c.execute("delete from PRODUCTS where ID like ?", ('CVE-%d%%' % year,))
-
- # Update db with current year json file
+ # Retrieve meta last modified date
try:
- if proxy_opener:
- response = proxy_opener.open(json_url)
- else:
- req = urllib.request.Request(json_url)
- response = urllib.request.urlopen(req)
-
- if response:
- update_db(c, gzip.decompress(response.read()).decode('utf-8'))
- c.execute("insert or replace into META values (?, ?)", [year, last_modified])
+ response = urllib.request.urlopen(meta_url)
except urllib.error.URLError as e:
- cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n')
- bb.warn("Cannot parse CVE data (%s), update failed" % e.reason)
+ cve_f.write('Warning: CVE db update error, Unable to fetch CVE data.\n\n')
+ bb.warn("Failed to fetch CVE data (%s)" % e.reason)
return
- # Update success, set the date to cve_check file.
- if year == date.today().year:
- cve_f.write('CVE database update : %s\n\n' % date.today())
+ if response:
+ for l in response.read().decode("utf-8").splitlines():
+ key, value = l.split(":", 1)
+ if key == "lastModifiedDate":
+ last_modified = value
+ break
+ else:
+ bb.warn("Cannot parse CVE metadata, update failed")
+ return
+
+ # Compare with current db last modified date
+ c.execute("select DATE from META where YEAR = ?", (year,))
+ meta = c.fetchone()
+ if not meta or meta[0] != last_modified:
+ bb.debug(2, "Updating entries")
+ # Clear products table entries corresponding to current year
+ c.execute("delete from PRODUCTS where ID like ?", ('CVE-%d%%' % year,))
+
+ # Update db with current year json file
+ try:
+ response = urllib.request.urlopen(json_url)
+ if response:
+ update_db(c, gzip.decompress(response.read()).decode('utf-8'))
+ c.execute("insert or replace into META values (?, ?)", [year, last_modified])
+ except urllib.error.URLError as e:
+ cve_f.write('Warning: CVE db update error, CVE data is outdated.\n\n')
+ bb.warn("Cannot parse CVE data (%s), update failed" % e.reason)
+ return
+ else:
+ bb.debug(2, "Already up to date (last modified %s)" % last_modified)
+ # Update success, set the date to cve_check file.
+ if year == date.today().year:
+ cve_f.write('CVE database update : %s\n\n' % date.today())
- cve_f.close()
- conn.commit()
- conn.close()
+ conn.commit()
+ conn.close()
}
+do_fetch[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
+do_fetch[file-checksums] = ""
+do_fetch[vardeps] = ""
+
def initialize_db(c):
c.execute("CREATE TABLE IF NOT EXISTS META (YEAR INTEGER UNIQUE, DATE TEXT)")
+
c.execute("CREATE TABLE IF NOT EXISTS NVD (ID TEXT UNIQUE, SUMMARY TEXT, \
SCOREV2 TEXT, SCOREV3 TEXT, MODIFIED INTEGER, VECTOR TEXT)")
+
c.execute("CREATE TABLE IF NOT EXISTS PRODUCTS (ID TEXT, \
VENDOR TEXT, PRODUCT TEXT, VERSION_START TEXT, OPERATOR_START TEXT, \
VERSION_END TEXT, OPERATOR_END TEXT)")
+ c.execute("CREATE INDEX IF NOT EXISTS PRODUCT_ID_IDX on PRODUCTS(ID);")
def parse_node_and_insert(c, node, cveId):
# Parse children node if needed
@@ -127,14 +139,27 @@ def parse_node_and_insert(c, node, cveId):
for cpe in node.get('cpe_match', ()):
if not cpe['vulnerable']:
return
- cpe23 = cpe['cpe23Uri'].split(':')
+ cpe23 = cpe.get('cpe23Uri')
+ if not cpe23:
+ return
+ cpe23 = cpe23.split(':')
+ if len(cpe23) < 7:
+ return
vendor = cpe23[3]
product = cpe23[4]
version = cpe23[5]
- if version != '*':
+ if cpe23[6] == '*' or cpe23[6] == '-':
+ version_suffix = ""
+ else:
+ version_suffix = "_" + cpe23[6]
+
+ if version != '*' and version != '-':
# Version is defined, this is a '=' match
- yield [cveId, vendor, product, version, '=', '', '']
+ yield [cveId, vendor, product, version + version_suffix, '=', '', '']
+ elif version == '-':
+ # no version information is available
+ yield [cveId, vendor, product, version, '', '', '']
else:
# Parse start version, end version and operators
op_start = ''
@@ -158,7 +183,12 @@ def parse_node_and_insert(c, node, cveId):
op_end = '<'
v_end = cpe['versionEndExcluding']
- yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
+ if op_start or op_end or v_start or v_end:
+ yield [cveId, vendor, product, v_start, op_start, v_end, op_end]
+ else:
+ # There is no version information; it is just expressed differently.
+ # Save processing by representing it as '-'.
+ yield [cveId, vendor, product, '-', '', '', '']
c.executemany("insert into PRODUCTS values (?, ?, ?, ?, ?, ?, ?)", cpe_generator())
@@ -170,15 +200,20 @@ def update_db(c, jsondata):
if not elt['impact']:
continue
+ accessVector = None
cveId = elt['cve']['CVE_data_meta']['ID']
cveDesc = elt['cve']['description']['description_data'][0]['value']
date = elt['lastModifiedDate']
- accessVector = elt['impact']['baseMetricV2']['cvssV2']['accessVector']
- cvssv2 = elt['impact']['baseMetricV2']['cvssV2']['baseScore']
-
try:
+ accessVector = elt['impact']['baseMetricV2']['cvssV2']['accessVector']
+ cvssv2 = elt['impact']['baseMetricV2']['cvssV2']['baseScore']
+ except KeyError:
+ cvssv2 = 0.0
+ try:
+ accessVector = accessVector or elt['impact']['baseMetricV3']['cvssV3']['attackVector']
cvssv3 = elt['impact']['baseMetricV3']['cvssV3']['baseScore']
- except:
+ except KeyError:
+ accessVector = accessVector or "UNKNOWN"
cvssv3 = 0.0
c.execute("insert or replace into NVD values (?, ?, ?, ?, ?, ?)",
@@ -189,7 +224,6 @@ def update_db(c, jsondata):
parse_node_and_insert(c, config, cveId)
-addtask do_populate_cve_db before do_fetch
-do_populate_cve_db[nostamp] = "1"
+do_fetch[nostamp] = "1"
EXCLUDE_FROM_WORLD = "1"
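
For reference, below is a minimal standalone sketch of the CPE 2.3 handling this patch adds to parse_node_and_insert(). It is not part of the recipe; the helper name and the example URI are made up for illustration, but the field indices and the '*'/'-' rules follow the hunk above (cpe:2.3:part:vendor:product:version:update:...).

    # Hypothetical helper mirroring the patch's version/update-field handling.
    def cpe_to_product_row(cve_id, cpe23_uri):
        fields = cpe23_uri.split(':')
        if len(fields) < 7:
            return None
        vendor, product, version, update = fields[3], fields[4], fields[5], fields[6]

        # The "update" field is folded into the version unless it is a wildcard.
        suffix = "" if update in ('*', '-') else "_" + update

        if version not in ('*', '-'):
            # A concrete version means an exact ('=') match.
            return [cve_id, vendor, product, version + suffix, '=', '', '']
        if version == '-':
            # '-' means no version information is available at all.
            return [cve_id, vendor, product, version, '', '', '']
        # '*' means a range; the bounds come from the versionStart*/versionEnd*
        # keys of the cpe_match entry and are handled separately in the recipe.
        return [cve_id, vendor, product, '', '', '', '']

    # Example (made-up CVE id and CPE string):
    # cpe_to_product_row("CVE-2021-0000", "cpe:2.3:a:openssl:openssl:1.1.1:k:*:*:*:*:*:*")
    # -> ['CVE-2021-0000', 'openssl', 'openssl', '1.1.1_k', '=', '', '']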