@@ -267,17 +267,17 @@ def get_cve_info(d, cves):
     cve_data = {}
     conn = sqlite3.connect(d.getVar("CVE_CHECK_DB_FILE"))
-    placeholders = ",".join("?" * len(cves))
-    query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholders
-    for row in conn.execute(query, tuple(cves)):
-        cve_data[row[0]] = {}
-        cve_data[row[0]]["summary"] = row[1]
-        cve_data[row[0]]["scorev2"] = row[2]
-        cve_data[row[0]]["scorev3"] = row[3]
-        cve_data[row[0]]["modified"] = row[4]
-        cve_data[row[0]]["vector"] = row[5]
-    conn.close()
+    for cve in cves:
+        for row in conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)):
+            cve_data[row[0]] = {}
+            cve_data[row[0]]["summary"] = row[1]
+            cve_data[row[0]]["scorev2"] = row[2]
+            cve_data[row[0]]["scorev3"] = row[3]
+            cve_data[row[0]]["modified"] = row[4]
+            cve_data[row[0]]["vector"] = row[5]
+
+    conn.close()
     return cve_data

 def cve_write_data(d, patched, unpatched, cve_data):
This code used to construct a single SQL statement that fetched the NVD data
for every CVE requested. For recipes such as the kernel, where there are over
2000 CVEs to report, this can exceed the SQL variable count limit and the
query fails with "sqlite3.OperationalError: too many SQL variables". The
default limit is 999 variables, although some distributions such as Debian
raise it to 250000.

As the NVD table has an index on the ID column, requesting the data
CVE-by-CVE is about five times slower with 2000 CVEs, but the absolute time
difference is insignificant: 0.05s versus 0.01s on my machine.

Signed-off-by: Ross Burton <ross.burton@intel.com>
---
 meta/classes/cve-check.bbclass | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

-- 
2.20.1
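
For reference, a minimal standalone sketch (not part of the patch) of the
failure mode and the workaround. The schema below is a simplified stand-in
for the real NVD table, and whether the bulk query actually fails depends on
the SQLITE_MAX_VARIABLE_NUMBER your SQLite library was built with:

# Standalone illustration of the problem and the fix; the table and
# column names are simplified stand-ins for the real NVD schema.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE NVD (ID TEXT PRIMARY KEY, SUMMARY TEXT)")
cves = ["CVE-2019-%04d" % i for i in range(2000)]
conn.executemany("INSERT INTO NVD VALUES (?, ?)", [(c, "demo") for c in cves])

# Old approach: one bound variable per CVE.  This raises
# "sqlite3.OperationalError: too many SQL variables" once len(cves)
# exceeds the variable limit the library was built with.
try:
    placeholders = ",".join("?" * len(cves))
    query = "SELECT * FROM NVD WHERE ID IN (%s)" % placeholders
    rows = conn.execute(query, tuple(cves)).fetchall()
    print("bulk query returned %d rows (limit not hit on this build)" % len(rows))
except sqlite3.OperationalError as e:
    print("bulk query failed: %s" % e)

# New approach: one query per CVE, a single bound variable each time, so
# the limit can never be hit; the index on ID keeps each lookup cheap.
fetched = {}
for cve in cves:
    for row in conn.execute("SELECT * FROM NVD WHERE ID IS ?", (cve,)):
        fetched[row[0]] = row[1]
print("per-CVE queries returned %d rows" % len(fetched))

conn.close()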