dataset: cleanup datasets that hit the memcap while loading

Datasets that hit the memcap limit while loading need to be discarded;
otherwise the dataset stays loaded with partial data even though the
signature itself is not loaded because of the memcap error.

Ticket: #6678
pull/10865/head
Andreas Herz 2 years ago committed by Victor Julien
parent dc5b78ec71
commit 1f9600e487

@ -746,6 +746,11 @@ Dataset *DatasetGet(const char *name, enum DatasetTypes type, const char *save,
break;
}
if (set->hash && SC_ATOMIC_GET(set->hash->memcap_reached)) {
SCLogError("dataset too large for set memcap");
goto out_err;
}
SCLogDebug("set %p/%s type %u save %s load %s",
set, set->name, set->type, set->save, set->load);

@ -406,10 +406,6 @@ int DetectDatasetSetup (DetectEngineCtx *de_ctx, Signature *s, const char *rawst
SCLogError("failed to set up dataset '%s'.", name);
return -1;
}
if (set->hash && SC_ATOMIC_GET(set->hash->memcap_reached)) {
SCLogError("dataset too large for set memcap");
return -1;
}
cd = SCCalloc(1, sizeof(DetectDatasetData));
if (unlikely(cd == NULL))

Loading…
Cancel
Save