
Commit 3ef7f06

chore: ADDON-80802 fixing linting failures
1 parent: baf6cf0

10 files changed: +62 −46 lines changed


pytest_splunk_addon/cim_tests/test_generator.py

Lines changed: 0 additions & 1 deletion

@@ -53,7 +53,6 @@ def __init__(
         test_field_type=["required", "conditional"],
         common_fields_path=None,
     ):
-
         self.data_model_handler = DataModelHandler(data_model_path)
         self.addon_parser = AddonParser(addon_path)
         self.tokenized_events = tokenized_events

pytest_splunk_addon/event_ingestors/hec_event_ingestor.py

Lines changed: 1 addition & 2 deletions

@@ -87,14 +87,13 @@ def ingest(self, events, thread_count):
         """
         data = list()
         for event in events:
-
             event_dict = {
                 "sourcetype": event.metadata.get("sourcetype", "pytest_splunk_addon"),
                 "source": event.metadata.get("source", "pytest_splunk_addon:hec:event"),
                 "event": event.event,
                 "index": event.metadata.get("index", "main"),
             }
-            if event.metadata["ingest_with_uuid"] == "true":
+            if event.metadata.get("ingest_with_uuid") == "true":
                 event_dict["fields"] = {"unique_identifier": event.unique_identifier}

             if event.metadata.get("host_type") in ("plugin", None):
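
The behavioural part of this hunk is the switch from event.metadata["ingest_with_uuid"] to event.metadata.get("ingest_with_uuid"): subscripting raises KeyError when the key is missing from an event's metadata, while .get() returns None and the comparison simply evaluates to False. A minimal sketch with invented metadata values, not taken from the commit:

    # Sketch only: dict.get() vs. [] when "ingest_with_uuid" is absent.
    metadata = {"sourcetype": "pytest_splunk_addon", "index": "main"}

    # metadata["ingest_with_uuid"] would raise KeyError for this event.
    if metadata.get("ingest_with_uuid") == "true":  # .get() returns None here
        print("attach unique_identifier to the HEC payload")
    else:
        print("ingest the event without a unique_identifier")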

pytest_splunk_addon/event_ingestors/ingestor_helper.py

Lines changed: 3 additions & 1 deletion

@@ -95,7 +95,9 @@ def ingest_events(
             thread_count (int): number of threads to use for ingestion
             store_events (bool): Boolean param for generating json files with tokenised events
         """
-        sample_generator = SampleXdistGenerator(addon_path, ingest_meta_data["ingest_with_uuid"], config_path)
+        sample_generator = SampleXdistGenerator(
+            addon_path, ingest_meta_data["ingest_with_uuid"], config_path
+        )
         store_sample = sample_generator.get_samples(store_events)
         tokenized_events = store_sample.get("tokenized_events")
         ingestor_dict = cls.get_consolidated_events(tokenized_events)

pytest_splunk_addon/fields_tests/test_generator.py

Lines changed: 10 additions & 10 deletions

@@ -191,11 +191,11 @@ def generate_requirements_datamodels_tests(self):
                 for datamodel in datamodels
             ]
             sample_event = {
-                    "datamodels": datamodels,
-                    "stanza": escaped_event,
-                }
-            if event.metadata["ingest_with_uuid"] == "true":
-                    sample_event["unique_identifier"] = event.unique_identifier
+                "datamodels": datamodels,
+                "stanza": escaped_event,
+            }
+            if event.metadata.get("ingest_with_uuid") == "true":
+                sample_event["unique_identifier"] = event.unique_identifier
             yield pytest.param(
                 sample_event,
                 id=f"{'-'.join(datamodels)}::sample_name::{event.sample_name}::host::{event.metadata.get('host')}",
@@ -265,11 +265,11 @@ def generate_requirements_tests(self):
                     if field not in exceptions
                 }
                 sample_event = {
-                        "escaped_event": escaped_event,
-                        "fields": requirement_fields,
-                        "modinput_params": modinput_params,
-                    }
-                if metadata["ingest_with_uuid"] == "true":
-                        sample_event["unique_identifier"] = event.unique_identifier
+                    "escaped_event": escaped_event,
+                    "fields": requirement_fields,
+                    "modinput_params": modinput_params,
+                }
+                if metadata.get("ingest_with_uuid") == "true":
+                    sample_event["unique_identifier"] = event.unique_identifier
                 yield pytest.param(
                     sample_event,
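
Each yielded pytest.param carries a plain dict; when --ingest-with-uuid is "true" it additionally carries the event's unique_identifier, which the search-time test templates later match on instead of the escaped raw event. A hypothetical illustration of one generated parameter; sample name, host, field values and the UUID are invented:

    # Invented values; shape follows the dict built in generate_requirements_datamodels_tests.
    sample_event = {
        "datamodels": ["Authentication"],
        "stanza": "user=admin action=success",  # escaped_event
        "unique_identifier": "9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d",  # only when ingest_with_uuid == "true"
    }
    # The param id rendered from the f-string above would read roughly:
    # "Authentication::sample_name::sample_auth.log::host::so1"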

pytest_splunk_addon/fields_tests/test_templates.py

Lines changed: 28 additions & 19 deletions

@@ -163,7 +163,7 @@ def test_requirements_fields(

         # Search Query
         record_property(
-            "stanza_name", splunk_searchtime_fields_requirements["escaped_event"]
+            "Event_with", splunk_searchtime_fields_requirements["escaped_event"]
         )
         record_property("fields", splunk_searchtime_fields_requirements["fields"])
         record_property(
@@ -187,11 +187,13 @@ def test_requirements_fields(

         if splunk_searchtime_fields_requirements.get("unique_identifier"):
             record_property(
-                "stanza_name", splunk_searchtime_fields_requirements["unique_identifier"]
-            )
-            unique_identifier = splunk_searchtime_fields_requirements["unique_identifier"]
-
-            search = f"search {index_list} {basic_search} unique_identifier=\"{unique_identifier}\" | fields *"
+                "Event_with", splunk_searchtime_fields_requirements["unique_identifier"]
+            )
+            unique_identifier = splunk_searchtime_fields_requirements[
+                "unique_identifier"
+            ]
+
+            search = f'search {index_list} {basic_search} unique_identifier="{unique_identifier}" | fields *'
         else:
             search = f"search {index_list} {basic_search} {escaped_event} | fields *"

@@ -230,12 +232,16 @@ def test_requirements_fields(
         if not wrong_value_fields == {}:
             self.logger.error("Wrong field values:\n" + wrong_values_table)

-        assert wrong_value_fields == {}, (
+        error_message = (
             f"\nNot all required fields have correct values or some fields are missing in Splunk. Wrong field values:\n{wrong_values_table}"
             f"{format_search_query_log(search)}"
-            f"Test failed for event: {escaped_event}\n"
         )

+        if splunk_searchtime_fields_requirements.get("unique_identifier"):
+            error_message += f"Test failed for event: {escaped_event}\n"
+
+        assert wrong_value_fields == {}, error_message
+
     @pytest.mark.splunk_searchtime_fields
     @pytest.mark.splunk_searchtime_fields_negative
     def test_props_fields_no_dash_not_empty(
@@ -415,19 +421,19 @@ def test_datamodels(

         if splunk_searchtime_fields_datamodels.get("unique_identifier"):
             record_property(
-                "stanza_name", splunk_searchtime_fields_datamodels["unique_identifier"]
-            )
+                "Event_with", splunk_searchtime_fields_datamodels["unique_identifier"]
+            )
             unique_identifier = splunk_searchtime_fields_datamodels["unique_identifier"]

             self.logger.info(
-                f"Testing for tag {datamodels} with unique_identifier=\"{unique_identifier}\""
-            )
-
-            search = f"search {index_list} unique_identifier=\"{unique_identifier}\" | fields *"
+                f'Testing for tag {datamodels} with unique_identifier="{unique_identifier}"'
+            )
+
+            search = f'search {index_list} unique_identifier="{unique_identifier}" | fields *'
         else:
             self.logger.info(
-                f"Testing for tag {datamodels} with tag_query {escaped_event}"
-            )
+                f"Testing for tag {datamodels} with tag_query {escaped_event}"
+            )
             search = f"search {index_list} {escaped_event} | fields *"

         self.logger.info(f"Search: {search}")
@@ -487,9 +493,12 @@ def test_datamodels(
             ],
         )

-        assert (
-            missing_datamodels == [] and wrong_datamodels == []
-        ), f"Incorrect datamodels found:\n{exc_message}"
+        error_message = f"Incorrect datamodels found:\n{exc_message}"
+
+        if splunk_searchtime_fields_datamodels.get("unique_identifier"):
+            error_message += f"\nTest failed for event: {escaped_event}\n"
+
+        assert missing_datamodels == [] and wrong_datamodels == [], error_message

     @pytest.mark.splunk_searchtime_fields
     @pytest.mark.splunk_searchtime_fields_eventtypes
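
The net effect in both tests is the same: when a unique_identifier is present, the event is located in Splunk by that injected field, otherwise by its escaped raw text or tag query. Roughly how the two f-string branches render, with invented index and event values:

    # Illustrative only; index_list, basic_search and the event values are invented.
    index_list = "(index=main)"
    basic_search = "sourcetype=pytest_splunk_addon"

    # UUID branch:
    #   search (index=main) sourcetype=pytest_splunk_addon unique_identifier="9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d" | fields *
    # Fallback branch, matching on the escaped raw event:
    #   search (index=main) sourcetype=pytest_splunk_addon "user=admin action=success" | fields *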

pytest_splunk_addon/sample_generation/pytest_splunk_addon_data_parser.py

Lines changed: 3 additions & 1 deletion

@@ -107,7 +107,9 @@ def get_sample_stanzas(self):
         results = []
         for sample_name, stanza_params in sorted(_psa_data.items()):
             sample_path = os.path.join(self._path_to_samples, sample_name)
-            results.append(SampleStanza(sample_path, stanza_params, self.ingest_with_uuid))
+            results.append(
+                SampleStanza(sample_path, stanza_params, self.ingest_with_uuid)
+            )
         return results

     def _get_psa_data_stanzas(self):

pytest_splunk_addon/sample_generation/sample_event.py

Lines changed: 1 addition & 1 deletion

@@ -68,7 +68,7 @@ def __init__(self, event_string, metadata, sample_name, requirement_test_data=No
         self.time_values = list()
         self.metadata = metadata
         self.sample_name = sample_name
-        if metadata["ingest_with_uuid"] == "true":
+        if metadata.get("ingest_with_uuid") == "true":
             self.unique_identifier = str(uuid.uuid4())
         self.host_count = 0
         self.requirement_test_data = requirement_test_data
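
For each SampleEvent created with ingest_with_uuid enabled, str(uuid.uuid4()) produces a random, effectively collision-free identifier, which is what later allows a search to pin down exactly one ingested event. A quick standalone illustration (the printed value differs on every run):

    import uuid

    unique_identifier = str(uuid.uuid4())
    print(unique_identifier)  # e.g. "3d6f0e2a-8c1b-4f6e-9a2d-5c7e1b4a9f10", random each run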

pytest_splunk_addon/sample_generation/sample_generator.py

Lines changed: 3 additions & 1 deletion

@@ -45,7 +45,9 @@ def get_samples(self):
         """
         if not SampleGenerator.sample_stanzas:
             psa_data_parser = PytestSplunkAddonDataParser(
-                self.addon_path, config_path=self.config_path, ingest_with_uuid=self.ingest_with_uuid
+                self.addon_path,
+                config_path=self.config_path,
+                ingest_with_uuid=self.ingest_with_uuid,
             )
             sample_stanzas = psa_data_parser.get_sample_stanzas()
             SampleGenerator.conf_name = psa_data_parser.conf_name

pytest_splunk_addon/sample_generation/sample_xdist_generator.py

Lines changed: 11 additions & 7 deletions

@@ -80,7 +80,9 @@ def get_samples(self, store_events):
             with open(file_path, "wb") as file_obj:
                 pickle.dump(store_sample, file_obj)
         else:
-            sample_generator = SampleGenerator(self.addon_path, self.ingest_with_uuid, self.config_path)
+            sample_generator = SampleGenerator(
+                self.addon_path, self.ingest_with_uuid, self.config_path
+            )
             tokenized_events = list(sample_generator.get_samples())
             store_sample = {
                 "conf_name": SampleGenerator.conf_name,
@@ -140,14 +142,16 @@ def store_events(self, tokenized_events):
                     ],
                 }
                 if self.ingest_with_uuid == "true":
-                    tokenized_samples_dict[each_event.sample_name]["events"][0]["unique_identifier"] = each_event.unique_identifier
+                    tokenized_samples_dict[each_event.sample_name]["events"][0][
+                        "unique_identifier"
+                    ] = each_event.unique_identifier
             else:
                 sample_event = {
-                        "event": each_event.event,
-                        "key_fields": each_event.key_fields,
-                        "time_values": each_event.time_values,
-                        "requirement_test_data": each_event.requirement_test_data,
-                    }
+                    "event": each_event.event,
+                    "key_fields": each_event.key_fields,
+                    "time_values": each_event.time_values,
+                    "requirement_test_data": each_event.requirement_test_data,
+                }
                 if self.ingest_with_uuid == "true":
                     sample_event["unique_identifier"] = each_event.unique_identifier
                 tokenized_samples_dict[each_event.sample_name]["events"].append(
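
When store_events is set, each sample's tokenized events are persisted for inspection, and with ingest_with_uuid == "true" every stored event additionally records its unique_identifier. A sketch of the resulting structure for one sample; all keys beyond those visible in this diff, and all values, are assumptions:

    # Assumed shape of tokenized_samples_dict for one sample; values are invented.
    tokenized_samples_dict = {
        "sample_auth.log": {
            "events": [
                {
                    "event": "user=admin action=success",
                    "key_fields": {},
                    "time_values": [],
                    "requirement_test_data": None,
                    "unique_identifier": "9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d",  # only when ingest_with_uuid == "true"
                }
            ],
        }
    }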

pytest_splunk_addon/splunk.py

Lines changed: 2 additions & 3 deletions

@@ -52,10 +52,9 @@ def pytest_addoption(parser):
         "--ingest-with-uuid",
         action="store",
         dest="ingest_with_uuid",
-        default="False",
+        default="false",
         help=(
-            "Type of ingesting and searching the events into Splunk "
-            "with uuid or without uuid."
+            'Use generated UUID for ingesting and searching events. Setting this parameter to "true" will lead to matching events in search by the ID and not by escaped _raw. Default is "false".'
         ),
     )

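
With the default now the lowercase string "false", it lines up with the == "true" checks elsewhere in this commit. Enabling UUID-based matching is then an explicit opt-in on the pytest command line; only --ingest-with-uuid below comes from this diff, the other options are illustrative placeholders for a typical pytest-splunk-addon run:

    pytest --splunk-type=external --splunk-host=<splunk-host> --ingest-with-uuid=true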
