Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Netskope improvements #37493

Draft
wants to merge 2 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ def setup_last_run(last_run_dict: dict, event_types_to_fetch: list[str]) -> dict


def handle_data_export_single_event_type(client: Client, event_type: str, operation: str, limit: int,
execution_start_time: datetime) -> tuple[list, bool]:
execution_start_time: datetime, all_event_types: list) -> bool:
"""
Pulls events per each given event type. Each event type receives a dedicated index name that is constructed using the event
type and the integration instance name. The function keeps pulling events as long as the limit was not exceeded.
Expand Down Expand Up @@ -216,12 +216,12 @@ def handle_data_export_single_event_type(client: Client, event_type: str, operat

# If the execution exceeded the timeout we will break
if is_execution_time_exceeded(start_time=execution_start_time):
return events, True
return True

# Wait time between queries
if wait_time:
demisto.debug(f'Going to sleep between queries, wait_time is {wait_time} seconds')
time.sleep(wait_time) # pylint: disable=E9003
time.sleep(wait_time) # pylint: disable=E9003
else:
demisto.debug('No wait time received, going to sleep for 1 second')
time.sleep(1)
Expand All @@ -239,16 +239,18 @@ def handle_data_export_single_event_type(client: Client, event_type: str, operat

events.extend(results)

all_event_types.extend(prepare_events(results, event_type))

if not results or len(results) < MAX_EVENTS_PAGE_SIZE:
break

print_event_statistics_logs(events=events, event_type=event_type)
# We mark this event type as successfully fetched
client.fetch_status[event_type] = True
return events, False
return False


def get_all_events(client: Client, last_run: dict, limit: int = MAX_EVENTS_PAGE_SIZE) -> tuple[list, dict]:
def get_all_events(client: Client, last_run: dict, all_event_types: list, limit: int = MAX_EVENTS_PAGE_SIZE) -> dict:
"""
Iterates over all supported event types and call the handle data export logic. Once each event type is done the operation for
next run is set to 'next'.
Expand All @@ -263,35 +265,35 @@ def get_all_events(client: Client, last_run: dict, limit: int = MAX_EVENTS_PAGE_
dict: The updated last_run object.
"""

all_types_events_result = []
# all_event_types = []
execution_start_time = datetime.utcnow()
for event_type in client.event_types_to_fetch:
event_type_operation = last_run.get(event_type, {}).get('operation')

events, time_out = handle_data_export_single_event_type(client=client, event_type=event_type,
operation=event_type_operation, limit=limit,
execution_start_time=execution_start_time)
all_types_events_result.extend(prepare_events(events, event_type))
time_out = handle_data_export_single_event_type(client=client, event_type=event_type,
operation=event_type_operation, limit=limit,
execution_start_time=execution_start_time,
all_event_types=all_event_types)
last_run[event_type] = {'operation': 'next'}

if time_out:
demisto.info('Timeout reached, stopped pulling events')
break

return all_types_events_result, last_run
return last_run


''' COMMAND FUNCTIONS '''


def test_module(client: Client, last_run: dict, max_fetch: int) -> str:
get_all_events(client, last_run, limit=max_fetch, )
get_all_events(client, last_run, limit=max_fetch, all_event_types=[])
return 'ok'


def get_events_command(client: Client, args: dict[str, Any], last_run: dict) -> tuple[CommandResults, list]:
def get_events_command(client: Client, args: dict[str, Any], last_run: dict, events: list) -> tuple[CommandResults, list]:
limit = arg_to_number(args.get('limit')) or MAX_EVENTS_PAGE_SIZE
events, _ = get_all_events(client=client, last_run=last_run, limit=limit)
_ = get_all_events(client=client, last_run=last_run, limit=limit, all_event_types=events)

for event in events:
event['timestamp'] = timestamp_to_datestring(event['timestamp'] * 1000)
Expand Down Expand Up @@ -357,37 +359,41 @@ def main() -> None: # pragma: no cover
last_run = setup_last_run(demisto.getLastRun(), event_types_to_fetch)
demisto.debug(f'Running with the following last_run - {last_run}')

events: list[dict] = []
# events: list[dict] = []
all_event_types: list[dict] = []
new_last_run: dict = {}
if command_name == 'test-module':
# This is the call made when pressing the integration Test button.
result = test_module(client, last_run, max_fetch=MAX_EVENTS_PAGE_SIZE) # type: ignore[arg-type]
return_results(result)

elif command_name == 'netskope-get-events':
results, events = get_events_command(client, demisto.args(), last_run)
results, events = get_events_command(client, demisto.args(), last_run, events=[])
if argToBoolean(demisto.args().get('should_push_events', 'true')):
send_events_to_xsiam(events=events, vendor=vendor, product=product) # type: ignore
send_events_to_xsiam(events=events, vendor=vendor, product=product,
chunk_size=XSIAM_EVENT_CHUNK_SIZE_LIMIT) # type: ignore
return_results(results)

elif command_name == 'fetch-events':
# We have this try-finally block for fetch events where wrapping up should be done if errors occur
start = datetime.utcnow()
try:
demisto.debug(f'Sending request with last run {last_run}')
events, new_last_run = get_all_events(client, last_run, max_fetch)
new_last_run = get_all_events(client=client, last_run=last_run, limit=max_fetch,
all_event_types=all_event_types)
finally:
demisto.debug(f'sending {len(events)} to xsiam')
send_events_to_xsiam(events=events, vendor=vendor, product=product)
demisto.debug(f'sending {len(all_event_types)} to xsiam')
send_events_to_xsiam(events=all_event_types, vendor=vendor, product=product,
chunk_size=XSIAM_EVENT_CHUNK_SIZE_LIMIT)

for event_type, status, in client.fetch_status.items():
if not status:
new_last_run[event_type] = {'operation': 'resend'}

end = datetime.utcnow()

demisto.debug(f'Handled {len(events)} total events in {(end - start).seconds} seconds')
next_trigger_time(len(events), max_fetch, new_last_run)
demisto.debug(f'Handled {len(all_event_types)} total events in {(end - start).seconds} seconds')
next_trigger_time(len(all_event_types), max_fetch, new_last_run)
demisto.debug(f'Setting the last_run to: {new_last_run}')
demisto.setLastRun(new_last_run)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,8 @@ def json_callback(request, _):
proxy=False, event_types_to_fetch=ALL_SUPPORTED_EVENT_TYPES)
url_matcher = re.compile('https://netskope[.]example[.]com/events/dataexport/events')
requests_mock.get(url_matcher, json=json_callback)
events, new_last_run = get_all_events(client, FIRST_LAST_RUN)
events = []
new_last_run = get_all_events(client, FIRST_LAST_RUN, all_event_types=events)
assert len(events) == 26
assert events[0].get('event_id') == '1'
assert events[0].get('_time') == '2023-05-22T10:30:16.000Z'
Expand All @@ -95,9 +96,9 @@ def test_get_events_command(mocker):
"""
from NetskopeEventCollector import get_events_command
client = Client(BASE_URL, 'dummy_token', False, False, event_types_to_fetch=ALL_SUPPORTED_EVENT_TYPES)
mocker.patch('NetskopeEventCollector.get_all_events', return_value=[MOCK_ENTRY, {}])
mocker.patch('NetskopeEventCollector.get_all_events', return_value={})
mocker.patch.object(time, "sleep")
results, events = get_events_command(client, args={}, last_run=FIRST_LAST_RUN)
results, events = get_events_command(client, args={}, last_run=FIRST_LAST_RUN, events=MOCK_ENTRY)
assert 'Events List' in results.readable_output
assert len(events) == 9
assert results.outputs_prefix == 'Netskope.Event'
Expand Down Expand Up @@ -274,9 +275,10 @@ def test_incident_endpoint(mocker):
mocker.patch('NetskopeEventCollector.print_event_statistics_logs')
client = Client(BASE_URL, 'dummy_token', False, False, event_types_to_fetch=['incident'])
mock_response = MagicMock()
mock_response.json.return_value = {'result': 'fake_result', 'wait_time': 0}
mock_response.json.return_value = {'result': EVENTS_RAW['result'], 'wait_time': 0}
request_mock = mocker.patch.object(Client, '_http_request', return_value=mock_response)
handle_data_export_single_event_type(client, 'incident', 'next', limit=50, execution_start_time=datetime.now())
handle_data_export_single_event_type(client, 'incident', 'next', limit=50,
execution_start_time=datetime.now(), all_event_types=[])
kwargs = request_mock.call_args.kwargs
assert kwargs['url_suffix'] == 'events/dataexport/events/incident'
assert kwargs['params'] == {'index': 'xsoar_collector_test_instance_incident', 'operation': 'next'}
6 changes: 6 additions & 0 deletions Packs/Netskope/ReleaseNotes/4_0_2.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@

#### Integrations

##### Netskope Event Collector

- Fixed an issue where, in rare cases, not all events were created.
2 changes: 1 addition & 1 deletion Packs/Netskope/pack_metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "Netskope",
"description": "Cloud access security broker that enables to find, understand, and secure cloud apps.",
"support": "xsoar",
"currentVersion": "4.0.1",
"currentVersion": "4.0.2",
"author": "Cortex XSOAR",
"url": "https://www.paloaltonetworks.com/cortex",
"email": "",
Expand Down
Loading