diff --git a/README.md b/README.md
index f389b30..7d15849 100644
--- a/README.md
+++ b/README.md
@@ -30,6 +30,10 @@ For users with J1 accounts in the EU region, the 'url' parameter will need to be
 If no 'url' parameter is passed, the default of "https://graphql.us.jupiterone.io" is used.
 
+##### Method Examples:
+
+See examples/examples.py for full usage examples.
+
 ##### Execute a query:
 
 ```python
diff --git a/examples/exampleEntitiesRelated.png b/examples/exampleEntitiesRelated.png
new file mode 100644
index 0000000..699da93
Binary files /dev/null and b/examples/exampleEntitiesRelated.png differ
diff --git a/examples/examples.py b/examples/examples.py
index 8ad04bc..ed468f0 100644
--- a/examples/examples.py
+++ b/examples/examples.py
@@ -5,12 +5,14 @@
 account = os.environ.get("JUPITERONE_ACCOUNT")
 token = os.environ.get("JUPITERONE_TOKEN")
+url = "https://graphql.us.jupiterone.io"
 
-j1 = JupiterOneClient(account=account, token=token)
+j1 = JupiterOneClient(account=account, token=token, url=url)
 
 # query_v1
 q = "FIND jupiterone_user"
 query_r = j1.query_v1(q)
+print("query_v1()")
 print(query_r)
 
 # create_entity
@@ -31,6 +33,7 @@
     properties=properties,
     timestamp=int(time.time()) * 1000  # Optional, defaults to current datetime
 )
+print("create_entity()")
 print(create_r)
 
 properties = {
@@ -42,6 +45,7 @@
     entity_id='{}'.format(create_r['entity']['_id']),
     properties=properties
 )
+print("update_entity()")
 print(update_r)
 
 # create_entity_2
@@ -61,6 +65,7 @@
     properties=properties,
     timestamp=int(time.time()) * 1000  # Optional, defaults to current datetime
 )
+print("create_entity()")
 print(create_r_2)
 
 # create_relationship
@@ -71,20 +76,214 @@
     from_entity_id=create_r['entity']['_id'],
     to_entity_id=create_r_2['entity']['_id'],
 )
+print("create_relationship()")
 print(create_relationship_r)
 
 # delete_relationship
 delete_relationship_r = j1.delete_relationship(relationship_id=create_relationship_r['relationship']['_id'])
+print("delete_relationship()")
 print(delete_relationship_r)
 
 # delete_entity
 delete_entity_r1 = j1.delete_entity(entity_id=create_r['entity']['_id'])
+print("delete_entity()")
 print(delete_entity_r1)
 
 delete_entity_r2 = j1.delete_entity(entity_id=create_r_2['entity']['_id'])
+print("delete_entity()")
 print(delete_entity_r2)
 
+# cursor_query
 q = "FIND Person"
 cursor_query_r = j1._cursor_query(q)
+print("cursor_query()")
 print(cursor_query_r)
-print(len(cursor_query_r['data']))
+
+# fetch_all_entity_properties
+fetch_all_entity_properties_r = j1.fetch_all_entity_properties()
+print("fetch_all_entity_properties()")
+print(fetch_all_entity_properties_r)
+
+# fetch_all_entity_tags
+fetch_all_entity_tags_r = j1.fetch_all_entity_tags()
+print("fetch_all_entity_tags()")
+print(fetch_all_entity_tags_r)
+
+# create_integration_instance
+create_integration_instance_r = j1.create_integration_instance(instance_name="pythonclient-customintegration",
+                                                               instance_description="dev-testing")
+print("create_integration_instance()")
+print(create_integration_instance_r)
+
+integration_instance_id = ""
+
+# start_sync_job
+start_sync_job_r = j1.start_sync_job(instance_id=integration_instance_id)
+print("start_sync_job()")
+print(start_sync_job_r)
+
+# upload_entities_batch_json
+entity_payload = [
+    {
+        "_key": "1",
+        "_type": "pythonclient",
+        "_class": "API",
+        "displayName": "pythonclient1",
+        "propertyName": "value",
+        "relationshipProperty": "source",
+    },
+    {
+        "_key": "2",
+        "_type": "pythonclient",
+        "_class": "API",
+        "displayName": "pythonclient2",
+        "propertyName": "value"
+    },
+    {
+        "_key": "3",
+        "_type": "pythonclient",
+        "_class": "API",
+        "displayName": "pythonclient3",
+        "propertyName": "value"
+    }
+]
+
+# upload_entities_batch_json
+upload_entities_batch_json_r = j1.upload_entities_batch_json(instance_job_id=start_sync_job_r['job']['id'],
+                                                             entities_list=entity_payload)
+print("upload_entities_batch_json()")
+print(upload_entities_batch_json_r)
+
+# upload_relationships_batch_json
+relationships_payload = [
+    {
+        "_key": "1:2",
+        "_class": "EXTENDS",
+        "_type": "pythonclient_extends_pythonclient",
+        "_fromEntityKey": "1",
+        "_toEntityKey": "2",
+        "relationshipProperty": "value"
+    },
+    {
+        "_key": "2:3",
+        "_class": "EXTENDS",
+        "_type": "pythonclient_extends_pythonclient",
+        "_fromEntityKey": "2",
+        "_toEntityKey": "3",
+        "relationshipProperty": "value"
+    }
+]
+
+# upload_relationships_batch_json
+upload_relationships_batch_json_r = j1.upload_relationships_batch_json(instance_job_id=start_sync_job_r['job']['id'],
+                                                                       relationships_list=relationships_payload)
+print("upload_relationships_batch_json()")
+print(upload_relationships_batch_json_r)
+
+# upload_combined_batch_json
+combined_payload = {
+    "entities": [
+    {
+        "_key": "4",
+        "_type": "pythonclient",
+        "_class": "API",
+        "displayName": "pythonclient4",
+        "propertyName": "value",
+        "relationshipProperty": "source",
+    },
+    {
+        "_key": "5",
+        "_type": "pythonclient",
+        "_class": "API",
+        "displayName": "pythonclient5",
+        "propertyName": "value"
+    },
+    {
+        "_key": "6",
+        "_type": "pythonclient",
+        "_class": "API",
+        "displayName": "pythonclient6",
+        "propertyName": "value"
+    }
+],
+    "relationships": [
+    {
+        "_key": "4:5",
+        "_class": "EXTENDS",
+        "_type": "pythonclient_extends_pythonclient",
+        "_fromEntityKey": "4",
+        "_toEntityKey": "5",
+        "relationshipProperty": "value"
+    },
+    {
+        "_key": "5:6",
+        "_class": "EXTENDS",
+        "_type": "pythonclient_extends_pythonclient",
+        "_fromEntityKey": "5",
+        "_toEntityKey": "6",
+        "relationshipProperty": "value"
+    }
+]
+}
+
+# upload_combined_batch_json
+upload_combined_batch_json_r = j1.upload_combined_batch_json(instance_job_id=start_sync_job_r['job']['id'],
+                                                             combined_payload=combined_payload)
+print("upload_combined_batch_json()")
+print(upload_combined_batch_json_r)
+
+# finalize_sync_job
+finalize_sync_job_r = j1.finalize_sync_job(instance_job_id=start_sync_job_r['job']['id'])
+print("finalize_sync_job()")
+print(finalize_sync_job_r)
+
+# fetch_integration_jobs
+fetch_integration_jobs_r = j1.fetch_integration_jobs(instance_id=integration_instance_id)
+print("fetch_integration_jobs()")
+print(fetch_integration_jobs_r)
+
+while j1.fetch_integration_jobs(instance_id=integration_instance_id)['jobs'][0]['status'] == "IN_PROGRESS":
+
+    fetch_integration_jobs_r = j1.fetch_integration_jobs(instance_id=integration_instance_id)
+
+    print("fetch_integration_jobs()")
+    print(fetch_integration_jobs_r)
+
+    time.sleep(10)  # pause between polls to avoid hammering the API
+
+# fetch_integration_job_events
+fetch_integration_job_events_r = j1.fetch_integration_job_events(instance_id=integration_instance_id,
+                                                                 instance_job_id=fetch_integration_jobs_r['jobs'][0]['id'])
+print("fetch_integration_job_events()")
+print(fetch_integration_job_events_r)
+
+# create_smartclass
+create_smartclass_r = j1.create_smartclass(smartclass_name="SmartClass1",
+                                           smartclass_description="Created via create_smartclass() method")
+print("create_smartclass()")
+print(create_smartclass_r)
+
+# create_smartclass_query
+create_smartclass_query_r = j1.create_smartclass_query(smartclass_id=create_smartclass_r['id'],
+                                                       query="FIND (Device|Host) with osType ~= 'Windows'",
+                                                       query_description="all windows devices and hosts")
+print("create_smartclass_query()")
+print(create_smartclass_query_r)
+
+# evaluate_smartclass
+evaluate_smartclass_r = j1.evaluate_smartclass(smartclass_id=create_smartclass_query_r['smartClassId'])
+print("evaluate_smartclass()")
+print(evaluate_smartclass_r)
+
+# get_smartclass_details
+get_smartclass_details_r = j1.get_smartclass_details(smartclass_id=create_smartclass_query_r['smartClassId'])
+print("get_smartclass_details()")
+print(get_smartclass_details_r)
+
+# list_configured_alert_rules
+list_configured_alert_rules_r = j1.list_configured_alert_rules()
+print("list_configured_alert_rules()")
+print(list_configured_alert_rules_r)
+
+# generate_j1ql
+generate_j1ql_r = j1.generate_j1ql(natural_language_prompt="show me all Users containing 'jupiterone' in their email address")
+print("generate_j1ql()")
+print(generate_j1ql_r)
diff --git a/jupiterone/client.py b/jupiterone/client.py
index 0a4684c..f32465b 100644
--- a/jupiterone/client.py
+++ b/jupiterone/client.py
@@ -5,6 +5,7 @@
 import json
 from warnings import warn
 from typing import Dict, List
+from datetime import datetime
 import re
 
 import requests
@@ -25,8 +26,19 @@
     DELETE_ENTITY,
     UPDATE_ENTITY,
     CREATE_RELATIONSHIP,
+    UPDATE_RELATIONSHIP,
     DELETE_RELATIONSHIP,
     CURSOR_QUERY_V1,
+    CREATE_INSTANCE,
+    INTEGRATION_JOB_VALUES,
+    INTEGRATION_INSTANCE_EVENT_VALUES,
+    ALL_PROPERTIES,
+    CREATE_SMARTCLASS,
+    CREATE_SMARTCLASS_QUERY,
+    EVALUATE_SMARTCLASS,
+    GET_SMARTCLASS_DETAILS,
+    LIST_RULE_INSTANCES,
+    J1QL_FROM_NATURAL_LANGUAGE
 )
 
 
@@ -41,6 +53,7 @@ class JupiterOneClient:
     # pylint: disable=too-many-instance-attributes
 
     DEFAULT_URL = "https://graphql.us.jupiterone.io"
+    SYNC_API_URL = "https://api.us.jupiterone.io"
 
     RETRY_OPTS = {
         "wait_exponential_multiplier": 1000,
@@ -49,15 +62,15 @@ class JupiterOneClient:
         "retry_on_exception": retry_on_429,
     }
 
-    def __init__(self, account: str = None, token: str = None, url: str = DEFAULT_URL):
+    def __init__(self, account: str = None, token: str = None, url: str = DEFAULT_URL, sync_url: str = SYNC_API_URL):
         self.account = account
         self.token = token
-        self.url = url
-        self.query_endpoint = self.url
-        self.rules_endpoint = self.url + "/rules/graphql"
+        self.graphql_url = url
+        self.sync_url = sync_url
         self.headers = {
             "Authorization": "Bearer {}".format(self.token),
             "JupiterOne-Account": self.account,
+            "Content-Type": "application/json"
         }
 
     @property
@@ -98,11 +111,11 @@ def _execute_query(self, query: str, variables: Dict = None) -> Dict:
 
         # initiate requests session and implement retry logic of 5 request retries with 1 second between
         s = requests.Session()
-        retries = Retry(total=5, backoff_factor=1, status_forcelist=[429, 502, 503, 504])
+        retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504])
         s.mount('https://', HTTPAdapter(max_retries=retries))
 
         response = s.post(
-            self.query_endpoint, headers=self.headers, json=data, timeout=60
+            self.graphql_url, headers=self.headers, json=data, timeout=60
         )
 
         # It is still unclear if all responses will have a status
@@ -127,8 +140,6 @@ def _execute_query(self, query: str, variables: Dict = None) -> Dict:
             )
 
         elif response.status_code in [429, 503]:
-            print(response.status_code)
-            print(response.content)
             raise JupiterOneApiRetryError("JupiterOne API rate limit exceeded.")
 
         elif response.status_code in [504]:
@@ -233,6 +244,57 @@ def _limit_and_skip_query(
 
         return {"data": results}
 
+    def _execute_syncapi_request(self, endpoint: str, payload: Dict = None) -> Dict:
+        """Executes POST request to SyncAPI endpoints"""
+
+        # initiate requests session and implement retry logic of 5 request retries with 1 second between
+        s = requests.Session()
+        retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504])
+        s.mount('https://', HTTPAdapter(max_retries=retries))
+
+        response = s.post(
+            self.sync_url + endpoint, headers=self.headers, json=payload, timeout=60
+        )
+
+        # It is still unclear if all responses will have a status
+        # code of 200 or if 429 will eventually be used to
+        # indicate rate limits being hit. J1 devs are aware.
+        if response.status_code == 200:
+            if response._content:
+                content = json.loads(response._content)
+                if "errors" in content:
+                    errors = content["errors"]
+                    if len(errors) == 1:
+                        if "429" in errors[0]["message"]:
+                            raise JupiterOneApiRetryError(
+                                "JupiterOne API rate limit exceeded"
+                            )
+                    raise JupiterOneApiError(content.get("errors"))
+            return response.json()
+
+        elif response.status_code == 401:
+            raise JupiterOneApiError(
+                "401: Unauthorized. Please supply a valid account id and API token."
+            )
+
+        elif response.status_code in [429, 503]:
+            raise JupiterOneApiRetryError("JupiterOne API rate limit exceeded.")
+
+        elif response.status_code in [504]:
+            raise JupiterOneApiRetryError("Gateway Timeout.")
+
+        elif response.status_code in [500]:
+            raise JupiterOneApiError("JupiterOne API internal server error.")
+
+        else:
+            content = response._content
+            if isinstance(content, (bytes, bytearray)):
+                content = content.decode("utf-8")
+            if "application/json" in response.headers.get("Content-Type", "text/plain"):
+                data = json.loads(content)
+                content = data.get("error", data.get("errors", content))
+            raise JupiterOneApiError("{}:{}".format(response.status_code, content))
+
     def query_v1(self, query: str, **kwargs) -> Dict:
         """Performs a V1 graph query
         args:
@@ -339,6 +401,28 @@ def create_relationship(self, **kwargs) -> Dict:
         response = self._execute_query(query=CREATE_RELATIONSHIP, variables=variables)
         return response["data"]["createRelationship"]
 
+    def update_relationship(self, **kwargs) -> Dict:
+        """
+        Update a relationship (edge) between two entities (vertices).
+
+        args:
+            relationship_id (str): Unique _id of the relationship
+            properties (dict): Dictionary of key/value relationship properties
+        """
+        now_dt = datetime.now()
+
+        variables = {
+            "relationshipId": kwargs.pop("relationship_id"),
+            "timestamp": now_dt.timestamp()
+        }
+
+        properties = kwargs.pop("properties", None)
+        if properties:
+            variables["properties"] = properties
+
+        response = self._execute_query(query=UPDATE_RELATIONSHIP, variables=variables)
+        return response["data"]["updateRelationship"]
+
     def delete_relationship(self, relationship_id: str = None):
         """Deletes a relationship between two entities.
 
@@ -349,3 +433,290 @@
         response = self._execute_query(DELETE_RELATIONSHIP, variables=variables)
 
         return response["data"]["deleteRelationship"]
+
+    def create_integration_instance(self,
+                                    instance_name: str = None,
+                                    instance_description: str = None,
+                                    integration_definition_id: str = "8013680b-311a-4c2e-b53b-c8735fd97a5c"):
+        """Creates a new Custom Integration Instance.
+
+        args:
+            instance_name (str): The "Account name" for the integration instance
+            instance_description (str): The "Description" for the integration instance
+            integration_definition_id (str): The "Integration definition ID" for the integration instance;
+            if no parameter is passed, the Custom Integration definition ID is used.
+        """
+        variables = {
+            "instance": {
+                "name": instance_name,
+                "description": instance_description,
+                "integrationDefinitionId": integration_definition_id,
+                "pollingInterval": "DISABLED",
+                "config": {
+                    "@tag": {
+                        "Production": False,
+                        "AccountName": True
+                    }
+                },
+                "pollingIntervalCronExpression": {},
+                "ingestionSourcesOverrides": []
+            }
+        }
+
+        response = self._execute_query(CREATE_INSTANCE, variables=variables)
+        return response['data']['createIntegrationInstance']
+
+    def fetch_all_entity_properties(self):
+        """Fetch list of aggregated property keys from all entities in the graph.
+
+        """
+
+        response = self._execute_query(query=ALL_PROPERTIES)
+
+        return_list = []
+
+        for i in response['data']['getAllAssetProperties']:
+
+            if not i.startswith(('parameter.', 'tag.')):
+
+                return_list.append(i)
+
+        return return_list
+
+    def fetch_all_entity_tags(self):
+        """Fetch list of aggregated tag keys from all entities in the graph.
+
+        """
+
+        response = self._execute_query(query=ALL_PROPERTIES)
+
+        return_list = []
+
+        for i in response['data']['getAllAssetProperties']:
+
+            if i.startswith('tag.'):
+
+                return_list.append(i)
+
+        return return_list
+
+    def start_sync_job(self, instance_id: str = None):
+        """Start a synchronization job.
+
+        args:
+            instance_id (str): The "integrationInstanceId" request param for the synchronization job
+        """
+        endpoint = "/persister/synchronization/jobs"
+
+        data = {
+            "source": "integration-managed",
+            "integrationInstanceId": instance_id
+        }
+
+        response = self._execute_syncapi_request(endpoint=endpoint, payload=data)
+
+        return response
+
+    def upload_entities_batch_json(self, instance_job_id: str = None, entities_list: list = None):
+        """Upload batch of entities.
+
+        args:
+            instance_job_id (str): The "Job ID" for the Custom Integration job
+            entities_list (list): List of dictionaries containing entities data to upload
+        """
+        endpoint = f"/persister/synchronization/jobs/{instance_job_id}/entities"
+
+        data = {
+            "entities": entities_list
+        }
+
+        response = self._execute_syncapi_request(endpoint=endpoint, payload=data)
+
+        return response
+
+    def upload_relationships_batch_json(self, instance_job_id: str = None, relationships_list: list = None):
+        """Upload batch of relationships.
+
+        args:
+            instance_job_id (str): The "Job ID" for the Custom Integration job
+            relationships_list (list): List of dictionaries containing relationships data to upload
+        """
+        endpoint = f"/persister/synchronization/jobs/{instance_job_id}/relationships"
+
+        data = {
+            "relationships": relationships_list
+        }
+
+        response = self._execute_syncapi_request(endpoint=endpoint, payload=data)
+
+        return response
+
+    def upload_combined_batch_json(self, instance_job_id: str = None, combined_payload: Dict = None):
+        """Upload batch of entities and relationships together.
+
+        args:
+            instance_job_id (str): The "Job ID" for the Custom Integration job.
+            combined_payload (dict): Dictionary containing combined entities and relationships data to upload.
+        """
+        endpoint = f"/persister/synchronization/jobs/{instance_job_id}/upload"
+
+        response = self._execute_syncapi_request(endpoint=endpoint, payload=combined_payload)
+
+        return response
+
+    def bulk_delete_entities(self, instance_job_id: str = None, entities_list: list = None):
+        """Send a request to bulk delete existing entities.
+
+        args:
+            instance_job_id (str): The "Job ID" for the Custom Integration job.
+            entities_list (list): List of dictionaries containing the entity _id's to be deleted.
+        """
+        endpoint = f"/persister/synchronization/jobs/{instance_job_id}/upload"
+
+        data = {
+            "deleteEntities": entities_list
+        }
+
+        response = self._execute_syncapi_request(endpoint=endpoint, payload=data)
+
+        return response
+
+    def finalize_sync_job(self, instance_job_id: str = None):
+        """Finalize a synchronization job.
+
+        args:
+            instance_job_id (str): The "Job ID" for the Custom Integration job
+        """
+        endpoint = f"/persister/synchronization/jobs/{instance_job_id}/finalize"
+
+        data = {}
+
+        response = self._execute_syncapi_request(endpoint=endpoint, payload=data)
+
+        return response
+
+    def fetch_integration_jobs(self, instance_id: str = None):
+        """Fetch Integration Job details from the defined integration instance.
+
+        args:
+            instance_id (str): The "integrationInstanceId" of the integration to fetch jobs from.
+        """
+        variables = {
+            "integrationInstanceId": instance_id,
+            "size": 100
+        }
+
+        response = self._execute_query(INTEGRATION_JOB_VALUES, variables=variables)
+
+        return response['data']['integrationJobs']
+
+    def fetch_integration_job_events(self, instance_id: str = None, instance_job_id: str = None):
+        """Fetch events within an integration job run.
+
+        args:
+            instance_id (str): The integration Instance ID of the integration to fetch job events from.
+            instance_job_id (str): The integration Job ID of the integration to fetch job events from.
+        """
+        variables = {
+            "integrationInstanceId": instance_id,
+            "jobId": instance_job_id,
+            "size": 1000
+        }
+
+        response = self._execute_query(INTEGRATION_INSTANCE_EVENT_VALUES, variables=variables)
+
+        return response['data']['integrationEvents']
+
+    def create_smartclass(self, smartclass_name: str = None, smartclass_description: str = None):
+        """Creates a new Smart Class within Assets.
+
+        args:
+            smartclass_name (str): The "Smart class name" for the Smart Class to be created.
+            smartclass_description (str): The "Description" for the Smart Class to be created.
+        """
+        variables = {
+            "input": {
+                "tagName": smartclass_name,
+                "description": smartclass_description
+            }
+        }
+
+        response = self._execute_query(CREATE_SMARTCLASS, variables=variables)
+
+        return response['data']['createSmartClass']
+
+    def create_smartclass_query(self, smartclass_id: str = None, query: str = None, query_description: str = None):
+        """Creates a new J1QL query within a defined Smart Class.
+
+        args:
+            smartclass_id (str): The unique ID of the Smart Class the query is created within.
+            query (str): The J1QL for the query being created.
+            query_description (str): The description of the query being created.
+        """
+        variables = {
+            "input": {
+                "smartClassId": smartclass_id,
+                "query": query,
+                "description": query_description
+            }
+        }
+
+        response = self._execute_query(CREATE_SMARTCLASS_QUERY, variables=variables)
+
+        return response['data']['createSmartClassQuery']
+
+    def evaluate_smartclass(self, smartclass_id: str = None):
+        """Execute an on-demand evaluation of a defined Smart Class.
+
+        args:
+            smartclass_id (str): The unique ID of the Smart Class to trigger the evaluation for.
+        """
+        variables = {
+            "smartClassId": smartclass_id
+        }
+
+        response = self._execute_query(EVALUATE_SMARTCLASS, variables=variables)
+
+        return response['data']['evaluateSmartClassRule']
+
+    def get_smartclass_details(self, smartclass_id: str = None):
+        """Fetch config details from a defined Smart Class.
+
+        args:
+            smartclass_id (str): The unique ID of the Smart Class to fetch details from.
+        """
+        variables = {
+            "id": smartclass_id
+        }
+
+        response = self._execute_query(GET_SMARTCLASS_DETAILS, variables=variables)
+
+        return response['data']['smartClass']
+
+    def list_configured_alert_rules(self):
+        """List defined Alert Rules configured in the J1 account.
+
+        """
+        variables = {
+            "limit": 100
+        }
+
+        response = self._execute_query(LIST_RULE_INSTANCES, variables=variables)
+
+        return response['data']['listRuleInstances']
+
+    def generate_j1ql(self, natural_language_prompt: str = None):
+        """Generate J1QL query syntax from natural language user input.
+
+        args:
+            natural_language_prompt (str): The naturalLanguageQuery prompt input to generate J1QL from.
+        """
+        variables = {
+            "input": {
+                "naturalLanguageQuery": natural_language_prompt
+            }
+        }
+
+        response = self._execute_query(J1QL_FROM_NATURAL_LANGUAGE, variables=variables)
+
+        return response['data']['j1qlFromNaturalLanguage']
diff --git a/jupiterone/constants.py b/jupiterone/constants.py
index 1383e97..e6b0902 100644
--- a/jupiterone/constants.py
+++ b/jupiterone/constants.py
@@ -117,6 +117,35 @@
     }
 """
 
+UPDATE_RELATIONSHIP = """
+  mutation UpdateRelationship (
+    $relationshipId: String!
+    $timestamp: Long
+    $properties: JSON
+  ) {
+    updateRelationship (
+      relationshipId: $relationshipId,
+      timestamp: $timestamp,
+      properties: $properties
+    ) {
+      relationship {
+        _id
+        ...
+      }
+      edge {
+        id
+        toVertexId
+        fromVertexId
+        relationship {
+          _id
+          ...
+        }
+        properties
+      }
+    }
+  }
+"""
+
 DELETE_RELATIONSHIP = """
   mutation DeleteRelationship($relationshipId: String! $timestamp: Long) {
     deleteRelationship (relationshipId: $relationshipId, timestamp: $timestamp) {
@@ -134,4 +163,246 @@
       }
     }
   }
+"""
+
+CREATE_INSTANCE = """
+  mutation CreateInstance($instance: CreateIntegrationInstanceInput!) {
+    createIntegrationInstance(instance: $instance) {
+      id
+      name
+      accountId
+      pollingInterval
+      integrationDefinitionId
+      description
+      config
+    }
+  }
+"""
+
+ALL_PROPERTIES = """
+  query getAllAssetProperties {
+    getAllAssetProperties
+  }
+"""
+
+CREATE_SMARTCLASS = """
+  mutation CreateSmartClass($input: CreateSmartClassInput!) {
+    createSmartClass(input: $input) {
+      id
+      accountId
+      tagName
+      description
+      ruleId
+      __typename
+    }
+  }
+"""
+
+CREATE_SMARTCLASS_QUERY = """
+  mutation CreateSmartClassQuery($input: CreateSmartClassQueryInput!) {
+    createSmartClassQuery(input: $input) {
+      id
+      smartClassId
+      description
+      query
+      __typename
+    }
+  }
+"""
+
+EVALUATE_SMARTCLASS = """
+  mutation EvaluateSmartClassRule($smartClassId: ID!) {
+    evaluateSmartClassRule(smartClassId: $smartClassId) {
+      ruleId
+      __typename
+    }
+  }
+"""
+
+GET_SMARTCLASS_DETAILS = """
+  query GetSmartClass($id: ID!) {
+    smartClass(id: $id) {
+      id
+      accountId
+      tagName
+      description
+      ruleId
+      queries {
+        id
+        smartClassId
+        description
+        query
+        __typename
+      }
+      tags {
+        id
+        smartClassId
+        name
+        type
+        value
+        __typename
+      }
+      rule {
+        lastEvaluationEndOn
+        evaluationStep
+        __typename
+      }
+      __typename
+    }
+  }
+"""
+
+INTEGRATION_JOB_VALUES = """
+  query IntegrationJobs(
+    $status: IntegrationJobStatus
+    $integrationInstanceId: String
+    $integrationDefinitionId: String
+    $integrationInstanceIds: [String]
+    $cursor: String
+    $size: Int
+  ) {
+    integrationJobs(
+      status: $status
+      integrationInstanceId: $integrationInstanceId
+      integrationDefinitionId: $integrationDefinitionId
+      integrationInstanceIds: $integrationInstanceIds
+      cursor: $cursor
+      size: $size
+    ) {
+      jobs {
+        id
+        status
+        integrationInstanceId
+        createDate
+        endDate
+        hasSkippedSteps
+        integrationInstance {
+          id
+          name
+          __typename
+        }
+        integrationDefinition {
+          id
+          title
+          integrationType
+          __typename
+        }
+        __typename
+      }
+      pageInfo {
+        endCursor
+        __typename
+      }
+      __typename
+    }
+  }
+"""
+
+INTEGRATION_INSTANCE_EVENT_VALUES = """
+  query ListEvents(
+    $jobId: String!
+    $integrationInstanceId: String!
+    $cursor: String
+    $size: Int
+  ) {
+    integrationEvents(
+      size: $size
+      cursor: $cursor
+      jobId: $jobId
+      integrationInstanceId: $integrationInstanceId
+    ) {
+      events {
+        id
+        name
+        description
+        createDate
+        jobId
+        level
+        eventCode
+        __typename
+      }
+      pageInfo {
+        endCursor
+        hasNextPage
+        __typename
+      }
+      __typename
+    }
+  }
+"""
+
+LIST_RULE_INSTANCES = """
+  query listRuleInstances(
+    $limit: Int,
+    $cursor: String,
+    $filters: ListRuleInstancesFilters) {
+    listRuleInstances(
+      limit: $limit,
+      cursor: $cursor,
+      filters: $filters) {
+      questionInstances {
+        ...RuleInstanceFields
+        __typename
+      }
+      pageInfo {
+        hasNextPage
+        endCursor
+        __typename
+      }
+      __typename
+    }
+  }
+
+  fragment RuleInstanceFields on QuestionRuleInstance {
+    id
+    accountId
+    name
+    description
+    version
+    lastEvaluationStartOn
+    lastEvaluationEndOn
+    evaluationStep
+    specVersion
+    notifyOnFailure
+    triggerActionsOnNewEntitiesOnly
+    pollingInterval
+    templates
+    outputs
+    question {
+      queries {
+        query
+        name
+        version
+        includeDeleted
+        __typename
+      }
+      __typename
+    }
+    questionId
+    latest
+    deleted
+    type
+    operations {
+      when
+      actions
+      __typename
+    }
+    latestAlertId
+    latestAlertIsActive
+    state {
+      actions
+      __typename
+    }
+    tags
+    remediationSteps
+    __typename
+  }
+"""
+
+J1QL_FROM_NATURAL_LANGUAGE = """
+  query j1qlFromNaturalLanguage($input: J1qlFromNaturalLanguageInput!) {
+    j1qlFromNaturalLanguage(input: $input) {
+      j1ql
+    }
+  }
+"""
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 93329fb..eab50e7 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 setup(
     name="jupiterone",
-    version="1.0.1",
+    version="1.1.0",
     description="A Python client for the JupiterOne API",
     license="MIT License",
     author="JupiterOne",
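The client changes above add `update_relationship()`, but examples/examples.py does not exercise it. The snippet below is a minimal sketch of how it could be called, following the pattern the examples use; the relationship `_id` placeholder and the property name are illustrative assumptions, and the client construction mirrors the examples.

```python
import os

from jupiterone import JupiterOneClient

# Client construction mirrors examples/examples.py; the env var names are the same ones used there.
j1 = JupiterOneClient(
    account=os.environ.get("JUPITERONE_ACCOUNT"),
    token=os.environ.get("JUPITERONE_TOKEN"),
    url="https://graphql.us.jupiterone.io",
)

# update_relationship: set or overwrite properties on an existing relationship.
# The _id would normally come from a prior create_relationship() call; the value
# and the property name below are placeholders.
update_relationship_r = j1.update_relationship(
    relationship_id="<relationship _id>",
    properties={"relationshipProperty": "updated-value"},
)
print("update_relationship()")
print(update_relationship_r)
```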
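`bulk_delete_entities()` is likewise added by the diff but not demonstrated. A possible flow is sketched below, reusing the `j1` client from the previous snippet; the `{"_id": ...}` element shape is an assumption based on the method's docstring (a list of dictionaries containing entity `_id`s), and the placeholder IDs are illustrative.

```python
# Bulk deletions ride on a sync job, like the other Sync API helpers.
start_r = j1.start_sync_job(instance_id="<integration instance id>")
job_id = start_r['job']['id']

# Assumed element shape: one dict per entity, keyed by _id (per the docstring).
bulk_delete_r = j1.bulk_delete_entities(
    instance_job_id=job_id,
    entities_list=[
        {"_id": "<entity _id to delete>"},
    ],
)
print("bulk_delete_entities()")
print(bulk_delete_r)

# Finalize so the persister processes the queued deletions.
finalize_r = j1.finalize_sync_job(instance_job_id=job_id)
print("finalize_sync_job()")
print(finalize_r)
```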
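Finally, the natural-language helper can be chained with `query_v1()`. This sketch assumes the `j1ql` key in the returned dictionary, which matches the `j1qlFromNaturalLanguage` selection added to constants.py, and reuses the `j1` client from the snippets above.

```python
# Generate J1QL from a natural-language prompt, then execute the returned query.
generated = j1.generate_j1ql(
    natural_language_prompt="show me all Users containing 'jupiterone' in their email address"
)
print("generated J1QL:", generated['j1ql'])

results = j1.query_v1(generated['j1ql'])
print(results)
```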