cancel
Showing results for 
Show  only  | Search instead for 
Did you mean: 

Parameter Nextpagekey

Lu
Newcomer

We want to use the Dynatrace API to retrieve problem and comment data from Dynatrace. But if we add the impactAnalysis field, the limit on the number of retrieved items is too small, allowing only 10 items to be fetched at once. Is there any way to get around this limitation?

1 REPLY 1

gopher
Pro

Hi @Lu ,

So..., you'll need to create a script / code of some description that uses the nextPageKey. 
I'll give you an example python script that I created for getting entities. I'm sure it could be done more elegantly, however I'm not a coder and it's functional.   
You'll just need to replace 'mytokencode' with the API key and 'myenvironment' with your tenant id. 
* this is for entities, but wouldn't take much to modify for pulling from the problems API. 
 

import requests
import json
import csv
import urllib.parse

def fetch_data(api_url, params=None):
    """Fetch every page of results from a Dynatrace v2 API endpoint.

    Follows the ``nextPageKey`` pagination cursor until the API stops
    returning one, accumulating the ``entities`` list from each page.
    Each page's raw JSON is also dumped to ``output_preprod.json``.

    Args:
        api_url: Full endpoint URL (e.g. ``.../api/v2/entities``).
        params: Query parameters for the first request; should include the
            ``Api-Token`` entry, which is reused for subsequent pages.

    Returns:
        list: All ``entities`` objects gathered across every page
        (empty if the first request fails).
    """
    # Bug fix: params=None previously crashed urlencode(); copy so we
    # never mutate the caller's dict.
    params = dict(params or {})
    all_data = []

    # --- Initial request --------------------------------------------------
    request_url = api_url + "?" + urllib.parse.urlencode(params)
    print(f"Making API call to: {request_url}")
    # NOTE(review): verify=False disables TLS certificate checking and sends
    # the Api-Token over an unverified connection — confirm this is
    # intentional (e.g. internal proxy / self-signed certs).
    response = requests.get(api_url, params=params, verify=False)
    if response.status_code != 200:
        print(f"Error: Failed to fetch data from {api_url}")
        return all_data

    # Write initial response to JSON file (truncates any previous run).
    with open("output_preprod.json", "w", encoding="utf-8") as json_file:
        json.dump(response.json(), json_file, indent=4)

    json_response = response.json()
    all_data.extend(json_response.get('entities', []))
    next_page_key = json_response.get('nextPageKey')

    # --- Pagination -------------------------------------------------------
    # Follow-up requests take only the token and the nextPageKey cursor;
    # the original filter params are encoded inside the cursor itself.
    # Bug fix: reuse the caller-supplied token instead of the hard-coded
    # literal the original used, so pagination works for any tenant/token.
    api_token = params.get("Api-Token")
    while next_page_key:
        page_params = {
            "Api-Token": api_token,
            "nextPageKey": next_page_key,
        }
        request_url = api_url + "?" + urllib.parse.urlencode(page_params)
        print(f"Making nextPageKey API call to: {request_url}")
        response = requests.get(api_url, params=page_params, verify=False)
        if response.status_code != 200:
            print(f"Error: Failed to fetch data from {api_url}")
            break

        # Append this page's raw JSON to the dump file.
        with open("output_preprod.json", "a", encoding="utf-8") as json_file:
            json_file.write("\n")
            json.dump(response.json(), json_file, indent=4)

        # Bug fix: parse the NEW response before reading nextPageKey.
        # The original also read nextPageKey from the previous page's JSON
        # right after the request (before the status check) — a stale,
        # redundant read that has been removed.
        json_response = response.json()
        all_data.extend(json_response.get('entities', []))
        next_page_key = json_response.get('nextPageKey')

    return all_data

# Example usage: pull all Process Group entities with relationships and tags.
api_url = "https://{environmentid}.live.dynatrace.com/api/v2/entities"
params = {
    "entitySelector": 'type("Process_Group")',
    "fields": "properties.softwareTechnologies,fromRelationships,toRelationships,properties,tags",
    "Api-Token": "dt0c01.mytokencode"
}

all_data = fetch_data(api_url, params)

# Write data to CSV file
if all_data:
    # Robustness fix: build the header from the union of keys across ALL
    # entities — the original used only the first entity's keys, and
    # csv.DictWriter raises ValueError when a later row has unknown keys.
    # A dict preserves insertion order, so columns keep first-seen order.
    fieldnames = {}
    for entity in all_data:
        for key in entity:
            fieldnames[key] = None
    with open("output_preprod.csv", "w", encoding="utf-8", newline="") as csvfile:
        csv_writer = csv.DictWriter(csvfile, fieldnames=list(fieldnames), restval="")
        csv_writer.writeheader()
        csv_writer.writerows(all_data)

# Bug fix: the message previously said "output.csv", but the file written
# above is "output_preprod.csv".
print("Data has been written to output_preprod.csv")

 

Featured Posts