Data Source Connections API Tutorial
In this article, you'll learn:
- How to export and save all data source definitions from an existing BigID system.
- How to import those data sources into the new BigID system using the API.
- How to verify that all data sources were transferred successfully.
In this tutorial, we'll use SAMPLE as our session token. This is unique to the training sandbox and will not work in other environments. See BigID API/Tutorial for information on authenticating with BigID.
To view the complete code for all steps, see the section labelled Code Samples.
For more information on the API capabilities used in this tutorial, check out the Data Source Connections API Docs.
1. Authenticate Using Your API Key
All API requests require authentication using a valid API key. Refer to BigID Documentation to obtain your token. Then, define the Authorization header using the format `Authorization: Bearer YOUR_API_KEY`. This header must be included in every request to ensure proper authentication and access to BigID’s API endpoints. Throughout the tutorial, we will be using SAMPLE as our token.
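For example, a minimal sketch of this setup in Python (assuming the requests library and the sandbox SAMPLE token) could look like this:

import requests

API_TOKEN = "SAMPLE"  # sandbox token; substitute your own API key outside the training environment
HEADERS = {
    "Authorization": f"Bearer {API_TOKEN}",
    "Content-Type": "application/json"
}

The HEADERS dictionary is reused in the sketches for the following steps.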
2. Export All Existing Data Sources
Use the GET /ds-connections/file-download/export endpoint to export your configured data sources into a JSON file. There is an optional ids parameter that can be used to fetch specific data sources only, but because we are transferring all the data, we do not need to include it.
The file returned will contain all the necessary configuration metadata for each data source, including connection type, credentials (if stored), scan options, and more.
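A minimal sketch of the export call, reusing HEADERS from step 1 and assuming the sandbox host bigid-ui:9090, might look like:

# Download the export file and keep a local copy for later comparison
response = requests.get(
    "https://bigid-ui:9090/api/v1/ds-connections/file-download/export",
    headers=HEADERS,
)
response.raise_for_status()  # stop early if the export did not succeed
with open("exported_datasources.json", "w") as f:
    f.write(response.text)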
3. Import Data Sources into the New BigID System
Now that you've exported your data sources as a JSON file in Step 2, it's time to import them into the new BigID environment. BigID does not support bulk importing via file upload, so each data source must be created individually using the POST /ds_connections endpoint.
This endpoint accepts a ds_connection object with the configuration values for the new data source. These should match the fields exported from your old system, and they must include any required values as defined in the data source’s template.
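As a sketch, assuming the export file from step 2 contains a list of data source definitions (as in the Code Samples below), the import loop could look like:

import json

with open("exported_datasources.json") as f:
    data_sources = json.load(f)  # assumed to be a list of exported definitions

for ds in data_sources:
    response = requests.post(
        "https://bigid-ui:9090/api/v1/ds_connections",
        headers=HEADERS,
        json={"ds_connection": ds},  # one POST per data source
    )
    if not response.ok:
        print(f"Failed to import {ds.get('name')}: {response.text}")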
4. Verify Data Sources Are Properly Transferred
Once all data sources have been created in the new BigID environment, it’s important to verify that the transfer was successful.
You can confirm this by using the GET /ds-connections endpoint on the new system to retrieve the list of all configured data sources. Compare this list to your original export to ensure that each data source has been recreated accurately.
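A minimal sketch of that comparison, assuming the response wraps the list in a data field (as in the Code Samples below) and that data_sources still holds the exported definitions from step 3:

response = requests.get("https://bigid-ui:9090/api/v1/ds-connections", headers=HEADERS)
response.raise_for_status()
imported = response.json().get("data", [])
# Compare counts; for a more thorough check, compare names or full definitions
print(f"Exported {len(data_sources)} data sources, found {len(imported)} in the new environment.")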
5. Troubleshooting
Status Code | Example Response | What It Means | How to Fix It
---|---|---|---
200 | Successful response with scan data | Everything's looking good! | Keep cruising.
400 | { "error": "Scan ID is invalid" } | Bad or malformed scan ID provided | Double-check the scan ID you're using.
404 | { "error": "Scan 1234 was not found" } | Scan ID doesn't exist | Make sure the ID is valid and fetched from the parent scans endpoint.
401 | Unauthorized | API key missing or invalid | Verify your API key and authorization header.
500 | { "status": "error", "message": "Server error", "errors": [{}] } | BigID server hit a snag (internal error) | Wait a moment and retry. If it persists, reach out to support.
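As one way to handle these cases in the import loop, a small helper (a sketch, not part of the tutorial code) could print the status code and retry once on a server error:

import time

def post_with_retry(url, body, retries=1):
    # Report client errors (400/401/404) and retry once on a 500-series response
    response = requests.post(url, headers=HEADERS, json=body)
    if response.status_code >= 500 and retries > 0:
        time.sleep(2)  # brief pause before retrying
        return post_with_retry(url, body, retries - 1)
    if not response.ok:
        print(f"{response.status_code}: {response.text}")
    return response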
Code Samples
Python

# Data Source Connections API Tutorial
import requests
import json

API_TOKEN = "SAMPLE"
HEADERS = {
    "Authorization": f"Bearer {API_TOKEN}",
    "Content-Type": "application/json"
}

# 1. Export all data sources and return the contents of the response file
def export_data_sources():
    url = "https://bigid-ui:9090/api/v1/ds-connections/file-download/export"
    response = requests.get(url, headers=HEADERS)
    if response.status_code == 200:
        with open("exported_datasources.json", "w") as f:
            f.write(response.text)
        print("Data sources exported.")
        return json.loads(response.text)
    else:
        print("Failed to export data sources.")
        print(response.text)
        return []

# 2. Import data sources by cycling through the provided list
def import_data_sources(data_sources):
    url = "https://bigid-ui:9090/api/v1/ds_connections"
    for ds in data_sources:  # for each data source in the provided list...
        body = {"ds_connection": ds}
        response = requests.post(url, headers=HEADERS, json=body)  # add data source to the new system
        if response.status_code == 200:
            print(f"Imported: {ds.get('name')}")
        else:
            print(f"Failed to import: {ds.get('name')}")
            print(response.text)

# 3. Verify successful migration
def verify_imported_sources():
    url = "https://bigid-ui:9090/api/v1/ds-connections"
    response = requests.get(url, headers=HEADERS)
    if response.status_code == 200:
        data = response.json()
        # Check that the number of new data sources equals the original count. You can also
        # compare the imported and exported data sources directly for a more thorough check.
        print(f"Total data sources in new environment: {len(data.get('data', []))}")
    else:
        print("Failed to verify data sources.")
        print(response.text)

# Run through the full process
data = export_data_sources()
if data:
    import_data_sources(data)
    verify_imported_sources()
JavaScript

// Data Source Connections API Tutorial
const API_TOKEN = 'SAMPLE';
const BASE_URL = 'https://bigid-ui:9090/api/v1';

const headers = {
  'Authorization': `Bearer ${API_TOKEN}`,
  'Content-Type': 'application/json'
};

// Step 1: Export all data sources and return the contents of the response file
async function exportDataSources() {
  try {
    const response = await fetch(`${BASE_URL}/ds-connections/file-download/export`, {
      method: 'GET',
      headers
    });
    if (!response.ok) throw new Error('Export failed.');
    const blob = await response.blob();
    const text = await blob.text();
    const data = JSON.parse(text);
    console.log('Exported data sources:', data);
    return data;
  } catch (err) {
    console.error('Error exporting:', err.message);
  }
}

// Step 2: Import data sources by cycling through the provided list
async function importDataSources(dataSources) {
  for (const ds of dataSources) { // for each data source in the provided list...
    try {
      const res = await fetch(`${BASE_URL}/ds_connections`, { // POST request is made for each source
        method: 'POST',
        headers,
        body: JSON.stringify({ ds_connection: ds })
      });
      if (!res.ok) {
        const errText = await res.text();
        console.error(`Failed to import ${ds.name}:`, errText);
      } else {
        const result = await res.json();
        console.log(`Imported: ${result.name}`);
      }
    } catch (err) {
      console.error(`Import error for ${ds.name}:`, err.message);
    }
  }
}

// Step 3: Verify successful transfer
async function verifyTransfer() {
  try {
    const response = await fetch(`${BASE_URL}/ds-connections`, {
      method: 'GET',
      headers
    });
    if (!response.ok) throw new Error('Verification failed.');
    const data = await response.json();
    // Check that the number of new data sources equals the original count. You can also
    // compare the imported and exported data sources directly for a more thorough check.
    console.log(`Verified ${data.data.length} data sources in the new system.`);
  } catch (err) {
    console.error('Error verifying transfer:', err.message);
  }
}

// Function to run all steps in order
async function migrateDataSources() {
  const exported = await exportDataSources();
  if (exported && Array.isArray(exported)) {
    await importDataSources(exported);
    await verifyTransfer();
  } else {
    console.warn('No data to import.');
  }
}

// Run the following to execute all the steps and complete the transfer
migrateDataSources();
Summary
Congratulations! In this tutorial, you have learned how to efficiently export existing data sources from one BigID environment and import them into another using the BigID API.