Overview
Any CI/CD tool that can execute shell commands or REST API calls can integrate with Validatar's export/import workflow. This guide provides complete, copy-paste-ready scripts in Python, PowerShell, and Bash that you can adapt for your CI/CD platform.
Prerequisites
- Validatar API token with Authoring scope (add Execution scope if running post-import tests) — see User Tokens
- Network access from your CI/CD runner to the Validatar API endpoint
- Source and target Validatar project IDs and data source IDs
Export and Import (Python)
import requests
import base64
import json
import os
import sys
# ---------------------------------------------------------------------------
# Configuration — set as environment variables
# ---------------------------------------------------------------------------
SOURCE_API_URL = os.environ.get("SOURCE_VALIDATAR_API_URL")  # e.g., https://dev.cloud.validatar.com
SOURCE_API_TOKEN = os.environ.get("SOURCE_VALIDATAR_API_TOKEN")
SOURCE_PROJECT_ID = int(os.environ.get("SOURCE_PROJECT_ID", "0"))
TARGET_API_URL = os.environ.get("TARGET_VALIDATAR_API_URL")  # e.g., https://prod.cloud.validatar.com
TARGET_API_TOKEN = os.environ.get("TARGET_VALIDATAR_API_TOKEN")
TARGET_PROJECT_ID = int(os.environ.get("TARGET_PROJECT_ID", "0"))


def parse_id_list(raw):
    """Parse a comma-separated string of integer IDs into a list.

    Blank segments (e.g. a trailing comma or an empty string) are skipped.
    """
    return [int(part) for part in raw.split(",") if part.strip()]


def parse_data_source_mappings(raw):
    """Parse "source_id:target_id,..." pairs into Validatar mapping dicts.

    Returns a list of {"sourceFileKey": str, "targetId": int} entries.
    Empty segments (e.g. a trailing comma) are ignored; a segment without a
    ":" separator raises ValueError so a misconfiguration fails fast.
    """
    mappings = []
    for pair in raw.split(","):
        pair = pair.strip()
        if not pair:
            continue
        source_id, target_id = pair.split(":")
        mappings.append({
            "sourceFileKey": source_id.strip(),
            "targetId": int(target_id.strip()),
        })
    return mappings


# Folder IDs to export (comma-separated in env var, or set directly)
TEST_FOLDER_IDS = parse_id_list(os.environ.get("EXPORT_TEST_FOLDER_IDS", ""))
JOB_FOLDER_IDS = parse_id_list(os.environ.get("EXPORT_JOB_FOLDER_IDS", ""))
# Data source mapping: source_id:target_id pairs (comma-separated)
# Example: "42:87,43:88"
DS_MAPPING_RAW = os.environ.get("DATA_SOURCE_MAPPINGS", "")
# Target folder IDs for import (optional — omit to preserve original structure)
TARGET_TEST_FOLDER_ID = os.environ.get("TARGET_TEST_FOLDER_ID")
TARGET_JOB_FOLDER_ID = os.environ.get("TARGET_JOB_FOLDER_ID")
# ---------------------------------------------------------------------------
# Build data source mappings
# ---------------------------------------------------------------------------
data_source_mappings = parse_data_source_mappings(DS_MAPPING_RAW)
print(f"Data source mappings: {json.dumps(data_source_mappings, indent=2)}")
# ---------------------------------------------------------------------------
# Helper: list data sources (useful for discovering IDs)
# ---------------------------------------------------------------------------
def list_data_sources(api_url, api_token):
    """List data sources to help build data source mappings.

    Args:
        api_url: Base URL of the Validatar instance (no trailing slash).
        api_token: API token sent in the x-val-api-token header.

    Returns:
        The parsed JSON response; data sources live under "dataSources".

    Raises:
        requests.HTTPError: if the API returns a non-2xx status.
    """
    headers = {"x-val-api-token": api_token, "Content-Type": "application/json"}
    url = f"{api_url}/core/api/v1/data-sources"
    # Timeout prevents a CI job from hanging indefinitely on a dead endpoint.
    response = requests.get(url, headers=headers, timeout=60)
    response.raise_for_status()
    sources = response.json()
    print(f"\nData sources at {api_url}:")
    for ds in sources.get("dataSources", []):
        print(f" ID: {ds.get('id')} Name: {ds.get('name')}")
    return sources
# ---------------------------------------------------------------------------
# Step 1 — Export from source project
# ---------------------------------------------------------------------------
print("\n=== EXPORT ===")
print(f"Source: {SOURCE_API_URL} / Project {SOURCE_PROJECT_ID}")
print(f"Test folders: {TEST_FOLDER_IDS}")
print(f"Job folders: {JOB_FOLDER_IDS}")
source_headers = {
    "x-val-api-token": SOURCE_API_TOKEN,
    "Content-Type": "application/json"
}
export_payload = {
    "name": "ci-cd-export",
    "description": "Automated CI/CD export",
    "testFolderIds": TEST_FOLDER_IDS,
    "jobFolderIds": JOB_FOLDER_IDS,
    "includeCustomFields": True,
    "includeJobSchedules": False
}
export_url = f"{SOURCE_API_URL}/core/api/v1/projects/{SOURCE_PROJECT_ID}/export"
# Timeout keeps a dead or unreachable endpoint from hanging the CI job.
export_response = requests.post(export_url, headers=source_headers, json=export_payload, timeout=300)
if export_response.status_code != 200:
    print(f"ERROR: Export failed with status {export_response.status_code}")
    print(f"Response: {export_response.text}")
    sys.exit(1)
export_data = export_response.json()
content_base64 = export_data.get("contentBase64", "")
file_name = export_data.get("fileName", "export.xml")
if not content_base64:
    # Empty content usually means the folder IDs don't exist or contain no
    # tests/jobs — surface it here instead of failing later at import time.
    print("WARNING: Export returned empty content; check folder IDs and token access.")
print(f"Export successful: {file_name}")
print(f"Content size: {len(content_base64)} characters (base64)")
# Optional: save the export XML locally for troubleshooting
SAVE_EXPORT_FILE = os.environ.get("SAVE_EXPORT_FILE", "false").lower() == "true"
if SAVE_EXPORT_FILE:
    xml_bytes = base64.b64decode(content_base64)
    with open(file_name, "wb") as f:
        f.write(xml_bytes)
    print(f"Export file saved locally: {file_name}")
# ---------------------------------------------------------------------------
# Step 2 — Import into target project
# ---------------------------------------------------------------------------
print("\n=== IMPORT ===")
print(f"Target: {TARGET_API_URL} / Project {TARGET_PROJECT_ID}")
print("Conflict resolution: Overwrite")
target_headers = {
    "x-val-api-token": TARGET_API_TOKEN,
    "Content-Type": "application/json"
}
import_payload = {
    "mimeType": "application/xml",
    "contentBase64": content_base64,
    "dataSourceMappings": data_source_mappings,
    "includeJobSchedules": False,
    "conflictResolution": "Overwrite"
}
# Add target folder IDs only if specified — omitting them preserves the
# original folder structure from the export.
if TARGET_TEST_FOLDER_ID:
    import_payload["targetTestFolderId"] = int(TARGET_TEST_FOLDER_ID)
if TARGET_JOB_FOLDER_ID:
    import_payload["targetJobFolderId"] = int(TARGET_JOB_FOLDER_ID)
import_url = f"{TARGET_API_URL}/core/api/v1/projects/{TARGET_PROJECT_ID}/import"
# Imports of large packages can be slow; a generous timeout still bounds a hung call.
import_response = requests.post(import_url, headers=target_headers, json=import_payload, timeout=600)
if import_response.status_code == 200:
    print("Import successful.")
    sys.exit(0)
else:
    print(f"ERROR: Import failed with status {import_response.status_code}")
    try:
        # The API reports three distinct error buckets; print whichever exist.
        error_data = import_response.json()
        if error_data.get("validationErrors"):
            print("Validation errors:")
            for err in error_data["validationErrors"]:
                print(f" - {err}")
        if error_data.get("invalidDataSources"):
            print("Invalid data source mappings:")
            for ds in error_data["invalidDataSources"]:
                print(f" - Source key: {ds.get('sourceFileKey')}")
        if error_data.get("commitErrors"):
            print("Commit errors:")
            for err in error_data["commitErrors"]:
                print(f" - {err}")
    except Exception:
        # Body was not JSON — dump it raw so the failure is still diagnosable.
        print(f"Response: {import_response.text}")
    sys.exit(1)
Export and Import (PowerShell)
# ---------------------------------------------------------------------------
# Configuration — set as environment variables
# ---------------------------------------------------------------------------
$SourceApiUrl = $env:SOURCE_VALIDATAR_API_URL # e.g., https://dev.cloud.validatar.com
$SourceApiToken = $env:SOURCE_VALIDATAR_API_TOKEN
$SourceProjectId = [int]$env:SOURCE_PROJECT_ID
$TargetApiUrl = $env:TARGET_VALIDATAR_API_URL # e.g., https://prod.cloud.validatar.com
$TargetApiToken = $env:TARGET_VALIDATAR_API_TOKEN
$TargetProjectId = [int]$env:TARGET_PROJECT_ID
# Folder IDs to export (comma-separated).
# NOTE: @(...) forces an array even when there is a single ID — without it
# a lone pipeline element unrolls to a scalar and ConvertTo-Json serializes
# it as a bare number instead of the JSON array the API expects.
$TestFolderIds = if ($env:EXPORT_TEST_FOLDER_IDS) {
    @($env:EXPORT_TEST_FOLDER_IDS -split "," | ForEach-Object { [int]$_.Trim() })
} else { @() }
$JobFolderIds = if ($env:EXPORT_JOB_FOLDER_IDS) {
    @($env:EXPORT_JOB_FOLDER_IDS -split "," | ForEach-Object { [int]$_.Trim() })
} else { @() }
# Data source mappings: "source_id:target_id,source_id:target_id"
$DataSourceMappings = @()
if ($env:DATA_SOURCE_MAPPINGS) {
    # @(...) again: a single mapping must still serialize as a JSON array.
    $DataSourceMappings = @($env:DATA_SOURCE_MAPPINGS -split "," | ForEach-Object {
        $parts = $_.Trim() -split ":"
        @{ sourceFileKey = $parts[0].Trim(); targetId = [int]$parts[1].Trim() }
    })
}
# Optional target folder IDs
$TargetTestFolderId = $env:TARGET_TEST_FOLDER_ID
$TargetJobFolderId = $env:TARGET_JOB_FOLDER_ID
# ---------------------------------------------------------------------------
# Compatibility — TLS 1.2 for Windows PowerShell 5.1
# ---------------------------------------------------------------------------
if ($PSVersionTable.PSVersion.Major -le 5) {
    [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
}
# ---------------------------------------------------------------------------
# Step 1 — Export from source project
# ---------------------------------------------------------------------------
Write-Host "`n=== EXPORT ==="
Write-Host "Source: $SourceApiUrl / Project $SourceProjectId"
Write-Host "Test folders: $($TestFolderIds -join ', ')"
Write-Host "Job folders: $($JobFolderIds -join ', ')"
$SourceHeaders = @{
    "Content-Type"    = "application/json"
    "x-val-api-token" = $SourceApiToken
}
# Build the export request body as JSON.
$ExportPayload = @{
    name                = "ci-cd-export"
    description         = "Automated CI/CD export"
    testFolderIds       = $TestFolderIds
    jobFolderIds        = $JobFolderIds
    includeCustomFields = $true
    includeJobSchedules = $false
} | ConvertTo-Json -Depth 10
# Splat the request parameters so the call site stays readable.
$ExportRequest = @{
    Uri         = "$SourceApiUrl/core/api/v1/projects/$SourceProjectId/export"
    Method      = "Post"
    Headers     = $SourceHeaders
    Body        = $ExportPayload
    ErrorAction = "Stop"
}
try {
    $ExportResponse = Invoke-RestMethod @ExportRequest
}
catch {
    Write-Error "Export failed: $_"
    exit 1
}
$ContentBase64 = $ExportResponse.contentBase64
$FileName = $ExportResponse.fileName
Write-Host "Export successful: $FileName"
Write-Host "Content size: $($ContentBase64.Length) characters (base64)"
# Optional: save export XML locally for troubleshooting
if ($env:SAVE_EXPORT_FILE -eq "true") {
    [System.IO.File]::WriteAllBytes($FileName, [Convert]::FromBase64String($ContentBase64))
    Write-Host "Export file saved locally: $FileName"
}
# ---------------------------------------------------------------------------
# Step 2 — Import into target project
# ---------------------------------------------------------------------------
Write-Host "`n=== IMPORT ==="
Write-Host "Target: $TargetApiUrl / Project $TargetProjectId"
Write-Host "Conflict resolution: Overwrite"
$TargetHeaders = @{
    "x-val-api-token" = $TargetApiToken
    "Content-Type" = "application/json"
}
$ImportPayload = @{
    mimeType = "application/xml"
    contentBase64 = $ContentBase64
    dataSourceMappings = $DataSourceMappings
    includeJobSchedules = $false
    conflictResolution = "Overwrite"
}
# Target folder IDs are optional — only add them when supplied so the
# export's original folder structure is preserved otherwise.
if ($TargetTestFolderId) {
    $ImportPayload.targetTestFolderId = [int]$TargetTestFolderId
}
if ($TargetJobFolderId) {
    $ImportPayload.targetJobFolderId = [int]$TargetJobFolderId
}
$ImportBody = $ImportPayload | ConvertTo-Json -Depth 10
$ImportUrl = "$TargetApiUrl/core/api/v1/projects/$TargetProjectId/import"
try {
    # Invoke-WebRequest (not Invoke-RestMethod) so a non-2xx response lands
    # in the catch block with the raw body available for error parsing.
    $ImportResponse = Invoke-WebRequest -Uri $ImportUrl -Method Post -Headers $TargetHeaders -Body $ImportBody -ErrorAction Stop
    Write-Host "Import successful."
    exit 0
}
catch {
    Write-Error "Import failed: $_"
    # Attempt to surface the structured error buckets the API returns.
    # NOTE(review): $_.ErrorDetails.Message can be null on some PowerShell
    # versions/hosts — the inner catch covers that case. TODO confirm.
    try {
        $ErrorBody = $_.ErrorDetails.Message | ConvertFrom-Json
        if ($ErrorBody.validationErrors) {
            Write-Host "Validation errors:"
            $ErrorBody.validationErrors | ForEach-Object { Write-Host " - $_" }
        }
        if ($ErrorBody.invalidDataSources) {
            Write-Host "Invalid data source mappings:"
            $ErrorBody.invalidDataSources | ForEach-Object { Write-Host " - Source key: $($_.sourceFileKey)" }
        }
        if ($ErrorBody.commitErrors) {
            Write-Host "Commit errors:"
            $ErrorBody.commitErrors | ForEach-Object { Write-Host " - $_" }
        }
    }
    catch {
        Write-Host "Could not parse error response."
    }
    exit 1
}
Export and Import (Bash/curl)
#!/usr/bin/env bash
set -euo pipefail
# ---------------------------------------------------------------------------
# Configuration
# ---------------------------------------------------------------------------
SOURCE_API_URL="${SOURCE_VALIDATAR_API_URL:?Set SOURCE_VALIDATAR_API_URL}"
SOURCE_API_TOKEN="${SOURCE_VALIDATAR_API_TOKEN:?Set SOURCE_VALIDATAR_API_TOKEN}"
SOURCE_PROJECT_ID="${SOURCE_PROJECT_ID:?Set SOURCE_PROJECT_ID}"
TARGET_API_URL="${TARGET_VALIDATAR_API_URL:?Set TARGET_VALIDATAR_API_URL}"
TARGET_API_TOKEN="${TARGET_VALIDATAR_API_TOKEN:?Set TARGET_VALIDATAR_API_TOKEN}"
TARGET_PROJECT_ID="${TARGET_PROJECT_ID:?Set TARGET_PROJECT_ID}"
# Comma-separated folder IDs
TEST_FOLDER_IDS="${EXPORT_TEST_FOLDER_IDS:-}"
JOB_FOLDER_IDS="${EXPORT_JOB_FOLDER_IDS:-}"
# Data source mappings JSON array
# Example: [{"sourceFileKey":"42","targetId":87}]
DS_MAPPINGS_JSON="${DATA_SOURCE_MAPPINGS_JSON:-[]}"
# ---------------------------------------------------------------------------
# Build export payload
# ---------------------------------------------------------------------------
# Convert a comma-separated ID list ("1,2,3") into a JSON array ("[1,2,3]").
# JSON accepts the raw comma-separated numbers inside brackets, so a printf
# replaces the previous tr/sed/awk pipeline; separators are normalized so
# stray whitespace or a trailing comma cannot produce invalid JSON.
to_json_array() {
    local ids
    ids=$(printf '%s' "$1" | tr -d '[:space:]' | sed 's/,,*/,/g; s/^,//; s/,$//')
    printf '[%s]' "$ids"
}
TEST_FOLDER_JSON=$(to_json_array "$TEST_FOLDER_IDS")
JOB_FOLDER_JSON=$(to_json_array "$JOB_FOLDER_IDS")
EXPORT_PAYLOAD=$(cat <<PAYLOAD
{
  "name": "ci-cd-export",
  "description": "Automated CI/CD export",
  "testFolderIds": $TEST_FOLDER_JSON,
  "jobFolderIds": $JOB_FOLDER_JSON,
  "includeCustomFields": true,
  "includeJobSchedules": false
}
PAYLOAD
)
# ---------------------------------------------------------------------------
# Step 1 — Export
# ---------------------------------------------------------------------------
echo "=== EXPORT ==="
echo "Source: ${SOURCE_API_URL} / Project ${SOURCE_PROJECT_ID}"
# -f makes curl exit non-zero on HTTP errors (aborting via set -e); -sS is
# quiet on success but still prints curl's own error message on failure,
# unlike plain -s which would fail the CI job with no diagnostics at all.
EXPORT_RESPONSE=$(curl -sS -f -X POST \
    "${SOURCE_API_URL}/core/api/v1/projects/${SOURCE_PROJECT_ID}/export" \
    -H "x-val-api-token: ${SOURCE_API_TOKEN}" \
    -H "Content-Type: application/json" \
    -d "${EXPORT_PAYLOAD}")
# printf (not echo) avoids any shell-dependent escape interpretation of the
# JSON body before it reaches the parser.
CONTENT_BASE64=$(printf '%s' "$EXPORT_RESPONSE" | python3 -c "import sys,json; print(json.load(sys.stdin)['contentBase64'])")
echo "Export successful. Content size: ${#CONTENT_BASE64} characters"
# ---------------------------------------------------------------------------
# Step 2 — Import
# ---------------------------------------------------------------------------
echo ""
echo "=== IMPORT ==="
echo "Target: ${TARGET_API_URL} / Project ${TARGET_PROJECT_ID}"
IMPORT_PAYLOAD=$(cat <<PAYLOAD
{
  "mimeType": "application/xml",
  "contentBase64": "${CONTENT_BASE64}",
  "dataSourceMappings": ${DS_MAPPINGS_JSON},
  "includeJobSchedules": false,
  "conflictResolution": "Overwrite"
}
PAYLOAD
)
# Stream the payload over stdin (--data-binary @-): a large base64 blob
# passed as a -d command-line argument can exceed the OS ARG_MAX limit and
# fail with "Argument list too long". printf is a builtin, so the variable
# size does not hit that limit.
HTTP_CODE=$(printf '%s' "${IMPORT_PAYLOAD}" | curl -sS -o /tmp/import_response.json -w "%{http_code}" -X POST \
    "${TARGET_API_URL}/core/api/v1/projects/${TARGET_PROJECT_ID}/import" \
    -H "x-val-api-token: ${TARGET_API_TOKEN}" \
    -H "Content-Type: application/json" \
    --data-binary @-)
if [ "$HTTP_CODE" -eq 200 ]; then
    echo "Import successful."
    exit 0
else
    echo "ERROR: Import failed with HTTP ${HTTP_CODE}"
    cat /tmp/import_response.json
    exit 1
fi
Discovering Data Source IDs
Before running the export/import, you need to know the data source IDs in both projects to build the mapping. Use this helper snippet to list them:
import requests
import os
def list_data_sources(api_url, api_token):
    """Print data sources with their IDs and names.

    Args:
        api_url: Base URL of the Validatar instance (no trailing slash).
        api_token: API token sent in the x-val-api-token header.

    Returns:
        The parsed JSON response; entries live under "dataSources".

    Raises:
        requests.HTTPError: if the API returns a non-2xx status.
    """
    headers = {"x-val-api-token": api_token, "Content-Type": "application/json"}
    # Timeout keeps the helper from hanging on an unreachable endpoint.
    response = requests.get(f"{api_url}/core/api/v1/data-sources", headers=headers, timeout=60)
    response.raise_for_status()
    sources = response.json()
    print(f"Data sources at {api_url}:")
    for ds in sources.get("dataSources", []):
        print(f" ID: {ds.get('id'):>5} Name: {ds.get('name')}")
    return sources
# List sources on both instances to build the mapping.
# os.environ[...] (not .get) raises KeyError immediately if a variable is
# missing, so misconfiguration fails before any network call.
print("=== SOURCE ===")
list_data_sources(os.environ["SOURCE_VALIDATAR_API_URL"], os.environ["SOURCE_VALIDATAR_API_TOKEN"])
print("\n=== TARGET ===")
list_data_sources(os.environ["TARGET_VALIDATAR_API_URL"], os.environ["TARGET_VALIDATAR_API_TOKEN"])
Adapting for Your CI/CD Tool
| CI/CD Tool | How to Run Scripts | Secret Storage | Notes |
|---|---|---|---|
| Jenkins | Shell or PowerShell build step | Jenkins Credentials (Secret Text) | Use withCredentials block to inject secrets as env vars |
| GitLab CI | script block in .gitlab-ci.yml | CI/CD Variables (masked + protected) | Add Python to the Docker image or use the python:3 image |
| CircleCI | run step in config | Context or Project Environment Variables | Use a python orb or Docker executor |
| TeamCity | Command Line build step | Parameters (password type) | Reference parameters as %env.VARIABLE_NAME% |
| AWS CodePipeline | CodeBuild buildspec or Lambda | AWS Secrets Manager | Store tokens in Secrets Manager; reference in buildspec |
Troubleshooting
Export Returns Empty Content
- Verify the folder IDs or test/job IDs exist in the source project
- Confirm the API token user has access to the specified project
- Check that the project contains at least one test or job in the requested folders
Import Fails with Data Source Mapping Errors
- The `sourceFileKey` values must match the data source IDs used in the source project
- Use the data source listing helper above to verify IDs on both sides
- Every data source referenced by the exported tests must have a mapping entry
Import Fails with Validation Errors
- Check that the target project exists and the token user has access
- Verify the target folder IDs exist if you specified `targetTestFolderId` or `targetJobFolderId`
- Review the `validationErrors` array in the error response for specific issues
Cross-Version Import Issues
- Export/import is generally compatible across Validatar versions
- If importing from a newer version to an older one, some features may not be supported — check the error response for details