Files
sf-cli-wrapper/sf-data-import
reynold 452e095f90 Reorganize directory structure: move utility files to misc/ directory
- Move all non-wrapper files (testing scripts, documentation, utilities) to misc/ directory
- Keep SF CLI wrapper scripts and README.md in root directory for better organization
- Maintain clean root directory with only the actual wrapper scripts and main documentation
- All wrapper scripts remain easily accessible and discoverable
- Supporting files are organized in misc/ subdirectory
2025-08-28 22:32:19 +08:00

398 lines
11 KiB
Bash
Executable File

#!/bin/bash
# Data import wrapper for Salesforce CLI
# Provides streamlined data import functionality with CSV/JSON support and upsert operations
# Color codes for output formatting
# (ANSI escape sequences stored as literal '\033[...' strings; they only
# render as colors when printed via `echo -e`, which is how this script
# emits them throughout)
readonly RED='\033[0;31m'    # errors
readonly GREEN='\033[0;32m'  # success messages
readonly YELLOW='\033[0;33m' # warnings / notices
readonly BLUE='\033[0;34m'   # titles and section headers
readonly CYAN='\033[0;36m'   # informational values
readonly GRAY='\033[0;37m'   # low-emphasis detail output
readonly NC='\033[0m' # No Color (reset to terminal default)
# Function to display usage information.
# The colored title is printed with printf '%b' (which interprets the ANSI
# escapes in $BLUE/$NC exactly as `echo -e` would); the plain help text is
# emitted from a quoted here-doc so no expansion occurs inside it.
show_usage() {
    printf '%b\n' "${BLUE}sf-data-import - Data Import Wrapper for Salesforce CLI${NC}"
    cat <<'HELP_EOF'

USAGE:
 sf-data-import [OPTIONS]

OPTIONS:
 -fl CSV or JSON file to import
 -so Target sObject type
 -to Target org username or alias
 -op Operation: insert, update, upsert (default: insert)
 -ei External ID field for upsert/update operations
 -bk Use bulk API for large datasets
 -wt Wait time in minutes (default: 10)
 -bs Batch size for bulk operations (default: 10000)
 -ie Continue on errors (don't fail entire job)
 -ve Enable verbose output
 -hp Show this help message

EXAMPLES:
 sf-data-import -fl accounts.csv -so Account
 sf-data-import -fl contacts.json -so Contact -op upsert -ei Email
 sf-data-import -fl leads.csv -so Lead -bk -bs 5000
 sf-data-import -fl updates.csv -so Account -op update -ei AccountNumber

SUPPORTED FORMATS:
 • CSV files with header row
 • JSON files (array of objects or newline-delimited JSON)

This script automatically checks for Salesforce CLI installation.
HELP_EOF
}
# Function to check if Salesforce CLI is installed.
# Succeeds (returns 0) when an `sf` command is resolvable in the current
# environment, fails (returns 1) otherwise; the command's own status is the
# function's status, so no explicit return is needed.
check_salesforce_cli() {
    command -v sf > /dev/null 2>&1
}
# Function to run sf-check diagnostics.
# Looks for a diagnostic script in the current directory in priority order
# (native sf-check, then sf-check.sh, then sf-check.ps1) and runs the first
# one found; if none exists, prints installation guidance instead.
run_salesforce_check() {
    local candidate
    for candidate in sf-check sf-check.sh sf-check.ps1; do
        if [[ -f "$candidate" ]]; then
            echo -e "${YELLOW}Running Salesforce CLI diagnostics...${NC}"
            case "$candidate" in
                sf-check) ./sf-check ;;
                sf-check.sh) bash sf-check.sh ;;
                sf-check.ps1) pwsh sf-check.ps1 ;;
            esac
            return
        fi
    done
    echo -e "${RED}Salesforce CLI not found and no diagnostic script available.${NC}"
    echo -e "${RED}Please install the Salesforce CLI: https://developer.salesforce.com/tools/salesforcecli${NC}"
}
# Function to detect file format.
# Echoes "csv" or "json" for the file in $1, deciding by extension first and
# falling back to sniffing the first line for unrecognized extensions.
detect_file_format() {
    local file="$1"
    local extension="${file##*.}"
    case "$extension" in
        csv|CSV)
            echo "csv"
            ;;
        json|JSON)
            echo "json"
            ;;
        *)
            # No recognized extension: sniff content. JSON documents start
            # with '{' or '[' (possibly after whitespace). The previous
            # pattern required "{...}" on a single line, so a pretty-printed
            # object whose first line is just "{" was misclassified as CSV.
            if head -n 1 "$file" | grep -q '^[[:space:]]*[{[]'; then
                echo "json"
            else
                echo "csv"
            fi
            ;;
    esac
}
# Function to validate CSV file.
# Requires at least a header row plus one data row (returns 1 otherwise);
# additionally warns — without failing — when the first data row has a
# different comma-separated field count than the header.
validate_csv_file() {
    local file="$1"
    # grep -c '' counts every line, including a final line that lacks a
    # trailing newline; the previous `wc -l` counted newlines only, wrongly
    # rejecting a valid header+single-row file with no terminating newline.
    local line_count
    line_count=$(grep -c '' "$file")
    if [[ $line_count -lt 2 ]]; then
        echo -e "${RED}Error: CSV file must have at least a header and one data row${NC}"
        return 1
    fi
    # Naive comma split (does not honor quoted fields) — sufficient for a
    # consistency warning, not a full CSV parse. awk's NF is robust to a
    # missing final newline, unlike the former tr|wc pipeline.
    local header_fields first_row_fields
    header_fields=$(awk -F',' 'NR==1 {print NF; exit}' "$file")
    first_row_fields=$(awk -F',' 'NR==2 {print NF; exit}' "$file")
    if [[ $header_fields -ne $first_row_fields ]]; then
        echo -e "${YELLOW}Warning: Header field count ($header_fields) differs from first row ($first_row_fields)${NC}"
    fi
    return 0
}
# Function to validate JSON file.
# Uses `jq empty` to parse-check the file in $1; returns 1 with an error
# message on invalid JSON, 0 on success.
validate_json_file() {
    local file="$1"
    # jq is an external dependency. Previously, when jq was not installed,
    # the `jq empty` invocation failed and every JSON file was falsely
    # reported as invalid. Skip validation (with a stderr warning) instead.
    if ! command -v jq > /dev/null 2>&1; then
        echo -e "${YELLOW}Warning: jq not found - skipping JSON validation${NC}" >&2
        return 0
    fi
    # Try to parse JSON
    if ! jq empty "$file" 2>/dev/null; then
        echo -e "${RED}Error: Invalid JSON format in file${NC}"
        return 1
    fi
    return 0
}
# Function to show file preview
# Prints a short, gray-formatted summary of the import file: header, sample
# row, and record count for CSV; record count and top-level keys for JSON;
# plus the human-readable file size for both.
# Args: $1 - path to the file, $2 - "csv" or "json" (from detect_file_format)
# NOTE(review): the JSON branch shells out to jq; if jq is missing these
# substitutions fail and only the "N/A" fallback appears — confirm jq is a
# documented prerequisite for verbose JSON previews.
show_file_preview() {
local file="$1"
local format="$2"
echo -e "${YELLOW}📄 File Preview ($format):${NC}"
echo -e "${GRAY}----------------------------------------${NC}"
case "$format" in
csv)
echo -e "${GRAY}Header: $(head -n 1 "$file")${NC}"
echo -e "${GRAY}Sample: $(sed -n '2p' "$file")${NC}"
# Record count excludes the header row (hence the -1)
echo -e "${GRAY}Records: $(($(wc -l < "$file") - 1))${NC}"
;;
json)
if jq -e 'type == "array"' "$file" >/dev/null 2>&1; then
echo -e "${GRAY}Array format with $(jq '. | length' "$file") records${NC}"
echo -e "${GRAY}Sample keys: $(jq -r '.[0] | keys | join(", ")' "$file" 2>/dev/null || echo "N/A")${NC}"
else
# Not a top-level JSON array: treated as newline-delimited JSON,
# so each line counts as one record
echo -e "${GRAY}NDJSON format${NC}"
echo -e "${GRAY}Records: $(wc -l < "$file")${NC}"
fi
;;
esac
echo -e "${GRAY}File size: $(du -h "$file" | cut -f1)${NC}"
echo -e "${GRAY}----------------------------------------${NC}"
}
# Defaults for every CLI-configurable setting; each may be overridden by the
# option parser below.
FILE=''
SOBJECT=''
TARGET_ORG=''
EXTERNAL_ID=''
OPERATION='insert'   # one of: insert | update | upsert
WAIT_TIME='10'       # minutes to wait for the job
BATCH_SIZE='10000'   # records per bulk batch
USE_BULK=false
IGNORE_ERRORS=false
VERBOSE=false
# Invoked with no arguments at all: print help and exit successfully.
if (( $# == 0 )); then
    show_usage
    exit 0
fi
# Parse command line arguments.
# Options taking a value consume two positional parameters. A missing value
# is now an explicit fatal error: `shift 2` with only one argument left is a
# no-op in bash (shift N greater than $# does not shift and returns non-zero),
# so the old code spun forever on e.g. `sf-data-import -fl`.
while [[ $# -gt 0 ]]; do
    case $1 in
        -fl|-so|-to|-op|-ei|-wt|-bs)
            # All value-taking options: verify the value is present, then
            # assign it to the matching variable and consume both words.
            if [[ $# -lt 2 ]]; then
                echo -e "${RED}Error: Option $1 requires a value${NC}"
                exit 1
            fi
            case $1 in
                -fl) FILE="$2" ;;
                -so) SOBJECT="$2" ;;
                -to) TARGET_ORG="$2" ;;
                -op) OPERATION="$2" ;;
                -ei) EXTERNAL_ID="$2" ;;
                -wt) WAIT_TIME="$2" ;;
                -bs) BATCH_SIZE="$2" ;;
            esac
            shift 2
            ;;
        -bk)
            USE_BULK=true
            shift
            ;;
        -ie)
            IGNORE_ERRORS=true
            shift
            ;;
        -ve)
            VERBOSE=true
            shift
            ;;
        -hp)
            show_usage
            exit 0
            ;;
        *)
            echo -e "${RED}Unknown option: $1${NC}"
            echo ""
            show_usage
            exit 1
            ;;
    esac
done
# Silently check for Salesforce CLI
# (the check itself prints nothing; the diagnostic helper only runs, and the
# script only exits, when `sf` is missing)
if ! check_salesforce_cli; then
run_salesforce_check
exit 1
fi
# Validate required parameters
# -fl (input file) is mandatory; show inline examples on omission
if [[ -z "$FILE" ]]; then
echo -e "${RED}Error: Must specify -fl parameter${NC}"
echo ""
echo -e "${YELLOW}Usage examples:${NC}"
echo -e "${GRAY} sf-data-import -fl data.csv -so Account${NC}"
echo -e "${GRAY} sf-data-import -fl contacts.json -so Contact -op upsert -ei Email${NC}"
echo ""
echo -e "${YELLOW}Use -hp for detailed usage information.${NC}"
exit 1
fi
# -so (target sObject type) is mandatory
if [[ -z "$SOBJECT" ]]; then
echo -e "${RED}Error: Must specify -so parameter${NC}"
exit 1
fi
# Validate file exists
if [[ ! -f "$FILE" ]]; then
echo -e "${RED}Error: File not found: $FILE${NC}"
exit 1
fi
# Validate operation
if [[ "$OPERATION" != "insert" && "$OPERATION" != "update" && "$OPERATION" != "upsert" ]]; then
echo -e "${RED}Error: Operation must be 'insert', 'update', or 'upsert'${NC}"
exit 1
fi
# Validate external ID for upsert/update operations
# (both operations need a field to match existing records against)
if [[ ("$OPERATION" == "upsert" || "$OPERATION" == "update") && -z "$EXTERNAL_ID" ]]; then
echo -e "${RED}Error: External ID field is required for $OPERATION operations${NC}"
exit 1
fi
# Detect and validate file format
# FILE_FORMAT becomes "csv" or "json" (decided by extension, with a content
# sniff as fallback — see detect_file_format)
FILE_FORMAT=$(detect_file_format "$FILE")
echo -e "${GREEN}Using file: $FILE${NC}"
echo -e "${CYAN}Detected format: $FILE_FORMAT${NC}"
# Validate file content; abort before invoking sf if the file is structurally
# unusable for the detected format
case "$FILE_FORMAT" in
csv)
if ! validate_csv_file "$FILE"; then
exit 1
fi
;;
json)
if ! validate_json_file "$FILE"; then
exit 1
fi
;;
esac
# Show file preview if verbose
if [[ "$VERBOSE" == true ]]; then
show_file_preview "$FILE" "$FILE_FORMAT"
fi
# Build the sf command - SF CLI now uses specific commands for different operations
# (data import bulk / data update bulk / data upsert bulk). The argv is
# assembled in the SF_ARGS array so values containing spaces survive quoting.
case "$OPERATION" in
"insert")
# For insert operations, use bulk import (works for all data types)
SF_ARGS=("data" "import" "bulk")
SF_ARGS+=("--file" "$FILE")
SF_ARGS+=("--sobject" "$SOBJECT")
;;
"update")
# For update operations, use bulk update
SF_ARGS=("data" "update" "bulk")
SF_ARGS+=("--file" "$FILE")
SF_ARGS+=("--sobject" "$SOBJECT")
;;
"upsert")
# For upsert operations, use bulk upsert
SF_ARGS=("data" "upsert" "bulk")
SF_ARGS+=("--file" "$FILE")
SF_ARGS+=("--sobject" "$SOBJECT")
;;
esac
# Add optional parameters
if [[ -n "$TARGET_ORG" ]]; then
SF_ARGS+=("--target-org" "$TARGET_ORG")
echo -e "${CYAN}Target org: $TARGET_ORG${NC}"
fi
# NOTE(review): --external-id is a `data upsert bulk` flag; verify that
# `data update bulk` (and insert, where -ei could also be supplied) accept it,
# since it is appended here for any operation whenever -ei was given.
if [[ -n "$EXTERNAL_ID" ]]; then
SF_ARGS+=("--external-id" "$EXTERNAL_ID")
echo -e "${CYAN}External ID field: $EXTERNAL_ID${NC}"
fi
# NOTE(review): the `... bulk` subcommands already use the Bulk API; confirm
# a separate --bulk flag exists, otherwise -bk makes sf reject the command.
if [[ "$USE_BULK" == true ]]; then
SF_ARGS+=("--bulk")
echo -e "${YELLOW}Using Bulk API${NC}"
fi
# Only pass --wait when the user changed it from the 10-minute default
if [[ "$WAIT_TIME" != "10" ]]; then
SF_ARGS+=("--wait" "$WAIT_TIME")
fi
# NOTE(review): --batch-size came from the legacy sfdx bulk commands; confirm
# the current sf CLI still supports it before relying on -bs.
if [[ "$USE_BULK" == true && "$BATCH_SIZE" != "10000" ]]; then
SF_ARGS+=("--batch-size" "$BATCH_SIZE")
echo -e "${CYAN}Batch size: $BATCH_SIZE${NC}"
fi
# NOTE(review): confirm --ignore-errors is accepted by the bulk subcommands.
if [[ "$IGNORE_ERRORS" == true ]]; then
SF_ARGS+=("--ignore-errors")
echo -e "${YELLOW}Ignoring individual record errors${NC}"
fi
# Add verbose flag if requested
if [[ "$VERBOSE" == true ]]; then
SF_ARGS+=("--verbose")
fi
# Display import information
echo ""
echo -e "${BLUE}📥 Starting Data Import${NC}"
echo -e "${BLUE}=======================${NC}"
echo -e "${CYAN}Operation: $OPERATION${NC}"
echo -e "${CYAN}sObject: $SOBJECT${NC}"
# Display the command being run
echo ""
echo -e "${GRAY}Executing: sf ${SF_ARGS[*]}${NC}"
echo ""
# Execute the command
# The if/else form captures sf's exit status explicitly (and would also keep
# a future `set -e` from aborting before the summary below is printed)
if sf "${SF_ARGS[@]}"; then
IMPORT_EXIT_CODE=0
else
IMPORT_EXIT_CODE=$?
fi
echo ""
if [[ $IMPORT_EXIT_CODE -eq 0 ]]; then
echo -e "${GREEN}✅ Data import completed successfully!${NC}"
# Operation-specific success summary
case "$OPERATION" in
insert)
echo -e "${CYAN}📊 Records inserted into $SOBJECT${NC}"
;;
update)
echo -e "${CYAN}📊 Records updated in $SOBJECT${NC}"
;;
upsert)
echo -e "${CYAN}📊 Records upserted in $SOBJECT (using $EXTERNAL_ID as external ID)${NC}"
;;
esac
if [[ "$VERBOSE" == true ]]; then
echo -e "${YELLOW}💡 Check the output above for detailed results and any warnings${NC}"
fi
# Success path falls through: the script exits 0 via the final echo's status
else
echo -e "${RED}❌ Data import failed with exit code: $IMPORT_EXIT_CODE${NC}"
echo -e "${YELLOW}💡 Check data format, field mappings, and validation rules${NC}"
# Propagate sf's own exit code to the caller
exit $IMPORT_EXIT_CODE
fi