#!/bin/bash
#
# sf-data-import - Data import wrapper for the Salesforce CLI.
# Provides streamlined data import functionality with CSV/JSON support
# and insert/update/upsert operations.

# Fail fast: abort on unhandled errors, unset variables, and pipeline
# failures (also stops the parser loop cold if 'shift 2' ever fails).
set -euo pipefail

# Color codes for output formatting (interpreted by 'echo -e').
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[0;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly GRAY='\033[0;37m'
readonly NC='\033[0m' # No Color
# Print usage, options, examples, and supported formats to stdout.
show_usage() {
    echo -e "${BLUE}sf-data-import - Data Import Wrapper for Salesforce CLI${NC}"
    echo ""
    echo "USAGE:"
    echo "  sf-data-import [OPTIONS]"
    echo ""
    echo "OPTIONS:"
    echo "  -f, --file FILE          CSV or JSON file to import"
    echo "  -s, --sobject SOBJECT    Target sObject type"
    echo "  -o, --operation OP       Operation: insert, update, upsert (default: insert)"
    echo "  -e, --external-id FIELD  External ID field for upsert/update operations"
    echo "  -t, --target-org ORG     Target org username or alias"
    echo "  --bulk                   Use bulk API for large datasets"
    echo "  --wait MINUTES           Wait time in minutes (default: 10)"
    echo "  --batch-size SIZE        Batch size for bulk operations (default: 10000)"
    echo "  --ignore-errors          Continue on errors (don't fail entire job)"
    echo "  -v, --verbose            Enable verbose output"
    echo "  -h, --help               Show this help message"
    echo ""
    echo "EXAMPLES:"
    echo "  sf-data-import --file accounts.csv --sobject Account"
    echo "  sf-data-import --file contacts.json --sobject Contact --operation upsert --external-id Email"
    echo "  sf-data-import --file leads.csv --sobject Lead --bulk --batch-size 5000"
    echo "  sf-data-import --file updates.csv --sobject Account --operation update --external-id AccountNumber"
    echo ""
    echo "SUPPORTED FORMATS:"
    echo "  • CSV files with header row"
    echo "  • JSON files (array of objects or newline-delimited JSON)"
    echo ""
    echo "This script automatically checks for Salesforce CLI installation."
}
# Return 0 when the Salesforce CLI ('sf') is available on PATH, 1 otherwise.
check_salesforce_cli() {
    command -v sf &> /dev/null
}
# Run whichever sf-check diagnostic helper exists in the current
# directory (native, shell, or PowerShell variant); when none exists,
# print installation guidance instead.
run_salesforce_check() {
    if [[ -f "sf-check" ]]; then
        echo -e "${YELLOW}Running Salesforce CLI diagnostics...${NC}"
        ./sf-check
    elif [[ -f "sf-check.sh" ]]; then
        echo -e "${YELLOW}Running Salesforce CLI diagnostics...${NC}"
        bash sf-check.sh
    elif [[ -f "sf-check.ps1" ]]; then
        echo -e "${YELLOW}Running Salesforce CLI diagnostics...${NC}"
        pwsh sf-check.ps1
    else
        echo -e "${RED}Salesforce CLI not found and no diagnostic script available.${NC}"
        echo -e "${RED}Please install the Salesforce CLI: https://developer.salesforce.com/tools/salesforcecli${NC}"
    fi
}
# Determine the import file format from its extension, falling back to
# sniffing the first line of content for JSON markers ('{...}' or '[').
# Arguments: $1 - file path
# Outputs: "csv" or "json" on stdout; always returns 0.
detect_file_format() {
    local file="$1"
    local extension="${file##*.}"

    case "$extension" in
        csv|CSV)
            echo "csv"
            ;;
        json|JSON)
            echo "json"
            ;;
        *)
            # Unknown extension: peek at the first line of content.
            if head -n 1 "$file" | grep -q "^{.*}$\|^\["; then
                echo "json"
            else
                echo "csv"
            fi
            ;;
    esac
}
# Validate a CSV import file: require a header plus at least one data
# row, and warn when header/first-row field counts disagree.
# Arguments: $1 - path to the CSV file
# Returns: 0 if usable, 1 otherwise (error message on stdout).
validate_csv_file() {
    local file="$1"
    local line_count

    # grep -c '' counts lines correctly even when the final line lacks a
    # trailing newline ('wc -l' would undercount and reject valid files).
    line_count=$(grep -c '' "$file")

    if [[ "$line_count" -lt 2 ]]; then
        echo -e "${RED}Error: CSV file must have at least a header and one data row${NC}"
        return 1
    fi

    # Basic sanity check: compare comma-separated field counts of the
    # header and the first data row (does not handle quoted commas).
    local header_fields first_row_fields
    header_fields=$(head -n 1 "$file" | tr ',' '\n' | wc -l)
    first_row_fields=$(sed -n '2p' "$file" | tr ',' '\n' | wc -l)

    if [[ "$header_fields" -ne "$first_row_fields" ]]; then
        echo -e "${YELLOW}Warning: Header field count ($header_fields) differs from first row ($first_row_fields)${NC}"
    fi

    return 0
}
# Validate a JSON import file by parsing it with jq.
# Arguments: $1 - path to the JSON file
# Returns: 0 if valid, 1 otherwise (error message on stdout).
validate_json_file() {
    local file="$1"

    # jq is required for validation; fail with a clear message instead of
    # misreporting a missing tool (exit 127) as invalid JSON.
    if ! command -v jq &> /dev/null; then
        echo -e "${RED}Error: jq is required to validate JSON files but was not found${NC}"
        return 1
    fi

    if ! jq empty "$file" 2>/dev/null; then
        echo -e "${RED}Error: Invalid JSON format in file${NC}"
        return 1
    fi

    return 0
}
# Print a short preview of the import file: header, sample row, and
# record count for CSV; record count and sample keys for JSON; plus the
# file size in both cases.
# Arguments: $1 - file path, $2 - format ("csv" or "json")
show_file_preview() {
    local file="$1"
    local format="$2"

    echo -e "${YELLOW}📄 File Preview ($format):${NC}"
    echo -e "${GRAY}----------------------------------------${NC}"

    case "$format" in
        csv)
            echo -e "${GRAY}Header: $(head -n 1 "$file")${NC}"
            echo -e "${GRAY}Sample: $(sed -n '2p' "$file")${NC}"
            # Record count excludes the header row.
            echo -e "${GRAY}Records: $(($(wc -l < "$file") - 1))${NC}"
            ;;
        json)
            if jq -e 'type == "array"' "$file" >/dev/null 2>&1; then
                echo -e "${GRAY}Array format with $(jq '. | length' "$file") records${NC}"
                echo -e "${GRAY}Sample keys: $(jq -r '.[0] | keys | join(", ")' "$file" 2>/dev/null || echo "N/A")${NC}"
            else
                # Not a top-level JSON array: assume newline-delimited JSON.
                echo -e "${GRAY}NDJSON format${NC}"
                echo -e "${GRAY}Records: $(wc -l < "$file")${NC}"
            fi
            ;;
    esac

    echo -e "${GRAY}File size: $(du -h "$file" | cut -f1)${NC}"
    echo -e "${GRAY}----------------------------------------${NC}"
}
# Defaults for all configurable options; overridden by CLI flags below.
FILE=""             # input CSV/JSON file (required)
SOBJECT=""          # target sObject API name (required)
OPERATION="insert"  # insert | update | upsert
EXTERNAL_ID=""      # external ID field for upsert/update matching
TARGET_ORG=""       # org alias/username; empty = default org
USE_BULK=false      # use the Bulk API
WAIT_TIME="10"      # minutes to wait for job completion
BATCH_SIZE="10000"  # batch size for bulk operations
IGNORE_ERRORS=false # keep going on per-record failures
VERBOSE=false       # extra diagnostics and file preview
# Parse command line arguments.
# Every value-taking option is guarded: a trailing option with no value
# previously read an empty "$2" and then 'shift 2' failed WITHOUT
# shifting, leaving $1 unchanged and looping forever. Now it errors out.
while [[ $# -gt 0 ]]; do
    case $1 in
        -f|--file)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            FILE="$2"
            shift 2
            ;;
        -s|--sobject)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            SOBJECT="$2"
            shift 2
            ;;
        -o|--operation)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            OPERATION="$2"
            shift 2
            ;;
        -e|--external-id)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            EXTERNAL_ID="$2"
            shift 2
            ;;
        -t|--target-org)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            TARGET_ORG="$2"
            shift 2
            ;;
        --bulk)
            USE_BULK=true
            shift
            ;;
        --wait)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            WAIT_TIME="$2"
            shift 2
            ;;
        --batch-size)
            [[ $# -ge 2 ]] || { echo -e "${RED}Error: $1 requires a value${NC}"; exit 1; }
            BATCH_SIZE="$2"
            shift 2
            ;;
        --ignore-errors)
            IGNORE_ERRORS=true
            shift
            ;;
        -v|--verbose)
            VERBOSE=true
            shift
            ;;
        -h|--help)
            show_usage
            exit 0
            ;;
        *)
            echo -e "${RED}Unknown option: $1${NC}"
            echo ""
            show_usage
            exit 1
            ;;
    esac
done
# Ensure the Salesforce CLI is installed before doing anything else;
# fall back to the diagnostic helper for installation guidance.
if ! check_salesforce_cli; then
    run_salesforce_check
    exit 1
fi

# Validate required parameters: the input file is mandatory.
if [[ -z "$FILE" ]]; then
    echo -e "${RED}Error: Must specify --file parameter${NC}"
    echo ""
    echo -e "${YELLOW}Usage examples:${NC}"
    echo -e "${GRAY} sf-data-import --file data.csv --sobject Account${NC}"
    echo -e "${GRAY} sf-data-import --file contacts.json --sobject Contact --operation upsert --external-id Email${NC}"
    echo ""
    echo -e "${YELLOW}Use --help for detailed usage information.${NC}"
    exit 1
fi

# The target sObject is also mandatory.
if [[ -z "$SOBJECT" ]]; then
    echo -e "${RED}Error: Must specify --sobject parameter${NC}"
    exit 1
fi

# The import file must exist and be a regular file.
if [[ ! -f "$FILE" ]]; then
    echo -e "${RED}Error: File not found: $FILE${NC}"
    exit 1
fi
# Only insert/update/upsert are supported operations.
if [[ "$OPERATION" != "insert" && "$OPERATION" != "update" && "$OPERATION" != "upsert" ]]; then
    echo -e "${RED}Error: Operation must be 'insert', 'update', or 'upsert'${NC}"
    exit 1
fi

# upsert/update match records via an external ID field, so it is mandatory.
if [[ ("$OPERATION" == "upsert" || "$OPERATION" == "update") && -z "$EXTERNAL_ID" ]]; then
    echo -e "${RED}Error: External ID field is required for $OPERATION operations${NC}"
    exit 1
fi

# Detect the file format (csv/json) and report what will be imported.
FILE_FORMAT=$(detect_file_format "$FILE")
echo -e "${GREEN}Using file: $FILE${NC}"
echo -e "${CYAN}Detected format: $FILE_FORMAT${NC}"

# Validate the file content according to the detected format.
case "$FILE_FORMAT" in
    csv)
        if ! validate_csv_file "$FILE"; then
            exit 1
        fi
        ;;
    json)
        if ! validate_json_file "$FILE"; then
            exit 1
        fi
        ;;
esac
# Show a file preview when verbose output was requested.
if [[ "$VERBOSE" == true ]]; then
    show_file_preview "$FILE" "$FILE_FORMAT"
fi

# Build the sf command as an array so values containing spaces survive
# word-splitting intact when the command is executed.
SF_ARGS=("data" "$OPERATION")

# Required arguments: input file and target sObject.
SF_ARGS+=("--file" "$FILE")
SF_ARGS+=("--sobject" "$SOBJECT")

# Optional: explicit target org (otherwise the CLI default org is used).
if [[ -n "$TARGET_ORG" ]]; then
    SF_ARGS+=("--target-org" "$TARGET_ORG")
    echo -e "${CYAN}Target org: $TARGET_ORG${NC}"
fi

# Optional: external ID field for upsert/update matching.
if [[ -n "$EXTERNAL_ID" ]]; then
    SF_ARGS+=("--external-id" "$EXTERNAL_ID")
    echo -e "${CYAN}External ID field: $EXTERNAL_ID${NC}"
fi

if [[ "$USE_BULK" == true ]]; then
    SF_ARGS+=("--bulk")
    echo -e "${YELLOW}Using Bulk API${NC}"
fi

# Only pass --wait when it differs from this wrapper's default.
if [[ "$WAIT_TIME" != "10" ]]; then
    SF_ARGS+=("--wait" "$WAIT_TIME")
fi

# Batch size only applies to bulk jobs; skip when it is the default.
if [[ "$USE_BULK" == true && "$BATCH_SIZE" != "10000" ]]; then
    SF_ARGS+=("--batch-size" "$BATCH_SIZE")
    echo -e "${CYAN}Batch size: $BATCH_SIZE${NC}"
fi

if [[ "$IGNORE_ERRORS" == true ]]; then
    SF_ARGS+=("--ignore-errors")
    echo -e "${YELLOW}Ignoring individual record errors${NC}"
fi

# Pass verbosity through to the CLI as well.
if [[ "$VERBOSE" == true ]]; then
    SF_ARGS+=("--verbose")
fi
# Display import information
echo ""
echo -e "${BLUE}📥 Starting Data Import${NC}"
echo -e "${BLUE}=======================${NC}"
echo -e "${CYAN}Operation: $OPERATION${NC}"
echo -e "${CYAN}sObject: $SOBJECT${NC}"

# Show the exact command for transparency and reproducibility.
echo ""
echo -e "${GRAY}Executing: sf ${SF_ARGS[*]}${NC}"
echo ""

# Execute the import; capture the exit code (the 'if' guards it so the
# script can report the failure before propagating the status).
if sf "${SF_ARGS[@]}"; then
    IMPORT_EXIT_CODE=0
else
    IMPORT_EXIT_CODE=$?
fi

echo ""
if [[ $IMPORT_EXIT_CODE -eq 0 ]]; then
    echo -e "${GREEN}✅ Data import completed successfully!${NC}"

    # Summarize what happened, per operation.
    case "$OPERATION" in
        insert)
            echo -e "${CYAN}📊 Records inserted into $SOBJECT${NC}"
            ;;
        update)
            echo -e "${CYAN}📊 Records updated in $SOBJECT${NC}"
            ;;
        upsert)
            echo -e "${CYAN}📊 Records upserted in $SOBJECT (using $EXTERNAL_ID as external ID)${NC}"
            ;;
    esac

    if [[ "$VERBOSE" == true ]]; then
        echo -e "${YELLOW}💡 Check the output above for detailed results and any warnings${NC}"
    fi
else
    echo -e "${RED}❌ Data import failed with exit code: $IMPORT_EXIT_CODE${NC}"
    echo -e "${YELLOW}💡 Check data format, field mappings, and validation rules${NC}"
    exit "$IMPORT_EXIT_CODE"
fi