#!/usr/bin/env node
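// Enhanced data transfer tool for Claude Relay Service.
// Exports API Keys, Claude/Gemini accounts, admins and usage statistics from
// Redis to a JSON backup, and imports them back, handling encryption,
// decryption and plaintext API Key hashing transparently.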

const fs = require('fs').promises
const crypto = require('crypto')
const redis = require('../src/models/redis')
const logger = require('../src/utils/logger')
const readline = require('readline')
const config = require('../config/config')

// Parse CLI arguments of the form `<command> --key=value --flag`
const args = process.argv.slice(2)
const command = args[0]
const params = {}

args.slice(1).forEach((arg) => {
  const [key, value] = arg.split('=')
  // Normalize boolean strings so flags like --decrypt=false behave as booleans
  let parsed = value === undefined ? true : value
  if (parsed === 'true') {
    parsed = true
  } else if (parsed === 'false') {
    parsed = false
  }
  params[key.replace('--', '')] = parsed
})
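
// For example, `node scripts/data-transfer-enhanced.js export --output=backup.json --sanitize`
// yields command = 'export' and params = { output: 'backup.json', sanitize: true }.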

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout
})

async function askConfirmation(question) {
  return new Promise((resolve) => {
    rl.question(`${question} (yes/no): `, (answer) => {
      resolve(answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y')
    })
  })
}
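
// Encrypted fields are stored as `iv:ciphertext` (both hex-encoded). The
// AES-256-CBC key is derived from config.security.encryptionKey via scrypt,
// with distinct salts for Claude ('salt') and Gemini ('gemini-account-salt').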
function decryptClaudeData(encryptedData) {
  if (!encryptedData || !config.security.encryptionKey) {
    return encryptedData
  }

  try {
    if (encryptedData.includes(':')) {
      const parts = encryptedData.split(':')
      const key = crypto.scryptSync(config.security.encryptionKey, 'salt', 32)
      const iv = Buffer.from(parts[0], 'hex')
      const encrypted = parts[1]

      const decipher = crypto.createDecipheriv('aes-256-cbc', key, iv)
      let decrypted = decipher.update(encrypted, 'hex', 'utf8')
      decrypted += decipher.final('utf8')
      return decrypted
    }
    return encryptedData
  } catch (error) {
    logger.warn(`⚠️ Failed to decrypt data: ${error.message}`)
    return encryptedData
  }
}

function decryptGeminiData(encryptedData) {
  if (!encryptedData || !config.security.encryptionKey) {
    return encryptedData
  }

  try {
    if (encryptedData.includes(':')) {
      const parts = encryptedData.split(':')
      const key = crypto.scryptSync(config.security.encryptionKey, 'gemini-account-salt', 32)
      const iv = Buffer.from(parts[0], 'hex')
      const encrypted = parts[1]

      const decipher = crypto.createDecipheriv('aes-256-cbc', key, iv)
      let decrypted = decipher.update(encrypted, 'hex', 'utf8')
      decrypted += decipher.final('utf8')
      return decrypted
    }
    return encryptedData
  } catch (error) {
    logger.warn(`⚠️ Failed to decrypt data: ${error.message}`)
    return encryptedData
  }
}
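
// API Keys are stored as 64-char hex SHA-256 digests (key + encryption key as
// salt). isPlaintextApiKey() tells the two formats apart so the importer knows
// when to re-hash a key and rebuild the apikey:hash_map lookup index.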
function hashApiKey(apiKey) {
  if (!apiKey || !config.security.encryptionKey) {
    return apiKey
  }

  return crypto
    .createHash('sha256')
    .update(apiKey + config.security.encryptionKey)
    .digest('hex')
}

function isPlaintextApiKey(apiKey) {
  if (!apiKey || typeof apiKey !== 'string') {
    return false
  }

  // A 64-character hex string is already a SHA-256 hash, not a plaintext key
  if (apiKey.length === 64 && /^[a-f0-9]+$/i.test(apiKey)) {
    return false
  }

  return true
}

function encryptClaudeData(data) {
  if (!data || !config.security.encryptionKey) {
    return data
  }

  const key = crypto.scryptSync(config.security.encryptionKey, 'salt', 32)
  const iv = crypto.randomBytes(16)

  const cipher = crypto.createCipheriv('aes-256-cbc', key, iv)
  let encrypted = cipher.update(data, 'utf8', 'hex')
  encrypted += cipher.final('hex')

  return `${iv.toString('hex')}:${encrypted}`
}

function encryptGeminiData(data) {
  if (!data || !config.security.encryptionKey) {
    return data
  }

  const key = crypto.scryptSync(config.security.encryptionKey, 'gemini-account-salt', 32)
  const iv = crypto.randomBytes(16)

  const cipher = crypto.createCipheriv('aes-256-cbc', key, iv)
  let encrypted = cipher.update(data, 'utf8', 'hex')
  encrypted += cipher.final('hex')

  return `${iv.toString('hex')}:${encrypted}`
}
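
// Usage statistics are spread across several Redis key families:
//   usage:{keyId}                                 - lifetime totals
//   usage:daily:{keyId}:{YYYY-MM-DD}              - per-day counters
//   usage:monthly:{keyId}:{YYYY-MM}               - per-month counters
//   usage:hourly:{keyId}:{YYYY-MM-DD:HH}          - per-hour counters
//   usage:{keyId}:model:(daily|monthly):{model}:{period} - per-model breakdowns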
async function exportUsageStats(keyId) {
  try {
    const stats = {
      total: {},
      daily: {},
      monthly: {},
      hourly: {},
      models: {}
    }

    // Lifetime totals
    const totalKey = `usage:${keyId}`
    const totalData = await redis.client.hgetall(totalKey)
    if (totalData && Object.keys(totalData).length > 0) {
      stats.total = totalData
    }

    // Last 30 days of daily stats
    const today = new Date()
    for (let i = 0; i < 30; i++) {
      const date = new Date(today)
      date.setDate(date.getDate() - i)
      const dateStr = date.toISOString().split('T')[0]
      const dailyKey = `usage:daily:${keyId}:${dateStr}`

      const dailyData = await redis.client.hgetall(dailyKey)
      if (dailyData && Object.keys(dailyData).length > 0) {
        stats.daily[dateStr] = dailyData
      }
    }

    // Last 12 months of monthly stats
    for (let i = 0; i < 12; i++) {
      const date = new Date(today)
      date.setMonth(date.getMonth() - i)
      const monthStr = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}`
      const monthlyKey = `usage:monthly:${keyId}:${monthStr}`

      const monthlyData = await redis.client.hgetall(monthlyKey)
      if (monthlyData && Object.keys(monthlyData).length > 0) {
        stats.monthly[monthStr] = monthlyData
      }
    }

    // Last 24 hours of hourly stats
    for (let i = 0; i < 24; i++) {
      const date = new Date(today)
      date.setHours(date.getHours() - i)
      const dateStr = date.toISOString().split('T')[0]
      const hour = String(date.getHours()).padStart(2, '0')
      const hourKey = `${dateStr}:${hour}`
      const hourlyKey = `usage:hourly:${keyId}:${hourKey}`

      const hourlyData = await redis.client.hgetall(hourlyKey)
      if (hourlyData && Object.keys(hourlyData).length > 0) {
        stats.hourly[hourKey] = hourlyData
      }
    }

    // Per-model daily stats
    const modelDailyPattern = `usage:${keyId}:model:daily:*`
    const modelDailyKeys = await redis.client.keys(modelDailyPattern)
    for (const key of modelDailyKeys) {
      const match = key.match(/usage:.+:model:daily:(.+):(\d{4}-\d{2}-\d{2})$/)
      if (match) {
        const model = match[1]
        const date = match[2]
        const data = await redis.client.hgetall(key)
        if (data && Object.keys(data).length > 0) {
          if (!stats.models[model]) {
            stats.models[model] = { daily: {}, monthly: {} }
          }
          stats.models[model].daily[date] = data
        }
      }
    }

    // Per-model monthly stats
    const modelMonthlyPattern = `usage:${keyId}:model:monthly:*`
    const modelMonthlyKeys = await redis.client.keys(modelMonthlyPattern)
    for (const key of modelMonthlyKeys) {
      const match = key.match(/usage:.+:model:monthly:(.+):(\d{4}-\d{2})$/)
      if (match) {
        const model = match[1]
        const month = match[2]
        const data = await redis.client.hgetall(key)
        if (data && Object.keys(data).length > 0) {
          if (!stats.models[model]) {
            stats.models[model] = { daily: {}, monthly: {} }
          }
          stats.models[model].monthly[month] = data
        }
      }
    }

    return stats
  } catch (error) {
    logger.warn(`⚠️ Failed to export usage stats for ${keyId}: ${error.message}`)
    return null
  }
}
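
// Writes exported stats back field-by-field through a single Redis pipeline,
// so one batched round trip covers all periods and models for the key.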
async function importUsageStats(keyId, stats) {
  try {
    if (!stats) {
      return
    }

    const pipeline = redis.client.pipeline()
    let importCount = 0

    // Lifetime totals
    if (stats.total && Object.keys(stats.total).length > 0) {
      for (const [field, value] of Object.entries(stats.total)) {
        pipeline.hset(`usage:${keyId}`, field, value)
      }
      importCount++
    }

    // Daily stats
    if (stats.daily) {
      for (const [date, data] of Object.entries(stats.daily)) {
        for (const [field, value] of Object.entries(data)) {
          pipeline.hset(`usage:daily:${keyId}:${date}`, field, value)
        }
        importCount++
      }
    }

    // Monthly stats
    if (stats.monthly) {
      for (const [month, data] of Object.entries(stats.monthly)) {
        for (const [field, value] of Object.entries(data)) {
          pipeline.hset(`usage:monthly:${keyId}:${month}`, field, value)
        }
        importCount++
      }
    }

    // Hourly stats
    if (stats.hourly) {
      for (const [hour, data] of Object.entries(stats.hourly)) {
        for (const [field, value] of Object.entries(data)) {
          pipeline.hset(`usage:hourly:${keyId}:${hour}`, field, value)
        }
        importCount++
      }
    }

    // Per-model stats
    if (stats.models) {
      for (const [model, modelStats] of Object.entries(stats.models)) {
        if (modelStats.daily) {
          for (const [date, data] of Object.entries(modelStats.daily)) {
            for (const [field, value] of Object.entries(data)) {
              pipeline.hset(`usage:${keyId}:model:daily:${model}:${date}`, field, value)
            }
            importCount++
          }
        }

        if (modelStats.monthly) {
          for (const [month, data] of Object.entries(modelStats.monthly)) {
            for (const [field, value] of Object.entries(data)) {
              pipeline.hset(`usage:${keyId}:model:monthly:${model}:${month}`, field, value)
            }
            importCount++
          }
        }
      }
    }

    await pipeline.exec()
    logger.info(` 📊 Imported ${importCount} usage stat entries for API Key ${keyId}`)
  } catch (error) {
    logger.warn(`⚠️ Failed to import usage stats for ${keyId}: ${error.message}`)
  }
}
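
// Redacts secrets per record type so an export can be shared safely.
// Sanitized backups cannot be fully re-imported - the secrets are gone.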
function sanitizeData(data, type) {
  const sanitized = { ...data }

  switch (type) {
    case 'apikey':
      if (sanitized.apiKey) {
        sanitized.apiKey = `${sanitized.apiKey.substring(0, 10)}...[REDACTED]`
      }
      break

    case 'claude_account':
      if (sanitized.email) {
        sanitized.email = '[REDACTED]'
      }
      if (sanitized.password) {
        sanitized.password = '[REDACTED]'
      }
      if (sanitized.accessToken) {
        sanitized.accessToken = '[REDACTED]'
      }
      if (sanitized.refreshToken) {
        sanitized.refreshToken = '[REDACTED]'
      }
      if (sanitized.claudeAiOauth) {
        sanitized.claudeAiOauth = '[REDACTED]'
      }
      if (sanitized.proxyPassword) {
        sanitized.proxyPassword = '[REDACTED]'
      }
      break

    case 'gemini_account':
      if (sanitized.geminiOauth) {
        sanitized.geminiOauth = '[REDACTED]'
      }
      if (sanitized.accessToken) {
        sanitized.accessToken = '[REDACTED]'
      }
      if (sanitized.refreshToken) {
        sanitized.refreshToken = '[REDACTED]'
      }
      if (sanitized.proxyPassword) {
        sanitized.proxyPassword = '[REDACTED]'
      }
      break

    case 'admin':
      if (sanitized.password) {
        sanitized.password = '[REDACTED]'
      }
      break
  }

  return sanitized
}
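
// Export flow: connect to Redis, dump each requested data type, then write a
// JSON file whose metadata records whether the payload was sanitized and/or
// decrypted - importData() relies on that metadata to decide re-encryption.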
async function exportData() {
  try {
    const outputFile = params.output || `backup-${new Date().toISOString().split('T')[0]}.json`
    const types = params.types ? params.types.split(',') : ['all']
    const shouldSanitize = params.sanitize === true
    const shouldDecrypt = params.decrypt !== false

    logger.info('🔄 Starting data export...')
    logger.info(`📁 Output file: ${outputFile}`)
    logger.info(`📋 Data types: ${types.join(', ')}`)
    logger.info(`🔒 Sanitize sensitive data: ${shouldSanitize ? 'YES' : 'NO'}`)
    logger.info(`🔓 Decrypt data: ${shouldDecrypt ? 'YES' : 'NO'}`)

    await redis.connect()
    logger.success('✅ Connected to Redis')

    const exportDataObj = {
      metadata: {
        version: '2.0',
        exportDate: new Date().toISOString(),
        sanitized: shouldSanitize,
        decrypted: shouldDecrypt,
        types
      },
      data: {}
    }

    // Export API Keys (optionally with their usage statistics)
    if (types.includes('all') || types.includes('apikeys')) {
      logger.info('📤 Exporting API Keys...')
      const keys = await redis.client.keys('apikey:*')
      const apiKeys = []

      for (const key of keys) {
        // Skip the hash lookup index; it is rebuilt on import
        if (key === 'apikey:hash_map') {
          continue
        }

        const data = await redis.client.hgetall(key)
        if (data && Object.keys(data).length > 0) {
          const keyId = data.id

          if (keyId && (types.includes('all') || types.includes('stats'))) {
            data.usageStats = await exportUsageStats(keyId)
          }

          apiKeys.push(shouldSanitize ? sanitizeData(data, 'apikey') : data)
        }
      }

      exportDataObj.data.apiKeys = apiKeys
      logger.success(`✅ Exported ${apiKeys.length} API Keys`)
    }

    // Export Claude and Gemini accounts
    if (types.includes('all') || types.includes('accounts')) {
      logger.info('📤 Exporting Claude accounts...')
      const keys = await redis.client.keys('claude:account:*')
      logger.info(`Found ${keys.length} Claude account keys in Redis`)
      const accounts = []

      for (const key of keys) {
        const data = await redis.client.hgetall(key)

        if (data && Object.keys(data).length > 0) {
          if (shouldDecrypt && !shouldSanitize) {
            if (data.email) {
              data.email = decryptClaudeData(data.email)
            }
            if (data.password) {
              data.password = decryptClaudeData(data.password)
            }
            if (data.accessToken) {
              data.accessToken = decryptClaudeData(data.accessToken)
            }
            if (data.refreshToken) {
              data.refreshToken = decryptClaudeData(data.refreshToken)
            }
            if (data.claudeAiOauth) {
              const decrypted = decryptClaudeData(data.claudeAiOauth)
              try {
                data.claudeAiOauth = JSON.parse(decrypted)
              } catch (e) {
                data.claudeAiOauth = decrypted
              }
            }
          }

          accounts.push(shouldSanitize ? sanitizeData(data, 'claude_account') : data)
        }
      }

      exportDataObj.data.claudeAccounts = accounts
      logger.success(`✅ Exported ${accounts.length} Claude accounts`)

      logger.info('📤 Exporting Gemini accounts...')
      const geminiKeys = await redis.client.keys('gemini_account:*')
      logger.info(`Found ${geminiKeys.length} Gemini account keys in Redis`)
      const geminiAccounts = []

      for (const key of geminiKeys) {
        const data = await redis.client.hgetall(key)

        if (data && Object.keys(data).length > 0) {
          if (shouldDecrypt && !shouldSanitize) {
            if (data.geminiOauth) {
              const decrypted = decryptGeminiData(data.geminiOauth)
              try {
                data.geminiOauth = JSON.parse(decrypted)
              } catch (e) {
                data.geminiOauth = decrypted
              }
            }
            if (data.accessToken) {
              data.accessToken = decryptGeminiData(data.accessToken)
            }
            if (data.refreshToken) {
              data.refreshToken = decryptGeminiData(data.refreshToken)
            }
          }

          geminiAccounts.push(shouldSanitize ? sanitizeData(data, 'gemini_account') : data)
        }
      }

      exportDataObj.data.geminiAccounts = geminiAccounts
      logger.success(`✅ Exported ${geminiAccounts.length} Gemini accounts`)
    }

    // Export admins
    if (types.includes('all') || types.includes('admins')) {
      logger.info('📤 Exporting admins...')
      const keys = await redis.client.keys('admin:*')
      const admins = []

      for (const key of keys) {
        // Skip username -> id index entries; they are rebuilt on import
        if (key.includes('admin_username:')) {
          continue
        }

        const data = await redis.client.hgetall(key)
        if (data && Object.keys(data).length > 0) {
          admins.push(shouldSanitize ? sanitizeData(data, 'admin') : data)
        }
      }

      exportDataObj.data.admins = admins
      logger.success(`✅ Exported ${admins.length} admins`)
    }

    // Export system-wide per-model statistics
    if (types.includes('all') || types.includes('stats')) {
      logger.info('📤 Exporting global model statistics...')
      const globalStats = {
        daily: {},
        monthly: {},
        hourly: {}
      }

      const globalDailyPattern = 'usage:model:daily:*'
      const globalDailyKeys = await redis.client.keys(globalDailyPattern)
      for (const key of globalDailyKeys) {
        const match = key.match(/usage:model:daily:(.+):(\d{4}-\d{2}-\d{2})$/)
        if (match) {
          const model = match[1]
          const date = match[2]
          const data = await redis.client.hgetall(key)
          if (data && Object.keys(data).length > 0) {
            if (!globalStats.daily[date]) {
              globalStats.daily[date] = {}
            }
            globalStats.daily[date][model] = data
          }
        }
      }

      const globalMonthlyPattern = 'usage:model:monthly:*'
      const globalMonthlyKeys = await redis.client.keys(globalMonthlyPattern)
      for (const key of globalMonthlyKeys) {
        const match = key.match(/usage:model:monthly:(.+):(\d{4}-\d{2})$/)
        if (match) {
          const model = match[1]
          const month = match[2]
          const data = await redis.client.hgetall(key)
          if (data && Object.keys(data).length > 0) {
            if (!globalStats.monthly[month]) {
              globalStats.monthly[month] = {}
            }
            globalStats.monthly[month][model] = data
          }
        }
      }

      const globalHourlyPattern = 'usage:model:hourly:*'
      const globalHourlyKeys = await redis.client.keys(globalHourlyPattern)
      for (const key of globalHourlyKeys) {
        const match = key.match(/usage:model:hourly:(.+):(\d{4}-\d{2}-\d{2}:\d{2})$/)
        if (match) {
          const model = match[1]
          const hour = match[2]
          const data = await redis.client.hgetall(key)
          if (data && Object.keys(data).length > 0) {
            if (!globalStats.hourly[hour]) {
              globalStats.hourly[hour] = {}
            }
            globalStats.hourly[hour][model] = data
          }
        }
      }

      exportDataObj.data.globalModelStats = globalStats
      logger.success('✅ Exported global model statistics')
    }

    await fs.writeFile(outputFile, JSON.stringify(exportDataObj, null, 2))

    // Print a summary
    console.log(`\n${'='.repeat(60)}`)
    console.log('✅ Export Complete!')
    console.log('='.repeat(60))
    console.log(`Output file: ${outputFile}`)
    console.log(`File size: ${(await fs.stat(outputFile)).size} bytes`)

    if (exportDataObj.data.apiKeys) {
      console.log(`API Keys: ${exportDataObj.data.apiKeys.length}`)
    }
    if (exportDataObj.data.claudeAccounts) {
      console.log(`Claude Accounts: ${exportDataObj.data.claudeAccounts.length}`)
    }
    if (exportDataObj.data.geminiAccounts) {
      console.log(`Gemini Accounts: ${exportDataObj.data.geminiAccounts.length}`)
    }
    if (exportDataObj.data.admins) {
      console.log(`Admins: ${exportDataObj.data.admins.length}`)
    }
    console.log('='.repeat(60))

    if (shouldSanitize) {
      logger.warn('⚠️ Sensitive data has been sanitized in this export.')
    }
    if (shouldDecrypt) {
      logger.info('🔓 Encrypted data has been decrypted for portability.')
    }
  } catch (error) {
    logger.error('💥 Export failed:', error)
    process.exit(1)
  } finally {
    await redis.disconnect()
    rl.close()
  }
}

function showHelp() {
  console.log(`
Enhanced Data Transfer Tool for Claude Relay Service

This tool handles encrypted data export/import between environments.

Usage:
  node scripts/data-transfer-enhanced.js <command> [options]

Commands:
  export    Export data from Redis to a JSON file
  import    Import data from a JSON file to Redis

Export Options:
  --output=FILE      Output filename (default: backup-YYYY-MM-DD.json)
  --types=TYPE,...   Data types: apikeys,accounts,admins,stats,all (default: all)
                     stats: Include usage statistics with API keys
  --sanitize         Remove sensitive data from export
  --decrypt=false    Keep data encrypted (default: true - decrypt for portability)

Import Options:
  --input=FILE       Input filename (required)
  --force            Overwrite existing data without asking
  --skip-conflicts   Skip conflicting data without asking

Important Notes:
  - The tool automatically handles encryption/decryption during import
  - If importing decrypted data, it will be re-encrypted automatically
  - If importing encrypted data, it will be stored as-is
  - Sanitized exports cannot be properly imported (missing sensitive data)
  - Automatic handling of plaintext API Keys
    * Detects plaintext vs hashed keys by format (a 64-char hex string is treated as already hashed)
    * Plaintext API Keys are hashed automatically during import
    * Hash mappings are rebuilt correctly for imported keys
    * Works with any key prefix (sk-, cr_, custom) - no manual conversion needed

Examples:
  # Export all data with decryption (for migration)
  node scripts/data-transfer-enhanced.js export

  # Export without decrypting (for backup)
  node scripts/data-transfer-enhanced.js export --decrypt=false

  # Import data (auto-handles encryption and plaintext API keys)
  node scripts/data-transfer-enhanced.js import --input=backup.json

  # Import with force overwrite
  node scripts/data-transfer-enhanced.js import --input=backup.json --force
`)
}
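
// Import flow: validate the backup's structure and metadata, confirm with the
// operator, then restore each data type. Decrypted backups are re-encrypted,
// plaintext API Keys are hashed, and lookup indexes are rebuilt.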
async function importData() {
  try {
    const inputFile = params.input
    if (!inputFile) {
      logger.error('❌ Please specify input file with --input=filename.json')
      process.exit(1)
    }

    const forceOverwrite = params.force === true
    const skipConflicts = params['skip-conflicts'] === true

    logger.info('🔄 Starting data import...')
    logger.info(`📁 Input file: ${inputFile}`)
    logger.info(
      `⚡ Mode: ${forceOverwrite ? 'FORCE OVERWRITE' : skipConflicts ? 'SKIP CONFLICTS' : 'ASK ON CONFLICT'}`
    )

    const fileContent = await fs.readFile(inputFile, 'utf8')
    const importDataObj = JSON.parse(fileContent)

    // Validate the backup file structure
    if (!importDataObj.metadata || !importDataObj.data) {
      logger.error('❌ Invalid backup file format')
      process.exit(1)
    }

    logger.info(`📅 Backup date: ${importDataObj.metadata.exportDate}`)
    logger.info(`🔒 Sanitized: ${importDataObj.metadata.sanitized ? 'YES' : 'NO'}`)
    logger.info(`🔓 Decrypted: ${importDataObj.metadata.decrypted ? 'YES' : 'NO'}`)

    if (importDataObj.metadata.sanitized) {
      logger.warn('⚠️ This backup contains sanitized data. Sensitive fields will be missing!')
      const proceed = await askConfirmation('Continue with sanitized data?')
      if (!proceed) {
        logger.info('❌ Import cancelled')
        return
      }
    }

    // Show a summary before touching Redis
    console.log(`\n${'='.repeat(60)}`)
    console.log('📋 Import Summary:')
    console.log('='.repeat(60))
    if (importDataObj.data.apiKeys) {
      console.log(`API Keys to import: ${importDataObj.data.apiKeys.length}`)
    }
    if (importDataObj.data.claudeAccounts) {
      console.log(`Claude Accounts to import: ${importDataObj.data.claudeAccounts.length}`)
    }
    if (importDataObj.data.geminiAccounts) {
      console.log(`Gemini Accounts to import: ${importDataObj.data.geminiAccounts.length}`)
    }
    if (importDataObj.data.admins) {
      console.log(`Admins to import: ${importDataObj.data.admins.length}`)
    }
    console.log(`${'='.repeat(60)}\n`)

    const confirmed = await askConfirmation('⚠️ Proceed with import?')
    if (!confirmed) {
      logger.info('❌ Import cancelled')
      return
    }

    await redis.connect()
    logger.success('✅ Connected to Redis')

    const stats = {
      imported: 0,
      skipped: 0,
      errors: 0
    }

    // Import API Keys
    if (importDataObj.data.apiKeys) {
      logger.info('\n📥 Importing API Keys...')
      for (const apiKey of importDataObj.data.apiKeys) {
        try {
          const exists = await redis.client.exists(`apikey:${apiKey.id}`)

          if (exists && !forceOverwrite) {
            if (skipConflicts) {
              logger.warn(`⏭️ Skipped existing API Key: ${apiKey.name} (${apiKey.id})`)
              stats.skipped++
              continue
            } else {
              const overwrite = await askConfirmation(
                `API Key "${apiKey.name}" (${apiKey.id}) exists. Overwrite?`
              )
              if (!overwrite) {
                stats.skipped++
                continue
              }
            }
          }

          // Usage stats are stored under separate keys, not as a hash field
          const { usageStats } = apiKey

          const apiKeyData = { ...apiKey }
          delete apiKeyData.usageStats

          // Hash plaintext keys; keep already-hashed keys as-is
          let plainTextApiKey = null
          let hashedApiKey = null

          if (apiKeyData.apiKey && isPlaintextApiKey(apiKeyData.apiKey)) {
            plainTextApiKey = apiKeyData.apiKey
            hashedApiKey = hashApiKey(plainTextApiKey)
            logger.info(`🔐 Detected plaintext API Key for: ${apiKey.name} (${apiKey.id})`)
          } else if (apiKeyData.apiKey) {
            hashedApiKey = apiKeyData.apiKey
            logger.info(`🔍 Using existing hashed API Key for: ${apiKey.name} (${apiKey.id})`)
          }

          if (hashedApiKey) {
            apiKeyData.apiKey = hashedApiKey
          }

          const pipeline = redis.client.pipeline()
          for (const [field, value] of Object.entries(apiKeyData)) {
            pipeline.hset(`apikey:${apiKey.id}`, field, value)
          }
          await pipeline.exec()

          // Rebuild the hash -> id lookup used for key authentication
          if (!importDataObj.metadata.sanitized && hashedApiKey) {
            await redis.client.hset('apikey:hash_map', hashedApiKey, apiKey.id)
            logger.info(
              `📝 Updated hash mapping: ${hashedApiKey.substring(0, 8)}... -> ${apiKey.id}`
            )
          }

          if (usageStats) {
            await importUsageStats(apiKey.id, usageStats)
          }

          logger.success(`✅ Imported API Key: ${apiKey.name} (${apiKey.id})`)
          stats.imported++
        } catch (error) {
          logger.error(`❌ Failed to import API Key ${apiKey.id}:`, error.message)
          stats.errors++
        }
      }
    }

    // Import Claude accounts
    if (importDataObj.data.claudeAccounts) {
      logger.info('\n📥 Importing Claude accounts...')
      for (const account of importDataObj.data.claudeAccounts) {
        try {
          const exists = await redis.client.exists(`claude:account:${account.id}`)

          if (exists && !forceOverwrite) {
            if (skipConflicts) {
              logger.warn(`⏭️ Skipped existing Claude account: ${account.name} (${account.id})`)
              stats.skipped++
              continue
            } else {
              const overwrite = await askConfirmation(
                `Claude account "${account.name}" (${account.id}) exists. Overwrite?`
              )
              if (!overwrite) {
                stats.skipped++
                continue
              }
            }
          }

          const accountData = { ...account }

          // Re-encrypt sensitive fields if the backup was exported decrypted
          if (importDataObj.metadata.decrypted && !importDataObj.metadata.sanitized) {
            logger.info(`🔐 Re-encrypting sensitive data for Claude account: ${account.name}`)

            if (accountData.email) {
              accountData.email = encryptClaudeData(accountData.email)
            }
            if (accountData.password) {
              accountData.password = encryptClaudeData(accountData.password)
            }
            if (accountData.accessToken) {
              accountData.accessToken = encryptClaudeData(accountData.accessToken)
            }
            if (accountData.refreshToken) {
              accountData.refreshToken = encryptClaudeData(accountData.refreshToken)
            }
            if (accountData.claudeAiOauth) {
              const oauthStr =
                typeof accountData.claudeAiOauth === 'object'
                  ? JSON.stringify(accountData.claudeAiOauth)
                  : accountData.claudeAiOauth
              accountData.claudeAiOauth = encryptClaudeData(oauthStr)
            }
          }

          const pipeline = redis.client.pipeline()
          for (const [field, value] of Object.entries(accountData)) {
            if (field === 'claudeAiOauth' && typeof value === 'object') {
              // Redis hashes store strings; serialize OAuth objects
              pipeline.hset(`claude:account:${account.id}`, field, JSON.stringify(value))
            } else {
              pipeline.hset(`claude:account:${account.id}`, field, value)
            }
          }
          await pipeline.exec()

          logger.success(`✅ Imported Claude account: ${account.name} (${account.id})`)
          stats.imported++
        } catch (error) {
          logger.error(`❌ Failed to import Claude account ${account.id}:`, error.message)
          stats.errors++
        }
      }
    }

    // Import Gemini accounts
    if (importDataObj.data.geminiAccounts) {
      logger.info('\n📥 Importing Gemini accounts...')
      for (const account of importDataObj.data.geminiAccounts) {
        try {
          const exists = await redis.client.exists(`gemini_account:${account.id}`)

          if (exists && !forceOverwrite) {
            if (skipConflicts) {
              logger.warn(`⏭️ Skipped existing Gemini account: ${account.name} (${account.id})`)
              stats.skipped++
              continue
            } else {
              const overwrite = await askConfirmation(
                `Gemini account "${account.name}" (${account.id}) exists. Overwrite?`
              )
              if (!overwrite) {
                stats.skipped++
                continue
              }
            }
          }

          const accountData = { ...account }

          // Re-encrypt sensitive fields if the backup was exported decrypted
          if (importDataObj.metadata.decrypted && !importDataObj.metadata.sanitized) {
            logger.info(`🔐 Re-encrypting sensitive data for Gemini account: ${account.name}`)

            if (accountData.geminiOauth) {
              const oauthStr =
                typeof accountData.geminiOauth === 'object'
                  ? JSON.stringify(accountData.geminiOauth)
                  : accountData.geminiOauth
              accountData.geminiOauth = encryptGeminiData(oauthStr)
            }
            if (accountData.accessToken) {
              accountData.accessToken = encryptGeminiData(accountData.accessToken)
            }
            if (accountData.refreshToken) {
              accountData.refreshToken = encryptGeminiData(accountData.refreshToken)
            }
          }

          const pipeline = redis.client.pipeline()
          for (const [field, value] of Object.entries(accountData)) {
            if (field === 'geminiOauth' && typeof value === 'object') {
              // Serialize OAuth objects before hset (mirrors the Claude path above)
              pipeline.hset(`gemini_account:${account.id}`, field, JSON.stringify(value))
            } else {
              pipeline.hset(`gemini_account:${account.id}`, field, value)
            }
          }
          await pipeline.exec()

          logger.success(`✅ Imported Gemini account: ${account.name} (${account.id})`)
          stats.imported++
        } catch (error) {
          logger.error(`❌ Failed to import Gemini account ${account.id}:`, error.message)
          stats.errors++
        }
      }
    }

    // Import admins
    if (importDataObj.data.admins) {
      logger.info('\n📥 Importing admins...')
      for (const admin of importDataObj.data.admins) {
        try {
          const exists = await redis.client.exists(`admin:${admin.id}`)

          if (exists && !forceOverwrite) {
            if (skipConflicts) {
              logger.warn(`⏭️ Skipped existing admin: ${admin.username} (${admin.id})`)
              stats.skipped++
              continue
            } else {
              const overwrite = await askConfirmation(
                `Admin "${admin.username}" (${admin.id}) exists. Overwrite?`
              )
              if (!overwrite) {
                stats.skipped++
                continue
              }
            }
          }

          const pipeline = redis.client.pipeline()
          for (const [field, value] of Object.entries(admin)) {
            pipeline.hset(`admin:${admin.id}`, field, value)
          }
          await pipeline.exec()

          // Rebuild the username -> id index
          await redis.client.set(`admin_username:${admin.username}`, admin.id)

          logger.success(`✅ Imported admin: ${admin.username} (${admin.id})`)
          stats.imported++
        } catch (error) {
          logger.error(`❌ Failed to import admin ${admin.id}:`, error.message)
          stats.errors++
        }
      }
    }

    // Import global model statistics
    if (importDataObj.data.globalModelStats) {
      logger.info('\n📥 Importing global model statistics...')
      try {
        const globalStats = importDataObj.data.globalModelStats
        const pipeline = redis.client.pipeline()
        let globalStatCount = 0

        if (globalStats.daily) {
          for (const [date, models] of Object.entries(globalStats.daily)) {
            for (const [model, data] of Object.entries(models)) {
              for (const [field, value] of Object.entries(data)) {
                pipeline.hset(`usage:model:daily:${model}:${date}`, field, value)
              }
              globalStatCount++
            }
          }
        }

        if (globalStats.monthly) {
          for (const [month, models] of Object.entries(globalStats.monthly)) {
            for (const [model, data] of Object.entries(models)) {
              for (const [field, value] of Object.entries(data)) {
                pipeline.hset(`usage:model:monthly:${model}:${month}`, field, value)
              }
              globalStatCount++
            }
          }
        }

        if (globalStats.hourly) {
          for (const [hour, models] of Object.entries(globalStats.hourly)) {
            for (const [model, data] of Object.entries(models)) {
              for (const [field, value] of Object.entries(data)) {
                pipeline.hset(`usage:model:hourly:${model}:${hour}`, field, value)
              }
              globalStatCount++
            }
          }
        }

        await pipeline.exec()
        logger.success(`✅ Imported ${globalStatCount} global model stat entries`)
        stats.imported += globalStatCount
      } catch (error) {
        logger.error('❌ Failed to import global model stats:', error.message)
        stats.errors++
      }
    }

    // Final summary
    console.log(`\n${'='.repeat(60)}`)
    console.log('✅ Import Complete!')
    console.log('='.repeat(60))
    console.log(`Successfully imported: ${stats.imported}`)
    console.log(`Skipped: ${stats.skipped}`)
    console.log(`Errors: ${stats.errors}`)
    console.log('='.repeat(60))
  } catch (error) {
    logger.error('💥 Import failed:', error)
    process.exit(1)
  } finally {
    await redis.disconnect()
    rl.close()
  }
}
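
// CLI entry point: dispatch to export/import, or print help.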
async function main() {
  if (!command || command === '--help' || command === 'help') {
    showHelp()
    process.exit(0)
  }

  switch (command) {
    case 'export':
      await exportData()
      break

    case 'import':
      await importData()
      break

    default:
      logger.error(`❌ Unknown command: ${command}`)
      showHelp()
      process.exit(1)
  }
}

main().catch((error) => {
  logger.error('💥 Unexpected error:', error)
  process.exit(1)
})