Compare commits

...

2 Commits

Author SHA1 Message Date
Grief 5579c329f8 pull-missing.ts 2025-10-24 13:24:05 +01:00
Grief 6010b5a24c photos-diff.ts 2025-10-13 17:31:41 +01:00
4 changed files with 743 additions and 1 deletion

bin/photos-diff.ts 100755 (+569 lines)

@@ -0,0 +1,569 @@
#!/usr/bin/env node
import { execSync, spawn } from 'child_process';
import { createHash } from 'crypto';
import { readFileSync, statSync, writeFileSync } from 'fs';
import { parseArgs } from 'util';
import Redis from 'iovalkey';
import { createInterface } from 'readline';
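// Compares an Android photo directory (reached via adb) against a local backup
// directory: both sides are listed, files are grouped by size, size-matched
// candidates (plus Android files with no size match) are SHA-256 hashed, with
// sizes and hashes cached in Valkey/Redis, and the match/missing/duplicate
// results are written to results.json for pull-missing.ts to act on.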
const CACHE_TTL = 3600; // 1 hour
const CACHE_PREFIX_ANDROID = 'photo-sync:android:';
const CACHE_PREFIX_LOCAL = 'photo-sync:local:';
interface FileInfo {
path: string;
size: number;
hash?: string;
}
interface Results {
matched: number;
missingInBackup: FileInfo[];
duplicatesOnPhone: Record<string, string[]>;
duplicatesInBackup: Record<string, string[]>;
}
function parseArguments() {
const { values } = parseArgs({
options: {
local: { type: 'string' },
android: { type: 'string' },
},
});
if (!values.local || !values.android) {
console.error('Usage: ./photos-diff.ts --local DIR --android DIR');
process.exit(1);
}
return { localDir: values.local, androidDir: values.android };
}
async function getLocalFiles(dir: string, redis: Redis): Promise<FileInfo[]> {
console.log(`\n📁 Scanning local directory: ${dir}`);
// Stream the find output line by line instead of buffering the whole listing
const files: string[] = [];
const find = spawn('find', [dir, '-type', 'f']);
const rl = createInterface({ input: find.stdout });
for await (const line of rl) {
if (line.trim()) {
files.push(line.trim());
}
}
await new Promise((resolve, reject) => {
find.on('close', resolve);
find.on('error', reject);
});
console.log(`📊 Found ${files.length} files, getting sizes...`);
const fileInfos: FileInfo[] = [];
const startTime = Date.now();
for (let i = 0; i < files.length; i++) {
const file = files[i];
const cacheKey = CACHE_PREFIX_LOCAL + file;
try {
// Check cache first
const cached = await redis.get(cacheKey);
if (cached) {
const data = JSON.parse(cached);
fileInfos.push({ path: file, size: data.size, hash: data.hash });
} else {
// Get from filesystem
const stat = statSync(file);
const fileInfo: FileInfo = { path: file, size: stat.size };
fileInfos.push(fileInfo);
// Cache it
await redis.setex(cacheKey, CACHE_TTL, JSON.stringify({ size: stat.size }));
}
// Progress and ETA
const processed = i + 1;
const elapsed = Date.now() - startTime;
const avgTime = elapsed / processed;
const remaining = files.length - processed;
const eta = Math.round((avgTime * remaining) / 1000);
const etaStr = eta > 60
? `${Math.floor(eta / 60)}m ${eta % 60}s`
: `${eta}s`;
process.stdout.write(`\r📁 Progress: ${processed}/${files.length} files | ETA: ${etaStr} `);
} catch (err) {
console.error(`\n❌ Error reading local file: ${file}`);
throw err;
}
}
console.log(`\n✅ Found ${fileInfos.length} local files`);
return fileInfos;
}
async function getAndroidFiles(dir: string, redis: Redis): Promise<FileInfo[]> {
console.log(`\n📱 Scanning Android directory: ${dir}`);
// Stream the adb find output line by line instead of buffering the whole listing
const files: string[] = [];
const adb = spawn('adb', ['shell', `find '${dir}' -type f 2>/dev/null`]);
const rl = createInterface({ input: adb.stdout });
for await (const line of rl) {
const trimmed = line.trim();
if (trimmed) {
files.push(trimmed);
}
}
await new Promise((resolve, reject) => {
adb.on('close', resolve);
adb.on('error', reject);
});
console.log(`📊 Getting file sizes for ${files.length} files...`);
const fileInfos: FileInfo[] = [];
const startTime = Date.now();
const BATCH_SIZE = 50;
let processed = 0;
for (let i = 0; i < files.length; i += BATCH_SIZE) {
const batch = files.slice(i, i + BATCH_SIZE);
// Check cache first for this batch
const cachedResults: FileInfo[] = [];
const needFetch: string[] = [];
for (const file of batch) {
const cacheKey = CACHE_PREFIX_ANDROID + file;
const cached = await redis.get(cacheKey);
if (cached) {
const data = JSON.parse(cached);
cachedResults.push({ path: file, size: data.size, hash: data.hash });
} else {
needFetch.push(file);
}
}
fileInfos.push(...cachedResults);
processed += cachedResults.length;
// Fetch uncached files in batch
if (needFetch.length > 0) {
try {
// Build shell script to get all sizes in one adb call
const script = needFetch.map(f => `stat -c '%s' '${f}' 2>/dev/null || echo "ERROR"`).join('; ');
const statOutput = execSync(`adb shell "${script}"`, { encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 });
const sizes = statOutput.trim().split('\n');
if (sizes.length !== needFetch.length) {
console.error(`\n❌ Batch size mismatch: expected ${needFetch.length}, got ${sizes.length}`);
throw new Error('Batch stat failed');
}
for (let j = 0; j < needFetch.length; j++) {
const file = needFetch[j];
const sizeStr = sizes[j].trim();
if (sizeStr === 'ERROR' || sizeStr === '') {
console.error(`\n❌ Failed to get size for: ${file}`);
continue;
}
const size = parseInt(sizeStr, 10);
if (isNaN(size)) {
console.error(`\n❌ Invalid size for Android file: ${file} (got: ${sizeStr})`);
continue;
}
const fileInfo: FileInfo = { path: file, size };
fileInfos.push(fileInfo);
// Cache it
await redis.setex(CACHE_PREFIX_ANDROID + file, CACHE_TTL, JSON.stringify({ size }));
}
processed += needFetch.length;
} catch (err) {
console.error(`\n❌ Error in batch stat, falling back to individual calls`);
// Fallback to individual calls for this batch
for (const file of needFetch) {
try {
const stat = execSync(`adb shell "stat -c '%s' '${file}'"`, { encoding: 'utf-8' });
const size = parseInt(stat.trim(), 10);
if (!isNaN(size)) {
const fileInfo: FileInfo = { path: file, size };
fileInfos.push(fileInfo);
await redis.setex(CACHE_PREFIX_ANDROID + file, CACHE_TTL, JSON.stringify({ size }));
}
} catch (e) {
console.error(`\n❌ Failed to stat: ${file}`);
}
processed++;
}
}
}
// Progress and ETA
if (processed > 0) {
const elapsed = Date.now() - startTime;
const avgTime = elapsed / processed;
const remaining = files.length - processed;
const eta = Math.round((avgTime * remaining) / 1000);
const etaStr = eta > 60
? `${Math.floor(eta / 60)}m ${eta % 60}s`
: `${eta}s`;
process.stdout.write(`\r📊 Progress: ${processed}/${files.length} files | ETA: ${etaStr} `);
}
}
console.log(`\n✅ Found ${fileInfos.length} Android files`);
return fileInfos;
}
function sha256Local(path: string): string {
const hash = createHash('sha256');
const data = readFileSync(path);
hash.update(data);
return hash.digest('hex');
}
function sha256Android(path: string): string {
const output = execSync(`adb shell "sha256sum '${path}'"`, { encoding: 'utf-8' });
const hash = output.trim().split(/\s+/)[0];
if (!hash || hash.length !== 64) {
throw new Error(`Invalid hash from Android: ${output}`);
}
return hash;
}
function groupBySize(files: FileInfo[]): Map<number, FileInfo[]> {
const groups = new Map<number, FileInfo[]>();
for (const file of files) {
const group = groups.get(file.size) || [];
group.push(file);
groups.set(file.size, group);
}
return groups;
}
async function calculateHashes(
files: FileInfo[],
source: 'local' | 'android',
redis: Redis,
cachePrefix: string
): Promise<void> {
const totalFiles = files.length;
if (totalFiles === 0) return;
console.log(`\n🔐 Computing hashes for ${totalFiles} ${source} files...`);
const startTime = Date.now();
if (source === 'android') {
// Batch processing for Android: each adb shell round-trip is slow, so hash many files per call
const BATCH_SIZE = 20;
let processed = 0;
for (let i = 0; i < files.length; i += BATCH_SIZE) {
const batch = files.slice(i, i + BATCH_SIZE);
// Check cache first
const needHash: FileInfo[] = [];
for (const file of batch) {
const cacheKey = cachePrefix + file.path;
const cached = await redis.get(cacheKey);
if (cached) {
const data = JSON.parse(cached);
if (data.hash) {
file.hash = data.hash;
processed++;
continue;
}
}
needHash.push(file);
}
// Hash uncached files in batch
if (needHash.length > 0) {
try {
// Build batch sha256sum command
const paths = needHash.map(f => `'${f.path}'`).join(' ');
const hashOutput = execSync(`adb shell "sha256sum ${paths} 2>/dev/null"`, {
encoding: 'utf-8',
maxBuffer: 10 * 1024 * 1024
});
const lines = hashOutput.trim().split('\n');
// sha256sum prints "<hash>  <path>"; map hashes back by path so that a file
// that fails to hash (silenced by 2>/dev/null) cannot shift later hashes onto
// the wrong files, which index-based matching would do
const hashByPath = new Map<string, string>();
for (const line of lines) {
const m = line.trim().match(/^([0-9a-f]{64})\s+(.+)$/);
if (m) {
hashByPath.set(m[2], m[1]);
}
}
for (const file of needHash) {
const hash = hashByPath.get(file.path);
if (hash) {
file.hash = hash;
await redis.setex(
cachePrefix + file.path,
CACHE_TTL,
JSON.stringify({ size: file.size, hash })
);
}
}
processed += needHash.length;
} catch (err) {
console.error(`\n❌ Batch hashing failed, falling back to individual hashing`);
// Fallback to individual hashing
for (const file of needHash) {
try {
const output = execSync(`adb shell "sha256sum '${file.path}'"`, { encoding: 'utf-8' });
const hash = output.trim().split(/\s+/)[0];
if (hash && hash.length === 64) {
file.hash = hash;
await redis.setex(
cachePrefix + file.path,
CACHE_TTL,
JSON.stringify({ size: file.size, hash })
);
}
} catch (e) {
console.error(`\n❌ Failed to hash: ${file.path}`);
}
processed++;
}
}
}
// Progress and ETA
const elapsed = Date.now() - startTime;
const avgTime = elapsed / processed;
const remaining = totalFiles - processed;
const eta = Math.round((avgTime * remaining) / 1000);
const etaStr = eta > 60
? `${Math.floor(eta / 60)}m ${eta % 60}s`
: `${eta}s`;
process.stdout.write(`\r🔐 Progress: ${processed}/${totalFiles} files | ETA: ${etaStr} `);
}
} else {
// Local files - keep sequential for now
for (let i = 0; i < files.length; i++) {
const file = files[i];
const cacheKey = cachePrefix + file.path;
try {
// Check if hash is already in cache
const cached = await redis.get(cacheKey);
if (cached) {
const data = JSON.parse(cached);
if (data.hash) {
file.hash = data.hash;
// Progress and ETA
const processed = i + 1;
const elapsed = Date.now() - startTime;
const avgTime = elapsed / processed;
const remaining = totalFiles - processed;
const eta = Math.round((avgTime * remaining) / 1000);
const etaStr = eta > 60
? `${Math.floor(eta / 60)}m ${eta % 60}s`
: `${eta}s`;
process.stdout.write(`\r🔐 Progress: ${processed}/${totalFiles} files | ETA: ${etaStr} (cached) `);
continue;
}
}
// Compute hash
file.hash = sha256Local(file.path);
// Update cache with hash
await redis.setex(cacheKey, CACHE_TTL, JSON.stringify({ size: file.size, hash: file.hash }));
} catch (err) {
console.error(`\n❌ Error hashing ${source} file: ${file.path}`);
throw err;
}
// Progress and ETA
const processed = i + 1;
const elapsed = Date.now() - startTime;
const avgTime = elapsed / processed;
const remaining = totalFiles - processed;
const eta = Math.round((avgTime * remaining) / 1000);
const etaStr = eta > 60
? `${Math.floor(eta / 60)}m ${eta % 60}s`
: `${eta}s`;
process.stdout.write(`\r🔐 Progress: ${processed}/${totalFiles} files | ETA: ${etaStr} `);
}
}
console.log('\n✅ Hashing complete');
}
function findDuplicates(files: FileInfo[]): Record<string, string[]> {
const hashMap = new Map<string, string[]>();
for (const file of files) {
if (!file.hash) continue;
const paths = hashMap.get(file.hash) || [];
paths.push(file.path);
hashMap.set(file.hash, paths);
}
const duplicates: Record<string, string[]> = {};
for (const [hash, paths] of hashMap.entries()) {
if (paths.length > 1) {
duplicates[hash] = paths;
}
}
return duplicates;
}
async function main() {
const { localDir, androidDir } = parseArguments();
console.log('🚀 Starting backup verification...');
console.log('🔌 Connecting to Redis...');
const redis = new Redis();
redis.on('error', (err) => {
console.error('❌ Redis connection error:', err);
process.exit(1);
});
try {
// Step 1: Collect file lists
const localFiles = await getLocalFiles(localDir, redis);
const androidFiles = await getAndroidFiles(androidDir, redis);
// Step 2: Group by size
console.log('\n📊 Grouping files by size...');
const localBySize = groupBySize(localFiles);
const androidBySize = groupBySize(androidFiles);
// Step 3: Determine which files need hashing
const localNeedHash: FileInfo[] = [];
const androidNeedHash: FileInfo[] = [];
for (const [size, localGroup] of localBySize.entries()) {
const androidGroup = androidBySize.get(size);
if (androidGroup) {
// Need to hash all files that have matches by size
localNeedHash.push(...localGroup);
androidNeedHash.push(...androidGroup);
}
}
// Android files whose size has no local match must still be hashed so they appear in the missing-in-backup report
for (const [size, androidGroup] of androidBySize.entries()) {
if (!localBySize.has(size)) {
androidNeedHash.push(...androidGroup);
}
}
console.log(`🔐 ${localNeedHash.length} local + ${androidNeedHash.length} Android files need hashing`);
// Step 4: Calculate hashes
await calculateHashes(localNeedHash, 'local', redis, CACHE_PREFIX_LOCAL);
await calculateHashes(androidNeedHash, 'android', redis, CACHE_PREFIX_ANDROID);
// Step 5: Build hash maps
const localHashes = new Map<string, FileInfo[]>();
const androidHashes = new Map<string, FileInfo[]>();
for (const file of localFiles) {
if (!file.hash) continue;
const group = localHashes.get(file.hash) || [];
group.push(file);
localHashes.set(file.hash, group);
}
for (const file of androidFiles) {
if (!file.hash) continue;
const group = androidHashes.get(file.hash) || [];
group.push(file);
androidHashes.set(file.hash, group);
}
// Step 6: Find differences
console.log('\n🔍 Comparing files...');
const missingInBackup: FileInfo[] = [];
let matched = 0;
// Files on Android but not in backup
for (const [hash, androidGroup] of androidHashes.entries()) {
if (!localHashes.has(hash)) {
missingInBackup.push(...androidGroup);
} else {
matched += androidGroup.length;
}
}
// Step 7: Find duplicates
const duplicatesOnPhone = findDuplicates(androidFiles);
const duplicatesInBackup = findDuplicates(localFiles);
// Step 8: Output results
const results: Results = {
matched,
missingInBackup,
duplicatesOnPhone,
duplicatesInBackup,
};
console.log('\n' + '='.repeat(60));
console.log('📊 RESULTS');
console.log('='.repeat(60));
console.log(`📱 Total files on phone: ${androidFiles.length}`);
console.log(`✅ Matched in backup: ${results.matched}`);
console.log(`❌ MISSING in backup: ${results.missingInBackup.length}`);
if (results.missingInBackup.length > 0) {
console.log(`\n Missing files:`);
results.missingInBackup.forEach(f => console.log(` - ${f.path} (${f.size} bytes)`));
}
console.log(`\n🔄 Duplicates on phone: ${Object.keys(results.duplicatesOnPhone).length} groups`);
console.log(`🔄 Duplicates in backup: ${Object.keys(results.duplicatesInBackup).length} groups`);
console.log('='.repeat(60));
console.log('\n💾 Writing results to results.json...');
writeFileSync('results.json', JSON.stringify(results, null, 2));
console.log('✅ Done! Check results.json for details.');
if (results.missingInBackup.length > 0) {
console.log('\n⚠ WARNING: Some files are missing in backup!');
await redis.quit();
process.exit(1);
}
await redis.quit();
} catch (err) {
await redis.quit();
throw err;
}
}
main().catch(err => {
console.error('💥 Fatal error:', err);
process.exit(1);
});
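
A minimal usage sketch for the script above, assuming adb is on PATH with a device attached and a Valkey/Redis server is reachable at the client's default address (localhost:6379, since new Redis() is constructed with no options); the two directory paths are placeholders:

./bin/photos-diff.ts --local ~/backups/photos --android /sdcard/DCIM/Camera

The summary is printed to the terminal and written to results.json, and the script exits non-zero when files on the phone are missing from the backup.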

bin/pull-missing.ts 100755 (+149 lines)

@@ -0,0 +1,149 @@
#!/usr/bin/env node
import { execSync } from 'child_process';
import { existsSync, mkdirSync } from 'fs';
import { readFileSync } from 'fs';
import { basename, dirname, join } from 'path';
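// Reads the results.json written by photos-diff.ts and pulls each file that is
// missing from the backup into ./pulled-files via adb pull, renaming on collisions.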
interface FileInfo {
path: string;
size: number;
hash?: string;
}
interface Results {
matched: number;
missingInBackup: FileInfo[];
missingOnPhone?: FileInfo[]; // not emitted by photos-diff.ts
duplicatesOnPhone: Record<string, string[]>;
duplicatesInBackup: Record<string, string[]>;
}
function getUniqueFilename(targetPath: string): string {
if (!existsSync(targetPath)) {
return targetPath;
}
const dir = dirname(targetPath);
const ext = targetPath.match(/\.[^.]+$/)?.[0] || '';
const nameWithoutExt = basename(targetPath, ext);
let counter = 1;
let newPath: string;
do {
newPath = join(dir, `${nameWithoutExt}-${counter}${ext}`);
counter++;
} while (existsSync(newPath));
return newPath;
}
async function pullFile(androidPath: string, localDir: string): Promise<string> {
const filename = basename(androidPath);
const targetPath = join(localDir, filename);
const finalPath = getUniqueFilename(targetPath);
console.log(`📥 Pulling: ${androidPath}`);
console.log(` -> ${finalPath}`);
try {
execSync(`adb pull "${androidPath}" "${finalPath}"`, {
encoding: 'utf-8',
stdio: 'pipe'
});
return finalPath;
} catch (err: any) {
console.error(`❌ Failed to pull: ${androidPath}`);
console.error(` Error: ${err.message}`);
throw err;
}
}
async function main() {
const resultsPath = 'results.json';
if (!existsSync(resultsPath)) {
console.error(`❌ Error: ${resultsPath} not found`);
console.error('Run photos-diff.ts first to generate results.json');
process.exit(1);
}
console.log('📖 Reading results.json...');
const results: Results = JSON.parse(readFileSync(resultsPath, 'utf-8'));
const missingFiles = results.missingInBackup;
if (missingFiles.length === 0) {
console.log('✅ No missing files! Backup is complete.');
return;
}
console.log(`\n🔍 Found ${missingFiles.length} missing files`);
console.log(`📦 Total size: ${(missingFiles.reduce((sum, f) => sum + f.size, 0) / 1024 / 1024).toFixed(2)} MB`);
// Create target directory if needed
const targetDir = './pulled-files';
if (!existsSync(targetDir)) {
console.log(`\n📁 Creating directory: ${targetDir}`);
mkdirSync(targetDir, { recursive: true });
}
console.log(`\n🚀 Starting download to: ${targetDir}\n`);
const startTime = Date.now();
const pulled: string[] = [];
const failed: string[] = [];
for (let i = 0; i < missingFiles.length; i++) {
const file = missingFiles[i];
try {
const localPath = await pullFile(file.path, targetDir);
pulled.push(localPath);
} catch (err) {
failed.push(file.path);
}
// Progress
const processed = i + 1;
const elapsed = Date.now() - startTime;
const avgTime = elapsed / processed;
const remaining = missingFiles.length - processed;
const eta = Math.round((avgTime * remaining) / 1000);
const etaStr = eta > 60
? `${Math.floor(eta / 60)}m ${eta % 60}s`
: `${eta}s`;
console.log(`\n📊 Progress: ${processed}/${missingFiles.length} | ETA: ${etaStr}`);
console.log(` ✅ Success: ${pulled.length} | ❌ Failed: ${failed.length}\n`);
}
// Final report
const totalTime = Math.round((Date.now() - startTime) / 1000);
const timeStr = totalTime > 60
? `${Math.floor(totalTime / 60)}m ${totalTime % 60}s`
: `${totalTime}s`;
console.log('='.repeat(60));
console.log('📊 DOWNLOAD COMPLETE');
console.log('='.repeat(60));
console.log(`✅ Successfully pulled: ${pulled.length} files`);
console.log(`❌ Failed: ${failed.length} files`);
console.log(`⏱️ Total time: ${timeStr}`);
console.log(`📁 Files saved to: ${targetDir}`);
console.log('='.repeat(60));
if (failed.length > 0) {
console.log('\n❌ Failed files:');
failed.forEach(f => console.log(` - ${f}`));
process.exit(1);
}
}
main().catch(err => {
console.error('💥 Fatal error:', err);
process.exit(1);
});
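
A minimal usage sketch for the script above, assuming the results.json written by photos-diff.ts is in the current working directory and adb can reach the device:

./bin/pull-missing.ts

Missing files are pulled into ./pulled-files, filename collisions get -1, -2, ... suffixes, and the script exits non-zero if any pull fails.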


@@ -4,6 +4,7 @@
"": {
"dependencies": {
"chalk": "^5.5.0",
"iovalkey": "^0.3.3",
"ollama": "^0.5.17",
},
"devDependencies": {
@@ -12,12 +13,34 @@
},
},
"packages": {
"@iovalkey/commands": ["@iovalkey/commands@0.1.0", "", {}, "sha512-/B9W4qKSSITDii5nkBCHyPkIkAi+ealUtr1oqBJsLxjSRLka4pxun2VvMNSmcwgAMxgXtQfl0qRv7TE+udPJzg=="],
"@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"chalk": ["chalk@5.5.0", "", {}, "sha512-1tm8DTaJhPBG3bIkVeZt1iZM9GfSX2lzOeDVZH9R9ffRHpmHvxZ/QhgQH/aDTkswQVt+YHdXAdS/In/30OjCbg=="],
"cluster-key-slot": ["cluster-key-slot@1.1.2", "", {}, "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA=="],
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
"denque": ["denque@2.1.0", "", {}, "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="],
"iovalkey": ["iovalkey@0.3.3", "", { "dependencies": { "@iovalkey/commands": "^0.1.0", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", "lodash.defaults": "^4.2.0", "lodash.isarguments": "^3.1.0", "redis-errors": "^1.2.0", "redis-parser": "^3.0.0", "standard-as-callback": "^2.1.0" } }, "sha512-4rTJX6Q5wTYEvxboXi8DsEiUo+OvqJGtLYOSGm37KpdRXsG5XJjbVtYKGJpPSWP+QT7rWscA4vsrdmzbEbenpw=="],
"lodash.defaults": ["lodash.defaults@4.2.0", "", {}, "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="],
"lodash.isarguments": ["lodash.isarguments@3.1.0", "", {}, "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"ollama": ["ollama@0.5.17", "", { "dependencies": { "whatwg-fetch": "^3.6.20" } }, "sha512-q5LmPtk6GLFouS+3aURIVl+qcAOPC4+Msmx7uBb3pd+fxI55WnGjmLZ0yijI/CYy79x0QPGx3BwC3u5zv9fBvQ=="],
"redis-errors": ["redis-errors@1.2.0", "", {}, "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w=="],
"redis-parser": ["redis-parser@3.0.0", "", { "dependencies": { "redis-errors": "^1.0.0" } }, "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A=="],
"standard-as-callback": ["standard-as-callback@2.1.0", "", {}, "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="],
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"whatwg-fetch": ["whatwg-fetch@3.6.20", "", {}, "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg=="],


@@ -5,6 +5,7 @@
},
"dependencies": {
"chalk": "^5.5.0",
"ollama": "^0.5.17"
"ollama": "^0.5.17",
"iovalkey": "^0.3.3"
}
}