Major restructure: Remove Confluence, add V2 data structure, organize for dev/prod
- Import real data from PDF (35 Bambu Lab filaments)
- Remove all Confluence integration and dependencies
- Implement new V2 data structure with proper inventory tracking
- Add backwards compatibility for existing data
- Create enhanced UI components (ColorSwatch, InventoryBadge, MaterialBadge)
- Add advanced filtering with quick filters and multi-criteria search
- Organize codebase for dev/prod environments
- Update Lambda functions to support both V1/V2 formats
- Add inventory summary dashboard
- Clean up project structure and documentation

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
179
scripts/data-import/import-pdf-data.js
Executable file
179
scripts/data-import/import-pdf-data.js
Executable file
@@ -0,0 +1,179 @@
|
||||
#!/usr/bin/env node

// Bootstrap: load local environment variables first so the AWS settings
// below can pick them up.
require('dotenv').config({ path: '.env.local' });

const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');
const fs = require('fs');
const path = require('path');

// Region defaults to eu-central-1 (Frankfurt) unless overridden.
AWS.config.update({ region: process.env.AWS_REGION || 'eu-central-1' });

// Shared DocumentClient and target table for the whole script.
const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';
/**
 * Deletes every item from TABLE_NAME.
 *
 * Scans the table for all primary keys (paginating via LastEvaluatedKey),
 * then deletes them with BatchWriteItem in chunks of 25 — the DynamoDB
 * batch limit. Unprocessed deletes returned by DynamoDB (e.g. under
 * throttling) are retried until the whole batch is applied.
 *
 * @throws rethrows any AWS SDK error after logging it
 */
async function clearTable() {
  console.log(`Clearing all items from ${TABLE_NAME}...`);

  try {
    // Scan only the `id` attribute to keep the payload small.
    const scanParams = {
      TableName: TABLE_NAME,
      ProjectionExpression: 'id'
    };

    const items = [];
    let lastEvaluatedKey = null;

    // Paginate until the scan is exhausted.
    do {
      if (lastEvaluatedKey) {
        scanParams.ExclusiveStartKey = lastEvaluatedKey;
      }

      const result = await dynamodb.scan(scanParams).promise();
      items.push(...result.Items);
      lastEvaluatedKey = result.LastEvaluatedKey;
    } while (lastEvaluatedKey);

    console.log(`Found ${items.length} items to delete`);

    if (items.length === 0) {
      console.log('Table is already empty');
      return;
    }

    // BatchWriteItem accepts at most 25 requests per call.
    const chunks = [];
    for (let i = 0; i < items.length; i += 25) {
      chunks.push(items.slice(i, i + 25));
    }

    for (const chunk of chunks) {
      // Fix: the original ignored UnprocessedItems, silently leaving rows
      // behind whenever DynamoDB throttled a batch. Retry until empty.
      let requestItems = {
        [TABLE_NAME]: chunk.map(item => ({
          DeleteRequest: { Key: { id: item.id } }
        }))
      };

      do {
        const response = await dynamodb.batchWrite({ RequestItems: requestItems }).promise();
        requestItems = response.UnprocessedItems;
      } while (requestItems && Object.keys(requestItems).length > 0);

      console.log(`Deleted ${chunk.length} items`);
    }

    console.log('Table cleared successfully!');
  } catch (error) {
    console.error('Error clearing table:', error);
    throw error;
  }
}
/**
 * Imports filament records from pdf-filaments.json (next to this script)
 * into DynamoDB.
 *
 * Each record is normalized before the write:
 *  - status: 'opened' when `otvoreno` contains "otvorena", 'refill' when
 *    `refill` equals "da" (case-insensitive), otherwise 'new'
 *    (field names are Croatian: otvoreno = opened, "da" = yes)
 *  - finish: defaults to 'Basic' when missing/empty
 *  - id / createdAt / updatedAt: always freshly generated
 *
 * Writes go out in BatchWriteItem chunks of 25; unprocessed puts returned
 * by DynamoDB are retried. Afterwards the table is scanned once to report
 * the total item count and print up to three sample rows.
 *
 * @throws rethrows any file-system, JSON, or AWS SDK error after logging
 */
async function importData() {
  console.log('Importing data from PDF...');

  try {
    // Read the PDF-extracted data (a JSON array of filament records).
    const pdfData = JSON.parse(
      fs.readFileSync(path.join(__dirname, 'pdf-filaments.json'), 'utf8')
    );

    console.log(`Found ${pdfData.length} filaments to import`);

    // Process each filament
    const timestamp = new Date().toISOString();
    const processedFilaments = pdfData.map(filament => {
      // Determine status based on otvoreno/refill fields.
      let status = 'new';
      if (filament.otvoreno && filament.otvoreno.toLowerCase().includes('otvorena')) {
        status = 'opened';
      } else if (filament.refill && filament.refill.toLowerCase() === 'da') {
        status = 'refill';
      }

      // Clean up finish field - if empty, default to "Basic"
      const finish = filament.finish || 'Basic';

      // Fix: spread the source record FIRST so a stray `id` field in the
      // JSON can never overwrite the freshly generated UUID.
      return {
        ...filament,
        id: uuidv4(),
        finish,
        status,
        createdAt: timestamp,
        updatedAt: timestamp
      };
    });

    // Import to DynamoDB in batches of 25 (BatchWriteItem hard limit).
    const chunks = [];
    for (let i = 0; i < processedFilaments.length; i += 25) {
      chunks.push(processedFilaments.slice(i, i + 25));
    }

    let totalImported = 0;
    for (const chunk of chunks) {
      // Fix: the original ignored UnprocessedItems, so throttled writes
      // were silently dropped. Retry until DynamoDB accepts everything.
      let requestItems = {
        [TABLE_NAME]: chunk.map(item => ({
          PutRequest: { Item: item }
        }))
      };

      do {
        const response = await dynamodb.batchWrite({ RequestItems: requestItems }).promise();
        requestItems = response.UnprocessedItems;
      } while (requestItems && Object.keys(requestItems).length > 0);

      totalImported += chunk.length;
      console.log(`Imported ${totalImported}/${processedFilaments.length} items`);
    }

    console.log('Import completed successfully!');

    // Verify the import by counting what is actually in the table now.
    const scanParams = {
      TableName: TABLE_NAME,
      Select: 'COUNT'
    };

    const result = await dynamodb.scan(scanParams).promise();
    console.log(`\nVerification: ${result.Count} total items now in DynamoDB`);

    // Show sample data
    const sampleParams = {
      TableName: TABLE_NAME,
      Limit: 3
    };

    const sampleResult = await dynamodb.scan(sampleParams).promise();
    console.log('\nSample imported data:');
    sampleResult.Items.forEach(item => {
      console.log(`- ${item.brand} ${item.tip} ${item.finish} - ${item.boja} (${item.status})`);
    });

  } catch (error) {
    console.error('Error importing data:', error);
    throw error;
  }
}
/**
 * Entry point: wipes the table, then imports the PDF dataset.
 * Exits the process with code 1 on any failure.
 */
async function main() {
  try {
    console.log('PDF Data Import Tool');
    console.log('===================');

    await clearTable();  // remove existing rows
    await importData();  // load the fresh dataset

    console.log('\n✅ Import completed successfully!');
  } catch (error) {
    console.error('\n❌ Import failed:', error);
    process.exit(1);
  }
}
// Only kick off the import when this file is executed directly from the
// CLI; require()-ing it stays side-effect free.
if (require.main === module) {
  main();
}
Reference in New Issue
Block a user