Major restructure: Remove Confluence, add V2 data structure, organize for dev/prod

- Import real data from PDF (35 Bambu Lab filaments)
- Remove all Confluence integration and dependencies
- Implement new V2 data structure with proper inventory tracking
- Add backwards compatibility for existing data
- Create enhanced UI components (ColorSwatch, InventoryBadge, MaterialBadge)
- Add advanced filtering with quick filters and multi-criteria search
- Organize codebase for dev/prod environments
- Update Lambda functions to support both V1/V2 formats
- Add inventory summary dashboard
- Clean up project structure and documentation

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
DaX
2025-06-20 01:12:50 +02:00
parent a2252fa923
commit 18110ab159
40 changed files with 2171 additions and 1094 deletions

View File

@@ -1,84 +0,0 @@
# Data Migration Scripts
This directory contains scripts for migrating filament data from Confluence to DynamoDB.
## Prerequisites
1. AWS credentials configured (either via AWS CLI or environment variables)
2. DynamoDB table created via Terraform
3. Confluence API credentials (if migrating from Confluence)
## Setup
```bash
cd scripts
npm install
```
## Configuration
Create a `.env.local` file in the project root with:
```env
# AWS Configuration
AWS_REGION=eu-central-1
DYNAMODB_TABLE_NAME=filamenteka-filaments
# Confluence Configuration (optional)
CONFLUENCE_API_URL=https://your-domain.atlassian.net
CONFLUENCE_TOKEN=your-email:your-api-token
CONFLUENCE_PAGE_ID=your-page-id
```
## Usage
### Migrate from local data (data.json)
```bash
npm run migrate
```
### Clear existing data and migrate
```bash
npm run migrate:clear
```
### Manual execution
```bash
# Migrate without clearing
node migrate-with-parser.js
# Clear existing data first
node migrate-with-parser.js --clear
```
## What the script does
1. **Checks for Confluence credentials**
- If found: Fetches data from Confluence page
- If not found: Uses local `public/data.json` file
2. **Parses the data**
- Extracts filament information from HTML table (Confluence)
- Or reads JSON directly (local file)
3. **Prepares data for DynamoDB**
- Generates unique IDs for each filament
- Adds timestamps (createdAt, updatedAt)
4. **Writes to DynamoDB**
- Writes in batches of 25 items (DynamoDB limit)
- Shows progress during migration
5. **Verifies the migration**
- Counts total items in DynamoDB
- Shows a sample item for verification
## Troubleshooting
- **Table not found**: Make sure you've run `terraform apply` first
- **Access denied**: Check your AWS credentials and permissions
- **Confluence errors**: Verify your API token and page ID
- **Empty migration**: Check that the Confluence page has a table with the expected format

View File

@@ -0,0 +1,68 @@
#!/usr/bin/env node
require('dotenv').config({ path: '.env.local' });
const AWS = require('aws-sdk');
// Configure AWS
AWS.config.update({
region: process.env.AWS_REGION || 'eu-central-1'
});
const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';
/**
 * Remove every item from the configured DynamoDB table.
 *
 * Scans for item ids page by page (following LastEvaluatedKey), then
 * issues BatchWrite deletes in groups of 25 — the DynamoDB batch limit.
 * Exits the process with code 1 on failure, since this file is a
 * standalone CLI script.
 */
async function clearTable() {
  console.log(`Clearing all items from ${TABLE_NAME}...`);
  try {
    // Gather the id of every item; only the key is needed for deletion.
    const allKeys = [];
    let cursor = null;
    do {
      const request = { TableName: TABLE_NAME, ProjectionExpression: 'id' };
      if (cursor) {
        request.ExclusiveStartKey = cursor;
      }
      const page = await dynamodb.scan(request).promise();
      allKeys.push(...page.Items);
      cursor = page.LastEvaluatedKey;
    } while (cursor);
    console.log(`Found ${allKeys.length} items to delete`);

    // BatchWriteItem accepts at most 25 requests per call.
    for (let start = 0; start < allKeys.length; start += 25) {
      const batch = allKeys.slice(start, start + 25);
      await dynamodb.batchWrite({
        RequestItems: {
          [TABLE_NAME]: batch.map(({ id }) => ({
            DeleteRequest: { Key: { id } }
          }))
        }
      }).promise();
      console.log(`Deleted ${batch.length} items`);
    }
    console.log('Table cleared successfully!');
  } catch (error) {
    console.error('Error clearing table:', error);
    process.exit(1);
  }
}

// Execute only when run directly (`node clear-table.js`), not when required.
if (require.main === module) {
  clearTable();
}

View File

@@ -0,0 +1,179 @@
#!/usr/bin/env node
require('dotenv').config({ path: '.env.local' });
const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');
const fs = require('fs');
const path = require('path');
// Configure AWS
AWS.config.update({
region: process.env.AWS_REGION || 'eu-central-1'
});
const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';
/**
 * Delete every item currently in the DynamoDB table.
 *
 * Scans for ids (paginating via LastEvaluatedKey), short-circuits when the
 * table is already empty, and deletes the rest with BatchWrite in groups
 * of 25 (the DynamoDB batch limit). Rethrows on failure so the caller's
 * error handling decides how to exit.
 */
async function clearTable() {
  console.log(`Clearing all items from ${TABLE_NAME}...`);
  try {
    // Collect ids only; ProjectionExpression keeps the scan payload small.
    const keys = [];
    let cursor = null;
    do {
      const request = { TableName: TABLE_NAME, ProjectionExpression: 'id' };
      if (cursor) {
        request.ExclusiveStartKey = cursor;
      }
      const page = await dynamodb.scan(request).promise();
      keys.push(...page.Items);
      cursor = page.LastEvaluatedKey;
    } while (cursor);
    console.log(`Found ${keys.length} items to delete`);

    if (keys.length === 0) {
      console.log('Table is already empty');
      return;
    }

    // 25 is the BatchWriteItem request limit.
    for (let start = 0; start < keys.length; start += 25) {
      const batch = keys.slice(start, start + 25);
      await dynamodb.batchWrite({
        RequestItems: {
          [TABLE_NAME]: batch.map(({ id }) => ({
            DeleteRequest: { Key: { id } }
          }))
        }
      }).promise();
      console.log(`Deleted ${batch.length} items`);
    }
    console.log('Table cleared successfully!');
  } catch (error) {
    console.error('Error clearing table:', error);
    throw error;
  }
}
/**
 * Import the parsed PDF filament list (scripts/pdf-filaments.json) into
 * DynamoDB.
 *
 * Each record gets a fresh uuid, created/updated timestamps, a defaulted
 * "finish" and a coarse status derived from the legacy fields. After the
 * batch writes, the import is verified with a COUNT scan and a few sample
 * rows are printed. Rethrows on failure.
 */
async function importData() {
  console.log('Importing data from PDF...');
  try {
    const sourceFile = path.join(__dirname, 'pdf-filaments.json');
    const pdfData = JSON.parse(fs.readFileSync(sourceFile, 'utf8'));
    console.log(`Found ${pdfData.length} filaments to import`);

    const timestamp = new Date().toISOString();
    const processedFilaments = pdfData.map(filament => {
      // Status precedence: "opened" wins over "refill"; default is "new".
      let status = 'new';
      if (filament.otvoreno && filament.otvoreno.toLowerCase().includes('otvorena')) {
        status = 'opened';
      } else if (filament.refill && filament.refill.toLowerCase() === 'da') {
        status = 'refill';
      }
      return {
        id: uuidv4(),
        ...filament,
        // An empty finish column means the plain "Basic" product line.
        finish: filament.finish || 'Basic',
        status,
        createdAt: timestamp,
        updatedAt: timestamp
      };
    });

    // Write in batches of 25 (DynamoDB BatchWriteItem limit).
    let totalImported = 0;
    for (let start = 0; start < processedFilaments.length; start += 25) {
      const chunk = processedFilaments.slice(start, start + 25);
      await dynamodb.batchWrite({
        RequestItems: {
          [TABLE_NAME]: chunk.map(item => ({
            PutRequest: { Item: item }
          }))
        }
      }).promise();
      totalImported += chunk.length;
      console.log(`Imported ${totalImported}/${processedFilaments.length} items`);
    }
    console.log('Import completed successfully!');

    // Verify: count everything currently in the table.
    const countResult = await dynamodb.scan({
      TableName: TABLE_NAME,
      Select: 'COUNT'
    }).promise();
    console.log(`\nVerification: ${countResult.Count} total items now in DynamoDB`);

    // Show a small sample of what was written.
    const sample = await dynamodb.scan({ TableName: TABLE_NAME, Limit: 3 }).promise();
    console.log('\nSample imported data:');
    sample.Items.forEach(item => {
      console.log(`- ${item.brand} ${item.tip} ${item.finish} - ${item.boja} (${item.status})`);
    });
  } catch (error) {
    console.error('Error importing data:', error);
    throw error;
  }
}
/**
 * Entry point: wipe the table, then re-import the PDF dataset.
 * Exits with code 1 when either step fails.
 */
async function main() {
  try {
    console.log('PDF Data Import Tool');
    console.log('===================');
    await clearTable();  // remove stale records first
    await importData();  // then load the fresh PDF data
    console.log('\n✅ Import completed successfully!');
  } catch (error) {
    console.error('\n❌ Import failed:', error);
    process.exit(1);
  }
}

// Only run when executed directly, not when required as a module.
if (require.main === module) {
  main();
}

View File

@@ -0,0 +1,343 @@
#!/usr/bin/env node
require('dotenv').config({ path: '.env.local' });
const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');
// Configure AWS
AWS.config.update({
region: process.env.AWS_REGION || 'eu-central-1'
});
const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';
// Color mappings for common filament colors (name -> hex).
// Keys are matched case-insensitively: first as an exact key, then as a
// substring of the color name, so specific names (e.g. "Mistletoe Green")
// need their own entries to avoid falling back to the generic color.
const colorMappings = {
  'Black': '#000000',
  'White': '#FFFFFF',
  'Red': '#FF0000',
  'Blue': '#0000FF',
  'Green': '#00FF00',
  'Yellow': '#FFFF00',
  'Orange': '#FFA500',
  'Purple': '#800080',
  'Gray': '#808080',
  'Grey': '#808080',
  'Silver': '#C0C0C0',
  'Gold': '#FFD700',
  'Brown': '#964B00',
  'Pink': '#FFC0CB',
  'Cyan': '#00FFFF',
  'Magenta': '#FF00FF',
  'Beige': '#F5F5DC',
  'Transparent': '#FFFFFF00',
  // Specific Bambu Lab colors
  'Mistletoe Green': '#50C878',
  // NOTE(review): 'Indingo Purple' looks like a typo for "Indigo Purple",
  // but it may intentionally match the spelling in the source data —
  // confirm against pdf-filaments.json before renaming.
  'Indingo Purple': '#4B0082',
  'Jade White': '#F0F8FF',
  'Hot Pink': '#FF69B4',
  'Cocoa Brown': '#D2691E',
  'Cotton Candy Cloud': '#FFB6C1',
  'Sunflower Yellow': '#FFDA03',
  'Scarlet Red': '#FF2400',
  'Mandarin Orange': '#FF8C00',
  'Marine Blue': '#0066CC',
  'Charcoal': '#36454F',
  'Ivory White': '#FFFFF0',
  'Ash Gray': '#B2BEB5',
  'Cobalt Blue': '#0047AB',
  'Turquoise': '#40E0D0',
  'Nardo Gray': '#4A4A4A',
  'Bright Green': '#66FF00',
  'Glow Green': '#90EE90',
  'Black Walnut': '#5C4033',
  'Jeans Blue': '#5670A1',
  'Forest Green': '#228B22',
  'Lavender Purple': '#B57EDC'
};

/**
 * Resolve a color name to a hex code.
 *
 * Tries an exact key lookup first, then a case-insensitive substring match
 * against each mapping key. Returns null when the name is missing/empty or
 * no mapping applies (callers store a null hex for unknown colors).
 *
 * @param {string|undefined|null} colorName - legacy "boja" value; may be absent
 * @returns {string|null} hex color like '#RRGGBB', or null
 */
function getColorHex(colorName) {
  // Guard: legacy records may lack "boja"; without this the substring loop
  // below would throw a TypeError on colorName.toLowerCase().
  if (!colorName) {
    return null;
  }
  // Try exact match first
  if (colorMappings[colorName]) {
    return colorMappings[colorName];
  }
  // Fall back to finding a known color inside the name
  const needle = colorName.toLowerCase();
  for (const [key, value] of Object.entries(colorMappings)) {
    if (needle.includes(key.toLowerCase())) {
      return value;
    }
  }
  return null;
}
/**
 * Derive V2 inventory counts from the legacy fields of a filament record.
 *
 * Legacy fields: "kolicina" (total spool count), "vakum" (vacuum-sealed
 * spools, optional "xN" multiplier) and "otvoreno" (opened spools,
 * accepting either "Nx" or "xN" multiplier notation).
 *
 * @param {object} oldFilament - legacy record
 * @returns {{total:number, available:number, inUse:number,
 *            locations:{vacuum:number, opened:number, printer:number}}}
 */
function parseInventory(oldFilament) {
  let total = 1;
  let vacuum = 0;
  let opened = 0;

  // Quantity: non-numeric or missing values keep the default of 1.
  if (oldFilament.kolicina) {
    const qty = parseInt(oldFilament.kolicina);
    if (!Number.isNaN(qty)) {
      total = qty;
    }
  }

  // Vacuum-sealed spools; both "vakuum" and "vakum" spellings occur.
  const vakumText = (oldFilament.vakum || '').toLowerCase();
  if (vakumText.includes('vakuum') || vakumText.includes('vakum')) {
    const multiplier = vakumText.match(/x(\d+)/);
    vacuum = multiplier ? parseInt(multiplier[1]) : 1;
  }

  // Opened spools; both "otvorena"/"otvoreno" spellings and both
  // multiplier notations ("2x" or "x2") occur in the source data.
  const otvorenoText = (oldFilament.otvoreno || '').toLowerCase();
  if (otvorenoText.includes('otvorena') || otvorenoText.includes('otvoreno')) {
    const prefix = otvorenoText.match(/(\d+)x/);
    if (prefix) {
      opened = parseInt(prefix[1]);
    } else {
      const suffix = otvorenoText.match(/x(\d+)/);
      opened = suffix ? parseInt(suffix[1]) : 1;
    }
  }

  // Anything not accounted for as vacuum-sealed or opened counts as in use.
  const available = vacuum + opened;
  return {
    total: total || 1,
    available: available,
    inUse: Math.max(0, total - available),
    locations: {
      vacuum: vacuum,
      opened: opened,
      printer: 0
    }
  };
}
/**
 * Classify a legacy record's storage condition as 'vacuum', 'opened' or
 * 'sealed' (the default).
 *
 * Note: the vacuum check matches the double-u spelling "vakuum" only,
 * and the opened check matches "otvorena" only, mirroring the source data.
 *
 * @param {object} oldFilament - legacy record
 * @returns {'vacuum'|'opened'|'sealed'}
 */
function determineStorageCondition(oldFilament) {
  const vakumText = (oldFilament.vakum || '').toLowerCase();
  if (vakumText.includes('vakuum')) {
    return 'vacuum';
  }
  const otvorenoText = (oldFilament.otvoreno || '').toLowerCase();
  if (otvorenoText.includes('otvorena')) {
    return 'opened';
  }
  return 'sealed';
}
/**
 * Split the legacy "tip"/"finish" columns into a V2 material descriptor.
 *
 * "finish" becomes the modifier unless it is empty or the plain "Basic"
 * line. Some "PLA" rows carry a real base material (PETG/ABS/TPU) in the
 * finish column — those are promoted to the base with no modifier.
 *
 * @param {object} oldFilament - legacy record
 * @returns {{base:string, modifier:string|null}}
 */
function parseMaterial(oldFilament) {
  const base = oldFilament.tip || 'PLA';
  const finish = oldFilament.finish;
  const modifier = finish && finish !== 'Basic' ? finish : null;

  // Promote mislabeled base materials out of the finish column.
  if (base === 'PLA' && ['PETG', 'ABS', 'TPU'].includes(modifier)) {
    return { base: modifier, modifier: null };
  }
  return { base, modifier };
}
/**
 * Build a human-readable SKU: BRAND(3)-BASE(3)-COLOR(3)-RANDOM.
 *
 * The random suffix comes from Math.random, so SKUs are readable but not
 * guaranteed unique or cryptographically random.
 *
 * @param {string} brand
 * @param {{base:string}} material
 * @param {{name:string}} color
 * @returns {string} e.g. 'BAM-PLA-BLA-K3F'
 */
function generateSKU(brand, material, color) {
  const parts = [
    brand.substring(0, 3).toUpperCase(),
    material.base.substring(0, 3),
    color.name.substring(0, 3).toUpperCase(),
    Math.random().toString(36).substring(2, 5).toUpperCase()
  ];
  return parts.join('-');
}
/**
 * Transform a single legacy (V1) filament record into the V2 structure.
 *
 * Derives material (base + modifier), inventory counts, a color hex and
 * auto-generated tags from the legacy fields, and preserves the complete
 * original record under `_legacy` for backwards compatibility.
 *
 * @param {object} oldFilament - legacy record (tip/finish/boja/refill/...)
 * @returns {object} V2 filament record
 */
function migrateFilament(oldFilament) {
  const material = parseMaterial(oldFilament);
  const inventory = parseInventory(oldFilament);
  // NOTE(review): "boja" may be absent on legacy rows — ensure getColorHex
  // tolerates a missing name (it lowercases the argument).
  const colorHex = getColorHex(oldFilament.boja);
  const newFilament = {
    // Keep existing fields; records without an id get a fresh uuid.
    id: oldFilament.id || uuidv4(),
    sku: generateSKU(oldFilament.brand, material, { name: oldFilament.boja }),
    // Product info
    brand: oldFilament.brand,
    type: oldFilament.tip || 'PLA',
    material: material,
    color: {
      name: oldFilament.boja || 'Unknown',
      hex: colorHex
    },
    // Physical properties — the legacy data has neither, so standard spool
    // values are assumed; TODO confirm against the actual inventory.
    weight: {
      value: 1000, // Default to 1kg
      unit: 'g'
    },
    diameter: 1.75, // Standard diameter
    // Inventory
    inventory: inventory,
    // Pricing — currency RSD assumed; supplier/date unknown in legacy data.
    pricing: {
      purchasePrice: oldFilament.cena ? parseFloat(oldFilament.cena) : null,
      currency: 'RSD',
      supplier: null,
      purchaseDate: null
    },
    // Condition
    condition: {
      isRefill: oldFilament.refill === 'Da',
      // NOTE(review): the real opening date is not in the legacy data, so
      // this records the migration time whenever "otvoreno" is set.
      openedDate: oldFilament.otvoreno ? new Date().toISOString() : null,
      expiryDate: null,
      storageCondition: determineStorageCondition(oldFilament),
      humidity: null
    },
    // Metadata
    tags: [],
    notes: null,
    images: [],
    // Timestamps
    createdAt: oldFilament.createdAt || new Date().toISOString(),
    updatedAt: new Date().toISOString(),
    lastUsed: null,
    // Keep old structure temporarily for backwards compatibility
    _legacy: {
      tip: oldFilament.tip,
      finish: oldFilament.finish,
      boja: oldFilament.boja,
      refill: oldFilament.refill,
      vakum: oldFilament.vakum,
      otvoreno: oldFilament.otvoreno,
      kolicina: oldFilament.kolicina,
      cena: oldFilament.cena,
      status: oldFilament.status
    }
  };
  // Derive search/filter tags from the material and condition.
  if (material.modifier === 'Silk') newFilament.tags.push('silk');
  if (material.modifier === 'Matte') newFilament.tags.push('matte');
  if (material.modifier === 'CF') newFilament.tags.push('engineering', 'carbon-fiber');
  if (material.modifier === 'Wood') newFilament.tags.push('specialty', 'wood-fill');
  if (material.modifier === 'Glow') newFilament.tags.push('specialty', 'glow-in-dark');
  if (material.base === 'PETG') newFilament.tags.push('engineering', 'chemical-resistant');
  if (material.base === 'ABS') newFilament.tags.push('engineering', 'high-temp');
  if (material.base === 'TPU') newFilament.tags.push('flexible', 'engineering');
  if (newFilament.condition.isRefill) newFilament.tags.push('refill', 'eco-friendly');
  return newFilament;
}
/**
 * Migrate every filament in DynamoDB from the legacy V1 shape to the V2
 * structure produced by migrateFilament().
 *
 * Steps: scan all items (paginated), skip if the data already looks
 * migrated (unless --force is passed), transform each record, write the
 * results back with BatchWrite in chunks of 25, and print a summary.
 * Exits the process with code 1 on failure (CLI script behavior).
 */
async function migrateData() {
  console.log('Starting migration to new data structure...');
  try {
    // Scan all existing items — full items, no projection, since every
    // legacy field is needed for the transformation.
    const scanParams = {
      TableName: TABLE_NAME
    };
    const items = [];
    let lastEvaluatedKey = null;
    do {
      // The same params object is reused; only the start key changes per page.
      if (lastEvaluatedKey) {
        scanParams.ExclusiveStartKey = lastEvaluatedKey;
      }
      const result = await dynamodb.scan(scanParams).promise();
      items.push(...result.Items);
      lastEvaluatedKey = result.LastEvaluatedKey;
    } while (lastEvaluatedKey);
    console.log(`Found ${items.length} items to migrate`);
    // Heuristic: if the first item already has V2 fields (material +
    // inventory), assume the whole table was migrated and require --force.
    if (items.length > 0 && items[0].material && items[0].inventory) {
      console.log('Data appears to be already migrated!');
      const confirm = process.argv.includes('--force');
      if (!confirm) {
        console.log('Use --force flag to force migration');
        return;
      }
    }
    // Migrate each item to the V2 structure.
    const migratedItems = items.map(item => migrateFilament(item));
    // Show a sample of the transformed data.
    // NOTE(review): prints "undefined" when the table is empty — harmless.
    console.log('\nSample migrated data:');
    console.log(JSON.stringify(migratedItems[0], null, 2));
    // Update items in batches of 25 (DynamoDB BatchWriteItem limit).
    const chunks = [];
    for (let i = 0; i < migratedItems.length; i += 25) {
      chunks.push(migratedItems.slice(i, i + 25));
    }
    console.log(`\nUpdating ${migratedItems.length} items in DynamoDB...`);
    for (const chunk of chunks) {
      const params = {
        RequestItems: {
          [TABLE_NAME]: chunk.map(item => ({
            PutRequest: { Item: item }
          }))
        }
      };
      await dynamodb.batchWrite(params).promise();
      console.log(`Updated ${chunk.length} items`);
    }
    console.log('\n✅ Migration completed successfully!');
    // Summarize brands, materials and inventory across the migrated set.
    const summary = {
      totalItems: migratedItems.length,
      brands: [...new Set(migratedItems.map(i => i.brand))],
      materials: [...new Set(migratedItems.map(i => i.material.base))],
      modifiers: [...new Set(migratedItems.map(i => i.material.modifier).filter(Boolean))],
      storageConditions: [...new Set(migratedItems.map(i => i.condition.storageCondition))],
      totalInventory: migratedItems.reduce((sum, i) => sum + i.inventory.total, 0),
      availableInventory: migratedItems.reduce((sum, i) => sum + i.inventory.available, 0)
    };
    console.log('\nMigration Summary:');
    console.log(JSON.stringify(summary, null, 2));
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  }
}
// Entry point: run the migration only when executed directly
// (`node migrate-to-v2.js [--force]`), not when required as a module.
if (require.main === module) {
  console.log('Data Structure Migration Tool');
  console.log('============================');
  console.log('This will migrate all filaments to the new structure');
  console.log('Old data will be preserved in _legacy field\n');
  migrateData();
}

View File

@@ -1,36 +0,0 @@
const fs = require('fs');
const path = require('path');
const { fetchFromConfluence } = require('../src/server/confluence.ts');
/**
 * Fetch the filament list from Confluence and cache it as public/data.json
 * so the frontend can be built and served without live Confluence access.
 * Exits with code 1 when the fetch fails.
 */
async function fetchData() {
  console.log('Fetching data from Confluence...');
  const env = {
    CONFLUENCE_API_URL: process.env.CONFLUENCE_API_URL,
    CONFLUENCE_TOKEN: process.env.CONFLUENCE_TOKEN,
    CONFLUENCE_PAGE_ID: process.env.CONFLUENCE_PAGE_ID
  };
  try {
    const data = await fetchFromConfluence(env);

    // Make sure the output directory exists before writing.
    const publicDir = path.join(__dirname, '..', 'public');
    if (!fs.existsSync(publicDir)) {
      fs.mkdirSync(publicDir, { recursive: true });
    }

    const outputFile = path.join(publicDir, 'data.json');
    fs.writeFileSync(outputFile, JSON.stringify(data, null, 2));
    console.log(`✅ Fetched ${data.length} filaments`);
  } catch (error) {
    console.error('❌ Failed to fetch data:', error.message);
    process.exit(1);
  }
}

fetchData();

View File

@@ -1,194 +0,0 @@
#!/usr/bin/env node
require('dotenv').config({ path: '.env.local' });
const axios = require('axios');
const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');
// Configure AWS
AWS.config.update({
region: process.env.AWS_REGION || 'eu-central-1'
});
const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';
// Confluence configuration
const CONFLUENCE_API_URL = process.env.CONFLUENCE_API_URL;
const CONFLUENCE_TOKEN = process.env.CONFLUENCE_TOKEN;
const CONFLUENCE_PAGE_ID = process.env.CONFLUENCE_PAGE_ID;
/**
 * Fetch the Confluence page body (storage format) and parse its table
 * into legacy filament records.
 *
 * Auth: CONFLUENCE_TOKEN is base64-encoded for HTTP Basic auth — per the
 * project README it is expected in "email:api-token" form.
 *
 * @returns {Promise<Array<object>>} parsed filament rows
 * @throws when the HTTP request fails (after logging the message)
 */
async function fetchConfluenceData() {
  try {
    console.log('Fetching data from Confluence...');
    const response = await axios.get(
      `${CONFLUENCE_API_URL}/wiki/rest/api/content/${CONFLUENCE_PAGE_ID}?expand=body.storage`,
      {
        headers: {
          'Authorization': `Basic ${Buffer.from(CONFLUENCE_TOKEN).toString('base64')}`,
          'Accept': 'application/json'
        }
      }
    );
    // body.storage.value holds the page HTML in Confluence storage format.
    const htmlContent = response.data.body.storage.value;
    return parseConfluenceTable(htmlContent);
  } catch (error) {
    console.error('Error fetching from Confluence:', error.message);
    throw error;
  }
}
/**
 * Parse table rows out of Confluence storage-format HTML into legacy
 * filament row objects using a lightweight regex scan (the companion
 * migrate-with-parser.js uses cheerio instead).
 *
 * Cell text is stripped of tags, then common HTML entities are decoded.
 * Columns map positionally: brand, tip, finish, boja, refill, vakum,
 * otvoreno, kolicina, cena; missing cells become ''.
 *
 * NOTE(review): only the very first <tr> in the document is skipped as a
 * header — if the page ever contains multiple tables, later header rows
 * would be treated as data. Confirm the page has a single table.
 *
 * @param {string} html - Confluence page body (storage format)
 * @returns {Array<object>} one record per data row
 */
function parseConfluenceTable(html) {
  const rows = [];
  const tableRegex = /<tr[^>]*>(.*?)<\/tr>/gs;
  const cellRegex = /<t[dh][^>]*>(.*?)<\/t[dh]>/gs;
  let match;
  let isHeader = true;
  while ((match = tableRegex.exec(html)) !== null) {
    const rowHtml = match[1];
    const cells = [];
    let cellMatch;
    while ((cellMatch = cellRegex.exec(rowHtml)) !== null) {
      // Strip tags first, then decode entities. '&amp;' must be decoded
      // LAST: decoding it first would turn literal '&amp;lt;' into '&lt;'
      // and then into '<' (a double-unescape bug).
      const cellContent = cellMatch[1]
        .replace(/<[^>]*>/g, '')
        .replace(/&nbsp;/g, ' ')
        .replace(/&lt;/g, '<')
        .replace(/&gt;/g, '>')
        .replace(/&amp;/g, '&')
        .trim();
      cells.push(cellContent);
    }
    if (!isHeader && cells.length > 0) {
      rows.push(cells);
    }
    isHeader = false;
  }
  // Map positional cells onto named legacy fields.
  return rows.map(row => ({
    brand: row[0] || '',
    tip: row[1] || '',
    finish: row[2] || '',
    boja: row[3] || '',
    refill: row[4] || '',
    vakum: row[5] || '',
    otvoreno: row[6] || '',
    kolicina: row[7] || '',
    cena: row[8] || ''
  }));
}
/**
 * Load filaments from the local mock file (public/data.json) and stamp
 * each record with a fresh id and created/updated timestamps.
 *
 * @returns {Promise<Array<object>>} records ready for DynamoDB insertion
 * @throws when the file cannot be read or parsed (after logging)
 */
async function migrateToLocalJSON() {
  try {
    console.log('Migrating to local JSON file for testing...');
    const fs = require('fs');
    const raw = fs.readFileSync('./public/data.json', 'utf8');
    const filaments = JSON.parse(raw).map(item => ({
      id: uuidv4(),
      ...item,
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString()
    }));
    console.log(`Found ${filaments.length} filaments to migrate`);
    return filaments;
  } catch (error) {
    console.error('Error reading local data:', error);
    throw error;
  }
}
/**
 * Write prepared filament records to DynamoDB.
 *
 * Verifies the table exists first (exits with code 1 and a Terraform hint
 * if it does not), then writes in BatchWrite chunks of 25 — the DynamoDB
 * per-request limit.
 *
 * @param {Array<object>} filaments - records already carrying id/timestamps
 */
async function migrateToDynamoDB(filaments) {
  console.log(`Migrating ${filaments.length} filaments to DynamoDB...`);
  // Check if table exists before attempting any writes.
  try {
    const dynamo = new AWS.DynamoDB();
    await dynamo.describeTable({ TableName: TABLE_NAME }).promise();
    console.log(`Table ${TABLE_NAME} exists`);
  } catch (error) {
    if (error.code === 'ResourceNotFoundException') {
      console.error(`Table ${TABLE_NAME} does not exist. Please run Terraform first.`);
      process.exit(1);
    }
    // Anything other than a missing table is unexpected — propagate it.
    throw error;
  }
  // Batch write items, 25 per request (DynamoDB BatchWriteItem limit).
  const chunks = [];
  for (let i = 0; i < filaments.length; i += 25) {
    chunks.push(filaments.slice(i, i + 25));
  }
  for (const chunk of chunks) {
    const params = {
      RequestItems: {
        [TABLE_NAME]: chunk.map(item => ({
          PutRequest: { Item: item }
        }))
      }
    };
    try {
      await dynamodb.batchWrite(params).promise();
      console.log(`Migrated ${chunk.length} items`);
    } catch (error) {
      console.error('Error writing batch:', error);
      throw error;
    }
  }
  console.log('Migration completed successfully!');
}
/**
 * Entry point: choose the data source (Confluence when all three
 * credentials are configured, otherwise local public/data.json), write
 * everything to DynamoDB, and report a COUNT-based verification.
 * Exits with code 1 on failure.
 */
async function main() {
  try {
    let filaments;
    if (CONFLUENCE_API_URL && CONFLUENCE_TOKEN && CONFLUENCE_PAGE_ID) {
      // Fetch from Confluence and stamp ids/timestamps on each row.
      const confluenceData = await fetchConfluenceData();
      filaments = confluenceData.map(item => ({
        id: uuidv4(),
        ...item,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      }));
    } else {
      console.log('Confluence credentials not found, using local data...');
      filaments = await migrateToLocalJSON();
    }
    // Migrate to DynamoDB
    await migrateToDynamoDB(filaments);
    // Verify migration with a COUNT scan.
    // NOTE(review): Count reflects only the first scan page (~1 MB);
    // fine for small tables, undercounts large ones.
    const params = {
      TableName: TABLE_NAME,
      Select: 'COUNT'
    };
    const result = await dynamodb.scan(params).promise();
    console.log(`\nVerification: ${result.Count} items in DynamoDB`);
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  }
}
// Run the migration only when executed directly, not when required.
if (require.main === module) {
  main();
}

View File

@@ -1,241 +0,0 @@
#!/usr/bin/env node
require('dotenv').config({ path: '.env.local' });
const axios = require('axios');
const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');
const cheerio = require('cheerio');
// Configure AWS
AWS.config.update({
region: process.env.AWS_REGION || 'eu-central-1'
});
const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';
// Confluence configuration
const CONFLUENCE_API_URL = process.env.CONFLUENCE_API_URL;
const CONFLUENCE_TOKEN = process.env.CONFLUENCE_TOKEN;
const CONFLUENCE_PAGE_ID = process.env.CONFLUENCE_PAGE_ID;
/**
 * Download the Confluence page in storage format and hand the HTML to
 * parseConfluenceTable().
 *
 * Uses HTTP Basic auth with the base64-encoded CONFLUENCE_TOKEN (per the
 * project README, an "email:api-token" pair).
 *
 * @returns {Promise<Array<object>>} parsed legacy filament rows
 * @throws when the request fails (after logging the error message)
 */
async function fetchConfluenceData() {
  try {
    console.log('Fetching data from Confluence...');
    const response = await axios.get(
      `${CONFLUENCE_API_URL}/wiki/rest/api/content/${CONFLUENCE_PAGE_ID}?expand=body.storage`,
      {
        headers: {
          'Authorization': `Basic ${Buffer.from(CONFLUENCE_TOKEN).toString('base64')}`,
          'Accept': 'application/json'
        }
      }
    );
    // The page HTML lives at body.storage.value in the API response.
    const htmlContent = response.data.body.storage.value;
    return parseConfluenceTable(htmlContent);
  } catch (error) {
    console.error('Error fetching from Confluence:', error.message);
    throw error;
  }
}
/**
 * Parse Confluence storage-format HTML into legacy filament records
 * using cheerio.
 *
 * Rows need at least 9 <td> cells; the first row is treated as the header
 * and skipped, and rows with neither a brand nor a color are dropped as
 * empty spacer rows.
 *
 * @param {string} html - Confluence page body (storage format)
 * @returns {Array<object>} one record per valid data row
 */
function parseConfluenceTable(html) {
  const $ = cheerio.load(html);
  const filaments = [];
  const columns = ['brand', 'tip', 'finish', 'boja', 'refill', 'vakum', 'otvoreno', 'kolicina', 'cena'];

  $('table').find('tr').each((rowIndex, row) => {
    // Skip the header row.
    if (rowIndex === 0) return;

    const cells = $(row).find('td');
    if (cells.length < 9) return;

    // Map positional cells onto named legacy fields.
    const filament = {};
    columns.forEach((name, i) => {
      filament[name] = $(cells[i]).text().trim();
    });

    // Only keep rows that carry actual data.
    if (filament.brand || filament.boja) {
      filaments.push(filament);
    }
  });
  return filaments;
}
/**
 * Delete every item from the DynamoDB table before a fresh migration.
 *
 * Scans for item ids following LastEvaluatedKey pagination — a single
 * scan returns at most ~1 MB, so without the pagination loop only the
 * first page of items would be deleted (this matches the paginated
 * clearTable implementations in the other migration scripts). Deletes
 * run in BatchWrite groups of 25, the DynamoDB per-request limit.
 * Rethrows on failure so the caller decides how to exit.
 */
async function clearDynamoTable() {
  console.log('Clearing existing data from DynamoDB...');
  try {
    // Scan all item ids, page by page.
    const items = [];
    let lastEvaluatedKey = null;
    do {
      const scanParams = {
        TableName: TABLE_NAME,
        ProjectionExpression: 'id'
      };
      if (lastEvaluatedKey) {
        scanParams.ExclusiveStartKey = lastEvaluatedKey;
      }
      const result = await dynamodb.scan(scanParams).promise();
      items.push(...result.Items);
      lastEvaluatedKey = result.LastEvaluatedKey;
    } while (lastEvaluatedKey);

    if (items.length === 0) {
      console.log('Table is already empty');
      return;
    }

    const deleteRequests = items.map(item => ({
      DeleteRequest: { Key: { id: item.id } }
    }));

    // DynamoDB batchWrite supports max 25 items per request.
    for (let i = 0; i < deleteRequests.length; i += 25) {
      const batch = deleteRequests.slice(i, i + 25);
      const params = {
        RequestItems: {
          [TABLE_NAME]: batch
        }
      };
      await dynamodb.batchWrite(params).promise();
      console.log(`Deleted ${batch.length} items`);
    }
    console.log('Table cleared successfully');
  } catch (error) {
    console.error('Error clearing table:', error);
    throw error;
  }
}
/**
 * Insert parsed filament rows into DynamoDB.
 *
 * Verifies the table exists first (exits with code 1 and a Terraform hint
 * if not), adds a fresh id and created/updated timestamps to every row,
 * then writes in BatchWrite chunks of 25 with running progress output.
 *
 * @param {Array<object>} filaments - raw parsed rows (no ids yet)
 * @returns {Promise<number>} number of items written
 */
async function migrateToDynamoDB(filaments) {
  console.log(`Migrating ${filaments.length} filaments to DynamoDB...`);
  // Check if table exists before attempting any writes.
  try {
    const dynamo = new AWS.DynamoDB();
    await dynamo.describeTable({ TableName: TABLE_NAME }).promise();
    console.log(`Table ${TABLE_NAME} exists`);
  } catch (error) {
    if (error.code === 'ResourceNotFoundException') {
      console.error(`Table ${TABLE_NAME} does not exist. Please run Terraform first.`);
      process.exit(1);
    }
    // Any other failure is unexpected — propagate it.
    throw error;
  }
  // Add IDs and timestamps to each parsed row.
  const itemsToInsert = filaments.map(item => ({
    id: uuidv4(),
    ...item,
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString()
  }));
  // Batch write items (max 25 per batch — the DynamoDB limit).
  const chunks = [];
  for (let i = 0; i < itemsToInsert.length; i += 25) {
    chunks.push(itemsToInsert.slice(i, i + 25));
  }
  let totalMigrated = 0;
  for (const chunk of chunks) {
    const params = {
      RequestItems: {
        [TABLE_NAME]: chunk.map(item => ({
          PutRequest: { Item: item }
        }))
      }
    };
    try {
      await dynamodb.batchWrite(params).promise();
      totalMigrated += chunk.length;
      console.log(`Migrated ${totalMigrated}/${itemsToInsert.length} items`);
    } catch (error) {
      console.error('Error writing batch:', error);
      throw error;
    }
  }
  console.log('Migration completed successfully!');
  return totalMigrated;
}
/**
 * Entry point for the Confluence → DynamoDB migration CLI.
 *
 * Optionally clears the table first (--clear flag), picks the data source
 * (Confluence when all three credentials are set, otherwise the local
 * mock file), migrates everything, and prints a COUNT verification plus
 * one sample item read back from DynamoDB. Exits with code 1 on failure.
 */
async function main() {
  try {
    let filaments;
    // Check for --clear flag: wipe existing data before importing.
    const shouldClear = process.argv.includes('--clear');
    if (shouldClear) {
      await clearDynamoTable();
    }
    if (CONFLUENCE_API_URL && CONFLUENCE_TOKEN && CONFLUENCE_PAGE_ID) {
      // Fetch from Confluence
      console.log('Using Confluence as data source');
      filaments = await fetchConfluenceData();
    } else {
      console.log('Confluence credentials not found, using local mock data...');
      // NOTE(review): this path is relative to the working directory, not
      // __dirname — the script must be run from inside scripts/ to find it.
      const fs = require('fs');
      const data = JSON.parse(fs.readFileSync('../public/data.json', 'utf8'));
      filaments = data;
    }
    console.log(`Found ${filaments.length} filaments to migrate`);
    // Show sample data before writing anything.
    if (filaments.length > 0) {
      console.log('\nSample data:');
      console.log(JSON.stringify(filaments[0], null, 2));
    }
    // Migrate to DynamoDB (return value — the written count — is unused here).
    const migrated = await migrateToDynamoDB(filaments);
    // Verify migration with a COUNT scan.
    // NOTE(review): Count reflects only the first scan page (~1 MB).
    const params = {
      TableName: TABLE_NAME,
      Select: 'COUNT'
    };
    const result = await dynamodb.scan(params).promise();
    console.log(`\nVerification: ${result.Count} total items now in DynamoDB`);
    // Read one item back as a sanity check of what actually landed.
    const sampleParams = {
      TableName: TABLE_NAME,
      Limit: 1
    };
    const sampleResult = await dynamodb.scan(sampleParams).promise();
    if (sampleResult.Items.length > 0) {
      console.log('\nSample from DynamoDB:');
      console.log(JSON.stringify(sampleResult.Items[0], null, 2));
    }
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  }
}
// Run migration only when executed directly, printing usage first.
if (require.main === module) {
  console.log('Confluence to DynamoDB Migration Tool');
  console.log('=====================================');
  console.log('Usage: node migrate-with-parser.js [--clear]');
  console.log(' --clear: Clear existing data before migration\n');
  main();
}