- Remove old Confluence variables - Add NEXT_PUBLIC_API_URL for API access 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
194 lines
5.1 KiB
JavaScript
194 lines
5.1 KiB
JavaScript
#!/usr/bin/env node
|
|
|
|
require('dotenv').config({ path: '.env.local' });
|
|
const axios = require('axios');
|
|
const AWS = require('aws-sdk');
|
|
const { v4: uuidv4 } = require('uuid');
|
|
|
|
// Configure AWS SDK region (falls back to eu-central-1 when AWS_REGION is unset)
AWS.config.update({
  region: process.env.AWS_REGION || 'eu-central-1'
});

const dynamodb = new AWS.DynamoDB.DocumentClient();
// Target table for the migrated filament records
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';

// Confluence configuration — when any of these is missing, main() falls back
// to the local JSON fixture instead of calling the Confluence REST API.
const CONFLUENCE_API_URL = process.env.CONFLUENCE_API_URL;
const CONFLUENCE_TOKEN = process.env.CONFLUENCE_TOKEN;
const CONFLUENCE_PAGE_ID = process.env.CONFLUENCE_PAGE_ID;
|
|
|
|
/**
 * Downloads the configured Confluence page body (storage format) and parses
 * its first HTML table into an array of filament records.
 *
 * @returns {Promise<Array<Object>>} parsed filament rows
 * @throws rethrows any axios/network error after logging its message
 */
async function fetchConfluenceData() {
  const url = `${CONFLUENCE_API_URL}/wiki/rest/api/content/${CONFLUENCE_PAGE_ID}?expand=body.storage`;
  const requestConfig = {
    headers: {
      'Authorization': `Basic ${Buffer.from(CONFLUENCE_TOKEN).toString('base64')}`,
      'Accept': 'application/json'
    }
  };

  try {
    console.log('Fetching data from Confluence...');
    const response = await axios.get(url, requestConfig);
    // body.storage.value carries the page content as an HTML string.
    return parseConfluenceTable(response.data.body.storage.value);
  } catch (error) {
    console.error('Error fetching from Confluence:', error.message);
    throw error;
  }
}
|
|
|
|
/**
 * Extracts rows from the HTML table(s) in a Confluence storage-format page
 * and maps them to filament objects.
 *
 * Note: regex-based parsing is a stopgap — for production use a proper HTML
 * parser such as cheerio.
 *
 * @param {string} html - Confluence page body (storage format HTML)
 * @returns {Array<Object>} one object per data row; the first table row is
 *   treated as the header and skipped, as are rows with no cells
 */
function parseConfluenceTable(html) {
  const rows = [];
  const tableRegex = /<tr[^>]*>(.*?)<\/tr>/gs;
  const cellRegex = /<t[dh][^>]*>(.*?)<\/t[dh]>/gs;

  let match;
  let isHeader = true;

  while ((match = tableRegex.exec(html)) !== null) {
    const rowHtml = match[1];
    const cells = [];
    let cellMatch;

    while ((cellMatch = cellRegex.exec(rowHtml)) !== null) {
      // Strip nested HTML tags, then decode the common HTML entities.
      // BUG FIX: these replacements were previously no-ops (e.g.
      // .replace(/&/g, '&')); the entity names were lost. '&amp;' is
      // decoded last so sequences like '&amp;lt;' are not double-decoded.
      const cellContent = cellMatch[1]
        .replace(/<[^>]*>/g, '')
        .replace(/&nbsp;/g, ' ')
        .replace(/&lt;/g, '<')
        .replace(/&gt;/g, '>')
        .replace(/&amp;/g, '&')
        .trim();

      cells.push(cellContent);
    }

    // Skip the header row and any empty <tr> elements.
    if (!isHeader && cells.length > 0) {
      rows.push(cells);
    }
    isHeader = false;
  }

  // Map positional cells to named filament fields (Serbian column names
  // preserved from the Confluence table); missing trailing cells become ''.
  return rows.map(row => ({
    brand: row[0] || '',
    tip: row[1] || '',
    finish: row[2] || '',
    boja: row[3] || '',
    refill: row[4] || '',
    vakum: row[5] || '',
    otvoreno: row[6] || '',
    kolicina: row[7] || '',
    cena: row[8] || ''
  }));
}
|
|
|
|
/**
 * Fallback data source: loads filament records from ./public/data.json and
 * stamps each with a fresh UUID plus createdAt/updatedAt timestamps.
 *
 * @returns {Promise<Array<Object>>} decorated filament records
 * @throws rethrows any read/parse error after logging it
 */
async function migrateToLocalJSON() {
  try {
    console.log('Migrating to local JSON file for testing...');

    // For now, use the mock data we created
    const fs = require('fs');
    const raw = fs.readFileSync('./public/data.json', 'utf8');

    const filaments = JSON.parse(raw).map(item => ({
      id: uuidv4(),
      ...item,
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString()
    }));

    console.log(`Found ${filaments.length} filaments to migrate`);
    return filaments;
  } catch (error) {
    console.error('Error reading local data:', error);
    throw error;
  }
}
|
|
|
|
/**
 * Writes filament records to DynamoDB in batches of 25 (the BatchWriteItem
 * limit). BUG FIX: the previous version ignored UnprocessedItems in the
 * batchWrite response, silently dropping throttled records; this version
 * resubmits them with exponential backoff.
 *
 * Exits the process when the target table does not exist.
 *
 * @param {Array<Object>} filaments - records to write; each must contain
 *   the table's key attributes
 */
async function migrateToDynamoDB(filaments) {
  console.log(`Migrating ${filaments.length} filaments to DynamoDB...`);

  // Fail fast if the table has not been provisioned yet.
  try {
    const dynamo = new AWS.DynamoDB();
    await dynamo.describeTable({ TableName: TABLE_NAME }).promise();
    console.log(`Table ${TABLE_NAME} exists`);
  } catch (error) {
    if (error.code === 'ResourceNotFoundException') {
      console.error(`Table ${TABLE_NAME} does not exist. Please run Terraform first.`);
      process.exit(1);
    }
    throw error;
  }

  // BatchWriteItem accepts at most 25 put requests per call.
  const chunks = [];
  for (let i = 0; i < filaments.length; i += 25) {
    chunks.push(filaments.slice(i, i + 25));
  }

  for (const chunk of chunks) {
    let requests = chunk.map(item => ({ PutRequest: { Item: item } }));

    try {
      // batchWrite can succeed while still rejecting some items
      // (throttling, capacity); keep resubmitting UnprocessedItems.
      let attempts = 0;
      while (requests.length > 0) {
        const result = await dynamodb
          .batchWrite({ RequestItems: { [TABLE_NAME]: requests } })
          .promise();

        requests = (result.UnprocessedItems && result.UnprocessedItems[TABLE_NAME]) || [];
        if (requests.length > 0) {
          attempts += 1;
          if (attempts > 5) {
            throw new Error(`${requests.length} items still unprocessed after ${attempts} retries`);
          }
          // Exponential backoff before retrying the leftovers.
          await new Promise(resolve => setTimeout(resolve, 100 * 2 ** attempts));
        }
      }
      console.log(`Migrated ${chunk.length} items`);
    } catch (error) {
      console.error('Error writing batch:', error);
      throw error;
    }
  }

  console.log('Migration completed successfully!');
}
|
|
|
|
/**
 * Entry point: pulls filament data from Confluence when credentials are
 * configured (otherwise from the local JSON fixture), writes it to DynamoDB,
 * and prints a row-count verification. Exits with code 1 on any failure.
 */
async function main() {
  try {
    const hasConfluenceConfig = CONFLUENCE_API_URL && CONFLUENCE_TOKEN && CONFLUENCE_PAGE_ID;

    let filaments;
    if (hasConfluenceConfig) {
      // Fetch from Confluence and decorate each row with id + timestamps.
      const confluenceData = await fetchConfluenceData();
      filaments = confluenceData.map(item => ({
        id: uuidv4(),
        ...item,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString()
      }));
    } else {
      console.log('Confluence credentials not found, using local data...');
      filaments = await migrateToLocalJSON();
    }

    // Migrate to DynamoDB
    await migrateToDynamoDB(filaments);

    // Verify migration by counting what actually landed in the table.
    const result = await dynamodb
      .scan({ TableName: TABLE_NAME, Select: 'COUNT' })
      .promise();
    console.log(`\nVerification: ${result.Count} items in DynamoDB`);
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  }
}
|
|
|
|
// Run migration only when this script is executed directly
// (not when it is require()d as a module).
if (require.main === module) {
  main();
}