Fix production environment variables
- Remove old Confluence variables - Add NEXT_PUBLIC_API_URL for API access 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
84
scripts/README.md
Normal file
84
scripts/README.md
Normal file
@@ -0,0 +1,84 @@
|
||||
# Data Migration Scripts

This directory contains scripts for migrating filament data from Confluence to DynamoDB.

## Prerequisites

1. AWS credentials configured (either via the AWS CLI or environment variables)
2. DynamoDB table created via Terraform
3. Confluence API credentials (only if migrating from Confluence)

## Setup

```bash
cd scripts
npm install
```

## Configuration

Create a `.env.local` file in the project root with:

```env
# AWS Configuration
AWS_REGION=eu-central-1
DYNAMODB_TABLE_NAME=filamenteka-filaments

# Confluence Configuration (optional)
CONFLUENCE_API_URL=https://your-domain.atlassian.net
CONFLUENCE_TOKEN=your-email:your-api-token
CONFLUENCE_PAGE_ID=your-page-id
```

## Usage

### Migrate from local data (data.json)

```bash
npm run migrate
```

### Clear existing data and migrate

```bash
npm run migrate:clear
```

### Manual execution

```bash
# Migrate without clearing
node migrate-with-parser.js

# Clear existing data first
node migrate-with-parser.js --clear
```

## What the script does

1. **Checks for Confluence credentials**
   - If found: fetches data from the Confluence page
   - If not found: uses the local `public/data.json` file

2. **Parses the data**
   - Extracts filament information from the HTML table (Confluence)
   - Or reads the JSON directly (local file)

3. **Prepares the data for DynamoDB**
   - Generates a unique ID for each filament
   - Adds timestamps (createdAt, updatedAt)

4. **Writes to DynamoDB**
   - Writes in batches of 25 items (DynamoDB limit)
   - Shows progress during the migration

5. **Verifies the migration**
   - Counts the total items in DynamoDB
   - Shows a sample item for verification

## Troubleshooting

- **Table not found**: Make sure you've run `terraform apply` first
- **Access denied**: Check your AWS credentials and permissions
- **Confluence errors**: Verify your API token and page ID
- **Empty migration**: Check that the Confluence page has a table with the expected format
|
||||
13
scripts/generate-password-hash.js
Normal file
13
scripts/generate-password-hash.js
Normal file
@@ -0,0 +1,13 @@
|
||||
// Generate a bcrypt hash for the admin password, formatted for terraform.tfvars.
//
// Usage: node generate-password-hash.js <password>
const bcrypt = require('bcryptjs');

// The password is the first (and only) CLI argument.
const [, , password] = process.argv;

if (!password) {
  console.error('Usage: node generate-password-hash.js <password>');
  process.exit(1);
}

// Work factor 10 (bcryptjs' common default cost).
const hash = bcrypt.hashSync(password, 10);

console.log('Password hash:', hash);
console.log('\nAdd this to your terraform.tfvars:');
console.log(`admin_password_hash = "${hash}"`);
|
||||
194
scripts/migrate-confluence-to-dynamo.js
Normal file
194
scripts/migrate-confluence-to-dynamo.js
Normal file
@@ -0,0 +1,194 @@
|
||||
#!/usr/bin/env node

// Migrate filament data from a Confluence page (or a local JSON fallback)
// into the DynamoDB table managed by Terraform.

require('dotenv').config({ path: '.env.local' });
const axios = require('axios');
const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');

// Region is configurable via the environment; defaults to eu-central-1.
AWS.config.update({
  region: process.env.AWS_REGION || 'eu-central-1',
});

const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';

// Confluence credentials are optional; when absent the script falls back
// to the local public/data.json file. CONFLUENCE_TOKEN is "email:api-token".
const CONFLUENCE_API_URL = process.env.CONFLUENCE_API_URL;
const CONFLUENCE_TOKEN = process.env.CONFLUENCE_TOKEN;
const CONFLUENCE_PAGE_ID = process.env.CONFLUENCE_PAGE_ID;
|
||||
|
||||
// Download the Confluence page body (storage format) and parse the filament
// table out of it. Returns an array of filament objects; rethrows any
// HTTP/API failure after logging it.
async function fetchConfluenceData() {
  try {
    console.log('Fetching data from Confluence...');

    const url = `${CONFLUENCE_API_URL}/wiki/rest/api/content/${CONFLUENCE_PAGE_ID}?expand=body.storage`;
    const response = await axios.get(url, {
      headers: {
        // CONFLUENCE_TOKEN is "email:api-token"; Confluence expects it Basic-encoded.
        'Authorization': `Basic ${Buffer.from(CONFLUENCE_TOKEN).toString('base64')}`,
        'Accept': 'application/json'
      }
    });

    return parseConfluenceTable(response.data.body.storage.value);
  } catch (error) {
    console.error('Error fetching from Confluence:', error.message);
    throw error;
  }
}
|
||||
|
||||
// Parse a Confluence "storage format" HTML table into filament row objects.
//
// Lightweight regex-based parser (migrate-with-parser.js has the proper
// cheerio implementation). The first <tr> is assumed to be the header row
// and is skipped.
//
// Fix: the HTML-entity decoding chain had degenerated into no-op
// replacements (e.g. replacing '&' with '&'); it now decodes the entities
// Confluence actually emits: &nbsp;, &lt;, &gt;, &amp;.
function parseConfluenceTable(html) {
  const rows = [];
  const tableRegex = /<tr[^>]*>(.*?)<\/tr>/gs;
  const cellRegex = /<t[dh][^>]*>(.*?)<\/t[dh]>/gs;

  let match;
  let isHeader = true;

  while ((match = tableRegex.exec(html)) !== null) {
    const rowHtml = match[1];
    const cells = [];
    let cellMatch;

    while ((cellMatch = cellRegex.exec(rowHtml)) !== null) {
      // Strip nested tags, then decode entities.
      // '&amp;' must be decoded LAST so '&amp;lt;' does not become '<'.
      const cellContent = cellMatch[1]
        .replace(/<[^>]*>/g, '')
        .replace(/&nbsp;/g, ' ')
        .replace(/&lt;/g, '<')
        .replace(/&gt;/g, '>')
        .replace(/&amp;/g, '&')
        .trim();
      cells.push(cellContent);
    }

    if (!isHeader && cells.length > 0) {
      rows.push(cells);
    }
    isHeader = false;
  }

  // Column order matches the Confluence table layout; missing trailing
  // cells default to the empty string.
  return rows.map(row => ({
    brand: row[0] || '',
    tip: row[1] || '',
    finish: row[2] || '',
    boja: row[3] || '',
    refill: row[4] || '',
    vakum: row[5] || '',
    otvoreno: row[6] || '',
    kolicina: row[7] || '',
    cena: row[8] || ''
  }));
}
|
||||
|
||||
// Load filaments from the local public/data.json mock file and stamp each
// record with a unique id and created/updated timestamps.
// Returns the prepared array; rethrows if the file is missing or invalid.
async function migrateToLocalJSON() {
  const fs = require('fs');
  try {
    console.log('Migrating to local JSON file for testing...');

    // For now this reads the mock dataset shipped with the app.
    const records = JSON.parse(fs.readFileSync('./public/data.json', 'utf8'));

    const filaments = records.map(record => ({
      id: uuidv4(),
      ...record,
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    }));

    console.log(`Found ${filaments.length} filaments to migrate`);
    return filaments;
  } catch (error) {
    console.error('Error reading local data:', error);
    throw error;
  }
}
|
||||
|
||||
// Batch-write the given filament items into DynamoDB.
// Exits the process with a hint when the Terraform-managed table is absent.
async function migrateToDynamoDB(filaments) {
  console.log(`Migrating ${filaments.length} filaments to DynamoDB...`);

  // Fail fast if the table has not been created yet.
  try {
    await new AWS.DynamoDB().describeTable({ TableName: TABLE_NAME }).promise();
    console.log(`Table ${TABLE_NAME} exists`);
  } catch (error) {
    if (error.code === 'ResourceNotFoundException') {
      console.error(`Table ${TABLE_NAME} does not exist. Please run Terraform first.`);
      process.exit(1);
    }
    throw error;
  }

  // DynamoDB batchWrite accepts at most 25 items per call.
  const BATCH_SIZE = 25;
  for (let start = 0; start < filaments.length; start += BATCH_SIZE) {
    const batch = filaments.slice(start, start + BATCH_SIZE);
    try {
      await dynamodb.batchWrite({
        RequestItems: {
          [TABLE_NAME]: batch.map(item => ({ PutRequest: { Item: item } })),
        },
      }).promise();
      console.log(`Migrated ${batch.length} items`);
    } catch (error) {
      console.error('Error writing batch:', error);
      throw error;
    }
  }

  console.log('Migration completed successfully!');
}
|
||||
|
||||
// Entry point: choose the data source (Confluence when credentials are set,
// otherwise the local mock file), migrate, then verify the item count.
async function main() {
  try {
    let filaments;

    if (CONFLUENCE_API_URL && CONFLUENCE_TOKEN && CONFLUENCE_PAGE_ID) {
      // Pull live data from Confluence and prepare it for DynamoDB.
      const confluenceData = await fetchConfluenceData();
      filaments = confluenceData.map(item => ({
        id: uuidv4(),
        ...item,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
      }));
    } else {
      console.log('Confluence credentials not found, using local data...');
      filaments = await migrateToLocalJSON();
    }

    await migrateToDynamoDB(filaments);

    // Verify by counting what actually landed in the table.
    const result = await dynamodb
      .scan({ TableName: TABLE_NAME, Select: 'COUNT' })
      .promise();
    console.log(`\nVerification: ${result.Count} items in DynamoDB`);
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  }
}

// Run only when invoked directly (not when required as a module).
if (require.main === module) {
  main();
}
|
||||
241
scripts/migrate-with-parser.js
Normal file
241
scripts/migrate-with-parser.js
Normal file
@@ -0,0 +1,241 @@
|
||||
#!/usr/bin/env node

// Confluence -> DynamoDB migration using a proper HTML parser (cheerio).
// Falls back to the local mock data when Confluence credentials are absent.

require('dotenv').config({ path: '.env.local' });
const axios = require('axios');
const AWS = require('aws-sdk');
const { v4: uuidv4 } = require('uuid');
const cheerio = require('cheerio');

// Region is configurable via the environment; defaults to eu-central-1.
AWS.config.update({
  region: process.env.AWS_REGION || 'eu-central-1',
});

const dynamodb = new AWS.DynamoDB.DocumentClient();
const TABLE_NAME = process.env.DYNAMODB_TABLE_NAME || 'filamenteka-filaments';

// Optional Confluence credentials (CONFLUENCE_TOKEN is "email:api-token").
const CONFLUENCE_API_URL = process.env.CONFLUENCE_API_URL;
const CONFLUENCE_TOKEN = process.env.CONFLUENCE_TOKEN;
const CONFLUENCE_PAGE_ID = process.env.CONFLUENCE_PAGE_ID;
|
||||
|
||||
// Fetch the Confluence page body in storage format and hand it to the
// table parser. Returns an array of filament objects; rethrows any
// HTTP/API failure after logging it.
async function fetchConfluenceData() {
  try {
    console.log('Fetching data from Confluence...');

    const url = `${CONFLUENCE_API_URL}/wiki/rest/api/content/${CONFLUENCE_PAGE_ID}?expand=body.storage`;
    const response = await axios.get(url, {
      headers: {
        // Basic auth over "email:api-token" per Confluence Cloud convention.
        'Authorization': `Basic ${Buffer.from(CONFLUENCE_TOKEN).toString('base64')}`,
        'Accept': 'application/json'
      }
    });

    return parseConfluenceTable(response.data.body.storage.value);
  } catch (error) {
    console.error('Error fetching from Confluence:', error.message);
    throw error;
  }
}
|
||||
|
||||
// Parse the Confluence storage-format HTML with cheerio and return the
// filament rows as plain objects. The first table row is treated as the
// header and skipped; rows with fewer than 9 cells are ignored, as are
// rows carrying no brand/color data.
function parseConfluenceTable(html) {
  const $ = cheerio.load(html);
  const filaments = [];

  // Column order matches the Confluence table layout.
  const columns = ['brand', 'tip', 'finish', 'boja', 'refill', 'vakum', 'otvoreno', 'kolicina', 'cena'];

  $('table').find('tr').each((rowIndex, row) => {
    if (rowIndex === 0) return; // skip the header row

    const cells = $(row).find('td');
    if (cells.length < 9) return;

    const filament = {};
    columns.forEach((key, i) => {
      filament[key] = $(cells[i]).text().trim();
    });

    // Keep only rows that carry real data.
    if (filament.brand || filament.boja) {
      filaments.push(filament);
    }
  });

  return filaments;
}
|
||||
|
||||
// Delete every item from the DynamoDB table.
//
// Fix: a single Scan call returns at most 1 MB of data, so the key
// collection now follows LastEvaluatedKey to page through the ENTIRE
// table; previously a large table would only be partially cleared.
async function clearDynamoTable() {
  console.log('Clearing existing data from DynamoDB...');

  try {
    // Collect the key of every item, paging through the scan.
    const keys = [];
    let lastEvaluatedKey;
    do {
      const scanResult = await dynamodb.scan({
        TableName: TABLE_NAME,
        ProjectionExpression: 'id',
        ExclusiveStartKey: lastEvaluatedKey,
      }).promise();
      keys.push(...scanResult.Items);
      lastEvaluatedKey = scanResult.LastEvaluatedKey;
    } while (lastEvaluatedKey);

    if (keys.length === 0) {
      console.log('Table is already empty');
      return;
    }

    const deleteRequests = keys.map(item => ({
      DeleteRequest: { Key: { id: item.id } },
    }));

    // DynamoDB batchWrite supports max 25 items per request.
    // NOTE(review): UnprocessedItems are not retried — acceptable for a
    // one-off migration script, but verify counts afterwards.
    for (let i = 0; i < deleteRequests.length; i += 25) {
      const batch = deleteRequests.slice(i, i + 25);
      await dynamodb.batchWrite({
        RequestItems: {
          [TABLE_NAME]: batch,
        },
      }).promise();
      console.log(`Deleted ${batch.length} items`);
    }

    console.log('Table cleared successfully');
  } catch (error) {
    console.error('Error clearing table:', error);
    throw error;
  }
}
|
||||
|
||||
// Stamp each filament with a unique id plus audit timestamps and
// batch-write everything into DynamoDB.
// Returns the number of items written; exits the process when the
// Terraform-managed table does not exist yet.
async function migrateToDynamoDB(filaments) {
  console.log(`Migrating ${filaments.length} filaments to DynamoDB...`);

  // Fail fast with a hint when the table is missing.
  try {
    await new AWS.DynamoDB().describeTable({ TableName: TABLE_NAME }).promise();
    console.log(`Table ${TABLE_NAME} exists`);
  } catch (error) {
    if (error.code === 'ResourceNotFoundException') {
      console.error(`Table ${TABLE_NAME} does not exist. Please run Terraform first.`);
      process.exit(1);
    }
    throw error;
  }

  // Prepare DynamoDB items: unique id plus createdAt/updatedAt.
  const itemsToInsert = filaments.map(item => ({
    id: uuidv4(),
    ...item,
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
  }));

  // batchWrite accepts at most 25 items per call.
  const BATCH_SIZE = 25;
  let totalMigrated = 0;
  for (let start = 0; start < itemsToInsert.length; start += BATCH_SIZE) {
    const batch = itemsToInsert.slice(start, start + BATCH_SIZE);
    try {
      await dynamodb.batchWrite({
        RequestItems: {
          [TABLE_NAME]: batch.map(item => ({ PutRequest: { Item: item } })),
        },
      }).promise();
      totalMigrated += batch.length;
      console.log(`Migrated ${totalMigrated}/${itemsToInsert.length} items`);
    } catch (error) {
      console.error('Error writing batch:', error);
      throw error;
    }
  }

  console.log('Migration completed successfully!');
  return totalMigrated;
}
|
||||
|
||||
// Entry point: optionally clear the table (--clear), load the data
// (Confluence when credentials are set, otherwise the local mock file),
// migrate it, then print verification output.
async function main() {
  try {
    // --clear wipes existing table contents before migrating.
    if (process.argv.includes('--clear')) {
      await clearDynamoTable();
    }

    let filaments;
    if (CONFLUENCE_API_URL && CONFLUENCE_TOKEN && CONFLUENCE_PAGE_ID) {
      console.log('Using Confluence as data source');
      filaments = await fetchConfluenceData();
    } else {
      console.log('Confluence credentials not found, using local mock data...');
      const fs = require('fs');
      filaments = JSON.parse(fs.readFileSync('../public/data.json', 'utf8'));
    }

    console.log(`Found ${filaments.length} filaments to migrate`);

    // Show one record so the operator can sanity-check the parse.
    if (filaments.length > 0) {
      console.log('\nSample data:');
      console.log(JSON.stringify(filaments[0], null, 2));
    }

    await migrateToDynamoDB(filaments);

    // Verify: total count, then one sample item read back from the table.
    const countResult = await dynamodb
      .scan({ TableName: TABLE_NAME, Select: 'COUNT' })
      .promise();
    console.log(`\nVerification: ${countResult.Count} total items now in DynamoDB`);

    const sampleResult = await dynamodb
      .scan({ TableName: TABLE_NAME, Limit: 1 })
      .promise();
    if (sampleResult.Items.length > 0) {
      console.log('\nSample from DynamoDB:');
      console.log(JSON.stringify(sampleResult.Items[0], null, 2));
    }
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  }
}

// Run only when executed directly (not when required as a module).
if (require.main === module) {
  console.log('Confluence to DynamoDB Migration Tool');
  console.log('=====================================');
  console.log('Usage: node migrate-with-parser.js [--clear]');
  console.log('  --clear: Clear existing data before migration\n');

  main();
}
|
||||
1019
scripts/package-lock.json
generated
Normal file
1019
scripts/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
16
scripts/package.json
Normal file
16
scripts/package.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
  "name": "filamenteka-scripts",
  "version": "1.0.0",
  "description": "Migration and utility scripts for Filamenteka",
  "scripts": {
    "migrate": "node migrate-with-parser.js",
    "migrate:clear": "node migrate-with-parser.js --clear"
  },
  "dependencies": {
    "aws-sdk": "^2.1472.0",
    "axios": "^1.6.2",
    "bcryptjs": "^2.4.3",
    "cheerio": "^1.0.0-rc.12",
    "dotenv": "^16.3.1",
    "uuid": "^9.0.1"
  }
}
|
||||
Reference in New Issue
Block a user