Update package dependencies and refactor API files to implement database interactions for analytics, authentication, collections, ratings, and scripts. Enhance user management and script handling with improved error handling and validation. Introduce database schema for structured data storage and retrieval.
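The refactored API endpoints are expected to follow the same pattern used by the setup script below: parameterized mysql2 queries against the new schema, with input validated before it reaches the database. The sketch that follows is illustrative only; the handler name, the shared pool, and the upsert strategy are assumptions, not code contained in this commit.

// Hypothetical example of the intended pattern (not part of this commit).
const mysql = require('mysql2/promise');
const { nanoid } = require('nanoid');

// A shared pool configured like dbConfig in setup-database-v2.cjs below.
const pool = mysql.createPool({
  host: '192.168.1.146',
  port: 5444,
  user: 'root',
  password: process.env.DB_PASSWORD, // assumed env var, for illustration
  database: 'scriptshare',
});

// Upsert a user's rating; relies on the unique (script_id, user_id) key on ratings.
async function rateScript(scriptId, userId, value) {
  if (!Number.isInteger(value) || value < 1 || value > 5) {
    throw new Error('rating must be an integer between 1 and 5');
  }
  await pool.execute(
    `INSERT INTO ratings (id, script_id, user_id, rating)
     VALUES (?, ?, ?, ?)
     ON DUPLICATE KEY UPDATE rating = VALUES(rating)`,
    [nanoid(), scriptId, userId, value]
  );
}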
setup-database-v2.cjs (new file, 490 lines)
@@ -0,0 +1,490 @@
#!/usr/bin/env node

const mysql = require('mysql2/promise');
const { nanoid } = require('nanoid');

// Database configuration
const dbConfig = {
  host: '192.168.1.146',
  port: 5444,
  user: 'root',
  password: 'j3bv5YmVN4CVwLmoMV6oVIMF62hhc8pBRaSWrIWvLIKIdZOAkNFbUa3ntKwCKABC',
  database: 'scriptshare',
};
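// The connection settings above are hard-coded; an equivalent configuration
// could read them from the environment instead. The variable names below are
// an assumption for illustration; nothing else in this script depends on them:
//
//   const dbConfig = {
//     host: process.env.DB_HOST || 'localhost',
//     port: Number(process.env.DB_PORT) || 3306,
//     user: process.env.DB_USER || 'root',
//     password: process.env.DB_PASSWORD || '',
//     database: process.env.DB_NAME || 'scriptshare',
//   };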

// SQL to create tables (individual queries)
const createTableQueries = [
  `CREATE TABLE IF NOT EXISTS users (
    id VARCHAR(255) PRIMARY KEY,
    email VARCHAR(255) NOT NULL UNIQUE,
    username VARCHAR(100) NOT NULL UNIQUE,
    display_name VARCHAR(100) NOT NULL,
    avatar_url VARCHAR(500),
    bio TEXT,
    is_admin BOOLEAN DEFAULT FALSE,
    is_moderator BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    INDEX email_idx (email),
    INDEX username_idx (username)
  )`,

  `CREATE TABLE IF NOT EXISTS scripts (
    id VARCHAR(255) PRIMARY KEY,
    name VARCHAR(200) NOT NULL,
    description TEXT NOT NULL,
    content TEXT NOT NULL,
    compatible_os JSON NOT NULL,
    categories JSON NOT NULL,
    tags JSON,
    git_repository_url VARCHAR(500),
    author_id VARCHAR(255) NOT NULL,
    author_name VARCHAR(100) NOT NULL,
    view_count INT DEFAULT 0,
    download_count INT DEFAULT 0,
    rating DECIMAL(3,2) DEFAULT 0,
    rating_count INT DEFAULT 0,
    is_approved BOOLEAN DEFAULT FALSE,
    is_public BOOLEAN DEFAULT TRUE,
    version VARCHAR(20) DEFAULT '1.0.0',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    INDEX author_idx (author_id),
    INDEX approved_idx (is_approved),
    INDEX public_idx (is_public),
    INDEX created_at_idx (created_at)
  )`,

  `CREATE TABLE IF NOT EXISTS script_versions (
    id VARCHAR(255) PRIMARY KEY,
    script_id VARCHAR(255) NOT NULL,
    version VARCHAR(20) NOT NULL,
    content TEXT NOT NULL,
    changelog TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    created_by VARCHAR(255) NOT NULL,
    INDEX script_idx (script_id),
    INDEX version_idx (version)
  )`,

  `CREATE TABLE IF NOT EXISTS ratings (
    id VARCHAR(255) PRIMARY KEY,
    script_id VARCHAR(255) NOT NULL,
    user_id VARCHAR(255) NOT NULL,
    rating INT NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    INDEX script_idx (script_id),
    INDEX user_idx (user_id),
    UNIQUE KEY unique_rating (script_id, user_id)
  )`,

  `CREATE TABLE IF NOT EXISTS script_collections (
    id VARCHAR(255) PRIMARY KEY,
    name VARCHAR(200) NOT NULL,
    description TEXT,
    author_id VARCHAR(255) NOT NULL,
    is_public BOOLEAN DEFAULT TRUE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    INDEX author_idx (author_id),
    INDEX public_idx (is_public)
  )`,

  `CREATE TABLE IF NOT EXISTS collection_scripts (
    id VARCHAR(255) PRIMARY KEY,
    collection_id VARCHAR(255) NOT NULL,
    script_id VARCHAR(255) NOT NULL,
    added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    INDEX collection_idx (collection_id),
    INDEX script_idx (script_id)
  )`,

  `CREATE TABLE IF NOT EXISTS script_analytics (
    id VARCHAR(255) PRIMARY KEY,
    script_id VARCHAR(255) NOT NULL,
    event_type VARCHAR(50) NOT NULL,
    user_id VARCHAR(255),
    user_agent TEXT,
    ip_address VARCHAR(45),
    referrer VARCHAR(500),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    INDEX script_idx (script_id),
    INDEX event_idx (event_type),
    INDEX user_idx (user_id),
    INDEX created_at_idx (created_at)
  )`
];
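
// Because compatible_os, categories and tags are JSON columns, the API layer
// can filter scripts with MariaDB's JSON functions (available since 10.2).
// Illustrative query, not executed by this script:
//
//   SELECT id, name
//   FROM scripts
//   WHERE is_approved = TRUE
//     AND JSON_CONTAINS(compatible_os, '"linux"');
//
// With mysql2 the same filter would look roughly like:
//   connection.execute(
//     'SELECT id, name FROM scripts WHERE is_approved = TRUE AND JSON_CONTAINS(compatible_os, ?)',
//     [JSON.stringify('linux')]
//   );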

// Generate demo data
const generateDemoData = () => {
  const users = [
    {
      id: nanoid(),
      email: 'admin@scriptshare.com',
      username: 'admin',
      display_name: 'Admin User',
      avatar_url: 'https://api.dicebear.com/7.x/avataaars/svg?seed=admin',
      bio: 'Platform administrator',
      is_admin: true,
      is_moderator: true,
    },
    {
      id: nanoid(),
      email: 'john.doe@example.com',
      username: 'johndoe',
      display_name: 'John Doe',
      avatar_url: 'https://api.dicebear.com/7.x/avataaars/svg?seed=john',
      bio: 'Full-stack developer and automation enthusiast',
      is_admin: false,
      is_moderator: false,
    },
    {
      id: nanoid(),
      email: 'jane.smith@example.com',
      username: 'janesmith',
      display_name: 'Jane Smith',
      avatar_url: 'https://api.dicebear.com/7.x/avataaars/svg?seed=jane',
      bio: 'DevOps engineer who loves scripting',
      is_admin: false,
      is_moderator: true,
    },
  ];

  const scripts = [
    {
      id: nanoid(),
      name: 'System Monitor Dashboard',
      description: 'A comprehensive system monitoring script that displays CPU, memory, disk usage, and network statistics in a beautiful dashboard format.',
      content: `#!/bin/bash

# System Monitor Dashboard
# Displays real-time system statistics

echo "=== SYSTEM MONITOR DASHBOARD ==="
echo "Generated: $(date)"
echo "==============================="

# CPU Usage
echo "📊 CPU Usage:"
top -bn1 | grep "Cpu(s)" | awk '{print $2 $3}' | awk -F'%' '{print $1"%"}'

# Memory Usage
echo "💾 Memory Usage:"
free -h | awk 'NR==2{printf "Used: %s/%s (%.2f%%)", $3,$2,$3*100/$2 }'

# Disk Usage
echo "💿 Disk Usage:"
df -h | awk '$NF=="/"{printf "Used: %s/%s (%s)", $3,$2,$5}'

# Network Stats
echo "🌐 Network Statistics:"
cat /proc/net/dev | awk 'NR>2 {print $1 $2 $10}' | head -5

echo "==============================="`,
      compatible_os: ['linux', 'macos'],
      categories: ['monitoring', 'system'],
      tags: ['bash', 'system-info', 'dashboard'],
      git_repository_url: 'https://github.com/example/system-monitor',
      author_id: users[1].id,
      author_name: users[1].display_name,
      view_count: 245,
      download_count: 89,
      rating: 4.5,
      rating_count: 12,
      is_approved: true,
      is_public: true,
      version: '2.1.0',
    },
    {
      id: nanoid(),
      name: 'Automated Backup Script',
      description: 'Intelligent backup solution that automatically backs up specified directories to multiple destinations with compression and encryption.',
      content: `#!/bin/bash

# Automated Backup Script v1.5
# Creates encrypted backups with rotation

BACKUP_DIR="/path/to/backup"
SOURCE_DIRS=("/home/user/documents" "/home/user/projects")
RETENTION_DAYS=30

echo "🔒 Starting automated backup..."

for dir in "\${SOURCE_DIRS[@]}"; do
  if [ -d "$dir" ]; then
    timestamp=$(date +"%Y%m%d_%H%M%S")
    backup_name="backup_$(basename $dir)_$timestamp.tar.gz"

    echo "📦 Backing up $dir..."
    tar -czf "$BACKUP_DIR/$backup_name" "$dir"

    # Encrypt backup
    gpg --cipher-algo AES256 --compress-algo 1 --symmetric \\
      --output "$BACKUP_DIR/$backup_name.gpg" "$BACKUP_DIR/$backup_name"

    rm "$BACKUP_DIR/$backup_name"
    echo "✅ Backup completed: $backup_name.gpg"
  fi
done

# Cleanup old backups
find "$BACKUP_DIR" -name "*.gpg" -mtime +$RETENTION_DAYS -delete

echo "🎉 Backup process completed!"`,
      compatible_os: ['linux', 'macos'],
      categories: ['backup', 'automation'],
      tags: ['bash', 'backup', 'encryption', 'cron'],
      author_id: users[2].id,
      author_name: users[2].display_name,
      view_count: 156,
      download_count: 67,
      rating: 4.8,
      rating_count: 8,
      is_approved: true,
      is_public: true,
      version: '1.5.0',
    },
    {
      id: nanoid(),
      name: 'Development Environment Setup',
      description: 'One-click setup script for complete development environment including Node.js, Python, Docker, and essential tools.',
      content: `#!/bin/bash

# Development Environment Setup Script
# Sets up a complete development environment

echo "🚀 Setting up development environment..."

# Update system
echo "📦 Updating system packages..."
sudo apt update && sudo apt upgrade -y

# Install Node.js via NVM
echo "📗 Installing Node.js..."
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
source ~/.bashrc
nvm install --lts
nvm use --lts

# Install Python and pip
echo "🐍 Installing Python..."
sudo apt install python3 python3-pip -y

# Install Docker
echo "🐳 Installing Docker..."
curl -fsSL https://get.docker.com -o get-docker.sh
sudo sh get-docker.sh
sudo usermod -aG docker $USER

# Install VS Code
echo "💻 Installing VS Code..."
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
sudo install -o root -g root -m 644 packages.microsoft.gpg /etc/apt/trusted.gpg.d/
sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/trusted.gpg.d/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
sudo apt update && sudo apt install code -y

# Install essential tools
echo "🔧 Installing essential tools..."
sudo apt install git curl wget htop tree jq -y

echo "✅ Development environment setup complete!"
echo "Please log out and back in for Docker permissions to take effect."`,
      compatible_os: ['linux'],
      categories: ['development', 'setup'],
      tags: ['bash', 'setup', 'nodejs', 'python', 'docker'],
      author_id: users[1].id,
      author_name: users[1].display_name,
      view_count: 89,
      download_count: 34,
      rating: 4.2,
      rating_count: 5,
      is_approved: true,
      is_public: true,
      version: '1.0.0',
    },
    {
      id: nanoid(),
      name: 'Log Analyzer Pro',
      description: 'Advanced log file analyzer that searches for patterns, generates reports, and alerts on suspicious activities.',
      content: `#!/bin/bash

# Log Analyzer Pro v2.0
# Advanced log analysis and reporting tool

LOG_FILE="\${1:-/var/log/syslog}"
OUTPUT_DIR="\${2:-./reports}"
DATE_RANGE="\${3:-7}"

mkdir -p "$OUTPUT_DIR"

echo "🔍 Analyzing logs: $LOG_FILE"
echo "📊 Generating report for last $DATE_RANGE days"

# Generate timestamp for report
REPORT_TIME=$(date +"%Y%m%d_%H%M%S")
REPORT_FILE="$OUTPUT_DIR/log_analysis_$REPORT_TIME.txt"

echo "=== LOG ANALYSIS REPORT ===" > "$REPORT_FILE"
echo "Generated: $(date)" >> "$REPORT_FILE"
echo "Log file: $LOG_FILE" >> "$REPORT_FILE"
echo "=========================" >> "$REPORT_FILE"

# Error analysis
echo "🚨 Error Analysis:" >> "$REPORT_FILE"
grep -i "error\\|fail\\|critical" "$LOG_FILE" | tail -20 >> "$REPORT_FILE"

# Authentication attempts
echo "🔐 Authentication Events:" >> "$REPORT_FILE"
grep -i "auth\\|login\\|sudo" "$LOG_FILE" | tail -15 >> "$REPORT_FILE"

# Network connections
echo "🌐 Network Activity:" >> "$REPORT_FILE"
grep -i "connection\\|network\\|ssh" "$LOG_FILE" | tail -10 >> "$REPORT_FILE"

# Generate summary
TOTAL_LINES=$(wc -l < "$LOG_FILE")
ERROR_COUNT=$(grep -c -i "error" "$LOG_FILE")
WARNING_COUNT=$(grep -c -i "warning" "$LOG_FILE")

echo "📈 Summary Statistics:" >> "$REPORT_FILE"
echo "Total log entries: $TOTAL_LINES" >> "$REPORT_FILE"
echo "Errors found: $ERROR_COUNT" >> "$REPORT_FILE"
echo "Warnings found: $WARNING_COUNT" >> "$REPORT_FILE"

echo "✅ Analysis complete! Report saved: $REPORT_FILE"`,
      compatible_os: ['linux', 'macos'],
      categories: ['monitoring', 'security'],
      tags: ['bash', 'logs', 'analysis', 'security'],
      author_id: users[2].id,
      author_name: users[2].display_name,
      view_count: 123,
      download_count: 45,
      rating: 4.6,
      rating_count: 7,
      is_approved: true,
      is_public: true,
      version: '2.0.0',
    },
  ];

  return { users, scripts };
};

async function setupDatabase() {
  let connection;

  try {
    console.log('🔌 Connecting to MariaDB server...');
    // First connect without specifying a database
    const { database, ...dbConfigWithoutDb } = dbConfig;
    connection = await mysql.createConnection(dbConfigWithoutDb);

    console.log('✅ Connected to MariaDB server successfully!');

    // Create database if it doesn't exist
    console.log('🗄️ Creating scriptshare database...');
    await connection.execute('CREATE DATABASE IF NOT EXISTS scriptshare');
    // USE is not supported by the prepared-statement protocol, so run it as a plain query
    await connection.query('USE scriptshare');
    console.log('✅ Database scriptshare is ready!');

    // Create tables one by one
    console.log('📊 Creating database tables...');
    for (let i = 0; i < createTableQueries.length; i++) {
      const query = createTableQueries[i];
      const tableName = query.match(/CREATE TABLE IF NOT EXISTS (\w+)/)[1];
      console.log(`  Creating table: ${tableName}`);
      await connection.execute(query);
    }
    console.log('✅ All tables created successfully!');

    // Generate and insert demo data
    console.log('📝 Generating demo data...');
    const { users, scripts } = generateDemoData();

    // Insert users
    console.log('👥 Inserting demo users...');
    for (const user of users) {
      await connection.execute(
        'INSERT IGNORE INTO users (id, email, username, display_name, avatar_url, bio, is_admin, is_moderator) VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
        [
          user.id,
          user.email,
          user.username,
          user.display_name,
          user.avatar_url || null,
          user.bio || null,
          user.is_admin,
          user.is_moderator
        ]
      );
    }

    // Insert scripts
    console.log('📜 Inserting demo scripts...');
    for (const script of scripts) {
      await connection.execute(
        'INSERT IGNORE INTO scripts (id, name, description, content, compatible_os, categories, tags, git_repository_url, author_id, author_name, view_count, download_count, rating, rating_count, is_approved, is_public, version) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        [
          script.id,
          script.name,
          script.description,
          script.content,
          JSON.stringify(script.compatible_os),
          JSON.stringify(script.categories),
          JSON.stringify(script.tags || []),
          script.git_repository_url || null,
          script.author_id,
          script.author_name,
          script.view_count,
          script.download_count,
          script.rating,
          script.rating_count,
          script.is_approved,
          script.is_public,
          script.version
        ]
      );

      // Insert script version
      await connection.execute(
        'INSERT IGNORE INTO script_versions (id, script_id, version, content, changelog, created_by) VALUES (?, ?, ?, ?, ?, ?)',
        [nanoid(), script.id, script.version, script.content, 'Initial version', script.author_id]
      );
    }

    // Insert some demo ratings
    console.log('⭐ Inserting demo ratings...');
    const ratings = [
      { script_id: scripts[0].id, user_id: users[0].id, rating: 5 },
      { script_id: scripts[0].id, user_id: users[2].id, rating: 4 },
      { script_id: scripts[1].id, user_id: users[0].id, rating: 5 },
      { script_id: scripts[1].id, user_id: users[1].id, rating: 4 },
      { script_id: scripts[2].id, user_id: users[2].id, rating: 4 },
    ];

    for (const rating of ratings) {
      await connection.execute(
        'INSERT IGNORE INTO ratings (id, script_id, user_id, rating) VALUES (?, ?, ?, ?)',
        [nanoid(), rating.script_id, rating.user_id, rating.rating]
      );
    }

    console.log('🎉 Database setup completed successfully!');
    console.log('📊 Demo data inserted:');
    console.log(`  - ${users.length} users`);
    console.log(`  - ${scripts.length} scripts`);
    console.log(`  - ${ratings.length} ratings`);

  } catch (error) {
    console.error('❌ Database setup failed:', error);
    process.exit(1);
  } finally {
    if (connection) {
      await connection.end();
    }
  }
}

// Run the setup
setupDatabase();
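
// How to run (assumes Node.js 16+ with mysql2 installed and nanoid pinned to a
// 3.x release, since nanoid 4+ is ESM-only and cannot be loaded via require()):
//
//   npm install mysql2 nanoid@3
//   node setup-database-v2.cjs
//
// Tables are created with CREATE TABLE IF NOT EXISTS and users are deduplicated
// by their UNIQUE email/username, but each run generates fresh script ids, so
// re-running the script inserts the demo scripts again.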