Uploaded code
This commit is contained in:
25
scripts/clean.sh
Executable file
25
scripts/clean.sh
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/bin/bash

# Clean up temporary files and caches
# NOTE(review): every path below is relative to the caller's current
# working directory — run this from the project root.

echo "🧹 Cleaning up Sharey project..."

# Remove Python cache
echo "🐍 Removing Python cache..."
# `2>/dev/null || true` keeps the script going on permission errors
# or when nothing matches.
find . -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true
find . -name "*.pyc" -delete 2>/dev/null || true
find . -name "*.pyo" -delete 2>/dev/null || true

# Remove temporary files
echo "🗑️ Removing temporary files..."
rm -f *.tmp *.temp debug_*.html test_*.ppm

# Remove old log files (keep recent ones)
echo "📜 Cleaning old logs..."
# Only logs not modified for more than 7 days are deleted.
find logs/ -name "*.log" -mtime +7 -delete 2>/dev/null || true

# Remove backup files
echo "💾 Removing backup files..."
rm -f *.backup *.bak config.json.backup.*

echo "✅ Cleanup complete!"
|
||||
35
scripts/config.json
Normal file
35
scripts/config.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"b2": {
|
||||
"application_key_id": "your_key_id_here",
|
||||
"application_key": "your_application_key_here",
|
||||
"bucket_name": "your_bucket_name_here"
|
||||
},
|
||||
"flask": {
|
||||
"host": "127.0.0.1",
|
||||
"port": 8866,
|
||||
"debug": true
|
||||
},
|
||||
"upload": {
|
||||
"max_file_size_mb": 100,
|
||||
"allowed_extensions": [
|
||||
".jpg",
|
||||
".jpeg",
|
||||
".png",
|
||||
".gif",
|
||||
".pdf",
|
||||
".txt",
|
||||
".doc",
|
||||
".docx",
|
||||
".zip",
|
||||
".mp4",
|
||||
".mp3"
|
||||
]
|
||||
},
|
||||
"paste": {
|
||||
"max_length": 1000000
|
||||
},
|
||||
"security": {
|
||||
"rate_limit_enabled": false,
|
||||
"max_uploads_per_hour": 50
|
||||
}
|
||||
}
|
||||
450
scripts/migrate.py
Normal file
450
scripts/migrate.py
Normal file
@@ -0,0 +1,450 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Sharey Local-to-B2 Migration Script
|
||||
|
||||
This script migrates existing local files and pastes to Backblaze B2
|
||||
while preserving their original IDs and structure.
|
||||
|
||||
Sharey Naming Conventions:
|
||||
- Files: 6-char random ID + original extension (e.g., abc123.jpg)
|
||||
- Pastes: 6-char UUID prefix + .txt extension (e.g., def456.txt)
|
||||
- B2 Structure: files/{file_id} and pastes/{paste_id}.txt
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Tuple
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
from b2sdk.v2 import InMemoryAccountInfo, B2Api
|
||||
from config import config
|
||||
except ImportError as e:
|
||||
print(f"❌ Missing dependencies: {e}")
|
||||
print("💡 Make sure you're running this script in the same environment as your Sharey app")
|
||||
print("💡 Run: pip install -r requirements.txt")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
class ShareyMigrator:
|
||||
"""Handles migration of local Sharey files to B2"""
|
||||
|
||||
def __init__(self):
|
||||
self.b2_api = None
|
||||
self.bucket = None
|
||||
self.stats = {
|
||||
'files_migrated': 0,
|
||||
'pastes_migrated': 0,
|
||||
'files_skipped': 0,
|
||||
'pastes_skipped': 0,
|
||||
'errors': 0,
|
||||
'total_size': 0
|
||||
}
|
||||
self.migration_log = []
|
||||
|
||||
def initialize_b2(self) -> bool:
    """Initialize B2 connection.

    Validates settings via the project-level ``config`` object, authorizes
    against the "production" realm, and resolves the target bucket into
    ``self.bucket``.  Returns True on success, False on any failure
    (already reported to stdout).
    """
    print("🔧 Initializing B2 connection...")

    # Validate B2 configuration before attempting to talk to B2.
    if not config.validate_b2_config():
        print("❌ Invalid B2 configuration. Please check your config.json")
        return False

    try:
        b2_config = config.get_b2_config()
        print(f"📋 Target bucket: {b2_config['bucket_name']}")

        # Credentials live only in process memory for this run.
        info = InMemoryAccountInfo()
        self.b2_api = B2Api(info)
        self.b2_api.authorize_account("production", b2_config['key_id'], b2_config['key'])
        self.bucket = self.b2_api.get_bucket_by_name(b2_config['bucket_name'])
        print("✅ B2 connection established")
        return True

    except Exception as e:
        # Covers auth failures, a missing bucket, and transport errors alike.
        print(f"❌ Failed to connect to B2: {e}")
        return False
|
||||
|
||||
def scan_local_directories(self, base_path: str = ".") -> Tuple[List[str], List[str]]:
    """Locate local ``uploads/`` and ``pastes/`` trees under *base_path*.

    Returns a ``(file_paths, paste_paths)`` tuple of full paths.  Hidden
    files are skipped in both trees; anything containing the
    ``.sharey-meta`` marker is additionally skipped in uploads/.
    """
    print(f"🔍 Scanning for local files in: {os.path.abspath(base_path)}")

    uploads_root = os.path.join(base_path, "uploads")
    pastes_root = os.path.join(base_path, "pastes")

    found_files: List[str] = []
    found_pastes: List[str] = []

    # Scan uploads directory
    if os.path.exists(uploads_root):
        print(f"📁 Found uploads directory: {uploads_root}")
        for folder, _subdirs, names in os.walk(uploads_root):
            found_files.extend(
                os.path.join(folder, name)
                for name in names
                # Skip hidden files and Sharey metadata sidecar files.
                if not name.startswith('.') and '.sharey-meta' not in name
            )
        print(f"   Found {len(found_files)} files (skipped .sharey-meta files)")
    else:
        print(f"⚠️ No uploads directory found at: {uploads_root}")

    # Scan pastes directory
    if os.path.exists(pastes_root):
        print(f"📝 Found pastes directory: {pastes_root}")
        for folder, _subdirs, names in os.walk(pastes_root):
            found_pastes.extend(
                os.path.join(folder, name)
                for name in names
                if not name.startswith('.')  # skip hidden files
            )
        print(f"   Found {len(found_pastes)} pastes")
    else:
        print(f"⚠️ No pastes directory found at: {pastes_root}")

    return found_files, found_pastes
|
||||
|
||||
def extract_id_from_path(self, file_path: str, base_dir: str) -> str:
    """Extract the file ID from the file path.

    Args:
        file_path: Full path to the local file.
        base_dir: Root directory the path was discovered under.

    Returns:
        The filename stem (extension stripped), which Sharey uses as the
        object ID.  Non-6-character IDs are allowed but warned about.
    """
    # Get relative path from base directory
    rel_path = os.path.relpath(file_path, base_dir)

    # Extract filename without extension for ID
    filename = os.path.basename(rel_path)
    file_id = os.path.splitext(filename)[0]

    # Validate ID format (should be 6 characters for Sharey).
    # Fix: report the actual filename instead of the useless "(unknown)".
    if len(file_id) != 6:
        print(f"⚠️ Warning: {filename} has non-standard ID length ({len(file_id)} chars, expected 6)")

    return file_id
|
||||
|
||||
def file_exists_in_b2(self, b2_path: str) -> bool:
|
||||
"""Check if a file already exists in B2"""
|
||||
try:
|
||||
# Try different methods depending on B2 SDK version
|
||||
if hasattr(self.bucket, 'get_file_info_by_name'):
|
||||
file_info = self.bucket.get_file_info_by_name(b2_path)
|
||||
return True
|
||||
elif hasattr(self.bucket, 'ls'):
|
||||
for file_version, _ in self.bucket.ls(b2_path, recursive=False):
|
||||
if file_version.file_name == b2_path:
|
||||
return True
|
||||
return False
|
||||
else:
|
||||
# Fallback - assume doesn't exist to avoid skipping
|
||||
return False
|
||||
except:
|
||||
return False
|
||||
|
||||
def migrate_file(self, local_path: str, uploads_dir: str, dry_run: bool = False) -> bool:
    """Migrate a single file to B2.

    Args:
        local_path: Path of the local file to upload.
        uploads_dir: Root of the local uploads tree (used to derive the ID).
        dry_run: When True, only count/report what would be uploaded.

    Returns:
        True on success or skip; False when the upload failed (the error
        is counted in self.stats and appended to self.migration_log).
    """
    try:
        # Extract file ID and determine B2 path
        file_id = self.extract_id_from_path(local_path, uploads_dir)
        file_extension = os.path.splitext(local_path)[1]
        b2_path = f"files/{file_id}{file_extension}"

        # Check if file already exists in B2
        if self.file_exists_in_b2(b2_path):
            print(f"⏭️ Skipping {file_id} (already exists in B2)")
            self.stats['files_skipped'] += 1
            return True

        # Get file info
        file_size = os.path.getsize(local_path)
        content_type = mimetypes.guess_type(local_path)[0] or 'application/octet-stream'

        print(f"📤 Uploading file: {file_id}{file_extension} ({file_size:,} bytes)")

        if dry_run:
            print(f"   [DRY RUN] Would upload to: {b2_path}")
            self.stats['files_migrated'] += 1
            self.stats['total_size'] += file_size
            return True

        # Upload to B2 - try different methods for different SDK versions.
        # NOTE(review): the whole file is read into memory — fine for small
        # uploads, but large files will spike RSS.
        with open(local_path, 'rb') as file_data:
            data = file_data.read()

        # Try different upload methods
        try:
            # Method 1: upload_bytes (newer SDK)
            if hasattr(self.bucket, 'upload_bytes'):
                file_info = self.bucket.upload_bytes(
                    data,
                    b2_path,
                    content_type=content_type
                )
            # Method 2: upload with file-like object (older SDK)
            elif hasattr(self.bucket, 'upload_file'):
                from io import BytesIO
                file_obj = BytesIO(data)
                file_info = self.bucket.upload_file(
                    file_obj,
                    b2_path,
                    content_type=content_type
                )
            # Method 3: upload with upload source (alternative)
            elif hasattr(self.bucket, 'upload'):
                from io import BytesIO
                file_obj = BytesIO(data)
                file_info = self.bucket.upload(
                    file_obj,
                    b2_path,
                    content_type=content_type
                )
            else:
                raise Exception("No compatible upload method found in B2 SDK")

        except Exception as upload_error:
            # Re-wrap so the outer handler logs a uniform message.
            raise Exception(f"Upload failed: {upload_error}")

        self.stats['files_migrated'] += 1
        self.stats['total_size'] += file_size
        self.migration_log.append(f"FILE: {file_id}{file_extension} -> {b2_path}")
        print(f"   ✅ Uploaded successfully")
        return True

    except Exception as e:
        # Per-item failures are recorded but never abort the whole run.
        print(f"   ❌ Failed to upload {local_path}: {e}")
        self.stats['errors'] += 1
        self.migration_log.append(f"ERROR: {local_path} -> {e}")
        return False
|
||||
|
||||
def migrate_paste(self, local_path: str, pastes_dir: str, dry_run: bool = False) -> bool:
    """Migrate a single paste to B2.

    Pastes are re-encoded as UTF-8 text (undecodable bytes are dropped via
    ``errors='ignore'``) and stored under ``pastes/{paste_id}.txt``.

    Args:
        local_path: Path of the local paste file.
        pastes_dir: Root of the local pastes tree (used to derive the ID).
        dry_run: When True, only count/report what would be uploaded.

    Returns:
        True on success or skip; False when the upload failed.
    """
    try:
        # Extract paste ID and determine B2 path
        paste_id = self.extract_id_from_path(local_path, pastes_dir)
        b2_path = f"pastes/{paste_id}.txt"

        # Check if paste already exists in B2
        if self.file_exists_in_b2(b2_path):
            print(f"⏭️ Skipping paste {paste_id} (already exists in B2)")
            self.stats['pastes_skipped'] += 1
            return True

        # Get paste info
        file_size = os.path.getsize(local_path)

        print(f"📝 Uploading paste: {paste_id} ({file_size:,} bytes)")

        if dry_run:
            print(f"   [DRY RUN] Would upload to: {b2_path}")
            self.stats['pastes_migrated'] += 1
            self.stats['total_size'] += file_size
            return True

        # Read and upload paste content
        with open(local_path, 'r', encoding='utf-8', errors='ignore') as file:
            content = file.read()

        # Upload to B2 as UTF-8 text - try different methods.
        # NOTE: total_size counts the on-disk size, which can differ
        # slightly from the re-encoded payload size.
        data = content.encode('utf-8')

        try:
            # Method 1: upload_bytes (newer SDK)
            if hasattr(self.bucket, 'upload_bytes'):
                self.bucket.upload_bytes(
                    data,
                    b2_path,
                    content_type='text/plain; charset=utf-8'
                )
            # Method 2: upload with file-like object (older SDK)
            elif hasattr(self.bucket, 'upload_file'):
                from io import BytesIO
                file_obj = BytesIO(data)
                self.bucket.upload_file(
                    file_obj,
                    b2_path,
                    content_type='text/plain; charset=utf-8'
                )
            # Method 3: upload with upload source (alternative)
            elif hasattr(self.bucket, 'upload'):
                from io import BytesIO
                file_obj = BytesIO(data)
                self.bucket.upload(
                    file_obj,
                    b2_path,
                    content_type='text/plain; charset=utf-8'
                )
            else:
                raise Exception("No compatible upload method found in B2 SDK")

        except Exception as upload_error:
            # Re-wrap so the outer handler logs a uniform message.
            raise Exception(f"Upload failed: {upload_error}")

        self.stats['pastes_migrated'] += 1
        self.stats['total_size'] += file_size
        self.migration_log.append(f"PASTE: {paste_id} -> {b2_path}")
        print(f"   ✅ Uploaded successfully")
        return True

    except Exception as e:
        # Per-item failures are recorded but never abort the whole run.
        print(f"   ❌ Failed to upload paste {local_path}: {e}")
        self.stats['errors'] += 1
        self.migration_log.append(f"ERROR: {local_path} -> {e}")
        return False
|
||||
|
||||
def migrate_all(self, base_path: str = ".", dry_run: bool = False, skip_files: bool = False, skip_pastes: bool = False):
|
||||
"""Migrate all local files and pastes to B2"""
|
||||
if dry_run:
|
||||
print("🧪 DRY RUN MODE - No files will actually be uploaded")
|
||||
|
||||
print(f"\n🚀 Starting migration from: {os.path.abspath(base_path)}")
|
||||
print("=" * 60)
|
||||
|
||||
# Scan for local files
|
||||
file_paths, paste_paths = self.scan_local_directories(base_path)
|
||||
|
||||
if not file_paths and not paste_paths:
|
||||
print("❌ No files or pastes found to migrate")
|
||||
return False
|
||||
|
||||
total_items = len(file_paths) + len(paste_paths)
|
||||
print(f"\n📊 Migration Plan:")
|
||||
print(f" Files to migrate: {len(file_paths)}")
|
||||
print(f" Pastes to migrate: {len(paste_paths)}")
|
||||
print(f" Total items: {total_items}")
|
||||
|
||||
if not dry_run:
|
||||
confirm = input(f"\n❓ Proceed with migration? (y/N): ").strip().lower()
|
||||
if confirm != 'y':
|
||||
print("Migration cancelled")
|
||||
return False
|
||||
|
||||
print(f"\n🔄 Starting migration...")
|
||||
print("-" * 40)
|
||||
|
||||
# Migrate files
|
||||
if file_paths and not skip_files:
|
||||
print(f"\n📁 Migrating {len(file_paths)} files...")
|
||||
uploads_dir = os.path.join(base_path, "uploads")
|
||||
|
||||
for i, file_path in enumerate(file_paths, 1):
|
||||
print(f"[{i}/{len(file_paths)}] ", end="")
|
||||
self.migrate_file(file_path, uploads_dir, dry_run)
|
||||
|
||||
# Migrate pastes
|
||||
if paste_paths and not skip_pastes:
|
||||
print(f"\n📝 Migrating {len(paste_paths)} pastes...")
|
||||
pastes_dir = os.path.join(base_path, "pastes")
|
||||
|
||||
for i, paste_path in enumerate(paste_paths, 1):
|
||||
print(f"[{i}/{len(paste_paths)}] ", end="")
|
||||
self.migrate_paste(paste_path, pastes_dir, dry_run)
|
||||
|
||||
self.print_summary(dry_run)
|
||||
self.save_migration_log()
|
||||
return True
|
||||
|
||||
def print_summary(self, dry_run: bool = False):
|
||||
"""Print migration summary"""
|
||||
print("\n" + "=" * 60)
|
||||
print("📊 MIGRATION SUMMARY")
|
||||
print("=" * 60)
|
||||
|
||||
if dry_run:
|
||||
print("🧪 DRY RUN RESULTS:")
|
||||
|
||||
print(f"✅ Files migrated: {self.stats['files_migrated']}")
|
||||
print(f"✅ Pastes migrated: {self.stats['pastes_migrated']}")
|
||||
print(f"⏭️ Files skipped: {self.stats['files_skipped']}")
|
||||
print(f"⏭️ Pastes skipped: {self.stats['pastes_skipped']}")
|
||||
print(f"❌ Errors: {self.stats['errors']}")
|
||||
print(f"📦 Total data: {self.stats['total_size']:,} bytes ({self.stats['total_size'] / 1024 / 1024:.2f} MB)")
|
||||
|
||||
success_rate = ((self.stats['files_migrated'] + self.stats['pastes_migrated']) /
|
||||
max(1, self.stats['files_migrated'] + self.stats['pastes_migrated'] + self.stats['errors'])) * 100
|
||||
print(f"📈 Success rate: {success_rate:.1f}%")
|
||||
|
||||
if not dry_run and (self.stats['files_migrated'] > 0 or self.stats['pastes_migrated'] > 0):
|
||||
print(f"\n🎉 Migration completed successfully!")
|
||||
print(f"💡 Your files are now accessible via your Sharey B2 URLs")
|
||||
|
||||
def save_migration_log(self):
|
||||
"""Save migration log to file"""
|
||||
if not self.migration_log:
|
||||
return
|
||||
|
||||
log_filename = f"migration_log_{datetime.now().strftime('%Y%m%d_%H%M%S')}.txt"
|
||||
|
||||
try:
|
||||
with open(log_filename, 'w') as f:
|
||||
f.write(f"Sharey B2 Migration Log\n")
|
||||
f.write(f"Generated: {datetime.now().isoformat()}\n")
|
||||
f.write(f"=" * 50 + "\n\n")
|
||||
|
||||
for entry in self.migration_log:
|
||||
f.write(f"{entry}\n")
|
||||
|
||||
f.write(f"\n" + "=" * 50 + "\n")
|
||||
f.write(f"SUMMARY:\n")
|
||||
f.write(f"Files migrated: {self.stats['files_migrated']}\n")
|
||||
f.write(f"Pastes migrated: {self.stats['pastes_migrated']}\n")
|
||||
f.write(f"Files skipped: {self.stats['files_skipped']}\n")
|
||||
f.write(f"Pastes skipped: {self.stats['pastes_skipped']}\n")
|
||||
f.write(f"Errors: {self.stats['errors']}\n")
|
||||
f.write(f"Total size: {self.stats['total_size']:,} bytes\n")
|
||||
|
||||
print(f"📄 Migration log saved to: {log_filename}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"⚠️ Failed to save migration log: {e}")
|
||||
|
||||
|
||||
def main():
    """Main migration function.

    Parses CLI flags, connects to B2, runs the migration, and exits with
    status 0 on success / 1 on failure or interruption.
    """
    print("🚀 Sharey Local-to-B2 Migration Tool")
    print("=" * 50)

    # Parse command line arguments
    import argparse
    parser = argparse.ArgumentParser(description='Migrate local Sharey files to Backblaze B2')
    parser.add_argument('--path', '-p', default='.', help='Path to Sharey directory (default: current directory)')
    parser.add_argument('--dry-run', '-d', action='store_true', help='Perform a dry run without uploading')
    parser.add_argument('--skip-files', action='store_true', help='Skip file migration')
    parser.add_argument('--skip-pastes', action='store_true', help='Skip paste migration')
    # NOTE(review): --force is parsed but never forwarded to migrate_all(),
    # so the confirmation prompt is always shown — confirm intended wiring.
    parser.add_argument('--force', '-f', action='store_true', help='Skip confirmation prompt')

    args = parser.parse_args()

    # Initialize migrator
    migrator = ShareyMigrator()

    # Initialize B2 connection
    if not migrator.initialize_b2():
        print("❌ Failed to initialize B2 connection")
        sys.exit(1)

    # Run migration
    try:
        success = migrator.migrate_all(
            base_path=args.path,
            dry_run=args.dry_run,
            skip_files=args.skip_files,
            skip_pastes=args.skip_pastes
        )

        if success:
            print(f"\n💡 Next steps:")
            print(f"   1. Test your Sharey app to ensure URLs work correctly")
            print(f"   2. Consider backing up your local files before deletion")
            print(f"   3. Update any hardcoded URLs to use the new B2 structure")
            sys.exit(0)
        else:
            sys.exit(1)

    except KeyboardInterrupt:
        print(f"\n⏹️ Migration cancelled by user")
        sys.exit(1)

    except Exception as e:
        print(f"\n❌ Migration failed: {e}")
        sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
51
scripts/set_admin_password.py
Normal file
51
scripts/set_admin_password.py
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple script to set admin password for Sharey
|
||||
"""
|
||||
import hashlib
|
||||
import json
|
||||
|
||||
def set_admin_password(password):
    """Set admin password in config.json.

    Args:
        password: Plain-text password to hash and store.

    Returns:
        True on success; False when config.json is missing/unreadable or
        cannot be written back (the error is printed).
    """
    try:
        # Load current config (must already exist in the working directory).
        with open('config.json', 'r') as f:
            config = json.load(f)

        # Hash the password.
        # NOTE(review): unsalted single-round SHA-256 is weak for password
        # storage; the verifying side must match, but consider migrating
        # both sides to PBKDF2/bcrypt.
        password_hash = hashlib.sha256(password.encode()).hexdigest()

        # Ensure admin section exists
        if 'admin' not in config:
            config['admin'] = {}

        config['admin']['password_hash'] = password_hash
        # Preserve an existing timeout; default to 30 minutes otherwise.
        config['admin']['session_timeout_minutes'] = config['admin'].get('session_timeout_minutes', 30)

        # Save config
        with open('config.json', 'w') as f:
            json.dump(config, f, indent=2)

        print("✅ Admin password set successfully!")
        print("💡 You can now access the admin panel at /admin")
        return True

    except Exception as e:
        print(f"❌ Error setting admin password: {e}")
        return False
|
||||
|
||||
if __name__ == "__main__":
    import sys

    # CLI entry point: exactly one argument, the new admin password.
    if len(sys.argv) != 2:
        print("Usage: python set_admin_password.py <password>")
        print("Example: python set_admin_password.py mySecurePassword123")
        sys.exit(1)

    # NOTE(review): passing the password on the command line exposes it to
    # shell history and process listings — consider getpass instead.
    password = sys.argv[1]

    # Minimal sanity check enforced only at this entry point.
    if len(password) < 6:
        print("❌ Password must be at least 6 characters long")
        sys.exit(1)

    set_admin_password(password)
|
||||
280
scripts/setup.py
Normal file
280
scripts/setup.py
Normal file
@@ -0,0 +1,280 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Sharey B2 Setup Script
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import shutil
|
||||
|
||||
def check_python():
    """Exit with status 1 unless running on Python 3.7 or newer."""
    version_ok = sys.version_info >= (3, 7)
    if not version_ok:
        print("❌ Python 3.7+ is required. Current version:", sys.version)
        sys.exit(1)
    # sys.version starts with the bare version number, e.g. "3.11.4".
    print(f"✅ Python {sys.version.split()[0]} detected")
|
||||
|
||||
def create_env_file():
    """Create config.json file from template if it doesn't exist.

    Prefers copying config.json.example; otherwise writes a built-in
    default template.  Also copies .env.example to .env for backwards
    compatibility.  All paths are relative to the working directory.
    """
    if not os.path.exists('config.json'):
        if os.path.exists('config.json.example'):
            shutil.copy('config.json.example', 'config.json')
            print("📄 Created config.json file from template")
            print("\nPlease edit config.json with your Backblaze B2 credentials:")
            print("   - b2.application_key_id: Your B2 application key ID")
            print("   - b2.application_key: Your B2 application key")
            print("   - b2.bucket_name: Your B2 bucket name")
            print("\nYou can get these credentials from your Backblaze account:")
            print("   1. Go to https://secure.backblaze.com/app_keys.htm")
            print("   2. Create a new application key or use an existing one")
            print("   3. Create a bucket or use an existing one")
        else:
            print("❌ config.json.example not found. Creating basic template...")
            # Built-in fallback template; placeholder strings are detected
            # later by check_b2_config().
            basic_config = {
                "b2": {
                    "application_key_id": "your_key_id_here",
                    "application_key": "your_application_key_here",
                    "bucket_name": "your_bucket_name_here"
                },
                "flask": {
                    "host": "127.0.0.1",
                    "port": 8866,
                    "debug": True
                },
                "upload": {
                    "max_file_size_mb": 100,
                    "allowed_extensions": [".jpg", ".jpeg", ".png", ".gif", ".pdf", ".txt", ".doc", ".docx", ".zip", ".mp4", ".mp3"]
                },
                "paste": {
                    "max_length": 1000000
                },
                "security": {
                    "rate_limit_enabled": False,
                    "max_uploads_per_hour": 50
                }
            }

            import json
            with open('config.json', 'w') as f:
                json.dump(basic_config, f, indent=2)
            print("📄 Created basic config.json template")
    else:
        print("📄 config.json file already exists. Please ensure it has the correct B2 credentials.")

    # Also create .env for backwards compatibility if it doesn't exist
    if not os.path.exists('.env'):
        if os.path.exists('.env.example'):
            shutil.copy('.env.example', '.env')
            print("📄 Also created .env file for backwards compatibility")
|
||||
|
||||
def install_dependencies():
    """Install Python dependencies.

    If not already inside a virtualenv, creates ./venv and installs
    requirements.txt there; otherwise installs into the current
    environment.  Returns True on success, False on any failure.
    """
    print("\n📦 Installing Python dependencies...")

    if not os.path.exists('requirements.txt'):
        print("❌ requirements.txt not found!")
        return False

    # Check if we're in a virtual environment.
    # real_prefix covers legacy virtualenv; base_prefix covers stdlib venv.
    in_venv = (hasattr(sys, 'real_prefix') or
               (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))

    if not in_venv:
        print("⚠️ Not in a virtual environment")
        print("Creating virtual environment...")
        try:
            subprocess.run([sys.executable, '-m', 'venv', 'venv'], check=True)
            print("✅ Virtual environment created")

            # Determine the correct pip path
            if os.name == 'nt':  # Windows
                pip_path = os.path.join('venv', 'Scripts', 'pip')
                python_path = os.path.join('venv', 'Scripts', 'python')
            else:  # Unix-like
                pip_path = os.path.join('venv', 'bin', 'pip')
                python_path = os.path.join('venv', 'bin', 'python')

            # Install dependencies in virtual environment
            subprocess.run([pip_path, 'install', '-r', 'requirements.txt'], check=True)
            print("✅ Dependencies installed in virtual environment")
            print(f"💡 To activate the virtual environment:")
            if os.name == 'nt':
                print("   venv\\Scripts\\activate")
            else:
                print("   source venv/bin/activate")
            print(f"💡 Then run the app with: {python_path} app.py")
            return True

        except subprocess.CalledProcessError as e:
            print(f"❌ Failed to create virtual environment or install dependencies: {e}")
            return False
    else:
        print("✅ In virtual environment")
        try:
            # capture_output so pip noise is only shown on failure.
            subprocess.run([sys.executable, '-m', 'pip', 'install', '-r', 'requirements.txt'],
                           check=True, capture_output=True, text=True)
            print("✅ Dependencies installed successfully")
            return True
        except subprocess.CalledProcessError as e:
            print(f"❌ Failed to install dependencies: {e}")
            print("Error output:", e.stderr)
            return False
|
||||
|
||||
def test_imports():
    """Test if required modules can be imported.

    Skipped (returns True) when a ./venv was just created but is not
    active, since the modules live inside it.  Returns False only when
    modules are missing while actually inside a virtualenv.
    """
    print("\n🧪 Testing imports...")

    # Check if we're in a virtual environment or if venv was created
    in_venv = (hasattr(sys, 'real_prefix') or
               (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))

    if not in_venv and os.path.exists('venv'):
        print("💡 Skipping import test - using virtual environment")
        print("   Imports will be tested when you activate the virtual environment")
        return True

    # 'dotenv' is the import name of the python-dotenv package.
    required_modules = ['flask', 'b2sdk', 'dotenv']
    missing_modules = []

    for module in required_modules:
        try:
            __import__(module)
            print(f"   ✅ {module}")
        except ImportError:
            print(f"   ❌ {module}")
            missing_modules.append(module)

    if missing_modules:
        print(f"\n❌ Missing modules: {', '.join(missing_modules)}")
        if not in_venv:
            # Outside a venv, missing modules are tolerated (they may be
            # installed in the venv created earlier).
            print("💡 This is expected if using a virtual environment")
            return True
        else:
            print("Try running: pip install -r requirements.txt")
            return False

    print("✅ All required modules available")
    return True
|
||||
|
||||
def check_b2_config():
    """Check if B2 configuration looks valid.

    Prefers config.json; falls back to a flat KEY=VALUE .env file.
    Returns True when all three B2 settings are present and not left at
    their template placeholder values, False otherwise.
    """
    print("\n🔧 Checking B2 configuration...")

    # Check config.json first
    if os.path.exists('config.json'):
        try:
            import json
            with open('config.json', 'r') as f:
                config_data = json.load(f)

            b2_config = config_data.get('b2', {})
            required_keys = ['application_key_id', 'application_key', 'bucket_name']
            # Placeholder strings shipped in the template count as unset.
            invalid_values = ['your_key_id_here', 'your_application_key_here', 'your_bucket_name_here', '']
            missing_keys = []

            for key in required_keys:
                value = b2_config.get(key)
                if not value or value in invalid_values:
                    missing_keys.append(f'b2.{key}')

            if missing_keys:
                print(f"❌ Please configure these B2 settings in config.json: {', '.join(missing_keys)}")
                return False

            print("✅ B2 configuration looks valid in config.json")
            return True

        except (json.JSONDecodeError, KeyError) as e:
            print(f"❌ Error reading config.json: {e}")
            return False

    # Fall back to .env file
    elif os.path.exists('.env'):
        b2_config = {}
        # Minimal .env parser: KEY=VALUE lines; '#'-prefixed lines ignored.
        with open('.env', 'r') as f:
            for line in f:
                line = line.strip()
                if '=' in line and not line.startswith('#'):
                    key, value = line.split('=', 1)
                    b2_config[key] = value

        required_keys = ['B2_APPLICATION_KEY_ID', 'B2_APPLICATION_KEY', 'B2_BUCKET_NAME']
        missing_keys = []

        for key in required_keys:
            if key not in b2_config or b2_config[key] in ['', 'your_key_id_here', 'your_application_key_here', 'your_bucket_name_here']:
                missing_keys.append(key)

        if missing_keys:
            print(f"❌ Please configure these B2 settings in .env: {', '.join(missing_keys)}")
            return False

        print("✅ B2 configuration looks valid in .env")
        return True

    else:
        print("❌ No configuration file found (config.json or .env)")
        return False
|
||||
|
||||
def main():
    """Main setup function.

    Runs the setup pipeline in order: Python version check, config file
    creation, dependency install, import smoke test, and a B2 config
    check, then prints next-step guidance.  Exits non-zero on failure.
    """
    print("🚀 Setting up Sharey with Backblaze B2 Storage...\n")

    # Check Python version
    check_python()

    # Create .env file
    create_env_file()

    # Install dependencies
    if not install_dependencies():
        print("\n❌ Setup failed during dependency installation")
        sys.exit(1)

    # Test imports
    if not test_imports():
        print("\n❌ Setup failed: missing required modules")
        sys.exit(1)

    # Check B2 configuration
    config_valid = check_b2_config()

    print("\n" + "="*60)
    print("🎉 Setup complete!")
    print("="*60)

    # Check if we're in a virtual environment
    in_venv = (hasattr(sys, 'real_prefix') or
               (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))

    if not in_venv and os.path.exists('venv'):
        print("\n💡 Virtual environment created!")
        print("To use Sharey:")
        if os.name == 'nt':  # Windows
            print("   1. Activate virtual environment: venv\\Scripts\\activate")
            print("   2. Edit config.json with your B2 credentials")
            print("   3. Test B2 connection: venv\\Scripts\\python test_b2.py")
            print("   4. Run the app: venv\\Scripts\\python app.py")
        else:  # Unix-like
            print("   1. Activate virtual environment: source venv/bin/activate")
            print("   2. Edit config.json with your B2 credentials")
            print("   3. Test B2 connection: python test_b2.py")
            print("   4. Run the app: python app.py")
    else:
        if not config_valid:
            print("\n⚠️ Next steps:")
            print("   1. Edit config.json with your B2 credentials")
            print("   2. Run: python test_b2.py (to test B2 connection)")
            print("   3. Run: python app.py (to start the application)")
        else:
            print("\n✅ Next steps:")
            print("   1. Run: python test_b2.py (to test B2 connection)")
            print("   2. Run: python app.py (to start the application)")

    print("\n📋 Notes:")
    print("   - Make sure your B2 bucket allows public downloads")
    print("   - Application will be available at http://127.0.0.1:8866")
    print("   - Check DEPLOYMENT.md for production deployment guide")
|
||||
Reference in New Issue
Block a user