import express from 'express';
import multer from 'multer';
import cors from 'cors';

const app = express();
const PORT = process.env.PORT || 7860;

// Middleware
app.use(cors());
app.use(express.json({ limit: '50mb' }));
app.use(express.urlencoded({ extended: true, limit: '50mb' }));

// Serve static files from public directory
app.use(express.static('public'));

// Configure multer for file uploads
const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 10 * 1024 * 1024, // 10MB limit
  },
  fileFilter: (req, file, cb) => {
    if (file.mimetype.startsWith('image/')) {
      cb(null, true);
    } else {
      cb(new Error('Only image files are allowed'), false);
    }
  }
});

// Health check endpoint
app.get('/', (req, res) => {
  res.json({
    status: 'ok',
    message: 'AI Image Upscaler API (Simple Mode)',
    version: '1.0.0',
    note: 'This is a simplified version. Full AI upscaling requires TensorFlow.js dependencies.',
    endpoints: {
      upscale: 'POST /upscale',
      health: 'GET /'
    }
  });
});

// Simplified upscale endpoint (placeholder)
app.post('/upscale', upload.single('image'), async (req, res) => {
  try {
    if (!req.file) {
      return res.status(400).json({ error: 'No image file provided' });
    }

    const { scale = 2, modelType = 'esrgan-slim', patchSize = 128, padding = 8 } = req.body;

    console.log(`Received upscale request: ${req.file.originalname}, scale: ${scale}x, model: ${modelType}`);

    // For now, return the original image as base64 (placeholder)
    const originalImageBase64 = `data:${req.file.mimetype};base64,${req.file.buffer.toString('base64')}`;

    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 1000));

    res.json({
      success: true,
      result: originalImageBase64,
      metadata: {
        scale: parseInt(scale),
        modelType: modelType,
        patchSize: parseInt(patchSize),
        padding: parseInt(padding),
        processingTime: 1000,
        backend: 'placeholder',
        note: 'This is a placeholder response. Install TensorFlow.js dependencies for actual AI upscaling.'
      }
    });
  } catch (error) {
    console.error('Upscaling error:', error);
    res.status(500).json({
      error: 'Failed to process image',
      message: error.message
    });
  }
});

// Error handling middleware
app.use((error, req, res, next) => {
  if (error instanceof multer.MulterError) {
    if (error.code === 'LIMIT_FILE_SIZE') {
      return res.status(400).json({ error: 'File too large. Maximum size is 10MB' });
    }
  }
  console.error('Unhandled error:', error);
  res.status(500).json({ error: 'Internal server error' });
});

// Start server
app.listen(PORT, '0.0.0.0', () => {
  console.log(`Simple Upscaler API server running on port ${PORT}`);
  console.log(`Note: This is a simplified version without AI processing`);
  console.log(`Health check: http://localhost:${PORT}/`);
  console.log(`Web interface: http://localhost:${PORT}/index.html`);
});

// Handle graceful shutdown
process.on('SIGTERM', () => {
  console.log('Received SIGTERM, shutting down gracefully...');
  process.exit(0);
});
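
// --- Usage sketch (illustrative only, not part of the server) ---
// A minimal example of how a client might call the /upscale endpoint above,
// assuming the server is running locally on the default port 7860. The file
// ./photo.png and the parameter values are hypothetical placeholders; the
// multipart field names match what this server expects (upload.single('image')
// plus the text fields read from req.body).
//
//   curl -X POST http://localhost:7860/upscale \
//     -F "image=@./photo.png" \
//     -F "scale=2" \
//     -F "modelType=esrgan-slim" \
//     -F "patchSize=128" \
//     -F "padding=8"
//
// The JSON response carries the (currently pass-through) image as a base64
// data URL in `result`, along with the `metadata` object assembled in the
// handler above.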