
Security News
Opengrep Adds Apex Support and New Rule Controls in Latest Updates
The latest Opengrep releases add Apex scanning, precision rule tuning, and performance gains for open source static code analysis.
cloudku-uploader
Advanced tools
Blazing-fast, zero-dependency uploader for CloudKu. Supports auto-conversion, chunked uploads, and TypeScript. Easily upload images, videos, audio, and documents via Node.js.
Revolutionary File Upload Solution - Zero Dependencies, Maximum Performance
🚀 Built for Modern JavaScript Environments | 🌍 Global CDN | ⚡ Lightning Fast
๐ฆ Quick Install โข ๐ Get Started โข ๐ API Docs โข ๐ก Examples โข ๐ Support
// Quick time-based uploads
import { upload30s, upload7d, upload1y } from 'cloudku-uploader'
// Batch uploads
import { uploadBatch } from 'cloudku-uploader'
// Smart parsing
import { parseExpireTime } from 'cloudku-uploader'
โก Lightning Performance
|
๐ก๏ธ Enterprise Ready
|
๐ Universal Support
|
# Using npm
npm install cloudku-uploader
# Using yarn
yarn add cloudku-uploader
# Using pnpm
pnpm add cloudku-uploader
# Using bun
bun add cloudku-uploader
import { uploadFile } from 'cloudku-uploader'

// Simple permanent upload (no expiry argument = stored permanently)
const result = await uploadFile(fileBuffer, 'image.jpg')
console.log('✅ Upload URL:', result.result.url)

// Temporary upload with expiry
const tempResult = await uploadFile(fileBuffer, 'temp.pdf', '7d')
console.log('⏰ Expires in 7 days:', tempResult.result.url)
import { uploadSmart } from 'cloudku-uploader'
// Auto-detects expiry format
const result = await uploadSmart(fileBuffer, 'document.pdf', '30d')
console.log('๐ฏ Smart upload:', result)
import {
upload30s, upload15m, upload6h,
upload7d, upload3M, upload1y
} from 'cloudku-uploader'
// Ultra-fast temporary uploads
const quick = await upload30s(buffer, 'temp.jpg') // 30 seconds
const short = await upload15m(buffer, 'preview.png') // 15 minutes
const daily = await upload6h(buffer, 'report.pdf') // 6 hours
const weekly = await upload7d(buffer, 'backup.zip') // 7 days
const quarterly = await upload3M(buffer, 'archive.tar') // 3 months
const longterm = await upload1y(buffer, 'storage.mp4') // 1 year
import React, { useState } from 'react'
import { uploadSmart } from 'cloudku-uploader'
function FileUploader() {
const [uploading, setUploading] = useState(false)
const [result, setResult] = useState(null)
const handleUpload = async (file, expiry = null) => {
setUploading(true)
try {
const buffer = await file.arrayBuffer()
const response = await uploadSmart(
new Uint8Array(buffer),
file.name,
expiry
)
setResult(response)
} catch (error) {
console.error('Upload failed:', error)
} finally {
setUploading(false)
}
}
return (
<div className="upload-container">
<input
type="file"
onChange={(e) => handleUpload(e.target.files[0], '7d')}
disabled={uploading}
/>
{uploading && <p>โณ Uploading...</p>}
{result && (
<div>
<p>โ
Success!</p>
<a href={result.result.url} target="_blank">
View File: {result.result.filename}
</a>
</div>
)}
</div>
)
}
import express from 'express'
import multer from 'multer'
import { uploadSmart, uploadBatch } from 'cloudku-uploader'
const app = express()
// Keep uploads in memory so the raw buffers can be handed straight to the uploader.
const upload = multer({
  limits: { fileSize: 100 * 1024 * 1024 }, // 100MB
  storage: multer.memoryStorage()
})

// Single file upload
app.post('/upload', upload.single('file'), async (req, res) => {
  try {
    // multer leaves req.file undefined when no `file` field was sent;
    // reject explicitly instead of crashing into the 500 handler below.
    if (!req.file) {
      return res.status(400).json({ error: 'No file provided' })
    }
    const { buffer, originalname } = req.file
    const expiry = req.body.expiry || null
    const result = await uploadSmart(buffer, originalname, expiry)
    if (result.status === 'success') {
      res.json({
        success: true,
        data: {
          url: result.result.url,
          filename: result.result.filename,
          size: result.result.size,
          expires: expiry ? `in ${expiry}` : 'never'
        }
      })
    } else {
      res.status(400).json({ error: result.message })
    }
  } catch (error) {
    res.status(500).json({ error: error.message })
  }
})

// Batch upload
app.post('/upload/batch', upload.array('files'), async (req, res) => {
  try {
    // Same guard as above: no `files` field means req.files is undefined.
    if (!req.files || req.files.length === 0) {
      return res.status(400).json({ error: 'No files provided' })
    }
    const files = req.files.map(file => ({
      buffer: file.buffer,
      name: file.originalname,
      expire: req.body.expiry || null
    }))
    const results = await uploadBatch(files)
    res.json({
      success: true,
      total: files.length,
      results: results.map(r => ({
        status: r.status,
        data: r.data?.result || null,
        error: r.error?.message || null
      }))
    })
  } catch (error) {
    res.status(500).json({ error: error.message })
  }
})

app.listen(3000, () => {
  console.log('🚀 Server running on port 3000')
})
// app/api/upload/route.js
import { uploadSmart } from 'cloudku-uploader'
// POST /api/upload — accepts multipart form data with a `file` field and an
// optional `expiry` field, and proxies the upload through uploadSmart.
export async function POST(request) {
  try {
    const formData = await request.formData()
    const file = formData.get('file')
    const expiry = formData.get('expiry') || null

    // Guard clause: nothing to upload.
    if (!file) {
      return Response.json({ error: 'No file provided' }, { status: 400 })
    }

    const bytes = new Uint8Array(await file.arrayBuffer())
    const result = await uploadSmart(bytes, file.name, expiry)

    // Upload rejected by the service — surface its message as a client error.
    if (result.status !== 'success') {
      return Response.json({ error: result.message }, { status: 400 })
    }

    const { url, filename, size } = result.result
    return Response.json({ success: true, url, filename, size })
  } catch (error) {
    return Response.json({ error: error.message }, { status: 500 })
  }
}
import { uploadBatch, upload7d } from 'cloudku-uploader'
import fs from 'fs'
import path from 'path'
// Uploads the contents of a local directory in fixed-size batches.
class BatchUploader {
  /**
   * Uploads every regular file in dirPath via uploadBatch.
   * @param {string} dirPath - directory to scan (non-recursive)
   * @param {{concurrency?: number, expiry?: string|null}} [options]
   * @returns {Promise<Array>} one BatchResult per file, in directory order
   */
  async uploadDirectory(dirPath, options = {}) {
    const { concurrency = 3, expiry = null } = options
    const files = fs.readdirSync(dirPath)
      // Skip subdirectories: readFileSync on a directory would throw.
      .filter(filename => fs.statSync(path.join(dirPath, filename)).isFile())
      .map(filename => ({
        buffer: fs.readFileSync(path.join(dirPath, filename)),
        name: filename,
        expire: expiry
      }))
    console.log(`📦 Processing ${files.length} files...`)
    // Process in batches for better performance
    const results = []
    for (let i = 0; i < files.length; i += concurrency) {
      const batch = files.slice(i, i + concurrency)
      const batchResults = await uploadBatch(batch)
      results.push(...batchResults)
      // Fix: i / concurrency + 1 is the 1-based batch number.
      // Math.ceil((i + 1) / concurrency) mislabeled every batch after the first.
      console.log(`✅ Processed batch ${i / concurrency + 1}`)
    }
    return results
  }
}
// Usage
const uploader = new BatchUploader()
// Upload everything in ./uploads, five files per batch, each expiring after 30 days.
const results = await uploader.uploadDirectory('./uploads', {
concurrency: 5,
expiry: '30d'
})
// Summarize one row per file; rejected uploads carry no data payload.
console.table(results.map(r => ({
status: r.status,
filename: r.data?.result?.filename || 'failed',
url: r.data?.result?.url || 'N/A'
})))
Unit | Description | Example | Use Case |
---|---|---|---|
s | Seconds | 30s | Real-time processing |
m | Minutes | 15m | Quick previews |
h | Hours | 6h | Daily tasks |
d | Days | 7d | Weekly backups |
M | Months | 3M | Quarterly archives |
y | Years | 1y | Long-term storage |
import { parseExpireTime } from 'cloudku-uploader'
// Auto-converts to ISO date
console.log(parseExpireTime('7d')) // 2025-06-30
console.log(parseExpireTime('3M')) // 2025-09-23
console.log(parseExpireTime('1y')) // 2026-06-23
console.log(parseExpireTime(null)) // null (permanent)
uploadFile(buffer, fileName?, expireDate?)
Primary upload function with manual expiry control.
uploadFile(
buffer: Buffer | Uint8Array,
fileName?: string,
expireDate?: string | null
): Promise<UploadResponse>
uploadSmart(buffer, fileName?, expireTime?)
Intelligent upload with automatic time parsing.
uploadSmart(
buffer: Buffer | Uint8Array,
fileName?: string,
expireTime?: string | null
): Promise<UploadResponse>
uploadBatch(files)
Upload multiple files simultaneously.
uploadBatch(
files: Array<{
buffer: Buffer | Uint8Array,
name: string,
expire?: string | null
}>
): Promise<BatchResult[]>
// Time-based upload shortcuts
upload30s(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload15m(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload6h(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload7d(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload3M(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
upload1y(buffer: Buffer | Uint8Array, name: string): Promise<UploadResponse>
interface UploadResponse {
status: 'success' | 'error'
creator?: 'AlfiDev'
information: string
result?: {
filename: string
type: string
size: string
url: string
}
message?: string
}
interface BatchResult {
index: number
status: 'fulfilled' | 'rejected'
data: UploadResponse | null
error: Error | null
}
Category | Extensions | Max Size | Features |
---|---|---|---|
๐ผ๏ธ Images | JPG, PNG, GIF, WebP, SVG, AVIF, HEIC | 100 MB | Auto-optimization |
๐ Documents | PDF, DOC, DOCX, TXT, MD, RTF | 50 MB | Text extraction |
๐๏ธ Archives | ZIP, RAR, 7Z, TAR, GZ | 500 MB | Compression analysis |
๐ต Audio | MP3, WAV, FLAC, AAC, OGG | 200 MB | Metadata preservation |
๐ฌ Video | MP4, AVI, MOV, MKV, WebM | 1 GB | Thumbnail generation |
๐ป Code | JS, TS, PY, GO, RS, C, CPP | 10 MB | Syntax highlighting |
Primary: https://cloudkuimages.guru
Fallback: https://cloudkuimages-guru.us.itpanel.app
Region | Locations | Avg Latency |
---|---|---|
๐ Europe | London, Frankfurt, Paris, Amsterdam | < 25ms |
🌎 Americas | New York, Toronto, São Paulo, LA | < 30ms |
๐ Asia-Pacific | Tokyo, Singapore, Sydney, Mumbai | < 35ms |
// Security headers automatically applied
const securityHeaders = {
'x-content-type-options': 'nosniff', // block MIME-type sniffing
'x-frame-options': 'DENY', // forbid framing (clickjacking defense)
'x-xss-protection': '0', // disable the legacy, exploitable XSS auditor
'referrer-policy': 'strict-origin-when-cross-origin',
'x-provided-by': 'StackCDN'
}
Original Bundle: 2.4KB
Minified: 1.8KB
Gzipped: 0.7KB
Brotli: 0.5KB
Cold Start: < 20ms
First Upload: < 80ms
Subsequent: < 40ms
Throughput: > 35MB/s
Baseline: < 1MB
Per Upload: < 100KB
Peak Usage: < 5MB
Cleanup: Automatic
uploadFile()
calls// Old way (still works)
import UploadFile from 'cloudku-uploader'
const result = await new UploadFile().upload(buffer, 'file.jpg', '7d')
// New way (recommended)
import { uploadSmart } from 'cloudku-uploader'
const result = await uploadSmart(buffer, 'file.jpg', '7d')
// Even better - use shortcuts
import { upload7d } from 'cloudku-uploader'
const result = await upload7d(buffer, 'file.jpg')
// v2.5
import UploadFile from 'cloudku-uploader'
// v2.7 - Multiple import options
import {
uploadFile, // Core function
uploadSmart, // Smart parsing
uploadBatch, // Batch processing
upload30s, // Quick shortcuts
upload7d,
upload1y,
parseExpireTime // Utility function
} from 'cloudku-uploader'
// Or import everything
import * as CloudKu from 'cloudku-uploader'
import { uploadSmart, parseExpireTime } from 'cloudku-uploader'
import { describe, it, expect } from 'vitest'
describe('CloudKu Uploader', () => {
// parseExpireTime should return an ISO YYYY-MM-DD date string,
// or null when passed null (permanent upload).
it('should parse expiry times correctly', () => {
expect(parseExpireTime('7d')).toMatch(/^\d{4}-\d{2}-\d{2}$/)
expect(parseExpireTime('1y')).toMatch(/^\d{4}-\d{2}-\d{2}$/)
expect(parseExpireTime(null)).toBe(null)
})
// NOTE(review): integration test — performs a real network upload,
// so it requires connectivity and a reachable CloudKu endpoint.
it('should upload file successfully', async () => {
const buffer = new Uint8Array([0xFF, 0xD8, 0xFF]) // JPEG header
const result = await uploadSmart(buffer, 'test.jpg', '1d')
expect(result.status).toBe('success')
expect(result.result.url).toContain('cloudkuimages')
})
})
import { uploadBatch } from 'cloudku-uploader'
import { performance } from 'perf_hooks'
/**
 * Uploads ten synthetic 100KB files in a single batch and logs the wall-clock
 * time plus how many individual uploads fulfilled.
 */
async function benchmarkUpload() {
  const files = Array.from({ length: 10 }, (_, i) => ({
    buffer: new Uint8Array(1024 * 100), // 100KB each
    name: `test-${i}.bin`,
    expire: '1d'
  }))
  const start = performance.now()
  const results = await uploadBatch(files)
  const end = performance.now()
  // toFixed avoids printing sub-microsecond float noise from performance.now()
  console.log(`⚡ Uploaded ${files.length} files in ${(end - start).toFixed(1)}ms`)
  const fulfilled = results.filter(r => r.status === 'fulfilled').length
  console.log(`📊 Success rate: ${fulfilled}/${files.length}`)
}
import { uploadSmart } from 'cloudku-uploader'
import sharp from 'sharp'
// Resizes and re-encodes an image with sharp, then uploads the result.
class ImageProcessor {
  /**
   * @param {Buffer} imageBuffer - source image bytes
   * @param {{width?: number, quality?: number, format?: string, expiry?: string}} [options]
   * @returns upload response augmented with size/compression stats
   */
  async processAndUpload(imageBuffer, options = {}) {
    const {
      width = 1920,
      quality = 85,
      format = 'jpeg',
      expiry = '30d'
    } = options
    // Process image. Bug fix: encode with the requested `format` via
    // toFormat() — the old code always called .jpeg(), so e.g. format: 'png'
    // uploaded JPEG bytes under a misleading .png filename.
    const processed = await sharp(imageBuffer)
      .resize(width, null, { withoutEnlargement: true })
      .toFormat(format, { quality })
      .toBuffer()
    // Upload processed image
    const result = await uploadSmart(
      processed,
      `processed-${Date.now()}.${format}`,
      expiry
    )
    return {
      ...result,
      originalSize: imageBuffer.length,
      processedSize: processed.length,
      compression: `${((1 - processed.length / imageBuffer.length) * 100).toFixed(1)}%`
    }
  }
}
import { uploadSmart } from 'cloudku-uploader'
// Wraps uploadSmart calls to collect simple success/size/latency metrics.
class UploadAnalytics {
  constructor() {
    this.metrics = {
      uploads: 0,
      successes: 0,
      failures: 0,
      totalSize: 0,
      avgResponseTime: 0
    }
    // Running sum of response times; avgResponseTime is derived from it so the
    // reported value is a true mean (the old (avg + t) / 2 formula produced an
    // exponentially-weighted value that over-weighted recent uploads).
    this.totalResponseTime = 0
  }

  /**
   * Uploads via uploadSmart while updating the metrics counters.
   * Rethrows any upload error after counting it as a failure.
   */
  async trackUpload(buffer, filename, expiry) {
    const start = Date.now()
    this.metrics.uploads++
    this.metrics.totalSize += buffer.length
    try {
      const result = await uploadSmart(buffer, filename, expiry)
      if (result.status === 'success') {
        this.metrics.successes++
      } else {
        this.metrics.failures++
      }
      this.totalResponseTime += Date.now() - start
      this.metrics.avgResponseTime = this.totalResponseTime / this.metrics.uploads
      return result
    } catch (error) {
      this.metrics.failures++
      throw error
    }
  }

  /** Returns a snapshot of the metrics with human-readable derived fields. */
  getStats() {
    const { uploads, successes, totalSize } = this.metrics
    return {
      ...this.metrics,
      // Guard: before any upload is tracked, 0/0 would have produced 'NaN%'.
      successRate: uploads > 0 ? `${(successes / uploads * 100).toFixed(2)}%` : '0.00%',
      totalSizeMB: `${(totalSize / 1024 / 1024).toFixed(2)} MB`,
      avgResponseTimeMs: `${this.metrics.avgResponseTime.toFixed(0)} ms`
    }
  }
}
๐ Official WebsiteComplete documentation and examples |
๐ฆ NPM PackagePackage info and version history |
๐ฌ WhatsApp SupportInstant technical assistance |
๐ง Enterprise SalesCustom solutions and SLA |
cloudku-uploader
This project is licensed under the MIT License - see the LICENSE file for details.
# Get started in seconds
npm install cloudku-uploader
Made with โค๏ธ by AlfiDev | Powered by CloudKu Infrastructure
Empowering developers worldwide with reliable, lightning-fast file uploads
โญ Star us on GitHub โข ๐ฆ Follow on Twitter โข ๐ง Subscribe to Updates โข ๐ฌ Join Discord
FAQs
Blazing-fast, zero-dependency uploader for CloudKu. Supports auto-conversion, chunked uploads, and TypeScript. Easily upload images, videos, audio, and documents via Node.js.
We found that cloudku-uploader demonstrated a healthy version release cadence and project activity because the last version was released less than a year ago. It has 3 open source maintainers collaborating on the project.
Did you know?
Socket for GitHub automatically highlights issues in each pull request and monitors the health of all your open source dependencies. Discover the contents of your packages and block harmful activity before you install or update your dependencies.
Security News
The latest Opengrep releases add Apex scanning, precision rule tuning, and performance gains for open source static code analysis.
Security News
npm now supports Trusted Publishing with OIDC, enabling secure package publishing directly from CI/CD workflows without relying on long-lived tokens.
Research
/Security News
A RubyGems malware campaign used 60 malicious packages posing as automation tools to steal credentials from social media and marketing tool users.