Fonctionnalités Avancées
Fonctionnalités avancées et capacités de Qwen Image Edit
Fonctionnalités Avancées
Découvrez les capacités avancées de Qwen Image Edit pour des cas d'usage complexes et des performances optimales.
🔄 Traitement Asynchrone
Webhooks pour les Traitements Longs
Pour les images volumineuses ou les traitements complexes, utilisez le mode asynchrone avec webhooks :
import express from 'express';
import { QwenImageEdit } from 'qwen-image-edit';

const app = express();
// NOTE(review): express.json() parses the body before signature verification;
// many webhook schemes sign the RAW payload — confirm verifyWebhookSignature
// accepts a parsed object, otherwise capture the raw body instead.
app.use(express.json());

const editor = new QwenImageEdit({
  apiKey: process.env.QWEN_API_KEY,
  webhookSecret: process.env.WEBHOOK_SECRET
});

// Kick off a long-running edit; Qwen POSTs progress/completion events to
// callbackUrl instead of the client holding the connection open.
async function demarrerTraitementAsync() {
  const jobOptions = {
    image: './grande-image.jpg',
    prompt: 'Remplacer tous les textes en anglais par du français',
    callbackUrl: 'https://monsite.com/webhook/qwen',
    metadata: {
      userId: '12345',
      projectId: 'projet-abc'
    }
  };
  const job = await editor.editTextAsync(jobOptions);
  console.log('Job démarré:', job.id);
  return job;
}

// Webhook receiver: authenticate the event, then dispatch on its status.
app.post('/webhook/qwen', (req, res) => {
  const signature = req.headers['x-qwen-signature'];
  if (!editor.verifyWebhookSignature(req.body, signature)) {
    return res.status(401).send('Signature invalide');
  }
  const { jobId, status, result, error } = req.body;
  if (status === 'completed') {
    console.log(`Job ${jobId} terminé:`, result.imageUrl);
    // handle the finished image here
  } else if (status === 'failed') {
    console.error(`Job ${jobId} échoué:`, error);
    // handle the failure here
  } else if (status === 'progress') {
    // NOTE(review): assumes progress events carry `result.progress` — confirm.
    console.log(`Job ${jobId} progression: ${result.progress}%`);
  }
  res.status(200).send('OK');
});

app.listen(3000);
Suivi des Jobs
/**
 * Tracks asynchronous edit jobs and polls their remote status.
 */
class JobTracker {
  /**
   * @param {object} editor - client exposing editTextAsync(options) and getJobStatus(jobId)
   */
  constructor(editor) {
    this.editor = editor;
    this.jobs = new Map(); // jobId -> tracked job record
  }

  /**
   * Start an async job and begin polling its status every 5 seconds.
   * @param {object} options - forwarded to editor.editTextAsync
   * @returns {Promise<object>} the job handle returned by the editor
   */
  async demarrerJob(options) {
    const job = await this.editor.editTextAsync(options);
    this.jobs.set(job.id, {
      ...job,
      startTime: Date.now(),
      status: 'pending'
    });
    // Fire-and-forget polling loop for this job.
    this.pollJob(job.id);
    return job;
  }

  /**
   * Poll the remote status of a job until it completes or fails.
   * Fix: a slow getJobStatus call could previously overlap the next interval
   * tick (setInterval does not await async callbacks); an in-flight guard now
   * skips ticks while a request is pending. Polling errors are also recorded
   * on the tracked job instead of being silently dropped.
   */
  async pollJob(jobId) {
    let inFlight = false; // prevents overlapping status requests
    const interval = setInterval(async () => {
      if (inFlight) return;
      inFlight = true;
      try {
        const status = await this.editor.getJobStatus(jobId);
        const job = this.jobs.get(jobId);
        if (!job) {
          // Job was removed from tracking; stop polling.
          clearInterval(interval);
          return;
        }
        job.status = status.status;
        job.progress = status.progress;
        job.lastUpdate = Date.now();
        console.log(`Job ${jobId}: ${status.status} (${status.progress}%)`);
        if (status.status === 'completed' || status.status === 'failed') {
          clearInterval(interval);
          job.endTime = Date.now();
          job.duration = job.endTime - job.startTime;
          if (status.status === 'completed') {
            job.result = status.result;
            console.log(`✅ Job ${jobId} terminé en ${job.duration}ms`);
          } else {
            job.error = status.error;
            console.error(`❌ Job ${jobId} échoué après ${job.duration}ms`);
          }
        }
      } catch (error) {
        console.error(`Erreur lors du polling du job ${jobId}:`, error);
        // Record the polling failure on the job before giving up.
        const job = this.jobs.get(jobId);
        if (job) {
          job.status = 'poll-error';
          job.error = error;
        }
        clearInterval(interval);
      } finally {
        inFlight = false;
      }
    }, 5000); // poll every 5 seconds
  }

  /** @returns {object|undefined} the locally tracked record for jobId */
  getJobStatus(jobId) {
    return this.jobs.get(jobId);
  }

  /** @returns {object[]} every tracked job record */
  getAllJobs() {
    return Array.from(this.jobs.values());
  }

  /** @returns {object[]} jobs still pending or processing */
  getActiveJobs() {
    return this.getAllJobs().filter(job =>
      job.status === 'pending' || job.status === 'processing'
    );
  }
}
// Usage example
const tracker = new JobTracker(editor);
const jobOptions = {
  image: './image.jpg',
  prompt: 'Traitement complexe'
};
const job = await tracker.demarrerJob(jobOptions);

// Check the tracked status again after ten seconds.
setTimeout(() => {
  const status = tracker.getJobStatus(job.id);
  console.log('Statut actuel:', status);
}, 10000);
🚀 Traitement par Lots Optimisé
Traitement Parallèle avec Contrôle de Concurrence
import pLimit from 'p-limit';
import pRetry from 'p-retry';
/**
 * Batch image processor: runs edits with bounded concurrency (p-limit) and
 * per-image retries with exponential backoff (p-retry).
 */
class BatchProcessor {
  /**
   * @param {object} editor - client exposing editText({ image, prompt, ... })
   * @param {object} [options]
   * @param {number} [options.concurrency=3] - max edits in flight at once
   * @param {number} [options.retries=2] - retry attempts per image
   */
  constructor(editor, options = {}) {
    this.editor = editor;
    this.concurrency = options.concurrency || 3;
    this.retries = options.retries || 2;
    this.limit = pLimit(this.concurrency);
    // Live counters: completed/failed are incremented as each image settles
    // so getProgress()/calculateETA() are meaningful during a run.
    this.stats = {
      total: 0,
      completed: 0,
      failed: 0,
      startTime: null,
      endTime: null
    };
  }

  /**
   * Edit every image with the same prompt.
   * @returns {Promise<{successful: object[], failed: any[], stats: object}>}
   */
  async processImages(images, prompt, options = {}) {
    this.stats.total = images.length;
    this.stats.startTime = Date.now();
    this.stats.completed = 0;
    this.stats.failed = 0;
    console.log(`🚀 Démarrage du traitement de ${images.length} images avec ${this.concurrency} workers`);
    const results = await Promise.allSettled(
      images.map((image, index) =>
        this.limit(() => this.processImageWithRetry(image, prompt, index, options))
      )
    );
    this.stats.endTime = Date.now();
    this.stats.duration = this.stats.endTime - this.stats.startTime;
    const successful = results.filter(r => r.status === 'fulfilled');
    const failed = results.filter(r => r.status === 'rejected');
    // Reconcile the live counters with the authoritative settled results.
    this.stats.completed = successful.length;
    this.stats.failed = failed.length;
    console.log(`✅ Traitement terminé: ${successful.length} succès, ${failed.length} échecs en ${this.stats.duration}ms`);
    return {
      successful: successful.map(r => r.value),
      failed: failed.map(r => r.reason),
      stats: this.stats
    };
  }

  /**
   * Edit one image with retries; unrecoverable input errors abort the loop.
   * Fix: the live completed/failed counters are incremented here as each
   * image settles. Previously they were only set after ALL images finished,
   * so getProgress() always reported 0 processed and calculateETA() returned
   * null for the whole run.
   */
  async processImageWithRetry(image, prompt, index, options) {
    try {
      const outcome = await pRetry(async () => {
        try {
          console.log(`📸 Traitement image ${index + 1}/${this.stats.total}`);
          const result = await this.editor.editText({
            image,
            prompt,
            ...options
          });
          console.log(`✅ Image ${index + 1} terminée`);
          return { index, result, image };
        } catch (error) {
          console.error(`❌ Erreur image ${index + 1}:`, error.message);
          // Do not retry unrecoverable input errors.
          // NOTE(review): p-retry >= 5 exposes AbortError as a named export,
          // not as a property of the default export — confirm installed version.
          if (error.code === 'UNSUPPORTED_FORMAT' || error.code === 'IMAGE_TOO_LARGE') {
            throw new pRetry.AbortError(error);
          }
          throw error;
        }
      }, {
        retries: this.retries,
        factor: 2,
        minTimeout: 1000,
        maxTimeout: 10000,
        onFailedAttempt: (error) => {
          console.log(`🔄 Retry image ${index + 1}, tentative ${error.attemptNumber}/${this.retries + 1}`);
        }
      });
      this.stats.completed += 1;
      return outcome;
    } catch (error) {
      this.stats.failed += 1;
      throw error;
    }
  }

  /** Snapshot of batch progress; valid while a run is still in flight. */
  getProgress() {
    const processed = this.stats.completed + this.stats.failed;
    const percentage = this.stats.total > 0 ? (processed / this.stats.total) * 100 : 0;
    return {
      processed,
      total: this.stats.total,
      percentage: Math.round(percentage),
      remaining: this.stats.total - processed,
      eta: this.calculateETA()
    };
  }

  /**
   * Estimated remaining time in ms, extrapolated from the average duration
   * of completed images; null until at least one image has completed.
   */
  calculateETA() {
    if (!this.stats.startTime || this.stats.completed === 0) {
      return null;
    }
    const elapsed = Date.now() - this.stats.startTime;
    const avgTimePerImage = elapsed / this.stats.completed;
    const remaining = this.stats.total - this.stats.completed - this.stats.failed;
    return Math.round(remaining * avgTimePerImage);
  }
}
// Usage example
const processor = new BatchProcessor(editor, {
  concurrency: 5,
  retries: 3
});

// Source images to edit.
const images = [
  './image1.jpg',
  './image2.jpg',
  './image3.jpg'
  // ...more images
];

const results = await processor.processImages(images, 'Remplacer "Sale" par "Soldes"');
console.log('Résultats:', results.stats);
Traitement Parallèle Intelligent
import pLimit from 'p-limit';
import pRetry from 'p-retry';
/**
 * Distributes edit requests across a pool of region-pinned clients,
 * selected round-robin, with retries and a regional fallback.
 */
class IntelligentProcessor {
  constructor(editor) {
    this.editor = editor;
    this.editors = []; // lazily-created pool of QwenImageEdit clients
    this.currentEditorIndex = 0;
  }

  // Build the client pool, one instance per slot.
  async initializeEditorPool(poolSize = 3) {
    const buildClient = (slot) =>
      new QwenImageEdit({
        apiKey: process.env.QWEN_API_KEY,
        region: this.getOptimalRegion(slot),
        timeout: 45000
      });
    this.editors = Array.from({ length: poolSize }, (_, slot) => buildClient(slot));
    console.log(`🏊 Pool d'éditeurs initialisé avec ${poolSize} instances`);
  }

  // Map a pool slot onto a region, cycling through the known regions.
  getOptimalRegion(index) {
    const regions = ['eu-west-1', 'us-east-1', 'ap-southeast-1'];
    return regions[index % regions.length];
  }

  // Round-robin selection over the pool.
  getNextEditor() {
    const chosen = this.editors[this.currentEditorIndex];
    this.currentEditorIndex = (this.currentEditorIndex + 1) % this.editors.length;
    return chosen;
  }

  /**
   * Process all images in parallel (one slot per pool member), retrying
   * each image up to twice and falling back to the next client when a
   * region is unavailable.
   */
  async processWithLoadBalancing(images, prompt, options = {}) {
    if (this.editors.length === 0) {
      await this.initializeEditorPool();
    }
    const limit = pLimit(this.editors.length);
    const runOne = (image) => {
      const editor = this.getNextEditor();
      const attempt = async () => {
        try {
          return await editor.editText({ image, prompt, ...options });
        } catch (error) {
          // On a regional outage, immediately try the next client.
          if (error.code === 'REGION_UNAVAILABLE') {
            const fallbackEditor = this.getNextEditor();
            return await fallbackEditor.editText({ image, prompt, ...options });
          }
          throw error;
        }
      };
      return pRetry(attempt, {
        retries: 2,
        factor: 1.5
      });
    };
    return Promise.allSettled(
      images.map((image) => limit(() => runOne(image)))
    );
  }
}
// Usage example
const intelligentProcessor = new IntelligentProcessor(editor);
const balancedResults = await intelligentProcessor.processWithLoadBalancing(
  images,
  'Moderniser le design'
);
🔍 Analyse d'Image Approfondie
Analyse Complète avec Détection Multi-Éléments
/**
 * Runs a battery of analyses (text, objects, faces, colors, style, quality)
 * against a single image and derives summary metrics from the raw results.
 */
class AdvancedImageAnalyzer {
  /**
   * @param {object} editor - client exposing analyzeImage({ image, options })
   */
  constructor(editor) {
    this.editor = editor;
  }

  /**
   * Run every analysis in parallel; an individual failure yields null for
   * that facet instead of failing the whole report.
   * @param {string} image - image path or URL
   * @returns {Promise<object>} per-facet results plus a human-readable summary
   */
  async analyzeComprehensive(image) {
    console.log('🔍 Analyse complète de l\'image...');
    const analyses = await Promise.allSettled([
      this.analyzeText(image),
      this.analyzeObjects(image),
      this.analyzeFaces(image),
      this.analyzeColors(image),
      this.analyzeStyle(image),
      this.analyzeQuality(image)
    ]);
    const [text, objects, faces, colors, style, quality] = analyses.map(a =>
      a.status === 'fulfilled' ? a.value : null
    );
    return {
      text,
      objects,
      faces,
      colors,
      style,
      quality,
      summary: this.generateSummary({ text, objects, faces, colors, style, quality })
    };
  }

  /** OCR pass plus derived language list and readability estimate. */
  async analyzeText(image) {
    const result = await this.editor.analyzeImage({
      image,
      options: {
        detectText: true,
        confidence: 0.7,
        languages: ['fr', 'en', 'es']
      }
    });
    return {
      texts: result.text || [],
      languages: this.detectLanguages(result.text || []),
      readability: this.assessReadability(result.text || [])
    };
  }

  /** Object detection plus category grouping and dominant-object ranking. */
  async analyzeObjects(image) {
    const result = await this.editor.analyzeImage({
      image,
      options: {
        detectObjects: true,
        confidence: 0.6
      }
    });
    return {
      objects: result.objects || [],
      categories: this.categorizeObjects(result.objects || []),
      dominantObjects: this.findDominantObjects(result.objects || [])
    };
  }

  /** Face detection with demographic and emotion aggregation. */
  async analyzeFaces(image) {
    const result = await this.editor.analyzeImage({
      image,
      options: {
        detectFaces: true,
        faceAttributes: ['age', 'gender', 'emotion']
      }
    });
    return {
      faces: result.faces || [],
      demographics: this.analyzeDemographics(result.faces || []),
      emotions: this.analyzeEmotions(result.faces || [])
    };
  }

  /** Palette extraction plus harmony and mood heuristics. */
  async analyzeColors(image) {
    const result = await this.editor.analyzeImage({
      image,
      options: {
        extractColors: true,
        colorCount: 10
      }
    });
    return {
      palette: result.colors || [],
      dominantColor: result.colors?.[0], // assumes the API returns colors most-dominant first — TODO confirm
      colorHarmony: this.assessColorHarmony(result.colors || []),
      mood: this.inferMoodFromColors(result.colors || [])
    };
  }

  /** Style classification (photography / illustration / design / art). */
  async analyzeStyle(image) {
    const result = await this.editor.analyzeImage({
      image,
      options: {
        detectStyle: true,
        styleCategories: ['photography', 'illustration', 'design', 'art']
      }
    });
    return {
      style: result.style,
      artistic: result.artistic || {},
      technical: result.technical || {}
    };
  }

  /** Technical quality metrics plus improvement recommendations. */
  async analyzeQuality(image) {
    const result = await this.editor.analyzeImage({
      image,
      options: {
        assessQuality: true,
        metrics: ['sharpness', 'brightness', 'contrast', 'noise']
      }
    });
    return {
      overall: result.quality?.overall || 0,
      metrics: result.quality?.metrics || {},
      recommendations: this.generateQualityRecommendations(result.quality || {})
    };
  }

  /** Unique language codes found across the detected text fragments. */
  detectLanguages(texts) {
    const languages = new Set();
    texts.forEach(text => {
      if (text.language) {
        languages.add(text.language);
      }
    });
    return Array.from(languages);
  }

  /**
   * Aggregate OCR confidence and text size into a readability estimate.
   * @returns {object|null} null when no text was detected
   */
  assessReadability(texts) {
    if (texts.length === 0) return null;
    const avgConfidence = texts.reduce((sum, text) => sum + text.confidence, 0) / texts.length;
    const avgSize = texts.reduce((sum, text) => sum + (text.bbox?.height || 0), 0) / texts.length;
    return {
      confidence: avgConfidence,
      averageTextSize: avgSize,
      readabilityScore: this.calculateReadabilityScore(avgConfidence, avgSize)
    };
  }

  /** 0-100 score: 70% OCR confidence, 30% text size (capped at 50px). */
  calculateReadabilityScore(confidence, size) {
    const confidenceScore = confidence * 0.7;
    const sizeScore = Math.min(size / 50, 1) * 0.3; // normalize size
    return Math.round((confidenceScore + sizeScore) * 100);
  }

  /** Group detections by their category field ('unknown' when absent). */
  categorizeObjects(objects) {
    const categories = {};
    objects.forEach(obj => {
      const category = obj.category || 'unknown';
      if (!categories[category]) {
        categories[category] = [];
      }
      categories[category].push(obj);
    });
    return categories;
  }

  /**
   * Top-3 detections ranked by confidence × area.
   * Fix: sorts a copy — Array.prototype.sort mutates in place and previously
   * reordered the caller's objects array as a side effect.
   */
  findDominantObjects(objects) {
    return [...objects]
      .sort((a, b) => (b.confidence * b.area) - (a.confidence * a.area))
      .slice(0, 3);
  }

  /** Average age, gender distribution and face count; null with no faces. */
  analyzeDemographics(faces) {
    if (faces.length === 0) return null;
    const ages = faces.map(f => f.age).filter(Boolean);
    const genders = faces.map(f => f.gender).filter(Boolean);
    return {
      averageAge: ages.length > 0 ? Math.round(ages.reduce((a, b) => a + b, 0) / ages.length) : null,
      genderDistribution: this.countOccurrences(genders),
      totalFaces: faces.length
    };
  }

  /** Dominant emotion, its distribution, and mean detection confidence. */
  analyzeEmotions(faces) {
    const emotions = faces.flatMap(f => f.emotions || []);
    const emotionCounts = this.countOccurrences(emotions.map(e => e.emotion));
    return {
      // 'neutral' is the fallback when no emotion was detected at all.
      dominant: Object.keys(emotionCounts).reduce((a, b) =>
        emotionCounts[a] > emotionCounts[b] ? a : b, 'neutral'
      ),
      distribution: emotionCounts,
      averageConfidence: emotions.length > 0 ?
        emotions.reduce((sum, e) => sum + e.confidence, 0) / emotions.length : 0
    };
  }

  /** Frequency map: item -> number of occurrences. */
  countOccurrences(array) {
    return array.reduce((acc, item) => {
      acc[item] = (acc[item] || 0) + 1;
      return acc;
    }, {});
  }

  /**
   * Rough harmony label from the spread of palette hues.
   * NOTE(review): the hue spread ignores circular wrap-around (350° and 10°
   * are treated as far apart) — acceptable as a heuristic, confirm intent.
   */
  assessColorHarmony(colors) {
    if (colors.length < 2) return 'insufficient';
    const hues = colors.map(c => this.rgbToHsl(c.rgb).h);
    const hueVariance = this.calculateVariance(hues);
    if (hueVariance < 30) return 'monochromatic';
    if (hueVariance < 60) return 'analogous';
    if (hueVariance < 120) return 'complementary';
    return 'triadic';
  }

  /** Mood label from the lightness/saturation of the dominant color. */
  inferMoodFromColors(colors) {
    if (colors.length === 0) return 'neutral';
    const dominantColor = colors[0];
    const hsl = this.rgbToHsl(dominantColor.rgb);
    if (hsl.l < 0.3) return 'dark';
    if (hsl.l > 0.8) return 'bright';
    if (hsl.s < 0.3) return 'muted';
    if (hsl.s > 0.7) return 'vibrant';
    return 'balanced';
  }

  /**
   * Standard RGB→HSL conversion.
   * @param {{r: number, g: number, b: number}} rgb - channels in 0-255
   * @returns {{h: number, s: number, l: number}} h in degrees, s/l in 0-1
   */
  rgbToHsl(rgb) {
    const r = rgb.r / 255;
    const g = rgb.g / 255;
    const b = rgb.b / 255;
    const max = Math.max(r, g, b);
    const min = Math.min(r, g, b);
    let h, s, l = (max + min) / 2;
    if (max === min) {
      h = s = 0; // achromatic
    } else {
      const d = max - min;
      s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
      switch (max) {
        case r: h = (g - b) / d + (g < b ? 6 : 0); break;
        case g: h = (b - r) / d + 2; break;
        case b: h = (r - g) / d + 4; break;
      }
      h /= 6;
    }
    return { h: h * 360, s, l };
  }

  /**
   * Population standard deviation (despite the name — the sqrt is applied,
   * and the harmony thresholds above are tuned to this scale).
   * Fix: returns 0 for an empty input instead of NaN (0/0).
   */
  calculateVariance(numbers) {
    if (numbers.length === 0) return 0;
    const mean = numbers.reduce((a, b) => a + b, 0) / numbers.length;
    const variance = numbers.reduce((sum, num) => sum + Math.pow(num - mean, 2), 0) / numbers.length;
    return Math.sqrt(variance);
  }

  /** Actionable suggestions for metrics below/above fixed thresholds. */
  generateQualityRecommendations(quality) {
    const recommendations = [];
    if (quality.metrics?.sharpness < 0.7) {
      recommendations.push('Améliorer la netteté de l\'image');
    }
    if (quality.metrics?.brightness < 0.4) {
      recommendations.push('Augmenter la luminosité');
    }
    if (quality.metrics?.contrast < 0.5) {
      recommendations.push('Améliorer le contraste');
    }
    if (quality.metrics?.noise > 0.3) {
      recommendations.push('Réduire le bruit de l\'image');
    }
    return recommendations;
  }

  /** Single-line French summary of the non-empty facets. */
  generateSummary(analysis) {
    const summary = [];
    if (analysis.text?.texts?.length > 0) {
      summary.push(`${analysis.text.texts.length} éléments de texte détectés`);
    }
    if (analysis.objects?.objects?.length > 0) {
      summary.push(`${analysis.objects.objects.length} objets identifiés`);
    }
    if (analysis.faces?.faces?.length > 0) {
      summary.push(`${analysis.faces.faces.length} visage(s) détecté(s)`);
    }
    if (analysis.style?.style) {
      summary.push(`Style: ${analysis.style.style}`);
    }
    if (analysis.quality?.overall) {
      summary.push(`Qualité: ${Math.round(analysis.quality.overall * 100)}%`);
    }
    return summary.join(', ');
  }
}
// Usage example
const imageAnalyzer = new AdvancedImageAnalyzer(editor);
const report = await imageAnalyzer.analyzeComprehensive('./image-complexe.jpg');
console.log('Analyse complète:', report.summary);
console.log('Détails:', report);
🛡️ Détection de Contenu Sensible
Système de Modération Avancé
/**
 * Pre/post moderation gate around image edits.
 * Each category rule has a confidence threshold and an action:
 * 'block' rejects the image outright, 'flag' marks it unsafe but allows it.
 */
class ContentModerationSystem {
  constructor(editor) {
    this.editor = editor;
    this.moderationRules = {
      violence: { threshold: 0.3, action: 'block' },
      adult: { threshold: 0.2, action: 'flag' },
      hate: { threshold: 0.1, action: 'block' },
      spam: { threshold: 0.5, action: 'flag' }
    };
  }

  /**
   * Analyze an image and apply the moderation rules.
   * @returns {Promise<{safe: boolean, flags: object[], blocked: boolean, confidence: number, details: object}>}
   */
  async moderateImage(image, options = {}) {
    console.log('🛡️ Modération du contenu...');
    const analysis = await this.editor.analyzeImage({
      image,
      options: {
        detectUnsafeContent: true,
        categories: ['violence', 'adult', 'hate', 'spam'],
        confidence: 0.1
      }
    });
    const moderationResult = {
      safe: true,
      flags: [],
      blocked: false,
      confidence: 1.0,
      details: analysis.moderation || {}
    };
    // Apply each configured rule to the detections.
    for (const [category, detection] of Object.entries(analysis.moderation || {})) {
      const rule = this.moderationRules[category];
      if (!rule) continue; // unknown category: no rule configured
      if (detection.confidence >= rule.threshold) {
        moderationResult.flags.push({
          category,
          confidence: detection.confidence,
          action: rule.action,
          reason: detection.reason
        });
        if (rule.action === 'block') {
          moderationResult.blocked = true;
          moderationResult.safe = false;
        } else if (rule.action === 'flag') {
          moderationResult.safe = false;
        }
      }
    }
    moderationResult.confidence = this.calculateOverallConfidence(moderationResult.flags);
    return moderationResult;
  }

  /**
   * Overall "safe" confidence: 1.0 with no flags, otherwise the complement
   * of the strongest detection.
   */
  calculateOverallConfidence(flags) {
    if (flags.length === 0) return 1.0;
    const maxConfidence = Math.max(...flags.map(f => f.confidence));
    return 1.0 - maxConfidence;
  }

  /**
   * Moderate, edit, then moderate the edited output as well.
   * Fix: options.strictMode previously had no enforcement effect — flagged
   * (non-blocked) content was processed in both modes, and the "allowed
   * anyway" warning only fired WHEN strictMode was on, contradicting its own
   * message. Strict mode now rejects flagged content; non-strict mode warns.
   * @throws {Error} when the input is blocked, or flagged in strict mode
   */
  async processWithModeration(image, prompt, options = {}) {
    const moderation = await this.moderateImage(image);
    if (moderation.blocked) {
      throw new Error(`Contenu bloqué: ${moderation.flags.map(f => f.category).join(', ')}`);
    }
    if (!moderation.safe) {
      if (options.strictMode) {
        throw new Error(`Contenu signalé en mode strict: ${moderation.flags.map(f => f.category).join(', ')}`);
      }
      console.warn('⚠️ Contenu signalé mais traitement autorisé:', moderation.flags);
    }
    const result = await this.editor.editText({ image, prompt, ...options });
    // Check the generated output too: an edit can introduce unsafe content.
    const postModeration = await this.moderateImage(result.imageUrl);
    return {
      ...result,
      moderation: {
        pre: moderation,
        post: postModeration
      }
    };
  }
}
// Usage example
const moderator = new ContentModerationSystem(editor);
try {
  const moderatedResult = await moderator.processWithModeration(
    './image-a-moderer.jpg',
    'Modifier le texte',
    { strictMode: true }
  );
  console.log('Traitement réussi:', moderatedResult.imageUrl);
  console.log('Modération:', moderatedResult.moderation);
} catch (error) {
  console.error('Contenu bloqué:', error.message);
}
🎯 Édition Conditionnelle
Édition Basée sur l'Analyse d'Image
/**
 * Applies editing rules conditionally, driven by a full image analysis.
 */
class ConditionalEditor {
  constructor(editor) {
    this.editor = editor;
    this.analyzer = new AdvancedImageAnalyzer(editor);
  }

  /**
   * Analyze the image, select the rules whose conditions match, and apply
   * them sequentially (each rule edits the previous rule's output).
   * @param {string} image - image path or URL
   * @param {object[]} rules - { name, condition, prompt, required, options? }
   * @throws {Error} when a rule marked required fails
   */
  async editBasedOnContent(image, rules) {
    console.log('🎯 Analyse pour édition conditionnelle...');
    const analysis = await this.analyzer.analyzeComprehensive(image);
    const applicableRules = this.findApplicableRules(analysis, rules);
    if (applicableRules.length === 0) {
      console.log('Aucune règle applicable trouvée');
      return { image, analysis, rulesApplied: [] };
    }
    console.log(`📋 Application de ${applicableRules.length} règle(s)`);
    let currentImage = image;
    const rulesApplied = [];
    for (const rule of applicableRules) {
      try {
        console.log(`🔧 Application de la règle: ${rule.name}`);
        const result = await this.applyRule(currentImage, rule, analysis);
        // The edited image becomes the input of the next rule.
        currentImage = result.imageUrl;
        rulesApplied.push({
          rule: rule.name,
          success: true,
          result: result
        });
      } catch (error) {
        console.error(`❌ Échec de la règle ${rule.name}:`, error.message);
        rulesApplied.push({
          rule: rule.name,
          success: false,
          error: error.message
        });
        if (rule.required) {
          throw new Error(`Règle requise échouée: ${rule.name}`);
        }
      }
    }
    return {
      image: currentImage,
      analysis,
      rulesApplied
    };
  }

  /** Rules whose condition evaluates true against the analysis. */
  findApplicableRules(analysis, rules) {
    return rules.filter(rule => this.evaluateCondition(analysis, rule.condition));
  }

  /**
   * Recursively evaluate a condition tree against the analysis.
   * Fix: every branch now returns a strict boolean, and 'hasObject' guards
   * against detections without a label (obj.label.toLowerCase() previously
   * threw a TypeError on unlabeled detections).
   */
  evaluateCondition(analysis, condition) {
    switch (condition.type) {
      case 'hasText':
        return (analysis.text?.texts?.length ?? 0) > 0;
      case 'hasLanguage':
        return analysis.text?.languages?.includes(condition.language) ?? false;
      case 'hasObject':
        return analysis.objects?.objects?.some(obj =>
          obj.label?.toLowerCase().includes(condition.object.toLowerCase()) ?? false
        ) ?? false;
      case 'hasFaces':
        return (analysis.faces?.faces?.length ?? 0) > 0;
      case 'qualityBelow':
        // Missing quality data never matches (Infinity < threshold is false).
        return (analysis.quality?.overall ?? Infinity) < condition.threshold;
      case 'colorDominant':
        return analysis.colors?.dominantColor?.name === condition.color;
      case 'styleIs':
        return analysis.style?.style === condition.style;
      case 'and':
        return condition.conditions.every(c => this.evaluateCondition(analysis, c));
      case 'or':
        return condition.conditions.some(c => this.evaluateCondition(analysis, c));
      default:
        return false;
    }
  }

  /** Apply one rule by generating its prompt and submitting the edit. */
  async applyRule(image, rule, analysis) {
    const prompt = this.generatePromptFromRule(rule, analysis);
    return await this.editor.editText({
      image,
      prompt,
      ...rule.options
    });
  }

  /**
   * Substitute analysis-derived placeholders into the rule prompt.
   * Fix: uses replaceAll so EVERY occurrence of a placeholder is substituted
   * (String.replace with a string pattern only replaced the first one).
   */
  generatePromptFromRule(rule, analysis) {
    let prompt = rule.prompt;
    if (analysis.text?.texts?.length > 0) {
      const firstText = analysis.text.texts[0].content;
      prompt = prompt.replaceAll('{firstText}', firstText);
    }
    if (analysis.colors?.dominantColor) {
      prompt = prompt.replaceAll('{dominantColor}', analysis.colors.dominantColor.name);
    }
    if (analysis.style?.style) {
      prompt = prompt.replaceAll('{style}', analysis.style.style);
    }
    return prompt;
  }
}
// Declarative rule set: each entry carries a condition tree evaluated
// against the image analysis, the edit prompt applied on a match, and a
// `required` flag (a required rule that fails aborts the whole run).
const editingRules = [
  {
    // Translate any detected English text into French.
    name: 'Traduire texte anglais',
    prompt: 'Traduire tout le texte anglais en français',
    required: false,
    condition: {
      type: 'and',
      conditions: [
        { type: 'hasText' },
        { type: 'hasLanguage', language: 'en' }
      ]
    }
  },
  {
    // Sharpen images whose measured quality is low.
    name: 'Améliorer qualité faible',
    prompt: 'Améliorer la netteté et le contraste de l\'image',
    required: false,
    condition: {
      type: 'qualityBelow',
      threshold: 0.6
    }
  },
  {
    // Brand product photography with the company logo.
    name: 'Ajouter logo sur produits',
    prompt: 'Ajouter discrètement le logo de la marque en bas à droite',
    required: false,
    condition: {
      type: 'and',
      conditions: [
        { type: 'hasObject', object: 'product' },
        { type: 'styleIs', style: 'photography' }
      ]
    }
  },
  {
    // Privacy protection: face blurring is mandatory.
    name: 'Flouter visages',
    prompt: 'Flouter tous les visages pour protéger la vie privée',
    required: true,
    condition: {
      type: 'hasFaces'
    }
  }
];
// Usage example
const conditionalEditor = new ConditionalEditor(editor);
const editOutcome = await conditionalEditor.editBasedOnContent(
  './image-mixte.jpg',
  editingRules
);
console.log('Édition conditionnelle terminée:');
console.log('- Image finale:', editOutcome.image);
console.log('- Règles appliquées:', editOutcome.rulesApplied.length);
for (const applied of editOutcome.rulesApplied) {
  console.log(`  ${applied.success ? '✅' : '❌'} ${applied.rule}`);
}
🔧 Pipeline de Traitement Configurable
Système de Pipeline Modulaire
/**
 * Configurable, ordered image-processing pipeline with middleware hooks.
 */
class ProcessingPipeline {
  constructor(editor) {
    this.editor = editor;
    this.steps = [];      // ordered { name, processor, options, enabled }
    this.middleware = []; // hooks with optional before/after methods
  }

  // Register a step; chainable. Disabled steps stay listed but are skipped.
  addStep(name, processor, options = {}) {
    const step = {
      name,
      processor,
      options,
      enabled: options.enabled !== false
    };
    this.steps.push(step);
    return this;
  }

  // Register a middleware object ({ before?, after? }); chainable.
  addMiddleware(middleware) {
    this.middleware.push(middleware);
    return this;
  }

  /**
   * Run the image through every enabled step in order.
   * A step result may carry `imageUrl` or `image` to feed the next step;
   * otherwise the current image passes through unchanged. A failing step is
   * recorded and skipped unless its options mark it as required.
   */
  async process(image, context = {}) {
    const pipelineContext = { ...context, startTime: Date.now() };
    const results = [];
    let currentImage = image;
    console.log(`🔧 Démarrage du pipeline avec ${this.steps.length} étapes`);
    for (let position = 0; position < this.steps.length; position += 1) {
      const step = this.steps[position];
      if (!step.enabled) {
        console.log(`⏭️ Étape ${position + 1} ignorée: ${step.name}`);
        continue;
      }
      try {
        console.log(`🔄 Étape ${position + 1}/${this.steps.length}: ${step.name}`);
        // "before" hooks run inside the try: a hook failure fails the step.
        for (const hook of this.middleware) {
          if (hook.before) {
            await hook.before(step, pipelineContext);
          }
        }
        const startedAt = Date.now();
        const result = await step.processor(currentImage, step.options, pipelineContext);
        const elapsed = Date.now() - startedAt;
        currentImage = result.imageUrl || result.image || currentImage;
        results.push({
          step: step.name,
          success: true,
          duration: elapsed,
          result
        });
        for (const hook of this.middleware) {
          if (hook.after) {
            await hook.after(step, result, pipelineContext);
          }
        }
        console.log(`✅ Étape ${step.name} terminée en ${elapsed}ms`);
      } catch (error) {
        console.error(`❌ Erreur à l'étape ${step.name}:`, error.message);
        results.push({
          step: step.name,
          success: false,
          error: error.message
        });
        if (step.options.required) {
          throw new Error(`Étape requise échouée: ${step.name}`);
        }
      }
    }
    return {
      finalImage: currentImage,
      results,
      duration: Date.now() - pipelineContext.startTime,
      context: pipelineContext
    };
  }
}
// Reusable pipeline step implementations. Each receives (image, stepOptions,
// pipelineContext) and returns an object; an `imageUrl`/`image` field on the
// result becomes the next step's input image.
const processors = {
  // Run the full analyzer and stash the report on the shared context.
  async analyze(image, options, context) {
    const analyzer = new AdvancedImageAnalyzer(context.editor);
    const analysis = await analyzer.analyzeComprehensive(image);
    context.analysis = analysis;
    return { analysis };
  },

  // Abort the pipeline when moderation blocks the image.
  async moderate(image, options, context) {
    const moderator = new ContentModerationSystem(context.editor);
    const moderation = await moderator.moderateImage(image);
    if (moderation.blocked) {
      throw new Error('Contenu bloqué par la modération');
    }
    context.moderation = moderation;
    return { moderation };
  },

  // Translate all visible text (defaults to French).
  async translateText(image, options, context) {
    const targetLanguage = options.targetLanguage || 'fr';
    return context.editor.editText({
      image,
      prompt: `Traduire tout le texte en ${targetLanguage}`
    });
  },

  // Enhance only when the measured quality is below the threshold.
  async enhanceQuality(image, options, context) {
    const quality = context.analysis?.quality?.overall || 1;
    const threshold = options.threshold || 0.7;
    if (quality >= threshold) {
      return { image }; // already good enough, pass through unchanged
    }
    return context.editor.enhanceImage({
      image,
      enhancements: ['sharpen', 'denoise', 'upscale']
    });
  },

  // Stamp a semi-transparent watermark.
  async addWatermark(image, options, context) {
    const text = options.text || 'Copyright';
    const position = options.position || 'bas droite';
    return context.editor.editElement({
      image,
      prompt: `Ajouter un filigrane "${text}" en ${position}`,
      opacity: options.opacity || 0.3
    });
  },

  // Resize, preserving aspect ratio unless explicitly disabled.
  async resize(image, options, context) {
    return context.editor.resizeImage({
      image,
      width: options.width,
      height: options.height,
      maintainAspectRatio: options.maintainAspectRatio !== false
    });
  }
};
// Logging middleware: traces step boundaries and credit consumption.
const loggingMiddleware = {
  async before(step, context) {
    console.log(`📊 Début de l'étape: ${step.name}`);
  },
  async after(step, result, context) {
    console.log(`📈 Fin de l'étape: ${step.name}`);
    if (result.credits) {
      console.log(`💳 Crédits utilisés: ${result.credits}`);
    }
  }
};
// In-memory cache middleware for pipeline steps.
// Fix: before/after/generateCacheKey were arrow-function properties, so
// `this` did NOT refer to cacheMiddleware (arrows capture the enclosing
// scope's `this`) — `this.cache` and `this.generateCacheKey` were undefined
// and every cache-enabled step threw. Method shorthand restores the
// expected `this` binding.
const cacheMiddleware = {
  cache: new Map(), // cacheKey -> cached step result

  // Before a cache-enabled step: expose any previous result on the context.
  async before(step, context) {
    if (!step.options.cache) return;
    const key = this.generateCacheKey(step, context);
    const cached = this.cache.get(key);
    if (cached) {
      console.log(`💾 Cache hit pour ${step.name}`);
      context.cacheHit = cached;
    }
  },

  // After a cache-enabled step that was not served from cache: store it.
  async after(step, result, context) {
    if (step.options.cache && !context.cacheHit) {
      const key = this.generateCacheKey(step, context);
      this.cache.set(key, result);
      console.log(`💾 Résultat mis en cache pour ${step.name}`);
    }
  },

  // Key combines step name, its options, and the image content hash.
  generateCacheKey(step, context) {
    return `${step.name}_${JSON.stringify(step.options)}_${context.imageHash || 'unknown'}`;
  }
};
// Pipeline assembly: middlewares first, then the ordered processing steps.
const pipeline = new ProcessingPipeline(editor)
  .addMiddleware(loggingMiddleware)
  .addMiddleware(cacheMiddleware)
  .addStep('analyze', processors.analyze, { cache: true })
  .addStep('moderate', processors.moderate, { required: true })
  .addStep('translateText', processors.translateText, { targetLanguage: 'fr', cache: true })
  .addStep('enhanceQuality', processors.enhanceQuality, { threshold: 0.6 })
  .addStep('addWatermark', processors.addWatermark, {
    text: 'Mon Entreprise',
    position: 'bas droite',
    opacity: 0.2
  })
  .addStep('resize', processors.resize, { width: 1200, height: 800 });

// Run the pipeline against a single source image.
const result = await pipeline.process('./image-source.jpg', {
  editor,
  userId: '12345',
  projectId: 'abc'
});

const succeededCount = result.results.filter((r) => r.success).length;
const failedCount = result.results.filter((r) => !r.success).length;
console.log('Pipeline terminé:');
console.log('- Image finale:', result.finalImage);
console.log('- Durée totale:', result.duration + 'ms');
console.log('- Étapes réussies:', succeededCount);
console.log('- Étapes échouées:', failedCount);