commit 2c87acd607
968 changed files with 108767 additions and 0 deletions
@@ -0,0 +1,10 @@ |
|||
PORT=3002 |
|||
NODE_ENV=development |
|||
API_VERSION=1.0.0 |
|||
|
|||
# Database Configuration |
|||
DB_HOST=localhost |
|||
DB_USER=restaurant |
|||
DB_PASSWORD=QOelY5SSWWgGeig |
|||
DB_NAME=restaurant |
|||
DB_PORT=3306 |
|||
@@ -0,0 +1,12 @@ |
|||
{ |
|||
"name": "ITRIMOBE", |
|||
"host": "185.70.105.157", |
|||
"protocol": "sftp", |
|||
"port": 22, |
|||
"username": "restaurant", |
|||
"password": "HRiTCyzE1ii3pY8", |
|||
"remotePath": "/home/restaurant/public_html/backend", |
|||
"uploadOnSave": true, |
|||
"useTempFile": false, |
|||
"openSsh": false |
|||
} |
|||
@@ -0,0 +1,22 @@ |
|||
const { Sequelize } = require('sequelize'); |
|||
require('dotenv').config(); |
|||
|
|||
const sequelize = new Sequelize( |
|||
process.env.DB_NAME || 'restaurant', |
|||
process.env.DB_USER || 'restaurant', |
|||
process.env.DB_PASSWORD || 'QOelY5SSWWgGeig', |
|||
{ |
|||
host: process.env.DB_HOST || 'localhost', |
|||
port: process.env.DB_PORT || 3306, |
|||
dialect: 'mysql', |
|||
logging: process.env.NODE_ENV === 'development' ? console.log : false, |
|||
pool: { |
|||
max: 5, |
|||
min: 0, |
|||
acquire: 30000, |
|||
idle: 10000 |
|||
} |
|||
} |
|||
); |
|||
|
|||
module.exports = sequelize; |
|||
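Not part of the commit: a minimal startup check for the exported connection. `authenticate()` is the standard Sequelize call; the require path assumes the file above lives at config/database.js.

// sketch: fail fast if the database is unreachable (path is an assumption)
const sequelize = require('./config/database');

sequelize.authenticate()
  .then(() => console.log('Database connection OK'))
  .catch((err) => {
    console.error('Unable to connect to the database:', err.message);
    process.exit(1);
  });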
@@ -0,0 +1,587 @@ |
|||
const { Op } = require('sequelize'); |
|||
const { Utilisateur, sequelize } = require('../models/associations'); |
|||
const bcrypt = require('bcryptjs'); |
|||
const jwt = require('jsonwebtoken'); |
|||
const multer = require('multer'); |
|||
const path = require('path'); |
|||
const fs = require('fs').promises; |
|||
|
|||
// Multer configuration for photo uploads |
|||
const storage = multer.diskStorage({ |
|||
destination: async (req, file, cb) => { |
|||
const uploadDir = path.join(__dirname, '../uploads/utilisateurs'); |
|||
try { |
|||
await fs.mkdir(uploadDir, { recursive: true }); |
|||
cb(null, uploadDir); |
|||
} catch (error) { |
|||
cb(error); |
|||
} |
|||
}, |
|||
filename: (req, file, cb) => { |
|||
const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9); |
|||
cb(null, 'user-' + uniqueSuffix + path.extname(file.originalname)); |
|||
} |
|||
}); |
|||
|
|||
const upload = multer({ |
|||
storage: storage, |
|||
limits: { fileSize: 5 * 1024 * 1024 }, // 5MB
|
|||
fileFilter: (req, file, cb) => { |
|||
const allowedTypes = /jpeg|jpg|png|gif/; |
|||
const extname = allowedTypes.test(path.extname(file.originalname).toLowerCase()); |
|||
const mimetype = allowedTypes.test(file.mimetype); |
|||
|
|||
if (mimetype && extname) { |
|||
return cb(null, true); |
|||
} else { |
|||
cb(new Error('Seules les images sont autorisées')); |
|||
} |
|||
} |
|||
}); |
|||
|
|||
class UtilisateurController { |
|||
// List all users |
|||
async getAllUtilisateurs(req, res) { |
|||
try { |
|||
const { |
|||
page = 1, |
|||
limit = 10, |
|||
role, |
|||
statut, |
|||
search, |
|||
sort_by = 'cree_le', |
|||
sort_order = 'DESC' |
|||
} = req.query; |
|||
|
|||
const offset = (page - 1) * limit; |
|||
const whereConditions = {}; |
|||
|
|||
// Filters |
|||
if (role) whereConditions.role = role; |
|||
if (statut) whereConditions.statut = statut; |
|||
if (search) { |
|||
whereConditions[Op.or] = [ |
|||
{ nom: { [Op.like]: `%${search}%` } }, |
|||
{ prenom: { [Op.like]: `%${search}%` } }, |
|||
{ email: { [Op.like]: `%${search}%` } } |
|||
]; |
|||
} |
|||
|
|||
const { rows: utilisateurs, count } = await Utilisateur.findAndCountAll({ |
|||
where: whereConditions, |
|||
attributes: { exclude: ['mot_de_passe', 'token_reset'] }, |
|||
order: [[sort_by, sort_order]], |
|||
limit: parseInt(limit), |
|||
offset: parseInt(offset) |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
utilisateurs, |
|||
pagination: { |
|||
current_page: parseInt(page), |
|||
total_pages: Math.ceil(count / limit), |
|||
total_items: count, |
|||
items_per_page: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des utilisateurs', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get a user by ID |
|||
async getUtilisateurById(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const utilisateur = await Utilisateur.findByPk(id, { |
|||
attributes: { exclude: ['mot_de_passe', 'token_reset'] } |
|||
}); |
|||
|
|||
if (!utilisateur) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Utilisateur non trouvé' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: utilisateur |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération de l\'utilisateur', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Create a new user |
|||
async createUtilisateur(req, res) { |
|||
const transaction = await sequelize.transaction(); |
|||
|
|||
try { |
|||
const { |
|||
nom, |
|||
prenom, |
|||
email, |
|||
mot_de_passe, |
|||
telephone, |
|||
role, |
|||
date_embauche, |
|||
salaire, |
|||
adresse, |
|||
date_naissance |
|||
} = req.body; |
|||
|
|||
// Check whether the email is already in use |
|||
const existingUser = await Utilisateur.findOne({ |
|||
where: { email } |
|||
}); |
|||
|
|||
if (existingUser) { |
|||
await transaction.rollback(); |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Cet email est déjà utilisé' |
|||
}); |
|||
} |
|||
|
|||
// Create the user |
|||
const nouveauUtilisateur = await Utilisateur.create({ |
|||
nom, |
|||
prenom, |
|||
email, |
|||
mot_de_passe, |
|||
telephone, |
|||
role: role || 'serveur', |
|||
date_embauche: date_embauche ? new Date(date_embauche) : null, |
|||
salaire: salaire ? parseFloat(salaire) : null, |
|||
adresse, |
|||
date_naissance: date_naissance ? new Date(date_naissance) : null, |
|||
photo: req.file ? `/uploads/utilisateurs/${req.file.filename}` : null |
|||
}, { transaction }); |
|||
|
|||
await transaction.commit(); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Utilisateur créé avec succès', |
|||
data: nouveauUtilisateur.toSafeJSON() |
|||
}); |
|||
} catch (error) { |
|||
await transaction.rollback(); |
|||
|
|||
// Remove the uploaded file if anything failed |
|||
if (req.file) { |
|||
try { |
|||
await fs.unlink(req.file.path); |
|||
} catch (unlinkError) { |
|||
console.error('Erreur lors de la suppression du fichier:', unlinkError); |
|||
} |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création de l\'utilisateur', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update a user |
|||
async updateUtilisateur(req, res) { |
|||
const transaction = await sequelize.transaction(); |
|||
|
|||
try { |
|||
const { id } = req.params; |
|||
const updateData = { ...req.body }; |
|||
|
|||
// Drop the password from the payload when it is empty |
|||
if (updateData.mot_de_passe === '') { |
|||
delete updateData.mot_de_passe; |
|||
} |
|||
|
|||
// Check that the user exists |
|||
const utilisateur = await Utilisateur.findByPk(id); |
|||
if (!utilisateur) { |
|||
await transaction.rollback(); |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Utilisateur non trouvé' |
|||
}); |
|||
} |
|||
|
|||
// Enforce email uniqueness when it changes |
|||
if (updateData.email && updateData.email !== utilisateur.email) { |
|||
const existingUser = await Utilisateur.findOne({ |
|||
where: { |
|||
email: updateData.email, |
|||
id: { [Op.ne]: id } |
|||
} |
|||
}); |
|||
|
|||
if (existingUser) { |
|||
await transaction.rollback(); |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Cet email est déjà utilisé par un autre utilisateur' |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Attach the new photo if one was uploaded |
|||
if (req.file) { |
|||
// Delete the old photo |
|||
if (utilisateur.photo) { |
|||
const oldPhotoPath = path.join(__dirname, '../uploads/utilisateurs', path.basename(utilisateur.photo)); |
|||
try { |
|||
await fs.unlink(oldPhotoPath); |
|||
} catch (error) { |
|||
console.log('Ancienne photo non trouvée ou déjà supprimée'); |
|||
} |
|||
} |
|||
updateData.photo = `/uploads/utilisateurs/${req.file.filename}`; |
|||
} |
|||
|
|||
// Convert the dates |
|||
if (updateData.date_embauche) { |
|||
updateData.date_embauche = new Date(updateData.date_embauche); |
|||
} |
|||
if (updateData.date_naissance) { |
|||
updateData.date_naissance = new Date(updateData.date_naissance); |
|||
} |
|||
if (updateData.salaire) { |
|||
updateData.salaire = parseFloat(updateData.salaire); |
|||
} |
|||
|
|||
// Update the user |
|||
await utilisateur.update(updateData, { transaction }); |
|||
|
|||
await transaction.commit(); |
|||
|
|||
// Reload the user with the fresh data |
|||
await utilisateur.reload(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Utilisateur mis à jour avec succès', |
|||
data: utilisateur.toSafeJSON() |
|||
}); |
|||
} catch (error) { |
|||
await transaction.rollback(); |
|||
|
|||
// Remove the newly uploaded file if anything failed |
|||
if (req.file) { |
|||
try { |
|||
await fs.unlink(req.file.path); |
|||
} catch (unlinkError) { |
|||
console.error('Erreur lors de la suppression du fichier:', unlinkError); |
|||
} |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour de l\'utilisateur', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Delete a user |
|||
async deleteUtilisateur(req, res) { |
|||
const transaction = await sequelize.transaction(); |
|||
|
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const utilisateur = await Utilisateur.findByPk(id); |
|||
if (!utilisateur) { |
|||
await transaction.rollback(); |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Utilisateur non trouvé' |
|||
}); |
|||
} |
|||
|
|||
// Delete the photo if it exists |
|||
if (utilisateur.photo) { |
|||
const photoPath = path.join(__dirname, '../uploads/utilisateurs', path.basename(utilisateur.photo)); |
|||
try { |
|||
await fs.unlink(photoPath); |
|||
} catch (error) { |
|||
console.log('Photo non trouvée ou déjà supprimée'); |
|||
} |
|||
} |
|||
|
|||
await utilisateur.destroy({ transaction }); |
|||
await transaction.commit(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Utilisateur supprimé avec succès' |
|||
}); |
|||
} catch (error) { |
|||
await transaction.rollback(); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression de l\'utilisateur', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Change a user's status |
|||
async changeStatut(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { statut } = req.body; |
|||
|
|||
if (!['actif', 'inactif', 'suspendu'].includes(statut)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Statut invalide' |
|||
}); |
|||
} |
|||
|
|||
const utilisateur = await Utilisateur.findByPk(id); |
|||
if (!utilisateur) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Utilisateur non trouvé' |
|||
}); |
|||
} |
|||
|
|||
await utilisateur.update({ statut }); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: `Statut changé vers "${statut}" avec succès`, |
|||
data: utilisateur.toSafeJSON() |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors du changement de statut', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Reset the password |
|||
async resetPassword(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { nouveau_mot_de_passe } = req.body; |
|||
|
|||
if (!nouveau_mot_de_passe || nouveau_mot_de_passe.length < 6) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le mot de passe doit contenir au moins 6 caractères' |
|||
}); |
|||
} |
|||
|
|||
const utilisateur = await Utilisateur.findByPk(id); |
|||
if (!utilisateur) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Utilisateur non trouvé' |
|||
}); |
|||
} |
|||
|
|||
await utilisateur.update({ |
|||
mot_de_passe: nouveau_mot_de_passe, |
|||
token_reset: null, |
|||
token_reset_expire: null |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Mot de passe réinitialisé avec succès' |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la réinitialisation du mot de passe', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get user statistics |
|||
async getStats(req, res) { |
|||
try { |
|||
const stats = await sequelize.query(` |
|||
SELECT |
|||
COUNT(*) as total_utilisateurs, |
|||
COUNT(CASE WHEN statut = 'actif' THEN 1 END) as actifs, |
|||
COUNT(CASE WHEN statut = 'inactif' THEN 1 END) as inactifs, |
|||
COUNT(CASE WHEN statut = 'suspendu' THEN 1 END) as suspendus, |
|||
COUNT(CASE WHEN role = 'admin' THEN 1 END) as admins, |
|||
COUNT(CASE WHEN role = 'manager' THEN 1 END) as managers, |
|||
COUNT(CASE WHEN role = 'serveur' THEN 1 END) as serveurs, |
|||
COUNT(CASE WHEN role = 'cuisinier' THEN 1 END) as cuisiniers, |
|||
COUNT(CASE WHEN role = 'caissier' THEN 1 END) as caissiers, |
|||
COUNT(CASE WHEN derniere_connexion >= DATE_SUB(NOW(), INTERVAL 7 DAY) THEN 1 END) as connectes_7_jours, |
|||
COUNT(CASE WHEN derniere_connexion >= DATE_SUB(NOW(), INTERVAL 30 DAY) THEN 1 END) as connectes_30_jours |
|||
FROM utilisateurs |
|||
WHERE est_actif = 1 |
|||
`, {
|
|||
type: sequelize.QueryTypes.SELECT |
|||
}); |
|||
|
|||
const recentUsers = await Utilisateur.findAll({ |
|||
attributes: ['id', 'nom', 'prenom', 'role', 'cree_le'], |
|||
order: [['cree_le', 'DESC']], |
|||
limit: 5 |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
stats: stats[0], |
|||
recent_users: recentUsers |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Search users |
|||
async searchUtilisateurs(req, res) { |
|||
try { |
|||
const { q, role, limit = 10 } = req.query; |
|||
|
|||
if (!q || q.length < 2) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'La recherche doit contenir au moins 2 caractères' |
|||
}); |
|||
} |
|||
|
|||
const whereConditions = { |
|||
[Op.and]: [ |
|||
{ est_actif: true }, |
|||
{ |
|||
[Op.or]: [ |
|||
{ nom: { [Op.like]: `%${q}%` } }, |
|||
{ prenom: { [Op.like]: `%${q}%` } }, |
|||
{ email: { [Op.like]: `%${q}%` } } |
|||
] |
|||
} |
|||
] |
|||
}; |
|||
|
|||
if (role) { |
|||
whereConditions[Op.and].push({ role }); |
|||
} |
|||
|
|||
const utilisateurs = await Utilisateur.findAll({ |
|||
where: whereConditions, |
|||
attributes: ['id', 'nom', 'prenom', 'email', 'role', 'photo'], |
|||
order: [['nom', 'ASC'], ['prenom', 'ASC']], |
|||
limit: parseInt(limit) |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: utilisateurs |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la recherche', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// User login |
|||
async login(req, res) { |
|||
try { |
|||
const { email, mot_de_passe } = req.body; |
|||
|
|||
if (!email || !mot_de_passe) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Email et mot de passe requis' |
|||
}); |
|||
} |
|||
|
|||
// Find the user by email |
|||
const utilisateur = await Utilisateur.findOne({ |
|||
where: { |
|||
email, |
|||
est_actif: true, |
|||
statut: 'actif' |
|||
} |
|||
}); |
|||
|
|||
if (!utilisateur) { |
|||
return res.status(401).json({ |
|||
success: false, |
|||
message: 'Email ou mot de passe incorrect' |
|||
}); |
|||
} |
|||
|
|||
// Verify the password |
|||
const isValid = await utilisateur.verifierMotDePasse(mot_de_passe); |
|||
if (!isValid) { |
|||
return res.status(401).json({ |
|||
success: false, |
|||
message: 'Email ou mot de passe incorrect' |
|||
}); |
|||
} |
|||
|
|||
// Update the last-login timestamp |
|||
await utilisateur.update({ |
|||
derniere_connexion: new Date() |
|||
}); |
|||
|
|||
// Generate the JWT |
|||
const token = jwt.sign( |
|||
{ |
|||
userId: utilisateur.id, |
|||
email: utilisateur.email, |
|||
role: utilisateur.role |
|||
}, |
|||
process.env.JWT_SECRET || 'secret_key', |
|||
{ expiresIn: '24h' } |
|||
); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Connexion réussie', |
|||
data: { |
|||
token, |
|||
utilisateur: utilisateur.toSafeJSON() |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la connexion', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
module.exports = { |
|||
UtilisateurController: new UtilisateurController(), |
|||
uploadPhoto: upload.single('photo') |
|||
}; |
|||
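Not part of the commit: a sketch of how these exports could be mounted on an Express router. The file names and route paths are assumptions; `uploadPhoto` (Multer) must run before the create/update handlers so that `req.file` is populated.

// sketch: routes/utilisateurs.js (hypothetical path)
const express = require('express');
const { UtilisateurController, uploadPhoto } = require('../controllers/utilisateurController');

const router = express.Router();

router.post('/login', (req, res) => UtilisateurController.login(req, res));
router.get('/', (req, res) => UtilisateurController.getAllUtilisateurs(req, res));
router.get('/:id', (req, res) => UtilisateurController.getUtilisateurById(req, res));
router.post('/', uploadPhoto, (req, res) => UtilisateurController.createUtilisateur(req, res));
router.put('/:id', uploadPhoto, (req, res) => UtilisateurController.updateUtilisateur(req, res));
router.delete('/:id', (req, res) => UtilisateurController.deleteUtilisateur(req, res));

module.exports = router;

The JWT returned by login would typically be sent back in an Authorization header on later requests; the verifying middleware is not shown in this excerpt.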
@@ -0,0 +1,193 @@ |
|||
const { MenuCategory, Menu } = require('../models/associations'); |
|||
|
|||
const categoryController = { |
|||
// Get all categories
|
|||
getAllCategories: async (req, res) => { |
|||
try { |
|||
const { include_menus = 'false' } = req.query; |
|||
|
|||
const options = { |
|||
order: [['ordre', 'ASC'], ['nom', 'ASC']] |
|||
}; |
|||
|
|||
if (include_menus === 'true') { |
|||
options.include = [{ |
|||
model: Menu, |
|||
as: 'menus', |
|||
attributes: ['id', 'nom', 'prix', 'disponible'] |
|||
}]; |
|||
} |
|||
|
|||
const categories = await MenuCategory.findAll(options); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: categories |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error fetching categories:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des catégories', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Get category by ID
|
|||
getCategoryById: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const category = await MenuCategory.findByPk(id, { |
|||
include: [{ |
|||
model: Menu, |
|||
as: 'menus', |
|||
attributes: ['id', 'nom', 'prix', 'disponible'] |
|||
}] |
|||
}); |
|||
|
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: category |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error fetching category:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Create category
|
|||
createCategory: async (req, res) => { |
|||
try { |
|||
const { nom, description, ordre = 0, actif = true } = req.body; |
|||
|
|||
if (!nom) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le nom de la catégorie est requis' |
|||
}); |
|||
} |
|||
|
|||
const category = await MenuCategory.create({ |
|||
nom, |
|||
description, |
|||
ordre, |
|||
actif |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Catégorie créée avec succès', |
|||
data: category |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error creating category:', error); |
|||
|
|||
if (error.name === 'SequelizeUniqueConstraintError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Une catégorie avec ce nom existe déjà' |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Update category
|
|||
updateCategory: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
const { nom, description, ordre, actif } = req.body; |
|||
|
|||
const category = await MenuCategory.findByPk(id); |
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
await category.update({ |
|||
nom: nom !== undefined ? nom : category.nom, |
|||
description: description !== undefined ? description : category.description, |
|||
ordre: ordre !== undefined ? ordre : category.ordre, |
|||
actif: actif !== undefined ? actif : category.actif |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Catégorie mise à jour avec succès', |
|||
data: category |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error updating category:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Delete category
|
|||
deleteCategory: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const category = await MenuCategory.findByPk(id); |
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
// Check if category has menus
|
|||
const menuCount = await Menu.count({ where: { categorie_id: id } }); |
|||
if (menuCount > 0) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: `Impossible de supprimer la catégorie. Elle contient ${menuCount} menu(s)` |
|||
}); |
|||
} |
|||
|
|||
await category.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Catégorie supprimée avec succès' |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error deleting category:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
}; |
|||
|
|||
module.exports = categoryController; |
|||
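Not part of the commit: a usage sketch for the `include_menus` flag, which this controller reads as the string 'true' from the query string. The base URL and mount path are assumptions; global fetch requires Node 18+.

// sketch: list categories together with their menus (URL is an assumption)
(async () => {
  const res = await fetch('http://localhost:3002/api/categories?include_menus=true');
  const { success, data } = await res.json();
  // each category carries a `menus` array of { id, nom, prix, disponible }
  console.log(success, data.length);
})();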
@@ -0,0 +1,244 @@ |
|||
const { Client } = require('../models/associations'); |
|||
const { Op } = require('sequelize'); |
|||
|
|||
class ClientController { |
|||
// Get all clients with search and pagination
|
|||
async getAllClients(req, res) { |
|||
try { |
|||
const { |
|||
page = 1, |
|||
limit = 10, |
|||
search, |
|||
actif, |
|||
sort_by = 'created_at', |
|||
sort_order = 'DESC' |
|||
} = req.query; |
|||
|
|||
const offset = (parseInt(page) - 1) * parseInt(limit); |
|||
const whereClause = {}; |
|||
|
|||
// Search filter
|
|||
if (search) { |
|||
whereClause[Op.or] = [ |
|||
{ nom: { [Op.like]: `%${search}%` } }, |
|||
{ prenom: { [Op.like]: `%${search}%` } }, |
|||
{ email: { [Op.like]: `%${search}%` } }, |
|||
{ telephone: { [Op.like]: `%${search}%` } } |
|||
]; |
|||
} |
|||
|
|||
// Active filter
|
|||
if (actif !== undefined) { |
|||
whereClause.actif = actif === 'true'; |
|||
} |
|||
|
|||
const { count, rows } = await Client.findAndCountAll({ |
|||
where: whereClause, |
|||
limit: parseInt(limit), |
|||
offset: offset, |
|||
order: [[sort_by, sort_order.toUpperCase()]], |
|||
attributes: { exclude: ['updated_at'] } |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
clients: rows, |
|||
pagination: { |
|||
currentPage: parseInt(page), |
|||
totalPages: Math.ceil(count / parseInt(limit)), |
|||
totalItems: count, |
|||
itemsPerPage: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des clients', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get client by ID
|
|||
async getClientById(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const client = await Client.findByPk(id, { |
|||
include: [ |
|||
{ |
|||
association: 'reservations', |
|||
limit: 5, |
|||
order: [['date_reservation', 'DESC']] |
|||
}, |
|||
{ |
|||
association: 'commandes', |
|||
limit: 5, |
|||
order: [['date_commande', 'DESC']] |
|||
} |
|||
] |
|||
}); |
|||
|
|||
if (!client) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Client non trouvé' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: client |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération du client', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Create new client
|
|||
async createClient(req, res) { |
|||
try { |
|||
const clientData = req.body; |
|||
|
|||
const client = await Client.create(clientData); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Client créé avec succès', |
|||
data: client |
|||
}); |
|||
} catch (error) { |
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Données invalides', |
|||
errors: error.errors.map(e => ({ |
|||
field: e.path, |
|||
message: e.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
if (error.name === 'SequelizeUniqueConstraintError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Email déjà utilisé' |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création du client', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update client
|
|||
async updateClient(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const updateData = req.body; |
|||
|
|||
const client = await Client.findByPk(id); |
|||
if (!client) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Client non trouvé' |
|||
}); |
|||
} |
|||
|
|||
await client.update(updateData); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Client mis à jour avec succès', |
|||
data: client |
|||
}); |
|||
} catch (error) { |
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Données invalides', |
|||
errors: error.errors.map(e => ({ |
|||
field: e.path, |
|||
message: e.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour du client', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Delete client
|
|||
async deleteClient(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const client = await Client.findByPk(id); |
|||
if (!client) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Client non trouvé' |
|||
}); |
|||
} |
|||
|
|||
await client.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Client supprimé avec succès' |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression du client', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get client statistics
|
|||
async getClientStats(req, res) { |
|||
try { |
|||
const totalClients = await Client.count(); |
|||
const activeClients = await Client.count({ where: { actif: true } }); |
|||
const inactiveClients = await Client.count({ where: { actif: false } }); |
|||
|
|||
const topClients = await Client.findAll({ |
|||
order: [['points_fidelite', 'DESC']], |
|||
limit: 5, |
|||
attributes: ['nom', 'prenom', 'email', 'points_fidelite'] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
totalClients, |
|||
activeClients, |
|||
inactiveClients, |
|||
topClients |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
module.exports = new ClientController(); |
|||
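Not part of the commit: a sketch of the query parameters getAllClients reads and the paginated shape it returns. Base URL and mount path are assumptions; global fetch requires Node 18+.

// sketch: page 2 of active clients matching a search term (URL is an assumption)
(async () => {
  const params = new URLSearchParams({
    search: 'dupont',
    actif: 'true',
    page: '2',
    limit: '20',
    sort_by: 'created_at',
    sort_order: 'ASC'
  });
  const res = await fetch(`http://localhost:3002/api/clients?${params}`);
  const { data } = await res.json();
  // data.clients holds the rows; data.pagination holds currentPage, totalPages, totalItems, itemsPerPage
  console.log(data.pagination);
})();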
@@ -0,0 +1,313 @@ |
|||
// controllers/commandesController.js
|
|||
const Commande = require('../models/Commande'); |
|||
const { Op } = require('sequelize'); |
|||
|
|||
// Get all commandes
|
|||
const getAllCommandes = async (req, res) => { |
|||
try { |
|||
const page = parseInt(req.query.page) || 1; |
|||
const limit = parseInt(req.query.limit) || 10; |
|||
const offset = (page - 1) * limit; |
|||
|
|||
const where = {}; |
|||
|
|||
// Filters
|
|||
if (req.query.statut) { |
|||
where.statut = req.query.statut; |
|||
} |
|||
|
|||
if (req.query.client_id) { |
|||
where.client_id = req.query.client_id; |
|||
} |
|||
|
|||
if (req.query.table_id) { |
|||
where.table_id = req.query.table_id; |
|||
} |
|||
|
|||
if (req.query.serveur) { |
|||
where.serveur = { [Op.like]: `%${req.query.serveur}%` }; |
|||
} |
|||
|
|||
if (req.query.date_debut && req.query.date_fin) { |
|||
where.date_commande = { |
|||
[Op.between]: [req.query.date_debut, req.query.date_fin] |
|||
}; |
|||
} |
|||
|
|||
const { count, rows } = await Commande.findAndCountAll({ |
|||
where, |
|||
limit, |
|||
offset, |
|||
order: [['date_commande', 'DESC']] |
|||
}); |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
data: { |
|||
commandes: rows, |
|||
pagination: { |
|||
currentPage: page, |
|||
totalPages: Math.ceil(count / limit), |
|||
totalItems: count, |
|||
itemsPerPage: limit |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des commandes', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Get commande by ID
|
|||
const getCommandeById = async (req, res) => { |
|||
try { |
|||
const commande = await Commande.findByPk(req.params.id); |
|||
|
|||
if (!commande) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Commande non trouvée' |
|||
}); |
|||
} |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
data: commande |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération de la commande', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Create new commande
|
|||
const createCommande = async (req, res) => { |
|||
try { |
|||
const { |
|||
client_id, |
|||
table_id, |
|||
reservation_id, |
|||
numero_commande, |
|||
statut, |
|||
total_ht, |
|||
total_tva, |
|||
total_ttc, |
|||
mode_paiement, |
|||
commentaires, |
|||
serveur, |
|||
date_commande, |
|||
date_service |
|||
} = req.body; |
|||
|
|||
const newCommande = await Commande.create({ |
|||
client_id, |
|||
table_id, |
|||
reservation_id, |
|||
numero_commande, |
|||
statut: statut || 'en_attente', |
|||
total_ht: total_ht || 0.00, |
|||
total_tva: total_tva || 0.00, |
|||
total_ttc: total_ttc || 0.00, |
|||
mode_paiement, |
|||
commentaires, |
|||
serveur, |
|||
date_commande: date_commande || new Date(), |
|||
date_service |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Commande créée avec succès', |
|||
data: newCommande |
|||
}); |
|||
} catch (error) { |
|||
res.status(400).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création de la commande', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Update commande
|
|||
const updateCommande = async (req, res) => { |
|||
try { |
|||
const commandeId = req.params.id; |
|||
const updateData = req.body; |
|||
|
|||
const commande = await Commande.findByPk(commandeId); |
|||
|
|||
if (!commande) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Commande non trouvée' |
|||
}); |
|||
} |
|||
|
|||
await commande.update(updateData); |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
message: 'Commande mise à jour avec succès', |
|||
data: commande |
|||
}); |
|||
} catch (error) { |
|||
res.status(400).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour de la commande', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Delete commande
|
|||
const deleteCommande = async (req, res) => { |
|||
try { |
|||
const commandeId = req.params.id; |
|||
|
|||
const commande = await Commande.findByPk(commandeId); |
|||
|
|||
if (!commande) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Commande non trouvée' |
|||
}); |
|||
} |
|||
|
|||
await commande.destroy(); |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
message: 'Commande supprimée avec succès' |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression de la commande', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Update commande status
|
|||
const updateStatut = async (req, res) => { |
|||
try { |
|||
const commandeId = req.params.id; |
|||
const { statut } = req.body; |
|||
|
|||
if (!statut) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le statut est requis' |
|||
}); |
|||
} |
|||
|
|||
const commande = await Commande.findByPk(commandeId); |
|||
|
|||
if (!commande) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Commande non trouvée' |
|||
}); |
|||
} |
|||
|
|||
const updateData = { statut }; |
|||
if (statut === 'servie' && !commande.date_service) { |
|||
updateData.date_service = new Date(); |
|||
} |
|||
|
|||
await commande.update(updateData); |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
message: 'Statut de la commande mis à jour avec succès', |
|||
data: commande |
|||
}); |
|||
} catch (error) { |
|||
res.status(400).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour du statut', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Get commandes by status
|
|||
const getCommandesByStatut = async (req, res) => { |
|||
try { |
|||
const { statut } = req.params; |
|||
|
|||
const commandes = await Commande.findAll({ |
|||
where: { statut }, |
|||
order: [['date_commande', 'DESC']] |
|||
}); |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
data: commandes |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des commandes par statut', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// Get daily statistics
|
|||
const getDailyStats = async (req, res) => { |
|||
try { |
|||
const date = req.query.date || new Date().toISOString().split('T')[0]; |
|||
|
|||
const startOfDay = new Date(date + 'T00:00:00.000Z'); |
|||
const endOfDay = new Date(date + 'T23:59:59.999Z'); |
|||
|
|||
const stats = await Commande.findAll({ |
|||
where: { |
|||
date_commande: { |
|||
[Op.between]: [startOfDay, endOfDay] |
|||
} |
|||
}, |
|||
attributes: [ |
|||
'statut', |
|||
[Commande.sequelize.fn('COUNT', Commande.sequelize.col('id')), 'count'], // go through the model: `sequelize` itself is not imported in this file |
|||
[Commande.sequelize.fn('SUM', Commande.sequelize.col('total_ttc')), 'total_amount'] |
|||
], |
|||
group: ['statut'] |
|||
}); |
|||
|
|||
res.status(200).json({ |
|||
success: true, |
|||
data: { |
|||
date, |
|||
statistics: stats |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}; |
|||
|
|||
// IMPORTANT: Make sure this export is at the very end of the file
|
|||
module.exports = { |
|||
getAllCommandes, |
|||
getCommandeById, |
|||
createCommande, |
|||
updateCommande, |
|||
deleteCommande, |
|||
updateStatut, |
|||
getCommandesByStatut, |
|||
getDailyStats |
|||
}; |
|||
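Not part of the commit: unlike the class-based controllers, this module exports plain functions, so they can be passed straight to an Express router. Paths are assumptions; the static '/stats/daily' and '/statut/:statut' routes are registered before '/:id' so they are not swallowed by the ID parameter.

// sketch: routes/commandes.js (hypothetical path)
const express = require('express');
const commandes = require('../controllers/commandesController');

const router = express.Router();

router.get('/stats/daily', commandes.getDailyStats);
router.get('/statut/:statut', commandes.getCommandesByStatut);
router.get('/', commandes.getAllCommandes);
router.get('/:id', commandes.getCommandeById);
router.post('/', commandes.createCommande);
router.put('/:id', commandes.updateCommande);
router.patch('/:id/statut', commandes.updateStatut);
router.delete('/:id', commandes.deleteCommande);

module.exports = router;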
@@ -0,0 +1,495 @@ |
|||
const { MenuCategory, Menu, sequelize } = require('../models/associations'); |
|||
const { Op } = require('sequelize'); |
|||
|
|||
class MenuCategoryController { |
|||
// Get all categories with search and pagination
|
|||
async getAllCategories(req, res) { |
|||
try { |
|||
const { |
|||
page = 1, |
|||
limit = 10, |
|||
search = '', |
|||
actif, |
|||
sort_by = 'ordre', |
|||
sort_order = 'ASC' |
|||
} = req.query; |
|||
|
|||
const offset = (parseInt(page) - 1) * parseInt(limit); |
|||
|
|||
// Build where conditions
|
|||
const whereConditions = {}; |
|||
|
|||
if (search) { |
|||
whereConditions[Op.or] = [ |
|||
{ nom: { [Op.like]: `%${search}%` } }, |
|||
{ description: { [Op.like]: `%${search}%` } } |
|||
]; |
|||
} |
|||
|
|||
if (actif !== undefined) { |
|||
whereConditions.actif = actif === 'true'; |
|||
} |
|||
|
|||
// Validate sort fields
|
|||
const validSortFields = ['nom', 'ordre', 'created_at', 'updated_at']; |
|||
const sortField = validSortFields.includes(sort_by) ? sort_by : 'ordre'; |
|||
const sortOrder = ['ASC', 'DESC'].includes(sort_order.toUpperCase()) ? |
|||
sort_order.toUpperCase() : 'ASC'; |
|||
|
|||
const { count, rows } = await MenuCategory.findAndCountAll({ |
|||
where: whereConditions, |
|||
include: [{ |
|||
model: Menu, |
|||
as: 'menus', // use the alias defined in associations.js |
|||
attributes: ['id'], |
|||
required: false |
|||
}], |
|||
order: [[sortField, sortOrder]], |
|||
limit: parseInt(limit), |
|||
offset: offset, |
|||
distinct: true |
|||
}); |
|||
|
|||
// Add menu count to each category
|
|||
const categoriesWithCount = rows.map(category => ({ |
|||
...category.toJSON(), |
|||
menu_count: category.menus ? category.menus.length : 0, // use the association alias |
|||
menus: undefined // Remove the menus array from response
|
|||
})); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
categories: categoriesWithCount, |
|||
pagination: { |
|||
currentPage: parseInt(page), |
|||
totalPages: Math.ceil(count / parseInt(limit)), |
|||
totalItems: count, |
|||
itemsPerPage: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getAllCategories:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des catégories', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get active categories only (for dropdowns, etc.)
|
|||
async getActiveCategories(req, res) { |
|||
try { |
|||
const categories = await MenuCategory.findAll({ |
|||
where: { actif: true }, |
|||
order: [['ordre', 'ASC'], ['nom', 'ASC']], |
|||
attributes: ['id', 'nom', 'description', 'ordre'] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: categories |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des catégories actives', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get category by ID
|
|||
async getCategoryById(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const category = await MenuCategory.findByPk(id, { |
|||
include: [{ |
|||
model: Menu, |
|||
as: 'menus', // use the association alias |
|||
attributes: ['id', 'nom', 'prix', 'actif'], |
|||
required: false |
|||
}] |
|||
}); |
|||
|
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
const categoryData = { |
|||
...category.toJSON(), |
|||
menu_count: category.menus ? category.menus.length : 0 // use the association alias |
|||
}; |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: categoryData |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get all menus in a category
|
|||
async getCategoryMenus(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { actif } = req.query; |
|||
|
|||
const category = await MenuCategory.findByPk(id); |
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
const whereConditions = { categorie_id: id }; // use the actual column name |
|||
if (actif !== undefined) { |
|||
whereConditions.actif = actif === 'true'; |
|||
} |
|||
|
|||
const menus = await Menu.findAll({ |
|||
where: whereConditions, |
|||
order: [['nom', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
category: { |
|||
id: category.id, |
|||
nom: category.nom, |
|||
description: category.description |
|||
}, |
|||
menus: menus |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des menus de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get category statistics
|
|||
async getCategoryStats(req, res) { |
|||
try { |
|||
const [total, active, inactive] = await Promise.all([ |
|||
MenuCategory.count(), |
|||
MenuCategory.count({ where: { actif: true } }), |
|||
MenuCategory.count({ where: { actif: false } }) |
|||
]); |
|||
|
|||
// Get total menus across all categories
|
|||
const totalMenus = await Menu.count(); |
|||
|
|||
// Get categories with most menus
|
|||
const categoriesWithMenuCount = await MenuCategory.findAll({ |
|||
attributes: [ |
|||
'id', |
|||
'nom', |
|||
[sequelize.fn('COUNT', sequelize.col('menus.id')), 'menu_count'] // use the association alias |
|||
], |
|||
include: [{ |
|||
model: Menu, |
|||
as: 'menus', // use the association alias |
|||
attributes: [], |
|||
required: false |
|||
}], |
|||
group: ['MenuCategory.id', 'MenuCategory.nom'], // list every non-aggregated column |
|||
order: [[sequelize.fn('COUNT', sequelize.col('menus.id')), 'DESC']], |
|||
limit: 5 |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
total, |
|||
active, |
|||
inactive, |
|||
totalMenus, |
|||
topCategories: categoriesWithMenuCount.map(cat => ({ |
|||
id: cat.id, |
|||
nom: cat.nom, |
|||
menu_count: parseInt(cat.dataValues.menu_count || 0) |
|||
})) |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getCategoryStats:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Create new category
|
|||
async createCategory(req, res) { |
|||
try { |
|||
const { nom, description, ordre = 0, actif = true } = req.body; |
|||
|
|||
// Validation
|
|||
if (!nom || nom.trim().length === 0) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le nom de la catégorie est requis' |
|||
}); |
|||
} |
|||
|
|||
if (nom.length > 100) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le nom ne peut pas dépasser 100 caractères' |
|||
}); |
|||
} |
|||
|
|||
// Check if category name already exists
|
|||
const existingCategory = await MenuCategory.findOne({ |
|||
where: { nom: nom.trim() } |
|||
}); |
|||
|
|||
if (existingCategory) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Une catégorie avec ce nom existe déjà' |
|||
}); |
|||
} |
|||
|
|||
// If no order specified, set it to be last
|
|||
let finalOrder = ordre; |
|||
if (!ordre || ordre === 0) { |
|||
const maxOrder = await MenuCategory.max('ordre') || 0; |
|||
finalOrder = maxOrder + 1; |
|||
} |
|||
|
|||
const category = await MenuCategory.create({ |
|||
nom: nom.trim(), |
|||
description: description?.trim(), |
|||
ordre: finalOrder, |
|||
actif |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Catégorie créée avec succès', |
|||
data: category |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in createCategory:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update category
|
|||
async updateCategory(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { nom, description, ordre, actif } = req.body; |
|||
|
|||
const category = await MenuCategory.findByPk(id); |
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
// Validation
|
|||
if (!nom || nom.trim().length === 0) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le nom de la catégorie est requis' |
|||
}); |
|||
} |
|||
|
|||
if (nom.length > 100) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le nom ne peut pas dépasser 100 caractères' |
|||
}); |
|||
} |
|||
|
|||
// Check if category name already exists (excluding current category)
|
|||
const existingCategory = await MenuCategory.findOne({ |
|||
where: { |
|||
nom: nom.trim(), |
|||
id: { [Op.ne]: id } |
|||
} |
|||
}); |
|||
|
|||
if (existingCategory) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Une catégorie avec ce nom existe déjà' |
|||
}); |
|||
} |
|||
|
|||
// Update category
|
|||
await category.update({ |
|||
nom: nom.trim(), |
|||
description: description?.trim(), |
|||
ordre: ordre !== undefined ? ordre : category.ordre, |
|||
actif: actif !== undefined ? actif : category.actif |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Catégorie mise à jour avec succès', |
|||
data: category |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Toggle category status
|
|||
async toggleCategoryStatus(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const category = await MenuCategory.findByPk(id); |
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
await category.update({ actif: !category.actif }); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: `Catégorie ${category.actif ? 'activée' : 'désactivée'} avec succès`, |
|||
data: category |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour du statut', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Delete category
|
|||
async deleteCategory(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const category = await MenuCategory.findByPk(id, { |
|||
include: [{ |
|||
model: Menu, |
|||
as: 'menus', // use the association alias |
|||
attributes: ['id'], |
|||
required: false |
|||
}] |
|||
}); |
|||
|
|||
if (!category) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
// Check if category has associated menus
|
|||
if (category.menus && category.menus.length > 0) { // use the association alias |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: `Impossible de supprimer la catégorie. Elle contient ${category.menus.length} menu(s). Veuillez d'abord supprimer ou déplacer les menus.` |
|||
}); |
|||
} |
|||
|
|||
await category.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Catégorie supprimée avec succès' |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression de la catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Reorder categories
|
|||
async reorderCategories(req, res) { |
|||
const transaction = await sequelize.transaction(); |
|||
|
|||
try { |
|||
const { categories } = req.body; |
|||
|
|||
if (!Array.isArray(categories) || categories.length === 0) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Liste des catégories requise' |
|||
}); |
|||
} |
|||
|
|||
// Validate each category object
|
|||
for (const cat of categories) { |
|||
if (!cat.id || cat.ordre === undefined) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Chaque catégorie doit avoir un ID et un ordre' |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update each category's order
|
|||
const updatePromises = categories.map(cat => |
|||
MenuCategory.update( |
|||
{ ordre: cat.ordre }, |
|||
{ |
|||
where: { id: cat.id }, |
|||
transaction |
|||
} |
|||
) |
|||
); |
|||
|
|||
await Promise.all(updatePromises); |
|||
await transaction.commit(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Ordre des catégories mis à jour avec succès' |
|||
}); |
|||
} catch (error) { |
|||
await transaction.rollback(); |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la réorganisation des catégories', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
module.exports = new MenuCategoryController(); |
|||
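Not part of the commit: reorderCategories expects a `categories` array in the request body where each entry carries an `id` and its new `ordre`. Everything else in the sketch below (URL, HTTP verb) is an assumption.

// sketch: persist a new display order after drag-and-drop (URL and verb are assumptions)
(async () => {
  await fetch('http://localhost:3002/api/menu-categories/reorder', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      categories: [
        { id: 3, ordre: 1 },
        { id: 1, ordre: 2 },
        { id: 2, ordre: 3 }
      ]
    })
  });
})();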
@@ -0,0 +1,478 @@ |
|||
const { Menu, MenuCategory } = require('../models/associations'); |
|||
const { Op } = require('sequelize'); |
|||
|
|||
const menuController = { |
|||
// Get all menus with pagination and filters
|
|||
getAllMenus: async (req, res) => { |
|||
try { |
|||
const { |
|||
page = 1, |
|||
limit = 10, |
|||
categorie_id, |
|||
disponible, |
|||
prix_min, |
|||
prix_max, |
|||
search, |
|||
sort_by = 'nom', |
|||
sort_order = 'ASC' |
|||
} = req.query; |
|||
|
|||
const offset = (parseInt(page) - 1) * parseInt(limit); |
|||
const whereClause = {}; |
|||
|
|||
// Apply filters
|
|||
if (categorie_id) { |
|||
whereClause.categorie_id = categorie_id; |
|||
} |
|||
|
|||
if (disponible !== undefined) { |
|||
whereClause.disponible = disponible === 'true'; |
|||
} |
|||
|
|||
if (prix_min || prix_max) { |
|||
whereClause.prix = {}; |
|||
if (prix_min) whereClause.prix[Op.gte] = parseFloat(prix_min); |
|||
if (prix_max) whereClause.prix[Op.lte] = parseFloat(prix_max); |
|||
} |
|||
|
|||
if (search) { |
|||
whereClause[Op.or] = [ |
|||
{ nom: { [Op.like]: `%${search}%` } }, |
|||
{ commentaire: { [Op.like]: `%${search}%` } }, |
|||
{ ingredients: { [Op.like]: `%${search}%` } } |
|||
]; |
|||
} |
|||
|
|||
const { count, rows } = await Menu.findAndCountAll({ |
|||
where: whereClause, |
|||
include: [{ |
|||
model: MenuCategory, |
|||
as: 'category', |
|||
attributes: ['id', 'nom', 'description'] |
|||
}], |
|||
limit: parseInt(limit), |
|||
offset: offset, |
|||
order: [[sort_by, sort_order.toUpperCase()]], |
|||
distinct: true |
|||
}); |
|||
|
|||
const totalPages = Math.ceil(count / parseInt(limit)); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
menus: rows, |
|||
pagination: { |
|||
currentPage: parseInt(page), |
|||
totalPages, |
|||
totalItems: count, |
|||
itemsPerPage: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error fetching menus:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des menus', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Get menu by ID
|
|||
getMenuById: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'ID de menu invalide' |
|||
}); |
|||
} |
|||
|
|||
const menu = await Menu.findByPk(id, { |
|||
include: [{ |
|||
model: MenuCategory, |
|||
as: 'category', |
|||
attributes: ['id', 'nom', 'description'] |
|||
}] |
|||
}); |
|||
|
|||
if (!menu) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Menu non trouvé' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: menu |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error fetching menu:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération du menu', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Get menus by category
|
|||
getMenusByCategory: async (req, res) => { |
|||
try { |
|||
const { categoryId } = req.params; |
|||
const { disponible_only = 'false' } = req.query; |
|||
|
|||
const whereClause = { categorie_id: categoryId }; |
|||
if (disponible_only === 'true') { |
|||
whereClause.disponible = true; |
|||
} |
|||
|
|||
const menus = await Menu.findAll({ |
|||
where: whereClause, |
|||
include: [{ |
|||
model: MenuCategory, |
|||
as: 'category', |
|||
attributes: ['id', 'nom', 'description'] |
|||
}], |
|||
order: [['nom', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: menus, |
|||
count: menus.length |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error fetching menus by category:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des menus par catégorie', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Create new menu
|
|||
createMenu: async (req, res) => { |
|||
try { |
|||
const { |
|||
nom, |
|||
commentaire, |
|||
prix, |
|||
categorie_id, |
|||
disponible = true, |
|||
image_url, |
|||
ingredients, |
|||
allergenes, |
|||
calories, |
|||
temps_preparation |
|||
} = req.body; |
|||
|
|||
// Validate required fields
|
|||
if (!nom || !prix || !categorie_id) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Les champs nom, prix et categorie_id sont requis' |
|||
}); |
|||
} |
|||
|
|||
// Check if category exists
|
|||
const category = await MenuCategory.findByPk(categorie_id); |
|||
if (!category) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
|
|||
const newMenu = await Menu.create({ |
|||
nom, |
|||
commentaire, |
|||
prix, |
|||
categorie_id, |
|||
disponible, |
|||
image_url, |
|||
ingredients, |
|||
allergenes, |
|||
calories, |
|||
temps_preparation |
|||
}); |
|||
|
|||
// Fetch the created menu with category info
|
|||
const menuWithCategory = await Menu.findByPk(newMenu.id, { |
|||
include: [{ |
|||
model: MenuCategory, |
|||
as: 'category', |
|||
attributes: ['id', 'nom', 'description'] |
|||
}] |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Menu créé avec succès', |
|||
data: menuWithCategory |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error creating menu:', error); |
|||
|
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Erreur de validation', |
|||
errors: error.errors.map(err => ({ |
|||
field: err.path, |
|||
message: err.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création du menu', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Update menu
|
|||
updateMenu: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
const { |
|||
nom, |
|||
commentaire, |
|||
prix, |
|||
categorie_id, |
|||
disponible, |
|||
image_url, |
|||
ingredients, |
|||
allergenes, |
|||
calories, |
|||
temps_preparation |
|||
} = req.body; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'ID de menu invalide' |
|||
}); |
|||
} |
|||
|
|||
const menu = await Menu.findByPk(id); |
|||
if (!menu) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Menu non trouvé' |
|||
}); |
|||
} |
|||
|
|||
// Check if new category exists
|
|||
if (categorie_id && categorie_id !== menu.categorie_id) { |
|||
const category = await MenuCategory.findByPk(categorie_id); |
|||
if (!category) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Catégorie non trouvée' |
|||
}); |
|||
} |
|||
} |
|||
|
|||
await menu.update({ |
|||
nom: nom !== undefined ? nom : menu.nom, |
|||
commentaire: commentaire !== undefined ? commentaire : menu.commentaire, |
|||
prix: prix !== undefined ? prix : menu.prix, |
|||
categorie_id: categorie_id !== undefined ? categorie_id : menu.categorie_id, |
|||
disponible: disponible !== undefined ? disponible : menu.disponible, |
|||
image_url: image_url !== undefined ? image_url : menu.image_url, |
|||
ingredients: ingredients !== undefined ? ingredients : menu.ingredients, |
|||
allergenes: allergenes !== undefined ? allergenes : menu.allergenes, |
|||
calories: calories !== undefined ? calories : menu.calories, |
|||
temps_preparation: temps_preparation !== undefined ? temps_preparation : menu.temps_preparation |
|||
}); |
|||
|
|||
// Fetch updated menu with category
|
|||
const updatedMenu = await Menu.findByPk(id, { |
|||
include: [{ |
|||
model: MenuCategory, |
|||
as: 'category', |
|||
attributes: ['id', 'nom', 'description'] |
|||
}] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Menu mis à jour avec succès', |
|||
data: updatedMenu |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error updating menu:', error); |
|||
|
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Erreur de validation', |
|||
errors: error.errors.map(err => ({ |
|||
field: err.path, |
|||
message: err.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour du menu', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Update menu availability
|
|||
updateMenuAvailability: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
const { disponible } = req.body; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'ID de menu invalide' |
|||
}); |
|||
} |
|||
|
|||
if (typeof disponible !== 'boolean') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Le champ disponible doit être un booléen' |
|||
}); |
|||
} |
|||
|
|||
const menu = await Menu.findByPk(id); |
|||
if (!menu) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Menu non trouvé' |
|||
}); |
|||
} |
|||
|
|||
await menu.update({ disponible }); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: `Menu ${disponible ? 'activé' : 'désactivé'} avec succès`, |
|||
data: { id: menu.id, disponible } |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error updating menu availability:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour de la disponibilité', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Delete menu
|
|||
deleteMenu: async (req, res) => { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'ID de menu invalide' |
|||
}); |
|||
} |
|||
|
|||
const menu = await Menu.findByPk(id); |
|||
if (!menu) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Menu non trouvé' |
|||
}); |
|||
} |
|||
|
|||
await menu.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Menu supprimé avec succès' |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error deleting menu:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression du menu', |
|||
error: error.message |
|||
}); |
|||
} |
|||
}, |
|||
|
|||
// Get menu statistics
|
|||
getMenuStats: async (req, res) => { |
|||
try { |
|||
const totalMenus = await Menu.count(); |
|||
const availableMenus = await Menu.count({ where: { disponible: true } }); |
|||
const unavailableMenus = await Menu.count({ where: { disponible: false } }); |
|||
|
|||
const menusByCategory = await Menu.findAll({ |
|||
attributes: [ |
|||
'categorie_id', |
|||
[Menu.sequelize.fn('COUNT', Menu.sequelize.col('id')), 'count'] |
|||
], |
|||
include: [{ |
|||
model: MenuCategory, |
|||
as: 'category', |
|||
attributes: ['nom'] |
|||
}], |
|||
group: ['categorie_id', 'category.id'], |
|||
raw: false |
|||
}); |
|||
|
|||
const priceStats = await Menu.findOne({ |
|||
attributes: [ |
|||
[Menu.sequelize.fn('MIN', Menu.sequelize.col('prix')), 'min_prix'], |
|||
[Menu.sequelize.fn('MAX', Menu.sequelize.col('prix')), 'max_prix'], |
|||
[Menu.sequelize.fn('AVG', Menu.sequelize.col('prix')), 'avg_prix'] |
|||
], |
|||
raw: true |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
total: totalMenus, |
|||
disponible: availableMenus, |
|||
non_disponible: unavailableMenus, |
|||
by_category: menusByCategory, |
|||
price_stats: { |
|||
min_price: parseFloat(priceStats.min_prix) || 0, |
|||
max_price: parseFloat(priceStats.max_prix) || 0, |
|||
avg_price: parseFloat(priceStats.avg_prix) || 0 |
|||
} |
|||
} |
|||
}); |
|||
|
|||
} catch (error) { |
|||
console.error('Error fetching menu statistics:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
}; |
|||
|
|||
module.exports = menuController; |
|||
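A minimal sketch of how these handlers might be mounted, assuming an Express router in a hypothetical routes/menus.js (the real route file is not part of this excerpt); only the handlers visible above are wired, and the paths are illustrative. Since menuController is a plain object literal, its methods can be passed to Express directly without binding.

// routes/menus.js (hypothetical wiring, not from this commit)
const express = require('express');
const router = express.Router();
const menuController = require('../controllers/menuController');

router.get('/stats', menuController.getMenuStats);                         // aggregate counts and price stats
router.put('/:id', menuController.updateMenu);                             // field-by-field update
router.patch('/:id/availability', menuController.updateMenuAvailability);  // toggle 'disponible'
router.delete('/:id', menuController.deleteMenu);

module.exports = router;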
@ -0,0 +1,421 @@ |
|||
const { Reservation, Client, Table } = require('../models/associations'); |
|||
const { Op } = require('sequelize'); |
|||
|
|||
class ReservationController { |
|||
// Get all reservations
|
|||
async getAllReservations(req, res) { |
|||
try { |
|||
const { |
|||
page = 1, |
|||
limit = 10, |
|||
statut, |
|||
date_debut, |
|||
date_fin, |
|||
table_id, |
|||
sort_by = 'date_reservation', |
|||
sort_order = 'ASC' |
|||
} = req.query; |
|||
|
|||
const offset = (parseInt(page) - 1) * parseInt(limit); |
|||
const whereClause = {}; |
|||
|
|||
// Status filter
|
|||
if (statut) { |
|||
whereClause.statut = statut; |
|||
} |
|||
|
|||
// Date range filter
|
|||
if (date_debut && date_fin) { |
|||
whereClause.date_reservation = { |
|||
[Op.between]: [new Date(date_debut), new Date(date_fin)] |
|||
}; |
|||
} else if (date_debut) { |
|||
whereClause.date_reservation = { |
|||
[Op.gte]: new Date(date_debut) |
|||
}; |
|||
} else if (date_fin) { |
|||
whereClause.date_reservation = { |
|||
[Op.lte]: new Date(date_fin) |
|||
}; |
|||
} |
|||
|
|||
// Table filter
|
|||
if (table_id) { |
|||
whereClause.table_id = table_id; |
|||
} |
|||
|
|||
const { count, rows } = await Reservation.findAndCountAll({ |
|||
where: whereClause, |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
as: 'client', |
|||
attributes: ['nom', 'prenom', 'email', 'telephone'] |
|||
}, |
|||
{ |
|||
model: Table, |
|||
as: 'table', |
|||
attributes: ['nom', 'capacity', 'location'] |
|||
} |
|||
], |
|||
limit: parseInt(limit), |
|||
offset: offset, |
|||
order: [[sort_by, sort_order.toUpperCase()]] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
reservations: rows, |
|||
pagination: { |
|||
currentPage: parseInt(page), |
|||
totalPages: Math.ceil(count / parseInt(limit)), |
|||
totalItems: count, |
|||
itemsPerPage: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des réservations', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get reservation by ID
|
|||
async getReservationById(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const reservation = await Reservation.findByPk(id, { |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
as: 'client', |
|||
attributes: ['nom', 'prenom', 'email', 'telephone'] |
|||
}, |
|||
{ |
|||
model: Table, |
|||
as: 'table', |
|||
attributes: ['nom', 'capacity', 'location'] |
|||
}, |
|||
{ |
|||
association: 'commandes' |
|||
} |
|||
] |
|||
}); |
|||
|
|||
if (!reservation) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Réservation non trouvée' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: reservation |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération de la réservation', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Create new reservation
|
|||
async createReservation(req, res) { |
|||
try { |
|||
const reservationData = req.body; |
|||
|
|||
// Check table availability
|
|||
const existingReservation = await Reservation.findOne({ |
|||
where: { |
|||
table_id: reservationData.table_id, |
|||
date_reservation: { |
|||
[Op.between]: [ |
|||
new Date(new Date(reservationData.date_reservation).getTime() - 2 * 60 * 60 * 1000), |
|||
new Date(new Date(reservationData.date_reservation).getTime() + 2 * 60 * 60 * 1000) |
|||
] |
|||
}, |
|||
statut: { |
|||
[Op.in]: ['confirmee', 'en_attente'] |
|||
} |
|||
} |
|||
}); |
|||
|
|||
if (existingReservation) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Table déjà réservée à cette heure' |
|||
}); |
|||
} |
|||
|
|||
// Check table capacity
|
|||
const table = await Table.findByPk(reservationData.table_id); |
|||
if (!table) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Table non trouvée' |
|||
}); |
|||
} |
|||
|
|||
if (reservationData.nombre_personnes > table.capacity) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Nombre de personnes supérieur à la capacité de la table' |
|||
}); |
|||
} |
|||
|
|||
const reservation = await Reservation.create(reservationData); |
|||
|
|||
// Include related data in response
|
|||
const createdReservation = await Reservation.findByPk(reservation.id, { |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
as: 'client', |
|||
attributes: ['nom', 'prenom', 'email', 'telephone'] |
|||
}, |
|||
{ |
|||
model: Table, |
|||
as: 'table', |
|||
attributes: ['nom', 'capacity', 'location'] |
|||
} |
|||
] |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Réservation créée avec succès', |
|||
data: createdReservation |
|||
}); |
|||
} catch (error) { |
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Données invalides', |
|||
errors: error.errors.map(e => ({ |
|||
field: e.path, |
|||
message: e.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création de la réservation', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update reservation
|
|||
async updateReservation(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const updateData = req.body; |
|||
|
|||
const reservation = await Reservation.findByPk(id); |
|||
if (!reservation) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Réservation non trouvée' |
|||
}); |
|||
} |
|||
|
|||
await reservation.update(updateData); |
|||
|
|||
const updatedReservation = await Reservation.findByPk(id, { |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
as: 'client', |
|||
attributes: ['nom', 'prenom', 'email', 'telephone'] |
|||
}, |
|||
{ |
|||
model: Table, |
|||
as: 'table', |
|||
attributes: ['nom', 'capacity', 'location'] |
|||
} |
|||
] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Réservation mise à jour avec succès', |
|||
data: updatedReservation |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour de la réservation', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Delete reservation
|
|||
async deleteReservation(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const reservation = await Reservation.findByPk(id); |
|||
if (!reservation) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Réservation non trouvée' |
|||
}); |
|||
} |
|||
|
|||
await reservation.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Réservation supprimée avec succès' |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression de la réservation', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get reservations by status
|
|||
async getReservationsByStatus(req, res) { |
|||
try { |
|||
const { status } = req.params; |
|||
const { page = 1, limit = 10 } = req.query; |
|||
|
|||
const offset = (parseInt(page) - 1) * parseInt(limit); |
|||
|
|||
const { count, rows } = await Reservation.findAndCountAll({ |
|||
where: { statut: status }, |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
as: 'client', |
|||
attributes: ['nom', 'prenom', 'email', 'telephone'] |
|||
}, |
|||
{ |
|||
model: Table, |
|||
as: 'table', |
|||
attributes: ['nom', 'capacity', 'location'] |
|||
} |
|||
], |
|||
limit: parseInt(limit), |
|||
offset: offset, |
|||
order: [['date_reservation', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
reservations: rows, |
|||
pagination: { |
|||
currentPage: parseInt(page), |
|||
totalPages: Math.ceil(count / parseInt(limit)), |
|||
totalItems: count, |
|||
itemsPerPage: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des réservations', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get today's reservations
|
|||
async getTodayReservations(req, res) { |
|||
try { |
|||
const today = new Date(); |
|||
const startOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate()); |
|||
const endOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate() + 1); |
|||
|
|||
const reservations = await Reservation.findAll({ |
|||
where: { |
|||
date_reservation: { |
|||
[Op.between]: [startOfDay, endOfDay] |
|||
} |
|||
}, |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
as: 'client', |
|||
attributes: ['nom', 'prenom', 'email', 'telephone'] |
|||
}, |
|||
{ |
|||
model: Table, |
|||
as: 'table', |
|||
attributes: ['nom', 'capacity', 'location'] |
|||
} |
|||
], |
|||
order: [['date_reservation', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: reservations |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des réservations du jour', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get reservation statistics
|
|||
async getReservationStats(req, res) { |
|||
try { |
|||
const total = await Reservation.count(); |
|||
const confirmees = await Reservation.count({ where: { statut: 'confirmee' } }); |
|||
const en_attente = await Reservation.count({ where: { statut: 'en_attente' } }); |
|||
const annulees = await Reservation.count({ where: { statut: 'annulee' } }); |
|||
const terminees = await Reservation.count({ where: { statut: 'terminee' } }); |
|||
|
|||
const today = new Date(); |
|||
const startOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate()); |
|||
const endOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate() + 1); |
|||
|
|||
const todayCount = await Reservation.count({ |
|||
where: { |
|||
date_reservation: { |
|||
[Op.between]: [startOfDay, endOfDay] |
|||
} |
|||
} |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
total, |
|||
confirmees, |
|||
en_attente, |
|||
annulees, |
|||
terminees, |
|||
todayCount |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
module.exports = new ReservationController(); |
|||
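For illustration, a sketch of the request body createReservation expects and the conflict rule it enforces; the field names follow the model accesses above, while the route path and all concrete values are hypothetical.

// POST /api/reservations (hypothetical path, illustrative values)
// {
//   "client_id": 12,
//   "table_id": 3,
//   "nombre_personnes": 4,
//   "date_reservation": "2025-06-01T19:30:00.000Z",
//   "statut": "en_attente"
// }
// The controller answers 400 if table 3 already holds a reservation in statut
// 'confirmee' or 'en_attente' whose date_reservation lies within two hours of the
// requested slot (here: between 17:30 and 21:30 UTC), 404 if the table does not
// exist, and 400 again if nombre_personnes exceeds the table's capacity.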
@ -0,0 +1,635 @@ |
|||
const Table = require('../models/Table'); |
|||
const { Op } = require('sequelize'); |
|||
|
|||
class TableController { |
|||
// Get all tables with optional filtering
|
|||
static async getAllTables(req, res) { |
|||
try { |
|||
console.log('🔍 TableController.getAllTables - Début'); |
|||
|
|||
const { |
|||
status, |
|||
location, |
|||
capacity_min, |
|||
capacity_max, |
|||
page = 1, |
|||
limit = 10, |
|||
sort = 'id', |
|||
order = 'ASC' |
|||
} = req.query; |
|||
|
|||
const offset = (page - 1) * limit; |
|||
const whereClause = {}; |
|||
|
|||
// Apply filters
|
|||
if (status) { |
|||
whereClause.status = status; |
|||
} |
|||
|
|||
if (location) { |
|||
whereClause.location = { |
|||
[Op.like]: `%${location}%` |
|||
}; |
|||
} |
|||
|
|||
if (capacity_min || capacity_max) { |
|||
whereClause.capacity = {}; |
|||
if (capacity_min) whereClause.capacity[Op.gte] = parseInt(capacity_min); |
|||
if (capacity_max) whereClause.capacity[Op.lte] = parseInt(capacity_max); |
|||
} |
|||
|
|||
console.log('Where conditions:', whereClause); |
|||
|
|||
const { count, rows } = await Table.findAndCountAll({ |
|||
where: whereClause, |
|||
limit: parseInt(limit), |
|||
offset: parseInt(offset), |
|||
order: [[sort, order.toUpperCase()]] |
|||
}); |
|||
|
|||
console.log(`✅ Found ${count} tables, returning ${rows.length} tables`); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: rows, |
|||
pagination: { |
|||
total: count, |
|||
page: parseInt(page), |
|||
limit: parseInt(limit), |
|||
totalPages: Math.ceil(count / limit) |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getAllTables:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to fetch tables', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get table by ID
|
|||
static async getTableById(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Valid table ID is required' |
|||
}); |
|||
} |
|||
|
|||
const table = await Table.findByPk(id); |
|||
|
|||
if (!table) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
error: 'Table not found' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: table |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getTableById:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to fetch table', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Create new table
|
|||
static async createTable(req, res) { |
|||
try { |
|||
const { nom, capacity, status, location } = req.body; |
|||
|
|||
// Validation
|
|||
if (!nom) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Table name is required' |
|||
}); |
|||
} |
|||
|
|||
if (nom.length > 100) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Table name must be less than 100 characters' |
|||
}); |
|||
} |
|||
|
|||
if (capacity && (capacity < 1 || capacity > 20)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Capacity must be between 1 and 20' |
|||
}); |
|||
} |
|||
|
|||
if (status && !['available', 'occupied', 'reserved', 'maintenance'].includes(status)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Status must be: available, occupied, reserved, or maintenance' |
|||
}); |
|||
} |
|||
|
|||
if (location && location.length > 50) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Location must be less than 50 characters' |
|||
}); |
|||
} |
|||
|
|||
// Check if table name already exists
|
|||
const existingTable = await Table.findOne({ |
|||
where: { nom } |
|||
}); |
|||
|
|||
if (existingTable) { |
|||
return res.status(409).json({ |
|||
success: false, |
|||
error: 'Table name already exists' |
|||
}); |
|||
} |
|||
|
|||
const table = await Table.create({ |
|||
nom, |
|||
capacity: capacity || 4, |
|||
status: status || 'available', |
|||
location: location || null |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
data: table, |
|||
message: 'Table created successfully' |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in createTable:', error); |
|||
|
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Validation error', |
|||
details: error.errors.map(err => ({ |
|||
field: err.path, |
|||
message: err.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
if (error.name === 'SequelizeUniqueConstraintError') { |
|||
return res.status(409).json({ |
|||
success: false, |
|||
error: 'Table name already exists' |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to create table', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update table
|
|||
static async updateTable(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { nom, capacity, status, location } = req.body; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Valid table ID is required' |
|||
}); |
|||
} |
|||
|
|||
const table = await Table.findByPk(id); |
|||
|
|||
if (!table) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
error: 'Table not found' |
|||
}); |
|||
} |
|||
|
|||
// Validation
|
|||
if (nom !== undefined) { |
|||
if (!nom || nom.length === 0) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Table name cannot be empty' |
|||
}); |
|||
} |
|||
|
|||
if (nom.length > 100) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Table name must be less than 100 characters' |
|||
}); |
|||
} |
|||
|
|||
// Check if new name already exists (excluding current table)
|
|||
const existingTable = await Table.findOne({ |
|||
where: { |
|||
nom, |
|||
id: { [Op.ne]: id } |
|||
} |
|||
}); |
|||
|
|||
if (existingTable) { |
|||
return res.status(409).json({ |
|||
success: false, |
|||
error: 'Table name already exists' |
|||
}); |
|||
} |
|||
} |
|||
|
|||
if (capacity !== undefined && (capacity < 1 || capacity > 20)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Capacity must be between 1 and 20' |
|||
}); |
|||
} |
|||
|
|||
if (status && !['available', 'occupied', 'reserved', 'maintenance'].includes(status)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Status must be: available, occupied, reserved, or maintenance' |
|||
}); |
|||
} |
|||
|
|||
if (location !== undefined && location && location.length > 50) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Location must be less than 50 characters' |
|||
}); |
|||
} |
|||
|
|||
// Update fields
|
|||
const updateData = {}; |
|||
if (nom !== undefined) updateData.nom = nom; |
|||
if (capacity !== undefined) updateData.capacity = capacity; |
|||
if (status !== undefined) updateData.status = status; |
|||
if (location !== undefined) updateData.location = location; |
|||
|
|||
await table.update(updateData); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: table, |
|||
message: 'Table updated successfully' |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in updateTable:', error); |
|||
|
|||
if (error.name === 'SequelizeValidationError') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Validation error', |
|||
details: error.errors.map(err => ({ |
|||
field: err.path, |
|||
message: err.message |
|||
})) |
|||
}); |
|||
} |
|||
|
|||
if (error.name === 'SequelizeUniqueConstraintError') { |
|||
return res.status(409).json({ |
|||
success: false, |
|||
error: 'Table name already exists' |
|||
}); |
|||
} |
|||
|
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to update table', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Delete table
|
|||
static async deleteTable(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Valid table ID is required' |
|||
}); |
|||
} |
|||
|
|||
const table = await Table.findByPk(id); |
|||
|
|||
if (!table) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
error: 'Table not found' |
|||
}); |
|||
} |
|||
|
|||
// Check if table is currently occupied or reserved
|
|||
if (table.status === 'occupied' || table.status === 'reserved') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Cannot delete table that is currently occupied or reserved' |
|||
}); |
|||
} |
|||
|
|||
await table.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Table deleted successfully' |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in deleteTable:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to delete table', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get tables by status
|
|||
static async getTablesByStatus(req, res) { |
|||
try { |
|||
const { status } = req.params; |
|||
|
|||
const validStatuses = ['available', 'occupied', 'reserved', 'maintenance']; |
|||
if (!validStatuses.includes(status)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: `Invalid status. Must be: ${validStatuses.join(', ')}` |
|||
}); |
|||
} |
|||
|
|||
const tables = await Table.findAll({ |
|||
where: { status }, |
|||
order: [['nom', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: tables, |
|||
count: tables.length |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getTablesByStatus:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to fetch tables by status', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Update table status
|
|||
static async updateTableStatus(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { status } = req.body; |
|||
|
|||
if (!id || isNaN(id)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Valid table ID is required' |
|||
}); |
|||
} |
|||
|
|||
const validStatuses = ['available', 'occupied', 'reserved', 'maintenance']; |
|||
if (!status || !validStatuses.includes(status)) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: `Valid status is required (${validStatuses.join(', ')})` |
|||
}); |
|||
} |
|||
|
|||
const table = await Table.findByPk(id); |
|||
|
|||
if (!table) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
error: 'Table not found' |
|||
}); |
|||
} |
|||
|
|||
const oldStatus = table.status; |
|||
await table.update({ status }); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: table, |
|||
message: `Table status updated from ${oldStatus} to ${status}` |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in updateTableStatus:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to update table status', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get table statistics
|
|||
static async getTableStats(req, res) { |
|||
try { |
|||
const totalTables = await Table.count(); |
|||
|
|||
const statusStats = await Table.findAll({ |
|||
attributes: [ |
|||
'status', |
|||
[Table.sequelize.fn('COUNT', Table.sequelize.col('status')), 'count'] |
|||
], |
|||
group: ['status'], |
|||
raw: true |
|||
}); |
|||
|
|||
const capacityStats = await Table.findOne({ |
|||
attributes: [ |
|||
[Table.sequelize.fn('SUM', Table.sequelize.col('capacity')), 'total_capacity'], |
|||
[Table.sequelize.fn('AVG', Table.sequelize.col('capacity')), 'avg_capacity'], |
|||
[Table.sequelize.fn('MIN', Table.sequelize.col('capacity')), 'min_capacity'], |
|||
[Table.sequelize.fn('MAX', Table.sequelize.col('capacity')), 'max_capacity'] |
|||
], |
|||
raw: true |
|||
}); |
|||
|
|||
const locationStats = await Table.findAll({ |
|||
where: { |
|||
location: { [Op.ne]: null } |
|||
}, |
|||
attributes: [ |
|||
'location', |
|||
[Table.sequelize.fn('COUNT', Table.sequelize.col('location')), 'count'] |
|||
], |
|||
group: ['location'], |
|||
raw: true |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
total: totalTables, |
|||
statusBreakdown: statusStats.reduce((acc, stat) => { |
|||
acc[stat.status] = parseInt(stat.count); |
|||
return acc; |
|||
}, {}), |
|||
locationBreakdown: locationStats.reduce((acc, stat) => { |
|||
acc[stat.location] = parseInt(stat.count); |
|||
return acc; |
|||
}, {}), |
|||
capacityStats: { |
|||
total: parseInt(capacityStats?.total_capacity) || 0, |
|||
average: parseFloat(capacityStats?.avg_capacity) || 0, |
|||
min: parseInt(capacityStats?.min_capacity) || 0, |
|||
max: parseInt(capacityStats?.max_capacity) || 0 |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getTableStats:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to get table statistics', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Search tables
|
|||
static async searchTables(req, res) { |
|||
try { |
|||
const { q, status, location, capacity_min, limit = 10 } = req.query; |
|||
|
|||
if (!q || q.length < 2) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Search term must be at least 2 characters long' |
|||
}); |
|||
} |
|||
|
|||
const whereClause = { |
|||
[Op.or]: [ |
|||
{ nom: { [Op.like]: `%${q}%` } }, |
|||
{ location: { [Op.like]: `%${q}%` } } |
|||
] |
|||
}; |
|||
|
|||
if (status) { |
|||
whereClause.status = status; |
|||
} |
|||
|
|||
if (location) { |
|||
whereClause.location = { [Op.like]: `%${location}%` }; |
|||
} |
|||
|
|||
if (capacity_min) { |
|||
whereClause.capacity = { [Op.gte]: parseInt(capacity_min) }; |
|||
} |
|||
|
|||
const tables = await Table.findAll({ |
|||
where: whereClause, |
|||
limit: parseInt(limit), |
|||
order: [['nom', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: tables, |
|||
count: tables.length |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in searchTables:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to search tables', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get tables by location
|
|||
static async getTablesByLocation(req, res) { |
|||
try { |
|||
const { location } = req.params; |
|||
|
|||
if (!location) { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
error: 'Location parameter is required' |
|||
}); |
|||
} |
|||
|
|||
const tables = await Table.findAll({ |
|||
where: { |
|||
location: { [Op.like]: `%${location}%` } |
|||
}, |
|||
order: [['nom', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: tables, |
|||
count: tables.length |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getTablesByLocation:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to fetch tables by location', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Get available tables with optional capacity filter
|
|||
static async getAvailableTables(req, res) { |
|||
try { |
|||
const { capacity_min, location } = req.query; |
|||
|
|||
const whereClause = { |
|||
status: 'available' |
|||
}; |
|||
|
|||
if (capacity_min) { |
|||
whereClause.capacity = { [Op.gte]: parseInt(capacity_min) }; |
|||
} |
|||
|
|||
if (location) { |
|||
whereClause.location = { [Op.like]: `%${location}%` }; |
|||
} |
|||
|
|||
const tables = await Table.findAll({ |
|||
where: whereClause, |
|||
order: [['capacity', 'ASC'], ['nom', 'ASC']] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: tables, |
|||
count: tables.length |
|||
}); |
|||
} catch (error) { |
|||
console.error('❌ Error in getAvailableTables:', error); |
|||
res.status(500).json({ |
|||
success: false, |
|||
error: 'Failed to fetch available tables', |
|||
message: error.message |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
module.exports = TableController; |
|||
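As with the other controllers, a hedged wiring sketch; a routes/tables.js is assumed and not shown in this excerpt. The methods are static, so they are referenced on the class itself, and the fixed paths are registered before '/:id' so they are not swallowed by the parameter route.

// routes/tables.js (hypothetical wiring, not from this commit)
const express = require('express');
const router = express.Router();
const TableController = require('../controllers/tableController');

router.get('/', TableController.getAllTables);
router.get('/available', TableController.getAvailableTables);
router.get('/search', TableController.searchTables);
router.get('/stats', TableController.getTableStats);
router.get('/status/:status', TableController.getTablesByStatus);
router.get('/location/:location', TableController.getTablesByLocation);
router.get('/:id', TableController.getTableById);
router.post('/', TableController.createTable);
router.put('/:id', TableController.updateTable);
router.patch('/:id/status', TableController.updateTableStatus);
router.delete('/:id', TableController.deleteTable);

module.exports = router;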
@ -0,0 +1,760 @@ |
|||
const { Ticket, TicketItem, Commande, CommandeItem, Client, Utilisateur, Menu, sequelize } = require('../models/associations'); |
|||
const { Op } = require('sequelize'); |
|||
const PDFDocument = require('pdfkit'); |
|||
const fs = require('fs').promises; |
const { createWriteStream } = require('fs'); // fs.promises has no createWriteStream; the stream API is needed to pipe the PDF to disk |
|||
const path = require('path'); |
|||
|
|||
class TicketController { |
|||
// Générer un numéro de ticket unique
|
|||
async generateTicketNumber() { |
|||
const date = new Date(); |
|||
const year = date.getFullYear().toString().substr(-2); |
|||
const month = (date.getMonth() + 1).toString().padStart(2, '0'); |
|||
const day = date.getDate().toString().padStart(2, '0'); |
|||
|
|||
const prefix = `T${year}${month}${day}`; |
|||
|
|||
// Trouver le dernier ticket du jour
|
|||
const lastTicket = await Ticket.findOne({ |
|||
where: { |
|||
numero_ticket: { |
|||
[Op.like]: `${prefix}%` |
|||
} |
|||
}, |
|||
order: [['numero_ticket', 'DESC']] |
|||
}); |
|||
|
|||
let sequence = 1; |
|||
if (lastTicket) { |
|||
const lastSequence = parseInt(lastTicket.numero_ticket.substr(-4)); |
|||
sequence = lastSequence + 1; |
|||
} |
|||
|
|||
return `${prefix}${sequence.toString().padStart(4, '0')}`; |
|||
} |
|||
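// Worked example (dates and counts are illustrative, not from the source): on
// 2025-06-01, if the latest ticket that day is "T2506010041", the next call
// returns "T2506010042", i.e. "T" + YYMMDD + a zero-padded 4-digit daily sequence.
// The sequence comes from a read-then-create without a lock, so two concurrent
// requests could in principle compute the same number.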
|
|||
// Calculer les montants avec TVA
|
|||
calculateAmounts(items, taux_tva = 20, remise = 0) { |
|||
let montant_ht = 0; |
|||
|
|||
items.forEach(item => { |
|||
const prix_unitaire_ht = item.prix_unitaire_ttc / (1 + taux_tva / 100); |
|||
const montant_item_ht = prix_unitaire_ht * item.quantite - (item.remise_unitaire || 0); |
|||
montant_ht += montant_item_ht; |
|||
}); |
|||
|
|||
montant_ht -= remise; |
|||
const montant_tva = montant_ht * (taux_tva / 100); |
|||
const montant_ttc = montant_ht + montant_tva; |
|||
|
|||
return { |
|||
montant_ht: Math.max(0, montant_ht), |
|||
montant_tva: Math.max(0, montant_tva), |
|||
montant_ttc: Math.max(0, montant_ttc) |
|||
}; |
|||
} |
|||
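// Worked example of calculateAmounts with the default 20% TVA (illustrative
// numbers): one item at 12.00 TTC, quantite 2, no remise:
//   prix_unitaire_ht = 12.00 / 1.20 = 10.00
//   montant_ht       = 10.00 * 2    = 20.00
//   montant_tva      = 20.00 * 0.20 =  4.00
//   montant_ttc      = 20.00 + 4.00 = 24.00
// A global remise is subtracted from montant_ht before the TVA is recomputed,
// so the TTC total reflects tax on the discounted base.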
|
|||
// Obtenir tous les tickets avec pagination et filtres
|
|||
async getAllTickets(req, res) { |
|||
try { |
|||
const { |
|||
page = 1, |
|||
limit = 10, |
|||
search = '', |
|||
statut, |
|||
mode_paiement, |
|||
date_debut, |
|||
date_fin, |
|||
client_id, |
|||
utilisateur_id, |
|||
sort_by = 'date_emission', |
|||
sort_order = 'DESC' |
|||
} = req.query; |
|||
|
|||
const offset = (parseInt(page) - 1) * parseInt(limit); |
|||
|
|||
const whereConditions = {}; |
|||
|
|||
if (search) { |
|||
whereConditions[Op.or] = [ |
|||
{ numero_ticket: { [Op.like]: `%${search}%` } }, |
|||
{ '$Client.nom$': { [Op.like]: `%${search}%` } }, |
|||
{ '$Client.prenom$': { [Op.like]: `%${search}%` } } |
|||
]; |
|||
} |
|||
|
|||
if (statut) whereConditions.statut = statut; |
|||
if (mode_paiement) whereConditions.mode_paiement = mode_paiement; |
|||
if (client_id) whereConditions.client_id = client_id; |
|||
if (utilisateur_id) whereConditions.utilisateur_id = utilisateur_id; |
|||
|
|||
if (date_debut || date_fin) { |
|||
whereConditions.date_emission = {}; |
|||
if (date_debut) whereConditions.date_emission[Op.gte] = new Date(date_debut); |
|||
if (date_fin) whereConditions.date_emission[Op.lte] = new Date(date_fin); |
|||
} |
|||
|
|||
const validSortFields = ['numero_ticket', 'date_emission', 'montant_ttc', 'statut']; |
|||
const sortField = validSortFields.includes(sort_by) ? sort_by : 'date_emission'; |
|||
const sortOrder = ['ASC', 'DESC'].includes(sort_order.toUpperCase()) ? |
|||
sort_order.toUpperCase() : 'DESC'; |
|||
|
|||
const { count, rows } = await Ticket.findAndCountAll({ |
|||
where: whereConditions, |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
attributes: ['id', 'nom', 'prenom', 'email', 'telephone'], |
|||
required: false |
|||
}, |
|||
{ |
|||
model: Utilisateur, |
|||
attributes: ['id', 'nom', 'prenom'], |
|||
required: true |
|||
}, |
|||
{ |
|||
model: Commande, |
|||
attributes: ['id', 'numero_commande'], |
|||
required: true |
|||
}, |
|||
{ |
|||
model: TicketItem, |
|||
attributes: ['id', 'nom_item', 'quantite', 'montant_ttc'], |
|||
required: false |
|||
} |
|||
], |
|||
order: [[sortField, sortOrder]], |
|||
limit: parseInt(limit), |
|||
offset: offset, |
|||
distinct: true |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
tickets: rows, |
|||
pagination: { |
|||
currentPage: parseInt(page), |
|||
totalPages: Math.ceil(count / parseInt(limit)), |
|||
totalItems: count, |
|||
itemsPerPage: parseInt(limit) |
|||
} |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des tickets', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Obtenir un ticket par ID
|
|||
async getTicketById(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const ticket = await Ticket.findByPk(id, { |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
required: false |
|||
}, |
|||
{ |
|||
model: Utilisateur, |
|||
attributes: ['id', 'nom', 'prenom', 'email'] |
|||
}, |
|||
{ |
|||
model: Commande, |
|||
include: [{ |
|||
model: CommandeItem, |
|||
include: [{ model: Menu, attributes: ['nom', 'description'] }] |
|||
}] |
|||
}, |
|||
{ |
|||
model: TicketItem, |
|||
required: false |
|||
} |
|||
] |
|||
}); |
|||
|
|||
if (!ticket) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Ticket non trouvé' |
|||
}); |
|||
} |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: ticket |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération du ticket', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Créer un ticket depuis une commande
|
|||
async createTicketFromOrder(req, res) { |
|||
const transaction = await sequelize.transaction(); |
|||
|
|||
try { |
|||
const { |
|||
commande_id, |
|||
client_id, |
|||
utilisateur_id, |
|||
mode_paiement = 'especes', |
|||
taux_tva = 20, |
|||
remise = 0, |
|||
notes |
|||
} = req.body; |
|||
|
|||
// Vérifier que la commande existe
|
|||
const commande = await Commande.findByPk(commande_id, { |
|||
include: [{ |
|||
model: CommandeItem, |
|||
include: [{ model: Menu }] |
|||
}], |
|||
transaction |
|||
}); |
|||
|
|||
if (!commande) { |
|||
await transaction.rollback(); |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Commande non trouvée' |
|||
}); |
|||
} |
|||
|
|||
if (commande.CommandeItems.length === 0) { |
|||
await transaction.rollback(); |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'La commande ne contient aucun item' |
|||
}); |
|||
} |
|||
|
|||
// Générer le numéro de ticket
|
|||
const numero_ticket = await this.generateTicketNumber(); |
|||
|
|||
// Calculer les montants
|
|||
const amounts = this.calculateAmounts( |
|||
commande.CommandeItems.map(item => ({ |
|||
prix_unitaire_ttc: parseFloat(item.prix_unitaire), |
|||
quantite: item.quantite, |
|||
remise_unitaire: 0 |
|||
})), |
|||
taux_tva, |
|||
remise |
|||
); |
|||
|
|||
// Récupérer les données client si fourni
|
|||
let donnees_client = null; |
|||
if (client_id) { |
|||
const client = await Client.findByPk(client_id, { transaction }); |
|||
if (client) { |
|||
donnees_client = { |
|||
nom: client.nom, |
|||
prenom: client.prenom, |
|||
email: client.email, |
|||
telephone: client.telephone, |
|||
adresse: client.adresse |
|||
}; |
|||
} |
|||
} |
|||
|
|||
// Créer le ticket
|
|||
const ticket = await Ticket.create({ |
|||
numero_ticket, |
|||
commande_id, |
|||
client_id, |
|||
utilisateur_id, |
|||
montant_ht: amounts.montant_ht, |
|||
montant_tva: amounts.montant_tva, |
|||
montant_ttc: amounts.montant_ttc, |
|||
remise, |
|||
taux_tva, |
|||
mode_paiement, |
|||
statut: 'emis', |
|||
date_emission: new Date(), |
|||
notes, |
|||
donnees_client |
|||
}, { transaction }); |
|||
|
|||
// Créer les items du ticket
|
|||
const ticketItems = await Promise.all( |
|||
commande.CommandeItems.map(async (item) => { |
|||
const prix_unitaire_ht = parseFloat(item.prix_unitaire) / (1 + taux_tva / 100); |
|||
const montant_ht = prix_unitaire_ht * item.quantite; |
|||
const montant_tva = montant_ht * (taux_tva / 100); |
|||
const montant_ttc = montant_ht + montant_tva; |
|||
|
|||
return TicketItem.create({ |
|||
ticket_id: ticket.id, |
|||
commande_item_id: item.id, |
|||
nom_item: item.Menu ? item.Menu.nom : `Item ${item.id}`, |
|||
description: item.notes, |
|||
quantite: item.quantite, |
|||
prix_unitaire_ht, |
|||
prix_unitaire_ttc: parseFloat(item.prix_unitaire), |
|||
montant_ht, |
|||
montant_tva, |
|||
montant_ttc, |
|||
taux_tva, |
|||
remise_unitaire: 0 |
|||
}, { transaction }); |
|||
}) |
|||
); |
|||
|
|||
await transaction.commit(); |
|||
|
|||
// Récupérer le ticket complet
|
|||
const ticketComplet = await Ticket.findByPk(ticket.id, { |
|||
include: [ |
|||
{ model: Client }, |
|||
{ model: Utilisateur, attributes: ['nom', 'prenom'] }, |
|||
{ model: Commande }, |
|||
{ model: TicketItem } |
|||
] |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Ticket créé avec succès', |
|||
data: ticketComplet |
|||
}); |
|||
} catch (error) { |
|||
await transaction.rollback(); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la création du ticket', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Mettre à jour le statut d'un ticket
|
|||
async updateTicketStatus(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
const { statut, date_paiement, notes } = req.body; |
|||
|
|||
const ticket = await Ticket.findByPk(id); |
|||
if (!ticket) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Ticket non trouvé' |
|||
}); |
|||
} |
|||
|
|||
const updateData = { statut }; |
|||
|
|||
if (statut === 'paye' && date_paiement) { |
|||
updateData.date_paiement = new Date(date_paiement); |
|||
} |
|||
|
|||
if (notes !== undefined) { |
|||
updateData.notes = notes; |
|||
} |
|||
|
|||
await ticket.update(updateData); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Statut du ticket mis à jour avec succès', |
|||
data: ticket |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la mise à jour du statut', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Obtenir les statistiques des tickets
|
|||
async getTicketStats(req, res) { |
|||
try { |
|||
const { date_debut, date_fin } = req.query; |
|||
|
|||
const whereConditions = {}; |
|||
if (date_debut || date_fin) { |
|||
whereConditions.date_emission = {}; |
|||
if (date_debut) whereConditions.date_emission[Op.gte] = new Date(date_debut); |
|||
if (date_fin) whereConditions.date_emission[Op.lte] = new Date(date_fin); |
|||
} |
|||
|
|||
const [ |
|||
total, |
|||
emis, |
|||
payes, |
|||
annules, |
|||
totalRevenue, |
|||
payedRevenue |
|||
] = await Promise.all([ |
|||
Ticket.count({ where: whereConditions }), |
|||
Ticket.count({ where: { ...whereConditions, statut: 'emis' } }), |
|||
Ticket.count({ where: { ...whereConditions, statut: 'paye' } }), |
|||
Ticket.count({ where: { ...whereConditions, statut: 'annule' } }), |
|||
Ticket.sum('montant_ttc', { where: whereConditions }), |
|||
Ticket.sum('montant_ttc', { |
|||
where: { ...whereConditions, statut: 'paye' } |
|||
}) |
|||
]); |
|||
|
|||
// Statistiques par mode de paiement
|
|||
const paymentStats = await Ticket.findAll({ |
|||
attributes: [ |
|||
'mode_paiement', |
|||
[sequelize.fn('COUNT', sequelize.col('id')), 'count'], |
|||
[sequelize.fn('SUM', sequelize.col('montant_ttc')), 'total'] |
|||
], |
|||
where: { ...whereConditions, statut: 'paye' }, |
|||
group: ['mode_paiement'] |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
total, |
|||
emis, |
|||
payes, |
|||
annules, |
|||
totalRevenue: parseFloat(totalRevenue || 0), |
|||
payedRevenue: parseFloat(payedRevenue || 0), |
|||
paymentMethods: paymentStats.map(stat => ({ |
|||
mode: stat.mode_paiement, |
|||
count: parseInt(stat.dataValues.count), |
|||
total: parseFloat(stat.dataValues.total || 0) |
|||
})) |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la récupération des statistiques', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Générer un PDF pour un ticket
|
|||
async generatePDF(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const ticket = await Ticket.findByPk(id, { |
|||
include: [ |
|||
{ model: Client }, |
|||
{ model: Utilisateur, attributes: ['nom', 'prenom'] }, |
|||
{ model: TicketItem } |
|||
] |
|||
}); |
|||
|
|||
if (!ticket) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Ticket non trouvé' |
|||
}); |
|||
} |
|||
|
|||
// Créer le dossier s'il n'existe pas
|
|||
const pdfDir = path.join(__dirname, '../uploads/tickets'); |
|||
await fs.mkdir(pdfDir, { recursive: true }); |
|||
|
|||
const pdfPath = path.join(pdfDir, `ticket_${ticket.numero_ticket}.pdf`); |
|||
|
|||
// Créer le document PDF
|
|||
const doc = new PDFDocument(); |
|||
doc.pipe(createWriteStream(pdfPath)); |
|||
|
|||
// En-tête
|
|||
doc.fontSize(20).text('TICKET DE CAISSE', { align: 'center' }); |
|||
doc.moveDown(); |
|||
|
|||
doc.fontSize(12).text(`Numéro: ${ticket.numero_ticket}`, { align: 'left' }); |
|||
doc.text(`Date: ${ticket.date_emission.toLocaleDateString('fr-FR')}`, { align: 'left' }); |
|||
doc.text(`Serveur: ${ticket.Utilisateur.nom} ${ticket.Utilisateur.prenom}`, { align: 'left' }); |
|||
|
|||
if (ticket.Client) { |
|||
doc.text(`Client: ${ticket.Client.nom} ${ticket.Client.prenom}`, { align: 'left' }); |
|||
} |
|||
|
|||
doc.moveDown(); |
|||
|
|||
// Détail des items
|
|||
doc.text('DÉTAIL:', { underline: true }); |
|||
doc.moveDown(0.5); |
|||
|
|||
ticket.TicketItems.forEach(item => { |
|||
doc.text(`${item.nom_item} x${item.quantite}`, { continued: true }); |
|||
doc.text(`${parseFloat(item.montant_ttc).toFixed(2)}€`, { align: 'right' }); |
|||
}); |
|||
|
|||
doc.moveDown(); |
|||
|
|||
// Totaux
|
|||
doc.text(`Montant HT: ${parseFloat(ticket.montant_ht).toFixed(2)}€`, { align: 'right' }); |
|||
doc.text(`TVA (${ticket.taux_tva}%): ${parseFloat(ticket.montant_tva).toFixed(2)}€`, { align: 'right' }); |
|||
if (ticket.remise > 0) { |
|||
doc.text(`Remise: ${parseFloat(ticket.remise).toFixed(2)}€`, { align: 'right' }); |
|||
} |
|||
doc.fontSize(14).text(`TOTAL TTC: ${parseFloat(ticket.montant_ttc).toFixed(2)}€`, { align: 'right' }); |
|||
|
|||
doc.moveDown(); |
|||
doc.fontSize(12).text(`Mode de paiement: ${ticket.mode_paiement.toUpperCase()}`, { align: 'left' }); |
|||
doc.text(`Statut: ${ticket.statut.toUpperCase()}`, { align: 'left' }); |
|||
|
|||
if (ticket.notes) { |
|||
doc.moveDown(); |
|||
doc.text(`Notes: ${ticket.notes}`, { align: 'left' }); |
|||
} |
|||
|
|||
// Pied de page
|
|||
doc.moveDown(2); |
|||
doc.fontSize(10).text('Merci de votre visite !', { align: 'center' }); |
|||
doc.text('À bientôt dans notre restaurant', { align: 'center' }); |
|||
|
|||
doc.end(); |
|||
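// NOTE: the JSON response below is sent without waiting for the write stream's
// 'finish' event, so an immediate download could observe a partially written PDF.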
|
|||
// Mettre à jour le chemin du PDF dans la base
|
|||
await ticket.update({ facture_pdf: `tickets/ticket_${ticket.numero_ticket}.pdf` }); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'PDF généré avec succès', |
|||
data: { |
|||
pdf_path: `/uploads/tickets/ticket_${ticket.numero_ticket}.pdf`, |
|||
ticket_id: ticket.id |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la génération du PDF', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Supprimer un ticket
|
|||
async deleteTicket(req, res) { |
|||
try { |
|||
const { id } = req.params; |
|||
|
|||
const ticket = await Ticket.findByPk(id); |
|||
if (!ticket) { |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Ticket non trouvé' |
|||
}); |
|||
} |
|||
|
|||
// Vérifier si le ticket peut être supprimé
|
|||
if (ticket.statut === 'paye') { |
|||
return res.status(400).json({ |
|||
success: false, |
|||
message: 'Impossible de supprimer un ticket payé. Vous pouvez l\'annuler.' |
|||
}); |
|||
} |
|||
|
|||
// Supprimer le fichier PDF s'il existe
|
|||
if (ticket.facture_pdf) { |
|||
const pdfPath = path.join(__dirname, '../uploads/', ticket.facture_pdf); |
|||
try { |
|||
await fs.unlink(pdfPath); |
|||
} catch (err) { |
|||
console.log('PDF file not found or already deleted'); |
|||
} |
|||
} |
|||
|
|||
await ticket.destroy(); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
message: 'Ticket supprimé avec succès' |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la suppression du ticket', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Dupliquer un ticket
|
|||
async duplicateTicket(req, res) { |
|||
const transaction = await sequelize.transaction(); |
|||
|
|||
try { |
|||
const { id } = req.params; |
|||
const { utilisateur_id, notes } = req.body; |
|||
|
|||
const originalTicket = await Ticket.findByPk(id, { |
|||
include: [{ model: TicketItem }], |
|||
transaction |
|||
}); |
|||
|
|||
if (!originalTicket) { |
|||
await transaction.rollback(); |
|||
return res.status(404).json({ |
|||
success: false, |
|||
message: 'Ticket original non trouvé' |
|||
}); |
|||
} |
|||
|
|||
// Générer un nouveau numéro de ticket
|
|||
const numero_ticket = await this.generateTicketNumber(); |
|||
|
|||
// Créer le nouveau ticket
|
|||
const newTicket = await Ticket.create({ |
|||
numero_ticket, |
|||
commande_id: originalTicket.commande_id, |
|||
client_id: originalTicket.client_id, |
|||
utilisateur_id: utilisateur_id || originalTicket.utilisateur_id, |
|||
montant_ht: originalTicket.montant_ht, |
|||
montant_tva: originalTicket.montant_tva, |
|||
montant_ttc: originalTicket.montant_ttc, |
|||
remise: originalTicket.remise, |
|||
taux_tva: originalTicket.taux_tva, |
|||
mode_paiement: originalTicket.mode_paiement, |
|||
statut: 'brouillon', |
|||
date_emission: new Date(), |
|||
notes: notes || `Copie du ticket ${originalTicket.numero_ticket}`, |
|||
donnees_client: originalTicket.donnees_client |
|||
}, { transaction }); |
|||
|
|||
// Dupliquer les items
|
|||
const newItems = await Promise.all( |
|||
originalTicket.TicketItems.map(item => |
|||
TicketItem.create({ |
|||
ticket_id: newTicket.id, |
|||
commande_item_id: item.commande_item_id, |
|||
nom_item: item.nom_item, |
|||
description: item.description, |
|||
quantite: item.quantite, |
|||
prix_unitaire_ht: item.prix_unitaire_ht, |
|||
prix_unitaire_ttc: item.prix_unitaire_ttc, |
|||
montant_ht: item.montant_ht, |
|||
montant_tva: item.montant_tva, |
|||
montant_ttc: item.montant_ttc, |
|||
taux_tva: item.taux_tva, |
|||
remise_unitaire: item.remise_unitaire |
|||
}, { transaction }) |
|||
) |
|||
); |
|||
|
|||
await transaction.commit(); |
|||
|
|||
// Récupérer le ticket complet
|
|||
const ticketComplet = await Ticket.findByPk(newTicket.id, { |
|||
include: [ |
|||
{ model: Client }, |
|||
{ model: Utilisateur, attributes: ['nom', 'prenom'] }, |
|||
{ model: TicketItem } |
|||
] |
|||
}); |
|||
|
|||
res.status(201).json({ |
|||
success: true, |
|||
message: 'Ticket dupliqué avec succès', |
|||
data: ticketComplet |
|||
}); |
|||
} catch (error) { |
|||
await transaction.rollback(); |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la duplication du ticket', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
|
|||
// Recherche avancée de tickets
|
|||
async searchTickets(req, res) { |
|||
try { |
|||
const { |
|||
numero_ticket, |
|||
client_nom, |
|||
montant_min, |
|||
montant_max, |
|||
date_debut, |
|||
date_fin, |
|||
statut, |
|||
mode_paiement, |
|||
limit = 50 |
|||
} = req.query; |
|||
|
|||
const whereConditions = {}; |
|||
const clientWhereConditions = {}; |
|||
|
|||
if (numero_ticket) { |
|||
whereConditions.numero_ticket = { [Op.like]: `%${numero_ticket}%` }; |
|||
} |
|||
|
|||
if (client_nom) { |
|||
clientWhereConditions[Op.or] = [ |
|||
{ nom: { [Op.like]: `%${client_nom}%` } }, |
|||
{ prenom: { [Op.like]: `%${client_nom}%` } } |
|||
]; |
|||
} |
|||
|
|||
if (montant_min || montant_max) { |
|||
whereConditions.montant_ttc = {}; |
|||
if (montant_min) whereConditions.montant_ttc[Op.gte] = parseFloat(montant_min); |
|||
if (montant_max) whereConditions.montant_ttc[Op.lte] = parseFloat(montant_max); |
|||
} |
|||
|
|||
if (date_debut || date_fin) { |
|||
whereConditions.date_emission = {}; |
|||
if (date_debut) whereConditions.date_emission[Op.gte] = new Date(date_debut); |
|||
if (date_fin) whereConditions.date_emission[Op.lte] = new Date(date_fin); |
|||
} |
|||
|
|||
if (statut) whereConditions.statut = statut; |
|||
if (mode_paiement) whereConditions.mode_paiement = mode_paiement; |
|||
|
|||
const tickets = await Ticket.findAll({ |
|||
where: whereConditions, |
|||
include: [ |
|||
{ |
|||
model: Client, |
|||
where: Object.keys(clientWhereConditions).length > 0 ? clientWhereConditions : undefined, |
|||
required: false, |
|||
attributes: ['id', 'nom', 'prenom', 'email'] |
|||
}, |
|||
{ |
|||
model: Utilisateur, |
|||
attributes: ['id', 'nom', 'prenom'] |
|||
} |
|||
], |
|||
order: [['date_emission', 'DESC']], |
|||
limit: parseInt(limit) |
|||
}); |
|||
|
|||
res.json({ |
|||
success: true, |
|||
data: { |
|||
tickets, |
|||
count: tickets.length |
|||
} |
|||
}); |
|||
} catch (error) { |
|||
res.status(500).json({ |
|||
success: false, |
|||
message: 'Erreur lors de la recherche', |
|||
error: error.message |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
module.exports = new TicketController(); |
|||
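One caution, hedged because the route files are not part of this excerpt: the controller is exported as an instance, and createTicketFromOrder and duplicateTicket call this.generateTicketNumber(), so passing those methods to Express unbound would lose the this binding. A sketch of the fix, with a hypothetical routes/tickets.js:

// routes/tickets.js (hypothetical wiring, not from this commit)
const express = require('express');
const router = express.Router();
const ticketController = require('../controllers/ticketController');

// Only the two methods that use `this` need binding; the others are self-contained.
router.post('/', ticketController.createTicketFromOrder.bind(ticketController));
router.post('/:id/duplicate', ticketController.duplicateTicket.bind(ticketController));
router.get('/', ticketController.getAllTickets);
router.get('/stats', ticketController.getTicketStats);
router.get('/search', ticketController.searchTickets);
router.get('/:id', ticketController.getTicketById);
router.get('/:id/pdf', ticketController.generatePDF);
router.patch('/:id/status', ticketController.updateTicketStatus);
router.delete('/:id', ticketController.deleteTicket);

module.exports = router;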
|
|||
@ -0,0 +1,78 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
// Factory export below: the sequelize instance is injected by the caller (e.g. models/associations.js) |
|||
|
|||
module.exports = (sequelize) => { |
|||
const Client = sequelize.define('Client', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
nom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: false, |
|||
validate: { |
|||
notEmpty: { |
|||
msg: 'Le nom est requis' |
|||
} |
|||
} |
|||
}, |
|||
prenom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: false, |
|||
validate: { |
|||
notEmpty: { |
|||
msg: 'Le prénom est requis' |
|||
} |
|||
} |
|||
}, |
|||
email: { |
|||
type: DataTypes.STRING(255), |
|||
allowNull: true, |
|||
unique: true, |
|||
validate: { |
|||
isEmail: { |
|||
msg: 'Email invalide' |
|||
} |
|||
} |
|||
}, |
|||
telephone: { |
|||
type: DataTypes.STRING(20), |
|||
allowNull: true, |
|||
validate: { |
|||
is: { |
|||
args: /^[0-9+\-\s]+$/, |
|||
msg: 'Numéro de téléphone invalide' |
|||
} |
|||
} |
|||
}, |
|||
adresse: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true |
|||
}, |
|||
date_naissance: { |
|||
type: DataTypes.DATEONLY, |
|||
allowNull: true |
|||
}, |
|||
points_fidelite: { |
|||
type: DataTypes.INTEGER, |
|||
defaultValue: 0, |
|||
validate: { |
|||
min: { |
|||
args: [0], |
|||
msg: 'Les points de fidélité doivent être positifs' |
|||
} |
|||
} |
|||
}, |
|||
actif: { |
|||
type: DataTypes.BOOLEAN, |
|||
defaultValue: true |
|||
} |
|||
}, { |
|||
tableName: 'clients', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at' |
|||
}); |
|||
return Client; |
|||
} |
|||
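Because this model is exported as a factory while Commande and Menu export a defined model directly, the shared connection has to be injected by whichever module assembles them. A hedged sketch of that call site, assuming it lives in models/associations.js (referenced by the controllers but not shown here; file paths are guesses):

// models/associations.js (hypothetical excerpt, not from this commit)
const sequelize = require('../config/database');
const Client = require('./Client')(sequelize);             // factory-style model
const CommandeItem = require('./CommandeItem')(sequelize);  // factory-style model
const Commande = require('./commandes');                    // instance-style model
const Menu = require('./Menu');                             // instance-style model

module.exports = { sequelize, Client, CommandeItem, Commande, Menu };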
@ -0,0 +1,86 @@ |
|||
// models/commandes.js
|
|||
const { DataTypes } = require('sequelize'); |
|||
const sequelize = require('../config/database'); |
|||
|
|||
const Commande = sequelize.define('Commande', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
client_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true |
|||
}, |
|||
table_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true |
|||
}, |
|||
reservation_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true |
|||
}, |
|||
numero_commande: { |
|||
type: DataTypes.STRING, |
|||
allowNull: false, |
|||
unique: true |
|||
}, |
|||
statut: { |
|||
type: DataTypes.ENUM, |
|||
values: ['en_attente', 'en_preparation', 'prete', 'servie', 'annulee'], |
|||
defaultValue: 'en_attente', |
|||
allowNull: false |
|||
}, |
|||
total_ht: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00 |
|||
}, |
|||
total_tva: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00 |
|||
}, |
|||
total_ttc: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00 |
|||
}, |
|||
mode_paiement: { |
|||
type: DataTypes.ENUM, |
|||
values: ['especes', 'carte_bancaire', 'cheque', 'virement', 'ticket_restaurant'], |
|||
allowNull: true |
|||
}, |
|||
commentaires: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true |
|||
}, |
|||
serveur: { |
|||
type: DataTypes.STRING, |
|||
allowNull: true |
|||
}, |
|||
date_commande: { |
|||
type: DataTypes.DATE, |
|||
allowNull: false, |
|||
defaultValue: DataTypes.NOW |
|||
}, |
|||
date_service: { |
|||
type: DataTypes.DATE, |
|||
allowNull: true |
|||
} |
|||
}, { |
|||
tableName: 'commandes', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at', |
|||
hooks: { |
|||
beforeCreate: async (commande) => { |
|||
if (!commande.numero_commande) { |
|||
const timestamp = Date.now(); |
|||
commande.numero_commande = `CMD-${timestamp}`; |
|||
} |
|||
} |
|||
} |
|||
}); |
|||
|
|||
module.exports = Commande; |
|||
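A short illustration of the beforeCreate hook above; values are examples only.

// Hypothetical usage (illustrative values)
// const cmd = await Commande.create({ client_id: 1, table_id: 2, serveur: 'Alice' });
// cmd.numero_commande  -> e.g. "CMD-1717245600000"  (prefix CMD- plus Date.now() at creation)
// cmd.statut           -> "en_attente" (default); total_ht/total_tva/total_ttc default to 0.00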
@ -0,0 +1,61 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
// Factory export below: the sequelize instance is injected by the caller (e.g. models/associations.js) |
|||
|
|||
module.exports = (sequelize) => { |
|||
const CommandeItem = sequelize.define('CommandeItem', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
commande_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
references: { |
|||
model: 'commandes', |
|||
key: 'id' |
|||
} |
|||
}, |
|||
menu_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
references: { |
|||
model: 'menus', |
|||
key: 'id' |
|||
} |
|||
}, |
|||
quantite: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
defaultValue: 1, |
|||
validate: { |
|||
min: { |
|||
args: [1], |
|||
msg: 'La quantité doit être au moins de 1' |
|||
} |
|||
} |
|||
}, |
|||
prix_unitaire: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false |
|||
}, |
|||
total_item: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false |
|||
}, |
|||
commentaires: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true |
|||
}, |
|||
statut: { |
|||
type: DataTypes.ENUM('commande', 'en_preparation', 'pret', 'servi'), |
|||
defaultValue: 'commande' |
|||
} |
|||
}, { |
|||
tableName: 'commande_items', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at' |
|||
}); |
|||
return CommandeItem; |
|||
} |
|||
@ -0,0 +1,108 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
const sequelize = require('../config/database'); |
|||
|
|||
const Menu = sequelize.define('Menu', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
nom: { |
|||
type: DataTypes.STRING(255), |
|||
allowNull: false, |
|||
validate: { |
|||
notEmpty: { |
|||
msg: 'Le nom du plat est requis' |
|||
}, |
|||
len: { |
|||
args: [2, 255], |
|||
msg: 'Le nom doit contenir entre 2 et 255 caractères' |
|||
} |
|||
} |
|||
}, |
|||
commentaire: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true, |
|||
validate: { |
|||
len: { |
|||
args: [0, 1000], |
|||
msg: 'Le commentaire ne doit pas dépasser 1000 caractères' |
|||
} |
|||
} |
|||
}, |
|||
prix: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
validate: { |
|||
isDecimal: { |
|||
msg: 'Le prix doit être un nombre décimal' |
|||
}, |
|||
min: { |
|||
args: [0], |
|||
msg: 'Le prix doit être positif' |
|||
} |
|||
} |
|||
}, |
|||
categorie_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
validate: { |
|||
isInt: { |
|||
msg: 'L\'ID de catégorie doit être un entier' |
|||
}, |
|||
min: { |
|||
args: [1], |
|||
msg: 'L\'ID de catégorie doit être positif' |
|||
} |
|||
} |
|||
}, |
|||
disponible: { |
|||
type: DataTypes.BOOLEAN, |
|||
defaultValue: true, |
|||
allowNull: false |
|||
}, |
|||
image_url: { |
|||
type: DataTypes.STRING(500), |
|||
allowNull: true, |
|||
validate: { |
|||
isUrl: { |
|||
msg: 'L\'URL de l\'image doit être valide' |
|||
} |
|||
} |
|||
}, |
|||
ingredients: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true |
|||
}, |
|||
allergenes: { |
|||
type: DataTypes.STRING(500), |
|||
allowNull: true |
|||
}, |
|||
calories: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true, |
|||
validate: { |
|||
min: { |
|||
args: [0], |
|||
msg: 'Les calories doivent être positives' |
|||
} |
|||
} |
|||
}, |
|||
temps_preparation: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true, |
|||
validate: { |
|||
min: { |
|||
args: [1], |
|||
msg: 'Le temps de préparation doit être positif' |
|||
} |
|||
} |
|||
} |
|||
}, { |
|||
tableName: 'menus', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at' |
|||
}); |
|||
|
|||
module.exports = Menu; |
|||
@ -0,0 +1,40 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
const sequelize = require('../config/database'); |
|||
|
|||
|
|||
const MenuCategory = sequelize.define('MenuCategory', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
nom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: false, |
|||
unique: true, |
|||
validate: { |
|||
notEmpty: { |
|||
msg: 'Le nom de la catégorie est requis' |
|||
} |
|||
} |
|||
}, |
|||
description: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true |
|||
}, |
|||
ordre: { |
|||
type: DataTypes.INTEGER, |
|||
defaultValue: 0 |
|||
}, |
|||
actif: { |
|||
type: DataTypes.BOOLEAN, |
|||
defaultValue: true |
|||
} |
|||
}, { |
|||
tableName: 'menu_categories', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at' |
|||
}); |
|||
|
|||
module.exports = MenuCategory; |
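A hedged sketch tying `MenuCategory` to the `Menu` model defined above: create a category, then a dish that references it through `categorie_id`. Require paths and sample values are illustrative only.

```js
// Illustrative seed sketch (assumed require paths); not part of the committed code.
const MenuCategory = require('./models/MenuCategory');
const Menu = require('./models/Menu');

async function seedExemple() {
  const entrees = await MenuCategory.create({ nom: 'Entrées', ordre: 1 });
  await Menu.create({
    nom: 'Salade niçoise',
    prix: 12.50,
    categorie_id: entrees.id,   // foreign key to menu_categories.id
    disponible: true
  });
}
```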
|||
@ -0,0 +1,92 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
const sequelize = require('../config/database'); |
|||
|
|||
module.exports = (sequelize) => { |
|||
const Reservation = sequelize.define('Reservation', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
client_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true, |
|||
references: { |
|||
model: 'clients', |
|||
key: 'id' |
|||
} |
|||
}, |
|||
table_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
references: { |
|||
model: 'tables', |
|||
key: 'id' |
|||
} |
|||
}, |
|||
date_reservation: { |
|||
type: DataTypes.DATE, |
|||
allowNull: false, |
|||
validate: { |
|||
isDate: { |
|||
msg: 'Date de réservation invalide' |
|||
}, |
|||
// Validateur personnalisé : évalué à chaque création, contrairement à isAfter
// dont l'argument serait figé à la date de chargement du module.
estFuture(value) {
  if (new Date(value) <= new Date()) {
    throw new Error('La date de réservation doit être future');
  }
}
|||
} |
|||
}, |
|||
nombre_personnes: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
validate: { |
|||
min: { |
|||
args: [1], |
|||
msg: 'Le nombre de personnes doit être au moins de 1' |
|||
}, |
|||
max: { |
|||
args: [20], |
|||
msg: 'Le nombre de personnes ne peut dépasser 20' |
|||
} |
|||
} |
|||
}, |
|||
statut: { |
|||
type: DataTypes.ENUM('confirmee', 'en_attente', 'annulee', 'terminee'), |
|||
defaultValue: 'en_attente' |
|||
}, |
|||
commentaires: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true |
|||
}, |
|||
telephone: { |
|||
type: DataTypes.STRING(20), |
|||
allowNull: true, |
|||
validate: { |
|||
is: { |
|||
args: /^[0-9+\-\s]+$/, |
|||
msg: 'Numéro de téléphone invalide' |
|||
} |
|||
} |
|||
}, |
|||
nom_contact: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: true |
|||
}, |
|||
email_contact: { |
|||
type: DataTypes.STRING(255), |
|||
allowNull: true, |
|||
validate: { |
|||
isEmail: { |
|||
msg: 'Email de contact invalide' |
|||
} |
|||
} |
|||
} |
|||
}, { |
|||
tableName: 'reservations', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at' |
|||
}); |
|||
return Reservation; |
|||
} |
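Because this model is exported as a factory, it must be initialized with a Sequelize instance before use. A minimal sketch, assuming the `config/database` instance shown earlier (paths and values are illustrative):

```js
// Minimal sketch: initialize the factory, then create a reservation.
const sequelize = require('./config/database');              // assumed path
const Reservation = require('./models/Reservation')(sequelize);

async function reserverExemple() {
  await Reservation.create({
    table_id: 2,
    date_reservation: new Date(Date.now() + 24 * 60 * 60 * 1000), // tomorrow
    nombre_personnes: 4,
    nom_contact: 'Dupont',
    telephone: '+33 6 12 34 56 78'
  });
}
```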
|||
@ -0,0 +1,70 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
const sequelize = require('../config/database'); |
|||
|
|||
|
|||
const Table = sequelize.define('Table', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
nom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: false, |
|||
validate: { |
|||
notEmpty: { |
|||
msg: 'Table name cannot be empty' |
|||
}, |
|||
len: { |
|||
args: [1, 100], |
|||
msg: 'Table name must be between 1 and 100 characters' |
|||
} |
|||
} |
|||
}, |
|||
capacity: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true, |
|||
defaultValue: 4, |
|||
validate: { |
|||
min: { |
|||
args: [1], |
|||
msg: 'Capacity must be at least 1' |
|||
}, |
|||
max: { |
|||
args: [20], |
|||
msg: 'Capacity cannot exceed 20' |
|||
} |
|||
} |
|||
}, |
|||
status: { |
|||
type: DataTypes.ENUM('available', 'occupied', 'reserved', 'maintenance'), |
|||
defaultValue: 'available', |
|||
allowNull: false |
|||
}, |
|||
location: { |
|||
type: DataTypes.STRING(50), |
|||
allowNull: true, |
|||
validate: { |
|||
len: { |
|||
args: [0, 50], |
|||
msg: 'Location must be less than 50 characters' |
|||
} |
|||
} |
|||
} |
|||
}, { |
|||
tableName: 'tables', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at', |
|||
indexes: [ |
|||
{ |
|||
fields: ['status'] |
|||
}, |
|||
{ |
|||
fields: ['nom'], |
|||
unique: true |
|||
} |
|||
] |
|||
}); |
|||
|
|||
module.exports = Table; |
|||
@ -0,0 +1,163 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
|
|||
module.exports = (sequelize) => { |
|||
const Ticket = sequelize.define('Ticket', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
numero_ticket: { |
|||
type: DataTypes.STRING(50), |
|||
allowNull: false, |
|||
unique: true, |
|||
comment: 'Numéro unique du ticket' |
|||
}, |
|||
commande_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
references: { |
|||
model: 'commandes', |
|||
key: 'id' |
|||
}, |
|||
comment: 'ID de la commande associée' |
|||
}, |
|||
table_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true, |
|||
references: { |
|||
model: 'tables', |
|||
key: 'id' |
|||
}, |
|||
comment: 'ID de la table (optionnel)' |
|||
}, |
|||
// Informations du serveur directement dans le ticket
|
|||
serveur_nom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: true, |
|||
comment: 'Nom du serveur' |
|||
}, |
|||
serveur_prenom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: true, |
|||
comment: 'Prénom du serveur' |
|||
}, |
|||
// Données client directement dans le ticket (optionnel)
|
|||
client_nom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: true, |
|||
comment: 'Nom du client (optionnel)' |
|||
}, |
|||
client_prenom: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: true, |
|||
comment: 'Prénom du client (optionnel)' |
|||
}, |
|||
client_telephone: { |
|||
type: DataTypes.STRING(20), |
|||
allowNull: true, |
|||
comment: 'Téléphone du client (optionnel)' |
|||
}, |
|||
client_email: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: true, |
|||
comment: 'Email du client (optionnel)' |
|||
}, |
|||
client_adresse: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true, |
|||
comment: 'Adresse du client (optionnel)' |
|||
}, |
|||
montant_ht: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00, |
|||
comment: 'Montant hors taxes' |
|||
}, |
|||
montant_tva: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00, |
|||
comment: 'Montant de la TVA' |
|||
}, |
|||
montant_ttc: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00, |
|||
comment: 'Montant toutes taxes comprises' |
|||
}, |
|||
remise: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00, |
|||
comment: 'Montant de la remise appliquée' |
|||
}, |
|||
taux_tva: { |
|||
type: DataTypes.DECIMAL(5, 2), |
|||
allowNull: false, |
|||
defaultValue: 20.00, |
|||
comment: 'Taux de TVA en pourcentage' |
|||
}, |
|||
mode_paiement: { |
|||
type: DataTypes.ENUM('especes', 'carte', 'cheque', 'virement', 'mobile', 'autre'), |
|||
allowNull: false, |
|||
defaultValue: 'especes', |
|||
comment: 'Mode de paiement' |
|||
}, |
|||
statut: { |
|||
type: DataTypes.ENUM('brouillon', 'emis', 'paye', 'rembourse', 'annule'), |
|||
allowNull: false, |
|||
defaultValue: 'brouillon', |
|||
comment: 'Statut du ticket' |
|||
}, |
|||
type_service: { |
|||
type: DataTypes.ENUM('sur_place', 'emporter', 'livraison'), |
|||
allowNull: false, |
|||
defaultValue: 'sur_place', |
|||
comment: 'Type de service' |
|||
}, |
|||
date_emission: { |
|||
type: DataTypes.DATE, |
|||
allowNull: false, |
|||
defaultValue: DataTypes.NOW, |
|||
comment: 'Date d\'émission du ticket' |
|||
}, |
|||
date_paiement: { |
|||
type: DataTypes.DATE, |
|||
allowNull: true, |
|||
comment: 'Date de paiement' |
|||
}, |
|||
notes: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true, |
|||
comment: 'Notes sur le ticket' |
|||
}, |
|||
facture_pdf: { |
|||
type: DataTypes.STRING(500), |
|||
allowNull: true, |
|||
comment: 'Chemin vers le PDF généré' |
|||
}, |
|||
donnees_supplementaires: { |
|||
type: DataTypes.JSON, |
|||
allowNull: true, |
|||
comment: 'Données supplémentaires en JSON' |
|||
} |
|||
}, { |
|||
tableName: 'tickets', |
|||
timestamps: true, |
|||
createdAt: 'created_at', |
|||
updatedAt: 'updated_at', |
|||
indexes: [ |
|||
{ fields: ['numero_ticket'], unique: true }, |
|||
{ fields: ['commande_id'] }, |
|||
{ fields: ['table_id'] }, |
|||
{ fields: ['statut'] }, |
|||
{ fields: ['date_emission'] }, |
|||
{ fields: ['client_email'] }, |
|||
{ fields: ['client_telephone'] }, |
|||
{ fields: ['serveur_nom'] } |
|||
] |
|||
}); |
|||
|
|||
return Ticket; |
|||
}; |
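The three amount columns are related by the VAT rate: `montant_tva = montant_ht × taux_tva / 100` and `montant_ttc = montant_ht + montant_tva`. A small hedged sketch of that calculation before saving a ticket (helper name and rounding choice are illustrative, not part of the model):

```js
// Illustrative helper: derive TVA and TTC amounts from an HT amount.
function calculerMontants(montantHT, tauxTVA = 20.0) {
  const montantTVA = Math.round(montantHT * tauxTVA) / 100;        // HT * taux / 100, rounded to cents
  const montantTTC = Math.round((montantHT + montantTVA) * 100) / 100;
  return { montant_ht: montantHT, montant_tva: montantTVA, montant_ttc: montantTTC };
}

// Example: 25.00 € HT at 20% VAT -> 5.00 € TVA, 30.00 € TTC
console.log(calculerMontants(25.00));
```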
|||
@ -0,0 +1,85 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
|
|||
module.exports = (sequelize) => { |
|||
const TicketItem = sequelize.define('TicketItem', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
ticket_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
references: { |
|||
model: 'tickets', |
|||
key: 'id' |
|||
}, |
|||
comment: 'ID du ticket' |
|||
}, |
|||
commande_item_id: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: true, |
|||
comment: 'ID de l\'item de commande (si applicable)' |
|||
}, |
|||
nom_item: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: false, |
|||
comment: 'Nom de l\'item' |
|||
}, |
|||
description: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true, |
|||
comment: 'Description de l\'item' |
|||
}, |
|||
quantite: { |
|||
type: DataTypes.INTEGER, |
|||
allowNull: false, |
|||
defaultValue: 1, |
|||
comment: 'Quantité' |
|||
}, |
|||
prix_unitaire_ht: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
comment: 'Prix unitaire HT' |
|||
}, |
|||
prix_unitaire_ttc: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
comment: 'Prix unitaire TTC' |
|||
}, |
|||
montant_ht: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
comment: 'Montant total HT pour cet item' |
|||
}, |
|||
montant_tva: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
comment: 'Montant de la TVA pour cet item' |
|||
}, |
|||
montant_ttc: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
comment: 'Montant total TTC pour cet item' |
|||
}, |
|||
taux_tva: { |
|||
type: DataTypes.DECIMAL(5, 2), |
|||
allowNull: false, |
|||
defaultValue: 20.00, |
|||
comment: 'Taux de TVA appliqué' |
|||
}, |
|||
remise_unitaire: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: false, |
|||
defaultValue: 0.00, |
|||
comment: 'Remise par unité' |
|||
} |
|||
}, { |
|||
tableName: 'ticket_items', |
|||
timestamps: true, |
|||
createdAt: 'cree_le', |
|||
updatedAt: 'modifie_le' |
|||
}); |
|||
|
|||
return TicketItem; |
|||
}; |
|||
@ -0,0 +1,146 @@ |
|||
const { DataTypes } = require('sequelize'); |
|||
const bcrypt = require('bcryptjs'); |
|||
|
|||
module.exports = (sequelize) => { |
|||
const Utilisateur = sequelize.define('Utilisateur', { |
|||
id: { |
|||
type: DataTypes.INTEGER, |
|||
primaryKey: true, |
|||
autoIncrement: true |
|||
}, |
|||
nom: { |
|||
type: DataTypes.STRING(50), |
|||
allowNull: false, |
|||
comment: 'Nom de famille' |
|||
}, |
|||
prenom: { |
|||
type: DataTypes.STRING(50), |
|||
allowNull: false, |
|||
comment: 'Prénom' |
|||
}, |
|||
email: { |
|||
type: DataTypes.STRING(100), |
|||
allowNull: false, |
|||
unique: true, |
|||
validate: { |
|||
isEmail: true |
|||
}, |
|||
comment: 'Adresse email (unique)' |
|||
}, |
|||
mot_de_passe: { |
|||
type: DataTypes.STRING(255), |
|||
allowNull: false, |
|||
comment: 'Mot de passe hashé' |
|||
}, |
|||
telephone: { |
|||
type: DataTypes.STRING(20), |
|||
allowNull: true, |
|||
comment: 'Numéro de téléphone' |
|||
}, |
|||
role: { |
|||
type: DataTypes.ENUM('admin', 'manager', 'serveur', 'cuisinier', 'caissier'), |
|||
allowNull: false, |
|||
defaultValue: 'serveur', |
|||
comment: 'Rôle de l\'utilisateur' |
|||
}, |
|||
statut: { |
|||
type: DataTypes.ENUM('actif', 'inactif', 'suspendu'), |
|||
allowNull: false, |
|||
defaultValue: 'actif', |
|||
comment: 'Statut du compte' |
|||
}, |
|||
date_embauche: { |
|||
type: DataTypes.DATE, |
|||
allowNull: true, |
|||
comment: 'Date d\'embauche' |
|||
}, |
|||
salaire: { |
|||
type: DataTypes.DECIMAL(10, 2), |
|||
allowNull: true, |
|||
comment: 'Salaire de base' |
|||
}, |
|||
adresse: { |
|||
type: DataTypes.TEXT, |
|||
allowNull: true, |
|||
comment: 'Adresse complète' |
|||
}, |
|||
date_naissance: { |
|||
type: DataTypes.DATE, |
|||
allowNull: true, |
|||
comment: 'Date de naissance' |
|||
}, |
|||
photo: { |
|||
type: DataTypes.STRING(255), |
|||
allowNull: true, |
|||
comment: 'Chemin vers la photo de profil' |
|||
}, |
|||
derniere_connexion: { |
|||
type: DataTypes.DATE, |
|||
allowNull: true, |
|||
comment: 'Date de dernière connexion' |
|||
}, |
|||
token_reset: { |
|||
type: DataTypes.STRING(255), |
|||
allowNull: true, |
|||
comment: 'Token pour réinitialisation mot de passe' |
|||
}, |
|||
token_reset_expire: { |
|||
type: DataTypes.DATE, |
|||
allowNull: true, |
|||
comment: 'Expiration du token de reset' |
|||
}, |
|||
preferences: { |
|||
type: DataTypes.JSON, |
|||
allowNull: true, |
|||
defaultValue: {}, |
|||
comment: 'Préférences utilisateur (JSON)' |
|||
}, |
|||
est_actif: { |
|||
type: DataTypes.BOOLEAN, |
|||
allowNull: false, |
|||
defaultValue: true, |
|||
comment: 'Utilisateur actif ou non' |
|||
} |
|||
}, { |
|||
tableName: 'utilisateurs', |
|||
timestamps: true, |
|||
createdAt: 'cree_le', |
|||
updatedAt: 'modifie_le', |
|||
hooks: { |
|||
// Hash le mot de passe avant création
|
|||
beforeCreate: async (utilisateur) => { |
|||
if (utilisateur.mot_de_passe) { |
|||
const salt = await bcrypt.genSalt(10); |
|||
utilisateur.mot_de_passe = await bcrypt.hash(utilisateur.mot_de_passe, salt); |
|||
} |
|||
}, |
|||
// Hash le mot de passe avant mise à jour
|
|||
beforeUpdate: async (utilisateur) => { |
|||
if (utilisateur.changed('mot_de_passe')) { |
|||
const salt = await bcrypt.genSalt(10); |
|||
utilisateur.mot_de_passe = await bcrypt.hash(utilisateur.mot_de_passe, salt); |
|||
} |
|||
} |
|||
} |
|||
}); |
|||
|
|||
// Méthode pour vérifier le mot de passe
|
|||
Utilisateur.prototype.verifierMotDePasse = async function(motDePasse) { |
|||
return await bcrypt.compare(motDePasse, this.mot_de_passe); |
|||
}; |
|||
|
|||
// Méthode pour obtenir le nom complet
|
|||
Utilisateur.prototype.getNomComplet = function() { |
|||
return `${this.prenom} ${this.nom}`; |
|||
}; |
|||
|
|||
// Méthode pour masquer les données sensibles
|
|||
Utilisateur.prototype.toSafeJSON = function() { |
|||
const values = Object.assign({}, this.get()); |
|||
delete values.mot_de_passe; |
|||
delete values.token_reset; |
|||
return values; |
|||
}; |
|||
|
|||
return Utilisateur; |
|||
}; |
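A hedged sketch of how the instance helpers above would typically be used during login; the require paths are assumptions:

```js
// Illustrative login check using the helpers defined on the model.
const sequelize = require('./config/database');               // assumed path
const Utilisateur = require('./models/Utilisateur')(sequelize);

async function loginExemple(email, motDePasse) {
  const utilisateur = await Utilisateur.findOne({ where: { email } });
  if (!utilisateur) return null;

  const ok = await utilisateur.verifierMotDePasse(motDePasse); // bcrypt.compare under the hood
  return ok ? utilisateur.toSafeJSON() : null;                 // never expose mot_de_passe
}
```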
|||
@ -0,0 +1,165 @@ |
|||
const sequelize = require('../config/database'); |
|||
|
|||
// Import all models and initialize them with sequelize
|
|||
const Menu = require('./Menu');
const MenuCategory = require('./MenuCategory');
const Table = require('./Table');
const Client = require('./Client');
// Reservation et CommandeItem sont exportés sous forme de fabrique
// (module.exports = (sequelize) => ...) : il faut les initialiser ici,
// sinon les vérifications `typeof ... === 'function'` plus bas échouent en silence.
const Reservation = require('./Reservation')(sequelize);
const Commande = require('./Commande');
const CommandeItem = require('./CommandeItem')(sequelize);
|||
|
|||
// Vérifier que tous les modèles sont bien initialisés
|
|||
console.log('🔍 Checking models initialization:'); |
|||
console.log('Menu:', !!Menu && typeof Menu.hasMany === 'function'); |
|||
console.log('MenuCategory:', !!MenuCategory && typeof MenuCategory.hasMany === 'function'); |
|||
console.log('Table:', !!Table && typeof Table.hasMany === 'function'); |
|||
console.log('Client:', !!Client && typeof Client.hasMany === 'function'); |
|||
|
|||
// Sync database (create tables if they don't exist)
|
|||
const initDatabase = async () => { |
|||
try { |
|||
await sequelize.authenticate(); |
|||
console.log('✅ Database connection established successfully.'); |
|||
|
|||
// Sync all models
|
|||
await sequelize.sync({ alter: true }); |
|||
console.log('✅ All models synchronized successfully.'); |
|||
|
|||
} catch (error) { |
|||
console.error('❌ Unable to connect to the database:', error); |
|||
} |
|||
}; |
|||
|
|||
// Define associations only after models are properly initialized
|
|||
const defineAssociations = () => { |
|||
try { |
|||
console.log('🔗 Defining associations...'); |
|||
|
|||
// Vérifier que MenuCategory est bien un modèle Sequelize
|
|||
if (!MenuCategory || typeof MenuCategory.hasMany !== 'function') { |
|||
console.error('❌ MenuCategory is not a valid Sequelize model'); |
|||
console.log('MenuCategory type:', typeof MenuCategory); |
|||
console.log('MenuCategory methods:', MenuCategory ? Object.getOwnPropertyNames(MenuCategory) : 'undefined'); |
|||
return; |
|||
} |
|||
|
|||
// Menu associations
|
|||
MenuCategory.hasMany(Menu, { |
|||
foreignKey: 'categorie_id', |
|||
as: 'menus' |
|||
}); |
|||
|
|||
Menu.belongsTo(MenuCategory, { |
|||
foreignKey: 'categorie_id', |
|||
as: 'category' |
|||
}); |
|||
|
|||
// Client associations
|
|||
if (Client && typeof Client.hasMany === 'function') { |
|||
// Client.hasMany(Reservation, {
|
|||
// foreignKey: 'client_id',
|
|||
// as: 'reservations'
|
|||
// });
|
|||
|
|||
// Client.hasMany(Commande, {
|
|||
// foreignKey: 'client_id',
|
|||
// as: 'commandes'
|
|||
// });
|
|||
} |
|||
|
|||
// Table associations
|
|||
if (Table && typeof Table.hasMany === 'function') { |
|||
// Table.hasMany(Reservation, {
|
|||
// foreignKey: 'table_id',
|
|||
// as: 'reservations'
|
|||
// });
|
|||
|
|||
// Table.hasMany(Commande, {
|
|||
// foreignKey: 'table_id',
|
|||
// as: 'commandes'
|
|||
// });
|
|||
} |
|||
|
|||
// Reservation associations
|
|||
if (Reservation && typeof Reservation.belongsTo === 'function') { |
|||
Reservation.belongsTo(Client, { |
|||
foreignKey: 'client_id', |
|||
as: 'client' |
|||
}); |
|||
|
|||
Reservation.belongsTo(Table, { |
|||
foreignKey: 'table_id', |
|||
as: 'table' |
|||
}); |
|||
|
|||
Reservation.hasMany(Commande, { |
|||
foreignKey: 'reservation_id', |
|||
as: 'commandes' |
|||
}); |
|||
} |
|||
|
|||
// // Order associations
|
|||
// if (Commande && typeof Commande.belongsTo === 'function') {
|
|||
// Commande.belongsTo(Client, {
|
|||
// foreignKey: 'client_id',
|
|||
// as: 'client'
|
|||
// });
|
|||
|
|||
// Commande.belongsTo(Table, {
|
|||
// foreignKey: 'table_id',
|
|||
// as: 'table'
|
|||
// });
|
|||
|
|||
// Commande.belongsTo(Reservation, {
|
|||
// foreignKey: 'reservation_id',
|
|||
// as: 'reservation'
|
|||
// });
|
|||
|
|||
// Commande.hasMany(CommandeItem, {
|
|||
// foreignKey: 'commande_id',
|
|||
// as: 'items'
|
|||
// });
|
|||
// }
|
|||
|
|||
// Order item associations
|
|||
if (CommandeItem && typeof CommandeItem.belongsTo === 'function') { |
|||
CommandeItem.belongsTo(Commande, { |
|||
foreignKey: 'commande_id', |
|||
as: 'commande' |
|||
}); |
|||
|
|||
CommandeItem.belongsTo(Menu, { |
|||
foreignKey: 'menu_id', |
|||
as: 'menu' |
|||
}); |
|||
} |
|||
|
|||
|
|||
|
|||
if (Reservation) { |
|||
// Reservation.hasMany(Commande, { foreignKey: 'reservation_id', as: 'commandes' });
|
|||
// Commande.belongsTo(Reservation, { foreignKey: 'reservation_id', as: 'reservation' });
|
|||
} |
|||
|
|||
console.log('✅ All associations defined successfully'); |
|||
|
|||
} catch (error) { |
|||
console.error('❌ Error defining associations:', error); |
|||
} |
|||
}; |
|||
|
|||
// Initialize associations
|
|||
defineAssociations(); |
|||
|
|||
module.exports = { |
|||
sequelize, |
|||
Menu, |
|||
MenuCategory, |
|||
Table, |
|||
Client, |
|||
Reservation, |
|||
Commande, |
|||
CommandeItem, |
|||
initDatabase |
|||
}; |
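For context, a minimal sketch of how `initDatabase` and the exported models would be consumed at server startup (file name and query are assumptions):

```js
// Hypothetical server bootstrap using the exports above.
const { initDatabase, Menu, MenuCategory } = require('./models/associations');

(async () => {
  await initDatabase();                        // authenticate + sync models
  const categories = await MenuCategory.findAll({
    include: [{ model: Menu, as: 'menus' }]    // association defined above
  });
  console.log(`Loaded ${categories.length} categories`);
})();
```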
|||
@ -0,0 +1 @@ |
|||
../nodemon/bin/nodemon.js |
|||
@ -0,0 +1 @@ |
|||
../touch/bin/nodetouch.js |
|||
@ -0,0 +1 @@ |
|||
../semver/bin/semver.js |
|||
File diff suppressed because it is too large
@ -0,0 +1,250 @@ |
|||
2.0.0 / 2024-08-31 |
|||
================== |
|||
|
|||
* Drop node <18 support |
|||
* deps: mime-types@^3.0.0 |
|||
* deps: negotiator@^1.0.0 |
|||
|
|||
1.3.8 / 2022-02-02 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.34 |
|||
- deps: mime-db@~1.51.0 |
|||
* deps: negotiator@0.6.3 |
|||
|
|||
1.3.7 / 2019-04-29 |
|||
================== |
|||
|
|||
* deps: negotiator@0.6.2 |
|||
- Fix sorting charset, encoding, and language with extra parameters |
|||
|
|||
1.3.6 / 2019-04-28 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.24 |
|||
- deps: mime-db@~1.40.0 |
|||
|
|||
1.3.5 / 2018-02-28 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.18 |
|||
- deps: mime-db@~1.33.0 |
|||
|
|||
1.3.4 / 2017-08-22 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.16 |
|||
- deps: mime-db@~1.29.0 |
|||
|
|||
1.3.3 / 2016-05-02 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.11 |
|||
- deps: mime-db@~1.23.0 |
|||
* deps: negotiator@0.6.1 |
|||
- perf: improve `Accept` parsing speed |
|||
- perf: improve `Accept-Charset` parsing speed |
|||
- perf: improve `Accept-Encoding` parsing speed |
|||
- perf: improve `Accept-Language` parsing speed |
|||
|
|||
1.3.2 / 2016-03-08 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.10 |
|||
- Fix extension of `application/dash+xml` |
|||
- Update primary extension for `audio/mp4` |
|||
- deps: mime-db@~1.22.0 |
|||
|
|||
1.3.1 / 2016-01-19 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.9 |
|||
- deps: mime-db@~1.21.0 |
|||
|
|||
1.3.0 / 2015-09-29 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.7 |
|||
- deps: mime-db@~1.19.0 |
|||
* deps: negotiator@0.6.0 |
|||
- Fix including type extensions in parameters in `Accept` parsing |
|||
- Fix parsing `Accept` parameters with quoted equals |
|||
- Fix parsing `Accept` parameters with quoted semicolons |
|||
- Lazy-load modules from main entry point |
|||
- perf: delay type concatenation until needed |
|||
- perf: enable strict mode |
|||
- perf: hoist regular expressions |
|||
- perf: remove closures getting spec properties |
|||
- perf: remove a closure from media type parsing |
|||
- perf: remove property delete from media type parsing |
|||
|
|||
1.2.13 / 2015-09-06 |
|||
=================== |
|||
|
|||
* deps: mime-types@~2.1.6 |
|||
- deps: mime-db@~1.18.0 |
|||
|
|||
1.2.12 / 2015-07-30 |
|||
=================== |
|||
|
|||
* deps: mime-types@~2.1.4 |
|||
- deps: mime-db@~1.16.0 |
|||
|
|||
1.2.11 / 2015-07-16 |
|||
=================== |
|||
|
|||
* deps: mime-types@~2.1.3 |
|||
- deps: mime-db@~1.15.0 |
|||
|
|||
1.2.10 / 2015-07-01 |
|||
=================== |
|||
|
|||
* deps: mime-types@~2.1.2 |
|||
- deps: mime-db@~1.14.0 |
|||
|
|||
1.2.9 / 2015-06-08 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.1 |
|||
- perf: fix deopt during mapping |
|||
|
|||
1.2.8 / 2015-06-07 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.1.0 |
|||
- deps: mime-db@~1.13.0 |
|||
* perf: avoid argument reassignment & argument slice |
|||
* perf: avoid negotiator recursive construction |
|||
* perf: enable strict mode |
|||
* perf: remove unnecessary bitwise operator |
|||
|
|||
1.2.7 / 2015-05-10 |
|||
================== |
|||
|
|||
* deps: negotiator@0.5.3 |
|||
- Fix media type parameter matching to be case-insensitive |
|||
|
|||
1.2.6 / 2015-05-07 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.11 |
|||
- deps: mime-db@~1.9.1 |
|||
* deps: negotiator@0.5.2 |
|||
- Fix comparing media types with quoted values |
|||
- Fix splitting media types with quoted commas |
|||
|
|||
1.2.5 / 2015-03-13 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.10 |
|||
- deps: mime-db@~1.8.0 |
|||
|
|||
1.2.4 / 2015-02-14 |
|||
================== |
|||
|
|||
* Support Node.js 0.6 |
|||
* deps: mime-types@~2.0.9 |
|||
- deps: mime-db@~1.7.0 |
|||
* deps: negotiator@0.5.1 |
|||
- Fix preference sorting to be stable for long acceptable lists |
|||
|
|||
1.2.3 / 2015-01-31 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.8 |
|||
- deps: mime-db@~1.6.0 |
|||
|
|||
1.2.2 / 2014-12-30 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.7 |
|||
- deps: mime-db@~1.5.0 |
|||
|
|||
1.2.1 / 2014-12-30 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.5 |
|||
- deps: mime-db@~1.3.1 |
|||
|
|||
1.2.0 / 2014-12-19 |
|||
================== |
|||
|
|||
* deps: negotiator@0.5.0 |
|||
- Fix list return order when large accepted list |
|||
- Fix missing identity encoding when q=0 exists |
|||
- Remove dynamic building of Negotiator class |
|||
|
|||
1.1.4 / 2014-12-10 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.4 |
|||
- deps: mime-db@~1.3.0 |
|||
|
|||
1.1.3 / 2014-11-09 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.3 |
|||
- deps: mime-db@~1.2.0 |
|||
|
|||
1.1.2 / 2014-10-14 |
|||
================== |
|||
|
|||
* deps: negotiator@0.4.9 |
|||
- Fix error when media type has invalid parameter |
|||
|
|||
1.1.1 / 2014-09-28 |
|||
================== |
|||
|
|||
* deps: mime-types@~2.0.2 |
|||
- deps: mime-db@~1.1.0 |
|||
* deps: negotiator@0.4.8 |
|||
- Fix all negotiations to be case-insensitive |
|||
- Stable sort preferences of same quality according to client order |
|||
|
|||
1.1.0 / 2014-09-02 |
|||
================== |
|||
|
|||
* update `mime-types` |
|||
|
|||
1.0.7 / 2014-07-04 |
|||
================== |
|||
|
|||
* Fix wrong type returned from `type` when match after unknown extension |
|||
|
|||
1.0.6 / 2014-06-24 |
|||
================== |
|||
|
|||
* deps: negotiator@0.4.7 |
|||
|
|||
1.0.5 / 2014-06-20 |
|||
================== |
|||
|
|||
* fix crash when unknown extension given |
|||
|
|||
1.0.4 / 2014-06-19 |
|||
================== |
|||
|
|||
* use `mime-types` |
|||
|
|||
1.0.3 / 2014-06-11 |
|||
================== |
|||
|
|||
* deps: negotiator@0.4.6 |
|||
- Order by specificity when quality is the same |
|||
|
|||
1.0.2 / 2014-05-29 |
|||
================== |
|||
|
|||
* Fix interpretation when header not in request |
|||
* deps: pin negotiator@0.4.5 |
|||
|
|||
1.0.1 / 2014-01-18 |
|||
================== |
|||
|
|||
* Identity encoding isn't always acceptable |
|||
* deps: negotiator@~0.4.0 |
|||
|
|||
1.0.0 / 2013-12-27 |
|||
================== |
|||
|
|||
* Genesis |
|||
@ -0,0 +1,23 @@ |
|||
(The MIT License) |
|||
|
|||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com> |
|||
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining |
|||
a copy of this software and associated documentation files (the |
|||
'Software'), to deal in the Software without restriction, including |
|||
without limitation the rights to use, copy, modify, merge, publish, |
|||
distribute, sublicense, and/or sell copies of the Software, and to |
|||
permit persons to whom the Software is furnished to do so, subject to |
|||
the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be |
|||
included in all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, |
|||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
|||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
|||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
|||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
|||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|||
@ -0,0 +1,140 @@ |
|||
# accepts |
|||
|
|||
[![NPM Version][npm-version-image]][npm-url] |
|||
[![NPM Downloads][npm-downloads-image]][npm-url] |
|||
[![Node.js Version][node-version-image]][node-version-url] |
|||
[![Build Status][github-actions-ci-image]][github-actions-ci-url] |
|||
[![Test Coverage][coveralls-image]][coveralls-url] |
|||
|
|||
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). |
|||
Extracted from [koa](https://www.npmjs.com/package/koa) for general use. |
|||
|
|||
In addition to negotiator, it allows: |
|||
|
|||
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` |
|||
as well as `('text/html', 'application/json')`. |
|||
- Allows type shorthands such as `json`. |
|||
- Returns `false` when no types match |
|||
- Treats non-existent headers as `*` |
|||
|
|||
## Installation |
|||
|
|||
This is a [Node.js](https://nodejs.org/en/) module available through the |
|||
[npm registry](https://www.npmjs.com/). Installation is done using the |
|||
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): |
|||
|
|||
```sh |
|||
$ npm install accepts |
|||
``` |
|||
|
|||
## API |
|||
|
|||
```js |
|||
var accepts = require('accepts') |
|||
``` |
|||
|
|||
### accepts(req) |
|||
|
|||
Create a new `Accepts` object for the given `req`. |
|||
|
|||
#### .charset(charsets) |
|||
|
|||
Return the first accepted charset. If nothing in `charsets` is accepted, |
|||
then `false` is returned. |
|||
|
|||
#### .charsets() |
|||
|
|||
Return the charsets that the request accepts, in the order of the client's |
|||
preference (most preferred first). |
|||
|
|||
#### .encoding(encodings) |
|||
|
|||
Return the first accepted encoding. If nothing in `encodings` is accepted, |
|||
then `false` is returned. |
|||
|
|||
#### .encodings() |
|||
|
|||
Return the encodings that the request accepts, in the order of the client's |
|||
preference (most preferred first). |
|||
|
|||
#### .language(languages) |
|||
|
|||
Return the first accepted language. If nothing in `languages` is accepted, |
|||
then `false` is returned. |
|||
|
|||
#### .languages() |
|||
|
|||
Return the languages that the request accepts, in the order of the client's |
|||
preference (most preferred first). |
|||
|
|||
#### .type(types) |
|||
|
|||
Return the first accepted type (and it is returned as the same text as what |
|||
appears in the `types` array). If nothing in `types` is accepted, then `false` |
|||
is returned. |
|||
|
|||
The `types` array can contain full MIME types or file extensions. Any value |
|||
that is not a full MIME type is passed to `require('mime-types').lookup`. |
|||
|
|||
#### .types() |
|||
|
|||
Return the types that the request accepts, in the order of the client's |
|||
preference (most preferred first). |
|||
|
|||
## Examples |
|||
|
|||
### Simple type negotiation |
|||
|
|||
This simple example shows how to use `accepts` to return a different typed
response body based on what the client wants to accept. The server lists its
preferences in order and will get back the best match between the client and
server.
|||
|
|||
```js |
|||
var accepts = require('accepts') |
|||
var http = require('http') |
|||
|
|||
function app (req, res) { |
|||
var accept = accepts(req) |
|||
|
|||
// the order of this list is significant; should be server preferred order |
|||
switch (accept.type(['json', 'html'])) { |
|||
case 'json': |
|||
res.setHeader('Content-Type', 'application/json') |
|||
res.write('{"hello":"world!"}') |
|||
break |
|||
case 'html': |
|||
res.setHeader('Content-Type', 'text/html') |
|||
res.write('<b>hello, world!</b>') |
|||
break |
|||
default: |
|||
// the fallback is text/plain, so no need to specify it above |
|||
res.setHeader('Content-Type', 'text/plain') |
|||
res.write('hello, world!') |
|||
break |
|||
} |
|||
|
|||
res.end() |
|||
} |
|||
|
|||
http.createServer(app).listen(3000) |
|||
``` |
|||
|
|||
You can test this out with the cURL program: |
|||
```sh |
|||
curl -I -H'Accept: text/html' http://localhost:3000/ |
|||
``` |
|||
|
|||
## License |
|||
|
|||
[MIT](LICENSE) |
|||
|
|||
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master |
|||
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master |
|||
[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci |
|||
[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml |
|||
[node-version-image]: https://badgen.net/npm/node/accepts |
|||
[node-version-url]: https://nodejs.org/en/download |
|||
[npm-downloads-image]: https://badgen.net/npm/dm/accepts |
|||
[npm-url]: https://npmjs.org/package/accepts |
|||
[npm-version-image]: https://badgen.net/npm/v/accepts |
|||
@ -0,0 +1,238 @@ |
|||
/*! |
|||
* accepts |
|||
* Copyright(c) 2014 Jonathan Ong |
|||
* Copyright(c) 2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
* @private |
|||
*/ |
|||
|
|||
var Negotiator = require('negotiator') |
|||
var mime = require('mime-types') |
|||
|
|||
/** |
|||
* Module exports. |
|||
* @public |
|||
*/ |
|||
|
|||
module.exports = Accepts |
|||
|
|||
/** |
|||
* Create a new Accepts object for the given req. |
|||
* |
|||
* @param {object} req |
|||
* @public |
|||
*/ |
|||
|
|||
function Accepts (req) { |
|||
if (!(this instanceof Accepts)) { |
|||
return new Accepts(req) |
|||
} |
|||
|
|||
this.headers = req.headers |
|||
this.negotiator = new Negotiator(req) |
|||
} |
|||
|
|||
/** |
|||
* Check if the given `type(s)` is acceptable, returning |
|||
* the best match when true, otherwise `undefined`, in which |
|||
* case you should respond with 406 "Not Acceptable". |
|||
* |
|||
* The `type` value may be a single mime type string |
|||
* such as "application/json", the extension name |
|||
* such as "json" or an array `["json", "html", "text/plain"]`. When a list |
|||
* or array is given the _best_ match, if any is returned. |
|||
* |
|||
* Examples: |
|||
* |
|||
* // Accept: text/html
|
|||
* this.types('html'); |
|||
* // => "html"
|
|||
* |
|||
* // Accept: text/*, application/json
|
|||
* this.types('html'); |
|||
* // => "html"
|
|||
* this.types('text/html'); |
|||
* // => "text/html"
|
|||
* this.types('json', 'text'); |
|||
* // => "json"
|
|||
* this.types('application/json'); |
|||
* // => "application/json"
|
|||
* |
|||
* // Accept: text/*, application/json
|
|||
* this.types('image/png'); |
|||
* this.types('png'); |
|||
* // => undefined
|
|||
* |
|||
* // Accept: text/*;q=.5, application/json
|
|||
* this.types(['html', 'json']); |
|||
* this.types('html', 'json'); |
|||
* // => "json"
|
|||
* |
|||
* @param {String|Array} types... |
|||
* @return {String|Array|Boolean} |
|||
* @public |
|||
*/ |
|||
|
|||
Accepts.prototype.type = |
|||
Accepts.prototype.types = function (types_) { |
|||
var types = types_ |
|||
|
|||
// support flattened arguments
|
|||
if (types && !Array.isArray(types)) { |
|||
types = new Array(arguments.length) |
|||
for (var i = 0; i < types.length; i++) { |
|||
types[i] = arguments[i] |
|||
} |
|||
} |
|||
|
|||
// no types, return all requested types
|
|||
if (!types || types.length === 0) { |
|||
return this.negotiator.mediaTypes() |
|||
} |
|||
|
|||
// no accept header, return first given type
|
|||
if (!this.headers.accept) { |
|||
return types[0] |
|||
} |
|||
|
|||
var mimes = types.map(extToMime) |
|||
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) |
|||
var first = accepts[0] |
|||
|
|||
return first |
|||
? types[mimes.indexOf(first)] |
|||
: false |
|||
} |
|||
|
|||
/** |
|||
* Return accepted encodings or best fit based on `encodings`. |
|||
* |
|||
* Given `Accept-Encoding: gzip, deflate` |
|||
* an array sorted by quality is returned: |
|||
* |
|||
* ['gzip', 'deflate'] |
|||
* |
|||
* @param {String|Array} encodings... |
|||
* @return {String|Array} |
|||
* @public |
|||
*/ |
|||
|
|||
Accepts.prototype.encoding = |
|||
Accepts.prototype.encodings = function (encodings_) { |
|||
var encodings = encodings_ |
|||
|
|||
// support flattened arguments
|
|||
if (encodings && !Array.isArray(encodings)) { |
|||
encodings = new Array(arguments.length) |
|||
for (var i = 0; i < encodings.length; i++) { |
|||
encodings[i] = arguments[i] |
|||
} |
|||
} |
|||
|
|||
// no encodings, return all requested encodings
|
|||
if (!encodings || encodings.length === 0) { |
|||
return this.negotiator.encodings() |
|||
} |
|||
|
|||
return this.negotiator.encodings(encodings)[0] || false |
|||
} |
|||
|
|||
/** |
|||
* Return accepted charsets or best fit based on `charsets`. |
|||
* |
|||
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` |
|||
* an array sorted by quality is returned: |
|||
* |
|||
* ['utf-8', 'utf-7', 'iso-8859-1'] |
|||
* |
|||
* @param {String|Array} charsets... |
|||
* @return {String|Array} |
|||
* @public |
|||
*/ |
|||
|
|||
Accepts.prototype.charset = |
|||
Accepts.prototype.charsets = function (charsets_) { |
|||
var charsets = charsets_ |
|||
|
|||
// support flattened arguments
|
|||
if (charsets && !Array.isArray(charsets)) { |
|||
charsets = new Array(arguments.length) |
|||
for (var i = 0; i < charsets.length; i++) { |
|||
charsets[i] = arguments[i] |
|||
} |
|||
} |
|||
|
|||
// no charsets, return all requested charsets
|
|||
if (!charsets || charsets.length === 0) { |
|||
return this.negotiator.charsets() |
|||
} |
|||
|
|||
return this.negotiator.charsets(charsets)[0] || false |
|||
} |
|||
|
|||
/** |
|||
* Return accepted languages or best fit based on `langs`. |
|||
* |
|||
* Given `Accept-Language: en;q=0.8, es, pt` |
|||
* an array sorted by quality is returned: |
|||
* |
|||
* ['es', 'pt', 'en'] |
|||
* |
|||
* @param {String|Array} langs... |
|||
* @return {Array|String} |
|||
* @public |
|||
*/ |
|||
|
|||
Accepts.prototype.lang = |
|||
Accepts.prototype.langs = |
|||
Accepts.prototype.language = |
|||
Accepts.prototype.languages = function (languages_) { |
|||
var languages = languages_ |
|||
|
|||
// support flattened arguments
|
|||
if (languages && !Array.isArray(languages)) { |
|||
languages = new Array(arguments.length) |
|||
for (var i = 0; i < languages.length; i++) { |
|||
languages[i] = arguments[i] |
|||
} |
|||
} |
|||
|
|||
// no languages, return all requested languages
|
|||
if (!languages || languages.length === 0) { |
|||
return this.negotiator.languages() |
|||
} |
|||
|
|||
return this.negotiator.languages(languages)[0] || false |
|||
} |
|||
|
|||
/** |
|||
* Convert extnames to mime. |
|||
* |
|||
* @param {String} type |
|||
* @return {String} |
|||
* @private |
|||
*/ |
|||
|
|||
function extToMime (type) { |
|||
return type.indexOf('/') === -1 |
|||
? mime.lookup(type) |
|||
: type |
|||
} |
|||
|
|||
/** |
|||
* Check if mime is valid. |
|||
* |
|||
* @param {String} type |
|||
* @return {Boolean} |
|||
* @private |
|||
*/ |
|||
|
|||
function validMime (type) { |
|||
return typeof type === 'string' |
|||
} |
|||
@ -0,0 +1,47 @@ |
|||
{ |
|||
"name": "accepts", |
|||
"description": "Higher-level content negotiation", |
|||
"version": "2.0.0", |
|||
"contributors": [ |
|||
"Douglas Christopher Wilson <doug@somethingdoug.com>", |
|||
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)" |
|||
], |
|||
"license": "MIT", |
|||
"repository": "jshttp/accepts", |
|||
"dependencies": { |
|||
"mime-types": "^3.0.0", |
|||
"negotiator": "^1.0.0" |
|||
}, |
|||
"devDependencies": { |
|||
"deep-equal": "1.0.1", |
|||
"eslint": "7.32.0", |
|||
"eslint-config-standard": "14.1.1", |
|||
"eslint-plugin-import": "2.25.4", |
|||
"eslint-plugin-markdown": "2.2.1", |
|||
"eslint-plugin-node": "11.1.0", |
|||
"eslint-plugin-promise": "4.3.1", |
|||
"eslint-plugin-standard": "4.1.0", |
|||
"mocha": "9.2.0", |
|||
"nyc": "15.1.0" |
|||
}, |
|||
"files": [ |
|||
"LICENSE", |
|||
"HISTORY.md", |
|||
"index.js" |
|||
], |
|||
"engines": { |
|||
"node": ">= 0.6" |
|||
}, |
|||
"scripts": { |
|||
"lint": "eslint .", |
|||
"test": "mocha --reporter spec --check-leaks --bail test/", |
|||
"test-ci": "nyc --reporter=lcov --reporter=text npm test", |
|||
"test-cov": "nyc --reporter=html --reporter=text npm test" |
|||
}, |
|||
"keywords": [ |
|||
"content", |
|||
"negotiation", |
|||
"accept", |
|||
"accepts" |
|||
] |
|||
} |
|||
@ -0,0 +1,15 @@ |
|||
The ISC License |
|||
|
|||
Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) |
|||
|
|||
Permission to use, copy, modify, and/or distribute this software for any |
|||
purpose with or without fee is hereby granted, provided that the above |
|||
copyright notice and this permission notice appear in all copies. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES |
|||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF |
|||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR |
|||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES |
|||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
|||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR |
|||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
|||
@ -0,0 +1,87 @@ |
|||
anymatch [](https://travis-ci.org/micromatch/anymatch) [](https://coveralls.io/r/micromatch/anymatch?branch=master) |
|||
====== |
|||
Javascript module to match a string against a regular expression, glob, string, |
|||
or function that takes the string as an argument and returns a truthy or falsy |
|||
value. The matcher can also be an array of any or all of these. Useful for |
|||
allowing a very flexible user-defined config to define things like file paths. |
|||
|
|||
__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__ |
|||
|
|||
|
|||
Usage |
|||
----- |
|||
```sh |
|||
npm install anymatch |
|||
``` |
|||
|
|||
#### anymatch(matchers, testString, [returnIndex], [options]) |
|||
* __matchers__: (_Array|String|RegExp|Function_) |
|||
String to be directly matched, string with glob patterns, regular expression |
|||
test, function that takes the testString as an argument and returns a truthy |
|||
value if it should be matched, or an array of any number and mix of these types. |
|||
* __testString__: (_String|Array_) The string to test against the matchers. If |
|||
passed as an array, the first element of the array will be used as the |
|||
`testString` for non-function matchers, while the entire array will be applied |
|||
as the arguments for function matchers. |
|||
* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options. |
|||
* __returnIndex__: (_Boolean [optional]_) If true, return the array index of |
|||
the first matcher that the testString matched, or -1 if no match, instead of a
|||
boolean result. |
|||
|
|||
```js |
|||
const anymatch = require('anymatch'); |
|||
|
|||
const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ] ; |
|||
|
|||
anymatch(matchers, 'path/to/file.js'); // true |
|||
anymatch(matchers, 'path/anyjs/baz.js'); // true |
|||
anymatch(matchers, 'path/to/foo.js'); // true |
|||
anymatch(matchers, 'path/to/bar.js'); // true |
|||
anymatch(matchers, 'bar.js'); // false |
|||
|
|||
// returnIndex = true |
|||
anymatch(matchers, 'foo.js', {returnIndex: true}); // 2 |
|||
anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1 |
|||
|
|||
|||
|
|||
// using globs to match directories and their children |
|||
anymatch('node_modules', 'node_modules'); // true |
|||
anymatch('node_modules', 'node_modules/somelib/index.js'); // false |
|||
anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true |
|||
anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false |
|||
anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true |
|||
|
|||
const matcher = anymatch(matchers); |
|||
['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ] |
|||
|||
|
|||
``` |
|||
|
|||
#### anymatch(matchers) |
|||
You can also pass in only your matcher(s) to get a curried function that has |
|||
already been bound to the provided matching criteria. This can be used as an |
|||
`Array#filter` callback. |
|||
|
|||
```js |
|||
var matcher = anymatch(matchers); |
|||
|
|||
matcher('path/to/file.js'); // true |
|||
matcher('path/anyjs/baz.js', true); // 1 |
|||
|
|||
['foo.js', 'bar.js'].filter(matcher); // ['foo.js'] |
|||
``` |
|||
|
|||
Changelog |
|||
---------- |
|||
[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases) |
|||
|
|||
- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only. |
|||
- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information). |
|||
- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch) |
|||
for glob pattern matching. Issues with glob pattern matching should be |
|||
reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues). |
|||
|
|||
License |
|||
------- |
|||
[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) |
|||
@ -0,0 +1,20 @@ |
|||
type AnymatchFn = (testString: string) => boolean; |
|||
type AnymatchPattern = string|RegExp|AnymatchFn; |
|||
type AnymatchMatcher = AnymatchPattern|AnymatchPattern[] |
|||
type AnymatchTester = { |
|||
(testString: string|any[], returnIndex: true): number; |
|||
(testString: string|any[]): boolean; |
|||
} |
|||
|
|||
type PicomatchOptions = {dot: boolean}; |
|||
|
|||
declare const anymatch: { |
|||
(matchers: AnymatchMatcher): AnymatchTester; |
|||
(matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester; |
|||
(matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number; |
|||
(matchers: AnymatchMatcher, testString: string|any[]): boolean; |
|||
} |
|||
|
|||
export {AnymatchMatcher as Matcher} |
|||
export {AnymatchTester as Tester} |
|||
export default anymatch |
|||
@ -0,0 +1,104 @@ |
|||
'use strict'; |
|||
|
|||
Object.defineProperty(exports, "__esModule", { value: true }); |
|||
|
|||
const picomatch = require('picomatch'); |
|||
const normalizePath = require('normalize-path'); |
|||
|
|||
/** |
|||
* @typedef {(testString: string) => boolean} AnymatchFn |
|||
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern |
|||
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher |
|||
*/ |
|||
const BANG = '!'; |
|||
const DEFAULT_OPTIONS = {returnIndex: false}; |
|||
const arrify = (item) => Array.isArray(item) ? item : [item]; |
|||
|
|||
/** |
|||
* @param {AnymatchPattern} matcher |
|||
* @param {object} options |
|||
* @returns {AnymatchFn} |
|||
*/ |
|||
const createPattern = (matcher, options) => { |
|||
if (typeof matcher === 'function') { |
|||
return matcher; |
|||
} |
|||
if (typeof matcher === 'string') { |
|||
const glob = picomatch(matcher, options); |
|||
return (string) => matcher === string || glob(string); |
|||
} |
|||
if (matcher instanceof RegExp) { |
|||
return (string) => matcher.test(string); |
|||
} |
|||
return (string) => false; |
|||
}; |
|||
|
|||
/** |
|||
* @param {Array<Function>} patterns |
|||
* @param {Array<Function>} negPatterns |
|||
* @param {String|Array} args |
|||
* @param {Boolean} returnIndex |
|||
* @returns {boolean|number} |
|||
*/ |
|||
const matchPatterns = (patterns, negPatterns, args, returnIndex) => { |
|||
const isList = Array.isArray(args); |
|||
const _path = isList ? args[0] : args; |
|||
if (!isList && typeof _path !== 'string') { |
|||
throw new TypeError('anymatch: second argument must be a string: got ' + |
|||
Object.prototype.toString.call(_path)) |
|||
} |
|||
const path = normalizePath(_path, false); |
|||
|
|||
for (let index = 0; index < negPatterns.length; index++) { |
|||
const nglob = negPatterns[index]; |
|||
if (nglob(path)) { |
|||
return returnIndex ? -1 : false; |
|||
} |
|||
} |
|||
|
|||
const applied = isList && [path].concat(args.slice(1)); |
|||
for (let index = 0; index < patterns.length; index++) { |
|||
const pattern = patterns[index]; |
|||
if (isList ? pattern(...applied) : pattern(path)) { |
|||
return returnIndex ? index : true; |
|||
} |
|||
} |
|||
|
|||
return returnIndex ? -1 : false; |
|||
}; |
|||
|
|||
/** |
|||
* @param {AnymatchMatcher} matchers |
|||
* @param {Array|string} testString |
|||
* @param {object} options |
|||
* @returns {boolean|number|Function} |
|||
*/ |
|||
const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => { |
|||
if (matchers == null) { |
|||
throw new TypeError('anymatch: specify first argument'); |
|||
} |
|||
const opts = typeof options === 'boolean' ? {returnIndex: options} : options; |
|||
const returnIndex = opts.returnIndex || false; |
|||
|
|||
// Early cache for matchers.
|
|||
const mtchers = arrify(matchers); |
|||
const negatedGlobs = mtchers |
|||
.filter(item => typeof item === 'string' && item.charAt(0) === BANG) |
|||
.map(item => item.slice(1)) |
|||
.map(item => picomatch(item, opts)); |
|||
const patterns = mtchers |
|||
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG)) |
|||
.map(matcher => createPattern(matcher, opts)); |
|||
|
|||
if (testString == null) { |
|||
return (testString, ri = false) => { |
|||
const returnIndex = typeof ri === 'boolean' ? ri : false; |
|||
return matchPatterns(patterns, negatedGlobs, testString, returnIndex); |
|||
} |
|||
} |
|||
|
|||
return matchPatterns(patterns, negatedGlobs, testString, returnIndex); |
|||
}; |
|||
|
|||
anymatch.default = anymatch; |
|||
module.exports = anymatch; |
|||
@ -0,0 +1,48 @@ |
|||
{ |
|||
"name": "anymatch", |
|||
"version": "3.1.3", |
|||
"description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", |
|||
"files": [ |
|||
"index.js", |
|||
"index.d.ts" |
|||
], |
|||
"dependencies": { |
|||
"normalize-path": "^3.0.0", |
|||
"picomatch": "^2.0.4" |
|||
}, |
|||
"author": { |
|||
"name": "Elan Shanker", |
|||
"url": "https://github.com/es128" |
|||
}, |
|||
"license": "ISC", |
|||
"homepage": "https://github.com/micromatch/anymatch", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "https://github.com/micromatch/anymatch" |
|||
}, |
|||
"keywords": [ |
|||
"match", |
|||
"any", |
|||
"string", |
|||
"file", |
|||
"fs", |
|||
"list", |
|||
"glob", |
|||
"regex", |
|||
"regexp", |
|||
"regular", |
|||
"expression", |
|||
"function" |
|||
], |
|||
"scripts": { |
|||
"test": "nyc mocha", |
|||
"mocha": "mocha" |
|||
}, |
|||
"devDependencies": { |
|||
"mocha": "^6.1.3", |
|||
"nyc": "^14.0.0" |
|||
}, |
|||
"engines": { |
|||
"node": ">= 8" |
|||
} |
|||
} |
|||
@ -0,0 +1,2 @@ |
|||
tidelift: "npm/balanced-match" |
|||
patreon: juliangruber |
|||
@ -0,0 +1,21 @@ |
|||
(MIT) |
|||
|
|||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy of |
|||
this software and associated documentation files (the "Software"), to deal in |
|||
the Software without restriction, including without limitation the rights to |
|||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies |
|||
of the Software, and to permit persons to whom the Software is furnished to do |
|||
so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,97 @@ |
|||
# balanced-match |
|||
|
|||
Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well! |
|||
|
|||
[](http://travis-ci.org/juliangruber/balanced-match) |
|||
[](https://www.npmjs.org/package/balanced-match) |
|||
|
|||
[](https://ci.testling.com/juliangruber/balanced-match) |
|||
|
|||
## Example |
|||
|
|||
Get the first matching pair of braces: |
|||
|
|||
```js |
|||
var balanced = require('balanced-match'); |
|||
|
|||
console.log(balanced('{', '}', 'pre{in{nested}}post')); |
|||
console.log(balanced('{', '}', 'pre{first}between{second}post')); |
|||
console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); |
|||
``` |
|||
|
|||
The matches are: |
|||
|
|||
```bash |
|||
$ node example.js |
|||
{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } |
|||
{ start: 3, |
|||
end: 9, |
|||
pre: 'pre', |
|||
body: 'first', |
|||
post: 'between{second}post' } |
|||
{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } |
|||
``` |
|||
|
|||
## API |
|||
|
|||
### var m = balanced(a, b, str) |
|||
|
|||
For the first non-nested matching pair of `a` and `b` in `str`, return an |
|||
object with those keys: |
|||
|
|||
* **start** the index of the first match of `a` |
|||
* **end** the index of the matching `b` |
|||
* **pre** the preamble, `a` and `b` not included |
|||
* **body** the match, `a` and `b` not included |
|||
* **post** the postscript, `a` and `b` not included |
|||
|
|||
If there's no match, `undefined` will be returned. |
|||
|
|||
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. |
|||
|
|||
### var r = balanced.range(a, b, str) |
|||
|
|||
For the first non-nested matching pair of `a` and `b` in `str`, return an |
|||
array with indexes: `[ <a index>, <b index> ]`. |
|||
|
|||
If there's no match, `undefined` will be returned. |
|||
|
|||
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. |
|||
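A minimal usage sketch for `range`, reusing the input strings from the `balanced()` example above; the expected output follows directly from the index description:

```js
var balanced = require('balanced-match');

// Outermost pair of the first example above: '{' at index 3, matching '}' at index 14
console.log(balanced.range('{', '}', 'pre{in{nested}}post'));
// => [ 3, 14 ]

// No matching pair at all
console.log(balanced.range('{', '}', 'no braces here'));
// => undefined
```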
|
|||
## Installation |
|||
|
|||
With [npm](https://npmjs.org) do: |
|||
|
|||
```bash |
|||
npm install balanced-match |
|||
``` |
|||
|
|||
## Security contact information |
|||
|
|||
To report a security vulnerability, please use the |
|||
[Tidelift security contact](https://tidelift.com/security). |
|||
Tidelift will coordinate the fix and disclosure. |
|||
|
|||
## License |
|||
|
|||
(MIT) |
|||
|
|||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy of |
|||
this software and associated documentation files (the "Software"), to deal in |
|||
the Software without restriction, including without limitation the rights to |
|||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies |
|||
of the Software, and to permit persons to whom the Software is furnished to do |
|||
so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,62 @@ |
|||
'use strict'; |
|||
module.exports = balanced; |
|||
function balanced(a, b, str) { |
|||
if (a instanceof RegExp) a = maybeMatch(a, str); |
|||
if (b instanceof RegExp) b = maybeMatch(b, str); |
|||
|
|||
var r = range(a, b, str); |
|||
|
|||
return r && { |
|||
start: r[0], |
|||
end: r[1], |
|||
pre: str.slice(0, r[0]), |
|||
body: str.slice(r[0] + a.length, r[1]), |
|||
post: str.slice(r[1] + b.length) |
|||
}; |
|||
} |
|||
|
|||
function maybeMatch(reg, str) { |
|||
var m = str.match(reg); |
|||
return m ? m[0] : null; |
|||
} |
|||
|
|||
balanced.range = range; |
|||
function range(a, b, str) { |
|||
var begs, beg, left, right, result; |
|||
var ai = str.indexOf(a); |
|||
var bi = str.indexOf(b, ai + 1); |
|||
var i = ai; |
|||
|
|||
if (ai >= 0 && bi > 0) { |
|||
if(a===b) { |
|||
return [ai, bi]; |
|||
} |
|||
begs = []; |
|||
left = str.length; |
|||
|
|||
while (i >= 0 && !result) { |
|||
if (i == ai) { |
|||
begs.push(i); |
|||
ai = str.indexOf(a, i + 1); |
|||
} else if (begs.length == 1) { |
|||
result = [ begs.pop(), bi ]; |
|||
} else { |
|||
beg = begs.pop(); |
|||
if (beg < left) { |
|||
left = beg; |
|||
right = bi; |
|||
} |
|||
|
|||
bi = str.indexOf(b, i + 1); |
|||
} |
|||
|
|||
i = ai < bi && ai >= 0 ? ai : bi; |
|||
} |
|||
|
|||
if (begs.length) { |
|||
result = [ left, right ]; |
|||
} |
|||
} |
|||
|
|||
return result; |
|||
} |
|||
@ -0,0 +1,48 @@ |
|||
{ |
|||
"name": "balanced-match", |
|||
"description": "Match balanced character pairs, like \"{\" and \"}\"", |
|||
"version": "1.0.2", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "git://github.com/juliangruber/balanced-match.git" |
|||
}, |
|||
"homepage": "https://github.com/juliangruber/balanced-match", |
|||
"main": "index.js", |
|||
"scripts": { |
|||
"test": "tape test/test.js", |
|||
"bench": "matcha test/bench.js" |
|||
}, |
|||
"devDependencies": { |
|||
"matcha": "^0.7.0", |
|||
"tape": "^4.6.0" |
|||
}, |
|||
"keywords": [ |
|||
"match", |
|||
"regexp", |
|||
"test", |
|||
"balanced", |
|||
"parse" |
|||
], |
|||
"author": { |
|||
"name": "Julian Gruber", |
|||
"email": "mail@juliangruber.com", |
|||
"url": "http://juliangruber.com" |
|||
}, |
|||
"license": "MIT", |
|||
"testling": { |
|||
"files": "test/*.js", |
|||
"browsers": [ |
|||
"ie/8..latest", |
|||
"firefox/20..latest", |
|||
"firefox/nightly", |
|||
"chrome/25..latest", |
|||
"chrome/canary", |
|||
"opera/12..latest", |
|||
"opera/next", |
|||
"safari/5.1..latest", |
|||
"ipad/6.0..latest", |
|||
"iphone/6.0..latest", |
|||
"android-browser/4.2..latest" |
|||
] |
|||
} |
|||
} |
|||
@ -0,0 +1,52 @@ |
|||
2.0.1 / 2018-09-19 |
|||
================== |
|||
|
|||
* deps: safe-buffer@5.1.2 |
|||
|
|||
2.0.0 / 2017-09-12 |
|||
================== |
|||
|
|||
* Drop support for Node.js below 0.8 |
|||
* Remove `auth(ctx)` signature -- pass in header or `auth(ctx.req)` |
|||
* Use `safe-buffer` for improved Buffer API |
|||
|
|||
1.1.0 / 2016-11-18 |
|||
================== |
|||
|
|||
* Add `auth.parse` for low-level string parsing |
|||
|
|||
1.0.4 / 2016-05-10 |
|||
================== |
|||
|
|||
* Improve error message when `req` argument is not an object |
|||
* Improve error message when `req` missing `headers` property |
|||
|
|||
1.0.3 / 2015-07-01 |
|||
================== |
|||
|
|||
* Fix regression accepting a Koa context |
|||
|
|||
1.0.2 / 2015-06-12 |
|||
================== |
|||
|
|||
* Improve error message when `req` argument missing |
|||
* perf: enable strict mode |
|||
* perf: hoist regular expression |
|||
* perf: parse with regular expressions |
|||
* perf: remove argument reassignment |
|||
|
|||
1.0.1 / 2015-05-04 |
|||
================== |
|||
|
|||
* Update readme |
|||
|
|||
1.0.0 / 2014-07-01 |
|||
================== |
|||
|
|||
* Support empty password |
|||
* Support empty username |
|||
|
|||
0.0.1 / 2013-11-30 |
|||
================== |
|||
|
|||
* Initial release |
|||
@ -0,0 +1,24 @@ |
|||
(The MIT License) |
|||
|
|||
Copyright (c) 2013 TJ Holowaychuk |
|||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com> |
|||
Copyright (c) 2015-2016 Douglas Christopher Wilson <doug@somethingdoug.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining |
|||
a copy of this software and associated documentation files (the |
|||
'Software'), to deal in the Software without restriction, including |
|||
without limitation the rights to use, copy, modify, merge, publish, |
|||
distribute, sublicense, and/or sell copies of the Software, and to |
|||
permit persons to whom the Software is furnished to do so, subject to |
|||
the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be |
|||
included in all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, |
|||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
|||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
|||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
|||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
|||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|||
@ -0,0 +1,113 @@ |
|||
# basic-auth |
|||
|
|||
[![NPM Version][npm-image]][npm-url] |
|||
[![NPM Downloads][downloads-image]][downloads-url] |
|||
[![Node.js Version][node-version-image]][node-version-url] |
|||
[![Build Status][travis-image]][travis-url] |
|||
[![Test Coverage][coveralls-image]][coveralls-url] |
|||
|
|||
Generic basic auth Authorization header field parser for whatever. |
|||
|
|||
## Installation |
|||
|
|||
This is a [Node.js](https://nodejs.org/en/) module available through the |
|||
[npm registry](https://www.npmjs.com/). Installation is done using the |
|||
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): |
|||
|
|||
``` |
|||
$ npm install basic-auth |
|||
``` |
|||
|
|||
## API |
|||
|
|||
<!-- eslint-disable no-unused-vars --> |
|||
|
|||
```js |
|||
var auth = require('basic-auth') |
|||
``` |
|||
|
|||
### auth(req) |
|||
|
|||
Get the basic auth credentials from the given request. The `Authorization` |
|||
header is parsed and if the header is invalid, `undefined` is returned, |
|||
otherwise an object with `name` and `pass` properties. |
|||
|
|||
### auth.parse(string) |
|||
|
|||
Parse a basic auth authorization header string. This will return an object |
|||
with `name` and `pass` properties, or `undefined` if the string is invalid. |
|||
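As a small illustration (not part of the upstream docs), parsing a literal header value — the base64 payload below encodes the classic `Aladdin:open sesame` example credentials:

```js
var auth = require('basic-auth')

var user = auth.parse('Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
// => { name: 'Aladdin', pass: 'open sesame' }

auth.parse('Bearer some-token')
// => undefined (not a Basic scheme)
```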
|
|||
## Example |
|||
|
|||
Pass a Node.js request object to the module export. If parsing fails, |
|||
`undefined` is returned, otherwise an object with `.name` and `.pass`. |
|||
|
|||
<!-- eslint-disable no-unused-vars, no-undef --> |
|||
|
|||
```js |
|||
var auth = require('basic-auth') |
|||
var user = auth(req) |
|||
// => { name: 'something', pass: 'whatever' } |
|||
``` |
|||
|
|||
A header string from any other location can also be parsed with |
|||
`auth.parse`, for example a `Proxy-Authorization` header: |
|||
|
|||
<!-- eslint-disable no-unused-vars, no-undef --> |
|||
|
|||
```js |
|||
var auth = require('basic-auth') |
|||
var user = auth.parse(req.getHeader('Proxy-Authorization')) |
|||
``` |
|||
|
|||
### With vanilla node.js http server |
|||
|
|||
```js |
|||
var http = require('http') |
|||
var auth = require('basic-auth') |
|||
var compare = require('tsscmp') |
|||
|
|||
// Create server |
|||
var server = http.createServer(function (req, res) { |
|||
var credentials = auth(req) |
|||
|
|||
// Check credentials |
|||
// The "check" function will typically be against your user store |
|||
if (!credentials || !check(credentials.name, credentials.pass)) { |
|||
res.statusCode = 401 |
|||
res.setHeader('WWW-Authenticate', 'Basic realm="example"') |
|||
res.end('Access denied') |
|||
} else { |
|||
res.end('Access granted') |
|||
} |
|||
}) |
|||
|
|||
// Basic function to validate credentials for example |
|||
function check (name, pass) { |
|||
var valid = true |
|||
|
|||
// Simple method to prevent short-circuiting and use timing-safe compare |
|||
valid = compare(name, 'john') && valid |
|||
valid = compare(pass, 'secret') && valid |
|||
|
|||
return valid |
|||
} |
|||
|
|||
// Listen |
|||
server.listen(3000) |
|||
``` |
|||
|
|||
# License |
|||
|
|||
[MIT](LICENSE) |
|||
|
|||
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/basic-auth/master |
|||
[coveralls-url]: https://coveralls.io/r/jshttp/basic-auth?branch=master |
|||
[downloads-image]: https://badgen.net/npm/dm/basic-auth |
|||
[downloads-url]: https://npmjs.org/package/basic-auth |
|||
[node-version-image]: https://badgen.net/npm/node/basic-auth |
|||
[node-version-url]: https://nodejs.org/en/download |
|||
[npm-image]: https://badgen.net/npm/v/basic-auth |
|||
[npm-url]: https://npmjs.org/package/basic-auth |
|||
[travis-image]: https://badgen.net/travis/jshttp/basic-auth/master |
|||
[travis-url]: https://travis-ci.org/jshttp/basic-auth |
|||
@ -0,0 +1,133 @@ |
|||
/*! |
|||
* basic-auth |
|||
* Copyright(c) 2013 TJ Holowaychuk |
|||
* Copyright(c) 2014 Jonathan Ong |
|||
* Copyright(c) 2015-2016 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
* @private |
|||
*/ |
|||
|
|||
var Buffer = require('safe-buffer').Buffer |
|||
|
|||
/** |
|||
* Module exports. |
|||
* @public |
|||
*/ |
|||
|
|||
module.exports = auth |
|||
module.exports.parse = parse |
|||
|
|||
/** |
|||
* RegExp for basic auth credentials |
|||
* |
|||
* credentials = auth-scheme 1*SP token68 |
|||
* auth-scheme = "Basic" ; case insensitive |
|||
* token68 = 1*( ALPHA / DIGIT / "-" / "." / "_" / "~" / "+" / "/" ) *"=" |
|||
* @private |
|||
*/ |
|||
|
|||
var CREDENTIALS_REGEXP = /^ *(?:[Bb][Aa][Ss][Ii][Cc]) +([A-Za-z0-9._~+/-]+=*) *$/ |
|||
|
|||
/** |
|||
* RegExp for basic auth user/pass |
|||
* |
|||
* user-pass = userid ":" password |
|||
* userid = *<TEXT excluding ":"> |
|||
* password = *TEXT |
|||
* @private |
|||
*/ |
|||
|
|||
var USER_PASS_REGEXP = /^([^:]*):(.*)$/ |
|||
|
|||
/** |
|||
* Parse the Authorization header field of a request. |
|||
* |
|||
* @param {object} req |
|||
* @return {object} with .name and .pass |
|||
* @public |
|||
*/ |
|||
|
|||
function auth (req) { |
|||
if (!req) { |
|||
throw new TypeError('argument req is required') |
|||
} |
|||
|
|||
if (typeof req !== 'object') { |
|||
throw new TypeError('argument req is required to be an object') |
|||
} |
|||
|
|||
// get header
|
|||
var header = getAuthorization(req) |
|||
|
|||
// parse header
|
|||
return parse(header) |
|||
} |
|||
|
|||
/** |
|||
* Decode base64 string. |
|||
* @private |
|||
*/ |
|||
|
|||
function decodeBase64 (str) { |
|||
return Buffer.from(str, 'base64').toString() |
|||
} |
|||
|
|||
/** |
|||
* Get the Authorization header from request object. |
|||
* @private |
|||
*/ |
|||
|
|||
function getAuthorization (req) { |
|||
if (!req.headers || typeof req.headers !== 'object') { |
|||
throw new TypeError('argument req is required to have headers property') |
|||
} |
|||
|
|||
return req.headers.authorization |
|||
} |
|||
|
|||
/** |
|||
* Parse basic auth to object. |
|||
* |
|||
* @param {string} string |
|||
* @return {object} |
|||
* @public |
|||
*/ |
|||
|
|||
function parse (string) { |
|||
if (typeof string !== 'string') { |
|||
return undefined |
|||
} |
|||
|
|||
// parse header
|
|||
var match = CREDENTIALS_REGEXP.exec(string) |
|||
|
|||
if (!match) { |
|||
return undefined |
|||
} |
|||
|
|||
// decode user pass
|
|||
var userPass = USER_PASS_REGEXP.exec(decodeBase64(match[1])) |
|||
|
|||
if (!userPass) { |
|||
return undefined |
|||
} |
|||
|
|||
// return credentials object
|
|||
return new Credentials(userPass[1], userPass[2]) |
|||
} |
|||
|
|||
/** |
|||
* Object to represent user credentials. |
|||
* @private |
|||
*/ |
|||
|
|||
function Credentials (name, pass) { |
|||
this.name = name |
|||
this.pass = pass |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
The MIT License (MIT) |
|||
|
|||
Copyright (c) Feross Aboukhadijeh |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
|||
THE SOFTWARE. |
|||
@ -0,0 +1,584 @@ |
|||
# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] |
|||
|
|||
[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg |
|||
[travis-url]: https://travis-ci.org/feross/safe-buffer |
|||
[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg |
|||
[npm-url]: https://npmjs.org/package/safe-buffer |
|||
[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg |
|||
[downloads-url]: https://npmjs.org/package/safe-buffer |
|||
[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg |
|||
[standard-url]: https://standardjs.com |
|||
|
|||
#### Safer Node.js Buffer API |
|||
|
|||
**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, |
|||
`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** |
|||
|
|||
**Uses the built-in implementation when available.** |
|||
|
|||
## install |
|||
|
|||
``` |
|||
npm install safe-buffer |
|||
``` |
|||
|
|||
## usage |
|||
|
|||
The goal of this package is to provide a safe replacement for the node.js `Buffer`. |
|||
|
|||
It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to |
|||
the top of your node.js modules: |
|||
|
|||
```js |
|||
var Buffer = require('safe-buffer').Buffer |
|||
|
|||
// Existing buffer code will continue to work without issues: |
|||
|
|||
new Buffer('hey', 'utf8') |
|||
new Buffer([1, 2, 3], 'utf8') |
|||
new Buffer(obj) |
|||
new Buffer(16) // create an uninitialized buffer (potentially unsafe) |
|||
|
|||
// But you can use these new explicit APIs to make clear what you want: |
|||
|
|||
Buffer.from('hey', 'utf8') // convert from many types to a Buffer |
|||
Buffer.alloc(16) // create a zero-filled buffer (safe) |
|||
Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) |
|||
``` |
|||
|
|||
## api |
|||
|
|||
### Class Method: Buffer.from(array) |
|||
<!-- YAML |
|||
added: v3.0.0 |
|||
--> |
|||
|
|||
* `array` {Array} |
|||
|
|||
Allocates a new `Buffer` using an `array` of octets. |
|||
|
|||
```js |
|||
const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); |
|||
// creates a new Buffer containing ASCII bytes |
|||
// ['b','u','f','f','e','r'] |
|||
``` |
|||
|
|||
A `TypeError` will be thrown if `array` is not an `Array`. |
|||
|
|||
### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) |
|||
<!-- YAML |
|||
added: v5.10.0 |
|||
--> |
|||
|
|||
* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or |
|||
a `new ArrayBuffer()` |
|||
* `byteOffset` {Number} Default: `0` |
|||
* `length` {Number} Default: `arrayBuffer.length - byteOffset` |
|||
|
|||
When passed a reference to the `.buffer` property of a `TypedArray` instance, |
|||
the newly created `Buffer` will share the same allocated memory as the |
|||
TypedArray. |
|||
|
|||
```js |
|||
const arr = new Uint16Array(2); |
|||
arr[0] = 5000; |
|||
arr[1] = 4000; |
|||
|
|||
const buf = Buffer.from(arr.buffer); // shares the memory with arr; |
|||
|
|||
console.log(buf); |
|||
// Prints: <Buffer 88 13 a0 0f> |
|||
|
|||
// changing the TypedArray changes the Buffer also |
|||
arr[1] = 6000; |
|||
|
|||
console.log(buf); |
|||
// Prints: <Buffer 88 13 70 17> |
|||
``` |
|||
|
|||
The optional `byteOffset` and `length` arguments specify a memory range within |
|||
the `arrayBuffer` that will be shared by the `Buffer`. |
|||
|
|||
```js |
|||
const ab = new ArrayBuffer(10); |
|||
const buf = Buffer.from(ab, 0, 2); |
|||
console.log(buf.length); |
|||
// Prints: 2 |
|||
``` |
|||
|
|||
A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. |
|||
|
|||
### Class Method: Buffer.from(buffer) |
|||
<!-- YAML |
|||
added: v3.0.0 |
|||
--> |
|||
|
|||
* `buffer` {Buffer} |
|||
|
|||
Copies the passed `buffer` data onto a new `Buffer` instance. |
|||
|
|||
```js |
|||
const buf1 = Buffer.from('buffer'); |
|||
const buf2 = Buffer.from(buf1); |
|||
|
|||
buf1[0] = 0x61; |
|||
console.log(buf1.toString()); |
|||
// 'auffer' |
|||
console.log(buf2.toString()); |
|||
// 'buffer' (copy is not changed) |
|||
``` |
|||
|
|||
A `TypeError` will be thrown if `buffer` is not a `Buffer`. |
|||
|
|||
### Class Method: Buffer.from(str[, encoding]) |
|||
<!-- YAML |
|||
added: v5.10.0 |
|||
--> |
|||
|
|||
* `str` {String} String to encode. |
|||
* `encoding` {String} Encoding to use, Default: `'utf8'` |
|||
|
|||
Creates a new `Buffer` containing the given JavaScript string `str`. If |
|||
provided, the `encoding` parameter identifies the character encoding. |
|||
If not provided, `encoding` defaults to `'utf8'`. |
|||
|
|||
```js |
|||
const buf1 = Buffer.from('this is a tést'); |
|||
console.log(buf1.toString()); |
|||
// prints: this is a tést |
|||
console.log(buf1.toString('ascii')); |
|||
// prints: this is a tC)st |
|||
|
|||
const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); |
|||
console.log(buf2.toString()); |
|||
// prints: this is a tést |
|||
``` |
|||
|
|||
A `TypeError` will be thrown if `str` is not a string. |
|||
|
|||
### Class Method: Buffer.alloc(size[, fill[, encoding]]) |
|||
<!-- YAML |
|||
added: v5.10.0 |
|||
--> |
|||
|
|||
* `size` {Number} |
|||
* `fill` {Value} Default: `undefined` |
|||
* `encoding` {String} Default: `utf8` |
|||
|
|||
Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the |
|||
`Buffer` will be *zero-filled*. |
|||
|
|||
```js |
|||
const buf = Buffer.alloc(5); |
|||
console.log(buf); |
|||
// <Buffer 00 00 00 00 00> |
|||
``` |
|||
|
|||
The `size` must be less than or equal to the value of |
|||
`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is |
|||
`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will |
|||
be created if a `size` less than or equal to 0 is specified. |
|||
|
|||
If `fill` is specified, the allocated `Buffer` will be initialized by calling |
|||
`buf.fill(fill)`. See [`buf.fill()`][] for more information. |
|||
|
|||
```js |
|||
const buf = Buffer.alloc(5, 'a'); |
|||
console.log(buf); |
|||
// <Buffer 61 61 61 61 61> |
|||
``` |
|||
|
|||
If both `fill` and `encoding` are specified, the allocated `Buffer` will be |
|||
initialized by calling `buf.fill(fill, encoding)`. For example: |
|||
|
|||
```js |
|||
const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); |
|||
console.log(buf); |
|||
// <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64> |
|||
``` |
|||
|
|||
Calling `Buffer.alloc(size)` can be significantly slower than the alternative |
|||
`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance |
|||
contents will *never contain sensitive data*. |
|||
|
|||
A `TypeError` will be thrown if `size` is not a number. |
|||
|
|||
### Class Method: Buffer.allocUnsafe(size) |
|||
<!-- YAML |
|||
added: v5.10.0 |
|||
--> |
|||
|
|||
* `size` {Number} |
|||
|
|||
Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must |
|||
be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit |
|||
architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is |
|||
thrown. A zero-length Buffer will be created if a `size` less than or equal to |
|||
0 is specified. |
|||
|
|||
The underlying memory for `Buffer` instances created in this way is *not |
|||
initialized*. The contents of the newly created `Buffer` are unknown and |
|||
*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such |
|||
`Buffer` instances to zeroes. |
|||
|
|||
```js |
|||
const buf = Buffer.allocUnsafe(5); |
|||
console.log(buf); |
|||
// <Buffer 78 e0 82 02 01> |
|||
// (octets will be different, every time) |
|||
buf.fill(0); |
|||
console.log(buf); |
|||
// <Buffer 00 00 00 00 00> |
|||
``` |
|||
|
|||
A `TypeError` will be thrown if `size` is not a number. |
|||
|
|||
Note that the `Buffer` module pre-allocates an internal `Buffer` instance of |
|||
size `Buffer.poolSize` that is used as a pool for the fast allocation of new |
|||
`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated |
|||
`new Buffer(size)` constructor) only when `size` is less than or equal to |
|||
`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default |
|||
value of `Buffer.poolSize` is `8192` but can be modified. |
|||
|
|||
Use of this pre-allocated internal memory pool is a key difference between |
|||
calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. |
|||
Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer |
|||
pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal |
|||
Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The |
|||
difference is subtle but can be important when an application requires the |
|||
additional performance that `Buffer.allocUnsafe(size)` provides. |
|||
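A short illustrative sketch of that difference (buffer size and fill value are arbitrary; only the second call may be served from the shared internal pool):

```js
const poolable = Buffer.allocUnsafe(8).fill(1); // may be sliced from the internal Buffer pool
const unpooled = Buffer.alloc(8, 1);            // always a fresh, fully initialized allocation

// Both end up with identical contents:
console.log(poolable.equals(unpooled));
// => true
```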
|
|||
### Class Method: Buffer.allocUnsafeSlow(size) |
|||
<!-- YAML |
|||
added: v5.10.0 |
|||
--> |
|||
|
|||
* `size` {Number} |
|||
|
|||
Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The |
|||
`size` must be less than or equal to the value of |
|||
`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is |
|||
`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will |
|||
be created if a `size` less than or equal to 0 is specified. |
|||
|
|||
The underlying memory for `Buffer` instances created in this way is *not |
|||
initialized*. The contents of the newly created `Buffer` are unknown and |
|||
*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such |
|||
`Buffer` instances to zeroes. |
|||
|
|||
When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, |
|||
allocations under 4KB are, by default, sliced from a single pre-allocated |
|||
`Buffer`. This allows applications to avoid the garbage collection overhead of |
|||
creating many individually allocated Buffers. This approach improves both |
|||
performance and memory usage by eliminating the need to track and cleanup as |
|||
many `Persistent` objects. |
|||
|
|||
However, in the case where a developer may need to retain a small chunk of |
|||
memory from a pool for an indeterminate amount of time, it may be appropriate |
|||
to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then |
|||
copy out the relevant bits. |
|||
|
|||
```js |
|||
// need to keep around a few small chunks of memory |
|||
const store = []; |
|||
|
|||
socket.on('readable', () => { |
|||
const data = socket.read(); |
|||
// allocate for retained data |
|||
const sb = Buffer.allocUnsafeSlow(10); |
|||
// copy the data into the new allocation |
|||
data.copy(sb, 0, 0, 10); |
|||
store.push(sb); |
|||
}); |
|||
``` |
|||
|
|||
`Buffer.allocUnsafeSlow()` should be used only as a last resort *after* |
|||
a developer has observed undue memory retention in their applications. |
|||
|
|||
A `TypeError` will be thrown if `size` is not a number. |
|||
|
|||
### All the Rest |
|||
|
|||
The rest of the `Buffer` API is exactly the same as in node.js. |
|||
[See the docs](https://nodejs.org/api/buffer.html). |
|||
|
|||
|
|||
## Related links |
|||
|
|||
- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) |
|||
- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) |
|||
|
|||
## Why is `Buffer` unsafe? |
|||
|
|||
Today, the node.js `Buffer` constructor is overloaded to handle many different argument |
|||
types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), |
|||
`ArrayBuffer`, and also `Number`. |
|||
|
|||
The API is optimized for convenience: you can throw any type at it, and it will try to do |
|||
what you want. |
|||
|
|||
Because the Buffer constructor is so powerful, you often see code like this: |
|||
|
|||
```js |
|||
// Convert UTF-8 strings to hex |
|||
function toHex (str) { |
|||
return new Buffer(str).toString('hex') |
|||
} |
|||
``` |
|||
|
|||
***But what happens if `toHex` is called with a `Number` argument?*** |
|||
|
|||
### Remote Memory Disclosure |
|||
|
|||
If an attacker can make your program call the `Buffer` constructor with a `Number` |
|||
argument, then they can make it allocate uninitialized memory from the node.js process. |
|||
This could potentially disclose TLS private keys, user data, or database passwords. |
|||
|
|||
When the `Buffer` constructor is passed a `Number` argument, it returns an |
|||
**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like |
|||
this, you **MUST** overwrite the contents before returning it to the user. |
|||
|
|||
From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): |
|||
|
|||
> `new Buffer(size)` |
|||
> |
|||
> - `size` Number |
|||
> |
|||
> The underlying memory for `Buffer` instances created in this way is not initialized. |
|||
> **The contents of a newly created `Buffer` are unknown and could contain sensitive |
|||
> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. |
|||
|
|||
(Emphasis our own.) |
|||
|
|||
Whenever the programmer intended to create an uninitialized `Buffer`, you often see code |
|||
like this: |
|||
|
|||
```js |
|||
var buf = new Buffer(16) |
|||
|
|||
// Immediately overwrite the uninitialized buffer with data from another buffer |
|||
for (var i = 0; i < buf.length; i++) { |
|||
buf[i] = otherBuf[i] |
|||
} |
|||
``` |
|||
|
|||
|
|||
### Would this ever be a problem in real code? |
|||
|
|||
Yes. It's surprisingly common to forget to check the type of your variables in a |
|||
dynamically-typed language like JavaScript. |
|||
|
|||
Usually the consequence of assuming the wrong type is that your program crashes with an |
|||
uncaught exception. But the failure mode for forgetting to check the type of arguments to |
|||
the `Buffer` constructor is more catastrophic. |
|||
|
|||
Here's an example of a vulnerable service that takes a JSON payload and converts it to |
|||
hex: |
|||
|
|||
```js |
|||
// Take a JSON payload {str: "some string"} and convert it to hex |
|||
var server = http.createServer(function (req, res) { |
|||
var data = '' |
|||
req.setEncoding('utf8') |
|||
req.on('data', function (chunk) { |
|||
data += chunk |
|||
}) |
|||
req.on('end', function () { |
|||
var body = JSON.parse(data) |
|||
res.end(new Buffer(body.str).toString('hex')) |
|||
}) |
|||
}) |
|||
|
|||
server.listen(8080) |
|||
``` |
|||
|
|||
In this example, an http client just has to send: |
|||
|
|||
```json |
|||
{ |
|||
"str": 1000 |
|||
} |
|||
``` |
|||
|
|||
and it will get back 1,000 bytes of uninitialized memory from the server. |
|||
|
|||
This is a very serious bug. It's similar in severity to |
|||
[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process |
|||
memory by remote attackers. |
|||
|
|||
|
|||
### Which real-world packages were vulnerable? |
|||
|
|||
#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) |
|||
|
|||
[Mathias Buus](https://github.com/mafintosh) and I |
|||
([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, |
|||
[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow |
|||
anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get |
|||
them to reveal 20 bytes at a time of uninitialized memory from the node.js process. |
|||
|
|||
Here's |
|||
[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) |
|||
that fixed it. We released a new fixed version, created a |
|||
[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all |
|||
vulnerable versions on npm so users will get a warning to upgrade to a newer version. |
|||
|
|||
#### [`ws`](https://www.npmjs.com/package/ws) |
|||
|
|||
That got us wondering if there were other vulnerable packages. Sure enough, within a short |
|||
period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the |
|||
most popular WebSocket implementation in node.js. |
|||
|
|||
If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as |
|||
expected, then uninitialized server memory would be disclosed to the remote peer. |
|||
|
|||
These were the vulnerable methods: |
|||
|
|||
```js |
|||
socket.send(number) |
|||
socket.ping(number) |
|||
socket.pong(number) |
|||
``` |
|||
|
|||
Here's a vulnerable socket server with some echo functionality: |
|||
|
|||
```js |
|||
server.on('connection', function (socket) { |
|||
socket.on('message', function (message) { |
|||
message = JSON.parse(message) |
|||
if (message.type === 'echo') { |
|||
socket.send(message.data) // send back the user's message |
|||
} |
|||
}) |
|||
}) |
|||
``` |
|||
|
|||
Calling `socket.send(number)` on the server will disclose server memory. |
|||
|
|||
Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue |
|||
was fixed, with a more detailed explanation. Props to |
|||
[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the |
|||
[Node Security Project disclosure](https://nodesecurity.io/advisories/67). |
|||
|
|||
|
|||
### What's the solution? |
|||
|
|||
It's important that node.js offers a fast way to get memory; otherwise, performance-critical |
|||
applications would needlessly get a lot slower. |
|||
|
|||
But we need a better way to *signal our intent* as programmers. **When we want |
|||
uninitialized memory, we should request it explicitly.** |
|||
|
|||
Sensitive functionality should not be packed into a developer-friendly API that loosely |
|||
accepts many different types. This type of API encourages the lazy practice of passing |
|||
variables in without checking the type very carefully. |
|||
|
|||
#### A new API: `Buffer.allocUnsafe(number)` |
|||
|
|||
The functionality of creating buffers with uninitialized memory should be part of another |
|||
API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that |
|||
frequently gets user input of all sorts of different types passed into it. |
|||
|
|||
```js |
|||
var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! |
|||
|
|||
// Immediately overwrite the uninitialized buffer with data from another buffer |
|||
for (var i = 0; i < buf.length; i++) { |
|||
buf[i] = otherBuf[i] |
|||
} |
|||
``` |
|||
|
|||
|
|||
### How do we fix node.js core? |
|||
|
|||
We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as |
|||
`semver-major`) which defends against one case: |
|||
|
|||
```js |
|||
var str = 16 |
|||
new Buffer(str, 'utf8') |
|||
``` |
|||
|
|||
In this situation, it's implied that the programmer intended the first argument to be a |
|||
string, since they passed an encoding as a second argument. Today, node.js will allocate |
|||
uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not |
|||
what the programmer intended. |
|||
|
|||
But this is only a partial solution, since if the programmer does `new Buffer(variable)` |
|||
(without an `encoding` parameter) there's no way to know what they intended. If `variable` |
|||
is sometimes a number, then uninitialized memory will sometimes be returned. |
|||
|
|||
### What's the real long-term fix? |
|||
|
|||
We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when |
|||
we need uninitialized memory. But that would break 1000s of packages. |
|||
|
|||
~~We believe the best solution is to:~~ |
|||
|
|||
~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ |
|||
|
|||
~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ |
|||
|
|||
#### Update |
|||
|
|||
We now support adding three new APIs: |
|||
|
|||
- `Buffer.from(value)` - convert from any type to a buffer |
|||
- `Buffer.alloc(size)` - create a zero-filled buffer |
|||
- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size |
|||
|
|||
This solves the core problem that affected `ws` and `bittorrent-dht`, which is |
|||
`Buffer(variable)` getting tricked into taking a number argument. |
|||
|
|||
This way, existing code continues working and the impact on the npm ecosystem will be |
|||
minimal. Over time, npm maintainers can migrate performance-critical code to use |
|||
`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. |
|||
|
|||
|
|||
### Conclusion |
|||
|
|||
We think there's a serious design issue with the `Buffer` API as it exists today. It |
|||
promotes insecure software by putting high-risk functionality into a convenient API |
|||
with friendly "developer ergonomics". |
|||
|
|||
This wasn't merely a theoretical exercise because we found the issue in some of the |
|||
most popular npm packages. |
|||
|
|||
Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of |
|||
`buffer`. |
|||
|
|||
```js |
|||
var Buffer = require('safe-buffer').Buffer |
|||
``` |
|||
|
|||
Eventually, we hope that node.js core can switch to this new, safer behavior. We believe |
|||
the impact on the ecosystem would be minimal since it's not a breaking change. |
|||
Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while |
|||
older, insecure packages would magically become safe from this attack vector. |
|||
|
|||
|
|||
## links |
|||
|
|||
- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) |
|||
- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) |
|||
- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68) |
|||
|
|||
|
|||
## credit |
|||
|
|||
The original issues in `bittorrent-dht` |
|||
([disclosure](https://nodesecurity.io/advisories/68)) and |
|||
`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by |
|||
[Mathias Buus](https://github.com/mafintosh) and |
|||
[Feross Aboukhadijeh](http://feross.org/). |
|||
|
|||
Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues |
|||
and for his work running the [Node Security Project](https://nodesecurity.io/). |
|||
|
|||
Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and |
|||
auditing the code. |
|||
|
|||
|
|||
## license |
|||
|
|||
MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) |
|||
@ -0,0 +1,187 @@ |
|||
declare module "safe-buffer" { |
|||
export class Buffer { |
|||
length: number |
|||
write(string: string, offset?: number, length?: number, encoding?: string): number; |
|||
toString(encoding?: string, start?: number, end?: number): string; |
|||
toJSON(): { type: 'Buffer', data: any[] }; |
|||
equals(otherBuffer: Buffer): boolean; |
|||
compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; |
|||
copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; |
|||
slice(start?: number, end?: number): Buffer; |
|||
writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; |
|||
writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; |
|||
writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; |
|||
writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; |
|||
readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; |
|||
readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; |
|||
readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; |
|||
readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; |
|||
readUInt8(offset: number, noAssert?: boolean): number; |
|||
readUInt16LE(offset: number, noAssert?: boolean): number; |
|||
readUInt16BE(offset: number, noAssert?: boolean): number; |
|||
readUInt32LE(offset: number, noAssert?: boolean): number; |
|||
readUInt32BE(offset: number, noAssert?: boolean): number; |
|||
readInt8(offset: number, noAssert?: boolean): number; |
|||
readInt16LE(offset: number, noAssert?: boolean): number; |
|||
readInt16BE(offset: number, noAssert?: boolean): number; |
|||
readInt32LE(offset: number, noAssert?: boolean): number; |
|||
readInt32BE(offset: number, noAssert?: boolean): number; |
|||
readFloatLE(offset: number, noAssert?: boolean): number; |
|||
readFloatBE(offset: number, noAssert?: boolean): number; |
|||
readDoubleLE(offset: number, noAssert?: boolean): number; |
|||
readDoubleBE(offset: number, noAssert?: boolean): number; |
|||
swap16(): Buffer; |
|||
swap32(): Buffer; |
|||
swap64(): Buffer; |
|||
writeUInt8(value: number, offset: number, noAssert?: boolean): number; |
|||
writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeInt8(value: number, offset: number, noAssert?: boolean): number; |
|||
writeInt16LE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeInt16BE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeInt32LE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeInt32BE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeFloatLE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeFloatBE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; |
|||
writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; |
|||
fill(value: any, offset?: number, end?: number): this; |
|||
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; |
|||
lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; |
|||
includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; |
|||
|
|||
/** |
|||
* Allocates a new buffer containing the given {str}. |
|||
* |
|||
* @param str String to store in buffer. |
|||
* @param encoding encoding to use, optional. Default is 'utf8' |
|||
*/ |
|||
constructor (str: string, encoding?: string); |
|||
/** |
|||
* Allocates a new buffer of {size} octets. |
|||
* |
|||
* @param size count of octets to allocate. |
|||
*/ |
|||
constructor (size: number); |
|||
/** |
|||
* Allocates a new buffer containing the given {array} of octets. |
|||
* |
|||
* @param array The octets to store. |
|||
*/ |
|||
constructor (array: Uint8Array); |
|||
/** |
|||
* Produces a Buffer backed by the same allocated memory as |
|||
* the given {ArrayBuffer}. |
|||
* |
|||
* |
|||
* @param arrayBuffer The ArrayBuffer with which to share memory. |
|||
*/ |
|||
constructor (arrayBuffer: ArrayBuffer); |
|||
/** |
|||
* Allocates a new buffer containing the given {array} of octets. |
|||
* |
|||
* @param array The octets to store. |
|||
*/ |
|||
constructor (array: any[]); |
|||
/** |
|||
* Copies the passed {buffer} data onto a new {Buffer} instance. |
|||
* |
|||
* @param buffer The buffer to copy. |
|||
*/ |
|||
constructor (buffer: Buffer); |
|||
prototype: Buffer; |
|||
/** |
|||
* Allocates a new Buffer using an {array} of octets. |
|||
* |
|||
* @param array |
|||
*/ |
|||
static from(array: any[]): Buffer; |
|||
/** |
|||
* When passed a reference to the .buffer property of a TypedArray instance, |
|||
* the newly created Buffer will share the same allocated memory as the TypedArray. |
|||
* The optional {byteOffset} and {length} arguments specify a memory range |
|||
* within the {arrayBuffer} that will be shared by the Buffer. |
|||
* |
|||
* @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() |
|||
* @param byteOffset |
|||
* @param length |
|||
*/ |
|||
static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; |
|||
/** |
|||
* Copies the passed {buffer} data onto a new Buffer instance. |
|||
* |
|||
* @param buffer |
|||
*/ |
|||
static from(buffer: Buffer): Buffer; |
|||
/** |
|||
* Creates a new Buffer containing the given JavaScript string {str}. |
|||
* If provided, the {encoding} parameter identifies the character encoding. |
|||
* If not provided, {encoding} defaults to 'utf8'. |
|||
* |
|||
* @param str |
|||
*/ |
|||
static from(str: string, encoding?: string): Buffer; |
|||
/** |
|||
* Returns true if {obj} is a Buffer |
|||
* |
|||
* @param obj object to test. |
|||
*/ |
|||
static isBuffer(obj: any): obj is Buffer; |
|||
/** |
|||
* Returns true if {encoding} is a valid encoding argument. |
|||
* Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' |
|||
* |
|||
* @param encoding string to test. |
|||
*/ |
|||
static isEncoding(encoding: string): boolean; |
|||
/** |
|||
* Gives the actual byte length of a string. encoding defaults to 'utf8'. |
|||
* This is not the same as String.prototype.length since that returns the number of characters in a string. |
|||
* |
|||
* @param string string to test. |
|||
* @param encoding encoding used to evaluate (defaults to 'utf8') |
|||
*/ |
|||
static byteLength(string: string, encoding?: string): number; |
|||
/** |
|||
* Returns a buffer which is the result of concatenating all the buffers in the list together. |
|||
* |
|||
* If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. |
|||
* If the list has exactly one item, then the first item of the list is returned. |
|||
* If the list has more than one item, then a new Buffer is created. |
|||
* |
|||
* @param list An array of Buffer objects to concatenate |
|||
* @param totalLength Total length of the buffers when concatenated. |
|||
* If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. |
|||
*/ |
|||
static concat(list: Buffer[], totalLength?: number): Buffer; |
|||
/** |
|||
* The same as buf1.compare(buf2). |
|||
*/ |
|||
static compare(buf1: Buffer, buf2: Buffer): number; |
|||
/** |
|||
* Allocates a new buffer of {size} octets. |
|||
* |
|||
* @param size count of octets to allocate. |
|||
* @param fill if specified, buffer will be initialized by calling buf.fill(fill). |
|||
* If parameter is omitted, buffer will be filled with zeros. |
|||
* @param encoding encoding used for call to buf.fill while initializing |
|||
*/ |
|||
static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; |
|||
/** |
|||
* Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents |
|||
* of the newly created Buffer are unknown and may contain sensitive data. |
|||
* |
|||
* @param size count of octets to allocate |
|||
*/ |
|||
static allocUnsafe(size: number): Buffer; |
|||
/** |
|||
* Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents |
|||
* of the newly created Buffer are unknown and may contain sensitive data. |
|||
* |
|||
* @param size count of octets to allocate |
|||
*/ |
|||
static allocUnsafeSlow(size: number): Buffer; |
|||
} |
|||
} |
|||
@ -0,0 +1,62 @@ |
|||
/* eslint-disable node/no-deprecated-api */ |
|||
var buffer = require('buffer') |
|||
var Buffer = buffer.Buffer |
|||
|
|||
// alternative to using Object.keys for old browsers
|
|||
function copyProps (src, dst) { |
|||
for (var key in src) { |
|||
dst[key] = src[key] |
|||
} |
|||
} |
|||
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { |
|||
module.exports = buffer |
|||
} else { |
|||
// Copy properties from require('buffer')
|
|||
copyProps(buffer, exports) |
|||
exports.Buffer = SafeBuffer |
|||
} |
|||
|
|||
function SafeBuffer (arg, encodingOrOffset, length) { |
|||
return Buffer(arg, encodingOrOffset, length) |
|||
} |
|||
|
|||
// Copy static methods from Buffer
|
|||
copyProps(Buffer, SafeBuffer) |
|||
|
|||
SafeBuffer.from = function (arg, encodingOrOffset, length) { |
|||
if (typeof arg === 'number') { |
|||
throw new TypeError('Argument must not be a number') |
|||
} |
|||
return Buffer(arg, encodingOrOffset, length) |
|||
} |
|||
|
|||
SafeBuffer.alloc = function (size, fill, encoding) { |
|||
if (typeof size !== 'number') { |
|||
throw new TypeError('Argument must be a number') |
|||
} |
|||
var buf = Buffer(size) |
|||
if (fill !== undefined) { |
|||
if (typeof encoding === 'string') { |
|||
buf.fill(fill, encoding) |
|||
} else { |
|||
buf.fill(fill) |
|||
} |
|||
} else { |
|||
buf.fill(0) |
|||
} |
|||
return buf |
|||
} |
|||
|
|||
SafeBuffer.allocUnsafe = function (size) { |
|||
if (typeof size !== 'number') { |
|||
throw new TypeError('Argument must be a number') |
|||
} |
|||
return Buffer(size) |
|||
} |
|||
|
|||
SafeBuffer.allocUnsafeSlow = function (size) { |
|||
if (typeof size !== 'number') { |
|||
throw new TypeError('Argument must be a number') |
|||
} |
|||
return buffer.SlowBuffer(size) |
|||
} |
|||
@ -0,0 +1,37 @@ |
|||
{ |
|||
"name": "safe-buffer", |
|||
"description": "Safer Node.js Buffer API", |
|||
"version": "5.1.2", |
|||
"author": { |
|||
"name": "Feross Aboukhadijeh", |
|||
"email": "feross@feross.org", |
|||
"url": "http://feross.org" |
|||
}, |
|||
"bugs": { |
|||
"url": "https://github.com/feross/safe-buffer/issues" |
|||
}, |
|||
"devDependencies": { |
|||
"standard": "*", |
|||
"tape": "^4.0.0" |
|||
}, |
|||
"homepage": "https://github.com/feross/safe-buffer", |
|||
"keywords": [ |
|||
"buffer", |
|||
"buffer allocate", |
|||
"node security", |
|||
"safe", |
|||
"safe-buffer", |
|||
"security", |
|||
"uninitialized" |
|||
], |
|||
"license": "MIT", |
|||
"main": "index.js", |
|||
"types": "index.d.ts", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "git://github.com/feross/safe-buffer.git" |
|||
}, |
|||
"scripts": { |
|||
"test": "standard && tape test/*.js" |
|||
} |
|||
} |
|||
@ -0,0 +1,41 @@ |
|||
{ |
|||
"name": "basic-auth", |
|||
"description": "node.js basic auth parser", |
|||
"version": "2.0.1", |
|||
"license": "MIT", |
|||
"keywords": [ |
|||
"basic", |
|||
"auth", |
|||
"authorization", |
|||
"basicauth" |
|||
], |
|||
"repository": "jshttp/basic-auth", |
|||
"dependencies": { |
|||
"safe-buffer": "5.1.2" |
|||
}, |
|||
"devDependencies": { |
|||
"eslint": "5.6.0", |
|||
"eslint-config-standard": "12.0.0", |
|||
"eslint-plugin-import": "2.14.0", |
|||
"eslint-plugin-markdown": "1.0.0-beta.6", |
|||
"eslint-plugin-node": "7.0.1", |
|||
"eslint-plugin-promise": "4.0.1", |
|||
"eslint-plugin-standard": "4.0.0", |
|||
"istanbul": "0.4.5", |
|||
"mocha": "5.2.0" |
|||
}, |
|||
"files": [ |
|||
"HISTORY.md", |
|||
"LICENSE", |
|||
"index.js" |
|||
], |
|||
"engines": { |
|||
"node": ">= 0.8" |
|||
}, |
|||
"scripts": { |
|||
"lint": "eslint --plugin markdown --ext js,md .", |
|||
"test": "mocha --check-leaks --reporter spec --bail", |
|||
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", |
|||
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" |
|||
} |
|||
} |
|||
@ -0,0 +1,263 @@ |
|||
[ |
|||
"3dm", |
|||
"3ds", |
|||
"3g2", |
|||
"3gp", |
|||
"7z", |
|||
"a", |
|||
"aac", |
|||
"adp", |
|||
"afdesign", |
|||
"afphoto", |
|||
"afpub", |
|||
"ai", |
|||
"aif", |
|||
"aiff", |
|||
"alz", |
|||
"ape", |
|||
"apk", |
|||
"appimage", |
|||
"ar", |
|||
"arj", |
|||
"asf", |
|||
"au", |
|||
"avi", |
|||
"bak", |
|||
"baml", |
|||
"bh", |
|||
"bin", |
|||
"bk", |
|||
"bmp", |
|||
"btif", |
|||
"bz2", |
|||
"bzip2", |
|||
"cab", |
|||
"caf", |
|||
"cgm", |
|||
"class", |
|||
"cmx", |
|||
"cpio", |
|||
"cr2", |
|||
"cur", |
|||
"dat", |
|||
"dcm", |
|||
"deb", |
|||
"dex", |
|||
"djvu", |
|||
"dll", |
|||
"dmg", |
|||
"dng", |
|||
"doc", |
|||
"docm", |
|||
"docx", |
|||
"dot", |
|||
"dotm", |
|||
"dra", |
|||
"DS_Store", |
|||
"dsk", |
|||
"dts", |
|||
"dtshd", |
|||
"dvb", |
|||
"dwg", |
|||
"dxf", |
|||
"ecelp4800", |
|||
"ecelp7470", |
|||
"ecelp9600", |
|||
"egg", |
|||
"eol", |
|||
"eot", |
|||
"epub", |
|||
"exe", |
|||
"f4v", |
|||
"fbs", |
|||
"fh", |
|||
"fla", |
|||
"flac", |
|||
"flatpak", |
|||
"fli", |
|||
"flv", |
|||
"fpx", |
|||
"fst", |
|||
"fvt", |
|||
"g3", |
|||
"gh", |
|||
"gif", |
|||
"graffle", |
|||
"gz", |
|||
"gzip", |
|||
"h261", |
|||
"h263", |
|||
"h264", |
|||
"icns", |
|||
"ico", |
|||
"ief", |
|||
"img", |
|||
"ipa", |
|||
"iso", |
|||
"jar", |
|||
"jpeg", |
|||
"jpg", |
|||
"jpgv", |
|||
"jpm", |
|||
"jxr", |
|||
"key", |
|||
"ktx", |
|||
"lha", |
|||
"lib", |
|||
"lvp", |
|||
"lz", |
|||
"lzh", |
|||
"lzma", |
|||
"lzo", |
|||
"m3u", |
|||
"m4a", |
|||
"m4v", |
|||
"mar", |
|||
"mdi", |
|||
"mht", |
|||
"mid", |
|||
"midi", |
|||
"mj2", |
|||
"mka", |
|||
"mkv", |
|||
"mmr", |
|||
"mng", |
|||
"mobi", |
|||
"mov", |
|||
"movie", |
|||
"mp3", |
|||
"mp4", |
|||
"mp4a", |
|||
"mpeg", |
|||
"mpg", |
|||
"mpga", |
|||
"mxu", |
|||
"nef", |
|||
"npx", |
|||
"numbers", |
|||
"nupkg", |
|||
"o", |
|||
"odp", |
|||
"ods", |
|||
"odt", |
|||
"oga", |
|||
"ogg", |
|||
"ogv", |
|||
"otf", |
|||
"ott", |
|||
"pages", |
|||
"pbm", |
|||
"pcx", |
|||
"pdb", |
|||
"pdf", |
|||
"pea", |
|||
"pgm", |
|||
"pic", |
|||
"png", |
|||
"pnm", |
|||
"pot", |
|||
"potm", |
|||
"potx", |
|||
"ppa", |
|||
"ppam", |
|||
"ppm", |
|||
"pps", |
|||
"ppsm", |
|||
"ppsx", |
|||
"ppt", |
|||
"pptm", |
|||
"pptx", |
|||
"psd", |
|||
"pya", |
|||
"pyc", |
|||
"pyo", |
|||
"pyv", |
|||
"qt", |
|||
"rar", |
|||
"ras", |
|||
"raw", |
|||
"resources", |
|||
"rgb", |
|||
"rip", |
|||
"rlc", |
|||
"rmf", |
|||
"rmvb", |
|||
"rpm", |
|||
"rtf", |
|||
"rz", |
|||
"s3m", |
|||
"s7z", |
|||
"scpt", |
|||
"sgi", |
|||
"shar", |
|||
"snap", |
|||
"sil", |
|||
"sketch", |
|||
"slk", |
|||
"smv", |
|||
"snk", |
|||
"so", |
|||
"stl", |
|||
"suo", |
|||
"sub", |
|||
"swf", |
|||
"tar", |
|||
"tbz", |
|||
"tbz2", |
|||
"tga", |
|||
"tgz", |
|||
"thmx", |
|||
"tif", |
|||
"tiff", |
|||
"tlz", |
|||
"ttc", |
|||
"ttf", |
|||
"txz", |
|||
"udf", |
|||
"uvh", |
|||
"uvi", |
|||
"uvm", |
|||
"uvp", |
|||
"uvs", |
|||
"uvu", |
|||
"viv", |
|||
"vob", |
|||
"war", |
|||
"wav", |
|||
"wax", |
|||
"wbmp", |
|||
"wdp", |
|||
"weba", |
|||
"webm", |
|||
"webp", |
|||
"whl", |
|||
"wim", |
|||
"wm", |
|||
"wma", |
|||
"wmv", |
|||
"wmx", |
|||
"woff", |
|||
"woff2", |
|||
"wrm", |
|||
"wvx", |
|||
"xbm", |
|||
"xif", |
|||
"xla", |
|||
"xlam", |
|||
"xls", |
|||
"xlsb", |
|||
"xlsm", |
|||
"xlsx", |
|||
"xlt", |
|||
"xltm", |
|||
"xltx", |
|||
"xm", |
|||
"xmind", |
|||
"xpi", |
|||
"xpm", |
|||
"xwd", |
|||
"xz", |
|||
"z", |
|||
"zip", |
|||
"zipx" |
|||
] |
|||
@ -0,0 +1,3 @@ |
|||
declare const binaryExtensionsJson: readonly string[]; |
|||
|
|||
export = binaryExtensionsJson; |
|||
@ -0,0 +1,14 @@ |
|||
/** |
|||
List of binary file extensions. |
|||
|
|||
@example |
|||
``` |
|||
import binaryExtensions = require('binary-extensions'); |
|||
|
|||
console.log(binaryExtensions); |
|||
//=> ['3ds', '3g2', …]
|
|||
``` |
|||
*/ |
|||
declare const binaryExtensions: readonly string[]; |
|||
|
|||
export = binaryExtensions; |
|||
@ -0,0 +1 @@ |
|||
module.exports = require('./binary-extensions.json'); |
|||
@ -0,0 +1,10 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com) |
|||
Copyright (c) Paul Miller (https://paulmillr.com) |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|||
@ -0,0 +1,40 @@ |
|||
{ |
|||
"name": "binary-extensions", |
|||
"version": "2.3.0", |
|||
"description": "List of binary file extensions", |
|||
"license": "MIT", |
|||
"repository": "sindresorhus/binary-extensions", |
|||
"funding": "https://github.com/sponsors/sindresorhus", |
|||
"author": { |
|||
"name": "Sindre Sorhus", |
|||
"email": "sindresorhus@gmail.com", |
|||
"url": "https://sindresorhus.com" |
|||
}, |
|||
"sideEffects": false, |
|||
"engines": { |
|||
"node": ">=8" |
|||
}, |
|||
"scripts": { |
|||
"test": "xo && ava && tsd" |
|||
}, |
|||
"files": [ |
|||
"index.js", |
|||
"index.d.ts", |
|||
"binary-extensions.json", |
|||
"binary-extensions.json.d.ts" |
|||
], |
|||
"keywords": [ |
|||
"binary", |
|||
"extensions", |
|||
"extension", |
|||
"file", |
|||
"json", |
|||
"list", |
|||
"array" |
|||
], |
|||
"devDependencies": { |
|||
"ava": "^1.4.1", |
|||
"tsd": "^0.7.2", |
|||
"xo": "^0.24.0" |
|||
} |
|||
} |
|||
@ -0,0 +1,25 @@ |
|||
# binary-extensions |
|||
|
|||
> List of binary file extensions |
|||
|
|||
The list is just a [JSON file](binary-extensions.json) and can be used anywhere. |
|||
|
|||
## Install |
|||
|
|||
```sh |
|||
npm install binary-extensions |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```js |
|||
const binaryExtensions = require('binary-extensions'); |
|||
|
|||
console.log(binaryExtensions); |
|||
//=> ['3ds', '3g2', …] |
|||
``` |
|||
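One common way to use the list is to check a file name's extension against it. A minimal sketch (the helper name and the `path` usage are illustrative, not part of this package):

```js
const path = require('path');
const binaryExtensions = require('binary-extensions');

// The list contains lower-case extensions without the leading dot
function hasBinaryExtension(file) {
	const extension = path.extname(file).slice(1).toLowerCase();
	return binaryExtensions.includes(extension);
}

console.log(hasBinaryExtension('photo.PNG')); //=> true
console.log(hasBinaryExtension('notes.txt')); //=> false
```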
|
|||
## Related |
|||
|
|||
- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file |
|||
- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions |
|||
@ -0,0 +1,731 @@ |
|||
2.2.0 / 2025-03-27 |
|||
========================= |
|||
|
|||
* refactor: normalize common options for all parsers |
|||
* deps: |
|||
* iconv-lite@^0.6.3 |
|||
|
|||
2.1.0 / 2025-02-10 |
|||
========================= |
|||
|
|||
* deps: |
|||
* type-is@^2.0.0 |
|||
* debug@^4.4.0 |
|||
* Removed destroy |
|||
* refactor: prefix built-in node module imports |
|||
* use the node require cache instead of custom caching |
|||
|
|||
2.0.2 / 2024-10-31 |
|||
========================= |
|||
|
|||
* remove `unpipe` package and use native `unpipe()` method |
|||
|
|||
2.0.1 / 2024-09-10 |
|||
========================= |
|||
|
|||
* Restore expected behavior `extended` to `false` |
|||
|
|||
2.0.0 / 2024-09-10 |
|||
========================= |
|||
* Propagate changes from 1.20.3 |
|||
* add brotli support #406 |
|||
* Breaking Change: Node.js 18 is the minimum supported version |
|||
|
|||
2.0.0-beta.2 / 2023-02-23 |
|||
========================= |
|||
|
|||
This incorporates all changes after 1.19.1 up to 1.20.2. |
|||
|
|||
* Remove deprecated `bodyParser()` combination middleware |
|||
* deps: debug@3.1.0 |
|||
- Add `DEBUG_HIDE_DATE` environment variable |
|||
- Change timer to per-namespace instead of global |
|||
- Change non-TTY date format |
|||
- Remove `DEBUG_FD` environment variable support |
|||
- Support 256 namespace colors |
|||
* deps: iconv-lite@0.5.2 |
|||
- Add encoding cp720 |
|||
- Add encoding UTF-32 |
|||
* deps: raw-body@3.0.0-beta.1 |
|||
|
|||
2.0.0-beta.1 / 2021-12-17 |
|||
========================= |
|||
|
|||
* Drop support for Node.js 0.8 |
|||
* `req.body` is no longer always initialized to `{}` |
|||
- it is left `undefined` unless a body is parsed |
|||
* `urlencoded` parser now defaults `extended` to `false` |
|||
* Use `on-finished` to determine when body read |
|||
|
|||
1.20.3 / 2024-09-10 |
|||
=================== |
|||
|
|||
* deps: qs@6.13.0 |
|||
* add `depth` option to customize the depth level in the parser |
|||
* IMPORTANT: The default `depth` level for parsing URL-encoded data is now `32` (previously was `Infinity`) |
|||
|
|||
1.20.2 / 2023-02-21 |
|||
=================== |
|||
|
|||
* Fix strict json error message on Node.js 19+ |
|||
* deps: content-type@~1.0.5 |
|||
- perf: skip value escaping when unnecessary |
|||
* deps: raw-body@2.5.2 |
|||
|
|||
1.20.1 / 2022-10-06 |
|||
=================== |
|||
|
|||
* deps: qs@6.11.0 |
|||
* perf: remove unnecessary object clone |
|||
|
|||
1.20.0 / 2022-04-02 |
|||
=================== |
|||
|
|||
* Fix error message for json parse whitespace in `strict` |
|||
* Fix internal error when inflated body exceeds limit |
|||
* Prevent loss of async hooks context |
|||
* Prevent hanging when request already read |
|||
* deps: depd@2.0.0 |
|||
- Replace internal `eval` usage with `Function` constructor |
|||
- Use instance methods on `process` to check for listeners |
|||
* deps: http-errors@2.0.0 |
|||
- deps: depd@2.0.0 |
|||
- deps: statuses@2.0.1 |
|||
* deps: on-finished@2.4.1 |
|||
* deps: qs@6.10.3 |
|||
* deps: raw-body@2.5.1 |
|||
- deps: http-errors@2.0.0 |
|||
|
|||
1.19.2 / 2022-02-15 |
|||
=================== |
|||
|
|||
* deps: bytes@3.1.2 |
|||
* deps: qs@6.9.7 |
|||
* Fix handling of `__proto__` keys |
|||
* deps: raw-body@2.4.3 |
|||
- deps: bytes@3.1.2 |
|||
|
|||
1.19.1 / 2021-12-10 |
|||
=================== |
|||
|
|||
* deps: bytes@3.1.1 |
|||
* deps: http-errors@1.8.1 |
|||
- deps: inherits@2.0.4 |
|||
- deps: toidentifier@1.0.1 |
|||
- deps: setprototypeof@1.2.0 |
|||
* deps: qs@6.9.6 |
|||
* deps: raw-body@2.4.2 |
|||
- deps: bytes@3.1.1 |
|||
- deps: http-errors@1.8.1 |
|||
* deps: safe-buffer@5.2.1 |
|||
* deps: type-is@~1.6.18 |
|||
|
|||
1.19.0 / 2019-04-25 |
|||
=================== |
|||
|
|||
* deps: bytes@3.1.0 |
|||
- Add petabyte (`pb`) support |
|||
* deps: http-errors@1.7.2 |
|||
- Set constructor name when possible |
|||
- deps: setprototypeof@1.1.1 |
|||
- deps: statuses@'>= 1.5.0 < 2' |
|||
* deps: iconv-lite@0.4.24 |
|||
- Added encoding MIK |
|||
* deps: qs@6.7.0 |
|||
- Fix parsing array brackets after index |
|||
* deps: raw-body@2.4.0 |
|||
- deps: bytes@3.1.0 |
|||
- deps: http-errors@1.7.2 |
|||
- deps: iconv-lite@0.4.24 |
|||
* deps: type-is@~1.6.17 |
|||
- deps: mime-types@~2.1.24 |
|||
- perf: prevent internal `throw` on invalid type |
|||
|
|||
1.18.3 / 2018-05-14 |
|||
=================== |
|||
|
|||
* Fix stack trace for strict json parse error |
|||
* deps: depd@~1.1.2 |
|||
- perf: remove argument reassignment |
|||
* deps: http-errors@~1.6.3 |
|||
- deps: depd@~1.1.2 |
|||
- deps: setprototypeof@1.1.0 |
|||
- deps: statuses@'>= 1.3.1 < 2' |
|||
* deps: iconv-lite@0.4.23 |
|||
- Fix loading encoding with year appended |
|||
- Fix deprecation warnings on Node.js 10+ |
|||
* deps: qs@6.5.2 |
|||
* deps: raw-body@2.3.3 |
|||
- deps: http-errors@1.6.3 |
|||
- deps: iconv-lite@0.4.23 |
|||
* deps: type-is@~1.6.16 |
|||
- deps: mime-types@~2.1.18 |
|||
|
|||
1.18.2 / 2017-09-22 |
|||
=================== |
|||
|
|||
* deps: debug@2.6.9 |
|||
* perf: remove argument reassignment |
|||
|
|||
1.18.1 / 2017-09-12 |
|||
=================== |
|||
|
|||
* deps: content-type@~1.0.4 |
|||
- perf: remove argument reassignment |
|||
- perf: skip parameter parsing when no parameters |
|||
* deps: iconv-lite@0.4.19 |
|||
- Fix ISO-8859-1 regression |
|||
- Update Windows-1255 |
|||
* deps: qs@6.5.1 |
|||
- Fix parsing & compacting very deep objects |
|||
* deps: raw-body@2.3.2 |
|||
- deps: iconv-lite@0.4.19 |
|||
|
|||
1.18.0 / 2017-09-08 |
|||
=================== |
|||
|
|||
* Fix JSON strict violation error to match native parse error |
|||
* Include the `body` property on verify errors |
|||
* Include the `type` property on all generated errors |
|||
* Use `http-errors` to set status code on errors |
|||
* deps: bytes@3.0.0 |
|||
* deps: debug@2.6.8 |
|||
* deps: depd@~1.1.1 |
|||
- Remove unnecessary `Buffer` loading |
|||
* deps: http-errors@~1.6.2 |
|||
- deps: depd@1.1.1 |
|||
* deps: iconv-lite@0.4.18 |
|||
- Add support for React Native |
|||
- Add a warning if not loaded as utf-8 |
|||
- Fix CESU-8 decoding in Node.js 8 |
|||
- Improve speed of ISO-8859-1 encoding |
|||
* deps: qs@6.5.0 |
|||
* deps: raw-body@2.3.1 |
|||
- Use `http-errors` for standard emitted errors |
|||
- deps: bytes@3.0.0 |
|||
- deps: iconv-lite@0.4.18 |
|||
- perf: skip buffer decoding on overage chunk |
|||
* perf: prevent internal `throw` when missing charset |
|||
|
|||
1.17.2 / 2017-05-17 |
|||
=================== |
|||
|
|||
* deps: debug@2.6.7 |
|||
- Fix `DEBUG_MAX_ARRAY_LENGTH` |
|||
- deps: ms@2.0.0 |
|||
* deps: type-is@~1.6.15 |
|||
- deps: mime-types@~2.1.15 |
|||
|
|||
1.17.1 / 2017-03-06 |
|||
=================== |
|||
|
|||
* deps: qs@6.4.0 |
|||
- Fix regression parsing keys starting with `[` |
|||
|
|||
1.17.0 / 2017-03-01 |
|||
=================== |
|||
|
|||
* deps: http-errors@~1.6.1 |
|||
- Make `message` property enumerable for `HttpError`s |
|||
- deps: setprototypeof@1.0.3 |
|||
* deps: qs@6.3.1 |
|||
- Fix compacting nested arrays |
|||
|
|||
1.16.1 / 2017-02-10 |
|||
=================== |
|||
|
|||
* deps: debug@2.6.1 |
|||
- Fix deprecation messages in WebStorm and other editors |
|||
- Undeprecate `DEBUG_FD` set to `1` or `2` |
|||
|
|||
1.16.0 / 2017-01-17 |
|||
=================== |
|||
|
|||
* deps: debug@2.6.0 |
|||
- Allow colors in workers |
|||
- Deprecated `DEBUG_FD` environment variable |
|||
- Fix error when running under React Native |
|||
- Use same color for same namespace |
|||
- deps: ms@0.7.2 |
|||
* deps: http-errors@~1.5.1 |
|||
- deps: inherits@2.0.3 |
|||
- deps: setprototypeof@1.0.2 |
|||
- deps: statuses@'>= 1.3.1 < 2' |
|||
* deps: iconv-lite@0.4.15 |
|||
- Added encoding MS-31J |
|||
- Added encoding MS-932 |
|||
- Added encoding MS-936 |
|||
- Added encoding MS-949 |
|||
- Added encoding MS-950 |
|||
- Fix GBK/GB18030 handling of Euro character |
|||
* deps: qs@6.2.1 |
|||
- Fix array parsing from skipping empty values |
|||
* deps: raw-body@~2.2.0 |
|||
- deps: iconv-lite@0.4.15 |
|||
* deps: type-is@~1.6.14 |
|||
- deps: mime-types@~2.1.13 |
|||
|
|||
1.15.2 / 2016-06-19 |
|||
=================== |
|||
|
|||
* deps: bytes@2.4.0 |
|||
* deps: content-type@~1.0.2 |
|||
- perf: enable strict mode |
|||
* deps: http-errors@~1.5.0 |
|||
- Use `setprototypeof` module to replace `__proto__` setting |
|||
- deps: statuses@'>= 1.3.0 < 2' |
|||
- perf: enable strict mode |
|||
* deps: qs@6.2.0 |
|||
* deps: raw-body@~2.1.7 |
|||
- deps: bytes@2.4.0 |
|||
- perf: remove double-cleanup on happy path |
|||
* deps: type-is@~1.6.13 |
|||
- deps: mime-types@~2.1.11 |
|||
|
|||
1.15.1 / 2016-05-05 |
|||
=================== |
|||
|
|||
* deps: bytes@2.3.0 |
|||
- Drop partial bytes on all parsed units |
|||
- Fix parsing byte string that looks like hex |
|||
* deps: raw-body@~2.1.6 |
|||
- deps: bytes@2.3.0 |
|||
* deps: type-is@~1.6.12 |
|||
- deps: mime-types@~2.1.10 |
|||
|
|||
1.15.0 / 2016-02-10 |
|||
=================== |
|||
|
|||
* deps: http-errors@~1.4.0 |
|||
- Add `HttpError` export, for `err instanceof createError.HttpError` |
|||
- deps: inherits@2.0.1 |
|||
- deps: statuses@'>= 1.2.1 < 2' |
|||
* deps: qs@6.1.0 |
|||
* deps: type-is@~1.6.11 |
|||
- deps: mime-types@~2.1.9 |
|||
|
|||
1.14.2 / 2015-12-16 |
|||
=================== |
|||
|
|||
* deps: bytes@2.2.0 |
|||
* deps: iconv-lite@0.4.13 |
|||
* deps: qs@5.2.0 |
|||
* deps: raw-body@~2.1.5 |
|||
- deps: bytes@2.2.0 |
|||
- deps: iconv-lite@0.4.13 |
|||
* deps: type-is@~1.6.10 |
|||
- deps: mime-types@~2.1.8 |
|||
|
|||
1.14.1 / 2015-09-27 |
|||
=================== |
|||
|
|||
* Fix issue where invalid charset results in 400 when `verify` used |
|||
* deps: iconv-lite@0.4.12 |
|||
- Fix CESU-8 decoding in Node.js 4.x |
|||
* deps: raw-body@~2.1.4 |
|||
- Fix masking critical errors from `iconv-lite` |
|||
- deps: iconv-lite@0.4.12 |
|||
* deps: type-is@~1.6.9 |
|||
- deps: mime-types@~2.1.7 |
|||
|
|||
1.14.0 / 2015-09-16 |
|||
=================== |
|||
|
|||
* Fix JSON strict parse error to match syntax errors |
|||
* Provide static `require` analysis in `urlencoded` parser |
|||
* deps: depd@~1.1.0 |
|||
- Support web browser loading |
|||
* deps: qs@5.1.0 |
|||
* deps: raw-body@~2.1.3 |
|||
- Fix sync callback when attaching data listener causes sync read |
|||
* deps: type-is@~1.6.8 |
|||
- Fix type error when given invalid type to match against |
|||
- deps: mime-types@~2.1.6 |
|||
|
|||
1.13.3 / 2015-07-31 |
|||
=================== |
|||
|
|||
* deps: type-is@~1.6.6 |
|||
- deps: mime-types@~2.1.4 |
|||
|
|||
1.13.2 / 2015-07-05 |
|||
=================== |
|||
|
|||
* deps: iconv-lite@0.4.11 |
|||
* deps: qs@4.0.0 |
|||
- Fix dropping parameters like `hasOwnProperty` |
|||
- Fix user-visible incompatibilities from 3.1.0 |
|||
- Fix various parsing edge cases |
|||
* deps: raw-body@~2.1.2 |
|||
- Fix error stack traces to skip `makeError` |
|||
- deps: iconv-lite@0.4.11 |
|||
* deps: type-is@~1.6.4 |
|||
- deps: mime-types@~2.1.2 |
|||
- perf: enable strict mode |
|||
- perf: remove argument reassignment |
|||
|
|||
1.13.1 / 2015-06-16 |
|||
=================== |
|||
|
|||
* deps: qs@2.4.2 |
|||
- Downgraded from 3.1.0 because of user-visible incompatibilities |
|||
|
|||
1.13.0 / 2015-06-14 |
|||
=================== |
|||
|
|||
* Add `statusCode` property on `Error`s, in addition to `status` |
|||
* Change `type` default to `application/json` for JSON parser |
|||
* Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser |
|||
* Provide static `require` analysis |
|||
* Use the `http-errors` module to generate errors |
|||
* deps: bytes@2.1.0 |
|||
- Slight optimizations |
|||
* deps: iconv-lite@0.4.10 |
|||
- The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails |
|||
- Leading BOM is now removed when decoding |
|||
* deps: on-finished@~2.3.0 |
|||
- Add defined behavior for HTTP `CONNECT` requests |
|||
- Add defined behavior for HTTP `Upgrade` requests |
|||
- deps: ee-first@1.1.1 |
|||
* deps: qs@3.1.0 |
|||
- Fix dropping parameters like `hasOwnProperty` |
|||
- Fix various parsing edge cases |
|||
- Parsed object now has `null` prototype |
|||
* deps: raw-body@~2.1.1 |
|||
- Use `unpipe` module for unpiping requests |
|||
- deps: iconv-lite@0.4.10 |
|||
* deps: type-is@~1.6.3 |
|||
- deps: mime-types@~2.1.1 |
|||
- perf: reduce try block size |
|||
- perf: remove bitwise operations |
|||
* perf: enable strict mode |
|||
* perf: remove argument reassignment |
|||
* perf: remove delete call |
|||
|
|||
1.12.4 / 2015-05-10 |
|||
=================== |
|||
|
|||
* deps: debug@~2.2.0 |
|||
* deps: qs@2.4.2 |
|||
- Fix allowing parameters like `constructor` |
|||
* deps: on-finished@~2.2.1 |
|||
* deps: raw-body@~2.0.1 |
|||
- Fix a false-positive when unpiping in Node.js 0.8 |
|||
- deps: bytes@2.0.1 |
|||
* deps: type-is@~1.6.2 |
|||
- deps: mime-types@~2.0.11 |
|||
|
|||
1.12.3 / 2015-04-15 |
|||
=================== |
|||
|
|||
* Slight efficiency improvement when not debugging |
|||
* deps: depd@~1.0.1 |
|||
* deps: iconv-lite@0.4.8 |
|||
- Add encoding alias UNICODE-1-1-UTF-7 |
|||
* deps: raw-body@1.3.4 |
|||
- Fix hanging callback if request aborts during read |
|||
- deps: iconv-lite@0.4.8 |
|||
|
|||
1.12.2 / 2015-03-16 |
|||
=================== |
|||
|
|||
* deps: qs@2.4.1 |
|||
- Fix error when parameter `hasOwnProperty` is present |
|||
|
|||
1.12.1 / 2015-03-15 |
|||
=================== |
|||
|
|||
* deps: debug@~2.1.3 |
|||
- Fix high intensity foreground color for bold |
|||
- deps: ms@0.7.0 |
|||
* deps: type-is@~1.6.1 |
|||
- deps: mime-types@~2.0.10 |
|||
|
|||
1.12.0 / 2015-02-13 |
|||
=================== |
|||
|
|||
* add `debug` messages |
|||
* accept a function for the `type` option |
|||
* use `content-type` to parse `Content-Type` headers |
|||
* deps: iconv-lite@0.4.7 |
|||
- Gracefully support enumerables on `Object.prototype` |
|||
* deps: raw-body@1.3.3 |
|||
- deps: iconv-lite@0.4.7 |
|||
* deps: type-is@~1.6.0 |
|||
- fix argument reassignment |
|||
- fix false-positives in `hasBody` `Transfer-Encoding` check |
|||
- support wildcard for both type and subtype (`*/*`) |
|||
- deps: mime-types@~2.0.9 |
|||
|
|||
1.11.0 / 2015-01-30 |
|||
=================== |
|||
|
|||
* make internal `extended: true` depth limit infinity |
|||
* deps: type-is@~1.5.6 |
|||
- deps: mime-types@~2.0.8 |
|||
|
|||
1.10.2 / 2015-01-20 |
|||
=================== |
|||
|
|||
* deps: iconv-lite@0.4.6 |
|||
- Fix rare aliases of single-byte encodings |
|||
* deps: raw-body@1.3.2 |
|||
- deps: iconv-lite@0.4.6 |
|||
|
|||
1.10.1 / 2015-01-01 |
|||
=================== |
|||
|
|||
* deps: on-finished@~2.2.0 |
|||
* deps: type-is@~1.5.5 |
|||
- deps: mime-types@~2.0.7 |
|||
|
|||
1.10.0 / 2014-12-02 |
|||
=================== |
|||
|
|||
* make internal `extended: true` array limit dynamic |
|||
|
|||
1.9.3 / 2014-11-21 |
|||
================== |
|||
|
|||
* deps: iconv-lite@0.4.5 |
|||
- Fix Windows-31J and X-SJIS encoding support |
|||
* deps: qs@2.3.3 |
|||
- Fix `arrayLimit` behavior |
|||
* deps: raw-body@1.3.1 |
|||
- deps: iconv-lite@0.4.5 |
|||
* deps: type-is@~1.5.3 |
|||
- deps: mime-types@~2.0.3 |
|||
|
|||
1.9.2 / 2014-10-27 |
|||
================== |
|||
|
|||
* deps: qs@2.3.2 |
|||
- Fix parsing of mixed objects and values |
|||
|
|||
1.9.1 / 2014-10-22 |
|||
================== |
|||
|
|||
* deps: on-finished@~2.1.1 |
|||
- Fix handling of pipelined requests |
|||
* deps: qs@2.3.0 |
|||
- Fix parsing of mixed implicit and explicit arrays |
|||
* deps: type-is@~1.5.2 |
|||
- deps: mime-types@~2.0.2 |
|||
|
|||
1.9.0 / 2014-09-24 |
|||
================== |
|||
|
|||
* include the charset in "unsupported charset" error message |
|||
* include the encoding in "unsupported content encoding" error message |
|||
* deps: depd@~1.0.0 |
|||
|
|||
1.8.4 / 2014-09-23 |
|||
================== |
|||
|
|||
* fix content encoding to be case-insensitive |
|||
|
|||
1.8.3 / 2014-09-19 |
|||
================== |
|||
|
|||
* deps: qs@2.2.4 |
|||
- Fix issue with object keys starting with numbers truncated |
|||
|
|||
1.8.2 / 2014-09-15 |
|||
================== |
|||
|
|||
* deps: depd@0.4.5 |
|||
|
|||
1.8.1 / 2014-09-07 |
|||
================== |
|||
|
|||
* deps: media-typer@0.3.0 |
|||
* deps: type-is@~1.5.1 |
|||
|
|||
1.8.0 / 2014-09-05 |
|||
================== |
|||
|
|||
* make empty-body-handling consistent between chunked requests |
|||
- empty `json` produces `{}` |
|||
- empty `raw` produces `new Buffer(0)` |
|||
- empty `text` produces `''` |
|||
- empty `urlencoded` produces `{}` |
|||
* deps: qs@2.2.3 |
|||
- Fix issue where first empty value in array is discarded |
|||
* deps: type-is@~1.5.0 |
|||
- fix `hasbody` to be true for `content-length: 0` |
|||
|
|||
1.7.0 / 2014-09-01 |
|||
================== |
|||
|
|||
* add `parameterLimit` option to `urlencoded` parser |
|||
* change `urlencoded` extended array limit to 100 |
|||
* respond with 413 when over `parameterLimit` in `urlencoded` |
|||
|
|||
1.6.7 / 2014-08-29 |
|||
================== |
|||
|
|||
* deps: qs@2.2.2 |
|||
- Remove unnecessary cloning |
|||
|
|||
1.6.6 / 2014-08-27 |
|||
================== |
|||
|
|||
* deps: qs@2.2.0 |
|||
- Array parsing fix |
|||
- Performance improvements |
|||
|
|||
1.6.5 / 2014-08-16 |
|||
================== |
|||
|
|||
* deps: on-finished@2.1.0 |
|||
|
|||
1.6.4 / 2014-08-14 |
|||
================== |
|||
|
|||
* deps: qs@1.2.2 |
|||
|
|||
1.6.3 / 2014-08-10 |
|||
================== |
|||
|
|||
* deps: qs@1.2.1 |
|||
|
|||
1.6.2 / 2014-08-07 |
|||
================== |
|||
|
|||
* deps: qs@1.2.0 |
|||
- Fix parsing array of objects |
|||
|
|||
1.6.1 / 2014-08-06 |
|||
================== |
|||
|
|||
* deps: qs@1.1.0 |
|||
- Accept urlencoded square brackets |
|||
- Accept empty values in implicit array notation |
|||
|
|||
1.6.0 / 2014-08-05 |
|||
================== |
|||
|
|||
* deps: qs@1.0.2 |
|||
- Complete rewrite |
|||
- Limits array length to 20 |
|||
- Limits object depth to 5 |
|||
- Limits parameters to 1,000 |
|||
|
|||
1.5.2 / 2014-07-27 |
|||
================== |
|||
|
|||
* deps: depd@0.4.4 |
|||
- Work-around v8 generating empty stack traces |
|||
|
|||
1.5.1 / 2014-07-26 |
|||
================== |
|||
|
|||
* deps: depd@0.4.3 |
|||
- Fix exception when global `Error.stackTraceLimit` is too low |
|||
|
|||
1.5.0 / 2014-07-20 |
|||
================== |
|||
|
|||
* deps: depd@0.4.2 |
|||
- Add `TRACE_DEPRECATION` environment variable |
|||
- Remove non-standard grey color from color output |
|||
- Support `--no-deprecation` argument |
|||
- Support `--trace-deprecation` argument |
|||
* deps: iconv-lite@0.4.4 |
|||
- Added encoding UTF-7 |
|||
* deps: raw-body@1.3.0 |
|||
- deps: iconv-lite@0.4.4 |
|||
- Added encoding UTF-7 |
|||
- Fix `Cannot switch to old mode now` error on Node.js 0.10+ |
|||
* deps: type-is@~1.3.2 |
|||
|
|||
1.4.3 / 2014-06-19 |
|||
================== |
|||
|
|||
* deps: type-is@1.3.1 |
|||
- fix global variable leak |
|||
|
|||
1.4.2 / 2014-06-19 |
|||
================== |
|||
|
|||
* deps: type-is@1.3.0 |
|||
- improve type parsing |
|||
|
|||
1.4.1 / 2014-06-19 |
|||
================== |
|||
|
|||
* fix urlencoded extended deprecation message |
|||
|
|||
1.4.0 / 2014-06-19 |
|||
================== |
|||
|
|||
* add `text` parser |
|||
* add `raw` parser |
|||
* check accepted charset in content-type (accepts utf-8) |
|||
* check accepted encoding in content-encoding (accepts identity) |
|||
* deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed |
|||
* deprecate `urlencoded()` without provided `extended` option |
|||
* lazy-load urlencoded parsers |
|||
* parsers split into files for reduced mem usage |
|||
* support gzip and deflate bodies |
|||
- set `inflate: false` to turn off |
|||
* deps: raw-body@1.2.2 |
|||
- Support all encodings from `iconv-lite` |
|||
|
|||
1.3.1 / 2014-06-11 |
|||
================== |
|||
|
|||
* deps: type-is@1.2.1 |
|||
- Switch dependency from mime to mime-types@1.0.0 |
|||
|
|||
1.3.0 / 2014-05-31 |
|||
================== |
|||
|
|||
* add `extended` option to urlencoded parser |
|||
|
|||
1.2.2 / 2014-05-27 |
|||
================== |
|||
|
|||
* deps: raw-body@1.1.6 |
|||
- assert stream encoding on node.js 0.8 |
|||
- assert stream encoding on node.js < 0.10.6 |
|||
- deps: bytes@1 |
|||
|
|||
1.2.1 / 2014-05-26 |
|||
================== |
|||
|
|||
* invoke `next(err)` after request fully read |
|||
- prevents hung responses and socket hang ups |
|||
|
|||
1.2.0 / 2014-05-11 |
|||
================== |
|||
|
|||
* add `verify` option |
|||
* deps: type-is@1.2.0 |
|||
- support suffix matching |
|||
|
|||
1.1.2 / 2014-05-11 |
|||
================== |
|||
|
|||
* improve json parser speed |
|||
|
|||
1.1.1 / 2014-05-11 |
|||
================== |
|||
|
|||
* fix repeated limit parsing with every request |
|||
|
|||
1.1.0 / 2014-05-10 |
|||
================== |
|||
|
|||
* add `type` option |
|||
* deps: pin for safety and consistency |
|||
|
|||
1.0.2 / 2014-04-14 |
|||
================== |
|||
|
|||
* use `type-is` module |
|||
|
|||
1.0.1 / 2014-03-20 |
|||
================== |
|||
|
|||
* lower default limits to 100kb |
|||
@ -0,0 +1,23 @@ |
|||
(The MIT License) |
|||
|
|||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com> |
|||
Copyright (c) 2014-2015 Douglas Christopher Wilson <doug@somethingdoug.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining |
|||
a copy of this software and associated documentation files (the |
|||
'Software'), to deal in the Software without restriction, including |
|||
without limitation the rights to use, copy, modify, merge, publish, |
|||
distribute, sublicense, and/or sell copies of the Software, and to |
|||
permit persons to whom the Software is furnished to do so, subject to |
|||
the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be |
|||
included in all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, |
|||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
|||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
|||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
|||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
|||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|||
@ -0,0 +1,491 @@ |
|||
# body-parser |
|||
|
|||
[![NPM Version][npm-version-image]][npm-url] |
|||
[![NPM Downloads][npm-downloads-image]][npm-url] |
|||
[![Build Status][ci-image]][ci-url] |
|||
[![Test Coverage][coveralls-image]][coveralls-url] |
|||
[![OpenSSF Scorecard Badge][ossf-scorecard-badge]][ossf-scorecard-visualizer] |
|||
|
|||
Node.js body parsing middleware. |
|||
|
|||
Parse incoming request bodies in a middleware before your handlers, available |
|||
under the `req.body` property. |
|||
|
|||
**Note** As `req.body`'s shape is based on user-controlled input, all |
|||
properties and values in this object are untrusted and should be validated |
|||
before trusting. For example, `req.body.foo.toString()` may fail in multiple |
|||
ways: the `foo` property may not be there or may not be a string, and |
|||
`toString` may not be a function but instead a string or other user input. |
|||
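A sketch of the kind of validation this note calls for (the route and field names are illustrative):

```js
app.post('/profile', bodyParser.json(), function (req, res) {
  // req.body is user-controlled input; check its shape before using it
  if (!req.body || typeof req.body.name !== 'string') {
    return res.status(400).send('expected a JSON body with a string "name" field')
  }
  res.send('hello, ' + req.body.name)
})
```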
|
|||
[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). |
|||
|
|||
_This does not handle multipart bodies_, due to their complex and typically |
|||
large nature. For multipart bodies, you may be interested in the following |
|||
modules: |
|||
|
|||
* [busboy](https://www.npmjs.org/package/busboy#readme) and |
|||
[connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) |
|||
* [multiparty](https://www.npmjs.org/package/multiparty#readme) and |
|||
[connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) |
|||
* [formidable](https://www.npmjs.org/package/formidable#readme) |
|||
* [multer](https://www.npmjs.org/package/multer#readme) |
|||
|
|||
This module provides the following parsers: |
|||
|
|||
* [JSON body parser](#bodyparserjsonoptions) |
|||
* [Raw body parser](#bodyparserrawoptions) |
|||
* [Text body parser](#bodyparsertextoptions) |
|||
* [URL-encoded form body parser](#bodyparserurlencodedoptions) |
|||
|
|||
Other body parsers you might be interested in: |
|||
|
|||
- [body](https://www.npmjs.org/package/body#readme) |
|||
- [co-body](https://www.npmjs.org/package/co-body#readme) |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
$ npm install body-parser |
|||
``` |
|||
|
|||
## API |
|||
|
|||
```js |
|||
const bodyParser = require('body-parser') |
|||
``` |
|||
|
|||
The `bodyParser` object exposes various factories to create middlewares. All |
|||
middlewares will populate the `req.body` property with the parsed body when |
|||
the `Content-Type` request header matches the `type` option. |
|||
|
|||
The various errors returned by this module are described in the |
|||
[errors section](#errors). |
|||
|
|||
### bodyParser.json([options]) |
|||
|
|||
Returns middleware that only parses `json` and only looks at requests where |
|||
the `Content-Type` header matches the `type` option. This parser accepts any |
|||
Unicode encoding of the body and supports automatic inflation of `gzip`, |
|||
`br` (brotli) and `deflate` encodings. |
|||
|
|||
A new `body` object containing the parsed data is populated on the `request` |
|||
object after the middleware (i.e. `req.body`). |
|||
|
|||
#### Options |
|||
|
|||
The `json` function takes an optional `options` object that may contain any of |
|||
the following keys: |
|||
|
|||
##### inflate |
|||
|
|||
When set to `true`, then deflated (compressed) bodies will be inflated; when |
|||
`false`, deflated bodies are rejected. Defaults to `true`. |
|||
|
|||
##### limit |
|||
|
|||
Controls the maximum request body size. If this is a number, then the value |
|||
specifies the number of bytes; if it is a string, the value is passed to the |
|||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults |
|||
to `'100kb'`. |
|||
|
|||
##### reviver |
|||
|
|||
The `reviver` option is passed directly to `JSON.parse` as the second |
|||
argument. You can find more information on this argument |
|||
[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). |
|||
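For instance, a reviver can turn ISO 8601 timestamp strings into `Date` objects while the body is parsed; a sketch (the timestamp convention is an assumption about the client):

```js
app.use(bodyParser.json({
  reviver: function (key, value) {
    // revive strings that look like ISO 8601 timestamps into Date instances
    if (typeof value === 'string' && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) {
      return new Date(value)
    }
    return value
  }
}))
```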
|
|||
##### strict |
|||
|
|||
When set to `true`, will only accept arrays and objects; when `false` will |
|||
accept anything `JSON.parse` accepts. Defaults to `true`. |
|||
|
|||
##### type |
|||
|
|||
The `type` option is used to determine what media type the middleware will |
|||
parse. This option can be a string, array of strings, or a function. If not a |
|||
function, `type` option is passed directly to the |
|||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can |
|||
be an extension name (like `json`), a mime type (like `application/json`), or |
|||
a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` |
|||
option is called as `fn(req)` and the request is parsed if it returns a truthy |
|||
value. Defaults to `application/json`. |
|||
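When `type` is a function it receives the request and returns whether this parser should handle it, which helps with content types a simple string matcher cannot express. A sketch:

```js
// parse any request whose Content-Type mentions "json",
// including vendor types such as application/vnd.example+json
app.use(bodyParser.json({
  type: function (req) {
    return (req.headers['content-type'] || '').includes('json')
  }
}))
```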
|
|||
##### verify |
|||
|
|||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, |
|||
where `buf` is a `Buffer` of the raw request body and `encoding` is the |
|||
encoding of the request. The parsing can be aborted by throwing an error. |
|||
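A common use of `verify` is keeping a copy of the raw bytes, for example to check a webhook signature later; a sketch (the `rawBody` property name is an assumption, not something this module sets):

```js
app.use(bodyParser.json({
  verify: function (req, res, buf, encoding) {
    // keep the untouched bytes for a later signature check;
    // throwing here instead would reject the request with a 403 (entity.verify.failed)
    req.rawBody = Buffer.from(buf)
  }
}))
```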
|
|||
### bodyParser.raw([options]) |
|||
|
|||
Returns middleware that parses all bodies as a `Buffer` and only looks at |
|||
requests where the `Content-Type` header matches the `type` option. This |
|||
parser supports automatic inflation of `gzip`, `br` (brotli) and `deflate` |
|||
encodings. |
|||
|
|||
A new `body` object containing the parsed data is populated on the `request` |
|||
object after the middleware (i.e. `req.body`). This will be a `Buffer` object |
|||
of the body. |
|||
|
|||
#### Options |
|||
|
|||
The `raw` function takes an optional `options` object that may contain any of |
|||
the following keys: |
|||
|
|||
##### inflate |
|||
|
|||
When set to `true`, then deflated (compressed) bodies will be inflated; when |
|||
`false`, deflated bodies are rejected. Defaults to `true`. |
|||
|
|||
##### limit |
|||
|
|||
Controls the maximum request body size. If this is a number, then the value |
|||
specifies the number of bytes; if it is a string, the value is passed to the |
|||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults |
|||
to `'100kb'`. |
|||
|
|||
##### type |
|||
|
|||
The `type` option is used to determine what media type the middleware will |
|||
parse. This option can be a string, array of strings, or a function. |
|||
If not a function, `type` option is passed directly to the |
|||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this |
|||
can be an extension name (like `bin`), a mime type (like |
|||
`application/octet-stream`), or a mime type with a wildcard (like `*/*` or |
|||
`application/*`). If a function, the `type` option is called as `fn(req)` |
|||
and the request is parsed if it returns a truthy value. Defaults to |
|||
`application/octet-stream`. |
|||
|
|||
##### verify |
|||
|
|||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, |
|||
where `buf` is a `Buffer` of the raw request body and `encoding` is the |
|||
encoding of the request. The parsing can be aborted by throwing an error. |
|||
|
|||
### bodyParser.text([options]) |
|||
|
|||
Returns middleware that parses all bodies as a string and only looks at |
|||
requests where the `Content-Type` header matches the `type` option. This |
|||
parser supports automatic inflation of `gzip`, `br` (brotli) and `deflate` |
|||
encodings. |
|||
|
|||
A new `body` string containing the parsed data is populated on the `request` |
|||
object after the middleware (i.e. `req.body`). This will be a string of the |
|||
body. |
|||
|
|||
#### Options |
|||
|
|||
The `text` function takes an optional `options` object that may contain any of |
|||
the following keys: |
|||
|
|||
##### defaultCharset |
|||
|
|||
Specify the default character set for the text content if the charset is not |
|||
specified in the `Content-Type` header of the request. Defaults to `utf-8`. |
|||
|
|||
##### inflate |
|||
|
|||
When set to `true`, then deflated (compressed) bodies will be inflated; when |
|||
`false`, deflated bodies are rejected. Defaults to `true`. |
|||
|
|||
##### limit |
|||
|
|||
Controls the maximum request body size. If this is a number, then the value |
|||
specifies the number of bytes; if it is a string, the value is passed to the |
|||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults |
|||
to `'100kb'`. |
|||
|
|||
##### type |
|||
|
|||
The `type` option is used to determine what media type the middleware will |
|||
parse. This option can be a string, array of strings, or a function. If not |
|||
a function, `type` option is passed directly to the |
|||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can |
|||
be an extension name (like `txt`), a mime type (like `text/plain`), or a mime |
|||
type with a wildcard (like `*/*` or `text/*`). If a function, the `type` |
|||
option is called as `fn(req)` and the request is parsed if it returns a |
|||
truthy value. Defaults to `text/plain`. |
|||
|
|||
##### verify |
|||
|
|||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, |
|||
where `buf` is a `Buffer` of the raw request body and `encoding` is the |
|||
encoding of the request. The parsing can be aborted by throwing an error. |
|||
|
|||
### bodyParser.urlencoded([options]) |
|||
|
|||
Returns middleware that only parses `urlencoded` bodies and only looks at |
|||
requests where the `Content-Type` header matches the `type` option. This |
|||
parser accepts only UTF-8 encoding of the body and supports automatic |
|||
inflation of `gzip`, `br` (brotli) and `deflate` encodings. |
|||
|
|||
A new `body` object containing the parsed data is populated on the `request` |
|||
object after the middleware (i.e. `req.body`). This object will contain |
|||
key-value pairs, where the value can be a string or array (when `extended` is |
|||
`false`), or any type (when `extended` is `true`). |
|||
|
|||
#### Options |
|||
|
|||
The `urlencoded` function takes an optional `options` object that may contain |
|||
any of the following keys: |
|||
|
|||
##### extended |
|||
|
|||
The "extended" syntax allows for rich objects and arrays to be encoded into the |
|||
URL-encoded format, allowing for a JSON-like experience with URL-encoded. For |
|||
more information, please [see the qs |
|||
library](https://www.npmjs.org/package/qs#readme). |
|||
|
|||
Defaults to `false`. |
|||
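As an illustration of the difference (a sketch; the request body shown is hypothetical):

```js
// body sent as: user[name]=Mia&tags=a&tags=b

// with { extended: false } bracketed keys stay literal and only arrays are built:
//   req.body => { 'user[name]': 'Mia', tags: [ 'a', 'b' ] }

// with { extended: true } qs builds nested objects:
//   req.body => { user: { name: 'Mia' }, tags: [ 'a', 'b' ] }
app.use(bodyParser.urlencoded({ extended: true }))
```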
|
|||
##### inflate |
|||
|
|||
When set to `true`, then deflated (compressed) bodies will be inflated; when |
|||
`false`, deflated bodies are rejected. Defaults to `true`. |
|||
|
|||
##### limit |
|||
|
|||
Controls the maximum request body size. If this is a number, then the value |
|||
specifies the number of bytes; if it is a string, the value is passed to the |
|||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults |
|||
to `'100kb'`. |
|||
|
|||
##### parameterLimit |
|||
|
|||
The `parameterLimit` option controls the maximum number of parameters that |
|||
are allowed in the URL-encoded data. If a request contains more parameters |
|||
than this value, a 413 will be returned to the client. Defaults to `1000`. |
|||
|
|||
##### type |
|||
|
|||
The `type` option is used to determine what media type the middleware will |
|||
parse. This option can be a string, array of strings, or a function. If not |
|||
a function, `type` option is passed directly to the |
|||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can |
|||
be an extension name (like `urlencoded`), a mime type (like |
|||
`application/x-www-form-urlencoded`), or a mime type with a wildcard (like |
|||
`*/x-www-form-urlencoded`). If a function, the `type` option is called as |
|||
`fn(req)` and the request is parsed if it returns a truthy value. Defaults |
|||
to `application/x-www-form-urlencoded`. |
|||
|
|||
##### verify |
|||
|
|||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, |
|||
where `buf` is a `Buffer` of the raw request body and `encoding` is the |
|||
encoding of the request. The parsing can be aborted by throwing an error. |
|||
|
|||
##### defaultCharset |
|||
|
|||
The default charset to parse as, if not specified in content-type. Must be |
|||
either `utf-8` or `iso-8859-1`. Defaults to `utf-8`. |
|||
|
|||
##### charsetSentinel |
|||
|
|||
Whether to let the value of the `utf8` parameter take precedence as the charset |
|||
selector. It requires the form to contain a parameter named `utf8` with a value |
|||
of `✓`. Defaults to `false`. |
|||
|
|||
##### interpretNumericEntities |
|||
|
|||
Whether to decode numeric entities such as `☺` when parsing an iso-8859-1 |
|||
form. Defaults to `false`. |
|||
|
|||
|
|||
##### depth |
|||
|
|||
The `depth` option is used to configure the maximum depth of the `qs` library when `extended` is `true`. This allows you to limit how deeply nested keys are parsed and can be useful to prevent certain types of abuse. Defaults to `32`. It is recommended to keep this value as low as possible. |
|||
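For example, to tighten the limit further (a sketch):

```js
// allow at most 5 levels of bracket nesting in URL-encoded bodies;
// deeper input is rejected with a 400 (see "The input exceeded the depth" below)
app.use(bodyParser.urlencoded({ extended: true, depth: 5 }))
```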
|
|||
## Errors |
|||
|
|||
The middlewares provided by this module create errors using the |
|||
[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors |
|||
will typically have a `status`/`statusCode` property that contains the suggested |
|||
HTTP response code, an `expose` property to determine if the `message` property |
|||
should be displayed to the client, a `type` property to determine the type of |
|||
error without matching against the `message`, and a `body` property containing |
|||
the read body, if available. |
|||
|
|||
The following are the common errors created, though any error can come through |
|||
for various reasons. |
|||
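One way to act on these properties is a plain Express error handler registered after the parsers and routes; a sketch:

```js
app.use(function (err, req, res, next) {
  if (res.headersSent) return next(err)
  // status/statusCode carry the suggested HTTP response code,
  // expose indicates whether the message is safe to show the client
  res.status(err.status || err.statusCode || 500)
  res.json({
    type: err.type || 'unknown',
    message: err.expose ? err.message : 'invalid request body'
  })
})
```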
|
|||
### content encoding unsupported |
|||
|
|||
This error will occur when the request had a `Content-Encoding` header that |
|||
contained an encoding but the `inflate` option was set to `false`. The |
|||
`status` property is set to `415`, the `type` property is set to |
|||
`'encoding.unsupported'`, and the `encoding` property will be set to the |
|||
encoding that is unsupported. |
|||
|
|||
### entity parse failed |
|||
|
|||
This error will occur when the request contained an entity that could not be |
|||
parsed by the middleware. The `status` property is set to `400`, the `type` |
|||
property is set to `'entity.parse.failed'`, and the `body` property is set to |
|||
the entity value that failed parsing. |
|||
|
|||
### entity verify failed |
|||
|
|||
This error will occur when the request contained an entity that failed |
|||
verification by the defined `verify` option. The `status` property is |
|||
set to `403`, the `type` property is set to `'entity.verify.failed'`, and the |
|||
`body` property is set to the entity value that failed verification. |
|||
|
|||
### request aborted |
|||
|
|||
This error will occur when the request is aborted by the client before reading |
|||
the body has finished. The `received` property will be set to the number of |
|||
bytes received before the request was aborted and the `expected` property is |
|||
set to the number of expected bytes. The `status` property is set to `400` |
|||
and `type` property is set to `'request.aborted'`. |
|||
|
|||
### request entity too large |
|||
|
|||
This error will occur when the request body's size is larger than the "limit" |
|||
option. The `limit` property will be set to the byte limit and the `length` |
|||
property will be set to the request body's length. The `status` property is |
|||
set to `413` and the `type` property is set to `'entity.too.large'`. |
|||
|
|||
### request size did not match content length |
|||
|
|||
This error will occur when the request's length did not match the length from |
|||
the `Content-Length` header. This typically occurs when the request is malformed, |
|||
for example when the `Content-Length` header was calculated based on characters |
|||
instead of bytes. The `status` property is set to `400` and the `type` property |
|||
is set to `'request.size.invalid'`. |
|||
|
|||
### stream encoding should not be set |
|||
|
|||
This error will occur when something called the `req.setEncoding` method prior |
|||
to this middleware. This module operates directly on bytes only and you cannot |
|||
call `req.setEncoding` when using this module. The `status` property is set to |
|||
`500` and the `type` property is set to `'stream.encoding.set'`. |
|||
|
|||
### stream is not readable |
|||
|
|||
This error will occur when the request is no longer readable when this middleware |
|||
attempts to read it. This typically means something other than a middleware from |
|||
this module read the request body already and the middleware was also configured to |
|||
read the same request. The `status` property is set to `500` and the `type` |
|||
property is set to `'stream.not.readable'`. |
|||
|
|||
### too many parameters |
|||
|
|||
This error will occur when the content of the request exceeds the configured |
|||
`parameterLimit` for the `urlencoded` parser. The `status` property is set to |
|||
`413` and the `type` property is set to `'parameters.too.many'`. |
|||
|
|||
### unsupported charset "BOGUS" |
|||
|
|||
This error will occur when the request had a charset parameter in the |
|||
`Content-Type` header, but the `iconv-lite` module does not support it OR the |
|||
parser does not support it. The charset is contained in the message as well |
|||
as in the `charset` property. The `status` property is set to `415`, the |
|||
`type` property is set to `'charset.unsupported'`, and the `charset` property |
|||
is set to the charset that is unsupported. |
|||
|
|||
### unsupported content encoding "bogus" |
|||
|
|||
This error will occur when the request had a `Content-Encoding` header that |
|||
contained an unsupported encoding. The encoding is contained in the message |
|||
as well as in the `encoding` property. The `status` property is set to `415`, |
|||
the `type` property is set to `'encoding.unsupported'`, and the `encoding` |
|||
property is set to the encoding that is unsupported. |
|||
|
|||
### The input exceeded the depth |
|||
|
|||
This error occurs when using `bodyParser.urlencoded` with the `extended` property set to `true` and the input exceeds the configured `depth` option. The `status` property is set to `400`. It is recommended to review the `depth` option and evaluate if it requires a higher value. When the `depth` option is set to `32` (default value), the error will not be thrown. |
|||
|
|||
## Examples |
|||
|
|||
### Express/Connect top-level generic |
|||
|
|||
This example demonstrates adding a generic JSON and URL-encoded parser as a |
|||
top-level middleware, which will parse the bodies of all incoming requests. |
|||
This is the simplest setup. |
|||
|
|||
```js |
|||
const express = require('express') |
|||
const bodyParser = require('body-parser') |
|||
|
|||
const app = express() |
|||
|
|||
// parse application/x-www-form-urlencoded |
|||
app.use(bodyParser.urlencoded()) |
|||
|
|||
// parse application/json |
|||
app.use(bodyParser.json()) |
|||
|
|||
app.use(function (req, res) { |
|||
res.setHeader('Content-Type', 'text/plain') |
|||
res.write('you posted:\n') |
|||
res.end(String(JSON.stringify(req.body, null, 2))) |
|||
}) |
|||
``` |
|||
|
|||
### Express route-specific |
|||
|
|||
This example demonstrates adding body parsers specifically to the routes that |
|||
need them. In general, this is the recommended way to use body-parser with |
|||
Express. |
|||
|
|||
```js |
|||
const express = require('express') |
|||
const bodyParser = require('body-parser') |
|||
|
|||
const app = express() |
|||
|
|||
// create application/json parser |
|||
const jsonParser = bodyParser.json() |
|||
|
|||
// create application/x-www-form-urlencoded parser |
|||
const urlencodedParser = bodyParser.urlencoded() |
|||
|
|||
// POST /login gets urlencoded bodies |
|||
app.post('/login', urlencodedParser, function (req, res) { |
|||
if (!req.body || !req.body.username) return res.sendStatus(400) |
|||
res.send('welcome, ' + req.body.username) |
|||
}) |
|||
|
|||
// POST /api/users gets JSON bodies |
|||
app.post('/api/users', jsonParser, function (req, res) { |
|||
if (!req.body) return res.sendStatus(400) |
|||
// create user in req.body |
|||
}) |
|||
``` |
|||
|
|||
### Change accepted type for parsers |
|||
|
|||
All the parsers accept a `type` option which allows you to change the |
|||
`Content-Type` that the middleware will parse. |
|||
|
|||
```js |
|||
const express = require('express') |
|||
const bodyParser = require('body-parser') |
|||
|
|||
const app = express() |
|||
|
|||
// parse various different custom JSON types as JSON |
|||
app.use(bodyParser.json({ type: 'application/*+json' })) |
|||
|
|||
// parse some custom thing into a Buffer |
|||
app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) |
|||
|
|||
// parse an HTML body into a string |
|||
app.use(bodyParser.text({ type: 'text/html' })) |
|||
``` |
|||
|
|||
## License |
|||
|
|||
[MIT](LICENSE) |
|||
|
|||
[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci |
|||
[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml |
|||
[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master |
|||
[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master |
|||
[node-version-image]: https://badgen.net/npm/node/body-parser |
|||
[node-version-url]: https://nodejs.org/en/download |
|||
[npm-downloads-image]: https://badgen.net/npm/dm/body-parser |
|||
[npm-url]: https://npmjs.org/package/body-parser |
|||
[npm-version-image]: https://badgen.net/npm/v/body-parser |
|||
[ossf-scorecard-badge]: https://api.scorecard.dev/projects/github.com/expressjs/body-parser/badge |
|||
[ossf-scorecard-visualizer]: https://ossf.github.io/scorecard-visualizer/#/projects/github.com/expressjs/body-parser |
|||
@ -0,0 +1,80 @@ |
|||
/*! |
|||
* body-parser |
|||
* Copyright(c) 2014-2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* @typedef Parsers |
|||
* @type {function} |
|||
* @property {function} json |
|||
* @property {function} raw |
|||
* @property {function} text |
|||
* @property {function} urlencoded |
|||
*/ |
|||
|
|||
/** |
|||
* Module exports. |
|||
* @type {Parsers} |
|||
*/ |
|||
|
|||
exports = module.exports = bodyParser |
|||
|
|||
/** |
|||
* JSON parser. |
|||
* @public |
|||
*/ |
|||
|
|||
Object.defineProperty(exports, 'json', { |
|||
configurable: true, |
|||
enumerable: true, |
|||
get: () => require('./lib/types/json') |
|||
}) |
|||
|
|||
/** |
|||
* Raw parser. |
|||
* @public |
|||
*/ |
|||
|
|||
Object.defineProperty(exports, 'raw', { |
|||
configurable: true, |
|||
enumerable: true, |
|||
get: () => require('./lib/types/raw') |
|||
}) |
|||
|
|||
/** |
|||
* Text parser. |
|||
* @public |
|||
*/ |
|||
|
|||
Object.defineProperty(exports, 'text', { |
|||
configurable: true, |
|||
enumerable: true, |
|||
get: () => require('./lib/types/text') |
|||
}) |
|||
|
|||
/** |
|||
* URL-encoded parser. |
|||
* @public |
|||
*/ |
|||
|
|||
Object.defineProperty(exports, 'urlencoded', { |
|||
configurable: true, |
|||
enumerable: true, |
|||
get: () => require('./lib/types/urlencoded') |
|||
}) |
|||
|
|||
/** |
|||
* Create a middleware to parse json and urlencoded bodies. |
|||
* |
|||
* @param {object} [options] |
|||
* @return {function} |
|||
* @deprecated |
|||
* @public |
|||
*/ |
|||
|
|||
function bodyParser () { |
|||
throw new Error('The bodyParser() generic has been split into individual middleware to use instead.') |
|||
} |
|||
@ -0,0 +1,210 @@ |
|||
/*! |
|||
* body-parser |
|||
* Copyright(c) 2014-2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
* @private |
|||
*/ |
|||
|
|||
var createError = require('http-errors') |
|||
var getBody = require('raw-body') |
|||
var iconv = require('iconv-lite') |
|||
var onFinished = require('on-finished') |
|||
var zlib = require('node:zlib') |
|||
|
|||
/** |
|||
* Module exports. |
|||
*/ |
|||
|
|||
module.exports = read |
|||
|
|||
/** |
|||
* Read a request into a buffer and parse. |
|||
* |
|||
* @param {object} req |
|||
* @param {object} res |
|||
* @param {function} next |
|||
* @param {function} parse |
|||
* @param {function} debug |
|||
* @param {object} options |
|||
* @private |
|||
*/ |
|||
|
|||
function read (req, res, next, parse, debug, options) { |
|||
var length |
|||
var opts = options |
|||
var stream |
|||
|
|||
// read options |
|||
var encoding = opts.encoding !== null |
|||
? opts.encoding |
|||
: null |
|||
var verify = opts.verify |
|||
|
|||
try { |
|||
// get the content stream |
|||
stream = contentstream(req, debug, opts.inflate) |
|||
length = stream.length |
|||
stream.length = undefined |
|||
} catch (err) { |
|||
return next(err) |
|||
} |
|||
|
|||
// set raw-body options |
|||
opts.length = length |
|||
opts.encoding = verify |
|||
? null |
|||
: encoding |
|||
|
|||
// assert charset is supported |
|||
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { |
|||
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { |
|||
charset: encoding.toLowerCase(), |
|||
type: 'charset.unsupported' |
|||
})) |
|||
} |
|||
|
|||
// read body |
|||
debug('read body') |
|||
getBody(stream, opts, function (error, body) { |
|||
if (error) { |
|||
var _error |
|||
|
|||
if (error.type === 'encoding.unsupported') { |
|||
// echo back charset |
|||
_error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { |
|||
charset: encoding.toLowerCase(), |
|||
type: 'charset.unsupported' |
|||
}) |
|||
} else { |
|||
// set status code on error |
|||
_error = createError(400, error) |
|||
} |
|||
|
|||
// unpipe from stream and destroy |
|||
if (stream !== req) { |
|||
req.unpipe() |
|||
stream.destroy() |
|||
} |
|||
|
|||
// read off entire request |
|||
dump(req, function onfinished () { |
|||
next(createError(400, _error)) |
|||
}) |
|||
return |
|||
} |
|||
|
|||
// verify |
|||
if (verify) { |
|||
try { |
|||
debug('verify body') |
|||
verify(req, res, body, encoding) |
|||
} catch (err) { |
|||
next(createError(403, err, { |
|||
body: body, |
|||
type: err.type || 'entity.verify.failed' |
|||
})) |
|||
return |
|||
} |
|||
} |
|||
|
|||
// parse |
|||
var str = body |
|||
try { |
|||
debug('parse body') |
|||
str = typeof body !== 'string' && encoding !== null |
|||
? iconv.decode(body, encoding) |
|||
: body |
|||
req.body = parse(str, encoding) |
|||
} catch (err) { |
|||
next(createError(400, err, { |
|||
body: str, |
|||
type: err.type || 'entity.parse.failed' |
|||
})) |
|||
return |
|||
} |
|||
|
|||
next() |
|||
}) |
|||
} |
|||
|
|||
/** |
|||
* Get the content stream of the request. |
|||
* |
|||
* @param {object} req |
|||
* @param {function} debug |
|||
* @param {boolean} [inflate=true] |
|||
* @return {object} |
|||
* @api private |
|||
*/ |
|||
|
|||
function contentstream (req, debug, inflate) { |
|||
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() |
|||
var length = req.headers['content-length'] |
|||
|
|||
debug('content-encoding "%s"', encoding) |
|||
|
|||
if (inflate === false && encoding !== 'identity') { |
|||
throw createError(415, 'content encoding unsupported', { |
|||
encoding: encoding, |
|||
type: 'encoding.unsupported' |
|||
}) |
|||
} |
|||
|
|||
if (encoding === 'identity') { |
|||
req.length = length |
|||
return req |
|||
} |
|||
|
|||
var stream = createDecompressionStream(encoding, debug) |
|||
req.pipe(stream) |
|||
return stream |
|||
} |
|||
|
|||
/** |
|||
* Create a decompression stream for the given encoding. |
|||
* @param {string} encoding |
|||
* @param {function} debug |
|||
* @return {object} |
|||
* @api private |
|||
*/ |
|||
function createDecompressionStream (encoding, debug) { |
|||
switch (encoding) { |
|||
case 'deflate': |
|||
debug('inflate body') |
|||
return zlib.createInflate() |
|||
case 'gzip': |
|||
debug('gunzip body') |
|||
return zlib.createGunzip() |
|||
case 'br': |
|||
debug('brotli decompress body') |
|||
return zlib.createBrotliDecompress() |
|||
default: |
|||
throw createError(415, 'unsupported content encoding "' + encoding + '"', { |
|||
encoding: encoding, |
|||
type: 'encoding.unsupported' |
|||
}) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Dump the contents of a request. |
|||
* |
|||
* @param {object} req |
|||
* @param {function} callback |
|||
* @api private |
|||
*/ |
|||
|
|||
function dump (req, callback) { |
|||
if (onFinished.isFinished(req)) { |
|||
callback(null) |
|||
} else { |
|||
onFinished(req, callback) |
|||
req.resume() |
|||
} |
|||
} |
|||
@ -0,0 +1,247 @@ |
|||
/*! |
|||
* body-parser |
|||
* Copyright(c) 2014 Jonathan Ong |
|||
* Copyright(c) 2014-2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
* @private |
|||
*/ |
|||
|
|||
var bytes = require('bytes') |
|||
var contentType = require('content-type') |
|||
var createError = require('http-errors') |
|||
var debug = require('debug')('body-parser:json') |
|||
var read = require('../read') |
|||
var typeis = require('type-is') |
|||
|
|||
/** |
|||
* Module exports. |
|||
*/ |
|||
|
|||
module.exports = json |
|||
|
|||
/** |
|||
* RegExp to match the first non-space in a string. |
|||
* |
|||
* Allowed whitespace is defined in RFC 7159: |
|||
* |
|||
* ws = *( |
|||
* %x20 / ; Space |
|||
* %x09 / ; Horizontal tab |
|||
* %x0A / ; Line feed or New line |
|||
* %x0D ) ; Carriage return |
|||
*/ |
|||
|
|||
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex |
|||
|
|||
var JSON_SYNTAX_CHAR = '#' |
|||
var JSON_SYNTAX_REGEXP = /#+/g |
|||
|
|||
/** |
|||
* Create a middleware to parse JSON bodies. |
|||
* |
|||
* @param {object} [options] |
|||
* @return {function} |
|||
* @public |
|||
*/ |
|||
|
|||
function json (options) { |
|||
var opts = options || {} |
|||
|
|||
var limit = typeof opts.limit !== 'number' |
|||
? bytes.parse(opts.limit || '100kb') |
|||
: opts.limit |
|||
var inflate = opts.inflate !== false |
|||
var reviver = opts.reviver |
|||
var strict = opts.strict !== false |
|||
var type = opts.type || 'application/json' |
|||
var verify = opts.verify || false |
|||
|
|||
if (verify !== false && typeof verify !== 'function') { |
|||
throw new TypeError('option verify must be function') |
|||
} |
|||
|
|||
// create the appropriate type checking function |
|||
var shouldParse = typeof type !== 'function' |
|||
? typeChecker(type) |
|||
: type |
|||
|
|||
function parse (body) { |
|||
if (body.length === 0) { |
|||
// special-case empty json body, as it's a common client-side mistake |
|||
// TODO: maybe make this configurable or part of "strict" option |
|||
return {} |
|||
} |
|||
|
|||
if (strict) { |
|||
var first = firstchar(body) |
|||
|
|||
if (first !== '{' && first !== '[') { |
|||
debug('strict violation') |
|||
throw createStrictSyntaxError(body, first) |
|||
} |
|||
} |
|||
|
|||
try { |
|||
debug('parse json') |
|||
return JSON.parse(body, reviver) |
|||
} catch (e) { |
|||
throw normalizeJsonSyntaxError(e, { |
|||
message: e.message, |
|||
stack: e.stack |
|||
}) |
|||
} |
|||
} |
|||
|
|||
return function jsonParser (req, res, next) { |
|||
if (req._body) { |
|||
debug('body already parsed') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
req.body = req.body || {} |
|||
|
|||
// skip requests without bodies |
|||
if (!typeis.hasBody(req)) { |
|||
debug('skip empty body') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
debug('content-type %j', req.headers['content-type']) |
|||
|
|||
// determine if request should be parsed |
|||
if (!shouldParse(req)) { |
|||
debug('skip parsing') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
// assert charset per RFC 7159 sec 8.1
|
|||
var charset = getCharset(req) || 'utf-8' |
|||
if (charset.slice(0, 4) !== 'utf-') { |
|||
debug('invalid charset') |
|||
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { |
|||
charset: charset, |
|||
type: 'charset.unsupported' |
|||
})) |
|||
return |
|||
} |
|||
|
|||
// read
|
|||
read(req, res, next, parse, debug, { |
|||
encoding: charset, |
|||
inflate: inflate, |
|||
limit: limit, |
|||
verify: verify |
|||
}) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Create strict violation syntax error matching native error. |
|||
* |
|||
* @param {string} str |
|||
* @param {string} char |
|||
* @return {Error} |
|||
* @private |
|||
*/ |
|||
|
|||
function createStrictSyntaxError (str, char) { |
|||
var index = str.indexOf(char) |
|||
var partial = '' |
|||
|
|||
if (index !== -1) { |
|||
partial = str.substring(0, index) + JSON_SYNTAX_CHAR |
|||
|
|||
for (var i = index + 1; i < str.length; i++) { |
|||
partial += JSON_SYNTAX_CHAR |
|||
} |
|||
} |
|||
|
|||
try { |
|||
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') |
|||
} catch (e) { |
|||
return normalizeJsonSyntaxError(e, { |
|||
message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) { |
|||
return str.substring(index, index + placeholder.length) |
|||
}), |
|||
stack: e.stack |
|||
}) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the first non-whitespace character in a string. |
|||
* |
|||
* @param {string} str |
|||
 * @return {string|undefined} |
|||
* @private |
|||
*/ |
|||
|
|||
function firstchar (str) { |
|||
var match = FIRST_CHAR_REGEXP.exec(str) |
|||
|
|||
return match |
|||
? match[1] |
|||
: undefined |
|||
} |
|||
|
|||
/** |
|||
* Get the charset of a request. |
|||
* |
|||
* @param {object} req |
|||
* @api private |
|||
*/ |
|||
|
|||
function getCharset (req) { |
|||
try { |
|||
return (contentType.parse(req).parameters.charset || '').toLowerCase() |
|||
} catch (e) { |
|||
return undefined |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Normalize a SyntaxError for JSON.parse. |
|||
* |
|||
* @param {SyntaxError} error |
|||
* @param {object} obj |
|||
* @return {SyntaxError} |
|||
*/ |
|||
|
|||
function normalizeJsonSyntaxError (error, obj) { |
|||
var keys = Object.getOwnPropertyNames(error) |
|||
|
|||
for (var i = 0; i < keys.length; i++) { |
|||
var key = keys[i] |
|||
if (key !== 'stack' && key !== 'message') { |
|||
delete error[key] |
|||
} |
|||
} |
|||
|
|||
// replace stack before message for Node.js 0.10 and below
|
|||
error.stack = obj.stack.replace(error.message, obj.message) |
|||
error.message = obj.message |
|||
|
|||
return error |
|||
} |
|||
|
|||
/** |
|||
* Get the simple type checker. |
|||
* |
|||
* @param {string} type |
|||
* @return {function} |
|||
*/ |
|||
|
|||
function typeChecker (type) { |
|||
return function checkType (req) { |
|||
return Boolean(typeis(req, type)) |
|||
} |
|||
} |
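A short usage sketch for the middleware above, using only options this file reads (`limit`, `strict`, `type`); the route, payload, and port are illustrative:

```js
const express = require('express');
const bodyParser = require('body-parser');

const app = express();
app.use(bodyParser.json({ limit: '200kb', strict: true, type: 'application/json' }));

app.post('/items', (req, res) => {
  // req.body holds the parsed JSON object ({} for an empty body, as special-cased above)
  res.json({ received: req.body });
});

app.listen(3000);
```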
|||
@ -0,0 +1,101 @@ |
|||
/*! |
|||
* body-parser |
|||
* Copyright(c) 2014-2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
*/ |
|||
|
|||
var bytes = require('bytes') |
|||
var debug = require('debug')('body-parser:raw') |
|||
var read = require('../read') |
|||
var typeis = require('type-is') |
|||
|
|||
/** |
|||
* Module exports. |
|||
*/ |
|||
|
|||
module.exports = raw |
|||
|
|||
/** |
|||
* Create a middleware to parse raw bodies. |
|||
* |
|||
* @param {object} [options] |
|||
* @return {function} |
|||
* @api public |
|||
*/ |
|||
|
|||
function raw (options) { |
|||
var opts = options || {} |
|||
|
|||
var inflate = opts.inflate !== false |
|||
var limit = typeof opts.limit !== 'number' |
|||
? bytes.parse(opts.limit || '100kb') |
|||
: opts.limit |
|||
var type = opts.type || 'application/octet-stream' |
|||
var verify = opts.verify || false |
|||
|
|||
if (verify !== false && typeof verify !== 'function') { |
|||
throw new TypeError('option verify must be function') |
|||
} |
|||
|
|||
// create the appropriate type checking function
|
|||
var shouldParse = typeof type !== 'function' |
|||
? typeChecker(type) |
|||
: type |
|||
|
|||
function parse (buf) { |
|||
return buf |
|||
} |
|||
|
|||
return function rawParser (req, res, next) { |
|||
if (req._body) { |
|||
debug('body already parsed') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
req.body = req.body || {} |
|||
|
|||
// skip requests without bodies
|
|||
if (!typeis.hasBody(req)) { |
|||
debug('skip empty body') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
debug('content-type %j', req.headers['content-type']) |
|||
|
|||
// determine if request should be parsed
|
|||
if (!shouldParse(req)) { |
|||
debug('skip parsing') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
// read
|
|||
read(req, res, next, parse, debug, { |
|||
encoding: null, |
|||
inflate: inflate, |
|||
limit: limit, |
|||
verify: verify |
|||
}) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the simple type checker. |
|||
* |
|||
* @param {string} type |
|||
* @return {function} |
|||
*/ |
|||
|
|||
function typeChecker (type) { |
|||
return function checkType (req) { |
|||
return Boolean(typeis(req, type)) |
|||
} |
|||
} |
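A brief sketch of mounting the raw parser defined above; since its `parse()` returns the buffer unchanged, `req.body` is a `Buffer`. The route and content type are illustrative:

```js
const express = require('express');
const bodyParser = require('body-parser');

const app = express();
app.use(bodyParser.raw({ type: 'application/octet-stream', limit: '1mb' }));

app.post('/upload', (req, res) => {
  // req.body is a Buffer containing the raw request bytes
  res.json({ bytes: req.body.length });
});
```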
|||
@ -0,0 +1,121 @@ |
|||
/*! |
|||
* body-parser |
|||
* Copyright(c) 2014-2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
*/ |
|||
|
|||
var bytes = require('bytes') |
|||
var contentType = require('content-type') |
|||
var debug = require('debug')('body-parser:text') |
|||
var read = require('../read') |
|||
var typeis = require('type-is') |
|||
|
|||
/** |
|||
* Module exports. |
|||
*/ |
|||
|
|||
module.exports = text |
|||
|
|||
/** |
|||
* Create a middleware to parse text bodies. |
|||
* |
|||
* @param {object} [options] |
|||
* @return {function} |
|||
* @api public |
|||
*/ |
|||
|
|||
function text (options) { |
|||
var opts = options || {} |
|||
|
|||
var defaultCharset = opts.defaultCharset || 'utf-8' |
|||
var inflate = opts.inflate !== false |
|||
var limit = typeof opts.limit !== 'number' |
|||
? bytes.parse(opts.limit || '100kb') |
|||
: opts.limit |
|||
var type = opts.type || 'text/plain' |
|||
var verify = opts.verify || false |
|||
|
|||
if (verify !== false && typeof verify !== 'function') { |
|||
throw new TypeError('option verify must be function') |
|||
} |
|||
|
|||
// create the appropriate type checking function
|
|||
var shouldParse = typeof type !== 'function' |
|||
? typeChecker(type) |
|||
: type |
|||
|
|||
function parse (buf) { |
|||
return buf |
|||
} |
|||
|
|||
return function textParser (req, res, next) { |
|||
if (req._body) { |
|||
debug('body already parsed') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
req.body = req.body || {} |
|||
|
|||
// skip requests without bodies
|
|||
if (!typeis.hasBody(req)) { |
|||
debug('skip empty body') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
debug('content-type %j', req.headers['content-type']) |
|||
|
|||
// determine if request should be parsed
|
|||
if (!shouldParse(req)) { |
|||
debug('skip parsing') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
// get charset
|
|||
var charset = getCharset(req) || defaultCharset |
|||
|
|||
// read
|
|||
read(req, res, next, parse, debug, { |
|||
encoding: charset, |
|||
inflate: inflate, |
|||
limit: limit, |
|||
verify: verify |
|||
}) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the charset of a request. |
|||
* |
|||
* @param {object} req |
|||
* @api private |
|||
*/ |
|||
|
|||
function getCharset (req) { |
|||
try { |
|||
return (contentType.parse(req).parameters.charset || '').toLowerCase() |
|||
} catch (e) { |
|||
return undefined |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the simple type checker. |
|||
* |
|||
* @param {string} type |
|||
* @return {function} |
|||
*/ |
|||
|
|||
function typeChecker (type) { |
|||
return function checkType (req) { |
|||
return Boolean(typeis(req, type)) |
|||
} |
|||
} |
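A similar sketch for the text parser above, showing `defaultCharset` and a wildcard `type`; the values are illustrative:

```js
const express = require('express');
const bodyParser = require('body-parser');

const app = express();
app.use(bodyParser.text({ type: 'text/*', defaultCharset: 'utf-8', limit: '50kb' }));

app.post('/notes', (req, res) => {
  // req.body is a string decoded with the request charset (or defaultCharset)
  res.json({ length: req.body.length });
});
```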
|||
@ -0,0 +1,307 @@ |
|||
/*! |
|||
* body-parser |
|||
* Copyright(c) 2014 Jonathan Ong |
|||
* Copyright(c) 2014-2015 Douglas Christopher Wilson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
* @private |
|||
*/ |
|||
|
|||
var bytes = require('bytes') |
|||
var contentType = require('content-type') |
|||
var createError = require('http-errors') |
|||
var debug = require('debug')('body-parser:urlencoded') |
|||
var deprecate = require('depd')('body-parser') |
|||
var read = require('../read') |
|||
var typeis = require('type-is') |
|||
|
|||
/** |
|||
* Module exports. |
|||
*/ |
|||
|
|||
module.exports = urlencoded |
|||
|
|||
/** |
|||
* Cache of parser modules. |
|||
*/ |
|||
|
|||
var parsers = Object.create(null) |
|||
|
|||
/** |
|||
* Create a middleware to parse urlencoded bodies. |
|||
* |
|||
* @param {object} [options] |
|||
* @return {function} |
|||
* @public |
|||
*/ |
|||
|
|||
function urlencoded (options) { |
|||
var opts = options || {} |
|||
|
|||
// notice because option default will flip in next major
|
|||
if (opts.extended === undefined) { |
|||
deprecate('undefined extended: provide extended option') |
|||
} |
|||
|
|||
var extended = opts.extended !== false |
|||
var inflate = opts.inflate !== false |
|||
var limit = typeof opts.limit !== 'number' |
|||
? bytes.parse(opts.limit || '100kb') |
|||
: opts.limit |
|||
var type = opts.type || 'application/x-www-form-urlencoded' |
|||
var verify = opts.verify || false |
|||
var depth = typeof opts.depth !== 'number' |
|||
? Number(opts.depth || 32) |
|||
: opts.depth |
|||
|
|||
if (verify !== false && typeof verify !== 'function') { |
|||
throw new TypeError('option verify must be function') |
|||
} |
|||
|
|||
// create the appropriate query parser
|
|||
var queryparse = extended |
|||
? extendedparser(opts) |
|||
: simpleparser(opts) |
|||
|
|||
// create the appropriate type checking function
|
|||
var shouldParse = typeof type !== 'function' |
|||
? typeChecker(type) |
|||
: type |
|||
|
|||
function parse (body) { |
|||
return body.length |
|||
? queryparse(body) |
|||
: {} |
|||
} |
|||
|
|||
return function urlencodedParser (req, res, next) { |
|||
if (req._body) { |
|||
debug('body already parsed') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
req.body = req.body || {} |
|||
|
|||
// skip requests without bodies
|
|||
if (!typeis.hasBody(req)) { |
|||
debug('skip empty body') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
debug('content-type %j', req.headers['content-type']) |
|||
|
|||
// determine if request should be parsed
|
|||
if (!shouldParse(req)) { |
|||
debug('skip parsing') |
|||
next() |
|||
return |
|||
} |
|||
|
|||
// assert charset
|
|||
var charset = getCharset(req) || 'utf-8' |
|||
if (charset !== 'utf-8') { |
|||
debug('invalid charset') |
|||
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { |
|||
charset: charset, |
|||
type: 'charset.unsupported' |
|||
})) |
|||
return |
|||
} |
|||
|
|||
// read
|
|||
read(req, res, next, parse, debug, { |
|||
debug: debug, |
|||
encoding: charset, |
|||
inflate: inflate, |
|||
limit: limit, |
|||
verify: verify, |
|||
depth: depth |
|||
}) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the extended query parser. |
|||
* |
|||
* @param {object} options |
|||
*/ |
|||
|
|||
function extendedparser (options) { |
|||
var parameterLimit = options.parameterLimit !== undefined |
|||
? options.parameterLimit |
|||
: 1000 |
|||
|
|||
var depth = typeof options.depth !== 'number' |
|||
? Number(options.depth || 32) |
|||
: options.depth |
|||
var parse = parser('qs') |
|||
|
|||
if (isNaN(parameterLimit) || parameterLimit < 1) { |
|||
throw new TypeError('option parameterLimit must be a positive number') |
|||
} |
|||
|
|||
if (isNaN(depth) || depth < 0) { |
|||
throw new TypeError('option depth must be a zero or a positive number') |
|||
} |
|||
|
|||
if (isFinite(parameterLimit)) { |
|||
parameterLimit = parameterLimit | 0 |
|||
} |
|||
|
|||
return function queryparse (body) { |
|||
var paramCount = parameterCount(body, parameterLimit) |
|||
|
|||
if (paramCount === undefined) { |
|||
debug('too many parameters') |
|||
throw createError(413, 'too many parameters', { |
|||
type: 'parameters.too.many' |
|||
}) |
|||
} |
|||
|
|||
var arrayLimit = Math.max(100, paramCount) |
|||
|
|||
debug('parse extended urlencoding') |
|||
try { |
|||
return parse(body, { |
|||
allowPrototypes: true, |
|||
arrayLimit: arrayLimit, |
|||
depth: depth, |
|||
strictDepth: true, |
|||
parameterLimit: parameterLimit |
|||
}) |
|||
} catch (err) { |
|||
if (err instanceof RangeError) { |
|||
throw createError(400, 'The input exceeded the depth', { |
|||
type: 'querystring.parse.rangeError' |
|||
}) |
|||
} else { |
|||
throw err |
|||
} |
|||
} |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the charset of a request. |
|||
* |
|||
* @param {object} req |
|||
* @api private |
|||
*/ |
|||
|
|||
function getCharset (req) { |
|||
try { |
|||
return (contentType.parse(req).parameters.charset || '').toLowerCase() |
|||
} catch (e) { |
|||
return undefined |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Count the number of parameters, stopping once limit reached |
|||
* |
|||
* @param {string} body |
|||
* @param {number} limit |
|||
* @api private |
|||
*/ |
|||
|
|||
function parameterCount (body, limit) { |
|||
var count = 0 |
|||
var index = 0 |
|||
|
|||
while ((index = body.indexOf('&', index)) !== -1) { |
|||
count++ |
|||
index++ |
|||
|
|||
if (count === limit) { |
|||
return undefined |
|||
} |
|||
} |
|||
|
|||
return count |
|||
} |
|||
|
|||
/** |
|||
* Get parser for module name dynamically. |
|||
* |
|||
* @param {string} name |
|||
* @return {function} |
|||
* @api private |
|||
*/ |
|||
|
|||
function parser (name) { |
|||
var mod = parsers[name] |
|||
|
|||
if (mod !== undefined) { |
|||
return mod.parse |
|||
} |
|||
|
|||
// this uses a switch for static require analysis
|
|||
switch (name) { |
|||
case 'qs': |
|||
mod = require('qs') |
|||
break |
|||
case 'querystring': |
|||
mod = require('querystring') |
|||
break |
|||
} |
|||
|
|||
// store to prevent invoking require()
|
|||
parsers[name] = mod |
|||
|
|||
return mod.parse |
|||
} |
|||
|
|||
/** |
|||
* Get the simple query parser. |
|||
* |
|||
* @param {object} options |
|||
*/ |
|||
|
|||
function simpleparser (options) { |
|||
var parameterLimit = options.parameterLimit !== undefined |
|||
? options.parameterLimit |
|||
: 1000 |
|||
var parse = parser('querystring') |
|||
|
|||
if (isNaN(parameterLimit) || parameterLimit < 1) { |
|||
throw new TypeError('option parameterLimit must be a positive number') |
|||
} |
|||
|
|||
if (isFinite(parameterLimit)) { |
|||
parameterLimit = parameterLimit | 0 |
|||
} |
|||
|
|||
return function queryparse (body) { |
|||
var paramCount = parameterCount(body, parameterLimit) |
|||
|
|||
if (paramCount === undefined) { |
|||
debug('too many parameters') |
|||
throw createError(413, 'too many parameters', { |
|||
type: 'parameters.too.many' |
|||
}) |
|||
} |
|||
|
|||
debug('parse urlencoding') |
|||
return parse(body, undefined, undefined, { maxKeys: parameterLimit }) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the simple type checker. |
|||
* |
|||
* @param {string} type |
|||
* @return {function} |
|||
*/ |
|||
|
|||
function typeChecker (type) { |
|||
return function checkType (req) { |
|||
return Boolean(typeis(req, type)) |
|||
} |
|||
} |
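A usage sketch for the urlencoded parser above. `extended: true` routes parsing through `qs`, while `parameterLimit` and `depth` feed the guards shown above (413 "too many parameters", 400 on excessive nesting); the values are illustrative:

```js
const express = require('express');
const bodyParser = require('body-parser');

const app = express();
app.use(bodyParser.urlencoded({
  extended: true,      // use the qs-based extended parser
  parameterLimit: 500, // cap the number of key=value pairs
  depth: 10,           // cap nesting like a[b][c]=1
  limit: '100kb'
}));

app.post('/form', (req, res) => {
  res.json(req.body);
});
```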
|||
@ -0,0 +1,83 @@ |
|||
'use strict' |
|||
|
|||
/** |
|||
* Module dependencies. |
|||
*/ |
|||
|
|||
var bytes = require('bytes') |
|||
var contentType = require('content-type') |
|||
var typeis = require('type-is') |
|||
|
|||
/** |
|||
* Module exports. |
|||
*/ |
|||
|
|||
module.exports = { |
|||
getCharset, |
|||
normalizeOptions |
|||
} |
|||
|
|||
/** |
|||
* Get the charset of a request. |
|||
* |
|||
* @param {object} req |
|||
* @api private |
|||
*/ |
|||
|
|||
function getCharset (req) { |
|||
try { |
|||
return (contentType.parse(req).parameters.charset || '').toLowerCase() |
|||
} catch { |
|||
return undefined |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Get the simple type checker. |
|||
* |
|||
* @param {string | string[]} type |
|||
* @return {function} |
|||
*/ |
|||
|
|||
function typeChecker (type) { |
|||
return function checkType (req) { |
|||
return Boolean(typeis(req, type)) |
|||
} |
|||
} |
|||
|
|||
/** |
|||
* Normalizes the common options for all parsers. |
|||
* |
|||
* @param {object} options options to normalize |
|||
* @param {string | string[] | function} defaultType default content type(s) or a function to determine it |
|||
* @returns {object} |
|||
*/ |
|||
function normalizeOptions (options, defaultType) { |
|||
if (!defaultType) { |
|||
// Parsers must define a default content type
|
|||
throw new TypeError('defaultType must be provided') |
|||
} |
|||
|
|||
var inflate = options?.inflate !== false |
|||
var limit = typeof options?.limit !== 'number' |
|||
? bytes.parse(options?.limit || '100kb') |
|||
: options?.limit |
|||
var type = options?.type || defaultType |
|||
var verify = options?.verify || false |
|||
|
|||
if (verify !== false && typeof verify !== 'function') { |
|||
throw new TypeError('option verify must be function') |
|||
} |
|||
|
|||
// create the appropriate type checking function
|
|||
var shouldParse = typeof type !== 'function' |
|||
? typeChecker(type) |
|||
: type |
|||
|
|||
return { |
|||
inflate, |
|||
limit, |
|||
verify, |
|||
shouldParse |
|||
} |
|||
} |
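For orientation, a hypothetical sketch of how a parser module might consume `normalizeOptions()` and `getCharset()` from this file; the parser itself is not part of this package:

```js
const { normalizeOptions, getCharset } = require('./utils');

// Hypothetical parser factory reusing the shared option handling above.
function makeParser (options) {
  const { inflate, limit, verify, shouldParse } = normalizeOptions(options, 'text/plain');

  return function parserMiddleware (req, res, next) {
    if (!shouldParse(req)) return next();

    const charset = getCharset(req) || 'utf-8';
    // A real parser would now stream the body, honouring inflate/limit/verify,
    // and decode it with `charset` before calling next().
    next();
  };
}

module.exports = makeParser;
```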
|||
@ -0,0 +1,49 @@ |
|||
{ |
|||
"name": "body-parser", |
|||
"description": "Node.js body parsing middleware", |
|||
"version": "2.2.0", |
|||
"contributors": [ |
|||
"Douglas Christopher Wilson <doug@somethingdoug.com>", |
|||
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)" |
|||
], |
|||
"license": "MIT", |
|||
"repository": "expressjs/body-parser", |
|||
"dependencies": { |
|||
"bytes": "^3.1.2", |
|||
"content-type": "^1.0.5", |
|||
"debug": "^4.4.0", |
|||
"http-errors": "^2.0.0", |
|||
"iconv-lite": "^0.6.3", |
|||
"on-finished": "^2.4.1", |
|||
"qs": "^6.14.0", |
|||
"raw-body": "^3.0.0", |
|||
"type-is": "^2.0.0" |
|||
}, |
|||
"devDependencies": { |
|||
"eslint": "8.34.0", |
|||
"eslint-config-standard": "14.1.1", |
|||
"eslint-plugin-import": "2.27.5", |
|||
"eslint-plugin-markdown": "3.0.0", |
|||
"eslint-plugin-node": "11.1.0", |
|||
"eslint-plugin-promise": "6.1.1", |
|||
"eslint-plugin-standard": "4.1.0", |
|||
"mocha": "^11.1.0", |
|||
"nyc": "^17.1.0", |
|||
"supertest": "^7.0.0" |
|||
}, |
|||
"files": [ |
|||
"lib/", |
|||
"LICENSE", |
|||
"HISTORY.md", |
|||
"index.js" |
|||
], |
|||
"engines": { |
|||
"node": ">=18" |
|||
}, |
|||
"scripts": { |
|||
"lint": "eslint .", |
|||
"test": "mocha --reporter spec --check-leaks test/", |
|||
"test-ci": "nyc --reporter=lcovonly --reporter=text npm test", |
|||
"test-cov": "nyc --reporter=html --reporter=text npm test" |
|||
} |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,129 @@ |
|||
# brace-expansion |
|||
|
|||
[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), |
|||
as known from sh/bash, in JavaScript. |
|||
|
|||
[](http://travis-ci.org/juliangruber/brace-expansion) |
|||
[](https://www.npmjs.org/package/brace-expansion) |
|||
[](https://greenkeeper.io/) |
|||
|
|||
[](https://ci.testling.com/juliangruber/brace-expansion) |
|||
|
|||
## Example |
|||
|
|||
```js |
|||
var expand = require('brace-expansion'); |
|||
|
|||
expand('file-{a,b,c}.jpg') |
|||
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] |
|||
|
|||
expand('-v{,,}') |
|||
// => ['-v', '-v', '-v'] |
|||
|
|||
expand('file{0..2}.jpg') |
|||
// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] |
|||
|
|||
expand('file-{a..c}.jpg') |
|||
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] |
|||
|
|||
expand('file{2..0}.jpg') |
|||
// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] |
|||
|
|||
expand('file{0..4..2}.jpg') |
|||
// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] |
|||
|
|||
expand('file-{a..e..2}.jpg') |
|||
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] |
|||
|
|||
expand('file{00..10..5}.jpg') |
|||
// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] |
|||
|
|||
expand('{{A..C},{a..c}}') |
|||
// => ['A', 'B', 'C', 'a', 'b', 'c'] |
|||
|
|||
expand('ppp{,config,oe{,conf}}') |
|||
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] |
|||
``` |
|||
|
|||
## API |
|||
|
|||
```js |
|||
var expand = require('brace-expansion'); |
|||
``` |
|||
|
|||
### var expanded = expand(str) |
|||
|
|||
Return an array of all possible and valid expansions of `str`. If none are |
|||
found, `[str]` is returned. |
|||
|
|||
Valid expansions are: |
|||
|
|||
```js |
|||
/^(.*,)+(.+)?$/ |
|||
// {a,b,...} |
|||
``` |
|||
|
|||
A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. |
|||
|
|||
```js |
|||
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ |
|||
// {x..y[..incr]} |
|||
``` |
|||
|
|||
A numeric sequence from `x` to `y` inclusive, with optional increment. |
|||
If `x` or `y` start with a leading `0`, all the numbers will be padded |
|||
to have equal length. Negative numbers and backwards iteration work too. |
|||
|
|||
```js |
|||
/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/ |
|||
// {x..y[..incr]} |
|||
``` |
|||
|
|||
An alphabetic sequence from `x` to `y` inclusive, with optional increment. |
|||
`x` and `y` must be exactly one character, and if given, `incr` must be a |
|||
number. |
|||
|
|||
For compatibility reasons, the string `${` is not eligible for brace expansion. |
|||
|
|||
## Installation |
|||
|
|||
With [npm](https://npmjs.org) do: |
|||
|
|||
```bash |
|||
npm install brace-expansion |
|||
``` |
|||
|
|||
## Contributors |
|||
|
|||
- [Julian Gruber](https://github.com/juliangruber) |
|||
- [Isaac Z. Schlueter](https://github.com/isaacs) |
|||
|
|||
## Sponsors |
|||
|
|||
This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! |
|||
|
|||
Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! |
|||
|
|||
## License |
|||
|
|||
(MIT) |
|||
|
|||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy of |
|||
this software and associated documentation files (the "Software"), to deal in |
|||
the Software without restriction, including without limitation the rights to |
|||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies |
|||
of the Software, and to permit persons to whom the Software is furnished to do |
|||
so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,201 @@ |
|||
var concatMap = require('concat-map'); |
|||
var balanced = require('balanced-match'); |
|||
|
|||
module.exports = expandTop; |
|||
|
|||
var escSlash = '\0SLASH'+Math.random()+'\0'; |
|||
var escOpen = '\0OPEN'+Math.random()+'\0'; |
|||
var escClose = '\0CLOSE'+Math.random()+'\0'; |
|||
var escComma = '\0COMMA'+Math.random()+'\0'; |
|||
var escPeriod = '\0PERIOD'+Math.random()+'\0'; |
|||
|
|||
function numeric(str) { |
|||
return parseInt(str, 10) == str |
|||
? parseInt(str, 10) |
|||
: str.charCodeAt(0); |
|||
} |
|||
|
|||
function escapeBraces(str) { |
|||
return str.split('\\\\').join(escSlash) |
|||
.split('\\{').join(escOpen) |
|||
.split('\\}').join(escClose) |
|||
.split('\\,').join(escComma) |
|||
.split('\\.').join(escPeriod); |
|||
} |
|||
|
|||
function unescapeBraces(str) { |
|||
return str.split(escSlash).join('\\') |
|||
.split(escOpen).join('{') |
|||
.split(escClose).join('}') |
|||
.split(escComma).join(',') |
|||
.split(escPeriod).join('.'); |
|||
} |
|||
|
|||
|
|||
// Basically just str.split(","), but handling cases
|
|||
// where we have nested braced sections, which should be
|
|||
// treated as individual members, like {a,{b,c},d}
|
|||
function parseCommaParts(str) { |
|||
if (!str) |
|||
return ['']; |
|||
|
|||
var parts = []; |
|||
var m = balanced('{', '}', str); |
|||
|
|||
if (!m) |
|||
return str.split(','); |
|||
|
|||
var pre = m.pre; |
|||
var body = m.body; |
|||
var post = m.post; |
|||
var p = pre.split(','); |
|||
|
|||
p[p.length-1] += '{' + body + '}'; |
|||
var postParts = parseCommaParts(post); |
|||
if (post.length) { |
|||
p[p.length-1] += postParts.shift(); |
|||
p.push.apply(p, postParts); |
|||
} |
|||
|
|||
parts.push.apply(parts, p); |
|||
|
|||
return parts; |
|||
} |
|||
|
|||
function expandTop(str) { |
|||
if (!str) |
|||
return []; |
|||
|
|||
// I don't know why Bash 4.3 does this, but it does.
|
|||
// Anything starting with {} will have the first two bytes preserved
|
|||
// but *only* at the top level, so {},a}b will not expand to anything,
|
|||
// but a{},b}c will be expanded to [a}c,abc].
|
|||
// One could argue that this is a bug in Bash, but since the goal of
|
|||
// this module is to match Bash's rules, we escape a leading {}
|
|||
if (str.substr(0, 2) === '{}') { |
|||
str = '\\{\\}' + str.substr(2); |
|||
} |
|||
|
|||
return expand(escapeBraces(str), true).map(unescapeBraces); |
|||
} |
|||
|
|||
function identity(e) { |
|||
return e; |
|||
} |
|||
|
|||
function embrace(str) { |
|||
return '{' + str + '}'; |
|||
} |
|||
function isPadded(el) { |
|||
return /^-?0\d/.test(el); |
|||
} |
|||
|
|||
function lte(i, y) { |
|||
return i <= y; |
|||
} |
|||
function gte(i, y) { |
|||
return i >= y; |
|||
} |
|||
|
|||
function expand(str, isTop) { |
|||
var expansions = []; |
|||
|
|||
var m = balanced('{', '}', str); |
|||
if (!m || /\$$/.test(m.pre)) return [str]; |
|||
|
|||
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); |
|||
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); |
|||
var isSequence = isNumericSequence || isAlphaSequence; |
|||
var isOptions = m.body.indexOf(',') >= 0; |
|||
if (!isSequence && !isOptions) { |
|||
// {a},b}
|
|||
if (m.post.match(/,(?!,).*\}/)) { |
|||
str = m.pre + '{' + m.body + escClose + m.post; |
|||
return expand(str); |
|||
} |
|||
return [str]; |
|||
} |
|||
|
|||
var n; |
|||
if (isSequence) { |
|||
n = m.body.split(/\.\./); |
|||
} else { |
|||
n = parseCommaParts(m.body); |
|||
if (n.length === 1) { |
|||
// x{{a,b}}y ==> x{a}y x{b}y
|
|||
n = expand(n[0], false).map(embrace); |
|||
if (n.length === 1) { |
|||
var post = m.post.length |
|||
? expand(m.post, false) |
|||
: ['']; |
|||
return post.map(function(p) { |
|||
return m.pre + n[0] + p; |
|||
}); |
|||
} |
|||
} |
|||
} |
|||
|
|||
// at this point, n is the parts, and we know it's not a comma set
|
|||
// with a single entry.
|
|||
|
|||
// no need to expand pre, since it is guaranteed to be free of brace-sets
|
|||
var pre = m.pre; |
|||
var post = m.post.length |
|||
? expand(m.post, false) |
|||
: ['']; |
|||
|
|||
var N; |
|||
|
|||
if (isSequence) { |
|||
var x = numeric(n[0]); |
|||
var y = numeric(n[1]); |
|||
var width = Math.max(n[0].length, n[1].length) |
|||
var incr = n.length == 3 |
|||
? Math.abs(numeric(n[2])) |
|||
: 1; |
|||
var test = lte; |
|||
var reverse = y < x; |
|||
if (reverse) { |
|||
incr *= -1; |
|||
test = gte; |
|||
} |
|||
var pad = n.some(isPadded); |
|||
|
|||
N = []; |
|||
|
|||
for (var i = x; test(i, y); i += incr) { |
|||
var c; |
|||
if (isAlphaSequence) { |
|||
c = String.fromCharCode(i); |
|||
if (c === '\\') |
|||
c = ''; |
|||
} else { |
|||
c = String(i); |
|||
if (pad) { |
|||
var need = width - c.length; |
|||
if (need > 0) { |
|||
var z = new Array(need + 1).join('0'); |
|||
if (i < 0) |
|||
c = '-' + z + c.slice(1); |
|||
else |
|||
c = z + c; |
|||
} |
|||
} |
|||
} |
|||
N.push(c); |
|||
} |
|||
} else { |
|||
N = concatMap(n, function(el) { return expand(el, false) }); |
|||
} |
|||
|
|||
for (var j = 0; j < N.length; j++) { |
|||
for (var k = 0; k < post.length; k++) { |
|||
var expansion = pre + N[j] + post[k]; |
|||
if (!isTop || isSequence || expansion) |
|||
expansions.push(expansion); |
|||
} |
|||
} |
|||
|
|||
return expansions; |
|||
} |
|||
|
|||
@ -0,0 +1,50 @@ |
|||
{ |
|||
"name": "brace-expansion", |
|||
"description": "Brace expansion as known from sh/bash", |
|||
"version": "1.1.12", |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "git://github.com/juliangruber/brace-expansion.git" |
|||
}, |
|||
"homepage": "https://github.com/juliangruber/brace-expansion", |
|||
"main": "index.js", |
|||
"scripts": { |
|||
"test": "tape test/*.js", |
|||
"gentest": "bash test/generate.sh", |
|||
"bench": "matcha test/perf/bench.js" |
|||
}, |
|||
"dependencies": { |
|||
"balanced-match": "^1.0.0", |
|||
"concat-map": "0.0.1" |
|||
}, |
|||
"devDependencies": { |
|||
"matcha": "^0.7.0", |
|||
"tape": "^4.6.0" |
|||
}, |
|||
"keywords": [], |
|||
"author": { |
|||
"name": "Julian Gruber", |
|||
"email": "mail@juliangruber.com", |
|||
"url": "http://juliangruber.com" |
|||
}, |
|||
"license": "MIT", |
|||
"testling": { |
|||
"files": "test/*.js", |
|||
"browsers": [ |
|||
"ie/8..latest", |
|||
"firefox/20..latest", |
|||
"firefox/nightly", |
|||
"chrome/25..latest", |
|||
"chrome/canary", |
|||
"opera/12..latest", |
|||
"opera/next", |
|||
"safari/5.1..latest", |
|||
"ipad/6.0..latest", |
|||
"iphone/6.0..latest", |
|||
"android-browser/4.2..latest" |
|||
] |
|||
}, |
|||
"publishConfig": { |
|||
"tag": "1.x" |
|||
} |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
The MIT License (MIT) |
|||
|
|||
Copyright (c) 2014-present, Jon Schlinkert. |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
|||
THE SOFTWARE. |
|||
@ -0,0 +1,586 @@ |
|||
# braces [](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [](https://www.npmjs.com/package/braces) [](https://npmjs.org/package/braces) [](https://npmjs.org/package/braces) [](https://travis-ci.org/micromatch/braces) |
|||
|
|||
> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. |
|||
|
|||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. |
|||
|
|||
## Install |
|||
|
|||
Install with [npm](https://www.npmjs.com/): |
|||
|
|||
```sh |
|||
$ npm install --save braces |
|||
``` |
|||
|
|||
## v3.0.0 Released!! |
|||
|
|||
See the [changelog](CHANGELOG.md) for details. |
|||
|
|||
## Why use braces? |
|||
|
|||
Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. |
|||
|
|||
- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](https://www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) |
|||
- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. |
|||
- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. |
|||
- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). |
|||
- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). |
|||
- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` |
|||
- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` |
|||
- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` |
|||
- [Supports escaping](#escaping) - To prevent evaluation of special characters. |
|||
|
|||
## Usage |
|||
|
|||
The main export is a function that takes one or more brace `patterns` and `options`. |
|||
|
|||
```js |
|||
const braces = require('braces'); |
|||
// braces(patterns[, options]); |
|||
|
|||
console.log(braces(['{01..05}', '{a..e}'])); |
|||
//=> ['(0[1-5])', '([a-e])'] |
|||
|
|||
console.log(braces(['{01..05}', '{a..e}'], { expand: true })); |
|||
//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] |
|||
``` |
|||
|
|||
### Brace Expansion vs. Compilation |
|||
|
|||
By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. |
|||
|
|||
**Compiled** |
|||
|
|||
```js |
|||
console.log(braces('a/{x,y,z}/b')); |
|||
//=> ['a/(x|y|z)/b'] |
|||
console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); |
|||
//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] |
|||
``` |
|||
|
|||
**Expanded** |
|||
|
|||
Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): |
|||
|
|||
```js |
|||
console.log(braces('a/{x,y,z}/b', { expand: true })); |
|||
//=> ['a/x/b', 'a/y/b', 'a/z/b'] |
|||
|
|||
console.log(braces.expand('{01..10}')); |
|||
//=> ['01','02','03','04','05','06','07','08','09','10'] |
|||
``` |
|||
|
|||
### Lists |
|||
|
|||
Expand lists (like Bash "sets"): |
|||
|
|||
```js |
|||
console.log(braces('a/{foo,bar,baz}/*.js')); |
|||
//=> ['a/(foo|bar|baz)/*.js'] |
|||
|
|||
console.log(braces.expand('a/{foo,bar,baz}/*.js')); |
|||
//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] |
|||
``` |
|||
|
|||
### Sequences |
|||
|
|||
Expand ranges of characters (like Bash "sequences"): |
|||
|
|||
```js |
|||
console.log(braces.expand('{1..3}')); // ['1', '2', '3'] |
|||
console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] |
|||
console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] |
|||
console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] |
|||
|
|||
// supports zero-padded ranges |
|||
console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] |
|||
console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] |
|||
``` |
|||
|
|||
See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. |
|||
|
|||
### Stepped ranges |
|||
|
|||
Steps, or increments, may be used with ranges: |
|||
|
|||
```js |
|||
console.log(braces.expand('{2..10..2}')); |
|||
//=> ['2', '4', '6', '8', '10'] |
|||
|
|||
console.log(braces('{2..10..2}')); |
|||
//=> ['(2|4|6|8|10)'] |
|||
``` |
|||
|
|||
When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. |
|||
|
|||
### Nesting |
|||
|
|||
Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. |
|||
|
|||
**"Expanded" braces** |
|||
|
|||
```js |
|||
console.log(braces.expand('a{b,c,/{x,y}}/e')); |
|||
//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] |
|||
|
|||
console.log(braces.expand('a/{x,{1..5},y}/c')); |
|||
//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] |
|||
``` |
|||
|
|||
**"Optimized" braces** |
|||
|
|||
```js |
|||
console.log(braces('a{b,c,/{x,y}}/e')); |
|||
//=> ['a(b|c|/(x|y))/e'] |
|||
|
|||
console.log(braces('a/{x,{1..5},y}/c')); |
|||
//=> ['a/(x|([1-5])|y)/c'] |
|||
``` |
|||
|
|||
### Escaping |
|||
|
|||
**Escaping braces** |
|||
|
|||
A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_: |
|||
|
|||
```js |
|||
console.log(braces.expand('a\\{d,c,b}e')); |
|||
//=> ['a{d,c,b}e'] |
|||
|
|||
console.log(braces.expand('a{d,c,b\\}e')); |
|||
//=> ['a{d,c,b}e'] |
|||
``` |
|||
|
|||
**Escaping commas** |
|||
|
|||
Commas inside braces may also be escaped: |
|||
|
|||
```js |
|||
console.log(braces.expand('a{b\\,c}d')); |
|||
//=> ['a{b,c}d'] |
|||
|
|||
console.log(braces.expand('a{d\\,c,b}e')); |
|||
//=> ['ad,ce', 'abe'] |
|||
``` |
|||
|
|||
**Single items** |
|||
|
|||
Following bash conventions, a brace pattern is also not expanded when it contains a single character: |
|||
|
|||
```js |
|||
console.log(braces.expand('a{b}c')); |
|||
//=> ['a{b}c'] |
|||
``` |
|||
|
|||
## Options |
|||
|
|||
### options.maxLength |
|||
|
|||
**Type**: `Number` |
|||
|
|||
**Default**: `10,000` |
|||
|
|||
**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. |
|||
|
|||
```js |
|||
console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error |
|||
``` |
|||
|
|||
### options.expand |
|||
|
|||
**Type**: `Boolean` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). |
|||
|
|||
```js |
|||
console.log(braces('a/{b,c}/d', { expand: true })); |
|||
//=> [ 'a/b/d', 'a/c/d' ] |
|||
``` |
|||
|
|||
### options.nodupes |
|||
|
|||
**Type**: `Boolean` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
**Description**: Remove duplicates from the returned array. |
|||
|
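An illustrative example (output inferred from this description, not a verified run):

```js
const braces = require('braces');

console.log(braces.expand('a/{b,b,c}/d'));
//=> ['a/b/d', 'a/b/d', 'a/c/d']  (duplicates kept by default)

console.log(braces('a/{b,b,c}/d', { expand: true, nodupes: true }));
//=> ['a/b/d', 'a/c/d']
```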
|||
### options.rangeLimit |
|||
|
|||
**Type**: `Number` |
|||
|
|||
**Default**: `1000` |
|||
|
|||
**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. |
|||
|
|||
You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether. |
|||
|
|||
**Examples** |
|||
|
|||
```js |
|||
// pattern exceeds the "rangeLimit", so it's optimized automatically |
|||
console.log(braces.expand('{1..1000}')); |
|||
//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] |
|||
|
|||
// pattern does not exceed "rangeLimit", so it's NOT optimized |
|||
console.log(braces.expand('{1..100}')); |
|||
//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] |
|||
``` |
|||
|
|||
### options.transform |
|||
|
|||
**Type**: `Function` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
**Description**: Customize range expansion. |
|||
|
|||
**Example: Transforming non-numeric values** |
|||
|
|||
```js |
|||
const alpha = braces.expand('x/{a..e}/y', { |
|||
transform(value, index) { |
|||
// When non-numeric values are passed, "value" is a character code. |
|||
return 'foo/' + String.fromCharCode(value) + '-' + index; |
|||
}, |
|||
}); |
|||
console.log(alpha); |
|||
//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] |
|||
``` |
|||
|
|||
**Example: Transforming numeric values** |
|||
|
|||
```js |
|||
const numeric = braces.expand('{1..5}', { |
|||
transform(value) { |
|||
// when numeric values are passed, "value" is a number |
|||
return 'foo/' + value * 2; |
|||
}, |
|||
}); |
|||
console.log(numeric); |
|||
//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] |
|||
``` |
|||
|
|||
### options.quantifiers |
|||
|
|||
**Type**: `Boolean` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
**Description**: In regular expressions, quantifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. |
|||
|
|||
Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists). |
|||
|
|||
The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. |
|||
|
|||
**Examples** |
|||
|
|||
```js |
|||
const braces = require('braces'); |
|||
console.log(braces('a/b{1,3}/{x,y,z}')); |
|||
//=> [ 'a/b(1|3)/(x|y|z)' ] |
|||
console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); |
|||
//=> [ 'a/b{1,3}/(x|y|z)' ] |
|||
console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); |
|||
//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] |
|||
``` |
|||
|
|||
### options.keepEscaping |
|||
|
|||
**Type**: `Boolean` |
|||
|
|||
**Default**: `undefined` |
|||
|
|||
**Description**: Do not strip backslashes that were used for escaping from the result. |
|||
|
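A hedged example (the exact output is an assumption based on this description, not a verified run):

```js
const braces = require('braces');

console.log(braces.expand('a{b\\,c}d'));
//=> ['a{b,c}d']   (escaping backslash stripped by default)

console.log(braces.expand('a{b\\,c}d', { keepEscaping: true }));
//=> ['a{b\\,c}d'] (backslash preserved)
```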
|||
## What is "brace expansion"? |
|||
|
|||
Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). |
|||
|
|||
In addition to "expansion", braces are also used for matching. In other words: |
|||
|
|||
- [brace expansion](#brace-expansion) is for generating new lists |
|||
- [brace matching](#brace-matching) is for filtering existing lists |
|||
|
|||
<details> |
|||
<summary><strong>More about brace expansion</strong> (click to expand)</summary> |
|||
|
|||
There are two main types of brace expansion: |
|||
|
|||
1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` |
|||
2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". |
|||
|
|||
Here are some example brace patterns to illustrate how they work: |
|||
|
|||
**Sets** |
|||
|
|||
``` |
|||
{a,b,c} => a b c |
|||
{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 |
|||
``` |
|||
|
|||
**Sequences** |
|||
|
|||
``` |
|||
{1..9} => 1 2 3 4 5 6 7 8 9 |
|||
{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 |
|||
{1..20..3} => 1 4 7 10 13 16 19 |
|||
{a..j} => a b c d e f g h i j |
|||
{j..a} => j i h g f e d c b a |
|||
{a..z..3} => a d g j m p s v y |
|||
``` |
|||
|
|||
**Combination** |
|||
|
|||
Sets and sequences can be mixed together or used along with any other strings. |
|||
|
|||
``` |
|||
{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 |
|||
foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar |
|||
``` |
|||
|
|||
The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. |
|||
|
|||
## Brace matching |
|||
|
|||
In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. |
|||
|
|||
For example, the pattern `foo/{1..3}/bar` would match any of the following strings: |
|||
|
|||
``` |
|||
foo/1/bar |
|||
foo/2/bar |
|||
foo/3/bar |
|||
``` |
|||
|
|||
But not: |
|||
|
|||
``` |
|||
baz/1/qux |
|||
baz/2/qux |
|||
baz/3/qux |
|||
``` |
|||
|
|||
Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of the following strings: |
|||
|
|||
``` |
|||
foo/1/bar |
|||
foo/2/bar |
|||
foo/3/bar |
|||
baz/1/qux |
|||
baz/2/qux |
|||
baz/3/qux |
|||
``` |
|||
|
|||
## Brace matching pitfalls |
|||
|
|||
Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. |
|||
|
|||
### tldr |
|||
|
|||
**"brace bombs"** |
|||
|
|||
- brace expansion can eat up a huge amount of processing resources |
|||
- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially |
|||
- users can accidentally (or intentionally) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) |
|||
|
|||
For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. |
|||
|
|||
### The solution |
|||
|
|||
Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. |
|||
|
|||
### Geometric complexity |
|||
|
|||
At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. |
|||
|
|||
For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: |
|||
|
|||
``` |
|||
{1,2}{3,4} => (2X2) => 13 14 23 24 |
|||
{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 |
|||
``` |
|||
|
|||
But add an element to a set, and we get an n-fold Cartesian product with `O(n^c)` complexity: |
|||
|
|||
``` |
|||
{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 |
|||
249 257 258 259 267 268 269 347 348 349 357 |
|||
358 359 367 368 369 |
|||
``` |
|||
|
|||
Now, imagine how this complexity grows given that each element is a n-tuple: |
|||
|
|||
``` |
|||
{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) |
|||
{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) |
|||
``` |
|||
|
|||
Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. |
|||
|
|||
**More information** |
|||
|
|||
Interested in learning more about brace expansion? |
|||
|
|||
- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) |
|||
- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) |
|||
- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) |
|||
|
|||
</details> |
|||
|
|||
## Performance |
|||
|
|||
Braces is not only screaming fast, it's also more accurate than other brace expansion libraries. |
|||
|
|||
### Better algorithms |
|||
|
|||
Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. |
|||
|
|||
Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. |
|||
|
|||
**The proof is in the numbers** |
|||
|
|||
Minimatch gets exponentially slower as patterns increase in complexity; braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. |
|||
|
|||
| **Pattern** | **braces** | **[minimatch][]** | |
|||
| --------------------------- | ------------------- | ---------------------------- | |
|||
| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) | |
|||
| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | |
|||
| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | |
|||
| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | |
|||
| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | |
|||
| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | |
|||
| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | |
|||
| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | |
|||
| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | |
|||
| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | |
|||
| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | |
|||
| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | |
|||
| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | |
|||
| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | |
|||
| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | |
|||
| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | |
|||
| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | |
|||
|
|||
### Faster algorithms |
|||
|
|||
When you need expansion, braces is still much faster. |
|||
|
|||
_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ |
|||
|
|||
| **Pattern** | **braces** | **[minimatch][]** | |
|||
| --------------- | --------------------------- | ---------------------------- | |
|||
| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | |
|||
| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | |
|||
| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | |
|||
| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | |
|||
| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | |
|||
| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | |
|||
| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | |
|||
| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | |
|||
|
|||
If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). |
|||
|
|||
## Benchmarks |
|||
|
|||
### Running benchmarks |
|||
|
|||
Install dev dependencies: |
|||
|
|||
```bash |
|||
npm i -d && npm run benchmark |
|||
``` |
|||
|
|||
### Latest results |
|||
|
|||
Braces is more accurate, without sacrificing performance. |
|||
|
|||
```bash |
|||
● expand - range (expanded) |
|||
braces x 53,167 ops/sec ±0.12% (102 runs sampled) |
|||
minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) |
|||
● expand - range (optimized for regex) |
|||
braces x 373,442 ops/sec ±0.04% (100 runs sampled) |
|||
minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) |
|||
● expand - nested ranges (expanded) |
|||
braces x 33,921 ops/sec ±0.09% (99 runs sampled) |
|||
minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) |
|||
● expand - nested ranges (optimized for regex) |
|||
braces x 287,479 ops/sec ±0.52% (98 runs sampled) |
|||
minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) |
|||
● expand - set (expanded) |
|||
braces x 238,243 ops/sec ±0.19% (97 runs sampled) |
|||
minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) |
|||
● expand - set (optimized for regex) |
|||
braces x 321,844 ops/sec ±0.10% (97 runs sampled) |
|||
minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) |
|||
● expand - nested sets (expanded) |
|||
braces x 165,371 ops/sec ±0.42% (96 runs sampled) |
|||
minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) |
|||
● expand - nested sets (optimized for regex) |
|||
braces x 242,948 ops/sec ±0.12% (99 runs sampled) |
|||
minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) |
|||
``` |
|||
|
|||
## About |
|||
|
|||
<details> |
|||
<summary><strong>Contributing</strong></summary> |
|||
|
|||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). |
|||
|
|||
</details> |
|||
|
|||
<details> |
|||
<summary><strong>Running Tests</strong></summary> |
|||
|
|||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: |
|||
|
|||
```sh |
|||
$ npm install && npm test |
|||
``` |
|||
|
|||
</details> |
|||
|
|||
<details> |
|||
<summary><strong>Building docs</strong></summary> |
|||
|
|||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ |
|||
|
|||
To generate the readme, run the following command: |
|||
|
|||
```sh |
|||
$ npm install -g verbose/verb#dev verb-generate-readme && verb |
|||
``` |
|||
|
|||
</details> |
|||
|
|||
### Contributors |
|||
|
|||
| **Commits** | **Contributor** | |
|||
| ----------- | ------------------------------------------------------------- | |
|||
| 197 | [jonschlinkert](https://github.com/jonschlinkert) | |
|||
| 4 | [doowb](https://github.com/doowb) | |
|||
| 1 | [es128](https://github.com/es128) | |
|||
| 1 | [eush77](https://github.com/eush77) | |
|||
| 1 | [hemanth](https://github.com/hemanth) | |
|||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | |
|||
|
|||
### Author |
|||
|
|||
**Jon Schlinkert** |
|||
|
|||
- [GitHub Profile](https://github.com/jonschlinkert) |
|||
- [Twitter Profile](https://twitter.com/jonschlinkert) |
|||
- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) |
|||
|
|||
### License |
|||
|
|||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). |
|||
Released under the [MIT License](LICENSE). |
|||
|
|||
--- |
|||
|
|||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ |
|||
@ -0,0 +1,170 @@ |
|||
'use strict'; |
|||
|
|||
const stringify = require('./lib/stringify'); |
|||
const compile = require('./lib/compile'); |
|||
const expand = require('./lib/expand'); |
|||
const parse = require('./lib/parse'); |
|||
|
|||
/** |
|||
* Expand the given pattern or create a regex-compatible string. |
|||
* |
|||
* ```js
|
|||
* const braces = require('braces'); |
|||
* console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
|
|||
* console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
|
|||
* ``` |
|||
* @param {String} `str` |
|||
* @param {Object} `options` |
|||
* @return {String} |
|||
* @api public |
|||
*/ |
|||
|
|||
const braces = (input, options = {}) => { |
|||
let output = []; |
|||
|
|||
if (Array.isArray(input)) { |
|||
for (const pattern of input) { |
|||
const result = braces.create(pattern, options); |
|||
if (Array.isArray(result)) { |
|||
output.push(...result); |
|||
} else { |
|||
output.push(result); |
|||
} |
|||
} |
|||
} else { |
|||
output = [].concat(braces.create(input, options)); |
|||
} |
|||
|
|||
if (options && options.expand === true && options.nodupes === true) { |
|||
output = [...new Set(output)]; |
|||
} |
|||
return output; |
|||
}; |
|||
|
|||
/** |
|||
* Parse the given `str` with the given `options`. |
|||
* |
|||
* ```js
|
|||
* // braces.parse(pattern, [, options]);
|
|||
* const ast = braces.parse('a/{b,c}/d'); |
|||
* console.log(ast); |
|||
* ``` |
|||
* @param {String} pattern Brace pattern to parse |
|||
* @param {Object} options |
|||
* @return {Object} Returns an AST |
|||
* @api public |
|||
*/ |
|||
|
|||
braces.parse = (input, options = {}) => parse(input, options); |
|||
|
|||
/** |
|||
* Creates a braces string from an AST, or an AST node. |
|||
* |
|||
* ```js
|
|||
* const braces = require('braces'); |
|||
* let ast = braces.parse('foo/{a,b}/bar'); |
|||
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
|
|||
* ``` |
|||
* @param {String} `input` Brace pattern or AST. |
|||
* @param {Object} `options` |
|||
* @return {Array} Returns an array of expanded values. |
|||
* @api public |
|||
*/ |
|||
|
|||
braces.stringify = (input, options = {}) => { |
|||
if (typeof input === 'string') { |
|||
return stringify(braces.parse(input, options), options); |
|||
} |
|||
return stringify(input, options); |
|||
}; |
|||
|
|||
/** |
|||
* Compiles a brace pattern into a regex-compatible, optimized string. |
|||
* This method is called by the main [braces](#braces) function by default. |
|||
* |
|||
* ```js
|
|||
* const braces = require('braces'); |
|||
* console.log(braces.compile('a/{b,c}/d')); |
|||
* //=> ['a/(b|c)/d']
|
|||
* ``` |
|||
* @param {String} `input` Brace pattern or AST. |
|||
* @param {Object} `options` |
|||
* @return {Array} Returns an array of expanded values. |
|||
* @api public |
|||
*/ |
|||
|
|||
braces.compile = (input, options = {}) => { |
|||
if (typeof input === 'string') { |
|||
input = braces.parse(input, options); |
|||
} |
|||
return compile(input, options); |
|||
}; |
|||
|
|||
/** |
|||
* Expands a brace pattern into an array. This method is called by the |
|||
* main [braces](#braces) function when `options.expand` is true. Before |
|||
 * using this method, it's recommended that you read the [performance notes](#performance) |
|||
* and advantages of using [.compile](#compile) instead. |
|||
* |
|||
* ```js
|
|||
* const braces = require('braces'); |
|||
* console.log(braces.expand('a/{b,c}/d')); |
|||
* //=> ['a/b/d', 'a/c/d'];
|
|||
* ``` |
|||
* @param {String} `pattern` Brace pattern |
|||
* @param {Object} `options` |
|||
* @return {Array} Returns an array of expanded values. |
|||
* @api public |
|||
*/ |
|||
|
|||
braces.expand = (input, options = {}) => { |
|||
if (typeof input === 'string') { |
|||
input = braces.parse(input, options); |
|||
} |
|||
|
|||
let result = expand(input, options); |
|||
|
|||
// filter out empty strings if specified
|
|||
if (options.noempty === true) { |
|||
result = result.filter(Boolean); |
|||
} |
|||
|
|||
// filter out duplicates if specified
|
|||
if (options.nodupes === true) { |
|||
result = [...new Set(result)]; |
|||
} |
|||
|
|||
return result; |
|||
}; |
|||
|
|||
/** |
|||
* Processes a brace pattern and returns either an expanded array |
|||
 * (if `options.expand` is true), or a highly optimized regex-compatible string. |
|||
* This method is called by the main [braces](#braces) function. |
|||
* |
|||
* ```js
|
|||
* const braces = require('braces'); |
|||
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) |
|||
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
|
|||
* ``` |
|||
* @param {String} `pattern` Brace pattern |
|||
* @param {Object} `options` |
|||
* @return {Array} Returns an array of expanded values. |
|||
* @api public |
|||
*/ |
|||
|
|||
braces.create = (input, options = {}) => { |
|||
if (input === '' || input.length < 3) { |
|||
return [input]; |
|||
} |
|||
|
|||
return options.expand !== true |
|||
? braces.compile(input, options) |
|||
: braces.expand(input, options); |
|||
}; |
|||
|
|||
/** |
|||
* Expose "braces" |
|||
*/ |
|||
|
|||
module.exports = braces; |
|||
@ -0,0 +1,60 @@ |
|||
'use strict'; |
|||
|
|||
const fill = require('fill-range'); |
|||
const utils = require('./utils'); |
|||
|
|||
const compile = (ast, options = {}) => { |
|||
const walk = (node, parent = {}) => { |
|||
const invalidBlock = utils.isInvalidBrace(parent); |
|||
const invalidNode = node.invalid === true && options.escapeInvalid === true; |
|||
const invalid = invalidBlock === true || invalidNode === true; |
|||
const prefix = options.escapeInvalid === true ? '\\' : ''; |
|||
let output = ''; |
|||
|
|||
if (node.isOpen === true) { |
|||
return prefix + node.value; |
|||
} |
|||
|
|||
if (node.isClose === true) { |
|||
return prefix + node.value; |
|||
} |
|||
|
|||
if (node.type === 'open') { |
|||
return invalid ? prefix + node.value : '('; |
|||
} |
|||
|
|||
if (node.type === 'close') { |
|||
return invalid ? prefix + node.value : ')'; |
|||
} |
|||
|
|||
if (node.type === 'comma') { |
|||
return node.prev.type === 'comma' ? '' : invalid ? node.value : '|'; |
|||
} |
|||
|
|||
if (node.value) { |
|||
return node.value; |
|||
} |
|||
|
|||
if (node.nodes && node.ranges > 0) { |
|||
const args = utils.reduce(node.nodes); |
|||
const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); |
|||
|
|||
if (range.length !== 0) { |
|||
return args.length > 1 && range.length > 1 ? `(${range})` : range; |
|||
} |
|||
} |
|||
|
|||
if (node.nodes) { |
|||
for (const child of node.nodes) { |
|||
output += walk(child, node); |
|||
} |
|||
} |
|||
|
|||
return output; |
|||
}; |
|||
|
|||
return walk(ast); |
|||
}; |
|||
|
|||
module.exports = compile; |
|||
@ -0,0 +1,57 @@ |
|||
'use strict'; |
|||
|
|||
module.exports = { |
|||
MAX_LENGTH: 10000, |
|||
|
|||
// Digits
|
|||
CHAR_0: '0', /* 0 */ |
|||
CHAR_9: '9', /* 9 */ |
|||
|
|||
// Alphabet chars.
|
|||
CHAR_UPPERCASE_A: 'A', /* A */ |
|||
CHAR_LOWERCASE_A: 'a', /* a */ |
|||
CHAR_UPPERCASE_Z: 'Z', /* Z */ |
|||
CHAR_LOWERCASE_Z: 'z', /* z */ |
|||
|
|||
CHAR_LEFT_PARENTHESES: '(', /* ( */ |
|||
CHAR_RIGHT_PARENTHESES: ')', /* ) */ |
|||
|
|||
CHAR_ASTERISK: '*', /* * */ |
|||
|
|||
// Non-alphabetic chars.
|
|||
CHAR_AMPERSAND: '&', /* & */ |
|||
CHAR_AT: '@', /* @ */ |
|||
CHAR_BACKSLASH: '\\', /* \ */ |
|||
CHAR_BACKTICK: '`', /* ` */ |
|||
CHAR_CARRIAGE_RETURN: '\r', /* \r */ |
|||
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ |
|||
CHAR_COLON: ':', /* : */ |
|||
CHAR_COMMA: ',', /* , */ |
|||
CHAR_DOLLAR: '$', /* $ */ |
|||
CHAR_DOT: '.', /* . */ |
|||
CHAR_DOUBLE_QUOTE: '"', /* " */ |
|||
CHAR_EQUAL: '=', /* = */ |
|||
CHAR_EXCLAMATION_MARK: '!', /* ! */ |
|||
CHAR_FORM_FEED: '\f', /* \f */ |
|||
CHAR_FORWARD_SLASH: '/', /* / */ |
|||
CHAR_HASH: '#', /* # */ |
|||
CHAR_HYPHEN_MINUS: '-', /* - */ |
|||
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ |
|||
CHAR_LEFT_CURLY_BRACE: '{', /* { */ |
|||
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ |
|||
CHAR_LINE_FEED: '\n', /* \n */ |
|||
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ |
|||
CHAR_PERCENT: '%', /* % */ |
|||
CHAR_PLUS: '+', /* + */ |
|||
CHAR_QUESTION_MARK: '?', /* ? */ |
|||
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ |
|||
CHAR_RIGHT_CURLY_BRACE: '}', /* } */ |
|||
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ |
|||
CHAR_SEMICOLON: ';', /* ; */ |
|||
CHAR_SINGLE_QUOTE: '\'', /* ' */ |
|||
CHAR_SPACE: ' ', /* */ |
|||
CHAR_TAB: '\t', /* \t */ |
|||
CHAR_UNDERSCORE: '_', /* _ */ |
|||
CHAR_VERTICAL_LINE: '|', /* | */ |
|||
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ |
|||
}; |
|||
@ -0,0 +1,113 @@ |
|||
'use strict'; |
|||
|
|||
const fill = require('fill-range'); |
|||
const stringify = require('./stringify'); |
|||
const utils = require('./utils'); |
|||
|
|||
const append = (queue = '', stash = '', enclose = false) => { |
|||
const result = []; |
|||
|
|||
queue = [].concat(queue); |
|||
stash = [].concat(stash); |
|||
|
|||
if (!stash.length) return queue; |
|||
if (!queue.length) { |
|||
return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; |
|||
} |
|||
|
|||
for (const item of queue) { |
|||
if (Array.isArray(item)) { |
|||
for (const value of item) { |
|||
result.push(append(value, stash, enclose)); |
|||
} |
|||
} else { |
|||
for (let ele of stash) { |
|||
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; |
|||
result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele); |
|||
} |
|||
} |
|||
} |
|||
return utils.flatten(result); |
|||
}; |
|||
|
|||
const expand = (ast, options = {}) => { |
|||
const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit; |
|||
|
|||
const walk = (node, parent = {}) => { |
|||
node.queue = []; |
|||
|
|||
let p = parent; |
|||
let q = parent.queue; |
|||
|
|||
while (p.type !== 'brace' && p.type !== 'root' && p.parent) { |
|||
p = p.parent; |
|||
q = p.queue; |
|||
} |
|||
|
|||
if (node.invalid || node.dollar) { |
|||
q.push(append(q.pop(), stringify(node, options))); |
|||
return; |
|||
} |
|||
|
|||
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { |
|||
q.push(append(q.pop(), ['{}'])); |
|||
return; |
|||
} |
|||
|
|||
if (node.nodes && node.ranges > 0) { |
|||
const args = utils.reduce(node.nodes); |
|||
|
|||
if (utils.exceedsLimit(...args, options.step, rangeLimit)) { |
|||
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); |
|||
} |
|||
|
|||
let range = fill(...args, options); |
|||
if (range.length === 0) { |
|||
range = stringify(node, options); |
|||
} |
|||
|
|||
q.push(append(q.pop(), range)); |
|||
node.nodes = []; |
|||
return; |
|||
} |
|||
|
|||
const enclose = utils.encloseBrace(node); |
|||
let queue = node.queue; |
|||
let block = node; |
|||
|
|||
while (block.type !== 'brace' && block.type !== 'root' && block.parent) { |
|||
block = block.parent; |
|||
queue = block.queue; |
|||
} |
|||
|
|||
for (let i = 0; i < node.nodes.length; i++) { |
|||
const child = node.nodes[i]; |
|||
|
|||
if (child.type === 'comma' && node.type === 'brace') { |
|||
if (i === 1) queue.push(''); |
|||
queue.push(''); |
|||
continue; |
|||
} |
|||
|
|||
if (child.type === 'close') { |
|||
q.push(append(q.pop(), queue, enclose)); |
|||
continue; |
|||
} |
|||
|
|||
if (child.value && child.type !== 'open') { |
|||
queue.push(append(queue.pop(), child.value)); |
|||
continue; |
|||
} |
|||
|
|||
if (child.nodes) { |
|||
walk(child, node); |
|||
} |
|||
} |
|||
|
|||
return queue; |
|||
}; |
|||
|
|||
return utils.flatten(walk(ast)); |
|||
}; |
|||
|
|||
module.exports = expand; |
|||
@ -0,0 +1,331 @@ |
|||
'use strict'; |
|||
|
|||
const stringify = require('./stringify'); |
|||
|
|||
/** |
|||
* Constants |
|||
*/ |
|||
|
|||
const { |
|||
MAX_LENGTH, |
|||
CHAR_BACKSLASH, /* \ */ |
|||
CHAR_BACKTICK, /* ` */ |
|||
CHAR_COMMA, /* , */ |
|||
CHAR_DOT, /* . */ |
|||
CHAR_LEFT_PARENTHESES, /* ( */ |
|||
CHAR_RIGHT_PARENTHESES, /* ) */ |
|||
CHAR_LEFT_CURLY_BRACE, /* { */ |
|||
CHAR_RIGHT_CURLY_BRACE, /* } */ |
|||
CHAR_LEFT_SQUARE_BRACKET, /* [ */ |
|||
CHAR_RIGHT_SQUARE_BRACKET, /* ] */ |
|||
CHAR_DOUBLE_QUOTE, /* " */ |
|||
CHAR_SINGLE_QUOTE, /* ' */ |
|||
CHAR_NO_BREAK_SPACE, |
|||
CHAR_ZERO_WIDTH_NOBREAK_SPACE |
|||
} = require('./constants'); |
|||
|
|||
/** |
|||
* parse |
|||
*/ |
|||
|
|||
const parse = (input, options = {}) => { |
|||
if (typeof input !== 'string') { |
|||
throw new TypeError('Expected a string'); |
|||
} |
|||
|
|||
const opts = options || {}; |
|||
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; |
|||
if (input.length > max) { |
|||
throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); |
|||
} |
|||
|
|||
const ast = { type: 'root', input, nodes: [] }; |
|||
const stack = [ast]; |
|||
let block = ast; |
|||
let prev = ast; |
|||
let brackets = 0; |
|||
const length = input.length; |
|||
let index = 0; |
|||
let depth = 0; |
|||
let value; |
|||
|
|||
/** |
|||
* Helpers |
|||
*/ |
|||
|
|||
const advance = () => input[index++]; |
|||
const push = node => { |
|||
if (node.type === 'text' && prev.type === 'dot') { |
|||
prev.type = 'text'; |
|||
} |
|||
|
|||
if (prev && prev.type === 'text' && node.type === 'text') { |
|||
prev.value += node.value; |
|||
return; |
|||
} |
|||
|
|||
block.nodes.push(node); |
|||
node.parent = block; |
|||
node.prev = prev; |
|||
prev = node; |
|||
return node; |
|||
}; |
|||
|
|||
push({ type: 'bos' }); |
|||
|
|||
while (index < length) { |
|||
block = stack[stack.length - 1]; |
|||
value = advance(); |
|||
|
|||
/** |
|||
* Invalid chars |
|||
*/ |
|||
|
|||
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Escaped chars |
|||
*/ |
|||
|
|||
if (value === CHAR_BACKSLASH) { |
|||
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Right square bracket (literal): ']' |
|||
*/ |
|||
|
|||
if (value === CHAR_RIGHT_SQUARE_BRACKET) { |
|||
push({ type: 'text', value: '\\' + value }); |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Left square bracket: '[' |
|||
*/ |
|||
|
|||
if (value === CHAR_LEFT_SQUARE_BRACKET) { |
|||
brackets++; |
|||
|
|||
let next; |
|||
|
|||
while (index < length && (next = advance())) { |
|||
value += next; |
|||
|
|||
if (next === CHAR_LEFT_SQUARE_BRACKET) { |
|||
brackets++; |
|||
continue; |
|||
} |
|||
|
|||
if (next === CHAR_BACKSLASH) { |
|||
value += advance(); |
|||
continue; |
|||
} |
|||
|
|||
if (next === CHAR_RIGHT_SQUARE_BRACKET) { |
|||
brackets--; |
|||
|
|||
if (brackets === 0) { |
|||
break; |
|||
} |
|||
} |
|||
} |
|||
|
|||
push({ type: 'text', value }); |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Parentheses |
|||
*/ |
|||
|
|||
if (value === CHAR_LEFT_PARENTHESES) { |
|||
block = push({ type: 'paren', nodes: [] }); |
|||
stack.push(block); |
|||
push({ type: 'text', value }); |
|||
continue; |
|||
} |
|||
|
|||
if (value === CHAR_RIGHT_PARENTHESES) { |
|||
if (block.type !== 'paren') { |
|||
push({ type: 'text', value }); |
|||
continue; |
|||
} |
|||
block = stack.pop(); |
|||
push({ type: 'text', value }); |
|||
block = stack[stack.length - 1]; |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Quotes: '|"|` |
|||
*/ |
|||
|
|||
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { |
|||
const open = value; |
|||
let next; |
|||
|
|||
if (options.keepQuotes !== true) { |
|||
value = ''; |
|||
} |
|||
|
|||
while (index < length && (next = advance())) { |
|||
if (next === CHAR_BACKSLASH) { |
|||
value += next + advance(); |
|||
continue; |
|||
} |
|||
|
|||
if (next === open) { |
|||
if (options.keepQuotes === true) value += next; |
|||
break; |
|||
} |
|||
|
|||
value += next; |
|||
} |
|||
|
|||
push({ type: 'text', value }); |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Left curly brace: '{' |
|||
*/ |
|||
|
|||
if (value === CHAR_LEFT_CURLY_BRACE) { |
|||
depth++; |
|||
|
|||
const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; |
|||
const brace = { |
|||
type: 'brace', |
|||
open: true, |
|||
close: false, |
|||
dollar, |
|||
depth, |
|||
commas: 0, |
|||
ranges: 0, |
|||
nodes: [] |
|||
}; |
|||
|
|||
block = push(brace); |
|||
stack.push(block); |
|||
push({ type: 'open', value }); |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Right curly brace: '}' |
|||
*/ |
|||
|
|||
if (value === CHAR_RIGHT_CURLY_BRACE) { |
|||
if (block.type !== 'brace') { |
|||
push({ type: 'text', value }); |
|||
continue; |
|||
} |
|||
|
|||
const type = 'close'; |
|||
block = stack.pop(); |
|||
block.close = true; |
|||
|
|||
push({ type, value }); |
|||
depth--; |
|||
|
|||
block = stack[stack.length - 1]; |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Comma: ',' |
|||
*/ |
|||
|
|||
if (value === CHAR_COMMA && depth > 0) { |
|||
if (block.ranges > 0) { |
|||
block.ranges = 0; |
|||
const open = block.nodes.shift(); |
|||
block.nodes = [open, { type: 'text', value: stringify(block) }]; |
|||
} |
|||
|
|||
push({ type: 'comma', value }); |
|||
block.commas++; |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Dot: '.' |
|||
*/ |
|||
|
|||
if (value === CHAR_DOT && depth > 0 && block.commas === 0) { |
|||
const siblings = block.nodes; |
|||
|
|||
if (depth === 0 || siblings.length === 0) { |
|||
push({ type: 'text', value }); |
|||
continue; |
|||
} |
|||
|
|||
if (prev.type === 'dot') { |
|||
block.range = []; |
|||
prev.value += value; |
|||
prev.type = 'range'; |
|||
|
|||
if (block.nodes.length !== 3 && block.nodes.length !== 5) { |
|||
block.invalid = true; |
|||
block.ranges = 0; |
|||
prev.type = 'text'; |
|||
continue; |
|||
} |
|||
|
|||
block.ranges++; |
|||
block.args = []; |
|||
continue; |
|||
} |
|||
|
|||
if (prev.type === 'range') { |
|||
siblings.pop(); |
|||
|
|||
const before = siblings[siblings.length - 1]; |
|||
before.value += prev.value + value; |
|||
prev = before; |
|||
block.ranges--; |
|||
continue; |
|||
} |
|||
|
|||
push({ type: 'dot', value }); |
|||
continue; |
|||
} |
|||
|
|||
/** |
|||
* Text |
|||
*/ |
|||
|
|||
push({ type: 'text', value }); |
|||
} |
|||
|
|||
// Mark imbalanced braces and brackets as invalid
|
|||
do { |
|||
block = stack.pop(); |
|||
|
|||
if (block.type !== 'root') { |
|||
block.nodes.forEach(node => { |
|||
if (!node.nodes) { |
|||
if (node.type === 'open') node.isOpen = true; |
|||
if (node.type === 'close') node.isClose = true; |
|||
if (!node.nodes) node.type = 'text'; |
|||
node.invalid = true; |
|||
} |
|||
}); |
|||
|
|||
// get the location of the block on parent.nodes (block's siblings)
|
|||
const parent = stack[stack.length - 1]; |
|||
const index = parent.nodes.indexOf(block); |
|||
// replace the (invalid) block with it's nodes
|
|||
parent.nodes.splice(index, 1, ...block.nodes); |
|||
} |
|||
} while (stack.length > 0); |
|||
|
|||
push({ type: 'eos' }); |
|||
return ast; |
|||
}; |
|||
|
|||
module.exports = parse; |
|||
@ -0,0 +1,32 @@ |
|||
'use strict'; |
|||
|
|||
const utils = require('./utils'); |
|||
|
|||
module.exports = (ast, options = {}) => { |
|||
const stringify = (node, parent = {}) => { |
|||
const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); |
|||
const invalidNode = node.invalid === true && options.escapeInvalid === true; |
|||
let output = ''; |
|||
|
|||
if (node.value) { |
|||
if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { |
|||
return '\\' + node.value; |
|||
} |
|||
return node.value; |
|||
} |
|||
|
|||
if (node.nodes) { |
|||
for (const child of node.nodes) { |
|||
output += stringify(child); |
|||
} |
|||
} |
|||
return output; |
|||
}; |
|||
|
|||
return stringify(ast); |
|||
}; |
|||
|
|||
@ -0,0 +1,122 @@ |
|||
'use strict'; |
|||
|
|||
exports.isInteger = num => { |
|||
if (typeof num === 'number') { |
|||
return Number.isInteger(num); |
|||
} |
|||
if (typeof num === 'string' && num.trim() !== '') { |
|||
return Number.isInteger(Number(num)); |
|||
} |
|||
return false; |
|||
}; |
|||
|
|||
/** |
|||
* Find a node of the given type |
|||
*/ |
|||
|
|||
exports.find = (node, type) => node.nodes.find(node => node.type === type); |
|||
|
|||
/** |
|||
 * Returns true if expanding the given range (min, max, step) would exceed the limit |
|||
*/ |
|||
|
|||
exports.exceedsLimit = (min, max, step = 1, limit) => { |
|||
if (limit === false) return false; |
|||
if (!exports.isInteger(min) || !exports.isInteger(max)) return false; |
|||
return ((Number(max) - Number(min)) / Number(step)) >= limit; |
|||
}; |
|||
|
|||
/** |
|||
* Escape the given node with '\\' before node.value |
|||
*/ |
|||
|
|||
exports.escapeNode = (block, n = 0, type) => { |
|||
const node = block.nodes[n]; |
|||
if (!node) return; |
|||
|
|||
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { |
|||
if (node.escaped !== true) { |
|||
node.value = '\\' + node.value; |
|||
node.escaped = true; |
|||
} |
|||
} |
|||
}; |
|||
|
|||
/** |
|||
* Returns true if the given brace node should be enclosed in literal braces |
|||
*/ |
|||
|
|||
exports.encloseBrace = node => { |
|||
if (node.type !== 'brace') return false; |
|||
if ((node.commas >> 0 + node.ranges >> 0) === 0) { |
|||
node.invalid = true; |
|||
return true; |
|||
} |
|||
return false; |
|||
}; |
|||
|
|||
/** |
|||
* Returns true if a brace node is invalid. |
|||
*/ |
|||
|
|||
exports.isInvalidBrace = block => { |
|||
if (block.type !== 'brace') return false; |
|||
if (block.invalid === true || block.dollar) return true; |
|||
if ((block.commas >> 0 + block.ranges >> 0) === 0) { |
|||
block.invalid = true; |
|||
return true; |
|||
} |
|||
if (block.open !== true || block.close !== true) { |
|||
block.invalid = true; |
|||
return true; |
|||
} |
|||
return false; |
|||
}; |
|||
|
|||
/** |
|||
* Returns true if a node is an open or close node |
|||
*/ |
|||
|
|||
exports.isOpenOrClose = node => { |
|||
if (node.type === 'open' || node.type === 'close') { |
|||
return true; |
|||
} |
|||
return node.open === true || node.close === true; |
|||
}; |
|||
|
|||
/** |
|||
* Reduce an array of text nodes. |
|||
*/ |
|||
|
|||
exports.reduce = nodes => nodes.reduce((acc, node) => { |
|||
if (node.type === 'text') acc.push(node.value); |
|||
if (node.type === 'range') node.type = 'text'; |
|||
return acc; |
|||
}, []); |
|||
|
|||
/** |
|||
* Flatten an array |
|||
*/ |
|||
|
|||
exports.flatten = (...args) => { |
|||
const result = []; |
|||
|
|||
const flat = arr => { |
|||
for (let i = 0; i < arr.length; i++) { |
|||
const ele = arr[i]; |
|||
|
|||
if (Array.isArray(ele)) { |
|||
flat(ele); |
|||
continue; |
|||
} |
|||
|
|||
if (ele !== undefined) { |
|||
result.push(ele); |
|||
} |
|||
} |
|||
return result; |
|||
}; |
|||
|
|||
flat(args); |
|||
return result; |
|||
}; |
|||
@ -0,0 +1,77 @@ |
|||
{ |
|||
"name": "braces", |
|||
"description": "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", |
|||
"version": "3.0.3", |
|||
"homepage": "https://github.com/micromatch/braces", |
|||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)", |
|||
"contributors": [ |
|||
"Brian Woodward (https://twitter.com/doowb)", |
|||
"Elan Shanker (https://github.com/es128)", |
|||
"Eugene Sharygin (https://github.com/eush77)", |
|||
"hemanth.hm (http://h3manth.com)", |
|||
"Jon Schlinkert (http://twitter.com/jonschlinkert)" |
|||
], |
|||
"repository": "micromatch/braces", |
|||
"bugs": { |
|||
"url": "https://github.com/micromatch/braces/issues" |
|||
}, |
|||
"license": "MIT", |
|||
"files": [ |
|||
"index.js", |
|||
"lib" |
|||
], |
|||
"main": "index.js", |
|||
"engines": { |
|||
"node": ">=8" |
|||
}, |
|||
"scripts": { |
|||
"test": "mocha", |
|||
"benchmark": "node benchmark" |
|||
}, |
|||
"dependencies": { |
|||
"fill-range": "^7.1.1" |
|||
}, |
|||
"devDependencies": { |
|||
"ansi-colors": "^3.2.4", |
|||
"bash-path": "^2.0.1", |
|||
"gulp-format-md": "^2.0.0", |
|||
"mocha": "^6.1.1" |
|||
}, |
|||
"keywords": [ |
|||
"alpha", |
|||
"alphabetical", |
|||
"bash", |
|||
"brace", |
|||
"braces", |
|||
"expand", |
|||
"expansion", |
|||
"filepath", |
|||
"fill", |
|||
"fs", |
|||
"glob", |
|||
"globbing", |
|||
"letter", |
|||
"match", |
|||
"matches", |
|||
"matching", |
|||
"number", |
|||
"numerical", |
|||
"path", |
|||
"range", |
|||
"ranges", |
|||
"sh" |
|||
], |
|||
"verb": { |
|||
"toc": false, |
|||
"layout": "default", |
|||
"tasks": [ |
|||
"readme" |
|||
], |
|||
"lint": { |
|||
"reflinks": true |
|||
}, |
|||
"plugins": [ |
|||
"gulp-format-md" |
|||
] |
|||
} |
|||
} |
|||
@ -0,0 +1,97 @@ |
|||
3.1.2 / 2022-01-27 |
|||
================== |
|||
|
|||
* Fix return value for un-parsable strings |
|||
|
|||
3.1.1 / 2021-11-15 |
|||
================== |
|||
|
|||
* Fix "thousandsSeparator" incorrecting formatting fractional part |
|||
|
|||
3.1.0 / 2019-01-22 |
|||
================== |
|||
|
|||
* Add petabyte (`pb`) support |
|||
|
|||
3.0.0 / 2017-08-31 |
|||
================== |
|||
|
|||
* Change "kB" to "KB" in format output |
|||
* Remove support for Node.js 0.6 |
|||
* Remove support for ComponentJS |
|||
|
|||
2.5.0 / 2017-03-24 |
|||
================== |
|||
|
|||
* Add option "unit" |
|||
|
|||
2.4.0 / 2016-06-01 |
|||
================== |
|||
|
|||
* Add option "unitSeparator" |
|||
|
|||
2.3.0 / 2016-02-15 |
|||
================== |
|||
|
|||
* Drop partial bytes on all parsed units |
|||
* Fix non-finite numbers to `.format` to return `null` |
|||
* Fix parsing byte string that looks like hex |
|||
* perf: hoist regular expressions |
|||
|
|||
2.2.0 / 2015-11-13 |
|||
================== |
|||
|
|||
* add option "decimalPlaces" |
|||
* add option "fixedDecimals" |
|||
|
|||
2.1.0 / 2015-05-21 |
|||
================== |
|||
|
|||
* add `.format` export |
|||
* add `.parse` export |
|||
|
|||
2.0.2 / 2015-05-20 |
|||
================== |
|||
|
|||
* remove map recreation |
|||
* remove unnecessary object construction |
|||
|
|||
2.0.1 / 2015-05-07 |
|||
================== |
|||
|
|||
* fix browserify require |
|||
* remove node.extend dependency |
|||
|
|||
2.0.0 / 2015-04-12 |
|||
================== |
|||
|
|||
* add option "case" |
|||
* add option "thousandsSeparator" |
|||
* return "null" on invalid parse input |
|||
* support proper round-trip: bytes(bytes(num)) === num |
|||
* units no longer case sensitive when parsing |
|||
|
|||
1.0.0 / 2014-05-05 |
|||
================== |
|||
|
|||
* add negative support. fixes #6 |
|||
|
|||
0.3.0 / 2014-03-19 |
|||
================== |
|||
|
|||
* added terabyte support |
|||
|
|||
0.2.1 / 2013-04-01 |
|||
================== |
|||
|
|||
* add .component |
|||
|
|||
0.2.0 / 2012-10-28 |
|||
================== |
|||
|
|||
* bytes(200).should.eql('200b') |
|||
|
|||
0.1.0 / 2012-07-04 |
|||
================== |
|||
|
|||
* add bytes to string conversion [yields] |
|||
@ -0,0 +1,23 @@ |
|||
(The MIT License) |
|||
|
|||
Copyright (c) 2012-2014 TJ Holowaychuk <tj@vision-media.ca> |
|||
Copyright (c) 2015 Jed Watson <jed.watson@me.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining |
|||
a copy of this software and associated documentation files (the |
|||
'Software'), to deal in the Software without restriction, including |
|||
without limitation the rights to use, copy, modify, merge, publish, |
|||
distribute, sublicense, and/or sell copies of the Software, and to |
|||
permit persons to whom the Software is furnished to do so, subject to |
|||
the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be |
|||
included in all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, |
|||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
|||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
|||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, |
|||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
|||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|||
@ -0,0 +1,152 @@ |
|||
# Bytes utility |
|||
|
|||
[![NPM Version][npm-image]][npm-url] |
|||
[![NPM Downloads][downloads-image]][downloads-url] |
|||
[![Build Status][ci-image]][ci-url] |
|||
[![Test Coverage][coveralls-image]][coveralls-url] |
|||
|
|||
Utility to parse a byte string (ex: `1TB`) into its numeric value in bytes (`1099511627776`) and vice-versa. |
|||
|
|||
## Installation |
|||
|
|||
This is a [Node.js](https://nodejs.org/en/) module available through the |
|||
[npm registry](https://www.npmjs.com/). Installation is done using the |
|||
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): |
|||
|
|||
```bash |
|||
$ npm install bytes |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```js |
|||
var bytes = require('bytes'); |
|||
``` |
|||
|
|||
#### bytes(number|string value, [options]): number|string|null |
|||
|
|||
Default export function. Delegates to either `bytes.format` or `bytes.parse` based on the type of `value`. |
|||
|
|||
**Arguments** |
|||
|
|||
| Name | Type | Description | |
|||
|---------|----------|--------------------| |
|||
| value | `number`\|`string` | Number value to format or string value to parse | |
|||
| options | `Object` | Conversion options for `format` | |
|||
|
|||
**Returns** |
|||
|
|||
| Name | Type | Description | |
|||
|---------|------------------|-------------------------------------------------| |
|||
| results | `string`\|`number`\|`null` | Return null upon error. Numeric value in bytes, or string value otherwise. | |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
bytes(1024); |
|||
// output: '1KB' |
|||
|
|||
bytes('1KB'); |
|||
// output: 1024 |
|||
``` |
|||
|
|||
#### bytes.format(number value, [options]): string|null |
|||
|
|||
Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is |
|||
rounded. |
|||
|
|||
**Arguments** |
|||
|
|||
| Name | Type | Description | |
|||
|---------|----------|--------------------| |
|||
| value | `number` | Value in bytes | |
|||
| options | `Object` | Conversion options | |
|||
|
|||
**Options** |
|||
|
|||
| Property | Type | Description | |
|||
|-------------------|--------|-----------------------------------------------------------------------------------------| |
|||
| decimalPlaces | `number`\|`null` | Maximum number of decimal places to include in output. Defaults to `2`. | |
|||
| fixedDecimals | `boolean`\|`null` | Whether to always display the maximum number of decimal places. Defaults to `false`. | |
|||
| thousandsSeparator | `string`\|`null` | Example of values: `' '`, `','` and `'.'`... Defaults to `''`. | |
|||
| unit | `string`\|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Defaults to `''` (which means auto detect). | |
|||
| unitSeparator | `string`\|`null` | Separator to use between number and unit. Defaults to `''`. | |
|||
|
|||
**Returns** |
|||
|
|||
| Name | Type | Description | |
|||
|---------|------------------|-------------------------------------------------| |
|||
| results | `string`|`null` | Return null upon error. String value otherwise. | |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
bytes.format(1024); |
|||
// output: '1KB' |
|||
|
|||
bytes.format(1000); |
|||
// output: '1000B' |
|||
|
|||
bytes.format(1000, {thousandsSeparator: ' '}); |
|||
// output: '1 000B' |
|||
|
|||
bytes.format(1024 * 1.7, {decimalPlaces: 0}); |
|||
// output: '2KB' |
|||
|
|||
bytes.format(1024, {unitSeparator: ' '}); |
|||
// output: '1 KB' |
|||
``` |
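
The negative-value and `fixedDecimals` behavior described above can be checked directly; the expected outputs below are inferred from the `format` implementation included later in this diff, not from upstream documentation:

```js
var bytes = require('bytes');

// Negative values keep their sign; the unit is chosen from the absolute value.
bytes.format(-1024);
// output: '-1KB'

// fixedDecimals keeps trailing zeros up to decimalPlaces (default 2).
bytes.format(1024, {fixedDecimals: true});
// output: '1.00KB'
```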
|||
|
|||
#### bytes.parse(string|number value): number|null |
|||
|
|||
Parse the string value into an integer in bytes. If no unit is given, or `value` |
|||
is a number, it is assumed the value is in bytes. |
|||
|
|||
Supported units and abbreviations are as follows and are case-insensitive: |
|||
|
|||
* `b` for bytes |
|||
* `kb` for kilobytes |
|||
* `mb` for megabytes |
|||
* `gb` for gigabytes |
|||
* `tb` for terabytes |
|||
* `pb` for petabytes |
|||
|
|||
The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. |
|||
|
|||
**Arguments** |
|||
|
|||
| Name | Type | Description | |
|||
|---------------|--------|--------------------| |
|||
| value | `string`\|`number` | String to parse, or number in bytes. | |
|||
|
|||
**Returns** |
|||
|
|||
| Name | Type | Description | |
|||
|---------|-------------|-------------------------| |
|||
| results | `number`\|`null` | Return null upon error. Value in bytes otherwise. | |
|||
|
|||
**Example** |
|||
|
|||
```js |
|||
bytes.parse('1KB'); |
|||
// output: 1024 |
|||
|
|||
bytes.parse('1024'); |
|||
// output: 1024 |
|||
|
|||
bytes.parse(1024); |
|||
// output: 1024 |
|||
``` |
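
Fractional and negative inputs are parsed as well; going by the History entries earlier in this diff ("drop partial bytes", "negative support") and the `parse` implementation below, partial bytes are floored:

```js
bytes.parse('1.5KB');
// output: 1536 (1.5 * 1024, partial bytes are dropped)

bytes.parse('-1KB');
// output: -1024
```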
|||
|
|||
## License |
|||
|
|||
[MIT](LICENSE) |
|||
|
|||
[ci-image]: https://badgen.net/github/checks/visionmedia/bytes.js/master?label=ci |
|||
[ci-url]: https://github.com/visionmedia/bytes.js/actions?query=workflow%3Aci |
|||
[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master |
|||
[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master |
|||
[downloads-image]: https://badgen.net/npm/dm/bytes |
|||
[downloads-url]: https://npmjs.org/package/bytes |
|||
[npm-image]: https://badgen.net/npm/v/bytes |
|||
[npm-url]: https://npmjs.org/package/bytes |
|||
@ -0,0 +1,170 @@ |
|||
/*! |
|||
* bytes |
|||
* Copyright(c) 2012-2014 TJ Holowaychuk |
|||
* Copyright(c) 2015 Jed Watson |
|||
* MIT Licensed |
|||
*/ |
|||
|
|||
'use strict'; |
|||
|
|||
/** |
|||
* Module exports. |
|||
* @public |
|||
*/ |
|||
|
|||
module.exports = bytes; |
|||
module.exports.format = format; |
|||
module.exports.parse = parse; |
|||
|
|||
/** |
|||
* Module variables. |
|||
* @private |
|||
*/ |
|||
|
|||
var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; |
|||
|
|||
var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; |
|||
|
|||
var map = { |
|||
b: 1, |
|||
kb: 1 << 10, |
|||
mb: 1 << 20, |
|||
gb: 1 << 30, |
|||
tb: Math.pow(1024, 4), |
|||
pb: Math.pow(1024, 5), |
|||
}; |
|||
|
|||
var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; |
|||
|
|||
/** |
|||
 * Convert the given value in bytes into a string, or parse a string into an integer in bytes. |
|||
* |
|||
* @param {string|number} value |
|||
* @param {{ |
|||
* case: [string], |
|||
* decimalPlaces: [number] |
|||
* fixedDecimals: [boolean] |
|||
* thousandsSeparator: [string] |
|||
* unitSeparator: [string] |
|||
* }} [options] bytes options. |
|||
* |
|||
* @returns {string|number|null} |
|||
*/ |
|||
|
|||
function bytes(value, options) { |
|||
if (typeof value === 'string') { |
|||
return parse(value); |
|||
} |
|||
|
|||
if (typeof value === 'number') { |
|||
return format(value, options); |
|||
} |
|||
|
|||
return null; |
|||
} |
|||
|
|||
/** |
|||
* Format the given value in bytes into a string. |
|||
* |
|||
* If the value is negative, it is kept as such. If it is a float, |
|||
* it is rounded. |
|||
* |
|||
* @param {number} value |
|||
* @param {object} [options] |
|||
* @param {number} [options.decimalPlaces=2] |
|||
* @param {number} [options.fixedDecimals=false] |
|||
* @param {string} [options.thousandsSeparator=] |
|||
* @param {string} [options.unit=] |
|||
* @param {string} [options.unitSeparator=] |
|||
* |
|||
* @returns {string|null} |
|||
* @public |
|||
*/ |
|||
|
|||
function format(value, options) { |
|||
if (!Number.isFinite(value)) { |
|||
return null; |
|||
} |
|||
|
|||
var mag = Math.abs(value); |
|||
var thousandsSeparator = (options && options.thousandsSeparator) || ''; |
|||
var unitSeparator = (options && options.unitSeparator) || ''; |
|||
var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2; |
|||
var fixedDecimals = Boolean(options && options.fixedDecimals); |
|||
var unit = (options && options.unit) || ''; |
|||
|
|||
if (!unit || !map[unit.toLowerCase()]) { |
|||
if (mag >= map.pb) { |
|||
unit = 'PB'; |
|||
} else if (mag >= map.tb) { |
|||
unit = 'TB'; |
|||
} else if (mag >= map.gb) { |
|||
unit = 'GB'; |
|||
} else if (mag >= map.mb) { |
|||
unit = 'MB'; |
|||
} else if (mag >= map.kb) { |
|||
unit = 'KB'; |
|||
} else { |
|||
unit = 'B'; |
|||
} |
|||
} |
|||
|
|||
var val = value / map[unit.toLowerCase()]; |
|||
var str = val.toFixed(decimalPlaces); |
|||
|
|||
if (!fixedDecimals) { |
|||
str = str.replace(formatDecimalsRegExp, '$1'); |
|||
} |
|||
|
|||
if (thousandsSeparator) { |
|||
str = str.split('.').map(function (s, i) { |
|||
return i === 0 |
|||
? s.replace(formatThousandsRegExp, thousandsSeparator) |
|||
: s |
|||
}).join('.'); |
|||
} |
|||
|
|||
return str + unitSeparator + unit; |
|||
} |
|||
|
|||
/** |
|||
* Parse the string value into an integer in bytes. |
|||
* |
|||
* If no unit is given, it is assumed the value is in bytes. |
|||
* |
|||
* @param {number|string} val |
|||
* |
|||
* @returns {number|null} |
|||
* @public |
|||
*/ |
|||
|
|||
function parse(val) { |
|||
if (typeof val === 'number' && !isNaN(val)) { |
|||
return val; |
|||
} |
|||
|
|||
if (typeof val !== 'string') { |
|||
return null; |
|||
} |
|||
|
|||
// Test if the string passed is valid
|
|||
var results = parseRegExp.exec(val); |
|||
var floatValue; |
|||
var unit = 'b'; |
|||
|
|||
if (!results) { |
|||
// Nothing could be extracted from the given string
|
|||
floatValue = parseInt(val, 10); |
|||
unit = 'b'; |
|||
} else { |
|||
// Retrieve the value and the unit
|
|||
floatValue = parseFloat(results[1]); |
|||
unit = results[4].toLowerCase(); |
|||
} |
|||
|
|||
if (isNaN(floatValue)) { |
|||
return null; |
|||
} |
|||
|
|||
return Math.floor(map[unit] * floatValue); |
|||
} |
|||
@ -0,0 +1,42 @@ |
|||
{ |
|||
"name": "bytes", |
|||
"description": "Utility to parse a string bytes to bytes and vice-versa", |
|||
"version": "3.1.2", |
|||
"author": "TJ Holowaychuk <tj@vision-media.ca> (http://tjholowaychuk.com)", |
|||
"contributors": [ |
|||
"Jed Watson <jed.watson@me.com>", |
|||
"Théo FIDRY <theo.fidry@gmail.com>" |
|||
], |
|||
"license": "MIT", |
|||
"keywords": [ |
|||
"byte", |
|||
"bytes", |
|||
"utility", |
|||
"parse", |
|||
"parser", |
|||
"convert", |
|||
"converter" |
|||
], |
|||
"repository": "visionmedia/bytes.js", |
|||
"devDependencies": { |
|||
"eslint": "7.32.0", |
|||
"eslint-plugin-markdown": "2.2.1", |
|||
"mocha": "9.2.0", |
|||
"nyc": "15.1.0" |
|||
}, |
|||
"files": [ |
|||
"History.md", |
|||
"LICENSE", |
|||
"Readme.md", |
|||
"index.js" |
|||
], |
|||
"engines": { |
|||
"node": ">= 0.8" |
|||
}, |
|||
"scripts": { |
|||
"lint": "eslint .", |
|||
"test": "mocha --check-leaks --reporter spec", |
|||
"test-ci": "nyc --reporter=lcov --reporter=text npm test", |
|||
"test-cov": "nyc --reporter=html --reporter=text npm test" |
|||
} |
|||
} |
|||
@ -0,0 +1,17 @@ |
|||
{ |
|||
"root": true, |
|||
|
|||
"extends": "@ljharb", |
|||
|
|||
"rules": { |
|||
"func-name-matching": 0, |
|||
"id-length": 0, |
|||
"new-cap": [2, { |
|||
"capIsNewExceptions": [ |
|||
"GetIntrinsic", |
|||
], |
|||
}], |
|||
"no-extra-parens": 0, |
|||
"no-magic-numbers": 0, |
|||
}, |
|||
} |
|||
@ -0,0 +1,12 @@ |
|||
# These are supported funding model platforms |
|||
|
|||
github: [ljharb] |
|||
patreon: # Replace with a single Patreon username |
|||
open_collective: # Replace with a single Open Collective username |
|||
ko_fi: # Replace with a single Ko-fi username |
|||
tidelift: npm/call-bind-apply-helpers |
|||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry |
|||
liberapay: # Replace with a single Liberapay username |
|||
issuehunt: # Replace with a single IssueHunt username |
|||
otechie: # Replace with a single Otechie username |
|||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] |
|||
@ -0,0 +1,9 @@ |
|||
{ |
|||
"all": true, |
|||
"check-coverage": false, |
|||
"reporter": ["text-summary", "text", "html", "json"], |
|||
"exclude": [ |
|||
"coverage", |
|||
"test" |
|||
] |
|||
} |
|||
@ -0,0 +1,30 @@ |
|||
# Changelog |
|||
|
|||
All notable changes to this project will be documented in this file. |
|||
|
|||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) |
|||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). |
|||
|
|||
## [v1.0.2](https://github.com/ljharb/call-bind-apply-helpers/compare/v1.0.1...v1.0.2) - 2025-02-12 |
|||
|
|||
### Commits |
|||
|
|||
- [types] improve inferred types [`e6f9586`](https://github.com/ljharb/call-bind-apply-helpers/commit/e6f95860a3c72879cb861a858cdfb8138fbedec1) |
|||
- [Dev Deps] update `@arethetypeswrong/cli`, `@ljharb/tsconfig`, `@types/tape`, `es-value-fixtures`, `for-each`, `has-strict-mode`, `object-inspect` [`e43d540`](https://github.com/ljharb/call-bind-apply-helpers/commit/e43d5409f97543bfbb11f345d47d8ce4e066d8c1) |
|||
|
|||
## [v1.0.1](https://github.com/ljharb/call-bind-apply-helpers/compare/v1.0.0...v1.0.1) - 2024-12-08 |
|||
|
|||
### Commits |
|||
|
|||
- [types] `reflectApply`: fix types [`4efc396`](https://github.com/ljharb/call-bind-apply-helpers/commit/4efc3965351a4f02cc55e836fa391d3d11ef2ef8) |
|||
- [Fix] `reflectApply`: oops, Reflect is not a function [`83cc739`](https://github.com/ljharb/call-bind-apply-helpers/commit/83cc7395de6b79b7730bdf092f1436f0b1263c75) |
|||
- [Dev Deps] update `@arethetypeswrong/cli` [`80bd5d3`](https://github.com/ljharb/call-bind-apply-helpers/commit/80bd5d3ae58b4f6b6995ce439dd5a1bcb178a940) |
|||
|
|||
## v1.0.0 - 2024-12-05 |
|||
|
|||
### Commits |
|||
|
|||
- Initial implementation, tests, readme [`7879629`](https://github.com/ljharb/call-bind-apply-helpers/commit/78796290f9b7430c9934d6f33d94ae9bc89fce04) |
|||
- Initial commit [`3f1dc16`](https://github.com/ljharb/call-bind-apply-helpers/commit/3f1dc164afc43285631b114a5f9dd9137b2b952f) |
|||
- npm init [`081df04`](https://github.com/ljharb/call-bind-apply-helpers/commit/081df048c312fcee400922026f6e97281200a603) |
|||
- Only apps should have lockfiles [`5b9ca0f`](https://github.com/ljharb/call-bind-apply-helpers/commit/5b9ca0fe8101ebfaf309c549caac4e0a017ed930) |
|||
@ -0,0 +1,21 @@ |
|||
MIT License |
|||
|
|||
Copyright (c) 2024 Jordan Harband |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in all |
|||
copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,62 @@ |
|||
# call-bind-apply-helpers <sup>[![Version Badge][npm-version-svg]][package-url]</sup> |
|||
|
|||
[![github actions][actions-image]][actions-url] |
|||
[![coverage][codecov-image]][codecov-url] |
|||
[![dependency status][deps-svg]][deps-url] |
|||
[![dev dependency status][dev-deps-svg]][dev-deps-url] |
|||
[![License][license-image]][license-url] |
|||
[![Downloads][downloads-image]][downloads-url] |
|||
|
|||
[![npm badge][npm-badge-png]][package-url] |
|||
|
|||
Helper functions around Function call/apply/bind, for use in `call-bind`. |
|||
|
|||
The only packages that should likely ever use this package directly are `call-bind` and `get-intrinsic`. |
|||
Please use `call-bind` unless you have a very good reason not to. |
|||
|
|||
## Getting started |
|||
|
|||
```sh |
|||
npm install --save call-bind-apply-helpers |
|||
``` |
|||
|
|||
## Usage/Examples |
|||
|
|||
```js |
|||
const assert = require('assert'); |
|||
const callBindBasic = require('call-bind-apply-helpers'); |
|||
|
|||
function f(a, b) { |
|||
assert.equal(this, 1); |
|||
assert.equal(a, 2); |
|||
assert.equal(b, 3); |
|||
assert.equal(arguments.length, 2); |
|||
} |
|||
|
|||
const fBound = callBindBasic([f, 1]); |
|||
|
|||
delete Function.prototype.call; |
|||
delete Function.prototype.bind; |
|||
|
|||
fBound(2, 3); |
|||
``` |
|||
|
|||
## Tests |
|||
|
|||
Clone the repo, `npm install`, and run `npm test` |
|||
|
|||
[package-url]: https://npmjs.org/package/call-bind-apply-helpers |
|||
[npm-version-svg]: https://versionbadg.es/ljharb/call-bind-apply-helpers.svg |
|||
[deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers.svg |
|||
[deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers |
|||
[dev-deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers/dev-status.svg |
|||
[dev-deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers#info=devDependencies |
|||
[npm-badge-png]: https://nodei.co/npm/call-bind-apply-helpers.png?downloads=true&stars=true |
|||
[license-image]: https://img.shields.io/npm/l/call-bind-apply-helpers.svg |
|||
[license-url]: LICENSE |
|||
[downloads-image]: https://img.shields.io/npm/dm/call-bind-apply-helpers.svg |
|||
[downloads-url]: https://npm-stat.com/charts.html?package=call-bind-apply-helpers |
|||
[codecov-image]: https://codecov.io/gh/ljharb/call-bind-apply-helpers/branch/main/graphs/badge.svg |
|||
[codecov-url]: https://app.codecov.io/gh/ljharb/call-bind-apply-helpers/ |
|||
[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bind-apply-helpers |
|||
[actions-url]: https://github.com/ljharb/call-bind-apply-helpers/actions |
|||
@ -0,0 +1 @@ |
|||
export = Reflect.apply; |
|||
@ -0,0 +1,10 @@ |
|||
'use strict'; |
|||
|
|||
var bind = require('function-bind'); |
|||
|
|||
var $apply = require('./functionApply'); |
|||
var $call = require('./functionCall'); |
|||
var $reflectApply = require('./reflectApply'); |
|||
|
|||
/** @type {import('./actualApply')} */ |
|||
module.exports = $reflectApply || bind.call($call, $apply); |
|||
@ -0,0 +1,19 @@ |
|||
import actualApply from './actualApply'; |
|||
|
|||
type TupleSplitHead<T extends any[], N extends number> = T['length'] extends N |
|||
? T |
|||
: T extends [...infer R, any] |
|||
? TupleSplitHead<R, N> |
|||
: never |
|||
|
|||
type TupleSplitTail<T, N extends number, O extends any[] = []> = O['length'] extends N |
|||
? T |
|||
: T extends [infer F, ...infer R] |
|||
? TupleSplitTail<[...R], N, [...O, F]> |
|||
: never |
|||
|
|||
type TupleSplit<T extends any[], N extends number> = [TupleSplitHead<T, N>, TupleSplitTail<T, N>] |
|||
|
|||
declare function applyBind(...args: TupleSplit<Parameters<typeof actualApply>, 2>[1]): ReturnType<typeof actualApply>; |
|||
|
|||
export = applyBind; |
|||
@ -0,0 +1,10 @@ |
|||
'use strict'; |
|||
|
|||
var bind = require('function-bind'); |
|||
var $apply = require('./functionApply'); |
|||
var actualApply = require('./actualApply'); |
|||
|
|||
/** @type {import('./applyBind')} */ |
|||
module.exports = function applyBind() { |
|||
return actualApply(bind, $apply, arguments); |
|||
}; |
|||
Some files were not shown because too many files changed in this diff