🗄️ Database
Architecture and schema of the Commerce Tracking database.
🎯 Overview
Commerce Tracking uses MySQL 8.0 as its primary database, with TypeORM as the ORM for managing entities and migrations.
Technologies
- Database: MySQL 8.0
- ORM: TypeORM (see the entity sketch below)
- Migrations: managed with TypeORM
- Connection pool: sized for concurrent load (poolSize: 10 in the configuration below)
- Charset: UTF8MB4 for full Unicode support
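As an illustration of how TypeORM maps these tables, here is a minimal sketch of an entity for the users table defined below. It is not the project's actual source; the file name, sibling entities (Role, Country), and decorator details are assumptions based on the schema.

// user.entity.ts — illustrative sketch, not the project's actual entity
import {
  Entity, PrimaryGeneratedColumn, Column, ManyToOne, JoinColumn,
  CreateDateColumn, UpdateDateColumn,
} from 'typeorm';
import { Role } from './role.entity';       // assumed sibling entity
import { Country } from './country.entity'; // assumed sibling entity

@Entity('users')
export class User {
  @PrimaryGeneratedColumn()
  id: number;

  @Column({ length: 50, unique: true })
  username: string;

  @Column({ length: 100, unique: true })
  email: string;

  @Column({ length: 255 })
  password: string; // bcrypt hash, never plain text

  @ManyToOne(() => Role)
  @JoinColumn({ name: 'role_id' })
  role: Role;

  @ManyToOne(() => Country)
  @JoinColumn({ name: 'country_id' })
  country: Country;

  @Column({ name: 'is_active', default: true })
  isActive: boolean;

  @CreateDateColumn({ name: 'created_at' })
  createdAt: Date;

  @UpdateDateColumn({ name: 'updated_at' })
  updatedAt: Date;
}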
🏗️ Database architecture
Configuration
# MySQL configuration
host: localhost
port: 3340
database: tradeflow_db
username: tradeflow_user
password: secure_password
charset: utf8mb4
collation: utf8mb4_unicode_ci
engine: InnoDB
TypeORM connection
// TypeORM configuration
{
type: 'mysql',
host: process.env.DB_HOST || 'localhost',
port: parseInt(process.env.DB_PORT || '3340', 10),
username: process.env.DB_USERNAME || 'tradeflow_user',
password: process.env.DB_PASSWORD || 'password',
database: process.env.DB_DATABASE || 'tradeflow_db',
charset: 'utf8mb4',
timezone: '+00:00',
synchronize: false, // Disabled: schema changes go through migrations
logging: process.env.NODE_ENV === 'development',
entities: [__dirname + '/**/*.entity{.ts,.js}'],
migrations: [__dirname + '/migrations/*{.ts,.js}'],
subscribers: [__dirname + '/subscribers/*{.ts,.js}'],
poolSize: 10,
acquireTimeout: 60000,
timeout: 60000,
extra: {
charset: 'utf8mb4_unicode_ci',
},
}
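For completeness, a minimal sketch of how this options object could be wired into TypeORM 0.3's DataSource and initialized at startup. The file name and bootstrap flow are assumptions, not the project's actual code.

// data-source.ts — illustrative bootstrap using the options above
import 'reflect-metadata';
import { DataSource } from 'typeorm';

export const AppDataSource = new DataSource({
  type: 'mysql',
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT || '3340', 10),
  username: process.env.DB_USERNAME || 'tradeflow_user',
  password: process.env.DB_PASSWORD || 'password',
  database: process.env.DB_DATABASE || 'tradeflow_db',
  charset: 'utf8mb4',
  synchronize: false,
  entities: [__dirname + '/**/*.entity{.ts,.js}'],
  migrations: [__dirname + '/migrations/*{.ts,.js}'],
});

// Fail fast at startup if the database is unreachable.
AppDataSource.initialize()
  .then(() => console.log('Database connection established'))
  .catch((err) => {
    console.error('Database connection failed', err);
    process.exit(1);
  });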
📊 Database schema
Main tables
users - System users
CREATE TABLE users (
id INT PRIMARY KEY AUTO_INCREMENT,
username VARCHAR(50) UNIQUE NOT NULL,
email VARCHAR(100) UNIQUE NOT NULL,
password VARCHAR(255) NOT NULL,
role_id INT NOT NULL,
country_id INT NOT NULL,
actor_id INT,
is_active BOOLEAN DEFAULT TRUE,
last_login TIMESTAMP NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (role_id) REFERENCES roles(id),
FOREIGN KEY (country_id) REFERENCES countries(id),
FOREIGN KEY (actor_id) REFERENCES actors(id),
INDEX idx_username (username),
INDEX idx_email (email),
INDEX idx_role_id (role_id),
INDEX idx_country_id (country_id),
INDEX idx_is_active (is_active)
);
roles - Roles and permissions
CREATE TABLE roles (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(50) UNIQUE NOT NULL,
name_en VARCHAR(50) NOT NULL,
level INT NOT NULL,
description TEXT,
permissions JSON,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_level (level),
INDEX idx_is_active (is_active)
);
actors - Trade actors
CREATE TABLE actors (
id INT PRIMARY KEY AUTO_INCREMENT,
actor_role VARCHAR(100) NOT NULL,
first_name VARCHAR(100) NOT NULL,
last_name VARCHAR(100) NOT NULL,
email VARCHAR(100) UNIQUE,
phone VARCHAR(20),
country_id INT NOT NULL,
specialization VARCHAR(200),
experience_years INT DEFAULT 0,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (country_id) REFERENCES countries(id),
INDEX idx_email (email),
INDEX idx_country_id (country_id),
INDEX idx_actor_role (actor_role),
INDEX idx_is_active (is_active)
);
collections - Trade data collections
CREATE TABLE collections (
id INT PRIMARY KEY AUTO_INCREMENT,
public_id VARCHAR(36) UNIQUE NOT NULL,
collection_date TIMESTAMP NOT NULL,
collector_id INT NOT NULL,
status ENUM('draft', 'submitted', 'team_manager_validation', 'supervisor_validation', 'approved', 'rejected') DEFAULT 'draft',
validation_level INT DEFAULT 0,
location JSON,
trade_data JSON,
metadata JSON,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (collector_id) REFERENCES actors(id),
INDEX idx_public_id (public_id),
INDEX idx_collector_id (collector_id),
INDEX idx_status (status),
INDEX idx_collection_date (collection_date),
INDEX idx_validation_level (validation_level),
INDEX idx_created_at (created_at)
);
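The location, trade_data, and metadata columns hold semi-structured JSON. Here is a hedged sketch of how the application could type these payloads on the TypeScript side; apart from trader_info.document_number (used in the encryption example further down), every field name is an assumption.

// collection-payloads.ts — illustrative types for the JSON columns of collections
export interface CollectionLocation {
  latitude?: number;   // assumed field
  longitude?: number;  // assumed field
  market?: string;     // assumed field
}

export interface TradeData {
  trader_info?: {
    document_number?: string; // stored encrypted (see the security section)
    [key: string]: unknown;
  };
  [key: string]: unknown;
}

export interface CollectionMetadata {
  [key: string]: unknown; // free-form metadata
}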
validations - Multi-level validations
CREATE TABLE validations (
id INT PRIMARY KEY AUTO_INCREMENT,
collection_id INT NOT NULL,
validator_id INT NOT NULL,
level INT NOT NULL,
status ENUM('pending', 'approved', 'rejected') DEFAULT 'pending',
comments TEXT,
validated_at TIMESTAMP NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (collection_id) REFERENCES collections(id) ON DELETE CASCADE,
FOREIGN KEY (validator_id) REFERENCES users(id),
INDEX idx_collection_id (collection_id),
INDEX idx_validator_id (validator_id),
INDEX idx_level (level),
INDEX idx_status (status),
INDEX idx_validated_at (validated_at)
);
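To make the multi-level workflow concrete, here is a hedged sketch of a single validation step that records a validations row and advances the collection status. The level-to-status mapping (level 1 = team manager, level 2 = supervisor) is an assumption derived from the status enum above, not confirmed project logic.

// approve-collection.ts — illustrative validation step, not the project's actual service
import { DataSource } from 'typeorm';

export async function approveAtLevel(
  dataSource: DataSource,
  collectionId: number,
  validatorId: number,
  level: number,
): Promise<void> {
  await dataSource.transaction(async (manager) => {
    // Record the validator's decision for this level.
    await manager.query(
      `INSERT INTO validations (collection_id, validator_id, level, status, validated_at)
       VALUES (?, ?, ?, 'approved', NOW())`,
      [collectionId, validatorId, level],
    );
    // Advance the collection (assumed mapping: level 1 -> supervisor_validation, level 2 -> approved).
    const nextStatus = level >= 2 ? 'approved' : 'supervisor_validation';
    await manager.query(
      `UPDATE collections SET status = ?, validation_level = ? WHERE id = ?`,
      [nextStatus, level, collectionId],
    );
  });
}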
digitalized_collections - Digitized collections
CREATE TABLE digitalized_collections (
id INT PRIMARY KEY AUTO_INCREMENT,
collection_id INT NOT NULL,
digital_data JSON NOT NULL,
device_info JSON,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (collection_id) REFERENCES collections(id) ON DELETE CASCADE,
INDEX idx_collection_id (collection_id),
INDEX idx_created_at (created_at)
);
Reference tables
countries - Countries
CREATE TABLE countries (
id INT PRIMARY KEY AUTO_INCREMENT,
code VARCHAR(3) UNIQUE NOT NULL,
name VARCHAR(100) NOT NULL,
name_en VARCHAR(100) NOT NULL,
iso_code VARCHAR(3) UNIQUE,
region VARCHAR(100),
currency_code VARCHAR(3),
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_code (code),
INDEX idx_iso_code (iso_code),
INDEX idx_region (region),
INDEX idx_is_active (is_active)
);
currencies - Currencies
CREATE TABLE currencies (
id INT PRIMARY KEY AUTO_INCREMENT,
code VARCHAR(3) UNIQUE NOT NULL,
name VARCHAR(100) NOT NULL,
symbol VARCHAR(10),
decimal_places INT DEFAULT 2,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_code (code),
INDEX idx_is_active (is_active)
);
products - Products
CREATE TABLE products (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(200) NOT NULL,
name_en VARCHAR(200),
category ENUM('agricultural', 'livestock', 'minerals', 'manufactured') NOT NULL,
product_type_id INT,
code VARCHAR(50) UNIQUE,
scientific_name VARCHAR(200),
seasonality JSON,
storage_requirements VARCHAR(200),
export_restrictions JSON,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_category (category),
INDEX idx_product_type_id (product_type_id),
INDEX idx_code (code),
INDEX idx_is_active (is_active)
);
animals - Animals
CREATE TABLE animals (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(200) NOT NULL,
name_en VARCHAR(200),
category ENUM('livestock', 'poultry', 'wild') NOT NULL,
animal_type_id INT,
code VARCHAR(50) UNIQUE,
scientific_name VARCHAR(200),
average_weight_kg DECIMAL(10,2),
health_requirements JSON,
export_restrictions JSON,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_category (category),
INDEX idx_animal_type_id (animal_type_id),
INDEX idx_code (code),
INDEX idx_is_active (is_active)
);
Audit tables
audit_logs - Audit logs
CREATE TABLE audit_logs (
id INT PRIMARY KEY AUTO_INCREMENT,
user_id INT,
user_name VARCHAR(100),
action VARCHAR(50) NOT NULL,
resource_type VARCHAR(50) NOT NULL,
resource_id INT,
resource_name VARCHAR(200),
details JSON,
ip_address VARCHAR(45),
user_agent TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id),
INDEX idx_user_id (user_id),
INDEX idx_action (action),
INDEX idx_resource_type (resource_type),
INDEX idx_resource_id (resource_id),
INDEX idx_created_at (created_at)
);
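Since the TypeORM configuration above registers subscribers, one plausible way to populate audit_logs is an entity subscriber. The sketch below is only an illustration under that assumption; request context (user_id, ip_address) would have to be injected elsewhere.

// audit.subscriber.ts — illustrative subscriber writing UPDATE events to audit_logs
import { EventSubscriber, EntitySubscriberInterface, UpdateEvent } from 'typeorm';

@EventSubscriber()
export class AuditSubscriber implements EntitySubscriberInterface {
  async afterUpdate(event: UpdateEvent<any>): Promise<void> {
    if (!event.entity) return;
    await event.manager.query(
      `INSERT INTO audit_logs (action, resource_type, resource_id, details)
       VALUES (?, ?, ?, ?)`,
      [
        'UPDATE',
        event.metadata.tableName,
        event.entity.id ?? null,
        JSON.stringify({ updatedColumns: event.updatedColumns.map((c) => c.propertyName) }),
      ],
    );
  }
}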
🔗 Table relationships
ERD diagram
erDiagram
users ||--o{ audit_logs : creates
users ||--o{ validations : validates
users }o--|| roles : has
users }o--|| countries : located_in
users }o--o| actors : represents
actors ||--o{ collections : collects
actors }o--|| countries : located_in
collections ||--o{ validations : has
collections ||--o{ digitalized_collections : has
countries ||--o{ users : contains
countries ||--o{ actors : contains
products ||--o{ collections : referenced_in
animals ||--o{ collections : referenced_in
Main relationships
- users → roles: a user has one role
- users → countries: a user belongs to a country
- users → actors: a user may represent an actor
- actors → collections: an actor can create collections
- collections → validations: a collection can have several validations
- collections → digitalized_collections: a collection can have digitized data (see the relation sketch after this list)
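As an illustration of how these relationships could be declared in TypeORM, here is a hedged sketch of the collections side. The sibling entities (Actor, Validation) and their inverse properties are assumptions; only the table and column names come from the schema above.

// collection.entity.ts — illustrative sketch of the collections/validations relations
import {
  Entity, PrimaryGeneratedColumn, Column, ManyToOne, OneToMany, JoinColumn,
} from 'typeorm';
import { Actor } from './actor.entity';           // assumed sibling entity
import { Validation } from './validation.entity'; // assumed sibling entity

@Entity('collections')
export class Collection {
  @PrimaryGeneratedColumn()
  id: number;

  @Column({ name: 'public_id', length: 36, unique: true })
  publicId: string;

  @Column({ name: 'collection_date', type: 'timestamp' })
  collectionDate: Date;

  // Many collections belong to one collector (actor).
  @ManyToOne(() => Actor, (actor) => actor.collections)
  @JoinColumn({ name: 'collector_id' })
  collector: Actor;

  // One collection has many validations (multi-level workflow).
  @OneToMany(() => Validation, (validation) => validation.collection)
  validations: Validation[];
}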
📈 Optimizations and indexes
Composite indexes
-- Index for collection queries by collector and date
CREATE INDEX idx_collections_collector_date ON collections(collector_id, collection_date);
-- Index for validation queries by collection and level
CREATE INDEX idx_validations_collection_level ON validations(collection_id, level);
-- Index for audit queries by user and date
CREATE INDEX idx_audit_logs_user_date ON audit_logs(user_id, created_at);
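To show the kind of query these composite indexes serve, here is a hedged sketch of a per-collector date-range lookup using the Collection entity sketched earlier; the repository usage and property names are assumptions.

// collections-report.ts — illustrative query matching idx_collections_collector_date
import { Between, DataSource } from 'typeorm';
import { Collection } from './collection.entity'; // sketched above

export async function collectionsForCollector(
  dataSource: DataSource,
  collectorId: number,
  from: Date,
  to: Date,
): Promise<Collection[]> {
  // Filters on collector_id + collection_date, the columns covered by the composite index.
  return dataSource.getRepository(Collection).find({
    where: {
      collector: { id: collectorId },
      collectionDate: Between(from, to),
    },
    order: { collectionDate: 'DESC' },
  });
}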
Full-text search indexes
-- MySQL cannot create FULLTEXT indexes directly on JSON columns (location, trade_data);
-- a STORED generated text column extracted from the JSON is indexed instead.
ALTER TABLE collections ADD COLUMN trade_data_text TEXT
  GENERATED ALWAYS AS (CAST(trade_data AS CHAR)) STORED;
ALTER TABLE collections ADD FULLTEXT(trade_data_text);
-- Full-text index for searching products
ALTER TABLE products ADD FULLTEXT(name, name_en, scientific_name);
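A hedged usage sketch of the products full-text index via a raw query; the function and file names are assumptions.

// product-search.ts — illustrative full-text search on the products index above
import { DataSource } from 'typeorm';

export async function searchProducts(dataSource: DataSource, term: string) {
  // Natural-language MATCH ... AGAINST on the FULLTEXT(name, name_en, scientific_name) index.
  return dataSource.query(
    `SELECT id, name, code
       FROM products
      WHERE MATCH(name, name_en, scientific_name) AGAINST (? IN NATURAL LANGUAGE MODE)
      LIMIT 20`,
    [term],
  );
}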
🔄 TypeORM migrations
Example migration
// migrations/001-CreateUsersTable.ts
import { MigrationInterface, QueryRunner } from 'typeorm';
export class CreateUsersTable1704067200000 implements MigrationInterface {
name = 'CreateUsersTable1704067200000';
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
CREATE TABLE users (
id INT PRIMARY KEY AUTO_INCREMENT,
username VARCHAR(50) UNIQUE NOT NULL,
email VARCHAR(100) UNIQUE NOT NULL,
password VARCHAR(255) NOT NULL,
role_id INT NOT NULL,
country_id INT NOT NULL,
actor_id INT,
is_active BOOLEAN DEFAULT TRUE,
last_login TIMESTAMP NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (role_id) REFERENCES roles(id),
FOREIGN KEY (country_id) REFERENCES countries(id),
FOREIGN KEY (actor_id) REFERENCES actors(id),
INDEX idx_username (username),
INDEX idx_email (email),
INDEX idx_role_id (role_id),
INDEX idx_country_id (country_id),
INDEX idx_is_active (is_active)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
`);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP TABLE users`);
}
}
Migration commands
# Generate a new migration
# (note: with TypeORM 0.3+, the CLI expects a migration path and a -d <data-source> flag instead of -n)
npm run typeorm migration:generate -- -n CreateUsersTable
# Run pending migrations
npm run typeorm migration:run
# Revert the last migration
npm run typeorm migration:revert
🛡️ Security and permissions
Database users
-- Main application user
CREATE USER 'tradeflow_user'@'%' IDENTIFIED BY 'secure_password';
GRANT SELECT, INSERT, UPDATE, DELETE ON tradeflow_db.* TO 'tradeflow_user'@'%';
-- Read-only user for reporting
CREATE USER 'tradeflow_readonly'@'%' IDENTIFIED BY 'readonly_password';
GRANT SELECT ON tradeflow_db.* TO 'tradeflow_readonly'@'%';
-- User for migrations
CREATE USER 'tradeflow_migrate'@'%' IDENTIFIED BY 'migrate_password';
GRANT ALL PRIVILEGES ON tradeflow_db.* TO 'tradeflow_migrate'@'%';
FLUSH PRIVILEGES;
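As an example of how the read-only account could be consumed, here is a hedged sketch of a second DataSource dedicated to reporting; the DB_READONLY_PASSWORD variable and file name are assumptions.

// reporting-data-source.ts — illustrative secondary connection using the read-only account
import { DataSource } from 'typeorm';

export const ReportingDataSource = new DataSource({
  type: 'mysql',
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT || '3340', 10),
  username: 'tradeflow_readonly',                                     // SELECT-only grants (see above)
  password: process.env.DB_READONLY_PASSWORD || 'readonly_password',  // assumed env variable
  database: 'tradeflow_db',
  synchronize: false, // a SELECT-only account cannot alter the schema anyway
});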
Sensitive data encryption
-- Password hashing is handled by the application with bcrypt;
-- passwords are never stored in plain text.
-- Encrypting sensitive values inside JSON documents using MySQL functions.
-- AES_ENCRYPT returns binary data, so it is Base64-encoded before being written
-- back into the JSON column (JSON values cannot hold raw binary strings).
UPDATE collections
SET trade_data = JSON_SET(
  trade_data,
  '$.trader_info.document_number',
  TO_BASE64(AES_ENCRYPT(JSON_UNQUOTE(JSON_EXTRACT(trade_data, '$.trader_info.document_number')), 'encryption_key'))
);
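On the application side, a minimal bcrypt sketch matching the "never stored in plain text" rule above; the cost factor and helper names are assumptions.

// password.ts — illustrative bcrypt hashing for users.password
import * as bcrypt from 'bcrypt';

const SALT_ROUNDS = 10; // assumed cost factor

export async function hashPassword(plain: string): Promise<string> {
  return bcrypt.hash(plain, SALT_ROUNDS); // result fits the VARCHAR(255) password column
}

export async function verifyPassword(plain: string, hash: string): Promise<boolean> {
  return bcrypt.compare(plain, hash);
}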
📊 Monitoring and maintenance
Monitoring queries
-- Check the overall database size
SELECT
table_schema AS 'Database',
ROUND(SUM(data_length + index_length) / 1024 / 1024, 2) AS 'Size (MB)'
FROM information_schema.tables
WHERE table_schema = 'tradeflow_db'
GROUP BY table_schema;
-- Find the largest tables
SELECT
table_name AS 'Table',
ROUND(((data_length + index_length) / 1024 / 1024), 2) AS 'Size (MB)',
table_rows AS 'Rows'
FROM information_schema.tables
WHERE table_schema = 'tradeflow_db'
ORDER BY (data_length + index_length) DESC;
-- Find unused indexes
SELECT
object_schema,
object_name,
index_name,
count_read,
count_write
FROM performance_schema.table_io_waits_summary_by_index_usage
WHERE object_schema = 'tradeflow_db'
AND count_read = 0
AND count_write = 0;
Automated maintenance
-- Purge audit logs older than one year
DELETE FROM audit_logs
WHERE created_at < DATE_SUB(NOW(), INTERVAL 1 YEAR);
-- Optimize tables
OPTIMIZE TABLE collections, validations, audit_logs;
-- Refresh table statistics
ANALYZE TABLE collections, validations, users, actors;
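The audit-log purge above can also be run from the application. Below is a hedged sketch of the job body only; how it is scheduled (cron, systemd timer, worker queue) is left open, and the return-value handling assumes the mysql2 driver.

// maintenance.ts — illustrative audit-log purge run through the application connection
import { DataSource } from 'typeorm';

export async function purgeOldAuditLogs(dataSource: DataSource): Promise<number> {
  const result = await dataSource.query(
    `DELETE FROM audit_logs WHERE created_at < DATE_SUB(NOW(), INTERVAL 1 YEAR)`,
  );
  return result?.affectedRows ?? 0; // mysql2 returns an OkPacket-like result for DELETE
}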
🔄 Backup and restore
Backup script
#!/bin/bash
# backup-database.sh
DB_NAME="tradeflow_db"
DB_USER="tradeflow_migrate"
DB_PASS="migrate_password"
BACKUP_DIR="/backups/mysql"
DATE=$(date +%Y%m%d_%H%M%S)
# Create the backup directory
mkdir -p "$BACKUP_DIR"
# Full backup
mysqldump -u $DB_USER -p$DB_PASS \
--single-transaction \
--routines \
--triggers \
--events \
$DB_NAME > $BACKUP_DIR/tradeflow_full_$DATE.sql
# Compress the dump
gzip "$BACKUP_DIR/tradeflow_full_$DATE.sql"
# Delete backups older than 30 days
find "$BACKUP_DIR" -name "tradeflow_full_*.sql.gz" -mtime +30 -delete
echo "Backup complete: tradeflow_full_$DATE.sql.gz"
Restore script
#!/bin/bash
# restore-database.sh
DB_NAME="tradeflow_db"
DB_USER="tradeflow_migrate"
DB_PASS="migrate_password"
BACKUP_FILE=$1
if [ -z "$BACKUP_FILE" ]; then
echo "Usage: $0 <backup_file.sql.gz>"
exit 1
fi
# Decompress and restore
gunzip -c "$BACKUP_FILE" | mysql -u $DB_USER -p$DB_PASS $DB_NAME
echo "Restore complete from: $BACKUP_FILE"
This database architecture provides a solid, scalable foundation for the Commerce Tracking system, with optimizations for performance, security, and maintenance.