SQL Databases with Sequelize
Introduction to Sequelize ORM
Sequelize is a promise-based Node.js ORM (Object-Relational Mapping) tool for PostgreSQL, MySQL, MariaDB, SQLite, and Microsoft SQL Server. It features solid transaction support, relations, eager and lazy loading, read replication, and more. Sequelize provides a powerful abstraction layer over raw SQL, allowing you to work with databases using JavaScript objects.
Why Use Sequelize?
- Database-agnostic code (switch databases easily)
- Automatic SQL query generation
- Built-in validation and data types
- Migration and seeding support
- Transaction management
- Relationship handling (associations)
- Type safety and IntelliSense support (when paired with TypeScript type definitions)
Installing and Configuring Sequelize
# Install Sequelize and database driver
npm install sequelize
# Choose your database driver
npm install pg pg-hstore # PostgreSQL
npm install mysql2 # MySQL
npm install mariadb # MariaDB
npm install sqlite3 # SQLite
npm install tedious # Microsoft SQL Server
# Install Sequelize CLI for migrations
npm install --save-dev sequelize-cli
Initialize Sequelize in your project:
# Initialize Sequelize (creates config, models, migrations, seeders folders)
npx sequelize-cli init
Configure the database connection in config/config.js (rename the CLI-generated config.json to config.js — a plain JSON file cannot contain expressions such as process.env.* or console.log):
{
"development": {
"username": "root",
"password": "password",
"database": "myapp_dev",
"host": "127.0.0.1",
"dialect": "mysql",
"logging": console.log
},
"test": {
"username": "root",
"password": "password",
"database": "myapp_test",
"host": "127.0.0.1",
"dialect": "mysql",
"logging": false
},
"production": {
"username": process.env.DB_USERNAME,
"password": process.env.DB_PASSWORD,
"database": process.env.DB_DATABASE,
"host": process.env.DB_HOST,
"dialect": "mysql",
"logging": false,
"pool": {
"max": 5,
"min": 0,
"acquire": 30000,
"idle": 10000
}
}
}
Create database connection instance:
// config/database.js
// Builds the single shared Sequelize instance from environment variables
// and verifies connectivity once at startup.
const { Sequelize } = require('sequelize');
require('dotenv').config();

// Connection options are kept in a named object for readability.
const connectionOptions = {
  host: process.env.DB_HOST,
  dialect: 'mysql', // or 'postgres', 'sqlite', 'mariadb', 'mssql'
  // Log SQL only in development; stay silent everywhere else.
  logging: process.env.NODE_ENV === 'development' ? console.log : false,
  pool: {
    max: 5,
    min: 0,
    acquire: 30000,
    idle: 10000
  }
};

const sequelize = new Sequelize(
  process.env.DB_DATABASE,
  process.env.DB_USERNAME,
  process.env.DB_PASSWORD,
  connectionOptions
);

// One-shot startup check: authenticate() issues a trivial query.
async function testConnection() {
  try {
    await sequelize.authenticate();
    console.log('Database connection established successfully.');
  } catch (error) {
    console.error('Unable to connect to the database:', error);
  }
}

testConnection();

module.exports = sequelize;
Environment Variables: Store database credentials in .env file:
DB_HOST=localhost
DB_USERNAME=root
DB_PASSWORD=yourpassword
DB_DATABASE=myapp_db
Defining Models
Models represent tables in your database. Each model corresponds to a table, and instances of the model represent rows in that table.
// models/User.js
// Sequelize model for the `users` table; one instance represents one row.
// Column-level `validate` rules run automatically on create/update.
const { DataTypes } = require('sequelize');
const sequelize = require('../config/database');

const User = sequelize.define('User', {
  // Surrogate auto-increment primary key.
  id: {
    type: DataTypes.INTEGER,
    primaryKey: true,
    autoIncrement: true
  },
  // Login handle: required, unique, 3-50 characters.
  username: {
    type: DataTypes.STRING(50),
    allowNull: false,
    unique: true,
    validate: {
      len: [3, 50],
      notEmpty: true
    }
  },
  // Contact address: required, unique, must parse as an email.
  email: {
    type: DataTypes.STRING(100),
    allowNull: false,
    unique: true,
    validate: {
      isEmail: true,
      notEmpty: true
    }
  },
  // Stored credential — presumably a hash, not plaintext (the len check
  // applies to the stored value). TODO confirm hashing happens upstream.
  password: {
    type: DataTypes.STRING(255),
    allowNull: false,
    validate: {
      len: [6, 255]
    }
  },
  firstName: {
    type: DataTypes.STRING(50),
    allowNull: true,
    field: 'first_name' // Column name in database (snake_case mapping)
  },
  lastName: {
    type: DataTypes.STRING(50),
    allowNull: true,
    field: 'last_name'
  },
  // Optional; validation enforces an adult age range when present.
  age: {
    type: DataTypes.INTEGER,
    allowNull: true,
    validate: {
      min: 18,
      max: 120
    }
  },
  // Authorization tier; new rows default to the least-privileged role.
  role: {
    type: DataTypes.ENUM('user', 'admin', 'moderator'),
    defaultValue: 'user'
  },
  isActive: {
    type: DataTypes.BOOLEAN,
    defaultValue: true,
    field: 'is_active'
  },
  lastLogin: {
    type: DataTypes.DATE,
    allowNull: true,
    field: 'last_login'
  }
}, {
  tableName: 'users',
  timestamps: true, // Adds createdAt and updatedAt
  underscored: true, // Use snake_case for automatically added fields
  paranoid: true, // Adds deletedAt; destroy() soft-deletes by default
  // NOTE(review): with paranoid mode, soft-deleted rows still occupy the
  // unique username/email slots and will block re-registration of those
  // values — confirm this is intended.
  indexes: [
    {
      // NOTE(review): `unique: true` on the email attribute above already
      // creates a unique index when syncing; this entry may duplicate it.
      unique: true,
      fields: ['email']
    },
    {
      fields: ['username']
    }
  ]
});

module.exports = User;
Common Data Types:
DataTypes.STRING(length) — VARCHAR; DataTypes.TEXT — TEXT; DataTypes.INTEGER — INTEGER; DataTypes.BIGINT — BIGINT; DataTypes.FLOAT — FLOAT; DataTypes.DECIMAL(precision, scale) — DECIMAL; DataTypes.BOOLEAN — TINYINT(1) in MySQL; DataTypes.DATE — DATETIME; DataTypes.DATEONLY — DATE; DataTypes.ENUM(...values) — ENUM; DataTypes.JSON — JSON (MySQL, PostgreSQL); DataTypes.UUID — UUID (PostgreSQL)
Migrations
Migrations are a way to make changes to your database schema over time in a systematic and reversible way.
# Create a migration
npx sequelize-cli migration:generate --name create-users-table
Edit the generated migration file:
// migrations/YYYYMMDDHHMMSS-create-users-table.js
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable('users', {
id: {
type: Sequelize.INTEGER,
primaryKey: true,
autoIncrement: true,
allowNull: false
},
username: {
type: Sequelize.STRING(50),
allowNull: false,
unique: true
},
email: {
type: Sequelize.STRING(100),
allowNull: false,
unique: true
},
password: {
type: Sequelize.STRING(255),
allowNull: false
},
first_name: {
type: Sequelize.STRING(50),
allowNull: true
},
last_name: {
type: Sequelize.STRING(50),
allowNull: true
},
age: {
type: Sequelize.INTEGER,
allowNull: true
},
role: {
type: Sequelize.ENUM('user', 'admin', 'moderator'),
defaultValue: 'user'
},
is_active: {
type: Sequelize.BOOLEAN,
defaultValue: true
},
last_login: {
type: Sequelize.DATE,
allowNull: true
},
created_at: {
type: Sequelize.DATE,
allowNull: false,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
},
updated_at: {
type: Sequelize.DATE,
allowNull: false,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP')
},
deleted_at: {
type: Sequelize.DATE,
allowNull: true
}
});
// Add indexes
await queryInterface.addIndex('users', ['email'], {
unique: true,
name: 'users_email_unique'
});
await queryInterface.addIndex('users', ['username'], {
name: 'users_username_index'
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable('users');
}
};
Run migrations:
# Run all pending migrations
npx sequelize-cli db:migrate
# Undo last migration
npx sequelize-cli db:migrate:undo
# Undo all migrations
npx sequelize-cli db:migrate:undo:all
# Check migration status
npx sequelize-cli db:migrate:status
Important: Never modify existing migrations that have already been run. Create new migrations for changes to maintain version control and avoid data loss.
CRUD Operations
Creating Records
// POST /users — create a single user record.
// Responds 201 with the created row, 400 on validation failure, and 409
// when a unique column (username/email) is already taken. Previously a
// unique-constraint violation fell through to a generic 500.
const createUser = async (req, res) => {
  try {
    const user = await User.create({
      username: 'johndoe',
      email: 'john@example.com',
      password: 'hashedpassword',
      firstName: 'John',
      lastName: 'Doe',
      age: 25
    });
    res.status(201).json({
      success: true,
      data: user
    });
  } catch (error) {
    // Duplicate username/email: report a conflict, not a server error.
    if (error.name === 'SequelizeUniqueConstraintError') {
      return res.status(409).json({
        success: false,
        errors: error.errors.map((e) => ({
          field: e.path,
          message: e.message
        }))
      });
    }
    // Model-level validation failures carry one entry per violated rule.
    if (error.name === 'SequelizeValidationError') {
      return res.status(400).json({
        success: false,
        errors: error.errors.map((e) => ({
          field: e.path,
          message: e.message
        }))
      });
    }
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
};
// Instantiate a User in memory without hitting the database, then persist
// it explicitly with save(). Useful when fields are filled in gradually.
const buildUser = async () => {
  const pendingUser = User.build({
    username: 'janedoe',
    email: 'jane@example.com'
  });
  // Nothing has been written yet; save() issues the INSERT.
  await pendingUser.save();
};
// POST /users/bulk — insert several users in a single statement.
async function createMultipleUsers(req, res) {
  const seedRows = [
    { username: 'user1', email: 'user1@example.com', password: 'pass123' },
    { username: 'user2', email: 'user2@example.com', password: 'pass123' }
  ];
  try {
    const users = await User.bulkCreate(seedRows, {
      validate: true, // Run validations
      ignoreDuplicates: false // Throw error on duplicates
    });
    res.status(201).json({
      success: true,
      count: users.length,
      data: users
    });
  } catch (error) {
    res.status(400).json({
      success: false,
      error: error.message
    });
  }
}
Reading Records
// GET /users — return every user row.
async function getAllUsers(req, res) {
  try {
    const users = await User.findAll();
    res.status(200).json({
      success: true,
      count: users.length,
      data: users
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
// GET /users/active — list active users, newest first, with pagination.
// Optional query params `?page=1&limit=10`; when absent the old fixed
// behavior (limit 10, offset 0) is preserved, so callers are unaffected.
const getActiveUsers = async (req, res) => {
  try {
    // Parse pagination from the query string, clamping to sane minimums.
    const limit = Math.max(1, Number.parseInt(req.query?.limit, 10) || 10);
    const page = Math.max(1, Number.parseInt(req.query?.page, 10) || 1);
    const offset = (page - 1) * limit;
    const users = await User.findAll({
      where: {
        isActive: true
      },
      attributes: ['id', 'username', 'email', 'role'], // never expose password
      order: [['createdAt', 'DESC']], // newest first
      limit,
      offset
    });
    res.status(200).json({
      success: true,
      count: users.length,
      page,
      limit,
      data: users
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
};
// GET /users/:id — look a user up by primary key; 404 when absent.
async function getUserById(req, res) {
  try {
    const user = await User.findByPk(req.params.id);
    if (user === null) {
      res.status(404).json({
        success: false,
        error: 'User not found'
      });
      return;
    }
    res.status(200).json({
      success: true,
      data: user
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
// GET /users/email/:email — fetch the first user matching the address.
async function getUserByEmail(req, res) {
  try {
    const user = await User.findOne({
      where: {
        email: req.params.email
      }
    });
    if (user === null) {
      res.status(404).json({
        success: false,
        error: 'User not found'
      });
      return;
    }
    res.status(200).json({
      success: true,
      data: user
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
// GET /users/count — report how many active users exist.
async function countUsers(req, res) {
  try {
    const activeTotal = await User.count({
      where: {
        isActive: true
      }
    });
    res.status(200).json({
      success: true,
      count: activeTotal
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
Updating Records
// PUT /users/:id — update a user's editable profile fields.
// Only whitelisted columns are written: passing req.body straight into
// update() was a mass-assignment hole (a client could set role: 'admin',
// flip isActive, or overwrite the password hash directly).
const updateUser = async (req, res) => {
  try {
    const user = await User.findByPk(req.params.id);
    if (!user) {
      return res.status(404).json({
        success: false,
        error: 'User not found'
      });
    }
    // `fields` restricts which attributes the UPDATE may touch.
    await user.update(req.body, {
      fields: ['username', 'email', 'firstName', 'lastName', 'age']
    });
    res.status(200).json({
      success: true,
      data: user
    });
  } catch (error) {
    // Validation errors surface here as a 400.
    res.status(400).json({
      success: false,
      error: error.message
    });
  }
};
// PATCH /users/:id/deactivate — flip isActive off via a bulk UPDATE.
// Model.update resolves to [affectedRowCount]; zero rows means no such id.
async function updateUserStatus(req, res) {
  try {
    const result = await User.update(
      { isActive: false },
      {
        where: {
          id: req.params.id
        }
      }
    );
    const affectedRows = result[0];
    if (affectedRows === 0) {
      return res.status(404).json({
        success: false,
        error: 'User not found'
      });
    }
    res.status(200).json({
      success: true,
      message: 'User updated successfully'
    });
  } catch (error) {
    res.status(400).json({
      success: false,
      error: error.message
    });
  }
}
// POST /users/:id/increment-age — atomically bump age by one.
// increment() issues `UPDATE ... SET age = age + 1` in the database but
// (outside PostgreSQL) does not refresh the loaded instance, so the old
// code returned the stale age. reload() fetches the updated row first.
const incrementAge = async (req, res) => {
  try {
    const user = await User.findByPk(req.params.id);
    if (!user) {
      return res.status(404).json({
        success: false,
        error: 'User not found'
      });
    }
    await user.increment('age', { by: 1 });
    // or user.decrement('age', { by: 1 });
    await user.reload(); // sync the instance with the incremented DB value
    res.status(200).json({
      success: true,
      data: user
    });
  } catch (error) {
    res.status(400).json({
      success: false,
      error: error.message
    });
  }
};
Deleting Records
// DELETE /users/:id — soft delete. With paranoid mode on the model,
// destroy() stamps deletedAt instead of issuing a SQL DELETE.
async function deleteUser(req, res) {
  try {
    const user = await User.findByPk(req.params.id);
    if (user === null) {
      return res.status(404).json({
        success: false,
        error: 'User not found'
      });
    }
    await user.destroy();
    res.status(200).json({
      success: true,
      message: 'User deleted successfully'
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
// DELETE /users/:id/force — hard delete. `force: true` bypasses paranoid
// mode and issues a real SQL DELETE; the row is unrecoverable afterwards.
async function forceDeleteUser(req, res) {
  try {
    const user = await User.findByPk(req.params.id);
    if (user === null) {
      return res.status(404).json({
        success: false,
        error: 'User not found'
      });
    }
    await user.destroy({ force: true });
    res.status(200).json({
      success: true,
      message: 'User permanently deleted'
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
// DELETE /users/inactive — bulk-remove every inactive user and report how
// many rows were affected (Model.destroy resolves to that count).
async function deleteInactiveUsers(req, res) {
  try {
    const deletedCount = await User.destroy({
      where: {
        isActive: false
      }
    });
    res.status(200).json({
      success: true,
      deletedCount
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
// POST /users/:id/restore — undo a soft delete by clearing deletedAt for
// the matching row.
async function restoreUser(req, res) {
  try {
    await User.restore({
      where: {
        id: req.params.id
      }
    });
    res.status(200).json({
      success: true,
      message: 'User restored successfully'
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
}
Advanced Queries
const { Op } = require('sequelize');

// Reference catalogue of Sequelize query operators. The results are bound
// to locals but never returned: this function exists purely as example
// material for the surrounding guide.
const advancedQueries = async () => {
  // Comparison: greater-than-or-equal.
  const adults = await User.findAll({
    where: {
      age: { [Op.gte]: 18 }
    }
  });
  // Multiple operators on one attribute are ANDed together (18 <= age < 30).
  const youngAdults = await User.findAll({
    where: {
      age: {
        [Op.gte]: 18,
        [Op.lt]: 30
      }
    }
  });
  // Membership: SQL IN (...).
  const specificAges = await User.findAll({
    where: {
      age: { [Op.in]: [20, 25, 30] }
    }
  });
  // String matching: LIKE with a trailing wildcard.
  const usersStartingWithJ = await User.findAll({
    where: {
      username: { [Op.like]: 'J%' }
    }
  });
  const usersContainingDoe = await User.findAll({
    where: {
      // Case-insensitive (PostgreSQL). NOTE(review): Op.iLike is a
      // PostgreSQL-only operator — it will fail on the MySQL dialect used
      // elsewhere in this guide; use Op.like there (MySQL LIKE is already
      // case-insensitive under the default collations). Confirm target
      // dialect before copying this example.
      username: { [Op.iLike]: '%doe%' }
    }
  });
  // Logical AND (explicit; listing both keys in one `where` object is the
  // implicit equivalent).
  const activeAdmins = await User.findAll({
    where: {
      [Op.and]: [
        { role: 'admin' },
        { isActive: true }
      ]
    }
  });
  // Logical OR across two conditions.
  const adminOrModerator = await User.findAll({
    where: {
      [Op.or]: [
        { role: 'admin' },
        { role: 'moderator' }
      ]
    }
  });
  // Date comparison against a computed cutoff.
  const recentUsers = await User.findAll({
    where: {
      createdAt: {
        [Op.gte]: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) // Last 7 days
      }
    }
  });
  // Null checks: Op.is / Op.not with null compile to IS NULL / IS NOT NULL
  // (plain `=` / `!=` would not match NULLs in SQL).
  const usersWithoutLastName = await User.findAll({
    where: {
      lastName: { [Op.is]: null }
    }
  });
  const usersWithLastName = await User.findAll({
    where: {
      lastName: { [Op.not]: null }
    }
  });
};
Practice Exercise:
- Set up Sequelize with PostgreSQL or MySQL in a Node.js project
- Create a User model with validation rules
- Create and run a migration for the users table
- Implement CRUD operations for users
- Add advanced query filters (age range, role, active status)
- Implement pagination for the get all users endpoint
- Create a seeder to populate initial data
- Implement soft deletes and restore functionality
Best Practices:
- Use migrations for all schema changes
- Add indexes to frequently queried columns
- Use transactions for operations affecting multiple tables
- Validate data at the model level
- Use paranoid mode for soft deletes when appropriate
- Keep raw queries minimal; use Sequelize methods
- Use environment-specific configurations
- Log queries in development, disable in production