Entity Framework Core is excellent for most database operations, but when dealing with large datasets, Entity Framework Extensions can provide massive performance improvements. This guide shows you exactly when to use each approach with real examples from our project.
When working with individual entities or small datasets (< 100 entities):
// Single customer creation
await _customerRepository.AddAsync(customer);
// Get customer by ID
var customer = await _customerRepository.GetByIdAsync(id);
// Update single customer
customer.Name = "Updated Name";
await _customerRepository.UpdateAsync(customer);
// Delete single customer
await _customerRepository.DeleteAsync(customer);

When you need to load related data and perform complex filtering:
// Complex query with navigation properties
var customers = await _context.Customers
    .Where(c => c.IsActive && c.TotalSpent > 1000)
    .Include(c => c.Orders)
        .ThenInclude(o => o.OrderItems)
    .OrderByDescending(c => c.TotalSpent)
    .ToListAsync();

When you need to perform business operations on entities:
public async Task<Result> UpdateCustomerStatsAsync(int customerId)
{
    var customer = await _customerRepository.GetByIdAsync(customerId);
    if (customer == null)
        return new FailureResult("Customer not found");
    // Business logic requiring loaded entity
    customer.OrderCount = await _context.Orders
        .CountAsync(o => o.CustomerId == customerId);
    
    customer.TotalSpent = await _context.Orders
        .Where(o => o.CustomerId == customerId)
        .SumAsync(o => o.TotalAmount);
    await _customerRepository.UpdateAsync(customer);
    return new SuccessResult();
}

When dealing with large datasets, use bulk operations for significant performance gains:
// Bulk insert 1000+ customers
public async Task<Result<BulkOperationResult>> BulkCreateCustomersAsync(IEnumerable<Customer> customers)
{
    // Materialize once so the source sequence is not enumerated multiple times
    var customerList = customers.ToList();
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    
    // Use optimized bulk insert for better performance
    await _bulkRepository.BulkInsertOptimizedAsync(customerList);
    stopwatch.Stop();
    
    var result = new BulkOperationResult(
        $"Successfully created {customerList.Count} customers using optimized bulk insert",
        customerList.Count,
        stopwatch.Elapsed,
        "Bulk Insert"
    );
    return result.ToSuccessResult();
}

When you need to update/delete records without loading them into memory:
// Batch deactivate inactive customers (no entity loading)
public async Task<Result<BulkOperationResult>> BulkDeactivateInactiveCustomersAsync(DateTime threshold)
{
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    
    var affectedRows = await _context.Customers
        .Where(c => c.IsActive && c.LastLogin < threshold)
        .UpdateFromQueryAsync(c => new Customer { IsActive = false });
    stopwatch.Stop();
    
    var result = new BulkOperationResult(
        $"Successfully deactivated {affectedRows} inactive customers",
        affectedRows,
        stopwatch.Elapsed,
        "Batch Update"
    );
    return result.ToSuccessResult();
}
// Batch delete inactive customers
var deletedRows = await _context.Customers
    .Where(c => !c.IsActive)
    .DeleteFromQueryAsync();
// Batch insert from query (backup scenario)
var insertedRows = await _context.Customers
    .Where(c => c.IsActive)
    .InsertFromQueryAsync("backup_customers", c => new { c.Code, c.Name, c.Email });

When you need to insert new records and update existing ones:
// Bulk sync customers (upsert) using BulkMerge
public async Task<Result<BulkOperationResult>> BulkSyncCustomersAsync(IEnumerable<Customer> customers)
{
    // Materialize once so the source sequence is not enumerated multiple times
    var customerList = customers.ToList();
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    
    await _bulkRepository.BulkMergeAsync(customerList, 
        keySelector: c => new { c.Code, c.Email });
    stopwatch.Stop();
    
    var result = new BulkOperationResult(
        $"Successfully synced {customerList.Count} customers using bulk merge",
        customerList.Count,
        stopwatch.Elapsed,
        "Bulk Merge"
    );
    return result.ToSuccessResult();
}

When you need fine-grained control over bulk operations:
var options = new BulkOperationOptions<Customer>
{
    BatchSize = 1000,                    // Process in batches of 1000
    UseTransaction = true,               // Wrap in transaction
    InsertIfNotExists = true,            // Insert new records
    UpdateIfExists = true,               // Update existing records
    IncludeGraph = true,                 // Include related entities
    ColumnPrimaryKeyExpression = c => new { c.Code, c.Email }  // Custom key
};
await _context.BulkSynchronizeAsync(customers, options);

When you need to filter by multiple values efficiently:
// Get customers by multiple codes efficiently
var customerCodes = new[] { "CUST001", "CUST002", "CUST003" };
var customers = await _context.Customers
    .WhereBulkContains(customerCodes, x => x.Code)
    .ToListAsync();
// Get customers by multiple criteria
var matchedCustomers = await _context.Customers
    .WhereBulkContainsFilterList(customerList, x => new { x.Email, x.Name })
    .ToListAsync();

Our project includes performance measurement endpoints that demonstrate the actual difference between the two approaches.
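As a rough sketch of what such an endpoint can look like (the route, the controller wiring, and the GenerateCustomers helper are illustrative assumptions, not taken from the project):

[HttpPost("benchmark/insert/{count}")]
public async Task<IActionResult> BenchmarkInsert(int count)
{
    // Hypothetical helper that builds 'count' customers in memory
    var efCoreBatch = GenerateCustomers(count);
    var bulkBatch = GenerateCustomers(count);

    // EF Core: change tracking plus per-entity INSERT statements on SaveChanges
    var efCoreTimer = System.Diagnostics.Stopwatch.StartNew();
    _context.Customers.AddRange(efCoreBatch);
    await _context.SaveChangesAsync();
    efCoreTimer.Stop();

    // Entity Framework Extensions: single optimized bulk insert
    var bulkTimer = System.Diagnostics.Stopwatch.StartNew();
    await _context.BulkInsertAsync(bulkBatch);
    bulkTimer.Stop();

    return Ok(new { EfCore = efCoreTimer.Elapsed, BulkInsert = bulkTimer.Elapsed });
}

The bulk operations themselves are exposed through an IBulkRepository abstraction: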
public interface IBulkRepository
{
    Task BulkInsertAsync<T>(IEnumerable<T> entities) where T : class;
    Task BulkUpdateAsync<T>(IEnumerable<T> entities) where T : class;
    Task BulkDeleteAsync<T>(IEnumerable<T> entities) where T : class;
    Task BulkMergeAsync<T>(IEnumerable<T> entities, Expression<Func<T, object>> keySelector = null) where T : class;
    Task BulkSynchronizeAsync<T>(IEnumerable<T> entities) where T : class;
    
    // Optimized versions
    Task BulkInsertOptimizedAsync<T>(IEnumerable<T> entities) where T : class;
    Task BulkUpdateOptimizedAsync<T>(IEnumerable<T> entities) where T : class;
    
    // Batch operations
    Task<int> UpdateFromQueryAsync<T>(Expression<Func<T, T>> updateExpression, Expression<Func<T, bool>> whereExpression) where T : class;
    Task<int> DeleteFromQueryAsync<T>(Expression<Func<T, bool>> whereExpression) where T : class;
}

public class CustomerService
{
    private readonly ICustomerRepository _customerRepository;      // For EF Core operations
    private readonly IBulkRepository _bulkRepository;              // For bulk operations

    public CustomerService(ICustomerRepository customerRepository, IBulkRepository bulkRepository)
    {
        _customerRepository = customerRepository;
        _bulkRepository = bulkRepository;
    }

    // Use EF Core for individual operations
    public async Task<Result> CreateCustomerAsync(Customer customer)
    {
        await _customerRepository.AddAsync(customer);
        return new SuccessResult();
    }
    // Use Entity Framework Extensions for bulk operations
    public async Task<Result> BulkCreateCustomersAsync(IEnumerable<Customer> customers)
    {
        await _bulkRepository.BulkInsertOptimizedAsync(customers);
        return new SuccessResult();
    }
}

| Scenario | Entity Count | Use | Reason |
|---|---|---|---|
| Single customer CRUD | 1 | EF Core | Simple, no performance benefit | 
| Customer search with orders | < 100 | EF Core | Complex queries, navigation properties | 
| Customer import from CSV | 100-1000 | Entity Framework Extensions | Large dataset, performance critical | 
| Batch status updates | Any | Entity Framework Extensions | No entity loading needed | 
| Data synchronization | Any | Entity Framework Extensions | Upsert operations | 
| Customer backup | Any | Entity Framework Extensions | Batch operations | 
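If you want that decision encoded in one place, a simple size check works; this is a sketch only, and the 100-entity cutoff mirrors the table above rather than any hard rule:

public async Task<Result> ImportCustomersAsync(IReadOnlyCollection<Customer> customers)
{
    const int bulkThreshold = 100;   // below this, EF Core change tracking is simple and fast enough

    if (customers.Count < bulkThreshold)
    {
        // Small batch: regular EF Core insert
        _context.Customers.AddRange(customers);
        await _context.SaveChangesAsync();
    }
    else
    {
        // Large batch: hand off to Entity Framework Extensions
        await _bulkRepository.BulkInsertOptimizedAsync(customers);
    }

    return new SuccessResult();
}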
To get started, install the Entity Framework Extensions package:

dotnet add package Z.EntityFramework.Extensions.EFCore

The bulk extension methods then work against your existing DbContext:

using Z.BulkOperations;
public class ApplicationDbContext : DbContext
{
    // Your existing DbContext code
}

public class BulkRepository : IBulkRepository
{
    private readonly ApplicationDbContext _context;

    public BulkRepository(ApplicationDbContext context)
    {
        _context = context;
    }
    
    public async Task BulkInsertAsync<T>(IEnumerable<T> entities) where T : class
    {
        await _context.BulkInsertAsync(entities.ToList());
    }

    // The remaining IBulkRepository members follow the same pattern,
    // delegating to the corresponding Entity Framework Extensions call.
}
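Finally, the context and repositories need to be registered with your DI container. A minimal sketch assuming ASP.NET Core's built-in container and the SQL Server provider (CustomerRepository and the connection string name are placeholders, not from this guide):

// Program.cs
builder.Services.AddDbContext<ApplicationDbContext>(options =>
    options.UseSqlServer(builder.Configuration.GetConnectionString("Default")));

builder.Services.AddScoped<ICustomerRepository, CustomerRepository>();
builder.Services.AddScoped<IBulkRepository, BulkRepository>();
builder.Services.AddScoped<CustomerService>();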