Introduction
In modern enterprise applications, handling large-scale data operations (100,000+ records) efficiently is a common challenge. Traditional CRUD approaches often fail due to:
Excessive database round-trips
Poor performance with row-by-row processing
Increased transaction overhead
To overcome these limitations, this article demonstrates a high-performance approach using:
Oracle Global Temporary Tables (GTT)
Stored Procedures for bulk processing
ASP.NET Core Web API with Dapper
Optimized handling of 1 Lac (100,000) records per request
Solution Architecture
The architecture follows a staging + processing pattern:
Load bulk data into a Temporary Table (GTT)
Execute set-based operations using Stored Procedures
Commit changes efficiently to the main table
Oracle Database Setup
Main Table
-- Main destination table for all bulk operations.
CREATE TABLE EMPLOYEES (
ID NUMBER PRIMARY KEY, -- surrogate key, populated from EMP_SEQ by the bulk-insert procedure
NAME VARCHAR2(100),
EMAIL VARCHAR2(150),
SALARY NUMBER,
CREATED_AT DATE DEFAULT SYSDATE -- row creation timestamp, set by the database
);
Global Temporary Table (GTT)
-- Session-private staging table: each database session sees only its own rows.
-- ON COMMIT PRESERVE ROWS keeps staged data across the COMMIT issued at the end
-- of each bulk operation, so the rows survive until the session ends (or they
-- are explicitly deleted at the start of the next request).
CREATE GLOBAL TEMPORARY TABLE EMPLOYEES_TEMP (
ID NUMBER, -- nullable: left empty by bulk insert, required by bulk update/delete
NAME VARCHAR2(100),
EMAIL VARCHAR2(150),
SALARY NUMBER
) ON COMMIT PRESERVE ROWS;
Why GTT? Rows in a Global Temporary Table are private to each database session, so concurrent requests never see each other's staged data, and the table's contents disappear automatically when the session ends — making it an ideal low-overhead staging area for bulk operations.
Sequence
CREATE SEQUENCE EMP_SEQ START WITH 1 INCREMENT BY 1;
Stored Procedures for Bulk Operations
Bulk Insert
-- Set-based bulk insert: copies every staged row from EMPLOYEES_TEMP into
-- EMPLOYEES in a single INSERT ... SELECT, assigning IDs from EMP_SEQ.
CREATE OR REPLACE PROCEDURE SP_BULK_INSERT_EMPLOYEES AS
BEGIN
  INSERT INTO EMPLOYEES (ID, NAME, EMAIL, SALARY)
  SELECT EMP_SEQ.NEXTVAL, NAME, EMAIL, SALARY
  FROM EMPLOYEES_TEMP;
  -- No COMMIT here: transaction control belongs to the caller. A COMMIT inside
  -- the procedure would end the ADO.NET transaction the application opened
  -- (the staging INSERT and this procedure must commit or roll back together).
END;
Bulk Update
-- Set-based bulk update: one MERGE applies every staged change to EMPLOYEES.
-- Rows in EMPLOYEES_TEMP whose ID has no match in EMPLOYEES are ignored
-- (no WHEN NOT MATCHED clause by design -- this procedure only updates).
CREATE OR REPLACE PROCEDURE SP_BULK_UPDATE_EMPLOYEES AS
BEGIN
  MERGE INTO EMPLOYEES E
  USING EMPLOYEES_TEMP T
  ON (E.ID = T.ID)
  WHEN MATCHED THEN
    UPDATE SET
      E.NAME = T.NAME,
      E.EMAIL = T.EMAIL,
      E.SALARY = T.SALARY;
  -- No COMMIT here: the caller owns the transaction, so a failed batch can be
  -- rolled back as a whole from the application side.
END;
Bulk Delete
-- Set-based bulk delete: removes every EMPLOYEES row whose ID was staged in
-- EMPLOYEES_TEMP. IDs that do not exist in EMPLOYEES are silently skipped.
CREATE OR REPLACE PROCEDURE SP_BULK_DELETE_EMPLOYEES AS
BEGIN
  DELETE FROM EMPLOYEES
  WHERE ID IN (SELECT ID FROM EMPLOYEES_TEMP);
  -- No COMMIT here: committing inside the procedure would make it impossible
  -- for the calling application to roll back the operation as one unit.
END;
ASP.NET Core Implementation
Required NuGet Packages
dotnet add package Oracle.ManagedDataAccess
dotnet add package Dapper
Model
/// <summary>
/// Transport/model type for a single employee row. Property names match the
/// Dapper bind-parameter names used in the repository (:Id, :Name, :Email, :Salary).
/// </summary>
public class Employee
{
    /// <summary>Primary key. Required for bulk update and bulk delete; ignored by bulk insert (IDs come from EMP_SEQ).</summary>
    public int Id { get; set; }

    /// <summary>Employee display name (maps to NAME VARCHAR2(100)).</summary>
    public string Name { get; set; } = string.Empty;   // initialized so non-nullable string is never null

    /// <summary>Contact email (maps to EMAIL VARCHAR2(150)).</summary>
    public string Email { get; set; } = string.Empty;

    /// <summary>Salary amount; decimal to avoid floating-point rounding on money.</summary>
    public decimal Salary { get; set; }
}
Repository Pattern Implementation
Interface
/// <summary>
/// Repository contract for set-based bulk operations against the EMPLOYEES table,
/// staged through the EMPLOYEES_TEMP global temporary table.
/// </summary>
public interface IEmployeeRepository
{
/// <summary>Stages <paramref name="employees"/> and inserts them into EMPLOYEES in one set-based operation.</summary>
Task BulkInsertAsync(List<Employee> employees);
/// <summary>Stages <paramref name="employees"/> (matched by Id) and applies all updates via a single MERGE.</summary>
Task BulkUpdateAsync(List<Employee> employees);
/// <summary>Stages <paramref name="ids"/> and deletes the matching EMPLOYEES rows in one statement.</summary>
Task BulkDeleteAsync(List<int> ids);
/// <summary>Returns every row of EMPLOYEES.</summary>
Task<IEnumerable<Employee>> GetAllAsync();
}
Repository Implementation
using Dapper;
using Oracle.ManagedDataAccess.Client;
/// <summary>
/// Dapper-based implementation of <see cref="IEmployeeRepository"/> using the
/// staging-table pattern: clear EMPLOYEES_TEMP, bulk-load it, then run one
/// set-based stored procedure -- all inside a single transaction.
/// </summary>
public class EmployeeRepository : IEmployeeRepository
{
    private readonly string _connectionString;

    public EmployeeRepository(IConfiguration config)
    {
        // Fail fast at construction instead of at the first query.
        _connectionString = config.GetConnectionString("OracleDb")
            ?? throw new InvalidOperationException("Connection string 'OracleDb' is not configured.");
    }

    private OracleConnection GetConnection() => new OracleConnection(_connectionString);

    // Clears leftover staging rows from a previous request on this session.
    // NOTE: the original used TRUNCATE TABLE here, but TRUNCATE is DDL and
    // issues an implicit COMMIT in Oracle, silently ending the transaction
    // started by BeginTransaction(). DELETE is transactional and cheap on a
    // session-private GTT, so the whole operation stays atomic.
    private static Task ClearStagingAsync(OracleConnection connection, IDbTransaction transaction) =>
        connection.ExecuteAsync("DELETE FROM EMPLOYEES_TEMP", transaction: transaction);

    // BULK INSERT: stage rows (Dapper loops over the list with one prepared
    // statement), then let SP_BULK_INSERT_EMPLOYEES copy them set-based.
    public async Task BulkInsertAsync(List<Employee> employees)
    {
        using var connection = GetConnection();
        await connection.OpenAsync();
        using var transaction = connection.BeginTransaction();

        await ClearStagingAsync(connection, transaction);

        const string query = @"INSERT INTO EMPLOYEES_TEMP (NAME, EMAIL, SALARY)
                               VALUES (:Name, :Email, :Salary)";
        await connection.ExecuteAsync(query, employees, transaction);
        await connection.ExecuteAsync("BEGIN SP_BULK_INSERT_EMPLOYEES; END;", transaction: transaction);

        transaction.Commit();
    }

    // BULK UPDATE: stage full rows keyed by Id, then MERGE them in one statement.
    public async Task BulkUpdateAsync(List<Employee> employees)
    {
        using var connection = GetConnection();
        await connection.OpenAsync();
        using var transaction = connection.BeginTransaction();

        await ClearStagingAsync(connection, transaction);

        const string query = @"INSERT INTO EMPLOYEES_TEMP (ID, NAME, EMAIL, SALARY)
                               VALUES (:Id, :Name, :Email, :Salary)";
        await connection.ExecuteAsync(query, employees, transaction);
        await connection.ExecuteAsync("BEGIN SP_BULK_UPDATE_EMPLOYEES; END;", transaction: transaction);

        transaction.Commit();
    }

    // BULK DELETE: stage only the IDs, then delete all matches in one statement.
    public async Task BulkDeleteAsync(List<int> ids)
    {
        using var connection = GetConnection();
        await connection.OpenAsync();
        using var transaction = connection.BeginTransaction();

        await ClearStagingAsync(connection, transaction);

        var tempData = ids.Select(id => new { Id = id });
        await connection.ExecuteAsync(
            "INSERT INTO EMPLOYEES_TEMP (ID) VALUES (:Id)",
            tempData,
            transaction);
        await connection.ExecuteAsync("BEGIN SP_BULK_DELETE_EMPLOYEES; END;", transaction: transaction);

        transaction.Commit();
    }

    // READ: simple full-table query; Dapper opens the connection on demand.
    public async Task<IEnumerable<Employee>> GetAllAsync()
    {
        using var connection = GetConnection();
        return await connection.QueryAsync<Employee>("SELECT * FROM EMPLOYEES");
    }
}
API Controller
/// <summary>
/// REST endpoints for the bulk employee operations. Thin layer: validates the
/// payload, then delegates to <see cref="IEmployeeRepository"/>.
/// </summary>
[ApiController]
[Route("api/[controller]")]
public class EmployeesController : ControllerBase
{
    private readonly IEmployeeRepository _repo;

    public EmployeesController(IEmployeeRepository repo)
    {
        _repo = repo;
    }

    /// <summary>Bulk-inserts the posted employees in one staged, set-based operation.</summary>
    [HttpPost("bulk-insert")]
    public async Task<IActionResult> BulkInsert(List<Employee> employees)
    {
        // Guard: an empty payload would still round-trip to the database for nothing.
        if (employees is null || employees.Count == 0)
            return BadRequest("Request body must contain at least one employee.");

        await _repo.BulkInsertAsync(employees);
        return Ok("Inserted Successfully");
    }

    /// <summary>Bulk-updates the posted employees, matched by Id.</summary>
    [HttpPut("bulk-update")]
    public async Task<IActionResult> BulkUpdate(List<Employee> employees)
    {
        if (employees is null || employees.Count == 0)
            return BadRequest("Request body must contain at least one employee.");

        await _repo.BulkUpdateAsync(employees);
        return Ok("Updated Successfully");
    }

    /// <summary>Bulk-deletes the employees whose Ids are posted.</summary>
    [HttpDelete("bulk-delete")]
    public async Task<IActionResult> BulkDelete(List<int> ids)
    {
        if (ids is null || ids.Count == 0)
            return BadRequest("Request body must contain at least one id.");

        await _repo.BulkDeleteAsync(ids);
        return Ok("Deleted Successfully");
    }

    /// <summary>Returns all employees.</summary>
    [HttpGet]
    public async Task<IActionResult> Get()
    {
        var result = await _repo.GetAllAsync();
        return Ok(result);
    }
}
Performance Best Practices
1. Use Batch Processing
var batches = employees.Chunk(10000);
2. Use Array Binding (Advanced Optimization)
command.ArrayBindCount = employees.Count;
3. Avoid Row-by-Row Processing
Always prefer:
- Set-based SQL
- Temp tables
- Stored procedures
4. Index Optimization
Ensure the join/filter column (EMPLOYEES.ID, already indexed via its primary key) is indexed so the MERGE and DELETE lookups against the staging table stay fast.
Key Benefits of This Approach
Handles 100K+ records efficiently
Minimal database calls
Optimized bulk operations
Scalable enterprise architecture
Clean separation of concerns
Conclusion
Using Oracle Global Temporary Tables with ASP.NET Core provides a powerful and scalable solution for handling large datasets. By leveraging bulk processing and set-based operations, you can significantly improve performance and reliability in enterprise applications.