⚡ Performance Optimization Guide
Maximize your Dubhe application's performance with proven optimization strategies
Prerequisites: Understanding of ECS architecture, Move language, and basic performance concepts
🎯 Performance Optimization Philosophy
Performance in blockchain applications involves multiple layers: smart contract efficiency, indexer throughput, frontend responsiveness, and network optimization. Each layer requires specific strategies.
Smart Contracts
Gas optimization and execution efficiency
Indexer Service
Data processing and query optimization
Frontend
UI responsiveness and real-time updates
📊 Performance Metrics & Monitoring
Key Performance Indicators
- Smart Contract Metrics
- Indexer Performance
- Frontend Performance
// Performance monitoring for contracts
// Performance monitoring for contracts
// Snapshot of on-chain cost, timing, and state-size indicators for a deployment.
interface ContractMetrics {
// Gas consumed per core ECS operation (units as reported by the chain — confirm with producer).
gasUsage: {
entityCreation: number;
componentAdd: number;
systemExecution: number;
batchOperations: number;
};
// Transaction-execution timing characteristics.
executionTime: {
averageBlockTime: number;
transactionThroughput: number;
confirmationTime: number;
};
// On-chain state footprint.
stateSize: {
totalEntities: number;
componentsPerEntity: number;
storageUtilization: number;
};
}
// Gas usage tracking
// Gas usage tracking
// Accumulates per-operation gas samples and exposes summary statistics.
export class GasTracker {
  // operation name -> gas samples in insertion order
  private metrics: Map<string, number[]> = new Map();

  /** Record one gas measurement for `operation`. */
  track(operation: string, gasUsed: number) {
    if (!this.metrics.has(operation)) {
      this.metrics.set(operation, []);
    }
    this.metrics.get(operation)!.push(gasUsed);
  }

  /** Mean gas for `operation`; 0 when no samples (the original returned NaN from 0/0). */
  getAverage(operation: string): number {
    const values = this.metrics.get(operation) || [];
    if (values.length === 0) return 0;
    return values.reduce((sum, val) => sum + val, 0) / values.length;
  }

  /**
   * Gas value at `percentile` (0-100) for `operation`; 0 when no samples.
   * Sorts a copy so the stored sample order is not mutated (the original
   * sorted the internal array in place), and clamps the index so
   * percentile=100 returns the maximum instead of reading past the end.
   */
  getPercentile(operation: string, percentile: number): number {
    const values = [...(this.metrics.get(operation) || [])].sort((a, b) => a - b);
    if (values.length === 0) return 0;
    const index = Math.min(values.length - 1, Math.floor(values.length * (percentile / 100)));
    return values[index];
  }
}
// Indexer performance monitoring
// Indexer performance monitoring
// Health snapshot of the indexer pipeline: ingest throughput, database
// behavior, and WebSocket fan-out.
interface IndexerMetrics {
// Event-ingestion throughput.
throughput: {
eventsPerSecond: number;
blocksProcessed: number;
avgProcessingTime: number;
};
// Database-side latency and utilization; queryLatency maps query type -> last duration.
database: {
queryLatency: Map<string, number>;
connectionUtilization: number;
cacheHitRate: number;
};
// Realtime push channel health.
websocket: {
activeConnections: number;
messageLatency: number;
connectionDropRate: number;
};
}
// Performance monitoring middleware
// Performance monitoring middleware
// Collects indexer throughput / database / websocket numbers and reports an
// overall status.
export class PerformanceMonitor {
  // Initialized with neutral values so the track* methods never dereference
  // `undefined` (the original declared this field but never assigned it).
  private metrics: IndexerMetrics = {
    throughput: { eventsPerSecond: 0, blocksProcessed: 0, avgProcessingTime: 0 },
    database: { queryLatency: new Map(), connectionUtilization: 0, cacheHitRate: 0 },
    websocket: { activeConnections: 0, messageLatency: 0, connectionDropRate: 0 },
  };

  /** Record the latest observed duration (ms) for a query type. */
  trackQuery(queryType: string, duration: number) {
    this.metrics.database.queryLatency.set(queryType, duration);
  }

  /** Derive events/sec from `eventCount` events processed in `duration` ms. */
  trackEventProcessing(eventCount: number, duration: number) {
    this.metrics.throughput.eventsPerSecond = eventCount / (duration / 1000);
  }

  /** Overall status string plus the raw metrics snapshot. */
  getHealthCheck(): { status: string; metrics: IndexerMetrics } {
    return {
      status: this.calculateOverallHealth(),
      metrics: this.metrics
    };
  }

  // Minimal health heuristic; the original called this method without ever
  // defining it, which crashed getHealthCheck at runtime.
  private calculateOverallHealth(): string {
    return this.metrics.throughput.eventsPerSecond > 0 ? 'ok' : 'idle';
  }
}
// Frontend performance tracking
// Frontend performance tracking
// Client-side metrics: render cost, network latency, and Web Vitals-style
// user-experience timings.
interface FrontendMetrics {
// Rendering cost; componentRenderTime maps component name -> duration.
rendering: {
fps: number;
componentRenderTime: Map<string, number>;
memoryUsage: number;
};
// Transport latency to the chain and realtime channels.
network: {
rpcLatency: number;
websocketLatency: number;
dataTransferRate: number;
};
// Page-load user-experience milestones.
user: {
timeToInteractive: number;
firstContentfulPaint: number;
largestContentfulPaint: number;
};
}
// React performance hook
// React performance hook
// Registers a PerformanceObserver for 'measure'/'navigation' entries and
// exposes markStart/markEnd helpers built on the User Timing API.
export const usePerformanceMonitor = () => {
  const [metrics, setMetrics] = useState<FrontendMetrics>();

  useEffect(() => {
    const onEntries: PerformanceObserverCallback = (list) => {
      for (const entry of list.getEntries()) {
        if (entry.entryType !== 'measure') continue;
        // Track custom performance measures
        console.log(`${entry.name}: ${entry.duration}ms`);
      }
    };
    const observer = new PerformanceObserver(onEntries);
    observer.observe({ entryTypes: ['measure', 'navigation'] });
    // Disconnect on unmount so the observer does not outlive the component.
    return () => observer.disconnect();
  }, []);

  // Drop a start mark for a named measurement.
  const markStart = (name: string) => performance.mark(`${name}-start`);

  // Drop the end mark and record the start->end measure.
  const markEnd = (name: string) => {
    performance.mark(`${name}-end`);
    performance.measure(name, `${name}-start`, `${name}-end`);
  };

  return { metrics, markStart, markEnd };
};
🚀 Smart Contract Optimization
Component Design Optimization
Memory Layout Optimization
Memory Layout Optimization
// ✅ Optimized: Pack related data together
// Current and maximum health packed into one u64 so a component read/write
// touches a single field instead of two.
struct OptimizedHealthComponent has store, drop {
// Pack current and maximum health into single u64
// Bits 0-31: current health (max 4.3B)
// Bits 32-63: maximum health (max 4.3B)
packed_health: u64,
}
// Current health lives in the low 32 bits.
public fun get_current_health(health: &OptimizedHealthComponent): u32 {
(health.packed_health & 0xFFFFFFFF) as u32
}
// Maximum health lives in the high 32 bits.
public fun get_maximum_health(health: &OptimizedHealthComponent): u32 {
(health.packed_health >> 32) as u32
}
// Rewrites the low word, preserving the stored maximum.
// NOTE(review): no clamp against maximum — callers must ensure value <= max.
public fun set_current_health(health: &mut OptimizedHealthComponent, value: u32) {
let max_health = get_maximum_health(health) as u64;
health.packed_health = (max_health << 32) | (value as u64);
}
// ✅ Optimized: Use bit flags for boolean states
// Boolean statuses collapsed into bit flags and small scalars packed into a
// single u32, so state checks read one byte instead of several fields.
struct OptimizedStateComponent has store, drop {
// Bit flags: 0=alive, 1=poisoned, 2=stunned, 3=frozen, etc.
status_flags: u8,
// Pack multiple small values
// Bits 0-7: level (max 255)
// Bits 8-15: class (max 255)
// Bits 16-31: experience_multiplier (fixed point)
packed_stats: u32,
}
// One bit per status; test with `&`, set with `|`.
const STATUS_ALIVE: u8 = 1 << 0;
const STATUS_POISONED: u8 = 1 << 1;
const STATUS_STUNNED: u8 = 1 << 2;
// True when the alive bit is set.
public fun is_alive(state: &OptimizedStateComponent): bool {
(state.status_flags & STATUS_ALIVE) != 0
}
// Sets or clears the poisoned bit without disturbing the other flags.
// Fix: Move has no bitwise NOT operator (`!` only negates bool), so the
// original `status_flags & !STATUS_POISONED` did not compile; build the
// complement mask by XOR-ing against the all-ones u8 instead.
public fun set_poisoned(state: &mut OptimizedStateComponent, poisoned: bool) {
if (poisoned) {
state.status_flags = state.status_flags | STATUS_POISONED;
} else {
state.status_flags = state.status_flags & (STATUS_POISONED ^ 0xff);
};
}
Component Granularity
Component Granularity
// ❌ Bad: Monolithic component (expensive to read/write)
// Anti-pattern on purpose: a "kitchen sink" component. Any system touching
// one field must load/store the entire struct, including the variable-size
// vectors and nested structs.
struct PlayerData has store, drop {
health: u64,
mana: u64,
position_x: u64,
position_y: u64,
velocity_x: u64,
velocity_y: u64,
inventory_items: vector<u64>,
equipment: Equipment,
stats: Stats,
buffs: vector<Buff>,
}
// ✅ Good: Granular components (load only what's needed)
// Granular split of the monolith above: each system loads only the
// component(s) it actually touches.
struct CoreStatsComponent has store, drop {
health: u64,
mana: u64,
}
// World-space coordinates.
struct PositionComponent has store, drop {
x: u64,
y: u64,
}
// Per-tick movement data, separate from position so static entities never pay for it.
struct MovementComponent has store, drop {
velocity_x: u64,
velocity_y: u64,
speed_multiplier: u16, // Packed as fixed-point
}
// Load only required components for each system
// Example system illustrating granular queries: fetches only the two
// components it needs instead of a full player record.
public entry fun movement_system(world: &mut World) {
// Only loads Position + Movement components, not entire player data
let moving_entities = world::query_with<PositionComponent, MovementComponent>(world);
// Process movement...
}
System Optimization Patterns
- Batch Processing
- Lazy Evaluation
- Efficient Data Structures
// ✅ Optimized: Process multiple entities in single transaction
// Heals many entities in one transaction and emits a single batched event,
// amortizing per-transaction overhead across the whole batch.
// Fix: the original read `ctx` (for the event timestamp) without declaring
// it; entry functions receive the transaction context as a trailing parameter.
public entry fun batch_heal_system(
world: &mut World,
targets: vector<u64>,
heal_amounts: vector<u64>,
ctx: &TxContext
) {
let len = vector::length(&targets);
// Parallel vectors must agree, and batch size is capped to bound gas.
assert!(len == vector::length(&heal_amounts), EArrayLengthMismatch);
assert!(len <= MAX_BATCH_SIZE, EBatchTooLarge);
let i = 0;
while (i < len) {
let target = *vector::borrow(&targets, i);
let heal_amount = *vector::borrow(&heal_amounts, i);
// Skip entities without health rather than aborting the whole batch.
if (world::has_component<HealthComponent>(world, target)) {
let health = world::get_mut_component<HealthComponent>(world, target);
// Clamp so healing never exceeds the maximum.
health.current = math::min(health.current + heal_amount, health.maximum);
};
i = i + 1;
};
// Single event for entire batch
event::emit(BatchHealEvent {
targets,
heal_amounts,
timestamp: tx_context::epoch_timestamp_ms(ctx),
});
}
// ✅ Optimized: Conditional system execution
// Avoids wasted work: exits immediately when no AI entities exist, and skips
// any entity whose scheduled next_update_time is still in the future.
// Fix: the original read `ctx` without declaring it; entry functions take the
// transaction context as a trailing parameter.
public entry fun smart_ai_system(world: &mut World, ctx: &TxContext) {
// Early exit if no AI entities exist
if (!world::has_entities_with_component<AIComponent>(world)) {
return
};
let ai_entities = world::query_with<AIComponent>(world);
// Process only entities that need updates
let i = 0;
while (i < vector::length(&ai_entities)) {
let entity = *vector::borrow(&ai_entities, i);
let ai = world::get_component<AIComponent>(world, entity);
// Skip entities that don't need processing
if (ai.next_update_time > tx_context::epoch_timestamp_ms(ctx)) {
i = i + 1;
continue
};
process_ai_entity(world, entity);
i = i + 1;
};
}
// ✅ Optimized: Cache expensive calculations
// Base stats plus equipment modifiers, with lazily computed totals cached in
// Option fields; `cache_dirty` marks the cache stale after equipment changes.
struct CachedStatsComponent has store, drop {
base_strength: u16,
base_agility: u16,
base_intelligence: u16,
// Equipment bonuses
equipment_bonuses: vector<StatModifier>,
// Cached total stats (computed on demand)
cached_total_strength: Option<u16>,
cached_total_agility: Option<u16>,
cached_total_intelligence: Option<u16>,
// Cache invalidation flag
cache_dirty: bool,
}
// Returns total strength, recomputing only when the cache is missing or has
// been invalidated by an equipment change.
// Fix: the original never cleared `cache_dirty` after recomputing, so every
// call recomputed the total and the cache never produced a hit.
public fun get_total_strength(stats: &mut CachedStatsComponent): u16 {
if (stats.cache_dirty || option::is_none(&stats.cached_total_strength)) {
let total = calculate_total_strength(stats);
stats.cached_total_strength = option::some(total);
stats.cache_dirty = false;
};
*option::borrow(&stats.cached_total_strength)
}
// Appends an equipment modifier and invalidates every cached total so the
// next getter recomputes from scratch.
public fun add_equipment_bonus(
stats: &mut CachedStatsComponent,
bonus: StatModifier
) {
vector::push_back(&mut stats.equipment_bonuses, bonus);
// Mark cache as dirty
stats.cache_dirty = true;
stats.cached_total_strength = option::none();
stats.cached_total_agility = option::none();
stats.cached_total_intelligence = option::none();
}
// Recomputes total strength = base + sum of strength-typed equipment bonuses.
// O(n) in the number of bonuses; get_total_strength caches the result.
fun calculate_total_strength(stats: &CachedStatsComponent): u16 {
let total = stats.base_strength;
let i = 0;
while (i < vector::length(&stats.equipment_bonuses)) {
let bonus = vector::borrow(&stats.equipment_bonuses, i);
// Only bonuses tagged as strength contribute here.
if (bonus.stat_type == STAT_STRENGTH) {
total = total + bonus.value;
};
i = i + 1;
};
total
}
use sui::table::{Self, Table};
use sui::vec_map::{Self, VecMap};
use sui::vec_set::{Self, VecSet};
// Choose appropriate data structure based on usage pattern
// World root object. Container choice per field follows access pattern:
// Table for large sparse O(1) lookups, VecMap/VecSet for small iterated
// collections, and a plain vector used as a fixed-size ring for recent events.
struct OptimizedWorld has key {
id: UID,
// Large, sparse collections -> Table (O(1) lookup)
entities: Table<u64, EntityData>,
// Small, frequently iterated collections -> VecMap (cache friendly)
active_players: VecMap<address, u64>, // <100 items typically
// Membership testing -> VecSet (O(log n) membership, compact)
online_entities: VecSet<u64>,
// Recent items with FIFO behavior -> Vector
recent_events: vector<GameEvent>, // Keep last 10 events
recent_events_head: u8,
// Counters and frequently updated values
next_entity_id: u64,
total_players: u32,
}
// Efficient event storage with circular buffer
// Ring-buffer insert: appends while below capacity, then overwrites the
// oldest slot; `recent_events_head` tracks the next slot to replace.
public fun add_recent_event(world: &mut World, event: GameEvent) {
let events_len = vector::length(&world.recent_events);
if (events_len < MAX_RECENT_EVENTS) {
vector::push_back(&mut world.recent_events, event);
} else {
// Replace oldest event (circular buffer)
let index = (world.recent_events_head as u64) % MAX_RECENT_EVENTS;
*vector::borrow_mut(&mut world.recent_events, index) = event;
world.recent_events_head = (world.recent_events_head + 1) % (MAX_RECENT_EVENTS as u8);
};
}
Gas Optimization Techniques
Minimize Storage Operations
// ❌ Bad: Multiple storage writes
// Anti-pattern on purpose: two separate mutable component borrows where one
// combined component (VitalStatsComponent below) needs a single access.
public fun update_player_bad(world: &mut World, player: u64, new_health: u64, new_mana: u64) {
let health = world::get_mut_component<HealthComponent>(world, player);
health.current = new_health;
let mana = world::get_mut_component<ManaComponent>(world, player);
mana.current = new_mana;
}
// ✅ Good: Batch updates in single component
// Health and mana co-located because they are almost always updated together,
// so one component borrow covers both.
struct VitalStatsComponent has store, drop {
health_current: u64,
health_maximum: u64,
mana_current: u64,
mana_maximum: u64,
}
// Updates both vitals through one mutable borrow of the combined component.
public fun update_player_good(
world: &mut World,
player: u64,
new_health: u64,
new_mana: u64
) {
let vitals = world::get_mut_component<VitalStatsComponent>(world, player);
vitals.health_current = new_health;
vitals.mana_current = new_mana;
// Single storage write
}
Optimize Loops and Iterations
// ✅ Optimized: Use indices and break early
// Linear scan over enemies with positions, tracking the nearest one within
// `max_range`; bails out immediately on an exact (distance 0) hit.
// Returns none when no enemy is in range.
public fun find_target_optimized(
world: &World,
searcher_pos: &PositionComponent,
max_range: u64
): Option<u64> {
let enemy_entities = world::query_with<EnemyTag, PositionComponent>(world);
let len = vector::length(&enemy_entities);
if (len == 0) return option::none();
// Sentinel: anything <= max_range beats this initial value.
let closest_distance = max_range + 1;
let closest_entity = option::none<u64>();
let i = 0;
while (i < len) {
let entity = *vector::borrow(&enemy_entities, i);
let pos = world::get_component<PositionComponent>(world, entity);
let distance = calculate_distance(searcher_pos, pos);
// Early exit if perfect match found
if (distance == 0) {
return option::some(entity)
};
if (distance < closest_distance && distance <= max_range) {
closest_distance = distance;
closest_entity = option::some(entity);
};
i = i + 1;
};
closest_entity
}
Use Native Operations
// ✅ Use built-in vector operations when possible
// Despawns every DeadTag entity, iterating the snapshot in reverse so
// removals never shift indices that have not been visited yet.
public fun remove_dead_entities_optimized(world: &mut World) {
let dead_entities = world::query_with<DeadTag>(world);
// Process in reverse order for efficient removal
let i = vector::length(&dead_entities);
while (i > 0) {
i = i - 1;
let entity = *vector::borrow(&dead_entities, i);
world::despawn_entity(world, entity);
};
}
// ✅ Batch vector operations
// Two-phase AoE: first collects every entity within `radius` of `center`,
// then hands the full list to one batched damage call.
public fun apply_area_damage(
world: &mut World,
center: &PositionComponent,
radius: u64,
damage: u64
) {
let all_entities = world::query_with<PositionComponent, HealthComponent>(world);
let targets = vector::empty<u64>();
// First pass: collect targets
let i = 0;
while (i < vector::length(&all_entities)) {
let entity = *vector::borrow(&all_entities, i);
let pos = world::get_component<PositionComponent>(world, entity);
if (calculate_distance(center, pos) <= radius) {
vector::push_back(&mut targets, entity);
};
i = i + 1;
};
// Second pass: apply damage to all targets
batch_damage_entities(world, targets, damage);
}
🗄️ Indexer Performance Optimization
Database Optimization
- Index Strategy
- Connection Pooling
- Query Optimization
-- Optimize for common query patterns
-- 1. Entity-component lookups (most frequent)
CREATE INDEX CONCURRENTLY idx_entity_components_lookup
ON entity_components(entity_id, component_type);
-- 2. Component queries by type (system queries)
-- INCLUDE makes this a covering index so reads avoid heap fetches.
CREATE INDEX CONCURRENTLY idx_components_by_type
ON entity_components(component_type)
INCLUDE (entity_id, component_data);
-- 3. Recent events (timeline queries)
CREATE INDEX CONCURRENTLY idx_events_recent
ON blockchain_events(timestamp DESC, event_type);
-- 4. Player-specific queries
-- NOTE(review): has_component() is called without any row column; a partial
-- index predicate must be row-dependent and IMMUTABLE — verify this function's
-- signature and volatility before relying on these two indexes.
CREATE INDEX CONCURRENTLY idx_players_active
ON entities(id)
WHERE has_component('PlayerComponent');
-- 5. Partial index for alive entities (common filter)
CREATE INDEX CONCURRENTLY idx_entities_alive
ON entities(id)
WHERE NOT has_component('DeadTag');
-- 6. Spatial queries for position-based systems
-- NOTE(review): GiST over two bare int expressions typically needs btree_gist
-- or a point() expression — confirm this index definition is accepted as written.
CREATE INDEX CONCURRENTLY idx_positions_spatial
ON entity_components
USING GIST ((component_data->>'x')::int, (component_data->>'y')::int)
WHERE component_type = 'PositionComponent';
-- Analyze query performance
EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON)
SELECT entity_id
FROM entity_components
WHERE component_type = 'HealthComponent'
AND (component_data->>'current')::int > 0;
// Optimized database connection management
import { Pool, PoolConfig } from 'pg';
import { createClient } from 'redis';
// Pooled Postgres + Redis data-access layer: read-through caching for
// component lookups and COPY-based bulk inserts for event batches.
class OptimizedDatabase {
private pool: Pool;
private redis: ReturnType<typeof createClient>;
constructor() {
// PostgreSQL connection pool
this.pool = new Pool({
host: process.env.DB_HOST,
port: parseInt(process.env.DB_PORT || '5432'),
database: process.env.DB_NAME,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
// Connection pool optimization
min: 10, // Minimum connections
max: 50, // Maximum connections
idleTimeoutMillis: 30000, // Close idle connections after 30s
connectionTimeoutMillis: 2000, // Connection timeout
statementTimeout: 60000, // Query timeout
// NOTE(review): node-postgres expects `statement_timeout` — confirm this key is honored.
// Performance tuning
options: '-c default_transaction_isolation=read_committed',
});
// Redis connection with cluster support
this.redis = createClient({
url: process.env.REDIS_URL,
socket: {
connectTimeout: 2000,
lazyConnect: true,
// NOTE(review): `lazyConnect` is not a documented node-redis socket option — verify.
},
// Connection pooling for Redis
isolationPoolOptions: {
min: 2,
max: 10,
},
});
// Connection monitoring
this.pool.on('connect', (client) => {
console.log('New database connection established');
});
this.pool.on('error', (err) => {
console.error('Database connection error:', err);
});
}
// Optimized query with caching
// Read-through cache: Redis hit returns immediately, otherwise query Postgres
// and cache the rows for 5 minutes.
async getComponentsCached(entityId: string, componentTypes: string[]) {
const cacheKey = `entity:${entityId}:${componentTypes.join(',')}`;
// Try cache first
const cached = await this.redis.get(cacheKey);
if (cached) {
return JSON.parse(cached);
}
// Database query with prepared statement
const query = `
SELECT component_type, component_data
FROM entity_components
WHERE entity_id = $1 AND component_type = ANY($2)
`;
const result = await this.pool.query(query, [entityId, componentTypes]);
// Cache result for 5 minutes
await this.redis.setEx(cacheKey, 300, JSON.stringify(result.rows));
return result.rows;
}
// Batch operations for better throughput
// Bulk insert wrapped in a transaction; ROLLBACK on any failure.
// NOTE(review): `copyFrom` is not imported here (it comes from pg-copy-streams),
// and COPY ... WITH CSV expects CSV-formatted text chunks — `write()` is being
// handed a raw array. Verify this path against the pg-copy-streams API.
async batchInsertComponents(updates: ComponentUpdate[]) {
const client = await this.pool.connect();
try {
await client.query('BEGIN');
// Use COPY for bulk inserts (fastest method)
const copyStream = client.query(copyFrom(`
COPY entity_components (entity_id, component_type, component_data, updated_at)
FROM STDIN WITH CSV
`));
for (const update of updates) {
copyStream.write([
update.entityId,
update.componentType,
JSON.stringify(update.data),
new Date()
]);
}
copyStream.end();
await client.query('COMMIT');
} catch (error) {
await client.query('ROLLBACK');
throw error;
} finally {
// Always return the client to the pool, even on failure.
client.release();
}
}
}
// Optimized query patterns
class OptimizedQueries {
private db: OptimizedDatabase;
// Use materialized views for complex aggregations
async createMaterializedViews() {
await this.db.query(`
CREATE MATERIALIZED VIEW IF NOT EXISTS player_stats AS
SELECT
e.entity_id,
p.component_data->>'name' as player_name,
h.component_data->>'current' as health,
h.component_data->>'maximum' as max_health,
pos.component_data->>'x' as x,
pos.component_data->>'y' as y
FROM entities e
JOIN entity_components p ON e.entity_id = p.entity_id
AND p.component_type = 'PlayerComponent'
JOIN entity_components h ON e.entity_id = h.entity_id
AND h.component_type = 'HealthComponent'
LEFT JOIN entity_components pos ON e.entity_id = pos.entity_id
AND pos.component_type = 'PositionComponent'
WHERE NOT EXISTS (
SELECT 1 FROM entity_components dead
WHERE dead.entity_id = e.entity_id
AND dead.component_type = 'DeadTag'
);
`);
// Refresh periodically
setInterval(async () => {
await this.db.query('REFRESH MATERIALIZED VIEW player_stats');
}, 60000); // Every minute
}
// Efficient spatial queries
async getEntitiesInRadius(centerX: number, centerY: number, radius: number) {
// Use spatial index for fast lookup
const query = `
SELECT entity_id, component_data
FROM entity_components
WHERE component_type = 'PositionComponent'
AND point((component_data->>'x')::int, (component_data->>'y')::int)
<@ circle(point($1, $2), $3)
`;
return this.db.query(query, [centerX, centerY, radius]);
}
// Optimized component filtering
async getEntitiesWithComponents(componentTypes: string[], filters: Record<string, any>) {
// Build dynamic query with proper indexing
const conditions = componentTypes.map((type, i) =>
`EXISTS (SELECT 1 FROM entity_components c${i}
WHERE c${i}.entity_id = e.entity_id
AND c${i}.component_type = '${type}')`
).join(' AND ');
const query = `
SELECT DISTINCT e.entity_id
FROM entities e
WHERE ${conditions}
LIMIT 1000
`;
return this.db.query(query);
}
}
Event Processing Optimization
Stream Processing Pipeline
Stream Processing Pipeline
// High-throughput event processing
import { Transform, pipeline } from 'stream';
import { promisify } from 'util';
// Batches incoming blockchain events (by size and by time), then processes
// each batch grouped per event type; batches are chained through a promise
// queue so they never run concurrently.
// NOTE(review): updateMetrics, handleBatchError, invalidateEntityCaches,
// emitWebSocketUpdates, processComponentUpdatedEvents and
// processEntitySpawnedEvents are referenced but not defined in this excerpt —
// presumably elsewhere in the module. The setInterval is also never cleared;
// a shutdown hook would be needed for clean teardown.
class OptimizedEventProcessor {
private batchSize = 100;
private batchTimeout = 1000; // 1 second
// Events accumulated since the last flush.
private currentBatch: BlockchainEvent[] = [];
// Serializes batch processing: each batch chains onto the previous promise.
private processingQueue: Promise<void> = Promise.resolve();
constructor(private db: OptimizedDatabase) {
this.startBatchProcessor();
}
// Enqueue one event; flush immediately once the batch is full.
async processEvent(event: BlockchainEvent) {
this.currentBatch.push(event);
// Process batch when full or after timeout
if (this.currentBatch.length >= this.batchSize) {
await this.processBatch();
}
}
// Time-based flush: drains any partial batch every batchTimeout ms.
private startBatchProcessor() {
setInterval(async () => {
if (this.currentBatch.length > 0) {
await this.processBatch();
}
}, this.batchTimeout);
}
// Atomically drains the current batch (splice) and appends it to the
// serialized processing chain.
private async processBatch() {
const batch = this.currentBatch.splice(0);
if (batch.length === 0) return;
// Queue processing to avoid concurrent batch conflicts
this.processingQueue = this.processingQueue.then(async () => {
await this.processBatchInternal(batch);
});
await this.processingQueue;
}
// Groups the batch by event type and processes the groups in parallel,
// recording throughput metrics and delegating failures.
private async processBatchInternal(events: BlockchainEvent[]) {
const startTime = performance.now();
try {
// Group events by type for efficient processing
const eventsByType = new Map<string, BlockchainEvent[]>();
events.forEach(event => {
if (!eventsByType.has(event.type)) {
eventsByType.set(event.type, []);
}
eventsByType.get(event.type)!.push(event);
});
// Process each event type in parallel
const processingPromises = Array.from(eventsByType.entries()).map(
([eventType, eventBatch]) => this.processEventType(eventType, eventBatch)
);
await Promise.all(processingPromises);
// Update metrics
const processingTime = performance.now() - startTime;
this.updateMetrics(events.length, processingTime);
console.log(`Processed ${events.length} events in ${processingTime}ms`);
} catch (error) {
console.error('Error processing batch:', error);
// Implement retry logic or dead letter queue
await this.handleBatchError(events, error);
}
}
// Dispatch table: one handler per known event type; unknown types are logged.
private async processEventType(eventType: string, events: BlockchainEvent[]) {
switch (eventType) {
case 'ComponentAdded':
return this.processComponentAddedEvents(events);
case 'ComponentUpdated':
return this.processComponentUpdatedEvents(events);
case 'EntitySpawned':
return this.processEntitySpawnedEvents(events);
default:
console.warn(`Unknown event type: ${eventType}`);
}
}
// Bulk-inserts added components, invalidates affected caches, then pushes
// realtime updates to subscribers.
private async processComponentAddedEvents(events: BlockchainEvent[]) {
// Batch insert components
const updates = events.map(event => ({
entityId: event.data.entity,
componentType: event.data.componentType,
data: event.data.componentData,
blockNumber: event.blockNumber,
transactionHash: event.transactionHash,
}));
await this.db.batchInsertComponents(updates);
// Invalidate related caches
const entityIds = events.map(e => e.data.entity);
await this.invalidateEntityCaches(entityIds);
// Emit WebSocket updates
await this.emitWebSocketUpdates(events);
}
}
WebSocket Optimization
- Connection Management
- Message Optimization
// Optimized WebSocket server
import WebSocket from 'ws';
import { EventEmitter } from 'events';
// WebSocket fan-out server: tracks connections, topic subscriptions and
// groups, and broadcasts pre-serialized messages to live subscribers.
// NOTE(review): handleDisconnection, unsubscribe, joinGroup, send and
// generateConnectionId are referenced but not defined in this excerpt —
// presumably elsewhere in the module.
class OptimizedWebSocketServer extends EventEmitter {
private wss: WebSocket.Server;
private connections: Map<string, WebSocketConnection> = new Map();
private subscriptions: Map<string, Set<string>> = new Map(); // topic -> connectionIds
private connectionGroups: Map<string, Set<string>> = new Map(); // group -> connectionIds
constructor(port: number) {
super();
this.wss = new WebSocket.Server({
port,
perMessageDeflate: false, // Disable compression for lower latency
maxPayload: 1024 * 1024, // 1MB max message size
});
this.wss.on('connection', this.handleConnection.bind(this));
this.startCleanupTimer();
}
// Registers a new socket: wires message/close/error handlers and confirms
// the connection back to the client.
private handleConnection(ws: WebSocket, request: any) {
const connectionId = this.generateConnectionId();
const connection = new WebSocketConnection(connectionId, ws);
this.connections.set(connectionId, connection);
ws.on('message', (data: Buffer) => {
this.handleMessage(connectionId, data);
});
ws.on('close', () => {
this.handleDisconnection(connectionId);
});
ws.on('error', (error) => {
console.error(`WebSocket error for ${connectionId}:`, error);
this.handleDisconnection(connectionId);
});
// Send connection confirmation
this.send(connectionId, {
type: 'connected',
connectionId,
timestamp: Date.now(),
});
}
// Parses an inbound JSON frame and dispatches on its `type` field; malformed
// frames are logged and dropped rather than killing the connection.
private handleMessage(connectionId: string, data: Buffer) {
try {
const message = JSON.parse(data.toString());
switch (message.type) {
case 'subscribe':
this.subscribe(connectionId, message.topic, message.filters);
break;
case 'unsubscribe':
this.unsubscribe(connectionId, message.topic);
break;
case 'join_group':
this.joinGroup(connectionId, message.group);
break;
case 'ping':
this.send(connectionId, { type: 'pong', timestamp: Date.now() });
break;
}
} catch (error) {
console.error(`Error parsing message from ${connectionId}:`, error);
}
}
// Adds the connection to a topic's subscriber set, remembers its filters,
// and acknowledges the subscription.
private subscribe(connectionId: string, topic: string, filters?: any) {
if (!this.subscriptions.has(topic)) {
this.subscriptions.set(topic, new Set());
}
this.subscriptions.get(topic)!.add(connectionId);
const connection = this.connections.get(connectionId);
if (connection) {
connection.subscriptions.set(topic, filters || {});
}
this.send(connectionId, {
type: 'subscribed',
topic,
timestamp: Date.now(),
});
}
// Optimized broadcast with filtering
// Serializes once, then fires at every live subscriber that passes the
// optional per-connection filter without awaiting delivery.
broadcast(topic: string, data: any, filters?: (connectionId: string) => boolean) {
const subscribers = this.subscriptions.get(topic);
if (!subscribers) return;
const message = JSON.stringify({
type: 'update',
topic,
data,
timestamp: Date.now(),
});
// Batch send for better performance
const sends: Promise<void>[] = [];
for (const connectionId of subscribers) {
if (filters && !filters(connectionId)) continue;
const connection = this.connections.get(connectionId);
if (connection?.isAlive()) {
sends.push(connection.send(message));
}
}
// Don't wait for all sends to complete
// NOTE(review): Promise.allSettled never rejects, so this .catch is inert;
// individual send failures are silently swallowed here.
Promise.allSettled(sends).catch(console.error);
}
// Efficient cleanup of dead connections
// Periodically drops connections that have stopped responding to pings.
private startCleanupTimer() {
setInterval(() => {
for (const [connectionId, connection] of this.connections) {
if (!connection.isAlive()) {
this.handleDisconnection(connectionId);
}
}
}, 30000); // Check every 30 seconds
}
}
// One client connection: promise-based send plus a ping/pong heartbeat used
// by the server's liveness checks.
class WebSocketConnection {
  // topic -> subscription filters for this client
  public subscriptions: Map<string, any> = new Map();
  private lastPing: number = Date.now();
  // Heartbeat handle, kept so it can be cleared on close.
  // Fix: the original setInterval was never cleared, leaking one timer (and
  // retaining the connection object) for every socket that ever connected.
  private pingTimer: NodeJS.Timeout;

  constructor(
    public id: string,
    private ws: WebSocket
  ) {
    // Start ping/pong for connection health
    this.pingTimer = setInterval(() => {
      if (this.isAlive()) {
        this.ws.ping();
      }
    }, 30000);
    this.ws.on('pong', () => {
      this.lastPing = Date.now();
    });
    // Stop the heartbeat once the underlying socket is gone.
    this.ws.on('close', () => {
      clearInterval(this.pingTimer);
    });
  }

  /** Resolves when the frame is handed to the socket; rejects when not open. */
  async send(message: string): Promise<void> {
    return new Promise((resolve, reject) => {
      if (this.ws.readyState !== WebSocket.OPEN) {
        reject(new Error('Connection not open'));
        return;
      }
      this.ws.send(message, (error) => {
        if (error) reject(error);
        else resolve();
      });
    });
  }

  /** Open AND heard from (pong) within the last minute. */
  isAlive(): boolean {
    return this.ws.readyState === WebSocket.OPEN &&
      (Date.now() - this.lastPing) < 60000; // 1 minute timeout
  }
}
// Efficient message serialization and batching
// Efficient message serialization and batching
// Per-topic outbound queues: messages are prioritized, coalesced into
// batches on a ~60fps timer, grouped by type, and broadcast together.
// NOTE(review): `this.wsServer` and `this.getMessageType` are used below but
// never declared/defined in this class — they must be injected or defined
// elsewhere for this code to run.
class MessageOptimizer {
private messageQueue: Map<string, QueuedMessage[]> = new Map();
private batchTimer: NodeJS.Timeout | null = null;
private readonly BATCH_SIZE = 50;
private readonly BATCH_DELAY = 16; // ~60fps
// Enqueue one message for `topic`, keeping the queue sorted best-first
// (priority, then age) and bounded in size.
queueMessage(topic: string, data: any, priority: 'high' | 'normal' | 'low' = 'normal') {
if (!this.messageQueue.has(topic)) {
this.messageQueue.set(topic, []);
}
const queue = this.messageQueue.get(topic)!;
queue.push({
data,
priority,
timestamp: Date.now(),
});
// Sort by priority and timestamp
queue.sort((a, b) => {
const priorityOrder = { high: 0, normal: 1, low: 2 };
const priorityDiff = priorityOrder[a.priority] - priorityOrder[b.priority];
return priorityDiff !== 0 ? priorityDiff : a.timestamp - b.timestamp;
});
// Limit queue size to prevent memory issues
if (queue.length > this.BATCH_SIZE * 2) {
queue.splice(this.BATCH_SIZE); // Keep the highest-priority half; lower-priority overflow is dropped
}
this.scheduleBatch();
}
// Arms the flush timer once; subsequent calls are no-ops until it fires.
private scheduleBatch() {
if (this.batchTimer) return;
this.batchTimer = setTimeout(() => {
this.processBatches();
this.batchTimer = null;
}, this.BATCH_DELAY);
}
// Drains up to BATCH_SIZE messages per topic and sends them grouped by type.
private processBatches() {
for (const [topic, messages] of this.messageQueue.entries()) {
if (messages.length === 0) continue;
// Take batch from front of queue
const batch = messages.splice(0, this.BATCH_SIZE);
// Group similar message types for compression
const grouped = this.groupMessages(batch);
for (const group of grouped) {
this.sendBatch(topic, group);
}
}
}
// Partitions a batch into sub-batches of the same message type.
private groupMessages(messages: QueuedMessage[]): QueuedMessage[][] {
const groups = new Map<string, QueuedMessage[]>();
for (const message of messages) {
const type = this.getMessageType(message.data);
if (!groups.has(type)) {
groups.set(type, []);
}
groups.get(type)!.push(message);
}
return Array.from(groups.values());
}
// A lone message is sent as-is; multiples are wrapped in one batch_update frame.
private sendBatch(topic: string, batch: QueuedMessage[]) {
if (batch.length === 1) {
// Single message - send directly
this.wsServer.broadcast(topic, batch[0].data);
} else {
// Multiple messages - send as batch
const batchData = {
type: 'batch_update',
messages: batch.map(m => m.data),
count: batch.length,
};
this.wsServer.broadcast(topic, batchData);
}
}
}
// One queued outbound message: payload, delivery priority, and enqueue time
// (used as the tiebreaker within a priority level).
interface QueuedMessage {
data: any;
priority: 'high' | 'normal' | 'low';
timestamp: number;
}
🖥️ Frontend Performance Optimization
React Optimization Strategies
- Component Optimization
- State Management
- Rendering Optimization
// Optimized React components for real-time updates
import React, { useMemo, useCallback, memo } from 'react';
import { useDubheClient } from '@0xobelisk/dubhe-react';
// Memoized entity component to prevent unnecessary re-renders
// Memoized entity card: re-renders only when its props change; derived combat
// stats and the attack handler are memoized against their inputs.
// NOTE(review): the lowercase keys (components.health / stats / equipment)
// assume useEntityComponents lowercases component-type keys — the hook shown
// later in this guide keys by the full type name ('HealthComponent'); verify
// which contract is correct.
const EntityDisplay = memo<{ entityId: string; componentTypes: string[] }>(({
entityId,
componentTypes
}) => {
const client = useDubheClient();
// Use optimized hook that batches component queries
const components = useEntityComponents(entityId, componentTypes);
// Memoize expensive calculations
const computedStats = useMemo(() => {
if (!components.health || !components.stats) return null;
return {
healthPercentage: (components.health.current / components.health.maximum) * 100,
totalAttack: components.stats.base_attack + (components.equipment?.weapon_bonus || 0),
totalDefense: components.stats.base_defense + (components.equipment?.armor_bonus || 0),
};
}, [components.health, components.stats, components.equipment]);
// Memoized event handlers
const handleAttack = useCallback(async (targetId: string) => {
await client.tx.combatSystem.attack({
attacker: entityId,
target: targetId,
});
}, [client, entityId]);
// Placeholder until the required components have loaded.
if (!computedStats) return <div>Loading...</div>;
return (
<div className="entity-display">
<HealthBar
current={components.health.current}
maximum={components.health.maximum}
percentage={computedStats.healthPercentage}
/>
<StatsDisplay stats={computedStats} />
<ActionButtons onAttack={handleAttack} />
</div>
);
});
// Virtualized list for large entity collections
import { FixedSizeList as List } from 'react-window';
// Windowed entity list: react-window mounts only the visible rows, so very
// large collections stay cheap to render.
const EntityList: React.FC<{ entityIds: string[] }> = ({ entityIds }) => {
// Row renderer; `style` from react-window positions the row and must be
// forwarded to the wrapper element.
const renderEntity = useCallback(({ index, style }: any) => {
const entityId = entityIds[index];
return (
<div style={style}>
<EntityDisplay
entityId={entityId}
componentTypes={['HealthComponent', 'StatsComponent']}
/>
</div>
);
}, [entityIds]);
return (
<List
height={600}
itemCount={entityIds.length}
itemSize={80}
width="100%"
>
{renderEntity}
</List>
);
};
// Optimized state management with Zustand
import { create } from 'zustand';
import { subscribeWithSelector } from 'zustand/middleware';
// Zustand store shape: entity registry, a two-level component map, mutation
// actions, and read-only selectors.
interface GameState {
entities: Map<string, Entity>;
components: Map<string, Map<string, any>>; // entityId -> componentType -> data
// Actions
updateEntity: (entityId: string, components: Record<string, any>) => void;
addEntity: (entity: Entity) => void;
removeEntity: (entityId: string) => void;
// Optimized selectors
getEntityComponents: (entityId: string, componentTypes: string[]) => any[];
getEntitiesWithComponents: (componentTypes: string[]) => string[];
}
const useGameStore = create<GameState>()(
subscribeWithSelector((set, get) => ({
entities: new Map(),
components: new Map(),
updateEntity: (entityId, newComponents) => set(state => {
// Optimized immutable update
const newComponentsMap = new Map(state.components);
let entityComponents = newComponentsMap.get(entityId);
if (!entityComponents) {
entityComponents = new Map();
newComponentsMap.set(entityId, entityComponents);
}
// Update only changed components
for (const [componentType, data] of Object.entries(newComponents)) {
entityComponents.set(componentType, data);
}
return { components: newComponentsMap };
}),
getEntityComponents: (entityId, componentTypes) => {
const state = get();
const entityComponents = state.components.get(entityId);
if (!entityComponents) return [];
return componentTypes.map(type => entityComponents.get(type)).filter(Boolean);
},
getEntitiesWithComponents: (componentTypes) => {
const state = get();
const result: string[] = [];
for (const [entityId, entityComponents] of state.components) {
const hasAllComponents = componentTypes.every(type =>
entityComponents.has(type)
);
if (hasAllComponents) {
result.push(entityId);
}
}
return result;
},
}))
);
// Optimized hook for component subscriptions: subscribes a React component
// to one entity's selected component types only.
// NOTE(review): this selector builds a FRESH object on every invocation, so
// with the store's default Object.is equality the hook re-renders on every
// store update even when the selected data is unchanged — consider a shallow
// equality function. Also, `componentTypes.join(',')` in the dependency array
// is a workaround for callers passing a new array each render; confirm callers
// either memoize the array or rely on this key-based dependency.
export const useEntityComponents = (entityId: string, componentTypes: string[]) => {
  // Subscribe only to specific entity and component types
  return useGameStore(
    useCallback((state) => {
      const entityComponents = state.components.get(entityId);
      if (!entityComponents) return {};
      const result: Record<string, any> = {};
      for (const componentType of componentTypes) {
        const component = entityComponents.get(componentType);
        if (component) {
          result[componentType] = component;
        }
      }
      return result;
    }, [entityId, componentTypes.join(',')])
  );
};
// Canvas-based rendering for high-performance games
import React, { useRef, useEffect, useCallback } from 'react';
const GameCanvas: React.FC = () => {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const animationFrameRef = useRef<number>();
  const lastRenderTime = useRef<number>(0);

  // Ids of everything drawable (has both a position and render data).
  const entities = useGameStore(state =>
    state.getEntitiesWithComponents(['PositionComponent', 'RenderComponent'])
  );

  // Draw a single entity. BUG FIX: declared BEFORE renderFrame so it can be
  // listed in renderFrame's dependency array — the original referenced it
  // before its declaration and omitted it from the deps list.
  const renderEntity = useCallback((ctx: CanvasRenderingContext2D, entityId: string) => {
    const components = useGameStore.getState().getEntityComponents(
      entityId,
      ['PositionComponent', 'RenderComponent', 'HealthComponent']
    );
    // NOTE(review): getEntityComponents drops missing components, so this
    // positional destructuring relies on Position/Render always existing for
    // the entities selected above (Health, when present, is last). Confirm
    // before reusing this renderer with a different entity query.
    const [position, render, health] = components;
    if (!position || !render) return;

    ctx.save();
    ctx.translate(position.x, position.y);

    // Sprite if cached, otherwise a colored rectangle fallback.
    if (render.sprite) {
      const img = SpriteCache.get(render.sprite);
      if (img) {
        ctx.drawImage(img, -render.width / 2, -render.height / 2, render.width, render.height);
      }
    } else {
      // Fallback to shape rendering
      ctx.fillStyle = render.color;
      ctx.fillRect(-render.width / 2, -render.height / 2, render.width, render.height);
    }

    // Health bar: red background, green foreground proportional to health.
    if (health) {
      const healthPercentage = health.current / health.maximum;
      const barWidth = render.width;
      const barHeight = 4;
      ctx.fillStyle = '#ff0000';
      ctx.fillRect(-barWidth / 2, -render.height / 2 - 8, barWidth, barHeight);
      ctx.fillStyle = '#00ff00';
      ctx.fillRect(-barWidth / 2, -render.height / 2 - 8, barWidth * healthPercentage, barHeight);
    }
    ctx.restore();
  }, []);

  // Main render loop, throttled to ~60fps.
  const renderFrame = useCallback((timestamp: number) => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    const ctx = canvas.getContext('2d');
    if (!ctx) return; // 2d context unavailable (was an unchecked non-null assertion)

    // Skip this frame if we are ahead of the 60fps budget; keep the loop alive.
    const deltaTime = timestamp - lastRenderTime.current;
    if (deltaTime < 16.67) {
      animationFrameRef.current = requestAnimationFrame(renderFrame);
      return;
    }

    ctx.clearRect(0, 0, canvas.width, canvas.height);
    for (const entityId of entities) {
      renderEntity(ctx, entityId);
    }

    lastRenderTime.current = timestamp;
    animationFrameRef.current = requestAnimationFrame(renderFrame);
  }, [entities, renderEntity]);

  // Start the loop on mount (and when the frame callback changes); always
  // cancel the pending frame on cleanup to avoid leaked callbacks.
  useEffect(() => {
    animationFrameRef.current = requestAnimationFrame(renderFrame);
    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
    };
  }, [renderFrame]);

  return (
    <canvas
      ref={canvasRef}
      width={800}
      height={600}
      style={{ border: '1px solid #ccc' }}
    />
  );
};
// Sprite caching system with in-flight load de-duplication.
class SpriteCache {
  private static cache: Map<string, HTMLImageElement> = new Map();
  // Sprites currently loading. BUG FIX: prevents get() from spawning a brand
  // new Image (and network request) on EVERY animation frame while the first
  // load is still pending — the original started a fresh load per call.
  private static loading: Set<string> = new Set();

  /** Return the cached image, or null while it loads in the background. */
  static get(spriteId: string): HTMLImageElement | null {
    const cached = this.cache.get(spriteId);
    if (cached) {
      return cached;
    }
    if (!this.loading.has(spriteId)) {
      this.loading.add(spriteId);
      const img = new Image();
      img.onload = () => {
        this.cache.set(spriteId, img);
        this.loading.delete(spriteId);
      };
      img.onerror = () => {
        // Allow a retry on a later frame rather than wedging forever.
        this.loading.delete(spriteId);
      };
      img.src = `/sprites/${spriteId}.png`;
    }
    return null; // Will be available on a later frame
  }

  /** Eagerly load a set of sprites; resolves even if some fail. */
  static preload(spriteIds: string[]): Promise<void> {
    const promises = spriteIds.map(id => new Promise<void>((resolve) => {
      if (this.cache.has(id)) {
        resolve();
        return;
      }
      const img = new Image();
      img.onload = () => {
        this.cache.set(id, img);
        this.loading.delete(id);
        resolve();
      };
      img.onerror = () => resolve(); // Don't block on failed loads
      img.src = `/sprites/${id}.png`;
    }));
    return Promise.all(promises).then(() => {});
  }
}
π§ Advanced Optimization Techniques
Memory Management
Object Pooling
Object Pooling
// Object pool: recycles frequently created objects to reduce GC pressure.
class ObjectPool<T> {
  private available: T[] = [];
  private inUse: Set<T> = new Set();
  private factory: () => T;
  private reset: (obj: T) => void;

  constructor(factory: () => T, reset: (obj: T) => void, initialSize = 10) {
    this.factory = factory;
    this.reset = reset;
    // Warm the pool up-front so early acquires never allocate.
    let remaining = initialSize;
    while (remaining-- > 0) {
      this.available.push(factory());
    }
  }

  /** Hand out a pooled object, allocating only when the pool is empty. */
  acquire(): T {
    let candidate = this.available.pop();
    if (!candidate) {
      candidate = this.factory();
    }
    this.inUse.add(candidate);
    return candidate;
  }

  /** Return an object to the pool; a no-op if it was not acquired from here. */
  release(obj: T) {
    if (!this.inUse.delete(obj)) {
      return;
    }
    this.reset(obj);
    this.available.push(obj);
  }

  /** Pool occupancy snapshot for diagnostics. */
  getPoolSize() {
    const available = this.available.length;
    const inUse = this.inUse.size;
    return { available, inUse, total: available + inUse };
  }
}
// Usage examples

// Simple 2D vector value carried by the pool below.
interface Vector2D {
  x: number;
  y: number;
}

// Shared pool of scratch vectors; reset zeroes both components on release.
const vectorPool = new ObjectPool<Vector2D>(
  () => ({ x: 0, y: 0 }),
  (vec) => { vec.x = 0; vec.y = 0; }
);
// In performance-critical code
// Euclidean distance between two points, using a pooled scratch vector so
// the hot path allocates nothing.
function calculateDistance(pos1: Vector2D, pos2: Vector2D): number {
  const scratch = vectorPool.acquire();
  scratch.x = pos2.x - pos1.x;
  scratch.y = pos2.y - pos1.y;
  const dist = Math.sqrt(scratch.x * scratch.x + scratch.y * scratch.y);
  vectorPool.release(scratch);
  return dist;
}
Caching Strategies
- Multi-Level Caching
- Smart Cache Invalidation
// Hierarchical caching system
// Read path: L1 (in-process map) -> L2 (Redis) -> L3 (database) or a
// caller-supplied fetcher. Writes populate all three levels.
// NOTE(review): Redis and Database are project-declared types; their
// get/set/setex contracts are assumed from the usage below — confirm.
class CacheManager {
  private l1Cache: Map<string, CacheEntry> = new Map(); // In-memory
  private l2Cache: Redis; // Redis cache
  private l3Cache: Database; // Database
  private readonly L1_TTL = 30_000; // 30 seconds
  private readonly L2_TTL = 300_000; // 5 minutes

  constructor(redis: Redis, database: Database) {
    this.l2Cache = redis;
    this.l3Cache = database;
    // Cleanup expired L1 entries every 10s.
    // NOTE(review): this interval handle is never cleared, so instances can
    // never be garbage-collected — consider exposing a dispose() method.
    setInterval(() => this.cleanupL1(), 10_000);
  }

  // Look up `key` through the hierarchy; on a full miss, fall back to
  // `fetcher` (if provided) or the database, then backfill every level.
  async get<T>(key: string, fetcher?: () => Promise<T>): Promise<T | null> {
    // L1 Cache (memory) — expired entries are treated as misses
    const l1Entry = this.l1Cache.get(key);
    if (l1Entry && !this.isExpired(l1Entry)) {
      return l1Entry.value;
    }
    // L2 Cache (Redis) — values are stored JSON-serialized
    const l2Value = await this.l2Cache.get(key);
    if (l2Value) {
      const parsed = JSON.parse(l2Value);
      this.setL1(key, parsed);
      return parsed;
    }
    // L3 Cache (Database) or fetcher
    let value: T | null = null;
    if (fetcher) {
      value = await fetcher();
    } else {
      value = await this.l3Cache.get(key);
    }
    if (value !== null) {
      // Store in all cache levels
      await this.setAll(key, value);
    }
    return value;
  }

  /** Write-through: store the value in all three cache levels. */
  async set<T>(key: string, value: T): Promise<void> {
    await this.setAll(key, value);
  }

  // Record a value in L1 with its insertion timestamp.
  private setL1<T>(key: string, value: T) {
    this.l1Cache.set(key, {
      value,
      timestamp: Date.now(),
    });
  }

  private async setAll<T>(key: string, value: T) {
    // L1 Cache
    this.setL1(key, value);
    // L2 Cache — setex takes seconds, hence the ms -> s conversion
    await this.l2Cache.setex(key, this.L2_TTL / 1000, JSON.stringify(value));
    // L3 Cache (database) - fire-and-forget; errors only logged
    this.l3Cache.set(key, value).catch(console.error);
  }

  // An L1 entry is stale once it has outlived L1_TTL.
  private isExpired(entry: CacheEntry): boolean {
    return Date.now() - entry.timestamp > this.L1_TTL;
  }

  // Periodic sweep of expired L1 entries (scheduled in the constructor).
  private cleanupL1() {
    for (const [key, entry] of this.l1Cache.entries()) {
      if (this.isExpired(entry)) {
        this.l1Cache.delete(key);
      }
    }
  }
}

// One L1 cache slot: the cached value plus its insertion time.
interface CacheEntry {
  value: any;
  timestamp: number;
}
// Cache invalidation with dependency tracking.
// Edge direction: `dependencies` maps a key -> keys that depend on it;
// `reverseDeps` maps a key -> keys it depends on.
// BUG FIX: the original populated reverseDeps in the wrong direction (it
// stored dependents, same as `dependencies`), so invalidate() cleaned up the
// wrong edges; re-setting a key also wiped its dependents via
// `dependencies.set(key, new Set())`; and recursive invalidation could loop
// forever on a dependency cycle.
class SmartCache {
  private cache: Map<string, any> = new Map();
  private dependencies: Map<string, Set<string>> = new Map(); // key -> dependent keys
  private reverseDeps: Map<string, Set<string>> = new Map(); // key -> keys it depends on

  /** Store a value and (re)declare which keys it depends on. */
  set(key: string, value: any, dependencies: string[] = []) {
    // Re-setting a key replaces its dependency edges; detach the stale ones
    // first so they cannot trigger spurious invalidations later.
    this.detachFromDependencies(key);
    this.cache.set(key, value);
    this.reverseDeps.set(key, new Set(dependencies));
    for (const dep of dependencies) {
      if (!this.dependencies.has(dep)) {
        this.dependencies.set(dep, new Set());
      }
      this.dependencies.get(dep)!.add(key);
    }
  }

  get(key: string): any {
    return this.cache.get(key);
  }

  /** Remove a key and, transitively, everything that depends on it. */
  invalidate(key: string) {
    this.cache.delete(key);
    // Snapshot and remove this key's dependents BEFORE recursing so a
    // dependency cycle terminates instead of looping forever.
    const dependents = this.dependencies.get(key);
    this.dependencies.delete(key);
    if (dependents) {
      for (const dependent of dependents) {
        if (this.cache.has(dependent) || this.dependencies.has(dependent)) {
          this.invalidate(dependent); // Recursive invalidation
        }
      }
    }
    this.detachFromDependencies(key);
  }

  // Remove `key` from the dependent-set of every key it depends on.
  private detachFromDependencies(key: string) {
    const deps = this.reverseDeps.get(key);
    if (deps) {
      for (const dep of deps) {
        this.dependencies.get(dep)?.delete(key);
      }
      this.reverseDeps.delete(key);
    }
  }

  // Example usage for entity-component caching: the cached bundle depends on
  // the entity itself and on every component type it contains.
  cacheEntityComponents(entityId: string, components: Record<string, any>) {
    const cacheKey = `entity:${entityId}:components`;
    const dependencies = [`entity:${entityId}`, ...Object.keys(components).map(type => `component:${type}`)];
    this.set(cacheKey, components, dependencies);
  }

  invalidateEntity(entityId: string) {
    this.invalidate(`entity:${entityId}`);
  }

  invalidateComponentType(componentType: string) {
    this.invalidate(`component:${componentType}`);
  }
}
π Performance Monitoring Dashboard
Comprehensive Metrics Collection
Comprehensive Metrics Collection
// Performance metrics aggregation
// Collects named, tagged time series in memory, evaluates alert rules on
// every data point, and samples process-level metrics every 5 seconds.
class PerformanceDashboard {
  private metrics: Map<string, MetricSeries> = new Map();
  private alerts: AlertRule[] = [];
  // Handle for the periodic sampler. BUG FIX: the original never kept it, so
  // collection could never be stopped and the instance leaked forever.
  private collectionTimer?: ReturnType<typeof setInterval>;

  constructor() {
    this.startMetricsCollection();
  }

  /** Stop the background metrics sampler; safe to call more than once. */
  stop() {
    if (this.collectionTimer !== undefined) {
      clearInterval(this.collectionTimer);
      this.collectionTimer = undefined;
    }
  }

  /** Append one data point to the series identified by name + tags. */
  recordMetric(name: string, value: number, tags: Record<string, string> = {}) {
    // NOTE(review): JSON.stringify is key-order sensitive, so {a,b} vs {b,a}
    // tag objects yield different series keys — confirm callers are consistent.
    const key = `${name}:${JSON.stringify(tags)}`;
    if (!this.metrics.has(key)) {
      this.metrics.set(key, {
        name,
        tags,
        values: [],
        timestamps: [],
      });
    }
    const series = this.metrics.get(key)!;
    series.values.push(value);
    series.timestamps.push(Date.now());
    // Bound memory: keep only the last 1000 data points per series.
    if (series.values.length > 1000) {
      series.values.shift();
      series.timestamps.shift();
    }
    this.checkAlerts(name, value, tags);
  }

  /** All series for `name`, restricted to the last `timeRange` ms (default 1h). */
  getMetrics(name: string, timeRange: number = 3600000): MetricSeries[] {
    const cutoff = Date.now() - timeRange;
    const results: MetricSeries[] = [];
    for (const [, series] of this.metrics) {
      if (series.name !== name) continue;
      const filtered: MetricSeries = {
        ...series,
        values: [],
        timestamps: [],
      };
      for (let i = 0; i < series.timestamps.length; i++) {
        if (series.timestamps[i] >= cutoff) {
          filtered.values.push(series.values[i]);
          filtered.timestamps.push(series.timestamps[i]);
        }
      }
      results.push(filtered);
    }
    return results;
  }

  addAlert(rule: AlertRule) {
    this.alerts.push(rule);
  }

  private checkAlerts(metricName: string, value: number, tags: Record<string, string>) {
    for (const alert of this.alerts) {
      if (alert.metric === metricName && this.matchesTags(alert.tags, tags)) {
        if (alert.condition(value)) {
          this.triggerAlert(alert, value);
        }
      }
    }
  }

  // A rule matches when every tag it specifies appears on the data point with
  // the same value. BUG FIX: the original called this without defining it.
  private matchesTags(ruleTags: Record<string, string>, tags: Record<string, string>): boolean {
    return Object.entries(ruleTags).every(([k, v]) => tags[k] === v);
  }

  private triggerAlert(alert: AlertRule, value: number) {
    console.warn(`ALERT: ${alert.name} - Current value: ${value}`);
    // Send to monitoring system (Slack, PagerDuty, etc.)
    fetch('/api/alerts', {
      method: 'POST',
      body: JSON.stringify({
        alertName: alert.name,
        metric: alert.metric,
        value,
        severity: alert.severity,
        timestamp: Date.now(),
      }),
    }).catch(console.error);
  }

  private startMetricsCollection() {
    // Sample system metrics every 5 seconds.
    this.collectionTimer = setInterval(() => {
      this.recordMetric('memory_usage', process.memoryUsage().heapUsed);
      this.recordMetric('cpu_usage', process.cpuUsage().user + process.cpuUsage().system);
      // Custom application metrics
      this.recordMetric('active_connections', this.getActiveConnections());
      this.recordMetric('cache_hit_rate', this.getCacheHitRate());
    }, 5000);
  }

  // TODO(review): wire these to real sources. BUG FIX: the original called
  // both without defining them, which did not compile.
  private getActiveConnections(): number {
    return 0;
  }

  private getCacheHitRate(): number {
    return 1;
  }
}

// One named, tagged time series (parallel value/timestamp arrays).
interface MetricSeries {
  name: string;
  tags: Record<string, string>;
  values: number[];
  timestamps: number[];
}

// Alert rule evaluated against every recorded data point of `metric`.
interface AlertRule {
  name: string;
  metric: string;
  tags: Record<string, string>; // tags the data point must carry (subset match)
  condition: (value: number) => boolean;
  severity: 'low' | 'medium' | 'high' | 'critical';
}
// Usage
// Create one dashboard per process; it starts sampling system metrics
// immediately from its constructor.
const dashboard = new PerformanceDashboard();

// Add performance alerts (evaluated on every recorded data point)
dashboard.addAlert({
  name: 'High Memory Usage',
  metric: 'memory_usage',
  tags: {},
  condition: (value) => value > 1024 * 1024 * 1024, // 1GB
  severity: 'high',
});

dashboard.addAlert({
  name: 'Low Cache Hit Rate',
  metric: 'cache_hit_rate',
  tags: {},
  condition: (value) => value < 0.8, // Below 80%
  severity: 'medium',
});
π― Performance Testing Tools
Load Testing
Artillery.js and k6 for testing system limits
Profiling Tools
Chrome DevTools, Node.js profiler, and Move analyzer
Benchmarking
Automated performance regression testing
Monitoring
Production monitoring with Grafana and Prometheus
π₯ Real-World Case Studies
Case Study 1: MMO Battle Royale Optimization
- Problem
- Solution
- Results
A 100-player battle royale game faced severe performance issues:
- Gas costs: 0.5 SUI per player action (unsustainable)
- Latency: 3-5 second transaction confirmations
- Throughput: Only 10 TPS during peak battles
- Memory: Frontend consuming 500MB+ for player states
Applied a comprehensive optimization strategy at three levels — smart contract, database, and frontend:
// Before: Individual position updates — one transaction per player move.
public entry fun move_player(world: &mut World, player: u64, x: u64, y: u64) {
    // Cost: ~50k gas per call (body elided for illustration)
}

// After: Batch movement system — amortizes fixed per-transaction costs
// across many players in a single call.
public entry fun batch_move_players(
    world: &mut World,
    players: vector<u64>,
    positions: vector<Position>
) {
    // Cost: ~10k gas per player (5x improvement); body elided for illustration
}
-- Added spatial indexing for proximity queries
-- NOTE(review): a multi-column GiST index over plain integer expressions
-- requires the btree_gist extension (CREATE EXTENSION btree_gist);
-- alternatively index a point((x),(y)) expression. Confirm before deploying.
CREATE INDEX idx_player_position_spatial
ON entity_components
USING GIST ((component_data->>'x')::int, (component_data->>'y')::int)
WHERE component_type = 'PositionComponent';
// Implemented object pooling for bullets
// Reuses Bullet instances across shots; Bullet.reset() must return the
// instance to a neutral state before reuse.
const bulletPool = new ObjectPool(
  () => new Bullet(),
  (bullet) => bullet.reset(),
  1000 // Pre-allocate 1000 bullets
);
Performance Improvements:
- β‘ Gas costs: 0.5 SUI β 0.1 SUI per action (80% reduction)
- π Latency: 3-5s β 1-2s transaction time (60% improvement)
- π Throughput: 10 TPS β 45 TPS (350% increase)
- πΎ Memory: 500MB β 150MB frontend usage (70% reduction)
- 300% increase in daily active users
- 85% reduction in infrastructure costs
- 95% improvement in user satisfaction scores
Case Study 2: DeFi Trading Game Optimization
High-Frequency Trading System
High-Frequency Trading System
Challenge: processing 1000+ trades per second with complex market mechanics. The key optimizations are shown below. Results: achieved 1000+ TPS with a 90% cost reduction.
- Merkle Tree State Compression
// Compressed market state: individual orders live off-chain behind a single
// merkle commitment instead of being stored on-chain one by one.
struct MarketState has store {
    // Instead of storing all orders individually
    orders_merkle_root: vector<u8>,
    total_volume: u64,
    last_update: u64,
}

// Validate trades against merkle proof
// Each trade is verified against `orders_merkle_root`, so an entire batch
// settles within one transaction.
public fun execute_trade_batch(
    market: &mut MarketState,
    trades: vector<Trade>,
    merkle_proofs: vector<MerkleProof>
) {
    // Process entire batch in single transaction (body elided for illustration)
}
- Event Aggregation
// Single event for entire trading session — emitted once per session instead
// of once per trade, cutting event-storage and indexing costs.
struct TradingSessionEvent has copy, drop {
    session_id: u64,
    total_trades: u64,
    volume_by_pair: vector<PairVolume>,
    price_changes: vector<PriceChange>,
    // Compressed event data
}
π οΈ Optimization Toolkit
Performance Testing Framework
# Comprehensive performance testing suite

# 1. Contract gas analysis
dubhe analyze --gas-report contracts/

# 2. Load testing with Artillery
artillery run load-test-config.yml

# 3. Database performance analysis
# BUG FIX: the SQL below must run inside Postgres, not the shell — invoke it
# through psql. (On PostgreSQL 13+ the column is mean_exec_time.)
psql -c "SELECT pg_stat_statements_reset();"
# Run your queries, then list the slowest statements
psql -c "SELECT query, calls, mean_time FROM pg_stat_statements ORDER BY mean_time DESC;"

# 4. Frontend profiling
lighthouse --only-categories=performance http://localhost:3000
Optimization Checklist
- Smart Contracts
- Database & Indexing
- Frontend
Pre-deployment Checklist:
Gas Optimization:
- Components are minimal and focused
- Systems use batch operations where possible
- Loops are bounded and optimized
- Storage operations are minimized
- Event emission is efficient
Architecture Review:
- ECS patterns are properly implemented
- Component granularity is appropriate
- Systems have single responsibilities
- Error handling is comprehensive
- Access control is properly implemented
Performance Checklist:
Query Optimization:
- All frequently queried columns are indexed
- Composite indexes match query patterns
- No unnecessary table scans
- Query execution plans are optimized
- Connection pooling is configured
Data Management:
- Archiving strategy for old data
- Proper partitioning for large tables
- Regular statistics updates
- Materialized views for complex aggregations
- Caching layer implemented
User Experience Checklist:
Rendering Performance:
- Components are properly memoized
- Virtual scrolling for large lists
- Object pooling for frequent allocations
- Canvas rendering for game elements
- Efficient state management
Network Optimization:
- WebSocket connection management
- Message batching and compression
- Intelligent caching strategies
- Progressive loading
- Offline capability
π Advanced Optimization Strategies
Hybrid On-Chain/Off-Chain Architecture
Game State Sharding
Game State Sharding
// Split game world into optimized shards
class WorldShardManager {
  private shards: Map<string, WorldShard> = new Map();

  /** Resolve the shard that owns an entity. */
  getShardForEntity(entityId: string): WorldShard {
    // Determine shard based on game logic
    const shardId = this.calculateShardId(entityId);
    return this.shards.get(shardId)!;
  }

  // Cross-shard operations handled efficiently.
  // BUG FIX: the parameters were named fromShard/toShard and then redeclared
  // as consts reading undefined fromShardId/toShardId identifiers — the
  // method could not compile. Parameters now carry the id names the body uses.
  async moveEntityBetweenShards(
    entityId: string,
    fromShardId: string,
    toShardId: string
  ) {
    // Atomic cross-shard transfer
    // NOTE(review): calculateShardId/atomicTransfer are not defined in this
    // snippet — they must exist on the full class elsewhere.
    const fromShard = this.shards.get(fromShardId)!;
    const toShard = this.shards.get(toShardId)!;
    await this.atomicTransfer(entityId, fromShard, toShard);
  }
}
Predictive Performance Scaling
Auto-scaling Based on Game Events
Auto-scaling Based on Game Events
// Intelligent performance scaling
// Reacts to live game metrics by scaling the database, the websocket fleet,
// and the cache cluster ahead of predicted load.
class PerformanceScaler {
  // Thresholds intended to trigger proactive scaling.
  // NOTE(review): these constants are not referenced in the code shown here —
  // presumably consumed by the prediction model; confirm.
  private readonly SCALE_TRIGGERS = {
    HIGH_PLAYER_DENSITY: 50,
    COMBAT_EVENT_RATE: 100,
    MARKET_ACTIVITY_SPIKE: 1000,
  };

  // Evaluate predictions and apply whichever scaling actions are flagged.
  // NOTE(review): scaleDatabase/scaleWebSocketServers/scaleRedisCluster and
  // `performanceModel` are not defined in this snippet — they must exist on
  // the full class elsewhere.
  async scaleBasedOnGameState(gameMetrics: GameMetrics) {
    const predictions = this.predictPerformanceNeeds(gameMetrics);
    if (predictions.needsDatabaseScaling) {
      await this.scaleDatabase(predictions.dbCpuTarget);
    }
    if (predictions.needsWebSocketScaling) {
      await this.scaleWebSocketServers(predictions.wsInstanceCount);
    }
    if (predictions.needsCacheScaling) {
      await this.scaleRedisCluster(predictions.cacheMemoryTarget);
    }
  }

  private predictPerformanceNeeds(metrics: GameMetrics) {
    // ML-based performance prediction
    return this.performanceModel.predict(metrics);
  }
}
π― Next Steps
Start Performance Testing
Immediate Actions:
- Set up monitoring dashboard
- Run baseline performance tests
- Identify top 3 bottlenecks
- Create optimization plan
Implementation Roadmap
Week 1-2: Smart contract optimizations
Week 3-4: Database and indexing improvements
Week 5-6: Frontend performance enhancements
Week 7-8: Integration testing and monitoring
Community Resources
Advanced Topics
Performance optimization is an iterative process. Donβt try to implement all optimizations at once. Focus on the bottlenecks that impact user experience the most, measure the results, and iterate.