In this tutorial, we will build a complete data composition pipeline that fetches, transforms, and structures DLMM pool data for application consumption. By the end, you’ll understand how to compose complex pool analytics using the Saros DLMM SDK.

Prerequisites

Before you start, you should have:
  • Node.js (v18 or later recommended) and npm installed
  • Basic familiarity with TypeScript and async/await
  • Access to a Solana RPC endpoint (the examples use the public devnet endpoint)
  • A general understanding of how DLMM pools organize liquidity into discrete price bins

What We’ll Build

A production-ready data composition service that:
  • Fetches live pool metadata and state
  • Calculates real-time pricing and quotes
  • Processes bin liquidity distribution
  • Composes user-friendly data structures
  • Handles errors gracefully
Result: Complete data pipeline powering pool distribution analytics

Step 1: Project Setup and SDK Integration

First, create a new TypeScript project and install the Saros DLMM SDK:
mkdir dlmm-data-pipeline
cd dlmm-data-pipeline
npm init -y
npm install @saros-finance/dlmm-sdk @solana/web3.js
npm install -D typescript @types/node ts-node
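Add a minimal tsconfig.json so ts-node can compile the project. This is one reasonable starting configuration rather than a requirement; useUnknownInCatchVariables is disabled because the examples below read error.message directly inside catch blocks:
// tsconfig.json
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "commonjs",
    "strict": true,
    "useUnknownInCatchVariables": false,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "rootDir": "src",
    "outDir": "dist"
  },
  "include": ["src"]
}
The files created over the following steps are laid out like this:
dlmm-data-pipeline/
├── tsconfig.json
└── src/
    ├── types/pool.ts
    ├── services/DLMMDataComposer.ts
    ├── utils/dataCompositionHelpers.ts
    ├── utils/errorHandling.ts
    ├── utils/caching.ts
    └── example/poolAnalysis.ts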
Create the project structure shown above, starting with the shared type definitions:
// src/types/pool.ts
export interface ComposedPoolData {
  pool: {
    address: string;
    tokenX: TokenInfo;
    tokenY: TokenInfo;
    binStep: number;
    baseFactor: number;
  };
  state: {
    activeId: number;
    currentPrice: number;
    reserveX: number;
    reserveY: number;
    totalLiquidity: number;
  };
  liquidity: {
    distribution: BinLiquidityData[];
    activeBins: number;
    concentrationRatio: number;
    totalValueLocked: number;
  };
  pricing: {
    currentRate: number;
    priceImpact: number;
    tradingFees: number;
  };
  timestamp: number;
}

export interface BinLiquidityData {
  binId: number;
  price: number;
  liquidityAmount: number;
  reserveX: number;
  reserveY: number;
  feeX: number;
  feeY: number;
  utilization: number;
}

export interface TokenInfo {
  mint: string;
  symbol: string;
  decimals: number;
}

Step 2: DLMM Service Foundation

Create the core service that manages DLMM SDK interactions:
// src/services/DLMMDataComposer.ts
import { Connection, PublicKey } from '@solana/web3.js';
import { DLMM, LiquidityBookServices } from '@saros-finance/dlmm-sdk';
import { ComposedPoolData, BinLiquidityData, TokenInfo } from '../types/pool';

export class DLMMDataComposer {
  private connection: Connection;
  private services: LiquidityBookServices;
  
  constructor(rpcUrl: string) {
    this.connection = new Connection(rpcUrl, 'confirmed');
    this.services = new LiquidityBookServices(this.connection);
  }

  /**
   * Compose complete pool data from DLMM SDK
   */
  async composePoolData(poolAddress: string): Promise<ComposedPoolData> {
    console.log(`🔄 Composing data for pool: ${poolAddress}`);
    
    try {
      const poolPubkey = new PublicKey(poolAddress);
      const pool = new DLMM(this.connection, poolPubkey);
      
      // Fetch all required data in parallel for efficiency
      const [poolInfo, poolState, liquidityDistribution] = await Promise.all([
        this.composePoolInfo(pool),
        this.composePoolState(pool),
        this.composeLiquidityDistribution(pool)
      ]);
      
      // Compose pricing data based on current state
      const pricing = await this.composePricingData(pool, poolState.activeId);
      
      const composedData: ComposedPoolData = {
        pool: poolInfo,
        state: poolState,
        liquidity: liquidityDistribution,
        pricing,
        timestamp: Date.now()
      };
      
      console.log(`✅ Pool data composed successfully`);
      console.log(`   - Active Bin: ${poolState.activeId}`);
      console.log(`   - Current Price: ${poolState.currentPrice.toFixed(6)}`);
      console.log(`   - Active Bins: ${liquidityDistribution.activeBins}`);
      console.log(`   - TVL: $${liquidityDistribution.totalValueLocked.toLocaleString()}`);
      
      return composedData;
      
    } catch (error) {
      console.error(`❌ Failed to compose pool data:`, error);
      throw new Error(`Pool data composition failed: ${error.message}`);
    }
  }

  /**
   * Compose basic pool information
   */
  private async composePoolInfo(pool: DLMM) {
    const poolInfo = await pool.getPoolInfo();
    
    // Get token metadata (in production, fetch from token registry)
    const tokenX = await this.getTokenInfo(poolInfo.tokenX);
    const tokenY = await this.getTokenInfo(poolInfo.tokenY);
    
    return {
      address: pool.pubkey.toString(),
      tokenX,
      tokenY,
      binStep: poolInfo.binStep,
      baseFactor: poolInfo.baseFactor
    };
  }

  /**
   * Compose current pool state with calculated values
   */
  private async composePoolState(pool: DLMM) {
    const poolState = await pool.getPoolState();
    const poolInfo = await pool.getPoolInfo();
    
    // Calculate human-readable values
    const currentPrice = this.binIdToPrice(poolState.activeId, poolInfo.binStep);
    const reserveX = this.lamportsToNumber(poolState.reserveX, 6); // Assuming 6 decimals
    const reserveY = this.lamportsToNumber(poolState.reserveY, 9); // Assuming 9 decimals
    
    return {
      activeId: poolState.activeId,
      currentPrice,
      reserveX,
      reserveY,
      totalLiquidity: poolState.liquiditySupply.toNumber()
    };
  }

  /**
   * Compose comprehensive liquidity distribution data
   */
  private async composeLiquidityDistribution(pool: DLMM) {
    const poolState = await pool.getPoolState();
    const poolInfo = await pool.getPoolInfo();
    
    // Get bin data around active bin (±50 bins for comprehensive view)
    const binRange = 50;
    const distribution = await this.fetchBinDistribution(
      pool, 
      poolState.activeId, 
      binRange
    );
    
    // Calculate aggregate metrics
    const activeBins = distribution.filter(bin => bin.liquidityAmount > 0).length;
    const concentrationRatio = this.calculateConcentrationRatio(distribution);
    const totalValueLocked = this.calculateTVL(distribution);
    
    return {
      distribution,
      activeBins,
      concentrationRatio,
      totalValueLocked
    };
  }

  /**
   * Fetch and compose bin distribution data
   */
  private async fetchBinDistribution(
    pool: DLMM, 
    activeBinId: number, 
    range: number
  ): Promise<BinLiquidityData[]> {
    const distribution: BinLiquidityData[] = [];
    const poolInfo = await pool.getPoolInfo();
    
    // Build the list of bin IDs to inspect around the active bin
    const binIds = [];
    for (let i = -range; i <= range; i++) {
      binIds.push(activeBinId + i);
    }
    
    // Fetch bin data (in production, batch these requests)
    for (const binId of binIds) {
      try {
        const bin = await pool.getBin(binId);
        const price = this.binIdToPrice(binId, poolInfo.binStep);
        
        // Convert lamports to human-readable numbers
        const reserveX = this.lamportsToNumber(bin.amountX, 6);
        const reserveY = this.lamportsToNumber(bin.amountY, 9);
        const liquidityAmount = bin.liquiditySupply.toNumber();
        
        distribution.push({
          binId,
          price,
          liquidityAmount,
          reserveX,
          reserveY,
          feeX: this.lamportsToNumber(bin.feeAmountX, 6),
          feeY: this.lamportsToNumber(bin.feeAmountY, 9),
          utilization: liquidityAmount > 0 ? 1 : 0
        });
        
      } catch {
        // Bin doesn't exist or has no liquidity
        distribution.push({
          binId,
          price: this.binIdToPrice(binId, poolInfo.binStep),
          liquidityAmount: 0,
          reserveX: 0,
          reserveY: 0,
          feeX: 0,
          feeY: 0,
          utilization: 0
        });
      }
    }
    
    return distribution.sort((a, b) => a.binId - b.binId);
  }

  /**
   * Compose real-time pricing data
   */
  private async composePricingData(pool: DLMM, activeBinId: number) {
    const poolInfo = await pool.getPoolInfo();
    const currentRate = this.binIdToPrice(activeBinId, poolInfo.binStep);
    
    // Get quote for price impact calculation (1000 USDC swap example)
    try {
      const swapAmount = 1000 * Math.pow(10, 6); // 1000 USDC in base units (6 decimals)
      const quote = await pool.getSwapQuote({
        fromMint: poolInfo.tokenX,
        toMint: poolInfo.tokenY,
        amount: swapAmount,
        swapForY: true
      });
      
      const priceImpact = this.calculatePriceImpact(quote);
      const tradingFees = this.calculateTradingFees(quote);
      
      return {
        currentRate,
        priceImpact,
        tradingFees
      };
      
    } catch (error) {
      console.warn('Could not fetch pricing data, using defaults:', error.message);
      return {
        currentRate,
        priceImpact: 0,
        tradingFees: 0
      };
    }
  }

  // Utility methods for data transformation
  private async getTokenInfo(mintAddress: PublicKey): Promise<TokenInfo> {
    // In production, fetch from token registry or metadata program
    const mint = mintAddress.toString();
    
    // Mock token info - replace with real token metadata fetching
    const tokenMap: Record<string, { symbol: string; decimals: number }> = {
      'EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v': { symbol: 'USDC', decimals: 6 },
      'So11111111111111111111111111111111111111112': { symbol: 'SOL', decimals: 9 },
      'Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB': { symbol: 'USDT', decimals: 6 }
    };
    
    return {
      mint,
      symbol: tokenMap[mint]?.symbol || 'UNKNOWN',
      decimals: tokenMap[mint]?.decimals || 9
    };
  }

  private binIdToPrice(binId: number, binStep: number): number {
    // DLMM price formula: price = (1 + binStep / 10000) ^ (binId - 8388608); 8388608 = 2^23 is the center bin id
    return Math.pow(1 + binStep / 10000, binId - 8388608);
  }

  private lamportsToNumber(lamports: any, decimals: number): number {
    return Number(lamports.toString()) / Math.pow(10, decimals);
  }

  private calculateConcentrationRatio(distribution: BinLiquidityData[]): number {
    const activeBins = distribution.filter(bin => bin.liquidityAmount > 0);
    if (activeBins.length === 0) return 0;
    
    const totalLiquidity = activeBins.reduce((sum, bin) => sum + bin.liquidityAmount, 0);
    const top20Percent = Math.ceil(activeBins.length * 0.2);
    const topBins = activeBins
      .sort((a, b) => b.liquidityAmount - a.liquidityAmount)
      .slice(0, top20Percent);
    const topLiquidity = topBins.reduce((sum, bin) => sum + bin.liquidityAmount, 0);
    
    return topLiquidity / totalLiquidity;
  }

  private calculateTVL(distribution: BinLiquidityData[]): number {
    // Simplified TVL calculation with hardcoded prices (1 USDC = $1, 1 SOL ≈ $100); use a price oracle in production
    return distribution.reduce((total, bin) => {
      const valueX = bin.reserveX * 1; // USDC value
      const valueY = bin.reserveY * 100; // SOL value approximation
      return total + valueX + valueY;
    }, 0);
  }

  private calculatePriceImpact(quote: any): number {
    // Extract price impact from quote response
    return quote.priceImpact || 0;
  }

  private calculateTradingFees(quote: any): number {
    // Calculate trading fees from quote
    return quote.fee || 0;
  }
}
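
To build intuition for the binIdToPrice formula, here is a quick worked example (a standalone sketch with illustrative values, not data from a live pool). With a bin step of 20 basis points, each bin is a 0.2% price increment, so a bin 100 steps above the center bin prices at roughly 1.002^100 ≈ 1.221:
// scratch/priceFormula.ts — illustrative only
const BIN_OFFSET = 8388608; // 2^23, the center bin id used in the formula above

function binIdToPrice(binId: number, binStep: number): number {
  // Same formula as DLMMDataComposer.binIdToPrice
  return Math.pow(1 + binStep / 10000, binId - BIN_OFFSET);
}

console.log(binIdToPrice(BIN_OFFSET, 20));       // 1       — price at the center bin
console.log(binIdToPrice(BIN_OFFSET + 100, 20)); // ≈ 1.221 — 100 bins above
console.log(binIdToPrice(BIN_OFFSET - 100, 20)); // ≈ 0.819 — 100 bins below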

Step 3: Advanced Data Composition Patterns

Create helper utilities for complex data operations:
// src/utils/dataCompositionHelpers.ts
import { ComposedPoolData } from '../types/pool';

export class DataCompositionHelpers {
  /**
   * Compose historical data points for trending analysis
   */
  static composeHistoricalTrend(
    currentData: ComposedPoolData,
    historicalSnapshots: ComposedPoolData[]
  ) {
    return {
      current: {
        tvl: currentData.liquidity.totalValueLocked,
        price: currentData.state.currentPrice,
        activeBins: currentData.liquidity.activeBins
      },
      trend: {
        tvlChange24h: this.calculatePercentageChange(
          historicalSnapshots[0]?.liquidity.totalValueLocked || 0,
          currentData.liquidity.totalValueLocked
        ),
        priceChange24h: this.calculatePercentageChange(
          historicalSnapshots[0]?.state.currentPrice || 0,
          currentData.state.currentPrice
        ),
        liquidityChange24h: this.calculatePercentageChange(
          historicalSnapshots[0]?.liquidity.activeBins || 0,
          currentData.liquidity.activeBins
        )
      }
    };
  }

  /**
   * Compose multi-pool comparison data
   */
  static composePoolComparison(poolsData: ComposedPoolData[]) {
    return {
      pools: poolsData.map(pool => ({
        address: pool.pool.address,
        pair: `${pool.pool.tokenX.symbol}/${pool.pool.tokenY.symbol}`,
        tvl: pool.liquidity.totalValueLocked,
        activeBins: pool.liquidity.activeBins,
        concentrationRatio: pool.liquidity.concentrationRatio,
        currentPrice: pool.state.currentPrice
      })),
      summary: {
        totalTVL: poolsData.reduce((sum, pool) => sum + pool.liquidity.totalValueLocked, 0),
        averageConcentration: poolsData.reduce((sum, pool) => sum + pool.liquidity.concentrationRatio, 0) / poolsData.length,
        mostActivePool: poolsData.reduce((max, pool) => 
          pool.liquidity.activeBins > max.liquidity.activeBins ? pool : max
        ).pool.address
      }
    };
  }

  /**
   * Compose user position analytics within pool distribution
   */
  static composeUserPositionAnalytics(
    poolData: ComposedPoolData,
    userPositions: any[]
  ) {
    const userBins = userPositions.map(pos => ({
      lowerBinId: pos.lowerBinId,
      upperBinId: pos.upperBinId,
      liquidity: pos.liquidity,
      inRange: pos.lowerBinId <= poolData.state.activeId && pos.upperBinId >= poolData.state.activeId
    }));

    const totalUserLiquidity = userBins.reduce((sum, pos) => sum + pos.liquidity, 0);
    const inRangePositions = userBins.filter(pos => pos.inRange);

    return {
      positions: userBins,
      analytics: {
        totalPositions: userBins.length,
        inRangePositions: inRangePositions.length,
        totalLiquidity: totalUserLiquidity,
        inRangeLiquidity: inRangePositions.reduce((sum, pos) => sum + pos.liquidity, 0),
        utilizationRate: userBins.length > 0 ? inRangePositions.length / userBins.length : 0,
        concentrationScore: this.calculateUserConcentrationScore(userBins, poolData.state.activeId)
      }
    };
  }

  private static calculatePercentageChange(oldValue: number, newValue: number): number {
    if (oldValue === 0) return 0;
    return ((newValue - oldValue) / oldValue) * 100;
  }

  private static calculateUserConcentrationScore(positions: any[], activeBinId: number): number {
    if (positions.length === 0) return 0;
    const proximityScores = positions.map(pos => {
      const centerBin = (pos.lowerBinId + pos.upperBinId) / 2;
      const distance = Math.abs(centerBin - activeBinId);
      return Math.max(0, 100 - distance); // Score decreases with distance
    });
    
    return proximityScores.reduce((sum, score) => sum + score, 0) / positions.length;
  }
}
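
A sketch of how these helpers can be wired together, assuming you already have a DLMMDataComposer instance and a list of pool addresses (the file name, address list, and snapshot source are placeholders):
// src/example/compareAndTrend.ts — illustrative usage
import { DLMMDataComposer } from '../services/DLMMDataComposer';
import { DataCompositionHelpers } from '../utils/dataCompositionHelpers';
import { ComposedPoolData } from '../types/pool';

export async function compareAndTrend(composer: DLMMDataComposer, poolAddresses: string[]) {
  // Compose every pool in parallel
  const poolsData: ComposedPoolData[] = await Promise.all(
    poolAddresses.map(address => composer.composePoolData(address))
  );

  // Cross-pool comparison (TVL, active bins, concentration)
  const comparison = DataCompositionHelpers.composePoolComparison(poolsData);
  console.log(`Total TVL across pools: $${comparison.summary.totalTVL.toLocaleString()}`);
  console.log(`Most active pool: ${comparison.summary.mostActivePool}`);

  // 24h trend for the first pool, using snapshots you have persisted yourself
  // (storing and loading snapshots is outside the scope of this tutorial)
  const historicalSnapshots: ComposedPoolData[] = [];
  const trend = DataCompositionHelpers.composeHistoricalTrend(poolsData[0], historicalSnapshots);
  console.log(`24h TVL change: ${trend.trend.tvlChange24h.toFixed(2)}%`);
}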

Step 4: Error Handling and Resilience Patterns

Implement production-ready error handling:
// src/utils/errorHandling.ts

export class DLMMError extends Error {
  constructor(
    message: string,
    public code: string,
    public context?: any
  ) {
    super(message);
    this.name = 'DLMMError';
  }
}

export class ErrorHandler {
  static handleRPCErrors(error: any): never {
    if (error.message.includes('Invalid public key')) {
      throw new DLMMError(
        'Invalid pool address provided',
        'INVALID_POOL_ADDRESS',
        { originalError: error.message }
      );
    }
    
    if (error.message.includes('Account does not exist')) {
      throw new DLMMError(
        'Pool does not exist or is not initialized',
        'POOL_NOT_FOUND',
        { originalError: error.message }
      );
    }
    
    if (error.message.includes('Connection failed')) {
      throw new DLMMError(
        'RPC connection failed - check network connectivity',
        'RPC_CONNECTION_FAILED',
        { originalError: error.message }
      );
    }
    
    throw new DLMMError(
      'Unexpected DLMM operation error',
      'UNKNOWN_ERROR',
      { originalError: error.message }
    );
  }

  static async withRetry<T>(
    operation: () => Promise<T>,
    maxRetries: number = 3,
    delay: number = 1000
  ): Promise<T> {
    let lastError: Error | undefined;
    
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        return await operation();
      } catch (error) {
        lastError = error;
        console.warn(`Attempt ${attempt}/${maxRetries} failed:`, error.message);
        
        if (attempt < maxRetries) {
          await this.sleep(delay * attempt); // Linearly increasing backoff between retries
        }
      }
    }
    
    throw lastError;
  }

  private static sleep(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}
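
The two pieces are meant to be used together: wrap SDK calls in withRetry to absorb transient failures, then translate whatever ultimately escapes into a typed DLMMError. A minimal sketch of that wrapping pattern (the helper name and file location are illustrative):
// src/utils/resilience.ts — illustrative wrapper
import { ErrorHandler, DLMMError } from './errorHandling';

export async function fetchWithResilience<T>(operation: () => Promise<T>): Promise<T> {
  try {
    // Retry transient failures up to 3 times with an increasing delay
    return await ErrorHandler.withRetry(operation, 3, 1000);
  } catch (error) {
    // Preserve already-typed errors, map everything else to a DLMMError code
    if (error instanceof DLMMError) throw error;
    return ErrorHandler.handleRPCErrors(error); // always throws; return satisfies the type checker
  }
}
A call such as fetchWithResilience(() => composer.composePoolData(poolAddress)) then behaves like composePoolData, but with retries and stable error codes.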

Step 5: Complete Integration Example

Put everything together in a working example:
// src/example/poolAnalysis.ts
import { DLMMDataComposer } from '../services/DLMMDataComposer';
import { DataCompositionHelpers } from '../utils/dataCompositionHelpers';
import { ErrorHandler } from '../utils/errorHandling';

async function main() {
  const composer = new DLMMDataComposer('https://api.devnet.solana.com');
  
  // Use a real DLMM pool address from Saros devnet
  const poolAddress = '2wT4jHyF6o5fFzJJM9cHH8WKrZt1kqEKnEwqYZgQy8wq'; // Example devnet pool
  
  try {
    console.log('🚀 Starting DLMM pool data composition...\n');
    
    // Compose complete pool data with error handling
    const poolData = await ErrorHandler.withRetry(
      () => composer.composePoolData(poolAddress),
      3,
      2000
    );
    
    console.log('\n📊 Pool Analysis Results:');
    console.log('=====================================');
    console.log(`Pool: ${poolData.pool.tokenX.symbol}/${poolData.pool.tokenY.symbol}`);
    console.log(`Current Price: ${poolData.state.currentPrice.toFixed(6)}`);
    console.log(`Total Value Locked: $${poolData.liquidity.totalValueLocked.toLocaleString()}`);
    console.log(`Active Bins: ${poolData.liquidity.activeBins}`);
    console.log(`Concentration Ratio: ${(poolData.liquidity.concentrationRatio * 100).toFixed(1)}%`);
    console.log(`Price Impact (1K trade): ${poolData.pricing.priceImpact.toFixed(3)}%`);
    
    // Analyze liquidity distribution
    const liquidityAnalysis = analyzeLiquidityDistribution(poolData);
    console.log('\n💧 Liquidity Distribution Analysis:');
    console.log('====================================');
    console.log(`Peak Liquidity Bin: ${liquidityAnalysis.peakBin.binId} (Price: ${liquidityAnalysis.peakBin.price.toFixed(6)})`);
    console.log(`Liquidity Spread: ${liquidityAnalysis.spread} bins`);
    console.log(`Utilization Efficiency: ${liquidityAnalysis.efficiency.toFixed(1)}%`);
    
    // Example of data suitable for chart consumption
    const chartData = prepareChartData(poolData);
    console.log('\n📈 Chart Data Prepared:');
    console.log('=======================');
    console.log(`Price Range: ${chartData.priceRange.min.toFixed(6)} - ${chartData.priceRange.max.toFixed(6)}`);
    console.log(`Data Points: ${chartData.binData.length}`);
    console.log(`Active Range: ${chartData.activeRange.start} to ${chartData.activeRange.end}`);
    
  } catch (error) {
    console.error('❌ Pool analysis failed:', error.message);
    console.error('Code:', error.code);
    if (error.context) {
      console.error('Context:', error.context);
    }
  }
}

function analyzeLiquidityDistribution(poolData: any) {
  const activeBins = poolData.liquidity.distribution.filter(bin => bin.liquidityAmount > 0);
  if (activeBins.length === 0) {
    throw new Error('No active liquidity found in the fetched bin range');
  }
  const peakBin = activeBins.reduce((max, bin) => 
    bin.liquidityAmount > max.liquidityAmount ? bin : max
  );
  
  const minBinId = Math.min(...activeBins.map(bin => bin.binId));
  const maxBinId = Math.max(...activeBins.map(bin => bin.binId));
  const spread = maxBinId - minBinId;
  
  const efficiency = (activeBins.length / poolData.liquidity.distribution.length) * 100;
  
  return {
    peakBin,
    spread,
    efficiency,
    totalActiveBins: activeBins.length
  };
}

function prepareChartData(poolData: any) {
  const binData = poolData.liquidity.distribution
    .filter(bin => bin.liquidityAmount > 0)
    .map(bin => ({
      x: bin.price,
      y: bin.liquidityAmount,
      binId: bin.binId,
      isActive: bin.binId === poolData.state.activeId
    }));
  
  const prices = binData.map(d => d.x);
  const activeIndex = binData.findIndex(d => d.isActive);
  
  return {
    binData,
    priceRange: {
      min: Math.min(...prices),
      max: Math.max(...prices)
    },
    activeRange: {
      start: Math.max(0, activeIndex - 10),
      end: Math.min(binData.length - 1, activeIndex + 10)
    },
    metadata: {
      totalBins: binData.length,
      activeBinId: poolData.state.activeId,
      timestamp: poolData.timestamp
    }
  };
}

// Run the example
main().catch(console.error);

Step 6: Production Optimizations

Add caching and performance optimizations:
// src/utils/caching.ts
import { DLMMDataComposer } from '../services/DLMMDataComposer';
import { ComposedPoolData } from '../types/pool';

export class DataCache {
  private cache = new Map<string, { data: any; timestamp: number; ttl: number }>();
  
  set(key: string, data: any, ttlMs: number = 30000): void {
    this.cache.set(key, {
      data,
      timestamp: Date.now(),
      ttl: ttlMs
    });
    
    // Auto-cleanup expired entries
    setTimeout(() => this.cache.delete(key), ttlMs);
  }
  
  get(key: string): any | null {
    const entry = this.cache.get(key);
    if (!entry) return null;
    
    if (Date.now() - entry.timestamp > entry.ttl) {
      this.cache.delete(key);
      return null;
    }
    
    return entry.data;
  }
  
  has(key: string): boolean {
    return this.get(key) !== null;
  }
}

// Enhanced composer with caching
export class CachedDLMMDataComposer extends DLMMDataComposer {
  private cache = new DataCache();
  
  async composePoolData(poolAddress: string): Promise<ComposedPoolData> {
    const cacheKey = `pool-data-${poolAddress}`;
    
    // Check cache first
    const cached = this.cache.get(cacheKey);
    if (cached) {
      console.log(`📦 Using cached data for ${poolAddress}`);
      return cached;
    }
    
    // Fetch fresh data
    const data = await super.composePoolData(poolAddress);
    
    // Cache for 30 seconds
    this.cache.set(cacheKey, data, 30000);
    
    return data;
  }
}
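
A typical consumer polls through the cached composer so that repeated reads inside the 30-second TTL never hit the RPC node. A brief sketch (the file name, polling interval, and pool address are placeholders):
// src/example/watchPool.ts — illustrative polling loop
import { CachedDLMMDataComposer } from '../utils/caching';

const composer = new CachedDLMMDataComposer('https://api.devnet.solana.com');
const poolAddress = '...'; // the pool you are tracking

// Refresh every 10 seconds; with a 30-second TTL, only roughly one call in three reaches the RPC node
setInterval(async () => {
  const data = await composer.composePoolData(poolAddress);
  console.log(`[${new Date(data.timestamp).toISOString()}] TVL: $${data.liquidity.totalValueLocked.toLocaleString()}`);
}, 10_000);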

Validation and Testing

Test your implementation:
# Run the example
npx ts-node src/example/poolAnalysis.ts
Expected output should show:
  • ✅ Successful pool data composition
  • ✅ Detailed pool analytics with real numbers
  • ✅ Liquidity distribution analysis
  • ✅ Chart-ready data structures
  • ✅ Error handling demonstrations
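
Beyond eyeballing the console output, you can assert a few structural invariants on the composed data. A small, illustrative check (not an exhaustive test suite; the file name and assertions are examples):
// src/example/validateComposition.ts — illustrative sanity checks
import { ComposedPoolData } from '../types/pool';

export function validateComposedPoolData(data: ComposedPoolData): void {
  const assert = (condition: boolean, message: string) => {
    if (!condition) throw new Error(`Validation failed: ${message}`);
  };

  assert(data.timestamp <= Date.now(), 'timestamp must not be in the future');
  assert(data.liquidity.totalValueLocked >= 0, 'TVL must be non-negative');
  assert(
    data.liquidity.activeBins <= data.liquidity.distribution.length,
    'active bins cannot exceed the number of bins fetched'
  );

  // fetchBinDistribution returns bins sorted by binId
  const sortedById = data.liquidity.distribution.every(
    (bin, i, bins) => i === 0 || bins[i - 1].binId < bin.binId
  );
  assert(sortedById, 'bin distribution must be sorted by binId');

  // The active bin should fall inside the fetched ±range window
  const binIds = data.liquidity.distribution.map(bin => bin.binId);
  assert(
    Math.min(...binIds) <= data.state.activeId && data.state.activeId <= Math.max(...binIds),
    'active bin id must be inside the fetched bin range'
  );
}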

Key Takeaways

What You’ve Learned:
  1. SDK Data Composition: How to fetch and transform raw DLMM data into application-ready structures
  2. Production Patterns: Error handling, caching, and retry mechanisms for robust applications
  3. Advanced Analytics: Liquidity concentration analysis and multi-pool comparisons
  4. Performance Optimization: Efficient data fetching and caching strategies
Next Steps: Your data composition pipeline is now ready to power sophisticated pool distribution analytics in production applications.