# simulation_engine/mock_kucoin.py
# (V1.2 - Realistic Data Feeder with Debug)

import pandas as pd
import numpy as np
import os

class MockKuCoin:
    def __init__(self, raw_data_dir):
        self.data_store = {}
        self.current_time_ms = 0
        self.raw_dir = raw_data_dir
        self._warned_empty = set()
        print(f"🎭 [MockKuCoin] Initialized. Data source: {self.raw_dir}", flush=True)

    async def load_data(self, symbols, timeframes):
        print(f"⏳ [MockKuCoin] Loading historical data for {len(symbols)} symbols...", flush=True)
        count = 0
        for sym in symbols:
            safe_sym = sym.replace('/', '')
            self.data_store[sym] = {}
            for tf in timeframes:
                # Try both common on-disk naming conventions, e.g. BTCUSDT_5m.parquet
                # and BTC-USDT_5m.parquet (dash form matches KuCoin's native symbol format).
                paths_to_try = [
                    os.path.join(self.raw_dir, f"{safe_sym}_{tf}.parquet"),
                    os.path.join(self.raw_dir, f"{sym.replace('/', '-')}_{tf}.parquet")
                ]
                loaded = False
                for path in paths_to_try:
                    if os.path.exists(path):
                        try:
                            df = pd.read_parquet(path)
                            df = df.sort_values('timestamp').drop_duplicates('timestamp').reset_index(drop=True)
                            self.data_store[sym][tf] = df
                            loaded = True
                            count += 1
                            print(f"   • Loaded {sym} {tf}: {len(df)} rows", flush=True)
                            break
                        except Exception as e:
                            print(f"⚠️ [MockKuCoin] Error loading {path}: {e}", flush=True)
                if not loaded:
                    print(f"⚠️ [MockKuCoin] Missing file for {sym} {tf}", flush=True)
        print(f"✅ [MockKuCoin] Loaded {count} data files into memory.", flush=True)

    def set_time(self, timestamp_ms):
        # Advance the simulation clock (epoch ms); fetch_* calls only see candles up to this time.
        self.current_time_ms = timestamp_ms

    async def fetch_ohlcv(self, symbol, timeframe, limit=500, since=None):
        key = (symbol, timeframe)
        if symbol not in self.data_store or timeframe not in self.data_store[symbol]:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] No data store for {symbol} {timeframe}", flush=True)
                self._warned_empty.add(key)
            return []

        df = self.data_store[symbol][timeframe]
        if df.empty:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] Empty DF for {symbol} {timeframe}", flush=True)
                self._warned_empty.add(key)
            return []

        # Exclusive end index: first candle strictly after the simulated "now".
        end_idx = np.searchsorted(df['timestamp'].values, self.current_time_ms, side='right')
        if end_idx == 0:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] end_idx=0 for {symbol} {timeframe} @ {self.current_time_ms}", flush=True)
                self._warned_empty.add(key)
            return []

        # Honour `since` if provided, then clamp the window so at most `limit` of the
        # most recent visible candles are returned.
        start_idx = 0
        if since is not None:
            start_idx = np.searchsorted(df['timestamp'].values, since, side='left')

        limit_start_idx = max(0, end_idx - limit)
        final_start_idx = max(start_idx, limit_start_idx)
        if final_start_idx >= end_idx:
            if key not in self._warned_empty:
                print(f"⚠️ [MockKuCoin] final_start_idx>=end_idx for {symbol} {timeframe}", flush=True)
                self._warned_empty.add(key)
            return []

        subset = df.iloc[final_start_idx:end_idx]
        return subset[['timestamp', 'open', 'high', 'low', 'close', 'volume']].values.tolist()

    async def fetch_ticker(self, symbol):
        # Build a minimal ticker from the most recent 5m candle visible at the current sim time.
        candles = await self.fetch_ohlcv(symbol, '5m', limit=1)
        if candles:
            last_close = candles[-1][4]
            return {'symbol': symbol, 'last': last_close, 'timestamp': candles[-1][0]}
        return {'symbol': symbol, 'last': 0.0, 'timestamp': self.current_time_ms}

    async def fetch_tickers(self, symbols=None):
        tickers = {}
        target_syms = symbols if symbols else self.data_store.keys()
        for sym in target_syms:
            candles = await self.fetch_ohlcv(sym, '1d', limit=1)
            current_candle = await self.fetch_ohlcv(sym, '5m', limit=1)
            if candles and current_candle:
                # Approximate quote volume: daily base volume * daily close.
                vol_quote = candles[-1][5] * candles[-1][4]
                tickers[sym] = {
                    'symbol': sym,
                    'last': current_candle[-1][4],
                    'quoteVolume': vol_quote,
                    'timestamp': self.current_time_ms
                }
        return tickers

    # Minimal stubs for the remaining ccxt-style calls.
    async def load_markets(self): return True
    async def close(self): pass
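

# --- Usage sketch (illustrative, not part of the original module) ---
# Shows the intended call order: load data, advance the simulated clock, then
# fetch candles/tickers. The data directory, symbol, and timestamp below are
# placeholder assumptions; with no matching parquet files the mock simply
# warns and returns empty results.
if __name__ == "__main__":
    import asyncio

    async def _demo():
        mock = MockKuCoin(raw_data_dir="data/raw")       # hypothetical path
        await mock.load_data(["BTC/USDT"], ["5m", "1d"])
        mock.set_time(1_700_000_000_000)                 # simulated "now" in epoch ms
        candles = await mock.fetch_ohlcv("BTC/USDT", "5m", limit=10)
        ticker = await mock.fetch_ticker("BTC/USDT")
        print(f"candles: {len(candles)}, last price: {ticker['last']}")
        await mock.close()

    asyncio.run(_demo())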