init commit

ALIHAN DIKEL
2024-02-05 20:53:49 +03:00
commit f871c22f3e
17 changed files with 1745 additions and 0 deletions

.gitignore vendored Normal file (5 additions)

@@ -0,0 +1,5 @@
tmp
venv
.idea/
.ipynb_checkpoints
*.pkl

README.md Normal file (39 additions)

@@ -0,0 +1,39 @@
## Design Decisions
* Fetching (see the sketch below)
  * Ticker Symbols
    * depends on the StockSymbol API:
      * https://stock-symbol.herokuapp.com/
      * https://pypi.org/project/stocksymbol/
  * Price Data
    * Yahoo Finance API (via `yfinance`)
## Code Structure
```
|
|-- assets
|   |-- portfolio.py   : dataclasses to represent wealth
|-- config
|   |-- portfolios.yml : initial states of portfolios
|   |-- strategies.yml : rules and parameters to generate signals
|-- data
|   |-- YYMMDD         : scraped historical trading data
|-- ops
|   |-- fetch.py       : methods to get historical trading data
|   |-- transform.py   : methods to transform any data
|-- backtest.py        : performs backtesting based on the specified strategy
|-- preprocess.py      : retrieves historical trading data
```
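
How the pieces fit together, roughly: `preprocess.py` fetches and pickles OHLC data via `ops/fetch.py`, while `backtest.py` loads a portfolio from `config/portfolios.yml` and backtests each holding with the strategy in `ops/strategy.py`. A condensed sketch of that flow (paths and the portfolio name follow the defaults in this repo):

```python
from backtesting import Backtest

from ops.fetch import DataFetcher
from ops.portfolio import PortfolioLoader
from ops.strategy import PCTrader

# load the OHLC frames persisted by preprocess.py (one DataFrame per ticker)
_, dfs_ohlc = DataFetcher().load_from_pickle()

# the portfolio definition comes from config/portfolios.yml
portfolio = PortfolioLoader(path="config/portfolios.yml").load_from_yaml()["backtest_portf"]

# run the percent-change strategy against each holding
for stock in portfolio.stocks:
    bt = Backtest(dfs_ohlc[f"{stock.symbol}.IS"], PCTrader, cash=10000, commission=.002)
    print(stock.symbol, bt.run()["Equity Final [$]"])
```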
## Installation
### Installing TA-Lib on ARM-based Macs
```bash
brew install ta-lib
export TA_INCLUDE_PATH="$(brew --prefix ta-lib)/include"
export TA_LIBRARY_PATH="$(brew --prefix ta-lib)/lib"
pip install ta-lib
```
[source for TA-Lib](https://github.com/TA-Lib/ta-lib-python/issues/418#issuecomment-826129619)
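
A quick way to verify the build before installing the rest of the requirements (assumes the virtualenv is active):

```bash
python -c "import talib; print(talib.__version__)"
```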

backtest.py Normal file (55 additions)

@@ -0,0 +1,55 @@
import numpy as np
from backtesting import Backtest
from loguru import logger

from ops.portfolio import PortfolioLoader
from ops.fetch import DataFetcher
from ops.strategy import PCTrader

# prepare historical data
data_fetcher = DataFetcher()
_, dfs_ohlc = data_fetcher.load_from_pickle()

# prepare portfolio
portf_loader = PortfolioLoader(path="config/portfolios.yml")
portfolios = portf_loader.load_from_yaml()
portfolio = portfolios["backtest_portf"]

# start backtesting
final_equities = []
optims = []
for stock in portfolio.stocks:
    logger.debug(f"backtesting for: {stock.symbol}.IS")
    data = dfs_ohlc[f"{stock.symbol}.IS"]

    bt = Backtest(data, PCTrader, cash=10000, commission=.002)
    stats = bt.run()
    stats, heatmap = bt.optimize(
        pct=range(1, 5, 1),
        maximize='Equity Final [$]',
        max_tries=200,
        random_state=0,
        return_heatmap=True)
    optims.append((stats, heatmap))
    #bt.plot(filename=f"data/bt_plots/{stock.symbol}")
    #final_equities.append(stats.get('Equity Final [$]'))

final_equities = np.array(final_equities)

#print(stats['_trades'].to_string())
# n_enter=range(15, 35, 5),
# n_exit=range(10, 25, 5),
# constraint=lambda p: p.pct,

"""
# prepare strategy
strat_loader = StrategyLoader(path="config/strategies.yml")
strategy = strat_loader.load_from_yaml(name="PercentChange")
"""
# run backtest

config/portfolios.yml Normal file (21 additions)

@@ -0,0 +1,21 @@
# ["KCHOL", "SAHOL", "FROTO", "TOASO", "TCELL", "TTKOM", "THYAO", "PGSUS"]
portfolios:
- name: backtest_portf
stocks:
- symbol: KCHOL
- symbol: SAHOL
- symbol: FROTO
- symbol: TOASO
- symbol: TCELL
- symbol: TTKOM
- symbol: THYAO
- symbol: PGSUS
cash: 10000.0
# - name: Portfolio2
# stocks:
# - symbol: MSFT
# quantity: 8
# - symbol: AMZN
# quantity: 3
# cash: 7000.0

config/strategies.yml Normal file (4 additions)

@@ -0,0 +1,4 @@
strategies:
  - name: PercentChange
    params:
      change_threshold: 0.02

data/bt_plots.html Normal file (61 additions)

File diff suppressed because one or more lines are too long

data/bt_plots/KCHOL.html Normal file (61 additions)

File diff suppressed because one or more lines are too long

data/bt_plots/SAHOL.html Normal file (61 additions)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,307 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "2c4f2143-0f9f-47b6-9933-cc3e13347530",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/tcudikel/Dev/sandbox/trade-strat/venv/lib/python3.9/site-packages/urllib3/__init__.py:34: NotOpenSSLWarning: urllib3 v2 only supports OpenSSL 1.1.1+, currently the 'ssl' module is compiled with 'LibreSSL 2.8.3'. See: https://github.com/urllib3/urllib3/issues/3020\n",
" warnings.warn(\n"
]
}
],
"source": [
"import sys; sys.path.append(\"../\")\n",
"import os; os.chdir(\"../\")\n",
"\n",
"from loguru import logger\n",
"import pandas as pd\n",
"import finplot as fplt\n",
"\n",
"from ops.portfolio import PortfolioLoader\n",
"from ops.fetch import DataFetcher\n",
"\n",
"\n",
"# prepare historical data\n",
"data_fetcher = DataFetcher()\n",
"_, dfs_ohlc = data_fetcher.load_from_pickle()\n",
"\n",
"# prepare portfoliio\n",
"portf_loader = PortfolioLoader(path=\"config/portfolios.yml\")\n",
"portfolios = portf_loader.load_from_yaml()\n",
"portfolio = portfolios[\"backtest_portf\"]"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "980fb1f2-7de9-4913-8943-cd00aa745de4",
"metadata": {},
"outputs": [],
"source": [
"for stock in portfolio.stocks:\n",
" data = dfs_ohlc[f\"{stock.symbol}.IS\"]\n",
" break"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "af2694bc-2c79-4744-973f-e5e5d4ada0c4",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Close</th>\n",
" <th>Open</th>\n",
" <th>High</th>\n",
" <th>Low</th>\n",
" <th>Volume</th>\n",
" </tr>\n",
" <tr>\n",
" <th>Date</th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>2022-02-07</th>\n",
" <td>32.560001</td>\n",
" <td>31.100000</td>\n",
" <td>32.580002</td>\n",
" <td>30.500000</td>\n",
" <td>14509607</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2022-02-08</th>\n",
" <td>33.320000</td>\n",
" <td>32.580002</td>\n",
" <td>33.439999</td>\n",
" <td>32.500000</td>\n",
" <td>15159861</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2022-02-09</th>\n",
" <td>33.660000</td>\n",
" <td>33.580002</td>\n",
" <td>33.880001</td>\n",
" <td>33.200001</td>\n",
" <td>12581438</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2022-02-10</th>\n",
" <td>33.720001</td>\n",
" <td>33.720001</td>\n",
" <td>34.119999</td>\n",
" <td>33.360001</td>\n",
" <td>13014121</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2022-02-11</th>\n",
" <td>33.799999</td>\n",
" <td>33.320000</td>\n",
" <td>33.880001</td>\n",
" <td>32.439999</td>\n",
" <td>17758379</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2024-01-30</th>\n",
" <td>163.199997</td>\n",
" <td>162.899994</td>\n",
" <td>164.500000</td>\n",
" <td>160.600006</td>\n",
" <td>14412086</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2024-01-31</th>\n",
" <td>160.699997</td>\n",
" <td>163.000000</td>\n",
" <td>164.500000</td>\n",
" <td>160.600006</td>\n",
" <td>14175405</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2024-02-01</th>\n",
" <td>169.899994</td>\n",
" <td>161.000000</td>\n",
" <td>172.000000</td>\n",
" <td>161.000000</td>\n",
" <td>26878438</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2024-02-02</th>\n",
" <td>170.500000</td>\n",
" <td>171.000000</td>\n",
" <td>173.800003</td>\n",
" <td>169.899994</td>\n",
" <td>14288239</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2024-02-05</th>\n",
" <td>169.300003</td>\n",
" <td>172.000000</td>\n",
" <td>172.100006</td>\n",
" <td>168.899994</td>\n",
" <td>10069132</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>501 rows × 5 columns</p>\n",
"</div>"
],
"text/plain": [
" Close Open High Low Volume\n",
"Date \n",
"2022-02-07 32.560001 31.100000 32.580002 30.500000 14509607\n",
"2022-02-08 33.320000 32.580002 33.439999 32.500000 15159861\n",
"2022-02-09 33.660000 33.580002 33.880001 33.200001 12581438\n",
"2022-02-10 33.720001 33.720001 34.119999 33.360001 13014121\n",
"2022-02-11 33.799999 33.320000 33.880001 32.439999 17758379\n",
"... ... ... ... ... ...\n",
"2024-01-30 163.199997 162.899994 164.500000 160.600006 14412086\n",
"2024-01-31 160.699997 163.000000 164.500000 160.600006 14175405\n",
"2024-02-01 169.899994 161.000000 172.000000 161.000000 26878438\n",
"2024-02-02 170.500000 171.000000 173.800003 169.899994 14288239\n",
"2024-02-05 169.300003 172.000000 172.100006 168.899994 10069132\n",
"\n",
"[501 rows x 5 columns]"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"data"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "b461abaa-80ef-41d1-b9ad-8b9e5d7e8c7f",
"metadata": {},
"outputs": [],
"source": [
"start = \"2023-10-05\"\n",
"end = \"2023-11-15\"\n",
"\n",
"sample = data.loc[start:end]"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "2e6c4451-884a-44b4-b149-11e182216ddb",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/v8/ym6h5fw970l86ns393wxhzm1d3tht0/T/ipykernel_79482/2390513591.py:3: SettingWithCopyWarning: \n",
"A value is trying to be set on a copy of a slice from a DataFrame.\n",
"Try using .loc[row_indexer,col_indexer] = value instead\n",
"\n",
"See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
" sample['Date'] = pd.to_datetime(sample['Date'])\n",
"qt.pointer.dispatch: skipping QEventPoint(id=1 ts=0 pos=0,0 scn=610.588,441.089 gbl=610.588,441.089 Released ellipse=(1x1 ∡ 0) vel=0,0 press=-610.588,-441.089 last=-610.588,-441.089 Δ 610.588,441.089) : no target window\n"
]
}
],
"source": [
"#sample = data\n",
"sample.reset_index(inplace=True)\n",
"sample['Date'] = pd.to_datetime(sample['Date'])\n",
"\n",
"fplt.candlestick_ochl(sample)\n",
"fplt.show()\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "05e50d21-3296-4f08-b571-66abdad2eed1",
"metadata": {},
"outputs": [],
"source": [
"sample.to_pickle(f\"/Users/tcudikel/Dev/sandbox/trade-strat/data/trend_samples/bearish_kchol_{start}_{end}.pkl\")"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "1971ae65-a62b-4d35-a117-7b109958a56e",
"metadata": {},
"outputs": [],
"source": [
"fplt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5aec2c83-bfac-475a-a6b8-03065138c615",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "tradestrat",
"language": "python",
"name": "tradestrat"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

ops/__init__.py Normal file (0 additions)

ops/fetch.py Normal file (125 additions)

@@ -0,0 +1,125 @@
import os
from datetime import datetime
from typing import Dict

from tqdm import tqdm
from loguru import logger
import yfinance as yf
import pandas as pd
from stocksymbol import StockSymbol


class DataFetcher:

    def __init__(self):
        SS_API_KEY = 'a2598c86-da6c-41ac-a792-9af08f3c482c'
        self.ss_api = StockSymbol(SS_API_KEY)
        today = datetime.now().strftime("%Y%m%d")
        self.data_dir = os.path.join('data/tmp', today)

    def get_historical_for(self, ticker, period="1y", interval="1d"):
        pd.set_option('display.max_rows', None)
        if not isinstance(ticker, yf.ticker.Ticker):
            ticker = yf.Ticker(ticker)
        return ticker.history(period=period, interval=interval)

    def get_historicals_for(self, ticker_list, period="2y", interval="1d"):
        logger.debug("retrieving historical raw ohlc data for tickers:")
        logger.debug(f"ticker list: {ticker_list}")
        return yf.download(" ".join(ticker_list), period=period, interval=interval)  # to debug: ticker_list[:3]

    def generate_ohlcs_per_ticker(self, combined_data):
        ohlcs = {}
        for ticker_symbol in combined_data.columns.get_level_values(1).unique():
            ohlc_df = combined_data['Close'][ticker_symbol].to_frame().rename(columns={ticker_symbol: 'Close'})
            ohlc_df['Open'] = combined_data['Open'][ticker_symbol]
            ohlc_df['High'] = combined_data['High'][ticker_symbol]
            ohlc_df['Low'] = combined_data['Low'][ticker_symbol]
            ohlc_df['Volume'] = combined_data['Volume'][ticker_symbol]
            ohlcs[ticker_symbol] = ohlc_df
        return ohlcs

    def get_market_list(self):
        return self.ss_api.market_list

    def get_ticker_list(self, market):
        self.tickers = []
        raw_tickers = self.ss_api.get_symbol_list(market=market)
        for ticker in raw_tickers:
            self.tickers.append(ticker["symbol"])
        return self.tickers

    def lookup_tickers(self, symbols):
        found_tickers = []
        for symbol in symbols:
            for item in self.tickers:
                if symbol in item:
                    #found_tickers[symbol] = yf.Ticker(item)
                    found_tickers.append(yf.Ticker(item))
        if found_tickers != []:
            return found_tickers
        else:
            return f"Tickers not found for: {symbols}"

    def persist_into_pickle(self, combined_data, ohcls_per_ticker):
        logger.debug("saving fetched combined historical price df")
        os.makedirs(self.data_dir, exist_ok=True)
        for name, df in combined_data.items():
            df.to_pickle(f'{self.data_dir}/{name}.pkl')

        logger.debug("saving fetched ohlcs per ticker as separate dataframes")
        ohlcs_dir = os.path.join(self.data_dir, "ticker_ohlcs")
        os.makedirs(ohlcs_dir, exist_ok=True)
        for name, df in tqdm(ohcls_per_ticker.items(), desc="saving ohlc for each ticker:"):
            df.to_pickle(f'{ohlcs_dir}/{name}.pkl')
        logger.success(f"all fetched data successfully saved into: {self.data_dir}")

    def check_for_persistent_data(self):
        logger.info(f"checking hist data for {self.data_dir} exists.. ")
        if not os.path.exists(self.data_dir):
            logger.warning(f"Tmp data dir missing: {self.data_dir}")
            return False
        files = os.listdir(self.data_dir)
        if not any(file.endswith('.pkl') for file in files):
            logger.warning("Persisted OHLC ticker data missing")
            return False
        ohlcs_dir = os.path.join(self.data_dir, "ticker_ohlcs")
        if not os.path.exists(ohlcs_dir):
            logger.warning(f"OHLCs dir not found: {ohlcs_dir}")
            return False
        pkl_files = [file for file in os.listdir(ohlcs_dir) if file.endswith('.pkl')]
        if not pkl_files:
            logger.warning("OHLC data files missing")
            return False
        logger.success("Persisted data found, no need to download again")
        return True

    def load_from_pickle(self):
        today = datetime.now().strftime("%Y%m%d")
        data_dir = os.path.join('data/tmp', today)
        dfs_cv = {}
        dfs_ohlc = {}
        items = os.listdir(data_dir)
        for item in items:
            if item.endswith('.pkl'):
                file_path = os.path.join(data_dir, item)
                df = pd.read_pickle(file_path)
                dfs_cv[item.replace(".pkl", "")] = df
            if item == 'ticker_ohlcs':
                ohlcs_path = os.path.join(data_dir, item)
                for ticker_file in os.listdir(ohlcs_path):
                    ticker_path = os.path.join(ohlcs_path, ticker_file)
                    df = pd.read_pickle(ticker_path)
                    dfs_ohlc[ticker_file.replace(".pkl", "")] = df
        return dfs_cv, dfs_ohlc

ops/portfolio.py Normal file (73 additions)

@@ -0,0 +1,73 @@
from dataclasses import dataclass, field
from typing import List

import yaml
#from pydantic import BaseModel, validator


@dataclass
class Stock:
    symbol: str
    quantity: int = 0


@dataclass
class Portfolio:
    name: str = "dummy_portfolio"
    stocks: List[Stock] = field(default_factory=list)
    cash: float = 0.0

    def add_lot(self, stock: Stock):
        for exist_stock in self.stocks:
            if exist_stock.symbol == stock.symbol:
                exist_stock.quantity += stock.quantity
                break
        else:
            self.stocks.append(stock)

    def remove_lot(self, stock: Stock):
        for exist_stock in self.stocks:
            if exist_stock.symbol == stock.symbol:
                if exist_stock.quantity == stock.quantity:
                    self.stocks.remove(exist_stock)
                    break
                elif exist_stock.quantity < stock.quantity:
                    raise Exception(
                        f"\nInsufficient lots to remove {stock.symbol}: {stock.quantity}\n"
                        f"Current lots for {exist_stock.symbol}: {exist_stock.quantity}"
                    )
                elif exist_stock.quantity > stock.quantity:
                    exist_stock.quantity -= stock.quantity
                    break
        else:
            raise Exception(f"Given stock not found in portfolio: {stock.symbol}, {stock.quantity}")

    def update_cash(self, amount: float):
        new_cash = self.cash + amount
        if new_cash < 0:
            raise Exception(f"Cannot have a negative cash balance, current balance: {self.cash} - amount: {amount}")
        self.cash = new_cash


class PortfolioLoader:

    def __init__(self, path):
        self.file_path = path

    def load_from_yaml(self):
        portfolios = {}
        with open(self.file_path, 'r') as file:
            data = yaml.safe_load(file)
        if not isinstance(data, dict) or 'portfolios' not in data:
            raise ValueError("Invalid YAML file format. Expected 'portfolios' key.")
        for portf_decl in data.get('portfolios', []):
            name = portf_decl.get('name')
            for stock in portf_decl['stocks']:
                stock["quantity"] = stock.get('quantity', 0)
            portfolios[name] = Portfolio(
                name=portf_decl.get('name', None),
                stocks=[Stock(symbol=stock['symbol'], quantity=stock['quantity']) for stock in portf_decl['stocks']],
                cash=portf_decl['cash']
            )
        return portfolios

ops/strategy.py Normal file (72 additions)

@@ -0,0 +1,72 @@
import pandas as pd
from backtesting import Strategy
from backtesting.lib import crossover


class PCTrader(Strategy):
    # Define the percentage threshold as a *class variable*
    # for later optimization
    pct = 4

    def PercentChange(self, values):
        """
        Returns percent change of 'values', at
        each step taking into account the previous row value.
        """
        return pd.Series(values).pct_change() * 100

    def init(self):
        # Precompute the percent changes
        self.pc = self.I(self.PercentChange, self.data.Close)

    def next(self):
        # If percent change is higher than the (+) threshold, close any existing
        # short trades, and buy the asset
        if self.pc > self.pct:
            self.position.close()
            self.buy()

        # Else, if percent change is lower than the (-) threshold, close any existing
        # long trades, and sell the asset
        elif self.pc < -self.pct:
            self.position.close()
            self.sell()


"""import yaml
from dataclasses import dataclass
from typing import Any, Dict


@dataclass
class Strategy:
    name: str
    params: Dict[str, Any]


class StrategyLoader:
    def __init__(self, path):
        self.file_path = path

    def load_from_yaml(self, name):
        with open(self.file_path, 'r') as file:
            data = yaml.safe_load(file)
        strategies = []
        for strategy_data in data.get('strategies', []):
            strategy = Strategy(
                name=strategy_data.get('name', ''),
                params=strategy_data.get('params', {})
            )
            strategies.append(strategy)
        if name:
            for s in strategies:
                if s.name == name:
                    return s
            else:
                raise Exception(f"No strategy found given name: {name}")"""

ops/transform.py Normal file (33 additions)

@@ -0,0 +1,33 @@
import pandas as pd

from ops.fetch import DataFetcher


class DataTransformer:

    def __init__(self):
        self.data_fetcher = DataFetcher()

    def get_topk_vol_stocks(self, df, K):
        topk = df.iloc[-1].sort_values(ascending=False).head(K)
        return pd.DataFrame({
            'tickers': topk.index,
            'volumes': topk.values
        })

    def calc_diffs(self, portfolio):
        pct_diff = portfolio.pct_change()
        mon_diff = portfolio.diff()
        pct_diff.columns = [f"{col}_diff_pct" for col in pct_diff.columns]
        mon_diff.columns = [f"{col}_diff_mon" for col in mon_diff.columns]
        return pd.concat([portfolio, pct_diff, mon_diff], axis=1)

    def filter_tickers(self, portfolio, raw_data):
        # TODO: rewrite this later as a transform method driven by the portfolio object
        #symbols = ["KCHOL", "SAHOL", "FROTO", "TOASO", "TCELL", "TTKOM", "THYAO", "PGSUS"]
        #tickers = data_fetcher.lookup_tickers(symbols=symbols)
        #symbol_codes = [t.ticker for t in tickers]
        #portf_closes = hist_data["closes"][symbol_codes]
        print("currently not implemented")

preprocess.py Normal file (37 additions)

@@ -0,0 +1,37 @@
from loguru import logger

from ops.transform import DataTransformer
from ops.fetch import DataFetcher


# INITIALIZE
data_fetcher = DataFetcher()
data_transformer = DataTransformer()

# FETCH AND PERSIST
tickers = data_fetcher.get_ticker_list(market="TR")

if data_fetcher.check_for_persistent_data():
    dfs_cv, dfs_ohlc = data_fetcher.load_from_pickle()
else:
    raw_data = data_fetcher.get_historicals_for(ticker_list=tickers)
    ohlcs_data = data_fetcher.generate_ohlcs_per_ticker(combined_data=raw_data)
    data_fetcher.persist_into_pickle(
        combined_data={"closes": raw_data["Adj Close"], "volumes": raw_data["Volume"]},
        ohcls_per_ticker=ohlcs_data
    )
    df_cv, df_ohlc = data_fetcher.load_from_pickle()

# TODO: write validation (for hist_data and ohlcs_data), validate here, then log with logger.success
logger.success("historical data preprocessing is done")

# Refactor Note:
# removed this b/c the backtesting.py format only requires OHLC; will transform later
#portf_closediffs = data_transformer.calc_diffs(portf_closes)
#data_fetcher.persist_into_pickle({"portf_closediffs": portf_closediffs})

scripts/start_jupy.sh Executable file (5 additions)

@@ -0,0 +1,5 @@
#!/bin/bash
jupyter lab --ip='0.0.0.0' --NotebookApp.token='' --NotebookApp.password='' --no-browser --port=9992 --notebook-dir=/Users/tcudikel/Dev/sandbox/trade-strat