diff --git a/.gitignore b/.gitignore index 6e194c6..783495b 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ -.pnpm-store \ No newline at end of file +.pnpm-store +node_modules \ No newline at end of file diff --git a/biome.json b/biome.json new file mode 100644 index 0000000..ede396c --- /dev/null +++ b/biome.json @@ -0,0 +1,16 @@ +{ + "$schema": "https://biomejs.dev/schemas/1.8.3/schema.json", + "organizeImports": { + "enabled": true + }, + "formatter": { + "indentWidth": 2, + "indentStyle": "space" + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..240328a --- /dev/null +++ b/package.json @@ -0,0 +1,5 @@ +{ + "devDependencies": { + "@biomejs/biome": "^1.8.3" + } +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 0000000..3fb396b --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,105 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@biomejs/biome': + specifier: ^1.8.3 + version: 1.8.3 + +packages: + + '@biomejs/biome@1.8.3': + resolution: {integrity: sha512-/uUV3MV+vyAczO+vKrPdOW0Iaet7UnJMU4bNMinggGJTAnBPjCoLEYcyYtYHNnUNYlv4xZMH6hVIQCAozq8d5w==} + engines: {node: '>=14.21.3'} + hasBin: true + + '@biomejs/cli-darwin-arm64@1.8.3': + resolution: {integrity: sha512-9DYOjclFpKrH/m1Oz75SSExR8VKvNSSsLnVIqdnKexj6NwmiMlKk94Wa1kZEdv6MCOHGHgyyoV57Cw8WzL5n3A==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [darwin] + + '@biomejs/cli-darwin-x64@1.8.3': + resolution: {integrity: sha512-UeW44L/AtbmOF7KXLCoM+9PSgPo0IDcyEUfIoOXYeANaNXXf9mLUwV1GeF2OWjyic5zj6CnAJ9uzk2LT3v/wAw==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [darwin] + + '@biomejs/cli-linux-arm64-musl@1.8.3': + resolution: {integrity: sha512-9yjUfOFN7wrYsXt/T/gEWfvVxKlnh3yBpnScw98IF+oOeCYb5/b/+K7YNqKROV2i1DlMjg9g/EcN9wvj+NkMuQ==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [linux] + + '@biomejs/cli-linux-arm64@1.8.3': + resolution: {integrity: sha512-fed2ji8s+I/m8upWpTJGanqiJ0rnlHOK3DdxsyVLZQ8ClY6qLuPc9uehCREBifRJLl/iJyQpHIRufLDeotsPtw==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [linux] + + '@biomejs/cli-linux-x64-musl@1.8.3': + resolution: {integrity: sha512-UHrGJX7PrKMKzPGoEsooKC9jXJMa28TUSMjcIlbDnIO4EAavCoVmNQaIuUSH0Ls2mpGMwUIf+aZJv657zfWWjA==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [linux] + + '@biomejs/cli-linux-x64@1.8.3': + resolution: {integrity: sha512-I8G2QmuE1teISyT8ie1HXsjFRz9L1m5n83U1O6m30Kw+kPMPSKjag6QGUn+sXT8V+XWIZxFFBoTDEDZW2KPDDw==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [linux] + + '@biomejs/cli-win32-arm64@1.8.3': + resolution: {integrity: sha512-J+Hu9WvrBevfy06eU1Na0lpc7uR9tibm9maHynLIoAjLZpQU3IW+OKHUtyL8p6/3pT2Ju5t5emReeIS2SAxhkQ==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [win32] + + '@biomejs/cli-win32-x64@1.8.3': + resolution: {integrity: sha512-/PJ59vA1pnQeKahemaQf4Nyj7IKUvGQSc3Ze1uIGi+Wvr1xF7rGobSrAAG01T/gUDG21vkDsZYM03NAmPiVkqg==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [win32] + +snapshots: + + '@biomejs/biome@1.8.3': + optionalDependencies: + '@biomejs/cli-darwin-arm64': 1.8.3 + '@biomejs/cli-darwin-x64': 1.8.3 + '@biomejs/cli-linux-arm64': 1.8.3 + '@biomejs/cli-linux-arm64-musl': 1.8.3 + '@biomejs/cli-linux-x64': 1.8.3 + '@biomejs/cli-linux-x64-musl': 1.8.3 + '@biomejs/cli-win32-arm64': 1.8.3 + '@biomejs/cli-win32-x64': 1.8.3 + + '@biomejs/cli-darwin-arm64@1.8.3': + optional: true + + '@biomejs/cli-darwin-x64@1.8.3': 
+ optional: true + + '@biomejs/cli-linux-arm64-musl@1.8.3': + optional: true + + '@biomejs/cli-linux-arm64@1.8.3': + optional: true + + '@biomejs/cli-linux-x64-musl@1.8.3': + optional: true + + '@biomejs/cli-linux-x64@1.8.3': + optional: true + + '@biomejs/cli-win32-arm64@1.8.3': + optional: true + + '@biomejs/cli-win32-x64@1.8.3': + optional: true diff --git a/server/lmdbx-range-test.mjs b/server/lmdbx-range-test.mjs new file mode 100644 index 0000000..5605057 --- /dev/null +++ b/server/lmdbx-range-test.mjs @@ -0,0 +1,17 @@ +import { open } from 'lmdbx'; // or require + +const MAXIMUM_KEY = Buffer.from([0xff]); + +// or in deno: import { open } from 'https://deno.land/x/lmdbx/mod.ts'; +const myDB = open({ + path: '/tmp/my.db', + // any options go here, we can turn on compression like this: + compression: true, +}); +await myDB.put(["a","b"], "ab"); +await myDB.put(["a","c"], "ac"); +await myDB.put(["a","d"], "ad"); +await myDB.put(["b","a"], "ba"); +await myDB.put(["b","c"], "bc"); + +console.log(Array.from(myDB.getRange({start: ["a"], end: ["a", MAXIMUM_KEY]}).asArray)) \ No newline at end of file diff --git a/server/package.json b/server/package.json index 3e6f663..80e5e9c 100644 --- a/server/package.json +++ b/server/package.json @@ -14,6 +14,7 @@ "@trpc/server": "^10.45.0", "cors": "^2.8.5", "execa": "^9.3.0", + "lmdbx": "^0.5.0", "p-all": "^5.0.0", "p-queue": "^8.0.1", "p-retry": "^6.2.0", diff --git a/server/pnpm-lock.yaml b/server/pnpm-lock.yaml index 56cc8b0..8bab229 100644 --- a/server/pnpm-lock.yaml +++ b/server/pnpm-lock.yaml @@ -26,6 +26,9 @@ importers: execa: specifier: ^9.3.0 version: 9.3.0 + lmdbx: + specifier: ^0.5.0 + version: 0.5.0 p-all: specifier: ^5.0.0 version: 5.0.0 @@ -217,6 +220,36 @@ packages: '@humanwhocodes/env@3.0.5': resolution: {integrity: sha512-IpnujSwQ93i/amSy4GoynqaOAjbYKAI1b28JmPogfEytAh2aSjOE2ZlFnOAuXHUt3OQA41RvU0JL4lzTnVKeIw==} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==} + cpu: [arm64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + resolution: {integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==} + cpu: [x64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + resolution: {integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==} + cpu: [arm64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + resolution: {integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==} + cpu: [arm] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + resolution: {integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==} + cpu: [x64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + resolution: {integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==} + cpu: [x64] + os: [win32] + '@npmcli/fs@1.1.1': resolution: {integrity: sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==} @@ -682,6 +715,9 @@ packages: json-parse-better-errors@1.0.2: resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} + lmdbx@0.5.0: + resolution: {integrity: 
sha512-t6rl4YE1Z86CHP//bvFIJv8JuH2/+hZO9McdAEXFcswFbNL4gkWcH+mqrDd8QiFDIgfGK/ulcZgNq1SAwEZO+w==} + load-json-file@4.0.0: resolution: {integrity: sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==} engines: {node: '>=4'} @@ -754,6 +790,16 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + msgpackr-extract@3.0.3: + resolution: {integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==} + hasBin: true + + msgpackr@1.11.0: + resolution: {integrity: sha512-I8qXuuALqJe5laEBYoFykChhSXLikZmUhccjGsPuSJ/7uPip2TJ7lwdIQwWSAi0jGZDXv4WOP8Qg65QZRuXxXw==} + + nan@2.20.0: + resolution: {integrity: sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==} + napi-build-utils@1.0.2: resolution: {integrity: sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==} @@ -772,6 +818,14 @@ packages: resolution: {integrity: sha512-mNcltoe1R8o7STTegSOHdnJNN7s5EUvhoS7ShnTHDyOSd+8H+UdWODq6qSv67PjC8Zc5JRT8+oLAMCr0SIXw7g==} engines: {node: ^16 || ^18 || >= 20} + node-gyp-build-optional-packages@5.2.2: + resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==} + hasBin: true + + node-gyp-build@4.8.1: + resolution: {integrity: sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw==} + hasBin: true + node-gyp@8.4.1: resolution: {integrity: sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==} engines: {node: '>= 10.12.0'} @@ -817,6 +871,9 @@ packages: once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + ordered-binary@1.5.1: + resolution: {integrity: sha512-5VyHfHY3cd0iza71JepYG50My+YUbrFtGoUz2ooEydPyPM7Aai/JW098juLr+RG6+rDJuzNNTsEQu2DZa1A41A==} + p-all@5.0.0: resolution: {integrity: sha512-pofqu/1FhCVa+78xNAptCGc9V45exFz2pvBRyIvgXkNM0Rh18Py7j8pQuSjA+zpabI46v9hRjNWmL9EAFcEbpw==} engines: {node: '>=16'} @@ -1152,6 +1209,9 @@ packages: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} + weak-lru-cache@1.2.2: + resolution: {integrity: sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==} + which-boxed-primitive@1.0.2: resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} @@ -1263,6 +1323,24 @@ snapshots: '@humanwhocodes/env@3.0.5': {} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + optional: true + '@npmcli/fs@1.1.1': dependencies: '@gar/promisify': 1.1.3 @@ -1853,6 +1931,14 @@ snapshots: json-parse-better-errors@1.0.2: {} + lmdbx@0.5.0: + dependencies: + msgpackr: 1.11.0 + nan: 2.20.0 + node-gyp-build: 4.8.1 + ordered-binary: 1.5.1 + weak-lru-cache: 1.2.2 + load-json-file@4.0.0: dependencies: graceful-fs: 4.2.11 @@ -1948,6 +2034,24 @@ snapshots: 
ms@2.1.3: optional: true + msgpackr-extract@3.0.3: + dependencies: + node-gyp-build-optional-packages: 5.2.2 + optionalDependencies: + '@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3 + optional: true + + msgpackr@1.11.0: + optionalDependencies: + msgpackr-extract: 3.0.3 + + nan@2.20.0: {} + napi-build-utils@1.0.2: {} negotiator@0.6.3: @@ -1961,6 +2065,13 @@ snapshots: node-addon-api@7.1.0: {} + node-gyp-build-optional-packages@5.2.2: + dependencies: + detect-libc: 2.0.3 + optional: true + + node-gyp-build@4.8.1: {} + node-gyp@8.4.1: dependencies: env-paths: 2.2.1 @@ -2031,6 +2142,8 @@ snapshots: dependencies: wrappy: 1.0.2 + ordered-binary@1.5.1: {} + p-all@5.0.0: dependencies: p-map: 6.0.0 @@ -2421,6 +2534,8 @@ snapshots: vary@1.1.2: {} + weak-lru-cache@1.2.2: {} + which-boxed-primitive@1.0.2: dependencies: is-bigint: 1.0.4 diff --git a/server/src/backtest.ts b/server/src/backtest.ts new file mode 100644 index 0000000..b6633cb --- /dev/null +++ b/server/src/backtest.ts @@ -0,0 +1,152 @@ +import { stockDatabase } from "./stockdb.clickhouse.js"; +import { calendarDatabase } from "./calendardb.clickhouse.js"; +import type { CalendarKey } from "./calendardb.interfaces.js"; +import type { Aggregate } from "./interfaces.js"; + +function nextDate(date: string) { + const dateObject = new Date(date); + dateObject.setDate(dateObject.getDate() + 1); + return dateObject.toISOString().substring(0, 10); +} + +type BacktestInput = { + symbol: string; + startDate: string; + endDate: string; + /** Between 0 and 1. The frequency that similar calendars have historically ended (i.e. within the last hour) at a higher price than the current calendar's price. */ + historicalProbabilityOfSuccess?: number; + initialAvailableValue?: number; +}; +export async function backtest({ + symbol, + startDate, + endDate, + historicalProbabilityOfSuccess = 0.8, + initialAvailableValue: initialBuyingPower = 2000, +}: BacktestInput) { + let buyingPower = initialBuyingPower; + const portfolio = new Set(); + // for each day: + for ( + let date = startDate, didBuyCalendar = false; + date <= endDate; + date = nextDate(date), didBuyCalendar = false + ) { + console.log("Current Date:", date); + const calendars = await calendarDatabase.getCalendars({ + key: { symbol }, + date, + }); + const stockAggregates = await stockDatabase.getAggregates({ + key: symbol, + date, + }); + const calendarsAggregates = new Map< + CalendarKey, + Array, "tsStart" | "open" | "close">> + >(); + for (const calendar of calendars) { + calendarsAggregates.set( + calendar, + await calendarDatabase.getAggregates({ + key: { + ...calendar, + }, + date, + }), + ); + } + // for each minute of that day for which we have a stock candlestick: + for (const stockAggregate of stockAggregates) { + console.log("Current Time:", new Date(stockAggregate.tsStart)); + // filter-out calendars that are far-from-the-money (10%) + const calendarsNearTheMoney = calendars.filter( + ({ strike }) => + Math.abs((stockAggregate.open - strike) / stockAggregate.open) < 0.1, + ); + // for each relevant calendar on that day: + for (const calendar of calendarsNearTheMoney) { + const strikePercentageFromTheMoney = Math.abs( + (stockAggregate.open - calendar.strike) / stockAggregate.open, + ); + /** In days. 
*/ + const calendarSpan = + (new Date(calendar.backExpirationDate).valueOf() - + new Date(calendar.frontExpirationDate).valueOf()) / + (1000 * 60 * 60 * 24); + const targetCalendarPrice = + await calendarDatabase.getTargetPriceByProbability({ + symbol, + calendarSpan, + strikePercentageFromTheMoney, + historicalProbabilityOfSuccess, + }); + const calendarAggregates = calendarsAggregates.get(calendar); + const calendarAggregateAtCurrentTime = calendarAggregates.find( + ({ tsStart }) => tsStart === stockAggregate.tsStart, + ); + // if there exists a matching calendar candlestick for the current minute: + if (calendarAggregateAtCurrentTime) { + // if the current candlestick is a good price (i.e. less than the target price): + const minCalendarPriceInCandlestick = Math.min( + calendarAggregateAtCurrentTime.open, + calendarAggregateAtCurrentTime.close, + ); + if ( + minCalendarPriceInCandlestick < targetCalendarPrice && + minCalendarPriceInCandlestick > + 0.07 /* sometimes the calendar price is zero or negative, which is of course impossible; some institution got a good deal */ + ) { + // if we can afford to buy the calendar (each contract costs 100x the quoted price): + if (buyingPower > minCalendarPriceInCandlestick * 100) { + // buy the calendar, and continue to the next day: + portfolio.add(calendar); + buyingPower = buyingPower - minCalendarPriceInCandlestick * 100; + console.log( + "Bought", + calendar, + "for", + minCalendarPriceInCandlestick * 100, + "...$", + buyingPower, + "left", + ); + didBuyCalendar = true; + } + } + } + if (didBuyCalendar) { + break; + } + } + if (didBuyCalendar) { + break; + } + } + + // for each calendar in portfolio, if today is the last day, close the position: + for (const calendar of portfolio.values()) { + if (calendar.frontExpirationDate === date) { + const calendarClosingPrice = await calendarDatabase.getClosingPrice({ + key: { + ...calendar, + }, + }); + portfolio.delete(calendar); + buyingPower = buyingPower + calendarClosingPrice * 100; + console.log( + "Sold", + calendar, + "for", + calendarClosingPrice, + "...$", + buyingPower, + "left", + ); + } + } + } + + console.log("Ending Buying Power:", buyingPower); + console.log("Portfolio:", portfolio.values()); +} diff --git a/server/src/calendardb.clickhouse.ts b/server/src/calendardb.clickhouse.ts new file mode 100644 index 0000000..dac31bb --- /dev/null +++ b/server/src/calendardb.clickhouse.ts @@ -0,0 +1,142 @@ +import type { CalendarDatabase, CalendarKey } from "./calendardb.interfaces.js"; +import type { Aggregate } from "./interfaces.js"; +import { query } from "./lib/clickhouse.js"; + +function makeCalendarDatabase(): CalendarDatabase { + const calendarDatabase: Omit<CalendarDatabase, "getCalendars"> = { + getKeys: async ({ key: { symbol }, date }) => { + const calendarsForSymbolOnDate = await query< + Omit<CalendarKey, "symbol"> + >(` + WITH today_option_contracts AS ( + SELECT expirationDate, strike, type + FROM option_contract_existences + WHERE symbol = '${symbol}' + AND asOfDate = '${date}' + ) + SELECT + front_option_contract.type as type, + front_option_contract.strike as strike, + front_option_contract.expirationDate as frontExpirationDate, + back_option_contract.expirationDate as backExpirationDate + FROM today_option_contracts AS front_option_contract + ASOF INNER JOIN today_option_contracts AS back_option_contract + ON front_option_contract.type = back_option_contract.type + AND front_option_contract.strike = back_option_contract.strike + AND front_option_contract.expirationDate < back_option_contract.expirationDate + `); + + return calendarsForSymbolOnDate.map((calendarWithoutSymbol) => ({
...calendarWithoutSymbol, + symbol, + })); + }, + getAggregates: async ({ + key: { symbol, frontExpirationDate, backExpirationDate, strike, type }, + date, + }) => { + return ( + await query, "key">>(` + WITH front_option_contract_candlestick AS ( + SELECT + tsStart, + open, + close, + high, + low + FROM option_contract_aggregates + WHERE symbol = '${symbol}' + AND type = '${type}' + AND strike = '${strike}' + AND expirationDate = '${frontExpirationDate}' + AND toDate(tsStart) = '${date}' + ), + back_option_contract_candlestick AS ( + SELECT + tsStart, + open, + close, + high, + low + FROM option_contract_aggregates + WHERE symbol = '${symbol}' + AND type = '${type}' + AND strike = '${strike}' + AND expirationDate = '${backExpirationDate}' + AND toDate(tsStart) = '${date}' + ) + SELECT + toUnixTimestamp(front_option_contract_candlestick.tsStart) as tsStart, + back_option_contract_candlestick.open - front_option_contract_candlestick.open as open, + back_option_contract_candlestick.close - front_option_contract_candlestick.close as close + FROM front_option_contract_candlestick + INNER JOIN back_option_contract_candlestick + ON front_option_contract_candlestick.tsStart = back_option_contract_candlestick.tsStart + ORDER BY front_option_contract_candlestick.tsStart ASC + `) + ).map((aggregate) => ({ + ...aggregate, + tsStart: aggregate.tsStart * 1000, // unfortunately, `toUnixTimestamp` only returns second-precision + })); + }, + insertAggregates: async (aggregates) => { + // no-op: we insert individual option contracts, not calendars + }, + getClosingPrice: async ({ + key: { symbol, strike, type, frontExpirationDate, backExpirationDate }, + }) => { + return ( + await query<{ calendarClosingPrice: number }>(` + WITH front_option_contract_candlestick AS ( + SELECT + tsStart, + open, + close, + high, + low + FROM option_contract_aggregates + WHERE symbol = '${symbol}' + AND type = '${type}' + AND strike = '${strike}' + AND expirationDate = '${frontExpirationDate}' + AND toDate(tsStart) = '${frontExpirationDate}' + ), + back_option_contract_candlestick AS ( + SELECT + tsStart, + open, + close, + high, + low + FROM option_contract_aggregates + WHERE symbol = '${symbol}' + AND type = '${type}' + AND strike = '${strike}' + AND expirationDate = '${backExpirationDate}' + AND toDate(tsStart) = '${frontExpirationDate}' + ) + SELECT + min(back_option_contract_candlestick.close - front_option_contract_candlestick.close) as calendarClosingPrice + FROM front_option_contract_candlestick + INNER JOIN back_option_contract_candlestick + ON front_option_contract_candlestick.tsStart = back_option_contract_candlestick.tsStart + `) + )[0]?.calendarClosingPrice; + }, + getTargetPriceByProbability: async ({ + symbol, + calendarSpan, + strikePercentageFromTheMoney, + historicalProbabilityOfSuccess, + }) => { + return 0.24; + }, + }; + + return { + ...calendarDatabase, + getCalendars: calendarDatabase.getKeys, + }; +} + +export const calendarDatabase: CalendarDatabase = makeCalendarDatabase(); diff --git a/server/src/calendardb.interfaces.ts b/server/src/calendardb.interfaces.ts new file mode 100644 index 0000000..2495c80 --- /dev/null +++ b/server/src/calendardb.interfaces.ts @@ -0,0 +1,24 @@ +import type { AggregateDatabase } from "./interfaces.js"; + +export type CalendarKey = { + symbol: string; + type: "call" | "put"; + strike: number; + frontExpirationDate: string; + backExpirationDate: string; +}; + +export type CalendarDatabase = AggregateDatabase & { + getCalendars: AggregateDatabase["getKeys"]; + 
getTargetPriceByProbability: ({ + symbol, + calendarSpan, + strikePercentageFromTheMoney, + historicalProbabilityOfSuccess, + }: { + symbol: string; + calendarSpan: number; + strikePercentageFromTheMoney: number; + historicalProbabilityOfSuccess: number; + }) => Promise; +}; diff --git a/server/src/calendardb.lmdbx.ts b/server/src/calendardb.lmdbx.ts new file mode 100644 index 0000000..c6b20e9 --- /dev/null +++ b/server/src/calendardb.lmdbx.ts @@ -0,0 +1,153 @@ +import type { CalendarDatabase } from "./calendardb.interfaces.js"; +import { open } from "lmdbx"; + +const calendarAggregatesDb = open({ + path: "/tmp/calendar-aggregates.db", + // any options go here, we can turn on compression like this: + compression: true, +}); + +const calendarExistenceDb = open({ + path: "/tmp/calendar-existence.db", + // any options go here, we can turn on compression like this: + compression: true, +}); + +/** Largest possible key according to the `ordered-binary` (used by lmdbx) docs. */ +const MAXIMUM_KEY = Buffer.from([0xff]); + +function makeCalendarDatabase(): CalendarDatabase { + const calendarDatabase: Omit = { + getKeys: async ({ key: { symbol }, date }) => { + return calendarExistenceDb + .getRange({ + start: [date, symbol], + end: [date, symbol, MAXIMUM_KEY], + }) + .map(({ key }) => ({ + symbol, + frontExpirationDate: key[2], + backExpirationDate: key[3], + strike: key[4], + type: key[5], + })).asArray; + }, + getAggregates: async ({ + key: { symbol, frontExpirationDate, backExpirationDate, strike, type }, + date, + }) => { + const startOfDayUnix = new Date(`${date}T00:00:00Z`).valueOf(); + const endOfDayUnix = startOfDayUnix + 3600 * 24 * 1000; + return calendarAggregatesDb + .getRange({ + start: [ + symbol, + frontExpirationDate, + backExpirationDate, + strike, + type, + startOfDayUnix, + ], + end: [ + symbol, + frontExpirationDate, + backExpirationDate, + strike, + type, + endOfDayUnix, + ], + }) + .map(({ value }) => ({ + tsStart: value.tsStart, + open: value.open, + close: value.close, + high: value.high, + low: value.low, + })).asArray; + }, + insertAggregates: async (aggregates) => { + await calendarExistenceDb.batch(() => { + for (const aggregate of aggregates) { + calendarExistenceDb.put( + [ + new Date(aggregate.tsStart).toISOString().substring(0, 10), + aggregate.key.symbol, + aggregate.key.frontExpirationDate, + aggregate.key.backExpirationDate, + aggregate.key.strike, + aggregate.key.type, + ], + null, + ); + } + }); + await calendarAggregatesDb.batch(() => { + for (const aggregate of aggregates) { + calendarAggregatesDb.put( + [ + aggregate.key.symbol, + aggregate.key.frontExpirationDate, + aggregate.key.backExpirationDate, + aggregate.key.strike, + aggregate.key.type, + aggregate.tsStart, + ], + { + open: aggregate.open, + close: aggregate.close, + high: aggregate.high, + low: aggregate.low, + }, + ); + } + }); + }, + getClosingPrice: async ({ + key: { symbol, strike, type, frontExpirationDate, backExpirationDate }, + }) => { + const startOfLastHourUnix = new Date( + `${frontExpirationDate}T00:00:00Z`, + ).valueOf(); + const endOfLastHourUnix = startOfLastHourUnix + 3600 * 1000; + let minPrice = 0; + for (const { value } of calendarAggregatesDb.getRange({ + start: [ + symbol, + frontExpirationDate, + backExpirationDate, + strike, + type, + startOfLastHourUnix, + ], + end: [ + symbol, + frontExpirationDate, + backExpirationDate, + strike, + type, + endOfLastHourUnix, + ], + })) { + if (value.close < minPrice || minPrice === 0) { + minPrice = value.close; + } + } + return minPrice; 
+ }, + getTargetPriceByProbability: async ({ + symbol, + calendarSpan, + strikePercentageFromTheMoney, + historicalProbabilityOfSuccess, + }) => { + return 0.24; + }, + }; + + return { + ...calendarDatabase, + getCalendars: calendarDatabase.getKeys, + }; +} + +export const calendarDatabase: CalendarDatabase = makeCalendarDatabase(); diff --git a/server/src/index.ts b/server/src/index.ts index 8f1c5c4..2bd4eec 100644 --- a/server/src/index.ts +++ b/server/src/index.ts @@ -3,14 +3,14 @@ import { query } from "./lib/clickhouse.js"; import { createHTTPHandler } from "@trpc/server/adapters/standalone"; import cors from "cors"; import { - Object as ObjectT, - String as StringT, - TSchema, - Number as NumberT, + Object as ObjectT, + String as StringT, + type TSchema, + Number as NumberT, } from "@sinclair/typebox"; import { TypeCompiler } from "@sinclair/typebox/compiler"; import { TRPCError } from "@trpc/server"; -import { createServer } from "http"; +import { createServer } from "node:http"; import { Env } from "@humanwhocodes/env"; const env = new Env(); @@ -24,53 +24,53 @@ const LISTEN_PORT = env.get("LISTEN_PORT", 3005); * @returns A TRPC-compatible validator function */ export function RpcType(schema: T) { - const check = TypeCompiler.Compile(schema); - return (value: unknown) => { - if (check.Check(value)) return value; - const { path, message } = check.Errors(value).First()!; - throw new TRPCError({ - message: `${message} for ${path}`, - code: "BAD_REQUEST", - }); - }; + const check = TypeCompiler.Compile(schema); + return (value: unknown) => { + if (check.Check(value)) return value; + const { path, message } = check.Errors(value).First(); + throw new TRPCError({ + message: `${message} for ${path}`, + code: "BAD_REQUEST", + }); + }; } const appRouter = router({ - getAvailableUnderlyings: publicProcedure.query(async (opts) => { - // return ( - // await query<{ symbol: string }>(` - // SELECT DISTINCT(symbol) as symbol FROM option_contract_existences WHERE asOfDate = (SELECT max(asOfDate) FROM option_contract_existences) - // `) - // ).map(({ symbol }) => symbol); - return ["AAPL", "AMD", "GOOGL", "MSFT", "NFLX"]; - }), - getAvailableAsOfDates: publicProcedure - .input(RpcType(ObjectT({ underlying: StringT() }))) - .query(async (opts) => { - const underlying = opts.input.underlying; - return ( - await query<{ asOfDate: string }>(` + getAvailableUnderlyings: publicProcedure.query(async (opts) => { + // return ( + // await query<{ symbol: string }>(` + // SELECT DISTINCT(symbol) as symbol FROM option_contract_existences WHERE asOfDate = (SELECT max(asOfDate) FROM option_contract_existences) + // `) + // ).map(({ symbol }) => symbol); + return ["AAPL", "AMD", "GOOGL", "MSFT", "NFLX"]; + }), + getAvailableAsOfDates: publicProcedure + .input(RpcType(ObjectT({ underlying: StringT() }))) + .query(async (opts) => { + const underlying = opts.input.underlying; + return ( + await query<{ asOfDate: string }>(` SELECT DISTINCT(asOfDate) as asOfDate FROM option_contract_existences WHERE symbol = '${underlying}' ORDER BY asOfDate `) - ).map(({ asOfDate }) => asOfDate); - }), - getExpirationsForUnderlying: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - asOfDate: StringT(), - }) - ) - ) - .query(async (opts) => { - const { underlying, asOfDate } = opts.input; - return ( - await query<{ expirationDate: string }>(` + ).map(({ asOfDate }) => asOfDate); + }), + getExpirationsForUnderlying: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ 
maxLength: 5 }), + asOfDate: StringT(), + }), + ), + ) + .query(async (opts) => { + const { underlying, asOfDate } = opts.input; + return ( + await query<{ expirationDate: string }>(` SELECT DISTINCT(expirationDate) as expirationDate FROM option_contract_existences @@ -78,22 +78,22 @@ const appRouter = router({ AND asOfDate = '${asOfDate}' ORDER BY expirationDate `) - ).map(({ expirationDate }) => expirationDate); - }), - getStrikesForUnderlying: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - asOfDate: StringT(), - expirationDate: StringT(), - }) - ) - ) - .query(async (opts) => { - const { underlying, asOfDate, expirationDate } = opts.input; - return ( - await query<{ strike: string }>(` + ).map(({ expirationDate }) => expirationDate); + }), + getStrikesForUnderlying: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + asOfDate: StringT(), + expirationDate: StringT(), + }), + ), + ) + .query(async (opts) => { + const { underlying, asOfDate, expirationDate } = opts.input; + return ( + await query<{ strike: string }>(` SELECT DISTINCT(strike) as strike FROM option_contract_existences @@ -102,20 +102,20 @@ const appRouter = router({ AND expirationDate = '${expirationDate}' ORDER BY strike `) - ).map(({ strike }) => strike); - }), - getOpensForUnderlying: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - }) - ) - ) - .query(async (opts) => { - const { underlying } = opts.input; - return await query<{ x: number; y: number }>( - ` + ).map(({ strike }) => strike); + }), + getOpensForUnderlying: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + }), + ), + ) + .query(async (opts) => { + const { underlying } = opts.input; + return await query<{ x: number; y: number }>( + ` SELECT toUnixTimestamp(tsStart) as x, open as y @@ -123,23 +123,23 @@ const appRouter = router({ WHERE symbol = '${underlying}' ORDER BY tsStart ASC `, - "JSONEachRow" - ); - }), - getOpensForOptionContract: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - expirationDate: StringT(), - strike: NumberT(), - }) - ) - ) - .query(async (opts) => { - const { underlying, expirationDate, strike } = opts.input; - return await query<{ x: number; y: number }>( - ` + "JSONEachRow", + ); + }), + getOpensForOptionContract: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + expirationDate: StringT(), + strike: NumberT(), + }), + ), + ) + .query(async (opts) => { + const { underlying, expirationDate, strike } = opts.input; + return await query<{ x: number; y: number }>( + ` SELECT toUnixTimestamp(tsStart) as x, open as y @@ -150,32 +150,32 @@ const appRouter = router({ AND type = 'call' ORDER BY tsStart ASC `, - "JSONEachRow" - ); - }), - getHistoricalCalendarPrices: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - daysToFrontExpiration: NumberT(), - daysBetweenFrontAndBackExpiration: NumberT(), - strikePercentageFromUnderlyingPriceRangeMin: NumberT(), - strikePercentageFromUnderlyingPriceRangeMax: NumberT(), - }) - ) - ) - .query(async (opts) => { - const { - underlying, - daysToFrontExpiration, - daysBetweenFrontAndBackExpiration, - strikePercentageFromUnderlyingPriceRangeMin, - strikePercentageFromUnderlyingPriceRangeMax, - } = opts.input; - return ( - await query<[number, number]>( - ` + "JSONEachRow", + ); + }), + getHistoricalCalendarPrices: 
publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + daysToFrontExpiration: NumberT(), + daysBetweenFrontAndBackExpiration: NumberT(), + strikePercentageFromUnderlyingPriceRangeMin: NumberT(), + strikePercentageFromUnderlyingPriceRangeMax: NumberT(), + }), + ), + ) + .query(async (opts) => { + const { + underlying, + daysToFrontExpiration, + daysBetweenFrontAndBackExpiration, + strikePercentageFromUnderlyingPriceRangeMin, + strikePercentageFromUnderlyingPriceRangeMax, + } = opts.input; + return ( + await query<[number, number]>( + ` SELECT toUnixTimestamp(tsStart) as asOfTs, calendarPrice @@ -186,31 +186,31 @@ const appRouter = router({ AND strikePercentageFromUnderlyingPrice <= ${strikePercentageFromUnderlyingPriceRangeMax} AND daysBetweenFrontAndBackExpiration = ${daysBetweenFrontAndBackExpiration} `, - "JSONCompactEachRow" - ) - ).reduce( - (columns, row) => { - columns[0].push(row[0]); - columns[1].push(row[1]); - return columns; - }, - [[], []] - ); - }), - getHistoricalStockQuoteChartData: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - lookbackPeriodStart: StringT(), - lookbackPeriodEnd: StringT(), - }) - ) - ) - .query(async (opts) => { - const { underlying, lookbackPeriodStart, lookbackPeriodEnd } = opts.input; - return await query<[number, number]>( - ` + "JSONCompactEachRow", + ) + ).reduce( + (columns, row) => { + columns[0].push(row[0]); + columns[1].push(row[1]); + return columns; + }, + [[], []], + ); + }), + getHistoricalStockQuoteChartData: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + lookbackPeriodStart: StringT(), + lookbackPeriodEnd: StringT(), + }), + ), + ) + .query(async (opts) => { + const { underlying, lookbackPeriodStart, lookbackPeriodEnd } = opts.input; + return await query<[number, number]>( + ` SELECT toUnixTimestamp(tsStart) as x, open as y @@ -220,35 +220,35 @@ const appRouter = router({ AND tsStart <= '${lookbackPeriodEnd} 00:00:00' ORDER BY x ASC `, - "JSONEachRow" - ); - }), - getHistoricalCalendarQuoteChartData: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - daysToFrontExpiration: NumberT(), - daysBetweenFrontAndBackExpiration: NumberT(), - strikePercentageFromUnderlyingPriceRangeMin: NumberT(), - strikePercentageFromUnderlyingPriceRangeMax: NumberT(), - lookbackPeriodStart: StringT(), - lookbackPeriodEnd: StringT(), - }) - ) - ) - .query(async (opts) => { - const { - underlying, - daysToFrontExpiration, - daysBetweenFrontAndBackExpiration, - strikePercentageFromUnderlyingPriceRangeMin, - strikePercentageFromUnderlyingPriceRangeMax, - lookbackPeriodStart, - lookbackPeriodEnd, - } = opts.input; - return await query<[number, number]>( - ` + "JSONEachRow", + ); + }), + getHistoricalCalendarQuoteChartData: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + daysToFrontExpiration: NumberT(), + daysBetweenFrontAndBackExpiration: NumberT(), + strikePercentageFromUnderlyingPriceRangeMin: NumberT(), + strikePercentageFromUnderlyingPriceRangeMax: NumberT(), + lookbackPeriodStart: StringT(), + lookbackPeriodEnd: StringT(), + }), + ), + ) + .query(async (opts) => { + const { + underlying, + daysToFrontExpiration, + daysBetweenFrontAndBackExpiration, + strikePercentageFromUnderlyingPriceRangeMin, + strikePercentageFromUnderlyingPriceRangeMax, + lookbackPeriodStart, + lookbackPeriodEnd, + } = opts.input; + return await query<[number, number]>( + ` SELECT 
toUnixTimestamp(tsStart) as x, truncate(calendarPrice, 2) as y @@ -261,33 +261,33 @@ const appRouter = router({ AND tsStart >= '${lookbackPeriodStart} 00:00:00' AND tsStart <= '${lookbackPeriodEnd} 00:00:00' `, - "JSONEachRow" - ); - }), - getHistoricalCalendarExitQuoteChartData: publicProcedure - .input( - RpcType( - ObjectT({ - underlying: StringT({ maxLength: 5 }), - daysToFrontExpiration: NumberT(), - daysBetweenFrontAndBackExpiration: NumberT(), - lookbackPeriodStart: StringT({ - pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2}", - }), - lookbackPeriodEnd: StringT({ pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2}" }), - }) - ) - ) - .query(async (opts) => { - const { - underlying, - daysToFrontExpiration, - daysBetweenFrontAndBackExpiration, - lookbackPeriodStart, - lookbackPeriodEnd, - } = opts.input; - return await query<[number, number, number]>( - ` + "JSONEachRow", + ); + }), + getHistoricalCalendarExitQuoteChartData: publicProcedure + .input( + RpcType( + ObjectT({ + underlying: StringT({ maxLength: 5 }), + daysToFrontExpiration: NumberT(), + daysBetweenFrontAndBackExpiration: NumberT(), + lookbackPeriodStart: StringT({ + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2}", + }), + lookbackPeriodEnd: StringT({ pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2}" }), + }), + ), + ) + .query(async (opts) => { + const { + underlying, + daysToFrontExpiration, + daysBetweenFrontAndBackExpiration, + lookbackPeriodStart, + lookbackPeriodEnd, + } = opts.input; + return await query<[number, number, number]>( + ` SELECT FLOOR(strikePercentageFromUnderlyingPrice, 1) as x, FLOOR(calendarPrice, 1) as y, @@ -303,9 +303,9 @@ const appRouter = router({ GROUP BY x, y ORDER BY x ASC, y ASC `, - "JSONEachRow" - ); - }), + "JSONEachRow", + ); + }), }); // Export type router type signature, @@ -313,20 +313,20 @@ const appRouter = router({ export type AppRouter = typeof appRouter; const handler = createHTTPHandler({ - middleware: cors(), - router: appRouter, - createContext() { - return {}; - }, + middleware: cors(), + router: appRouter, + createContext() { + return {}; + }, }); const server = createServer((req, res) => { - if (req.url.startsWith("/healthz")) { - res.statusCode = 200; - res.end("OK"); - } else { - handler(req, res); - } + if (req.url.startsWith("/healthz")) { + res.statusCode = 200; + res.end("OK"); + } else { + handler(req, res); + } }); -server.listen(parseInt(LISTEN_PORT)); +server.listen(Number.parseInt(LISTEN_PORT)); diff --git a/server/src/interfaces.ts b/server/src/interfaces.ts new file mode 100644 index 0000000..b53ad0c --- /dev/null +++ b/server/src/interfaces.ts @@ -0,0 +1,25 @@ +export type Candlestick = { + open: number; + close: number; + high: number; + low: number; +}; + +export type Aggregate = { + key: T; + /** UNIX time in milliseconds */ + tsStart: number; +} & Candlestick; + +export type AggregateDatabase = { + getKeys: ({ + key, + date, + }: { key?: T | Partial; date?: string }) => Promise>; + getAggregates: ({ + key, + date, + }: { key: T; date: string }) => Promise, "key">>>; + insertAggregates: (aggregates: Array>) => Promise; + getClosingPrice: ({ key }: { key: T }) => Promise; +}; diff --git a/server/src/optiondb.clickhouse.ts b/server/src/optiondb.clickhouse.ts new file mode 100644 index 0000000..7f80add --- /dev/null +++ b/server/src/optiondb.clickhouse.ts @@ -0,0 +1,90 @@ +import type { + OptionContractDatabase, + OptionContractKey, +} from "./optiondb.interfaces.js"; +import type { Aggregate } from "./interfaces.js"; +import { clickhouse, query } from "./lib/clickhouse.js"; + +function 
makeOptionContractDatabase(): OptionContractDatabase { + const optionContractDatabase: Omit< + OptionContractDatabase, + "getOptionContracts" + > = { + getKeys: async ({ key: { symbol }, date }) => { + return ( + await query>(` + SELECT expirationDate, strike, type + FROM option_contract_existences + WHERE symbol = '${symbol}' + AND asOfDate = '${date}' + `) + ).map((optionContractWithoutKey) => ({ + ...optionContractWithoutKey, + symbol, + })); + }, + getAggregates: async ({ + key: { symbol, expirationDate, strike, type }, + date, + }) => { + return ( + await query, "key">>(` + SELECT + toUnixTimestamp(tsStart) as tsStart, + open, + close, + high, + low + FROM option_contract_aggregates + WHERE symbol = '${symbol}' + AND type = '${type}' + AND strike = '${strike}' + AND expirationDate = '${expirationDate}' + AND toDate(tsStart) = '${date}' + ORDER BY tsStart ASC + `) + ).map((aggregate) => ({ + ...aggregate, + tsStart: aggregate.tsStart * 1000, // unfortunately, `toUnixTimestamp` only returns second-precision + })); + }, + insertAggregates: async (aggregates) => { + // stock existence is taken care of by clickhouse materialized view + await clickhouse.insert({ + table: "option_contract_aggregates", + values: aggregates.map( + ({ + key: { symbol, expirationDate, strike, type }, + tsStart, + open, + close, + high, + low, + }) => ({ + symbol, + expirationDate, + strike, + type, + tsStart, + open, + close, + high, + low, + }), + ), + }); + }, + getClosingPrice: async ({ key }) => { + // no-op: not used since stocks don't have a "closing" price, unlike options. + return 0; + }, + }; + + return { + ...optionContractDatabase, + getOptionContracts: optionContractDatabase.getKeys, + }; +} + +export const optionContractDatabase: OptionContractDatabase = + makeOptionContractDatabase(); diff --git a/server/src/optiondb.interfaces.ts b/server/src/optiondb.interfaces.ts new file mode 100644 index 0000000..6b56d97 --- /dev/null +++ b/server/src/optiondb.interfaces.ts @@ -0,0 +1,12 @@ +import type { AggregateDatabase } from "./interfaces.js"; + +export type OptionContractKey = { + symbol: string; + expirationDate: string; + strike: number; + type: "call" | "put"; +}; + +export type OptionContractDatabase = AggregateDatabase & { + getOptionContracts: AggregateDatabase["getKeys"]; +}; diff --git a/server/src/optiondb.lmdbx.ts b/server/src/optiondb.lmdbx.ts new file mode 100644 index 0000000..9fbf8a2 --- /dev/null +++ b/server/src/optiondb.lmdbx.ts @@ -0,0 +1,118 @@ +import type { OptionContractDatabase } from "./optiondb.interfaces.js"; +import { open } from "lmdbx"; + +const optionContractAggregatesDb = open({ + path: "/tmp/option-contract-aggregates.db", + // any options go here, we can turn on compression like this: + compression: true, +}); + +const optionContractExistenceDb = open({ + path: "/tmp/option-contract-existence.db", + // any options go here, we can turn on compression like this: + compression: true, +}); + +/** Largest possible key according to the `ordered-binary` (used by lmdbx) docs. 
*/ +const MAXIMUM_KEY = Buffer.from([0xff]); + +function makeOptionContractDatabase(): OptionContractDatabase { + const optionContractDatabase: Omit< + OptionContractDatabase, + "getOptionContracts" + > = { + getKeys: async ({ key: { symbol }, date }) => { + return optionContractExistenceDb + .getRange({ + start: [date, symbol], + end: [date, symbol, MAXIMUM_KEY], + }) + .map(({ key }) => ({ + symbol, + expirationDate: key[2], + strike: key[3], + type: key[4], + })).asArray; + }, + getAggregates: async ({ + key: { symbol, expirationDate, strike, type }, + date, + }) => { + const startOfDayUnix = new Date(`${date}T00:00:00Z`).valueOf(); + const endOfDayUnix = startOfDayUnix + 3600 * 24 * 1000; + return optionContractAggregatesDb + .getRange({ + start: [symbol, expirationDate, strike, type, startOfDayUnix], + end: [symbol, expirationDate, strike, type, endOfDayUnix], + }) + .map(({ value }) => ({ + tsStart: value.tsStart, + open: value.open, + close: value.close, + high: value.high, + low: value.low, + })).asArray; + }, + insertAggregates: async (aggregates) => { + await optionContractExistenceDb.batch(() => { + for (const aggregate of aggregates) { + optionContractExistenceDb.put( + [ + new Date(aggregate.tsStart).toISOString().substring(0, 10), + aggregate.key.symbol, + aggregate.key.expirationDate, + aggregate.key.strike, + aggregate.key.type, + ], + null, + ); + } + }); + await optionContractAggregatesDb.batch(() => { + for (const aggregate of aggregates) { + optionContractAggregatesDb.put( + [ + aggregate.key.symbol, + aggregate.key.expirationDate, + aggregate.key.strike, + aggregate.key.type, + aggregate.tsStart, + ], + { + open: aggregate.open, + close: aggregate.close, + high: aggregate.high, + low: aggregate.low, + }, + ); + } + }); + }, + getClosingPrice: async ({ + key: { symbol, strike, type, expirationDate }, + }) => { + const startOfLastHourUnix = new Date( + `${expirationDate}T00:00:00Z`, + ).valueOf(); + const endOfLastHourUnix = startOfLastHourUnix + 3600 * 1000; + let minPrice = 0; + for (const { value } of optionContractAggregatesDb.getRange({ + start: [symbol, expirationDate, strike, type, startOfLastHourUnix], + end: [symbol, expirationDate, strike, type, endOfLastHourUnix], + })) { + if (value.close < minPrice || minPrice === 0) { + minPrice = value.close; + } + } + return minPrice; + }, + }; + + return { + ...optionContractDatabase, + getOptionContracts: optionContractDatabase.getKeys, + }; +} + +export const optionContractDatabase: OptionContractDatabase = + makeOptionContractDatabase(); diff --git a/server/src/scripts/clickhouse-to-lmdbx.ts b/server/src/scripts/clickhouse-to-lmdbx.ts new file mode 100644 index 0000000..ffed9e6 --- /dev/null +++ b/server/src/scripts/clickhouse-to-lmdbx.ts @@ -0,0 +1,51 @@ +import type { AggregateDatabase } from "../interfaces.js"; +// import { stockDatabase as stockDatabaseClickhouse } from "../stockdb.clickhouse.js"; +// import { stockDatabase as stockDatabaseLmdbx } from "../stockdb.lmdbx.js"; +import { optionContractDatabase as optionContractDatabaseClickhouse } from "../optiondb.clickhouse.js"; +import { optionContractDatabase as optionContractDatabaseLmdbx } from "../optiondb.lmdbx.js"; + +function nextDate(date: string) { + const dateObject = new Date(date); + dateObject.setDate(dateObject.getDate() + 1); + return dateObject.toISOString().substring(0, 10); +} + +async function syncAggregates({ + from, + to, + key, + date, +}: { + from: AggregateDatabase; + to: AggregateDatabase; + key: T; + date: string; +}) { + const 
aggregatesFrom = (await from.getAggregates({ key, date })).map( + (aggregateWithoutKey) => ({ ...aggregateWithoutKey, key }), + ); + await to.insertAggregates(aggregatesFrom); +} + +const symbols = ["AMD", "AAPL", "MSFT", "GOOGL", "NFLX", "NVDA"]; +async function run() { + const startDate = "2022-02-01"; + const endDate = "2024-07-15"; + for (let date = startDate; date <= endDate; date = nextDate(date)) { + // const symbols = await stockDatabaseClickhouse.getSymbols({ date }); + for (const symbol of symbols) { + console.log(date, symbol); + const keys = await optionContractDatabaseClickhouse.getKeys({key: {symbol}, date}); + for(const key of keys){ + await syncAggregates({ + from: optionContractDatabaseClickhouse, + to: optionContractDatabaseLmdbx, + key, + date, + }); + } + } + } +} + +await run(); diff --git a/server/src/stockdb.clickhouse.ts b/server/src/stockdb.clickhouse.ts new file mode 100644 index 0000000..0277109 --- /dev/null +++ b/server/src/stockdb.clickhouse.ts @@ -0,0 +1,59 @@ +import type { StockDatabase, StockKey } from "./stockdb.interfaces.js"; +import type { Aggregate } from "./interfaces.js"; +import { clickhouse, query } from "./lib/clickhouse.js"; + +function makeStockDatabase(): StockDatabase { + const stockDatabase: Omit = { + getKeys: async ({ date }) => { + return ( + await query(` + SELECT DISTINCT symbol FROM stock_aggregates WHERE toDate(tsStart) = '${date}' + `) + ).map(({ symbol }) => symbol); + }, + getAggregates: async ({ key: symbol, date }) => { + return ( + await query, "key">>(` + SELECT + toUnixTimestamp(tsStart) as tsStart, + open, + close, + high, + low + FROM stock_aggregates + WHERE symbol = '${symbol}' + AND toDate(tsStart) = '${date}' + ORDER BY tsStart ASC + `) + ).map((aggregate) => ({ + ...aggregate, + tsStart: aggregate.tsStart * 1000, // unfortunately, `toUnixTimestamp` only returns second-precision + })); + }, + insertAggregates: async (aggregates) => { + // stock existence is taken care of by clickhouse materialized view + await clickhouse.insert({ + table: "stock_aggregates", + values: aggregates.map(({ key, tsStart, open, close, high, low }) => ({ + symbol: key, + tsStart, + open, + close, + high, + low, + })), + }); + }, + getClosingPrice: async ({ key }) => { + // no-op: not used since stocks don't have a "closing" price, unlike options. 
+ return 0; + }, + }; + + return { + ...stockDatabase, + getSymbols: stockDatabase.getKeys, + }; +} + +export const stockDatabase: StockDatabase = makeStockDatabase(); diff --git a/server/src/stockdb.interfaces.ts b/server/src/stockdb.interfaces.ts new file mode 100644 index 0000000..bf71707 --- /dev/null +++ b/server/src/stockdb.interfaces.ts @@ -0,0 +1,7 @@ +import type { AggregateDatabase } from "./interfaces.js"; + +export type StockKey = string; + +export type StockDatabase = AggregateDatabase & { + getSymbols: AggregateDatabase["getKeys"]; +}; diff --git a/server/src/stockdb.lmdbx.ts b/server/src/stockdb.lmdbx.ts new file mode 100644 index 0000000..12d98b0 --- /dev/null +++ b/server/src/stockdb.lmdbx.ts @@ -0,0 +1,80 @@ +import type { StockDatabase } from "./stockdb.interfaces.js"; +import { open } from "lmdbx"; + +const stockAggregatesDb = open({ + path: "/tmp/stock-aggregates.db", + // any options go here, we can turn on compression like this: + compression: true, +}); + +const stockExistenceDb = open({ + path: "/tmp/stock-existence.db", + // any options go here, we can turn on compression like this: + compression: true, +}); + +/** Largest possible key according to the `ordered-binary` (used by lmdbx) docs. */ +const MAXIMUM_KEY = Buffer.from([0xff]); + +function makeStockDatabase(): StockDatabase { + const stockDatabase: Omit = { + getKeys: async ({ date }) => { + return stockExistenceDb + .getRange({ + start: [date], + end: [date, MAXIMUM_KEY], + }) + .map(({ key }) => key[1]).asArray; + }, + getAggregates: async ({ key: symbol, date }) => { + const startOfDayUnix = new Date(`${date}T00:00:00Z`).valueOf(); + const endOfDayUnix = startOfDayUnix + 3600 * 24 * 1000; + return stockAggregatesDb + .getRange({ + start: [symbol, startOfDayUnix], + end: [symbol, endOfDayUnix], + }) + .map(({ key, value }) => ({ + tsStart: key[1], + open: value.open, + close: value.close, + high: value.high, + low: value.low, + })).asArray; + }, + insertAggregates: async (aggregates) => { + await stockExistenceDb.batch(() => { + for (const aggregate of aggregates) { + stockExistenceDb.put( + [ + new Date(aggregate.tsStart).toISOString().substring(0, 10), + aggregate.key, + ], + null, + ); + } + }); + await stockAggregatesDb.batch(() => { + for (const aggregate of aggregates) { + stockAggregatesDb.put([aggregate.key, aggregate.tsStart], { + open: aggregate.open, + close: aggregate.close, + high: aggregate.high, + low: aggregate.low, + }); + } + }); + }, + getClosingPrice: async ({ key }) => { + // no-op: not used since stocks don't have a "closing" price, unlike options. 
+ return 0; + }, + }; + + return { + ...stockDatabase, + getSymbols: stockDatabase.getKeys, + }; +} + +export const stockDatabase: StockDatabase = makeStockDatabase(); diff --git a/server/tables.sql b/server/tables.sql index 7d641cd..015b3cd 100644 --- a/server/tables.sql +++ b/server/tables.sql @@ -82,31 +82,9 @@ CREATE TABLE stock_aggregates volume UInt64, volume_weighted_price Float64 ) -ENGINE MergeTree() +ENGINE ReplacingMergeTree() ORDER BY (symbol, tsStart) -CREATE TABLE option_aggregates -( - symbol LowCardinality(String), - expirationDate Date, - strike Float32, - type Enum('call', 'put'), - - tsStart DateTime32 CODEC(DoubleDelta(1), ZSTD), - open Float32 CODEC(Delta(2), ZSTD), - close Float32 CODEC(Delta(2), ZSTD), - low Float32 CODEC(Delta(2), ZSTD), - high Float32 CODEC(Delta(2), ZSTD), - volume UInt32 CODEC(T64), - volumeWeightedPrice Float32 CODEC(Delta(2), ZSTD) -) -ENGINE MergeTree() -ORDER BY (symbol, expirationDate, strike, type, tsStart) - -ALTER TABLE option_aggregates ADD INDEX idx_expirationDate expirationDate TYPE minmax GRANULARITY 2; -ALTER TABLE option_aggregates ADD INDEX idx_strike strike TYPE minmax GRANULARITY 2; -ALTER TABLE option_aggregates ADD INDEX idx_tsStart tsStart TYPE minmax GRANULARITY 2; - CREATE TABLE option_contract_aggregates ( symbol LowCardinality(String), @@ -125,47 +103,51 @@ CREATE TABLE option_contract_aggregates ENGINE ReplacingMergeTree() ORDER BY (symbol, expirationDate, strike, type, tsStart) -CREATE TABLE option_histories_last_day +-- For stats about the character of this stock's options given a certain distance-from-the-money and time-to-expiration: +CREATE TABLE calendar_stats_by_symbol ( - symbol LowCardinality(String), - expirationDate Date, - strike Float64, - type Enum('call', 'put'), + symbol LowCardinality(String), - tsStart DateTime32, - open Float64, - minutesToFront UInt16, - underlyingPrice Float64, - strikePercentageFromUnderlyingPrice Float64 + calendarSpanInDays UInt16, + + tsStart DateTime32 CODEC(Delta, ZSTD), -- included so as to assess the character of the stock's options within a given range of time; for example, if the stock got really hot for a few months. 
+ minutesToExpiration UInt32, + + frontMonthOpen Float32, + backMonthOpen Float32, + strikePercentageFromUnderlyingOpen Float64, + frontMonthClose Float32, + backMonthClose Float32, + strikePercentageFromUnderlyingClose Float64 ) ENGINE MergeTree() -ORDER BY (symbol, minutesToFront, strikePercentageFromUnderlyingPrice) - -INSERT INTO option_histories_last_day +PRIMARY KEY (symbol, calendarSpanInDays, tsStart) +ORDER BY (symbol, calendarSpanInDays, tsStart, minutesToExpiration); +-- Populate `calendar_stats_by_symbol` by: +-- INSERT INTO calendar_stats_by_symbol SELECT - option_aggregates.symbol as symbol, - option_aggregates.expirationDate as expirationDate, - option_aggregates.strike as strike, - option_aggregates.type as type, - option_aggregates.tsStart as tsStart, - option_aggregates.open as open, - date_diff('minute', tsStart, timestamp_add(expirationDate, INTERVAL 16 HOUR)) as minutesToFront, - stock_aggregates.open as underlyingPrice Float64, - (strike-underlyingPrice)/underlyingPrice as strikePercentageFromUnderlyingPrice Float64 -FROM ( - SELECT - symbol, - expirationDate, - strike, - type, - tsStart, - open - FROM option_aggregates - WHERE toDate(tsStart) = expirationDate -) as option_aggregates + frontMonth.symbol, + dateDiff('day', frontMonth.expirationDate, backMonth.expirationDate) as calendarSpanInDays, + frontMonth.tsStart, + dateDiff('minute', frontMonth.tsStart, addMinutes(toDateTime(frontMonth.expirationDate, 'America/New_York'), 60 * 16)) as minutesToExpiration, + frontMonth.open as frontMonthOpen, + backMonth.open as backMonthOpen, + (frontMonth.strike-stock_aggregates.open)/stock_aggregates.open*100.0 as strikePercentageFromUnderlyingOpen, + frontMonth.close as frontMonthClose, + backMonth.close as backMonthClose, + (frontMonth.strike-stock_aggregates.close)/stock_aggregates.close*100.0 as strikePercentageFromUnderlyingClose +FROM option_contract_aggregates as frontMonth INNER JOIN stock_aggregates +ON frontMonth.symbol = stock_aggregates.symbol +AND frontMonth.tsStart = stock_aggregates.tsStart +INNER JOIN option_contract_aggregates as backMonth +ON frontMonth.symbol = backMonth.symbol +AND frontMonth.strike = backMonth.strike +AND frontMonth.type = backMonth.type +AND frontMonth.tsStart = backMonth.tsStart +WHERE backMonth.expirationDate > frontMonth.expirationDate +AND frontMonth.symbol = 'AAPL' +AND calendarSpanInDays = 14 CREATE TABLE option_histories (