diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 02c7785c..d7105d15 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1 +1 @@
-* @jbonzo @mmoghaddam385 @justinpolygon
+* @justinpolygon @penelopus @davidwf-polygonio
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 313c2716..f0767d70 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -7,7 +7,7 @@ on:
branches:
- master
schedule:
- - cron: '33 12 * * 3'
+ - cron: "33 12 * * 3"
jobs:
analyze:
name: analyze
@@ -19,15 +19,15 @@ jobs:
strategy:
fail-fast: false
matrix:
- language: [ 'python' ]
+ language: ["python"]
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v2
- with:
- languages: ${{ matrix.language }}
- - name: Autobuild
- uses: github/codeql-action/autobuild@v2
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ - name: Checkout repository
+ uses: actions/checkout@v3
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: ${{ matrix.language }}
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v3
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
diff --git a/.polygon/rest.json b/.polygon/rest.json
index fe330dce..425d2c2d 100644
--- a/.polygon/rest.json
+++ b/.polygon/rest.json
@@ -63,7 +63,7 @@
},
"AggregateTimeTo": {
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-01-09",
+ "example": "2023-02-10",
"in": "path",
"name": "to",
"required": true,
@@ -101,6 +101,17 @@
"type": "string"
}
},
+ "CryptoTickersQueryParam": {
+ "description": "A case-sensitive comma separated list of tickers to get snapshots for. For example, X:BTCUSD, X:ETHBTC, and X:BOBAUSD. Empty string defaults to querying all tickers.",
+ "in": "query",
+ "name": "tickers",
+ "schema": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ }
+ },
"ForexTickerPathParam": {
"description": "The ticker symbol of the currency pair.",
"example": "C:EURUSD",
@@ -111,6 +122,17 @@
"type": "string"
}
},
+ "ForexTickersQueryParam": {
+ "description": "A case-sensitive comma separated list of tickers to get snapshots for. For example, C:EURUSD, C:GBPCAD, and C:AUDINR. Empty string defaults to querying all tickers.",
+ "in": "query",
+ "name": "tickers",
+ "schema": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ }
+ },
"GeneralTickerPathParam": {
"description": "The ticker symbol of the asset.",
"example": "AAPL",
@@ -131,7 +153,7 @@
},
"IndicesAggregateTimeFrom": {
"description": "The start of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-03-10",
+ "example": "2023-03-13",
"in": "path",
"name": "from",
"required": true,
@@ -141,7 +163,7 @@
},
"IndicesAggregateTimeTo": {
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-03-10",
+ "example": "2023-03-24",
"in": "path",
"name": "to",
"required": true,
@@ -6039,7 +6061,7 @@
},
"summary": "Exponential Moving Average (EMA)",
"tags": [
- "crpyto:aggregates"
+ "crypto:aggregates"
],
"x-polygon-entitlement-data-type": {
"description": "Aggregate data",
@@ -9407,7 +9429,7 @@
},
"summary": "Relative Strength Index (RSI)",
"tags": [
- "crpyto:aggregates"
+ "crypto:aggregates"
],
"x-polygon-entitlement-data-type": {
"description": "Aggregate data",
@@ -11003,7 +11025,7 @@
},
"summary": "Simple Moving Average (SMA)",
"tags": [
- "crpyto:aggregates"
+ "crypto:aggregates"
],
"x-polygon-entitlement-data-type": {
"description": "Aggregate data",
@@ -12698,6 +12720,10 @@
"description": "The status of Cboe Streaming Market Indices Cryptocurrency (\"CCCY\") indices trading hours.",
"type": "string"
},
+ "cgi": {
+ "description": "The status of Cboe Global Indices (\"CGI\") trading hours.",
+ "type": "string"
+ },
"dow_jones": {
"description": "The status of Dow Jones indices trading hours",
"type": "string"
@@ -13885,8 +13911,7 @@
"files_count",
"source_url",
"download_url",
- "entities",
- "acceptance_datetime"
+ "entities"
],
"type": "object",
"x-polygon-go-type": {
@@ -13956,121 +13981,125 @@
"example": {},
"schema": {
"properties": {
- "acceptance_datetime": {
- "description": "The datetime when the filing was accepted by EDGAR in EST (format: YYYYMMDDHHMMSS)",
- "type": "string"
- },
- "accession_number": {
- "description": "Filing Accession Number",
- "type": "string"
- },
- "entities": {
- "description": "Entities related to the filing (e.g. the document filers).",
- "items": {
- "description": "A filing entity (e.g. the document filer).",
- "properties": {
- "company_data": {
+ "results": {
+ "properties": {
+ "acceptance_datetime": {
+ "description": "The datetime when the filing was accepted by EDGAR in EST (format: YYYYMMDDHHMMSS)",
+ "type": "string"
+ },
+ "accession_number": {
+ "description": "Filing Accession Number",
+ "type": "string"
+ },
+ "entities": {
+ "description": "Entities related to the filing (e.g. the document filers).",
+ "items": {
+ "description": "A filing entity (e.g. the document filer).",
"properties": {
- "cik": {
- "description": "Central Index Key (CIK) Number",
- "type": "string"
- },
- "name": {
- "example": "Facebook Inc",
- "type": "string"
- },
- "sic": {
- "description": "Standard Industrial Classification (SIC)",
- "type": "string"
+ "company_data": {
+ "properties": {
+ "cik": {
+ "description": "Central Index Key (CIK) Number",
+ "type": "string"
+ },
+ "name": {
+ "example": "Facebook Inc",
+ "type": "string"
+ },
+ "sic": {
+ "description": "Standard Industrial Classification (SIC)",
+ "type": "string"
+ },
+ "ticker": {
+ "description": "Ticker",
+ "type": "string"
+ }
+ },
+ "required": [
+ "name",
+ "cik",
+ "sic"
+ ],
+ "type": "object",
+ "x-polygon-go-type": {
+ "name": "SECCompanyData",
+ "path": "github.com/polygon-io/go-lib-models/v2/globals"
+ }
},
- "ticker": {
- "description": "Ticker",
+ "relation": {
+ "description": "Relationship of this entity to the filing.",
+ "enum": [
+ "filer"
+ ],
"type": "string"
}
},
"required": [
- "name",
- "cik",
- "sic"
+ "relation"
],
"type": "object",
"x-polygon-go-type": {
- "name": "SECCompanyData",
+ "name": "SECFilingEntity",
"path": "github.com/polygon-io/go-lib-models/v2/globals"
}
},
- "relation": {
- "description": "Relationship of this entity to the filing.",
- "enum": [
- "filer"
- ],
- "type": "string"
- }
+ "type": "array"
},
- "required": [
- "relation"
- ],
- "type": "object",
- "x-polygon-go-type": {
- "name": "SECFilingEntity",
- "path": "github.com/polygon-io/go-lib-models/v2/globals"
+ "files_count": {
+ "description": "The number of files associated with the filing.",
+ "format": "int64",
+ "type": "integer"
+ },
+ "filing_date": {
+ "description": "The date when the filing was filed in YYYYMMDD format.",
+ "example": "20210101",
+ "pattern": "^[0-9]{8}$",
+ "type": "string"
+ },
+ "id": {
+ "description": "Unique identifier for the filing.",
+ "type": "string"
+ },
+ "period_of_report_date": {
+ "description": "The period of report for the filing in YYYYMMDD format.",
+ "example": "20210101",
+ "pattern": "^[0-9]{8}$",
+ "type": "string"
+ },
+ "source_url": {
+ "description": "The source URL is a link back to the upstream source for this filing\ndocument.",
+ "example": "https://www.sec.gov/Archives/edgar/data/0001326801/000132680119000037/0001326801-19-000037-index.html",
+ "format": "uri",
+ "type": "string"
+ },
+ "type": {
+ "description": "Filing Type",
+ "enum": [
+ "10-K",
+ "10-Q"
+ ],
+ "type": "string"
}
},
- "type": "array"
- },
- "files_count": {
- "description": "The number of files associated with the filing.",
- "format": "int64",
- "type": "integer"
- },
- "filing_date": {
- "description": "The date when the filing was filed in YYYYMMDD format.",
- "example": "20210101",
- "pattern": "^[0-9]{8}$",
- "type": "string"
- },
- "id": {
- "description": "Unique identifier for the filing.",
- "type": "string"
- },
- "period_of_report_date": {
- "description": "The period of report for the filing in YYYYMMDD format.",
- "example": "20210101",
- "pattern": "^[0-9]{8}$",
- "type": "string"
- },
- "source_url": {
- "description": "The source URL is a link back to the upstream source for this filing\ndocument.",
- "example": "https://www.sec.gov/Archives/edgar/data/0001326801/000132680119000037/0001326801-19-000037-index.html",
- "format": "uri",
- "type": "string"
- },
- "type": {
- "description": "Filing Type",
- "enum": [
- "10-K",
- "10-Q"
+ "required": [
+ "id",
+ "accession_number",
+ "type",
+ "filing_date",
+ "period_of_report_date",
+ "files_count",
+ "source_url",
+ "download_url",
+ "entities"
],
- "type": "string"
+ "type": "object",
+ "x-polygon-go-type": {
+ "name": "SECFiling",
+ "path": "github.com/polygon-io/go-lib-models/v2/globals"
+ }
}
},
- "required": [
- "id",
- "accession_number",
- "type",
- "filing_date",
- "period_of_report_date",
- "files_count",
- "source_url",
- "download_url",
- "entities",
- "acceptance_datetime"
- ],
- "type": "object",
- "x-polygon-go-type": {
- "name": "SECFiling",
- "path": "github.com/polygon-io/go-lib-models/v2/globals"
- }
+ "type": "object"
}
}
},
@@ -14461,6 +14490,114 @@
},
"x-polygon-draft": true
},
+ "/v1/related-companies/{ticker}": {
+ "get": {
+ "description": "Get a list of tickers related to the queried ticker based on News and Returns data.",
+ "operationId": "GetRelatedCompanies",
+ "parameters": [
+ {
+ "description": "The ticker symbol to search.",
+ "example": "AAPL",
+ "in": "path",
+ "name": "ticker",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "content": {
+ "application/json": {
+ "example": {
+ "request_id": "31d59dda-80e5-4721-8496-d0d32a654afe",
+ "results": [
+ {
+ "ticker": "MSFT"
+ },
+ {
+ "ticker": "GOOGL"
+ },
+ {
+ "ticker": "AMZN"
+ },
+ {
+ "ticker": "FB"
+ },
+ {
+ "ticker": "TSLA"
+ },
+ {
+ "ticker": "NVDA"
+ },
+ {
+ "ticker": "INTC"
+ },
+ {
+ "ticker": "ADBE"
+ },
+ {
+ "ticker": "NFLX"
+ },
+ {
+ "ticker": "PYPL"
+ }
+ ],
+ "status": "OK",
+ "stock_symbol": "AAPL"
+ },
+ "schema": {
+ "properties": {
+ "request_id": {
+ "description": "A request id assigned by the server.",
+ "type": "string"
+ },
+ "results": {
+ "items": {
+ "description": "The tickers related to the requested ticker.",
+ "properties": {
+ "ticker": {
+ "description": "A ticker related to the requested ticker.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "ticker"
+ ],
+ "type": "object"
+ },
+ "type": "array"
+ },
+ "status": {
+ "description": "The status of this request's response.",
+ "type": "string"
+ },
+ "ticker": {
+ "description": "The ticker being queried.",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ }
+ }
+ },
+ "description": "Related Companies."
+ },
+ "401": {
+ "description": "Unauthorized - Check our API Key and account status"
+ }
+ },
+ "summary": "Related Companies",
+ "tags": [
+ "reference:related:companies"
+ ],
+ "x-polygon-entitlement-data-type": {
+ "description": "Reference data",
+ "name": "reference"
+ }
+ }
+ },
"/v1/summaries": {
"get": {
"description": "Get everything needed to visualize the tick-by-tick movement of a list of tickers.",
@@ -14507,7 +14644,7 @@
"volume": 37
},
"ticker": "NCLH",
- "type": "stock"
+ "type": "stocks"
},
{
"last_updated": 1679597116344223500,
@@ -14758,6 +14895,16 @@
"format": "double",
"type": "number"
},
+ "regular_trading_change": {
+ "description": "Today's change in regular trading hours, difference between current price and previous trading day's close, otherwise difference between today's close and previous day's close.",
+ "format": "double",
+ "type": "number"
+ },
+ "regular_trading_change_percent": {
+ "description": "Today's regular trading change as a percentage.",
+ "format": "double",
+ "type": "number"
+ },
"volume": {
"description": "The trading volume for the asset for the day.",
"format": "double",
@@ -14794,15 +14941,7 @@
}
},
"required": [
- "ticker",
- "name",
- "price",
- "branding",
- "market_status",
- "type",
- "session",
- "options",
- "last_updated"
+ "ticker"
],
"type": "object",
"x-polygon-go-type": {
@@ -15685,7 +15824,7 @@
},
{
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-01-09",
+ "example": "2023-02-10",
"in": "path",
"name": "to",
"required": true,
@@ -16138,7 +16277,7 @@
},
{
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-01-09",
+ "example": "2023-02-10",
"in": "path",
"name": "to",
"required": true,
@@ -16534,7 +16673,7 @@
},
{
"description": "The start of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-03-10",
+ "example": "2023-03-13",
"in": "path",
"name": "from",
"required": true,
@@ -16544,7 +16683,7 @@
},
{
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-03-10",
+ "example": "2023-03-24",
"in": "path",
"name": "to",
"required": true,
@@ -16957,7 +17096,7 @@
},
{
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-01-09",
+ "example": "2023-02-10",
"in": "path",
"name": "to",
"required": true,
@@ -17405,7 +17544,7 @@
},
{
"description": "The end of the aggregate time window. Either a date with the format YYYY-MM-DD or a millisecond timestamp.",
- "example": "2023-01-09",
+ "example": "2023-02-10",
"in": "path",
"name": "to",
"required": true,
@@ -18175,7 +18314,7 @@
"operationId": "ListNews",
"parameters": [
{
- "description": "Return results that contain this ticker.",
+ "description": "Specify a case-sensitive ticker symbol. For example, AAPL represents Apple Inc.",
"in": "query",
"name": "ticker",
"schema": {
@@ -18359,52 +18498,35 @@
"request_id": "831afdb0b8078549fed053476984947a",
"results": [
{
- "amp_url": "https://amp.benzinga.com/amp/content/20784086",
- "article_url": "https://www.benzinga.com/markets/cryptocurrency/21/04/20784086/cathie-wood-adds-more-coinbase-skillz-trims-square",
- "author": "Rachit Vats",
- "description": "
Cathie Wood-led Ark Investment Management on Friday snapped up another 221,167 shares of the cryptocurrency exchange Coinbase Global Inc (NASDAQ COIN) worth about $64.49 million on the stock’s Friday’s dip and also its fourth-straight loss.
\n
The investment firm’s Ark Innovation ETF (NYSE ARKK) bought the shares of the company that closed 0.63% lower at $291.60 on Friday, giving the cryptocurrency exchange a market cap of $58.09 billion. Coinbase’s market cap has dropped from $85.8 billion on its blockbuster listing earlier this month.
\n
The New York-based company also added another 3,873 shares of the mobile gaming company Skillz Inc (NYSE SKLZ), just a day after snapping 1.2 million shares of the stock.
\n
ARKK bought the shares of the company which closed ...
",
- "id": "nJsSJJdwViHZcw5367rZi7_qkXLfMzacXBfpv-vD9UA",
- "image_url": "https://cdn2.benzinga.com/files/imagecache/og_image_social_share_1200x630/images/story/2012/andre-francois-mckenzie-auhr4gcqcce-unsplash.jpg?width=720",
+ "amp_url": "https://m.uk.investing.com/news/stock-market-news/markets-are-underestimating-fed-cuts-ubs-3559968?ampMode=1",
+ "article_url": "https://uk.investing.com/news/stock-market-news/markets-are-underestimating-fed-cuts-ubs-3559968",
+ "author": "Sam Boughedda",
+ "description": "UBS analysts warn that markets are underestimating the extent of future interest rate cuts by the Federal Reserve, as the weakening economy is likely to justify more cuts than currently anticipated.",
+ "id": "8ec638777ca03b553ae516761c2a22ba2fdd2f37befae3ab6fdab74e9e5193eb",
+ "image_url": "https://i-invdn-com.investing.com/news/LYNXNPEC4I0AL_L.jpg",
+ "insights": [
+ {
+ "sentiment": "positive",
+ "sentiment_reasoning": "UBS analysts are providing a bullish outlook on the extent of future Federal Reserve rate cuts, suggesting that markets are underestimating the number of cuts that will occur.",
+ "ticker": "UBS"
+ }
+ ],
"keywords": [
- "Sector ETFs",
- "Penny Stocks",
- "Cryptocurrency",
- "Small Cap",
- "Markets",
- "Trading Ideas",
- "ETFs"
+ "Federal Reserve",
+ "interest rates",
+ "economic data"
],
- "published_utc": "2021-04-26T02:33:17Z",
+ "published_utc": "2024-06-24T18:33:53Z",
"publisher": {
- "favicon_url": "https://s3.polygon.io/public/public/assets/news/favicons/benzinga.ico",
- "homepage_url": "https://www.benzinga.com/",
- "logo_url": "https://s3.polygon.io/public/public/assets/news/logos/benzinga.svg",
- "name": "Benzinga"
+ "favicon_url": "https://s3.polygon.io/public/assets/news/favicons/investing.ico",
+ "homepage_url": "https://www.investing.com/",
+ "logo_url": "https://s3.polygon.io/public/assets/news/logos/investing.png",
+ "name": "Investing.com"
},
"tickers": [
- "DOCU",
- "DDD",
- "NIU",
- "ARKF",
- "NVDA",
- "SKLZ",
- "PCAR",
- "MASS",
- "PSTI",
- "SPFR",
- "TREE",
- "PHR",
- "IRDM",
- "BEAM",
- "ARKW",
- "ARKK",
- "ARKG",
- "PSTG",
- "SQ",
- "IONS",
- "SYRS"
+ "UBS"
],
- "title": "Cathie Wood Adds More Coinbase, Skillz, Trims Square"
+ "title": "Markets are underestimating Fed cuts: UBS By Investing.com - Investing.com UK"
}
],
"status": "OK"
@@ -18450,6 +18572,37 @@
"description": "The article's image URL.",
"type": "string"
},
+ "insights": {
+ "description": "The insights related to the article.",
+ "items": {
+ "properties": {
+ "sentiment": {
+ "description": "The sentiment of the insight.",
+ "enum": [
+ "positive",
+ "neutral",
+ "negative"
+ ],
+ "type": "string"
+ },
+ "sentiment_reasoning": {
+ "description": "The reasoning behind the sentiment.",
+ "type": "string"
+ },
+ "ticker": {
+ "description": "The ticker symbol associated with the insight.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "ticker",
+ "sentiment",
+ "sentiment_reasoning"
+ ],
+ "type": "object"
+ },
+ "type": "array"
+ },
"keywords": {
"description": "The keywords associated with the article (which will vary depending on\nthe publishing source).",
"items": {
@@ -18530,7 +18683,7 @@
}
},
"text/csv": {
- "example": "id,publisher_name,publisher_homepage_url,publisher_logo_url,publisher_favicon_url,title,author,published_utc,article_url,tickers,amp_url,image_url,description,keywords\nnJsSJJdwViHZcw5367rZi7_qkXLfMzacXBfpv-vD9UA,Benzinga,https://www.benzinga.com/,https://s3.polygon.io/public/public/assets/news/logos/benzinga.svg,https://s3.polygon.io/public/public/assets/news/favicons/benzinga.ico,\"Cathie Wood Adds More Coinbase, Skillz, Trims Square\",Rachit Vats,2021-04-26T02:33:17Z,https://www.benzinga.com/markets/cryptocurrency/21/04/20784086/cathie-wood-adds-more-coinbase-skillz-trims-square,\"DOCU,DDD,NIU,ARKF,NVDA,SKLZ,PCAR,MASS,PSTI,SPFR,TREE,PHR,IRDM,BEAM,ARKW,ARKK,ARKG,PSTG,SQ,IONS,SYRS\",https://amp.benzinga.com/amp/content/20784086,https://cdn2.benzinga.com/files/imagecache/og_image_social_share_1200x630/images/story/2012/andre-francois-mckenzie-auhr4gcqcce-unsplash.jpg?width=720,\"
Cathie Wood-led Ark Investment Management on Friday snapped up another 221,167 shares of the cryptocurrency exchange Coinbase Global Inc (NASDAQ COIN) worth about $64.49 million on the stock’s Friday’s dip and also its fourth-straight loss.
The investment firm’s Ark Innovation ETF (NYSE ARKK) bought the shares of the company that closed 0.63% lower at $291.60 on Friday, giving the cryptocurrency exchange a market cap of $58.09 billion. Coinbase’s market cap has dropped from $85.8 billion on its blockbuster listing earlier this month.
The New York-based company also added another 3,873 shares of the mobile gaming company Skillz Inc (NYSE SKLZ), just a day after snapping 1.2 million shares of the stock.
ARKK bought the shares of the company which closed ...
\",\"Sector ETFs,Penny Stocks,Cryptocurrency,Small Cap,Markets,Trading Ideas,ETFs\"\n",
+ "example": "id,publisher_name,publisher_homepage_url,publisher_logo_url,publisher_favicon_url,title,author,published_utc,article_url,ticker,amp_url,image_url,description,keywords,sentiment,sentiment_reasoning\n8ec638777ca03b553ae516761c2a22ba2fdd2f37befae3ab6fdab74e9e5193eb,Investing.com,https://www.investing.com/,https://s3.polygon.io/public/assets/news/logos/investing.png,https://s3.polygon.io/public/assets/news/favicons/investing.ico,Markets are underestimating Fed cuts: UBS By Investing.com - Investing.com UK,Sam Boughedda,1719254033000000000,https://uk.investing.com/news/stock-market-news/markets-are-underestimating-fed-cuts-ubs-3559968,UBS,https://m.uk.investing.com/news/stock-market-news/markets-are-underestimating-fed-cuts-ubs-3559968?ampMode=1,https://i-invdn-com.investing.com/news/LYNXNPEC4I0AL_L.jpg,\"UBS analysts warn that markets are underestimating the extent of future interest rate cuts by the Federal Reserve, as the weakening economy is likely to justify more cuts than currently anticipated.\",\"Federal Reserve,interest rates,economic data\",positive,\"UBS analysts are providing a bullish outlook on the extent of future Federal Reserve rate cuts, suggesting that markets are underestimating the number of cuts that will occur.\"\n",
"schema": {
"type": "string"
}
@@ -18573,7 +18726,7 @@
"description": "Get the current minute, day, and previous day\u2019s aggregate, as well as the last trade and quote for all traded cryptocurrency symbols.\n \n \nNote: Snapshot data is cleared at 12am EST and gets populated as data is received from the exchanges. This can happen as early as 4am EST.\n",
"parameters": [
{
- "description": "A case-sensitive comma separated list of tickers to get snapshots for. For example, AAPL,TSLA,GOOG. Empty string defaults to querying all tickers.",
+ "description": "A case-sensitive comma separated list of tickers to get snapshots for. For example, X:BTCUSD, X:ETHBTC, and X:BOBAUSD. Empty string defaults to querying all tickers.",
"in": "query",
"name": "tickers",
"schema": {
@@ -19457,6 +19610,9 @@
"tags": [
"crypto:snapshot"
],
+ "x-polygon-deprecation": {
+ "date": 1719838800000
+ },
"x-polygon-entitlement-allowed-timeframes": [
{
"description": "Real Time Data",
@@ -19834,7 +19990,7 @@
"description": "Get the current minute, day, and previous day\u2019s aggregate, as well as the last trade and quote for all traded forex symbols.\n \n \nNote: Snapshot data is cleared at 12am EST and gets populated as data is received from the exchanges. This can happen as early as 4am EST.\n",
"parameters": [
{
- "description": "A case-sensitive comma separated list of tickers to get snapshots for. For example, AAPL,TSLA,GOOG. Empty string defaults to querying all tickers.",
+ "description": "A case-sensitive comma separated list of tickers to get snapshots for. For example, C:EURUSD, C:GBPCAD, and C:AUDINR. Empty string defaults to querying all tickers.",
"in": "query",
"name": "tickers",
"schema": {
@@ -21604,7 +21760,7 @@
},
"/v2/snapshot/locale/us/markets/stocks/{direction}": {
"get": {
- "description": "Get the most up-to-date market data for the current top 20 gainers or losers of the day in the stocks/equities markets.\n \n \nTop gainers are those tickers whose price has increased by the highest percentage since the previous day's close.\nTop losers are those tickers whose price has decreased by the highest percentage since the previous day's close.\n \n \nNote: Snapshot data is cleared at 3:30am EST and gets populated as data is received from the exchanges.\n",
+ "description": "Get the most up-to-date market data for the current top 20 gainers or losers of the day in the stocks/equities markets.\n \n \nTop gainers are those tickers whose price has increased by the highest percentage since the previous day's close.\nTop losers are those tickers whose price has decreased by the highest percentage since the previous day's close.\nThis output will only include tickers with a trading volume of 10,000 or more.\n \n \nNote: Snapshot data is cleared at 3:30am EST and gets populated as data is received from the exchanges.\n",
"parameters": [
{
"description": "The direction of the snapshot results to return.\n",
@@ -22724,11 +22880,11 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 50000.",
+ "description": "Limit the number of results returned, default is 1000 and max is 50000.",
"in": "query",
"name": "limit",
"schema": {
- "default": 10,
+ "default": 1000,
"example": 10,
"maximum": 50000,
"minimum": 1,
@@ -22853,6 +23009,8 @@
},
"x-polygon-paginate": {
"limit": {
+ "default": 1000,
+ "example": 10,
"max": 50000
},
"order": {
@@ -22949,11 +23107,11 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 50000.",
+ "description": "Limit the number of results returned, default is 1000 and max is 50000.",
"in": "query",
"name": "limit",
"schema": {
- "default": 10,
+ "default": 1000,
"example": 10,
"maximum": 50000,
"minimum": 1,
@@ -23100,6 +23258,8 @@
},
"x-polygon-paginate": {
"limit": {
+ "default": 1000,
+ "example": 10,
"max": 50000
},
"order": {
@@ -23189,11 +23349,11 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 50000.",
+ "description": "Limit the number of results returned, default is 1000 and max is 50000.",
"in": "query",
"name": "limit",
"schema": {
- "default": 10,
+ "default": 1000,
"example": 10,
"maximum": 50000,
"minimum": 1,
@@ -23401,6 +23561,8 @@
},
"x-polygon-paginate": {
"limit": {
+ "default": 1000,
+ "example": 10,
"max": 50000
},
"order": {
@@ -24173,7 +24335,7 @@
"operationId": "ListDividends",
"parameters": [
{
- "description": "Return the dividends that contain this ticker.",
+ "description": "Specify a case-sensitive ticker symbol. For example, AAPL represents Apple Inc.",
"in": "query",
"name": "ticker",
"schema": {
@@ -24553,6 +24715,7 @@
"dividend_type": "CD",
"ex_dividend_date": "2021-11-05",
"frequency": 4,
+ "id": "E8e3c4f794613e9205e2f178a36c53fcc57cdabb55e1988c87b33f9e52e221444",
"pay_date": "2021-11-11",
"record_date": "2021-11-08",
"ticker": "AAPL"
@@ -24563,6 +24726,7 @@
"dividend_type": "CD",
"ex_dividend_date": "2021-08-06",
"frequency": 4,
+ "id": "E6436c5475706773f03490acf0b63fdb90b2c72bfeed329a6eb4afc080acd80ae",
"pay_date": "2021-08-12",
"record_date": "2021-08-09",
"ticker": "AAPL"
@@ -24651,6 +24815,10 @@
]
}
},
+ "id": {
+ "description": "The unique identifier of the dividend.",
+ "type": "string"
+ },
"pay_date": {
"description": "The date that the dividend is paid out.",
"type": "string"
@@ -24677,7 +24845,8 @@
"ex_dividend_date",
"frequency",
"cash_amount",
- "dividend_type"
+ "dividend_type",
+ "id"
],
"type": "object",
"x-polygon-go-struct-tags": {
@@ -25366,7 +25535,7 @@
"parameters": [
{
"description": "Query for a contract by options ticker. You can learn more about the structure of options tickers [here](https://polygon.io/blog/how-to-read-a-stock-options-ticker/).",
- "example": "O:EVRI240119C00002500",
+ "example": "O:SPY251219C00650000",
"in": "path",
"name": "options_ticker",
"required": true,
@@ -25686,12 +25855,14 @@
"results": [
{
"execution_date": "2020-08-31",
+ "id": "E36416cce743c3964c5da63e1ef1626c0aece30fb47302eea5a49c0055c04e8d0",
"split_from": 1,
"split_to": 4,
"ticker": "AAPL"
},
{
"execution_date": "2005-02-28",
+ "id": "E90a77bdf742661741ed7c8fc086415f0457c2816c45899d73aaa88bdc8ff6025",
"split_from": 1,
"split_to": 2,
"ticker": "AAPL"
@@ -25715,6 +25886,10 @@
"description": "The execution date of the stock split. On this date the stock split was applied.",
"type": "string"
},
+ "id": {
+ "description": "The unique identifier for this stock split.",
+ "type": "string"
+ },
"split_from": {
"description": "The second number in the split ratio.\n\nFor example: In a 2-for-1 split, split_from would be 1.",
"format": "float",
@@ -25732,7 +25907,10 @@
},
"required": [
"split_from",
- "split_to"
+ "split_to",
+ "id",
+ "ticker",
+ "execution_date"
],
"type": "object"
},
@@ -26444,6 +26622,10 @@
"description": "The first line of the company's headquarters address.",
"type": "string"
},
+ "address2": {
+ "description": "The second line of the company's headquarters address, if applicable.",
+ "type": "string"
+ },
"city": {
"description": "The city of the company's headquarters address.",
"type": "string"
@@ -26608,7 +26790,7 @@
}
},
"text/csv": {
- "example": "ticker,name,market,locale,primary_exchange,type,active,currency_name,cik,composite_figi,share_class_figi,share_class_shares_outstanding,weighted_shares_outstanding,round_lot,market_cap,phone_number,address1,city,state,postal_code,sic_code,sic_description,ticker_root,total_employees,list_date,homepage_url,description,branding/logo_url,branding/icon_url\nAAPL,Apple Inc.,stocks,us,XNAS,CS,true,usd,0000320193,BBG000B9XRY4,BBG001S5N8V8,16406400000,16334371000,100,2771126040150,(408) 996-1010,One Apple Park Way,Cupertino,CA,95014,3571,ELECTRONIC COMPUTERS,AAPL,154000,1980-12-12,https://www.apple.com,\"Apple designs a wide variety of consumer electronic devices, including smartphones (iPhone), tablets (iPad), PCs (Mac), smartwatches (Apple Watch), AirPods, and TV boxes (Apple TV), among others. The iPhone makes up the majority of Apple's total revenue. In addition, Apple offers its customers a variety of services such as Apple Music, iCloud, Apple Care, Apple TV+, Apple Arcade, Apple Card, and Apple Pay, among others. Apple's products run internally developed software and semiconductors, and the firm is well known for its integration of hardware, software and services. Apple's products are distributed online as well as through company-owned stores and third-party retailers. The company generates roughly 40% of its revenue from the Americas, with the remainder earned internationally.\",https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_logo.svg,https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_icon.png\n",
+ "example": "ticker,name,market,locale,primary_exchange,type,active,currency_name,cik,composite_figi,share_class_figi,share_class_shares_outstanding,weighted_shares_outstanding,round_lot,market_cap,phone_number,address1,address2,city,state,postal_code,sic_code,sic_description,ticker_root,total_employees,list_date,homepage_url,description,branding/logo_url,branding/icon_url\nAAPL,Apple Inc.,stocks,us,XNAS,CS,true,usd,0000320193,BBG000B9XRY4,BBG001S5N8V8,16406400000,16334371000,100,2771126040150,(408) 996-1010,One Apple Park Way,,Cupertino,CA,95014,3571,ELECTRONIC COMPUTERS,AAPL,154000,1980-12-12,https://www.apple.com,\"Apple designs a wide variety of consumer electronic devices, including smartphones (iPhone), tablets (iPad), PCs (Mac), smartwatches (Apple Watch), AirPods, and TV boxes (Apple TV), among others. The iPhone makes up the majority of Apple's total revenue. In addition, Apple offers its customers a variety of services such as Apple Music, iCloud, Apple Care, Apple TV+, Apple Arcade, Apple Card, and Apple Pay, among others. Apple's products run internally developed software and semiconductors, and the firm is well known for its integration of hardware, software and services. Apple's products are distributed online as well as through company-owned stores and third-party retailers. The company generates roughly 40% of its revenue from the Americas, with the remainder earned internationally.\",https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_logo.svg,https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_icon.png\n",
"schema": {
"type": "string"
}
@@ -27038,8 +27220,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ask",
"bid",
"last_updated",
@@ -27113,9 +27293,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
- "participant_timestamp",
"price",
"size"
],
@@ -27203,6 +27380,16 @@
"format": "double",
"type": "number"
},
+ "regular_trading_change": {
+ "description": "Today's change in regular trading hours, difference between current price and previous trading day's close, otherwise difference between today's close and previous day's close.",
+ "format": "double",
+ "type": "number"
+ },
+ "regular_trading_change_percent": {
+ "description": "Today's regular trading change as a percentage.",
+ "format": "double",
+ "type": "number"
+ },
"volume": {
"description": "The trading volume for the asset for the day.",
"format": "double",
@@ -27279,8 +27466,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ticker",
"change_to_break_even"
],
@@ -27594,9 +27779,7 @@
}
},
"required": [
- "ticker",
- "timeframe",
- "last_updated"
+ "ticker"
],
"type": "object",
"x-polygon-go-type": {
@@ -27855,7 +28038,8 @@
"bid": 120.28,
"bid_size": 8,
"last_updated": 1605195918507251700,
- "midpoint": 120.29
+ "midpoint": 120.29,
+ "timeframe": "REAL-TIME"
},
"last_trade": {
"conditions": [
@@ -27956,7 +28140,6 @@
}
},
"required": [
- "last_updated",
"open",
"high",
"low",
@@ -28130,8 +28313,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ask",
"ask_size",
"bid_size",
@@ -28184,7 +28365,6 @@
}
},
"required": [
- "timeframe",
"exchange",
"price",
"sip_timestamp",
@@ -28241,8 +28421,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ticker",
"change_to_break_even"
],
@@ -28257,7 +28435,6 @@
"last_quote",
"underlying_asset",
"details",
- "cha",
"break_even_price",
"open_interest"
],
@@ -28492,7 +28669,6 @@
}
},
"required": [
- "last_updated",
"open",
"high",
"low",
@@ -28666,8 +28842,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ask",
"ask_size",
"bid_size",
@@ -28720,7 +28894,6 @@
}
},
"required": [
- "timeframe",
"exchange",
"price",
"sip_timestamp",
@@ -28777,8 +28950,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ticker",
"change_to_break_even"
],
@@ -28793,7 +28964,6 @@
"last_quote",
"underlying_asset",
"details",
- "cha",
"break_even_price",
"open_interest"
],
@@ -28921,11 +29091,11 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 50000.",
+ "description": "Limit the number of results returned, default is 1000 and max is 50000.",
"in": "query",
"name": "limit",
"schema": {
- "default": 10,
+ "default": 1000,
"example": 10,
"maximum": 50000,
"minimum": 1,
@@ -29071,6 +29241,8 @@
},
"x-polygon-paginate": {
"limit": {
+ "default": 1000,
+ "example": 10,
"max": 50000
},
"order": {
@@ -29167,11 +29339,11 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 50000.",
+ "description": "Limit the number of results returned, default is 1000 and max is 50000.",
"in": "query",
"name": "limit",
"schema": {
- "default": 10,
+ "default": 1000,
"example": 10,
"maximum": 50000,
"minimum": 1,
@@ -29323,6 +29495,8 @@
},
"x-polygon-paginate": {
"limit": {
+ "default": 1000,
+ "example": 10,
"max": 50000
},
"order": {
@@ -29412,11 +29586,11 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 50000.",
+ "description": "Limit the number of results returned, default is 1000 and max is 50000.",
"in": "query",
"name": "limit",
"schema": {
- "default": 10,
+ "default": 1000,
"example": 10,
"maximum": 50000,
"minimum": 1,
@@ -29612,6 +29786,8 @@
},
"x-polygon-paginate": {
"limit": {
+ "default": 1000,
+ "example": 10,
"max": 50000
},
"order": {
@@ -30321,81 +30497,98 @@
}
}
},
- "/vX/reference/tickers/taxonomies": {
+ "/vX/reference/ipos": {
"get": {
- "description": "Retrieve taxonomy classifications for one or more tickers.",
- "operationId": "ListTickerTaxonomyClassifications",
+ "description": "The IPOs API provides access to detailed information about Initial Public Offerings (IPOs), including both upcoming and historical events. With this API, you can query for a comprehensive list of IPOs, along with key details such as the issuer name, ticker symbol, ISIN, IPO date, number of shares offered, expected price range, and final offering price. You can filter the results by status to focus on new, rumors, pending, historical, and more.",
+ "operationId": "ListIPOs",
"parameters": [
{
+ "description": "Specify a case-sensitive ticker symbol. For example, AAPL represents Apple Inc.",
"in": "query",
"name": "ticker",
"schema": {
"type": "string"
- },
- "x-polygon-filter-field": {
- "anyOf": {
- "description": "Comma separated list of tickers, up to a maximum of 250. If no tickers are passed then all results will be returned in a paginated manner.\n\nWarning: The maximum number of characters allowed in a URL are subject to your technology stack.\n",
- "enabled": true,
- "example": "NCLH,O:SPY250321C00380000,C:EURUSD,X:BTCUSD,I:SPX"
- },
- "range": true,
+ }
+ },
+ {
+ "description": "Specify a us_code. This is a unique nine-character alphanumeric code that identifies a North American financial security for the purposes of facilitating clearing and settlement of trades.",
+ "in": "query",
+ "name": "us_code",
+ "schema": {
"type": "string"
}
},
{
- "description": "Filter by taxonomy category.",
+ "description": "Specify an International Securities Identification Number (ISIN). This is a unique twelve-digit code that is assigned to every security issuance in the world.",
"in": "query",
- "name": "category",
+ "name": "isin",
"schema": {
"type": "string"
}
},
{
- "description": "Filter by taxonomy tag. Each category has a set of associated tags.",
+ "description": "Specify a listing date. This is the first trading date for the newly listed entity.",
"in": "query",
- "name": "tag",
+ "name": "listing_date",
"schema": {
+ "format": "date",
+ "type": "string"
+ },
+ "x-polygon-filter-field": {
+ "range": true,
"type": "string"
}
},
{
- "description": "Range by ticker.",
+ "description": "Specify an IPO status.",
"in": "query",
- "name": "ticker.gte",
+ "name": "ipo_status",
"schema": {
+ "enum": [
+ "direct_listing_process",
+ "history",
+ "new",
+ "pending",
+ "postponed",
+ "rumor",
+ "withdrawn"
+ ],
"type": "string"
}
},
{
- "description": "Range by ticker.",
+ "description": "Range by listing_date.",
"in": "query",
- "name": "ticker.gt",
+ "name": "listing_date.gte",
"schema": {
+ "format": "date",
"type": "string"
}
},
{
- "description": "Range by ticker.",
+ "description": "Range by listing_date.",
"in": "query",
- "name": "ticker.lte",
+ "name": "listing_date.gt",
"schema": {
+ "format": "date",
"type": "string"
}
},
{
- "description": "Range by ticker.",
+ "description": "Range by listing_date.",
"in": "query",
- "name": "ticker.lt",
+ "name": "listing_date.lte",
"schema": {
+ "format": "date",
"type": "string"
}
},
{
- "description": "Comma separated list of tickers, up to a maximum of 250. If no tickers are passed then all results will be returned in a paginated manner.\n\nWarning: The maximum number of characters allowed in a URL are subject to your technology stack.\n",
- "example": "NCLH,O:SPY250321C00380000,C:EURUSD,X:BTCUSD,I:SPX",
+ "description": "Range by listing_date.",
"in": "query",
- "name": "ticker.any_of",
+ "name": "listing_date.lt",
"schema": {
+ "format": "date",
"type": "string"
}
},
@@ -30404,6 +30597,7 @@
"in": "query",
"name": "order",
"schema": {
+ "default": "desc",
"enum": [
"asc",
"desc"
@@ -30413,13 +30607,13 @@
}
},
{
- "description": "Limit the number of results returned, default is 10 and max is 250.",
+ "description": "Limit the number of results returned, default is 10 and max is 1000.",
"in": "query",
"name": "limit",
"schema": {
"default": 10,
"example": 10,
- "maximum": 250,
+ "maximum": 1000,
"minimum": 1,
"type": "integer"
}
@@ -30429,11 +30623,29 @@
"in": "query",
"name": "sort",
"schema": {
- "default": "ticker",
+ "default": "listing_date",
"enum": [
- "ticker"
+ "listing_date",
+ "ticker",
+ "last_updated",
+ "security_type",
+ "issuer_name",
+ "currency_code",
+ "isin",
+ "us_code",
+ "final_issue_price",
+ "min_shares_offered",
+ "max_shares_offered",
+ "lowest_offer_price",
+ "highest_offer_price",
+ "total_offer_size",
+ "shares_outstanding",
+ "primary_exchange",
+ "lot_size",
+ "security_description",
+ "ipo_status"
],
- "example": "ticker",
+ "example": "listing_date",
"type": "string"
}
}
@@ -30443,29 +30655,353 @@
"content": {
"application/json": {
"example": {
- "request_id": "31d59dda-80e5-4721-8496-d0d32a654afe",
+ "next_url": "https://api.polygon.io/vX/reference/ipos?cursor=YWN0aXZlPXRydWUmZGF0ZT0yMDIxLTA0LTI1JmxpbWl0PTEmb3JkZXI9YXNjJnBhZ2VfbWFya2VyPUElN0M5YWRjMjY0ZTgyM2E1ZjBiOGUyNDc5YmZiOGE1YmYwNDVkYzU0YjgwMDcyMWE2YmI1ZjBjMjQwMjU4MjFmNGZiJnNvcnQ9dGlja2Vy",
+ "request_id": "6a7e466379af0a71039d60cc78e72282",
"results": [
{
- "category": "revenue_streams",
- "reason": "Company recognizes revenue from the sales of consumer electronics such as the iPhone and iPad.",
- "relevance": 0.99,
- "tag": "physical_product_sales_electronics",
- "ticker": "AAPL"
+ "currency_code": "USD",
+ "final_issue_price": 17,
+ "highest_offer_price": 17,
+ "ipo_status": "history",
+ "isin": "US75383L1026",
+ "issue_end_date": "2024-06-06",
+ "issue_start_date": "2024-06-01",
+ "issuer_name": "Rapport Therapeutics Inc.",
+ "last_updated": "2024-06-27",
+ "listing_date": "2024-06-07",
+ "lot_size": 100,
+ "lowest_offer_price": 17,
+ "max_shares_offered": 8000000,
+ "min_shares_offered": 1000000,
+ "primary_exchange": "XNAS",
+ "security_description": "Ordinary Shares",
+ "security_type": "CS",
+ "shares_outstanding": 35376457,
+ "ticker": "RAPP",
+ "total_offer_size": 136000000,
+ "us_code": "75383L102"
+ }
+ ],
+ "status": "OK"
+ },
+ "schema": {
+ "properties": {
+ "next_url": {
+ "description": "If present, this value can be used to fetch the next page of data.",
+ "type": "string"
+ },
+ "request_id": {
+ "description": "A request id assigned by the server.",
+ "type": "string"
},
+ "results": {
+ "description": "An array of results containing the requested data.",
+ "items": {
+ "properties": {
+ "currency_code": {
+ "description": "Underlying currency of the security.",
+ "example": "USD",
+ "type": "string"
+ },
+ "final_issue_price": {
+ "description": "The price set by the company and its underwriters before the IPO goes live.",
+ "example": 14.5,
+ "format": "float",
+ "type": "number"
+ },
+ "highest_offer_price": {
+ "description": "The highest price within the IPO price range that the company might use to price the shares.",
+ "example": 20,
+ "format": "float",
+ "type": "number"
+ },
+ "ipo_status": {
+ "description": "The status of the IPO event. IPO events start out as status \"rumor\" or \"pending\". On listing day, the status changes to \"new\". After the listing day, the status changes to \"history\".\n\nThe status \"direct_listing_process\" corresponds to a type of offering where, instead of going through all the IPO processes, the company decides to list its shares directly on an exchange, without using an investment bank or other intermediaries. This is called a direct listing, direct placement, or direct public offering (DPO).",
+ "enum": [
+ "direct_listing_process",
+ "history",
+ "new",
+ "pending",
+ "postponed",
+ "rumor",
+ "withdrawn"
+ ],
+ "example": "history",
+ "type": "string"
+ },
+ "isin": {
+ "description": "International Securities Identification Number. This is a unique twelve-digit code that is assigned to every security issuance in the world.",
+ "example": "US0378331005",
+ "type": "string"
+ },
+ "issuer_name": {
+ "description": "Name of issuer.",
+ "example": "Apple Inc.",
+ "type": "string"
+ },
+ "last_updated": {
+ "description": "The date when the IPO event was last modified.",
+ "example": "2023-01-02",
+ "format": "date",
+ "type": "string"
+ },
+ "listing_date": {
+ "description": "First trading date for the newly listed entity.",
+ "example": "2023-02-01",
+ "format": "date",
+ "type": "string"
+ },
+ "lot_size": {
+ "description": "The minimum number of shares that can be bought or sold in a single transaction.",
+ "example": 100,
+ "type": "number"
+ },
+ "lowest_offer_price": {
+ "description": "The lowest price within the IPO price range that the company is willing to offer its shares to investors.",
+ "example": 10,
+ "format": "float",
+ "type": "number"
+ },
+ "max_shares_offered": {
+ "description": "The upper limit of the shares that the company is offering to investors.",
+ "example": 1000,
+ "type": "number"
+ },
+ "min_shares_offered": {
+ "description": "The lower limit of shares that the company is willing to sell in the IPO.",
+ "example": 1000,
+ "type": "number"
+ },
+ "primary_exchange": {
+ "description": "Market Identifier Code (MIC) of the primary exchange where the security is listed. The Market Identifier Code (MIC) (ISO 10383) is a unique identification code used to identify securities trading exchanges, regulated and non-regulated trading markets.",
+ "example": "XNAS",
+ "type": "string"
+ },
+ "security_description": {
+ "description": "Description of the security.",
+ "example": "Ordinary Shares - Class A",
+ "type": "string"
+ },
+ "security_type": {
+ "description": "The classification of the stock. For example, \"CS\" stands for Common Stock.",
+ "example": "CS",
+ "type": "string"
+ },
+ "shares_outstanding": {
+ "description": "The total number of shares that the company has issued and are held by investors.",
+ "example": 1000000,
+ "type": "number"
+ },
+ "ticker": {
+ "description": "The ticker symbol of the IPO event.",
+ "example": "AAPL",
+ "type": "string"
+ },
+ "total_offer_size": {
+ "description": "The total amount raised by the company for IPO.",
+ "example": 1000000,
+ "format": "float",
+ "type": "number"
+ },
+ "us_code": {
+ "description": "This is a unique nine-character alphanumeric code that identifies a North American financial security for the purposes of facilitating clearing and settlement of trades.",
+ "example": 37833100,
+ "type": "string"
+ }
+ },
+ "required": [
+ "name",
+ "last_updated",
+ "primary_exchange",
+ "security_type",
+ "security_description",
+ "ipo_status"
+ ],
+ "type": "object",
+ "x-polygon-go-type": {
+ "name": "IPOsResult"
+ }
+ },
+ "type": "array"
+ },
+ "status": {
+ "description": "The status of this request's response.",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ }
+ }
+ },
+ "description": "A list of IPO events."
+ }
+ },
+ "summary": "IPOs",
+ "tags": [
+ "reference:stocks:ipos"
+ ],
+ "x-polygon-entitlement-data-type": {
+ "description": "Reference data",
+ "name": "reference"
+ },
+ "x-polygon-paginate": {
+ "limit": {
+ "default": 10,
+ "max": 1000
+ },
+ "order": {
+ "default": "desc"
+ },
+ "sort": {
+ "default": "listing_date",
+ "enum": [
+ "listing_date",
+ "ticker",
+ "last_updated",
+ "security_type",
+ "issuer_name",
+ "currency_code",
+ "isin",
+ "us_code",
+ "final_issue_price",
+ "min_shares_offered",
+ "max_shares_offered",
+ "lowest_offer_price",
+ "highest_offer_price",
+ "total_offer_size",
+ "shares_outstanding",
+ "primary_exchange",
+ "lot_size",
+ "security_description",
+ "ipo_status"
+ ]
+ }
+ }
+ }
+ },
+ "/vX/reference/tickers/taxonomies": {
+ "get": {
+ "description": "Many investors place a high value on sector data. It is used to measure economic activity, identify peers and competitors, build ETF products, quantify market share, and compare company performance. However, there are some limitations to industry standard sectors:\n* They have difficulty identifying the primary area of activity for large, complex businesses.\n* Studies confirm significant disagreement between classification schemes when attempting to categorize the same companies.\n* The systems' hierarchical nature is inflexible and struggles to convey business nuances.\n \n \nAs a result, we've developed a new taxonomy to supplement existing sector classifications. The taxonomy is created by reviewing related 10K filings to create a set of structured categories and tags.\n \n \nThe categories are based on company operating models and are industry agnostic. Our current version only supports one category, Revenue Streams, with future plans to support more.\n \n \nThe tags define a specific type within the category. Within the Revenue Streams category, for example, tags for \"product sales\" and \"advertising\" may be found. A company may have many tags in a given category. The complete Revenue Streams taxonomy is shown below.\n \n \nOur taxonomy is powered by AI and is currently in early beta testing. You should expect some inaccuracies in the responses.\n \n \n## **Revenue Streams**\n *Latest Revision (7/7/2023)*\n \n \n- **Physical Product Sales:**\n Revenue generated from the sale of tangible goods or physical products to customers, either in-store or online.\n - Consumer Goods\n - Industrial Goods\n - Electronics\n - Vehicles\n - Healthcare Products\n \n \n- **Digital Product Sales:**\n Revenue earned from the sale of digital goods or products, such as software licenses, e-books, music downloads, or digital media content. It also includes revenue obtained by selling aggregated, anonymized, or processed data to third parties for market research, analytics, or other purposes.\n - Software\n - E-books and Digital Media\n - Mobile Applications\n - Games\n - Online Courses\n - Market Research Data\n - Customer Behavior Data\n \n \n- **Professional Services:**\n Revenue obtained by providing specialized services, expertise, or consulting to clients in exchange for fees. This includes services offered by professionals such as lawyers, accountants, or consultants.\n - Consulting\n - Legal Services\n - Financial Services\n - Marketing Services\n - Construction Services\n - Education & Tutoring\n \n \n- **Consumer Services:**\n Revenue earned from providing services directly to consumers, including services like healthcare, personal grooming, fitness, or hospitality.\n - Dining & Hospitality\n - Personal Care\n - Entertainment & Recreation\n - Fitness & Wellness\n - Travel & Tourism\n - Transportation\n - Home Services\n - Child & Family Care\n - Automotive\n \n \n- **Subscription-based Revenue:**\n Revenue obtained from recurring fees charged to customers for accessing a product or service over a defined period. 
This includes revenue from subscription-based models, membership programs, or software-as-a-service (SaaS) offerings.\n - Software as a Service (SaaS)\n - Streaming Services\n - Physical Media\n - Memberships\n \n \n- **Licensing and Royalties:**\n Revenue generated from the licensing of intellectual property rights to third parties, including franchise rights, patent licensing, brand licensing, and the receipt of royalties for authorized use of intellectual property like music royalties, book royalties, or patent royalties.\n - Franchise Fees\n - Patent Licensing\n - Brand Licensing\n - Media Royalties\n \n \n- **Advertising:**\n Revenue generated by displaying ads or promotional content to customers, whether through traditional or digital advertising channels, including revenue from display ads, sponsored content, or affiliate marketing.\n - Print Advertising\n - Online Display Advertising\n - Social Media Advertising\n - Influencer Marketing\n \n \n- **Commission-Based Revenue:**\n Revenue earned by acting as an intermediary and receiving a percentage or commission on sales made on behalf of another party. This includes revenue from affiliate programs, referral fees, or any other commission-based revenue models.\n - Real Estate Commissions\n - Affiliate Marketing Commissions\n - Online Marketplace Commissions\n \n \n- **Rentals or Leasing:**\n Revenue earned by leasing or renting out assets, properties, or equipment to customers, including rental income from real estate properties, equipment leasing, or vehicle rentals.\n - Property Rentals\n - Equipment Leasing\n - Vehicle Rentals",
+ "operationId": "ListTickerTaxonomyClassifications",
+ "parameters": [
+ {
+ "in": "query",
+ "name": "ticker",
+ "schema": {
+ "type": "string"
+ },
+ "x-polygon-filter-field": {
+ "anyOf": {
+ "description": "Comma separated list of tickers, up to a maximum of 250.\n\nWarning: The maximum number of characters allowed in a URL are subject to your own technology stack.\n",
+ "enabled": true,
+ "example": "AAPL,AMD,MSFT"
+ },
+ "range": true,
+ "type": "string"
+ }
+ },
+ {
+ "description": "Filter by taxonomy category. The current version of this API supports the following category: revenue_streams",
+ "in": "query",
+ "name": "category",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Filter by taxonomy tag. Each category has a set of associated tags.",
+ "in": "query",
+ "name": "tag",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Order results ascending or descending based on the ticker.",
+ "in": "query",
+ "name": "order",
+ "schema": {
+ "enum": [
+ "asc",
+ "desc"
+ ],
+ "type": "string"
+ }
+ },
+ {
+ "description": "Limit the number of results returned. The default is 10 and the max is 250.",
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "default": 10,
+ "maximum": 250,
+ "minimum": 1,
+ "type": "integer"
+ }
+ },
+ {
+ "description": "Range by ticker.",
+ "in": "query",
+ "name": "ticker.gte",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by ticker.",
+ "in": "query",
+ "name": "ticker.gt",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by ticker.",
+ "in": "query",
+ "name": "ticker.lte",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by ticker.",
+ "in": "query",
+ "name": "ticker.lt",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Comma separated list of tickers, up to a maximum of 250.\n\nWarning: The maximum number of characters allowed in a URL are subject to your own technology stack.\n",
+ "example": "AAPL,AMD,MSFT",
+ "in": "query",
+ "name": "ticker.any_of",
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "content": {
+ "application/json": {
+ "example": {
+ "request_id": "a4f9947955398c28905337f003bfee7c",
+ "results": [
{
"category": "revenue_streams",
- "reason": "Company recognizes revenue from the sales of digital products such as digital storage and app store fees.",
- "relevance": 0.99,
- "tag": "digital_product_sales_software",
+ "reason": "The text mentions revenue earned from the sale of digital goods or products, such as software licenses, e-books, music downloads, or digital media content.",
+ "tag": "digital_product_sales",
"ticker": "AAPL"
},
{
- "category": "cost_structure",
- "relevance": 0.86,
- "tag": "economies_of_scale",
+ "category": "revenue_streams",
+ "reason": "The text mentions revenue generated from the licensing of intellectual property rights to third parties, including franchise rights, patent licensing, brand licensing, and the receipt of royalties for authorized use of intellectual property like music royalties, book royalties, or patent royalties.",
+ "tag": "licensing_and_royalties",
"ticker": "AAPL"
}
- ]
+ ],
+ "status": "OK"
},
"schema": {
"properties": {
@@ -30480,27 +31016,23 @@
"items": {
"properties": {
"category": {
- "description": "The classification category.",
+ "description": "A dimension of a company\u2019s operating model that is agnostic to industry. Category contains a comprehensive list of tags which reflect defined types within that category. The current version of this API supports the following category: revenue_streams",
"type": "string"
},
"reason": {
- "description": "The reason why the classification was given.",
+ "description": "The reason why the classification was given. The reason is provided by our AI to help you determine whether or not you agree with its applicability for your uses.",
"type": "string"
},
- "relevance": {
- "description": "The relevance score for the tag. This is a measure of confidence in the tag classification.",
- "format": "double",
- "type": "number"
- },
"tag": {
- "description": "The classification tag. Each category has a set of associated tags.",
+ "description": "A specific type within a category. For example \u201cproduct_sales\u201d is a type of revenue stream. A company may have multiple tags within a given category. A taxonomy of tags are determined based on 10k filings.",
"type": "string"
},
"ticker": {
- "description": "The ticker symbol for the asset.",
+ "description": "The identifying ticker symbol for the asset.",
"type": "string"
}
},
+ "type": "object",
"x-polygon-go-type": {
"name": "TaxonomyClassificationResult"
}
@@ -30531,20 +31063,7 @@
"description": "Reference data",
"name": "reference"
},
- "x-polygon-experimental": {},
- "x-polygon-paginate": {
- "limit": {
- "default": 10,
- "max": 250,
- "min": 1
- },
- "sort": {
- "default": "ticker",
- "enum": [
- "ticker"
- ]
- }
- }
+ "x-polygon-experimental": {}
},
"x-polygon-draft": true
},
@@ -31290,6 +31809,21 @@
"paths": [
"/v3/reference/exchanges"
]
+ },
+ {
+ "paths": [
+ "/v1/related-companies/{ticker}"
+ ]
+ },
+ {
+ "paths": [
+ "/vX/reference/ipos"
+ ]
+ },
+ {
+ "paths": [
+ "/vX/reference/short-interest/{identifier_type}/{identifier}"
+ ]
}
]
}
diff --git a/.polygon/websocket.json b/.polygon/websocket.json
index 85245e63..ff09762a 100644
--- a/.polygon/websocket.json
+++ b/.polygon/websocket.json
@@ -269,7 +269,7 @@
},
"c": {
"type": "array",
- "description": "The trade conditions. See Conditions and Indicators\" for Polygon.io's trade conditions glossary.\n",
+ "description": "The trade conditions. See Conditions and Indicators for Polygon.io's trade conditions glossary.\n",
"items": {
"type": "integer",
"description": "The ID of the condition."
@@ -277,7 +277,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
@@ -407,7 +407,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
@@ -995,7 +995,7 @@
},
"example": {
"ev": "FMV",
- "val": 189.22,
+ "fmv": 189.22,
"sym": "AAPL",
"t": 1678220098130
}
@@ -1761,7 +1761,7 @@
},
"example": {
"ev": "FMV",
- "val": 7.2,
+ "fmv": 7.2,
"sym": "O:TSLA210903C00700000",
"t": 1401715883806000000
}
@@ -2331,7 +2331,7 @@
},
"example": {
"ev": "FMV",
- "val": 1.0631,
+ "fmv": 1.0631,
"sym": "C:EURUSD",
"t": 1678220098130
}
@@ -2885,7 +2885,10 @@
"name": "realtime",
"description": "Real Time Data"
}
- ]
+ ],
+ "x-polygon-deprecation": {
+ "date": 1719838800000
+ }
}
},
"/crypto/XA": {
@@ -3157,7 +3160,7 @@
},
"example": {
"ev": "FMV",
- "val": 33021.9,
+ "fmv": 33021.9,
"sym": "X:BTC-USD",
"t": 1610462007425
}
@@ -3961,7 +3964,7 @@
},
"c": {
"type": "array",
- "description": "The trade conditions. See Conditions and Indicators\" for Polygon.io's trade conditions glossary.\n",
+ "description": "The trade conditions. See Conditions and Indicators for Polygon.io's trade conditions glossary.\n",
"items": {
"type": "integer",
"description": "The ID of the condition."
@@ -3969,7 +3972,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
@@ -4038,7 +4041,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
diff --git a/README.md b/README.md
index 81494a78..aaf7bb5f 100644
--- a/README.md
+++ b/README.md
@@ -7,11 +7,7 @@ Welcome to the official Python client library for the [Polygon](https://polygon.
## Prerequisites
-Before installing the Polygon Python client, ensure your environment has Python 3.8 or higher. While most Python environments come with setuptools installed, it is a dependency for this library. In the rare case it's not already present, you can install setuptools using pip:
-
-```
-pip install setuptools
-```
+Before installing the Polygon Python client, ensure your environment has Python 3.8 or higher.
## Install
@@ -150,54 +146,6 @@ ws.run(handle_msg=handle_msg)
```
Check out more detailed examples [here](https://github.com/polygon-io/client-python/tree/master/examples/websocket).
-## Launchpad REST API Client
-Users of the Launchpad product will need to pass in certain headers in order to make API requests using the RequestOptionBuilder.
-Example can be found [here](./examples/launchpad).
-
-Import classes
-```python
-from polygon import RESTClient
-from polygon.rest.models.request import RequestOptionBuilder
-```
-### Using the client
-Create client and set options
-```python
-# create client
-c = RESTClient(api_key="API_KEY")
-
-# create request options
-options = RequestOptionBuilder().edge_headers(
- edge_id="YOUR_EDGE_ID", # required
- edge_ip_address="IP_ADDRESS", # required
-)
-```
-Request data using client methods.
-```python
-# get response
-res = c.get_aggs("AAPL", 1, "day", "2022-04-04", "2022-04-04", options=options)
-
-# do something with response
-```
-Checkout Launchpad readme for more details on RequestOptionBuilder [here](./examples/launchpad)
-
-
-## Launchpad WebSocket Client
-
-```python
-from polygon import WebSocketClient
-from polygon.websocket.models import WebSocketMessage
-from polygon.websocket.models.common import Feed, Market
-from typing import List
-
-ws = WebSocketClient(api_key="API_KEY",feed=Feed.Launchpad,market=Market.Stocks, subscriptions=["AM.AAPL"])
-
-def handle_msg(msg: List[WebSocketMessage]):
- for m in msg:
- print(m)
-
-ws.run(handle_msg=handle_msg)
-```
-
## Contributing
If you found a bug or have an idea for a new feature, please first discuss it with us by
diff --git a/examples/rest/demo_correlation_matrix.py b/examples/rest/demo_correlation_matrix.py
index f056ab6d..df939590 100644
--- a/examples/rest/demo_correlation_matrix.py
+++ b/examples/rest/demo_correlation_matrix.py
@@ -40,6 +40,7 @@
essential to do your own research or consult a financial advisor for
personalized advice when investing.
"""
+
import pandas as pd # type: ignore
import numpy as np # type: ignore
import seaborn as sns # type: ignore
diff --git a/examples/rest/economy-treasury_yields.py b/examples/rest/economy-treasury_yields.py
new file mode 100644
index 00000000..011b1866
--- /dev/null
+++ b/examples/rest/economy-treasury_yields.py
@@ -0,0 +1,13 @@
+from polygon import RESTClient
+
+# docs
+# https://polygon.io/docs/rest/economy/treasury-yields
+
+# client = RESTClient("XXXXXX") # hardcoded api_key is used
+client = RESTClient() # POLYGON_API_KEY environment variable is used
+
+yields = []
+for date in client.vx.list_treasury_yields():
+ yields.append(date)
+
+print(yields)
diff --git a/examples/rest/raw-list.py b/examples/rest/raw-list.py
index 5a5a5642..9dd0020e 100644
--- a/examples/rest/raw-list.py
+++ b/examples/rest/raw-list.py
@@ -6,7 +6,7 @@
trades = cast(
HTTPResponse,
- client.list_trades("AAA", "2022-04-20", 5, raw=True),
+ client.list_trades("AAA", "2022-04-20", raw=True),
)
print(trades.data)
# b'{
diff --git a/examples/rest/stocks-ipos.py b/examples/rest/stocks-ipos.py
new file mode 100644
index 00000000..cc09f61b
--- /dev/null
+++ b/examples/rest/stocks-ipos.py
@@ -0,0 +1,13 @@
+from polygon import RESTClient
+
+# docs
+# https://polygon.io/docs/rest/stocks/corporate-actions/ipos
+
+# client = RESTClient("XXXXXX") # hardcoded api_key is used
+client = RESTClient() # POLYGON_API_KEY environment variable is used
+
+ipos = []
+for ipo in client.vx.list_ipos(ticker="RDDT"):
+ ipos.append(ipo)
+
+print(ipos)
diff --git a/examples/rest/stocks-related_companies.py b/examples/rest/stocks-related_companies.py
new file mode 100644
index 00000000..84b3a405
--- /dev/null
+++ b/examples/rest/stocks-related_companies.py
@@ -0,0 +1,10 @@
+from polygon import RESTClient
+
+# docs
+# https://polygon.io/docs/stocks/get_v1_related-companies__ticker
+
+# client = RESTClient("XXXXXX") # hardcoded api_key is used
+client = RESTClient() # POLYGON_API_KEY environment variable is used
+
+related_companies = client.get_related_companies("AAPL")
+print(related_companies)
diff --git a/examples/rest/stocks-short_interest.py b/examples/rest/stocks-short_interest.py
new file mode 100644
index 00000000..3b64cd2a
--- /dev/null
+++ b/examples/rest/stocks-short_interest.py
@@ -0,0 +1,13 @@
+from polygon import RESTClient
+
+# docs
+# https://polygon.io/docs/rest/stocks/fundamentals/short-interest
+
+# client = RESTClient("XXXXXX") # hardcoded api_key is used
+client = RESTClient() # POLYGON_API_KEY environment variable is used
+
+items = []
+for item in client.vx.list_short_interest(ticker="RDDT"):
+ items.append(item)
+
+print(items)
diff --git a/examples/rest/stocks-short_volume.py b/examples/rest/stocks-short_volume.py
new file mode 100644
index 00000000..711bfd47
--- /dev/null
+++ b/examples/rest/stocks-short_volume.py
@@ -0,0 +1,13 @@
+from polygon import RESTClient
+
+# docs
+# https://polygon.io/docs/rest/stocks/fundamentals/short-volume
+
+# client = RESTClient("XXXXXX") # hardcoded api_key is used
+client = RESTClient() # POLYGON_API_KEY environment variable is used
+
+items = []
+for item in client.vx.list_short_volume(ticker="RDDT"):
+ items.append(item)
+
+print(items)
diff --git a/examples/rest/stocks-stock_financials.py b/examples/rest/stocks-stock_financials.py
index dc356494..a75087e7 100644
--- a/examples/rest/stocks-stock_financials.py
+++ b/examples/rest/stocks-stock_financials.py
@@ -8,6 +8,13 @@
client = RESTClient() # POLYGON_API_KEY environment variable is used
financials = []
-for f in client.vx.list_stock_financials("AAPL"):
+for f in client.vx.list_stock_financials("AAPL", filing_date="2024-11-01"):
financials.append(f)
+
+ # get diluted_earnings_per_share
+ # print(f.financials.income_statement.diluted_earnings_per_share)
+
+ # get net_income_loss
+ # print(f.financials.income_statement.net_income_loss)
+
print(financials)
diff --git a/examples/tools/flatfiles-stock-trades/exchange-heatmap.py b/examples/tools/flatfiles-stock-trades/exchange-heatmap.py
new file mode 100644
index 00000000..060b6350
--- /dev/null
+++ b/examples/tools/flatfiles-stock-trades/exchange-heatmap.py
@@ -0,0 +1,68 @@
+# We can use a Python script that aggregates trades by exchange into 30-minute
+# chunks, setting the stage for a visual analysis. This approach will highlight
+# trade flows, including opening hours and peak activity times, across the
+# exchanges. Please see https://polygon.io/blog/insights-from-trade-level-data
+#
+import pandas as pd # type: ignore
+import seaborn as sns # type: ignore
+import matplotlib.pyplot as plt # type: ignore
+import numpy as np # type: ignore
+import pytz # type: ignore
+
+# Replace '2024-04-05.csv' with the path to your actual file
+file_path = "2024-04-05.csv"
+
+# Load the CSV file into a pandas DataFrame
+df = pd.read_csv(file_path)
+
+# Convert 'participant_timestamp' to datetime (assuming nanoseconds Unix timestamp)
+df["participant_timestamp"] = pd.to_datetime(
+ df["participant_timestamp"], unit="ns", utc=True
+)
+
+# Convert to Eastern Time (ET), accounting for both EST and EDT
+df["participant_timestamp"] = df["participant_timestamp"].dt.tz_convert(
+ "America/New_York"
+)
+
+# Create a new column for 30-minute time intervals, now in ET
+df["time_interval"] = df["participant_timestamp"].dt.floor("30T").dt.time
+
+# Ensure full 24-hour coverage by generating all possible 30-minute intervals
+all_intervals = pd.date_range(start="00:00", end="23:59", freq="30T").time
+all_exchanges = df["exchange"].unique()
+full_index = pd.MultiIndex.from_product(
+ [all_exchanges, all_intervals], names=["exchange", "time_interval"]
+)
+
+# Group by 'exchange' and 'time_interval', count trades, and reset index
+grouped = (
+ df.groupby(["exchange", "time_interval"])
+ .size()
+ .reindex(full_index, fill_value=0)
+ .reset_index(name="trade_count")
+)
+
+# Pivot the DataFrame for the heatmap, ensuring all intervals and exchanges are represented
+pivot_table = grouped.pivot(index="exchange", columns="time_interval", values="trade_count").fillna(0)
+
+# Apply a log scale transformation to the trade counts + 1 to handle zero trades correctly
+log_scale_data = np.log1p(pivot_table.values)
+
+# Plotting the heatmap using the log scale data
+plt.figure(figsize=(20, 10))
+sns.heatmap(
+ log_scale_data,
+ annot=False,
+ cmap="Reds",
+ linewidths=0.5,
+ cbar=False,
+ xticklabels=[t.strftime("%H:%M") for t in all_intervals],
+ yticklabels=pivot_table.index,
+)
+plt.title("Trade Count Heatmap by Exchange and Time Interval (Log Scale, ET)")
+plt.ylabel("Exchange")
+plt.xlabel("Time Interval (ET)")
+plt.xticks(rotation=45)
+plt.tight_layout() # Adjust layout to not cut off labels
+plt.show()
diff --git a/examples/tools/flatfiles-stock-trades/exchanges-seen.py b/examples/tools/flatfiles-stock-trades/exchanges-seen.py
new file mode 100644
index 00000000..70fb5081
--- /dev/null
+++ b/examples/tools/flatfiles-stock-trades/exchanges-seen.py
@@ -0,0 +1,23 @@
+# Here's a Python script for analyzing the dataset, that identifies the
+# distribution of trades across different exchanges and calculates their
+# respective percentages of the total trades. Please see
+# https://polygon.io/blog/insights-from-trade-level-data
+#
+import pandas as pd # type: ignore
+
+# Replace '2024-04-05.csv' with the path to your actual file
+file_path = "2024-04-05.csv"
+
+# Load the CSV file into a pandas DataFrame
+df = pd.read_csv(file_path)
+
+# Count the number of trades for each exchange
+exchange_counts = df["exchange"].value_counts()
+
+# Calculate the total number of trades
+total_trades = exchange_counts.sum()
+
+# Print out all exchanges and their percentage of total trades
+for exchange, count in exchange_counts.items():
+ percentage = (count / total_trades) * 100
+ print(f"Exchange {exchange}: {count} trades, {percentage:.2f}% of total trades")
diff --git a/examples/tools/flatfiles-stock-trades/heatmap.png b/examples/tools/flatfiles-stock-trades/heatmap.png
new file mode 100644
index 00000000..9cf4c0ac
Binary files /dev/null and b/examples/tools/flatfiles-stock-trades/heatmap.png differ
diff --git a/examples/tools/flatfiles-stock-trades/histogram.png b/examples/tools/flatfiles-stock-trades/histogram.png
new file mode 100644
index 00000000..1ccb62dd
Binary files /dev/null and b/examples/tools/flatfiles-stock-trades/histogram.png differ
diff --git a/examples/tools/flatfiles-stock-trades/readme.md b/examples/tools/flatfiles-stock-trades/readme.md
new file mode 100644
index 00000000..c794b3ba
--- /dev/null
+++ b/examples/tools/flatfiles-stock-trades/readme.md
@@ -0,0 +1,86 @@
+# Polygon.io Flat Files Stock Trades Analysis Scripts
+
+This repository contains Python scripts for analyzing stock market trading data using Flat Files from Polygon.io. These scripts demonstrate various ways to dissect and visualize trade data for comprehensive market analysis.
+
+Please see the tutorial: [Deep Dive into Trade-Level Data with Flat Files](https://polygon.io/blog/insights-from-trade-level-data)
+
+## Scripts Overview
+
+### **exchange-heatmap.py**
+This script aggregates trades by exchange into 30-minute chunks and creates a heatmap visualization. It highlights trade flows and peak activity times across exchanges, providing insight into how each exchange operates throughout the day.
+
+
+
+### **exchanges-seen.py**
+Analyzes the distribution of trades across different exchanges and calculates their respective percentages of total trades. This script helps identify which exchanges handle the most trading activity, offering a perspective on market structure.
+
+```
+Exchange 4: 25,570,324 trades, 36.32% of total trades
+Exchange 12: 15,147,689 trades, 21.52% of total trades
+Exchange 11: 6,877,306 trades, 9.77% of total trades
+Exchange 19: 5,098,852 trades, 7.24% of total trades
+Exchange 10: 4,006,611 trades, 5.69% of total trades
+Exchange 8: 3,686,168 trades, 5.24% of total trades
+Exchange 15: 2,446,340 trades, 3.47% of total trades
+Exchange 21: 2,173,744 trades, 3.09% of total trades
+Exchange 7: 1,509,083 trades, 2.14% of total trades
+Exchange 20: 1,296,811 trades, 1.84% of total trades
+Exchange 18: 674,553 trades, 0.96% of total trades
+Exchange 13: 527,767 trades, 0.75% of total trades
+Exchange 2: 417,295 trades, 0.59% of total trades
+Exchange 3: 393,919 trades, 0.56% of total trades
+Exchange 17: 230,210 trades, 0.33% of total trades
+Exchange 1: 183,010 trades, 0.26% of total trades
+Exchange 9: 159,020 trades, 0.23% of total trades
+Exchange 14: 1,211 trades, 0.00% of total trades
+```
+
+### **top-10-tickers.py**
+Identifies the top 10 most traded stocks and calculates their respective percentages of the total trades. This script provides a clear view of the market's most active stocks, highlighting where the most trading activity is concentrated.
+
+```
+TSLA: 1,549,605 trades, 2.20% of total trades
+NVDA: 788,331 trades, 1.12% of total trades
+SPY: 669,762 trades, 0.95% of total trades
+AMD: 587,140 trades, 0.83% of total trades
+MDIA: 561,698 trades, 0.80% of total trades
+AAPL: 540,870 trades, 0.77% of total trades
+SOXL: 533,511 trades, 0.76% of total trades
+QQQ: 508,822 trades, 0.72% of total trades
+CADL: 466,604 trades, 0.66% of total trades
+AMZN: 465,526 trades, 0.66% of total trades
+```
+
+### **trades-histogram.py**
+Creates a histogram that aggregates trades into 30-minute intervals throughout the day. This visualization helps understand the distribution of trading volume across different times, including pre-market, regular trading hours, and after-hours.
+
+
+
+## Download the Data
+
+First, let's download an actual file so we can explore the data and see what we can learn. We start by downloading the trades for 2024-04-05 via the [File Browser](https://polygon.io/flat-files/stocks-trades/2024/04). The `us_stocks_sip/trades_v1/2024/04/2024-04-05.csv.gz` file is about 1.35GB and is delivered in compressed gzip format.
+
+```
+gunzip 2024-04-05.csv.gz
+```
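+
+If you prefer the command line to the File Browser, the same file can be copied from the S3-compatible Flat Files endpoint. Here is a minimal sketch using the MinIO client, assuming `mc` is installed and that `YOUR_ACCESS_KEY` and `YOUR_SECRET_KEY` are placeholders for your Flat Files credentials:
+
+```bash
+# Point an alias at the Flat Files endpoint, copy the trades file, then decompress it
+mc alias set s3polygon https://files.polygon.io YOUR_ACCESS_KEY YOUR_SECRET_KEY
+mc cp s3polygon/flatfiles/us_stocks_sip/trades_v1/2024/04/2024-04-05.csv.gz .
+gunzip 2024-04-05.csv.gz
+```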
+
+## Getting Started
+
+To run these scripts, you will need Python 3 and several dependencies installed, including pandas, matplotlib, seaborn, and pytz. Ensure that you have the trading data file available and modify the `file_path` variable in each script to point to your data file location.
+
+```
+pip install pandas matplotlib seaborn pytz
+```
+
+## Usage
+
+Each script is designed to be run independently:
+
+```bash
+python exchange-heatmap.py
+python exchanges-seen.py
+python top-10-tickers.py
+python trades-histogram.py
+```
+
+Adjust the script parameters as necessary to fit your specific analysis needs or to accommodate different datasets.
\ No newline at end of file
diff --git a/examples/tools/flatfiles-stock-trades/top-10-tickers.py b/examples/tools/flatfiles-stock-trades/top-10-tickers.py
new file mode 100644
index 00000000..ec046e0b
--- /dev/null
+++ b/examples/tools/flatfiles-stock-trades/top-10-tickers.py
@@ -0,0 +1,25 @@
+# Here's a Python script for analyzing the dataset, that identifies the top 10
+# most traded stocks and calculates their respective percentages of the total
+# trades. Please see https://polygon.io/blog/insights-from-trade-level-data
+#
+import pandas as pd # type: ignore
+
+# Replace '2024-04-05.csv' with the path to your actual file
+file_path = "2024-04-05.csv"
+
+# Load the CSV file into a pandas DataFrame
+df = pd.read_csv(file_path)
+
+# Count the number of trades for each ticker
+trade_counts = df["ticker"].value_counts()
+
+# Calculate the total number of trades
+total_trades = trade_counts.sum()
+
+# Get the top 10 traded stocks
+top_10_traded = trade_counts.head(10)
+
+# Print out the top 10 traded stocks and their percentage of total trades
+for ticker, count in top_10_traded.items():
+ percentage = (count / total_trades) * 100
+ print(f"{ticker}: {count} trades, {percentage:.2f}% of total trades")
diff --git a/examples/tools/flatfiles-stock-trades/trades-histogram.py b/examples/tools/flatfiles-stock-trades/trades-histogram.py
new file mode 100644
index 00000000..6651978d
--- /dev/null
+++ b/examples/tools/flatfiles-stock-trades/trades-histogram.py
@@ -0,0 +1,63 @@
+# To visualize these dynamics, we can use a Python script to create a histogram
+# aggregating trades into 30-minute intervals, providing a clear view of when
+# trading activity concentrates during the day. This analysis aims to highlight
+# the distribution of trading volume across the day, from pre-market to after-
+# hours. Please see https://polygon.io/blog/insights-from-trade-level-data
+#
+import pandas as pd # type: ignore
+import matplotlib.pyplot as plt # type: ignore
+
+# Replace '2024-04-05.csv' with the path to your actual file
+file_path = "2024-04-05.csv"
+
+# Load the CSV file into a pandas DataFrame
+df = pd.read_csv(file_path)
+
+# Convert 'participant_timestamp' to datetime (assuming nanoseconds Unix timestamp)
+df["participant_timestamp"] = pd.to_datetime(
+ df["participant_timestamp"], unit="ns", utc=True
+)
+
+# Convert to Eastern Time (ET), accounting for both EST and EDT
+df["participant_timestamp"] = df["participant_timestamp"].dt.tz_convert(
+ "America/New_York"
+)
+
+# Create a new column for 30-minute time intervals, now in ET
+df["time_interval"] = df["participant_timestamp"].dt.floor("30T")
+
+# Aggregate trades into 30-minute intervals for the entire dataset
+trade_counts_per_interval = df.groupby("time_interval").size()
+
+# Prepare the plot
+plt.figure(figsize=(15, 7))
+
+# Plotting the histogram/bar chart
+bars = plt.bar(
+ trade_counts_per_interval.index, trade_counts_per_interval.values, width=0.02
+)
+
+# Adding trade count annotations on each bar
+for bar in bars:
+ height = bar.get_height()
+ plt.annotate(
+ f"{int(height)}",
+ xy=(bar.get_x() + bar.get_width() / 2, height),
+ xytext=(0, 3), # 3 points vertical offset
+ textcoords="offset points",
+ ha="center",
+ va="bottom",
+ )
+
+plt.title("Trade Counts Aggregated by 30-Minute Intervals (ET)")
+plt.xlabel("Time Interval (ET)")
+plt.ylabel("Number of Trades")
+plt.xticks(rotation=45, ha="right")
+
+# Ensure that every 30-minute interval is represented on the x-axis
+plt.gca().set_xticklabels(
+ [t.strftime("%Y-%m-%d %H:%M") for t in trade_counts_per_interval.index], rotation=90
+)
+
+plt.tight_layout()
+plt.show()
diff --git a/examples/tools/hunting-anomalies/README.md b/examples/tools/hunting-anomalies/README.md
new file mode 100644
index 00000000..4b36f1b5
--- /dev/null
+++ b/examples/tools/hunting-anomalies/README.md
@@ -0,0 +1,49 @@
+# Hunting Anomalies in the Stock Market
+
+This repository contains all the necessary scripts and data directories used in the [Hunting Anomalies in the Stock Market](https://polygon.io/blog/hunting-anomalies-in-stock-market/) tutorial, hosted on Polygon.io's blog. The tutorial demonstrates how to detect statistical anomalies in historical US stock market data through a comprehensive workflow that involves downloading data, building a lookup table, querying for anomalies, and visualizing them through a web interface.
+
+### Prerequisites
+
+- Python 3.8+
+- Access to Polygon.io's historical data via Flat Files
+- An active Polygon.io API key, obtainable by signing up for a Stocks paid plan
+
+### Repository Contents
+
+- `README.md`: This file, outlining setup and execution instructions.
+- `aggregates_day`: Directory where downloaded CSV data files are stored.
+- `build-lookup-table.py`: Python script to build a lookup table from the historical data.
+- `query-lookup-table.py`: Python script to query the lookup table for anomalies.
+- `gui-lookup-table.py`: Python script for a browser-based interface to explore anomalies visually.
+
+### Running the Tutorial
+
+1. **Ensure Python 3.8+ is installed:** Check your Python version and ensure the required third-party libraries (polygon-api-client and pandas) are installed; pickle and argparse are part of the Python standard library.
+
+2. **Set up your API key:** Make sure you have an active paid Polygon.io Stocks subscription for access to Flat Files. Set your API key in your environment or directly in the scripts where required (see the example after this list).
+
+3. **Download Historical Data:** Use the MinIO client to download historical stock market data. Adjust the commands and paths based on the data you are interested in.
+ ```bash
+ mc alias set s3polygon https://files.polygon.io YOUR_ACCESS_KEY YOUR_SECRET_KEY
+ mc cp --recursive s3polygon/flatfiles/us_stocks_sip/day_aggs_v1/2024/08/ ./aggregates_day/
+ mc cp --recursive s3polygon/flatfiles/us_stocks_sip/day_aggs_v1/2024/09/ ./aggregates_day/
+ mc cp --recursive s3polygon/flatfiles/us_stocks_sip/day_aggs_v1/2024/10/ ./aggregates_day/
+ gunzip ./aggregates_day/*.gz
+ ```
+
+4. **Build the Lookup Table:** This script processes the downloaded data and builds a lookup table, saving it as `lookup_table.pkl`.
+ ```bash
+ python build-lookup-table.py
+ ```
+
+5. **Query Anomalies:** Replace `2024-10-18` with the date you want to analyze for anomalies.
+ ```bash
+ python query-lookup-table.py 2024-10-18
+ ```
+
+6. **Run the GUI:** Access the web interface at `http://localhost:8888` to explore the anomalies visually.
+ ```bash
+ python gui-lookup-table.py
+ ```
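+
+As a minimal example for step 2 (assuming a POSIX shell), the key can be exported as the `POLYGON_API_KEY` environment variable, which `RESTClient()` picks up automatically:
+
+```bash
+# Make the API key available to the scripts without hardcoding it
+export POLYGON_API_KEY="YOUR_API_KEY"
+```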
+
+For a complete step-by-step guide on each phase of the anomaly detection process, including additional configurations and troubleshooting, refer to the detailed [tutorial on our blog](https://polygon.io/blog/hunting-anomalies-in-stock-market/).
diff --git a/examples/tools/hunting-anomalies/aggregates_day/README.md b/examples/tools/hunting-anomalies/aggregates_day/README.md
new file mode 100644
index 00000000..a0ade480
--- /dev/null
+++ b/examples/tools/hunting-anomalies/aggregates_day/README.md
@@ -0,0 +1 @@
+Download flat files into here.
diff --git a/examples/tools/hunting-anomalies/build-lookup-table.py b/examples/tools/hunting-anomalies/build-lookup-table.py
new file mode 100644
index 00000000..16abca2d
--- /dev/null
+++ b/examples/tools/hunting-anomalies/build-lookup-table.py
@@ -0,0 +1,91 @@
+import os
+import pandas as pd # type: ignore
+from collections import defaultdict
+import pickle
+import json
+from typing import DefaultDict, Dict, Any, BinaryIO
+
+# Directory containing the daily CSV files
+data_dir = "./aggregates_day/"
+
+# Initialize a dictionary to hold trades data
+trades_data = defaultdict(list)
+
+# List all CSV files in the directory
+files = sorted([f for f in os.listdir(data_dir) if f.endswith(".csv")])
+
+print("Starting to process files...")
+
+# Process each file (assuming files are named in order)
+for file in files:
+ print(f"Processing {file}")
+ file_path = os.path.join(data_dir, file)
+ df = pd.read_csv(file_path)
+ # For each stock, store the date and relevant data
+ for _, row in df.iterrows():
+ ticker = row["ticker"]
+ date = pd.to_datetime(row["window_start"], unit="ns").date()
+ trades = row["transactions"]
+ close_price = row["close"] # Ensure 'close' column exists in your CSV
+ trades_data[ticker].append(
+ {"date": date, "trades": trades, "close_price": close_price}
+ )
+
+print("Finished processing files.")
+print("Building lookup table...")
+
+# Now, build the lookup table with rolling averages and percentage price change
+lookup_table: DefaultDict[str, Dict[str, Any]] = defaultdict(
+ dict
+) # Nested dict: ticker -> date -> stats
+
+for ticker, records in trades_data.items():
+ # Convert records to DataFrame
+ df_ticker = pd.DataFrame(records)
+ # Sort records by date
+ df_ticker.sort_values("date", inplace=True)
+ df_ticker.set_index("date", inplace=True)
+
+ # Calculate the percentage change in close_price
+ df_ticker["price_diff"] = (
+ df_ticker["close_price"].pct_change() * 100
+ ) # Multiply by 100 for percentage
+
+ # Shift trades to exclude the current day from rolling calculations
+ df_ticker["trades_shifted"] = df_ticker["trades"].shift(1)
+ # Calculate rolling average and standard deviation over the previous 5 days
+ df_ticker["avg_trades"] = df_ticker["trades_shifted"].rolling(window=5).mean()
+ df_ticker["std_trades"] = df_ticker["trades_shifted"].rolling(window=5).std()
+ # Store the data in the lookup table
+ for date, row in df_ticker.iterrows():
+ # Convert date to string for JSON serialization
+ date_str = date.strftime("%Y-%m-%d")
+ # Ensure rolling stats are available
+ if pd.notnull(row["avg_trades"]) and pd.notnull(row["std_trades"]):
+ lookup_table[ticker][date_str] = {
+ "trades": row["trades"],
+ "close_price": row["close_price"],
+ "price_diff": row["price_diff"],
+ "avg_trades": row["avg_trades"],
+ "std_trades": row["std_trades"],
+ }
+ else:
+ # Store data without rolling stats if not enough data points
+ lookup_table[ticker][date_str] = {
+ "trades": row["trades"],
+ "close_price": row["close_price"],
+ "price_diff": row["price_diff"],
+ "avg_trades": None,
+ "std_trades": None,
+ }
+
+print("Lookup table built successfully.")
+
+# Convert defaultdict to regular dict for JSON serialization
+lookup_table_dict = {k: v for k, v in lookup_table.items()}
+
+# Save the lookup table to a file for later use
+with open("lookup_table.pkl", "wb") as f: # type: BinaryIO
+ pickle.dump(lookup_table_dict, f)
+
+print("Lookup table saved to 'lookup_table.pkl'.")
diff --git a/examples/tools/hunting-anomalies/gui-lookup-table.py b/examples/tools/hunting-anomalies/gui-lookup-table.py
new file mode 100644
index 00000000..df58746c
--- /dev/null
+++ b/examples/tools/hunting-anomalies/gui-lookup-table.py
@@ -0,0 +1,302 @@
+import os
+import pickle
+import json
+from datetime import datetime
+from polygon import RESTClient
+from polygon.rest.models import Agg
+import http.server
+import socketserver
+import traceback
+from urllib.parse import urlparse, parse_qs
+
+PORT = 8888
+
+# Load the lookup_table
+with open("lookup_table.pkl", "rb") as f:
+ lookup_table = pickle.load(f)
+
+
+class handler(http.server.SimpleHTTPRequestHandler):
+ def do_GET(self):
+ # Parse the path and query parameters
+ parsed_path = urlparse(self.path)
+ path = parsed_path.path
+ query_params = parse_qs(parsed_path.query)
+
+ if path == "/":
+ # Handle the root path
+ # Get the date parameter if provided
+ date_param = query_params.get("date", [None])[0]
+
+ # Get all dates from the lookup table
+ all_dates = set()
+ for ticker_data in lookup_table.values():
+ all_dates.update(ticker_data.keys())
+ all_dates = sorted(all_dates)
+
+ # If date is None, get the latest date from the lookup table
+ if date_param is None:
+ if all_dates:
+ latest_date = max(all_dates)
+ else:
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ html_content = (
+ "<p>No data available.</p>"
+ )
+ self.wfile.write(html_content.encode())
+ return
+ else:
+ latest_date = date_param
+
+ # Ensure latest_date is in all_dates
+ if latest_date not in all_dates:
+ # Handle the case where the provided date is invalid
+ self.send_response(400)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ error_html = f"<p>Error: No data available for date {latest_date}</p>"
+ self.wfile.write(error_html.encode())
+ return
+
+ # Now, get the anomalies for the latest_date
+ anomalies = []
+ for ticker, date_data in lookup_table.items():
+ if latest_date in date_data:
+ data = date_data[latest_date]
+ trades = data["trades"]
+ avg_trades = data["avg_trades"]
+ std_trades = data["std_trades"]
+ if (
+ avg_trades is not None
+ and std_trades is not None
+ and std_trades > 0
+ ):
+ z_score = (trades - avg_trades) / std_trades
+ threshold_multiplier = 3 # Adjust as needed
+ if z_score > threshold_multiplier:
+ anomalies.append(
+ {
+ "ticker": ticker,
+ "date": latest_date,
+ "trades": trades,
+ "avg_trades": avg_trades,
+ "std_trades": std_trades,
+ "z_score": z_score,
+ "close_price": data["close_price"],
+ "price_diff": data["price_diff"],
+ }
+ )
+ # Sort anomalies by trades in descending order
+ anomalies.sort(key=lambda x: x["trades"], reverse=True)
+ # Generate the HTML to display the anomalies
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ # Build the HTML content
+ html_content = 'Anomalies for {}'.format(
+ latest_date
+ )
+ html_content += '