diff --git a/.polygon/rest.json b/.polygon/rest.json index fe330dce..14f9bceb 100644 --- a/.polygon/rest.json +++ b/.polygon/rest.json @@ -12698,6 +12698,10 @@ "description": "The status of Cboe Streaming Market Indices Cryptocurrency (\"CCCY\") indices trading hours.", "type": "string" }, + "cgi": { + "description": "The status of Cboe Global Indices (\"CGI\") trading hours.", + "type": "string" + }, "dow_jones": { "description": "The status of Dow Jones indices trading hours", "type": "string" @@ -26444,6 +26448,10 @@ "description": "The first line of the company's headquarters address.", "type": "string" }, + "address2": { + "description": "The second line of the company's headquarters address, if applicable.", + "type": "string" + }, "city": { "description": "The city of the company's headquarters address.", "type": "string" @@ -26608,7 +26616,7 @@ } }, "text/csv": { - "example": "ticker,name,market,locale,primary_exchange,type,active,currency_name,cik,composite_figi,share_class_figi,share_class_shares_outstanding,weighted_shares_outstanding,round_lot,market_cap,phone_number,address1,city,state,postal_code,sic_code,sic_description,ticker_root,total_employees,list_date,homepage_url,description,branding/logo_url,branding/icon_url\nAAPL,Apple Inc.,stocks,us,XNAS,CS,true,usd,0000320193,BBG000B9XRY4,BBG001S5N8V8,16406400000,16334371000,100,2771126040150,(408) 996-1010,One Apple Park Way,Cupertino,CA,95014,3571,ELECTRONIC COMPUTERS,AAPL,154000,1980-12-12,https://www.apple.com,\"Apple designs a wide variety of consumer electronic devices, including smartphones (iPhone), tablets (iPad), PCs (Mac), smartwatches (Apple Watch), AirPods, and TV boxes (Apple TV), among others. The iPhone makes up the majority of Apple's total revenue. In addition, Apple offers its customers a variety of services such as Apple Music, iCloud, Apple Care, Apple TV+, Apple Arcade, Apple Card, and Apple Pay, among others. Apple's products run internally developed software and semiconductors, and the firm is well known for its integration of hardware, software and services. Apple's products are distributed online as well as through company-owned stores and third-party retailers. The company generates roughly 40% of its revenue from the Americas, with the remainder earned internationally.\",https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_logo.svg,https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_icon.png\n", + "example": "ticker,name,market,locale,primary_exchange,type,active,currency_name,cik,composite_figi,share_class_figi,share_class_shares_outstanding,weighted_shares_outstanding,round_lot,market_cap,phone_number,address1,address2,city,state,postal_code,sic_code,sic_description,ticker_root,total_employees,list_date,homepage_url,description,branding/logo_url,branding/icon_url\nAAPL,Apple Inc.,stocks,us,XNAS,CS,true,usd,0000320193,BBG000B9XRY4,BBG001S5N8V8,16406400000,16334371000,100,2771126040150,(408) 996-1010,One Apple Park Way,,Cupertino,CA,95014,3571,ELECTRONIC COMPUTERS,AAPL,154000,1980-12-12,https://www.apple.com,\"Apple designs a wide variety of consumer electronic devices, including smartphones (iPhone), tablets (iPad), PCs (Mac), smartwatches (Apple Watch), AirPods, and TV boxes (Apple TV), among others. The iPhone makes up the majority of Apple's total revenue. In addition, Apple offers its customers a variety of services such as Apple Music, iCloud, Apple Care, Apple TV+, Apple Arcade, Apple Card, and Apple Pay, among others. 
Apple's products run internally developed software and semiconductors, and the firm is well known for its integration of hardware, software and services. Apple's products are distributed online as well as through company-owned stores and third-party retailers. The company generates roughly 40% of its revenue from the Americas, with the remainder earned internationally.\",https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_logo.svg,https://api.polygon.io/v1/reference/company-branding/d3d3LmFwcGxlLmNvbQ/images/2022-01-10_icon.png\n", "schema": { "type": "string" } @@ -30323,7 +30331,7 @@ }, "/vX/reference/tickers/taxonomies": { "get": { - "description": "Retrieve taxonomy classifications for one or more tickers.", + "description": "Many investors place a high value on sector data. It is used to measure economic activity, identify peers and competitors, build ETF products, quantify market share, and compare company performance. However, there are some limitations to industry standard sectors:\n* They have difficulty identifying the primary area of activity for large, complex businesses.\n* Studies confirm significant disagreement between classification schemes when attempting to categorize the same companies.\n* The systems' hierarchical nature is inflexible and struggles to convey business nuances.\n
\n
\nAs a result, we've developed a new taxonomy to supplement existing sector classifications. The taxonomy is built by reviewing related 10-K filings to create a set of structured categories and tags.\n
\n
\nThe categories are based on company operating models and are industry agnostic. Our current version supports only one category, Revenue Streams, with plans to support more in the future.\n
\n
\nThe tags define a specific type within the category. Within the Revenue Streams category, for example, tags for \"product sales\" and \"advertising\" may be found. A company may have many tags in a given category. The complete Revenue Streams taxonomy is shown below.\n
\n
\nOur taxonomy is powered by AI and is currently in early beta testing. You should expect some inaccuracies in the responses.\n
\n
\n## **Revenue Streams**\n *Latest Revision (7/7/2023)*\n
\n
\n- **Physical Product Sales:**\n Revenue generated from the sale of tangible goods or physical products to customers, either in-store or online.\n - Consumer Goods\n - Industrial Goods\n - Electronics\n - Vehicles\n - Healthcare Products\n
\n
\n- **Digital Product Sales:**\n Revenue earned from the sale of digital goods or products, such as software licenses, e-books, music downloads, or digital media content. It also includes revenue obtained by selling aggregated, anonymized, or processed data to third parties for market research, analytics, or other purposes.\n - Software\n - E-books and Digital Media\n - Mobile Applications\n - Games\n - Online Courses\n - Market Research Data\n - Customer Behavior Data\n
\n
\n- **Professional Services:**\n Revenue obtained by providing specialized services, expertise, or consulting to clients in exchange for fees. This includes services offered by professionals such as lawyers, accountants, or consultants.\n - Consulting\n - Legal Services\n - Financial Services\n - Marketing Services\n - Construction Services\n - Education & Tutoring\n
\n
\n- **Consumer Services:**\n Revenue earned from providing services directly to consumers, including services like healthcare, personal grooming, fitness, or hospitality.\n - Dining & Hospitality\n - Personal Care\n - Entertainment & Recreation\n - Fitness & Wellness\n - Travel & Tourism\n - Transportation\n - Home Services\n - Child & Family Care\n - Automotive\n
\n
\n- **Subscription-based Revenue:**\n Revenue obtained from recurring fees charged to customers for accessing a product or service over a defined period. This includes revenue from subscription-based models, membership programs, or software-as-a-service (SaaS) offerings.\n - Software as a Service (SaaS)\n - Streaming Services\n - Physical Media\n - Memberships\n
\n
\n- **Licensing and Royalties:**\n Revenue generated from the licensing of intellectual property rights to third parties, including franchise rights, patent licensing, brand licensing, and the receipt of royalties for authorized use of intellectual property like music royalties, book royalties, or patent royalties.\n - Franchise Fees\n - Patent Licensing\n - Brand Licensing\n - Media Royalties\n
\n
\n- **Advertising:**\n Revenue generated by displaying ads or promotional content to customers, whether through traditional or digital advertising channels, including revenue from display ads, sponsored content, or affiliate marketing.\n - Print Advertising\n - Online Display Advertising\n - Social Media Advertising\n - Influencer Marketing\n
\n
\n- **Commission-Based Revenue:**\n Revenue earned by acting as an intermediary and receiving a percentage or commission on sales made on behalf of another party. This includes revenue from affiliate programs, referral fees, or any other commission-based revenue models.\n - Real Estate Commissions\n - Affiliate Marketing Commissions\n - Online Marketplace Commissions\n
\n
\n- **Rentals or Leasing:**\n Revenue earned by leasing or renting out assets, properties, or equipment to customers, including rental income from real estate properties, equipment leasing, or vehicle rentals.\n - Property Rentals\n - Equipment Leasing\n - Vehicle Rentals", "operationId": "ListTickerTaxonomyClassifications", "parameters": [ { @@ -30334,16 +30342,16 @@ }, "x-polygon-filter-field": { "anyOf": { - "description": "Comma separated list of tickers, up to a maximum of 250. If no tickers are passed then all results will be returned in a paginated manner.\n\nWarning: The maximum number of characters allowed in a URL are subject to your technology stack.\n", + "description": "Comma separated list of tickers, up to a maximum of 250.\n\nWarning: The maximum number of characters allowed in a URL are subject to your own technology stack.\n", "enabled": true, - "example": "NCLH,O:SPY250321C00380000,C:EURUSD,X:BTCUSD,I:SPX" + "example": "AAPL,AMD,MSFT" }, "range": true, "type": "string" } }, { - "description": "Filter by taxonomy category.", + "description": "Filter by taxonomy category. The current version of this API supports the following category: revenue_streams", "in": "query", "name": "category", "schema": { @@ -30359,25 +30367,32 @@ } }, { - "description": "Range by ticker.", + "description": "Order results ascending or descending based on the ticker.", "in": "query", - "name": "ticker.gte", + "name": "order", "schema": { + "enum": [ + "asc", + "desc" + ], "type": "string" } }, { - "description": "Range by ticker.", + "description": "Limit the number of results returned. The default is 10 and the max is 250.", "in": "query", - "name": "ticker.gt", + "name": "limit", "schema": { - "type": "string" + "default": 10, + "maximum": 250, + "minimum": 1, + "type": "integer" } }, { "description": "Range by ticker.", "in": "query", - "name": "ticker.lte", + "name": "ticker.gte", "schema": { "type": "string" } @@ -30385,55 +30400,33 @@ { "description": "Range by ticker.", "in": "query", - "name": "ticker.lt", + "name": "ticker.gt", "schema": { "type": "string" } }, { - "description": "Comma separated list of tickers, up to a maximum of 250. 
If no tickers are passed then all results will be returned in a paginated manner.\n\nWarning: The maximum number of characters allowed in a URL are subject to your technology stack.\n", - "example": "NCLH,O:SPY250321C00380000,C:EURUSD,X:BTCUSD,I:SPX", + "description": "Range by ticker.", "in": "query", - "name": "ticker.any_of", + "name": "ticker.lte", "schema": { "type": "string" } }, { - "description": "Order results based on the `sort` field.", + "description": "Range by ticker.", "in": "query", - "name": "order", + "name": "ticker.lt", "schema": { - "enum": [ - "asc", - "desc" - ], - "example": "asc", "type": "string" } }, { - "description": "Limit the number of results returned, default is 10 and max is 250.", - "in": "query", - "name": "limit", - "schema": { - "default": 10, - "example": 10, - "maximum": 250, - "minimum": 1, - "type": "integer" - } - }, - { - "description": "Sort field used for ordering.", + "description": "Comma separated list of tickers, up to a maximum of 250.\n\nWarning: The maximum number of characters allowed in a URL are subject to your own technology stack.\n", + "example": "AAPL,AMD,MSFT", "in": "query", - "name": "sort", + "name": "ticker.any_of", "schema": { - "default": "ticker", - "enum": [ - "ticker" - ], - "example": "ticker", "type": "string" } } @@ -30443,29 +30436,22 @@ "content": { "application/json": { "example": { - "request_id": "31d59dda-80e5-4721-8496-d0d32a654afe", + "request_id": "a4f9947955398c28905337f003bfee7c", "results": [ { "category": "revenue_streams", - "reason": "Company recognizes revenue from the sales of consumer electronics such as the iPhone and iPad.", - "relevance": 0.99, - "tag": "physical_product_sales_electronics", + "reason": "The text mentions revenue earned from the sale of digital goods or products, such as software licenses, e-books, music downloads, or digital media content.", + "tag": "digital_product_sales", "ticker": "AAPL" }, { "category": "revenue_streams", - "reason": "Company recognizes revenue from the sales of digital products such as digital storage and app store fees.", - "relevance": 0.99, - "tag": "digital_product_sales_software", - "ticker": "AAPL" - }, - { - "category": "cost_structure", - "relevance": 0.86, - "tag": "economies_of_scale", + "reason": "The text mentions revenue generated from the licensing of intellectual property rights to third parties, including franchise rights, patent licensing, brand licensing, and the receipt of royalties for authorized use of intellectual property like music royalties, book royalties, or patent royalties.", + "tag": "licensing_and_royalties", "ticker": "AAPL" } - ] + ], + "status": "OK" }, "schema": { "properties": { @@ -30480,27 +30466,23 @@ "items": { "properties": { "category": { - "description": "The classification category.", + "description": "A dimension of a company\u2019s operating model that is agnostic to industry. Category contains a comprehensive list of tags which reflect defined types within that category. The current version of this API supports the following category: revenue_streams", "type": "string" }, "reason": { - "description": "The reason why the classification was given.", + "description": "The reason why the classification was given. The reason is provided by our AI to help you determine whether or not you agree with its applicability for your uses.", "type": "string" }, - "relevance": { - "description": "The relevance score for the tag. 
This is a measure of confidence in the tag classification.", - "format": "double", - "type": "number" - }, "tag": { - "description": "The classification tag. Each category has a set of associated tags.", + "description": "A specific type within a category. For example \u201cproduct_sales\u201d is a type of revenue stream. A company may have multiple tags within a given category. A taxonomy of tags are determined based on 10k filings.", "type": "string" }, "ticker": { - "description": "The ticker symbol for the asset.", + "description": "The identifying ticker symbol for the asset.", "type": "string" } }, + "type": "object", "x-polygon-go-type": { "name": "TaxonomyClassificationResult" } @@ -30531,20 +30513,7 @@ "description": "Reference data", "name": "reference" }, - "x-polygon-experimental": {}, - "x-polygon-paginate": { - "limit": { - "default": 10, - "max": 250, - "min": 1 - }, - "sort": { - "default": "ticker", - "enum": [ - "ticker" - ] - } - } + "x-polygon-experimental": {} }, "x-polygon-draft": true }, diff --git a/README.md b/README.md index 81494a78..3df6c512 100644 --- a/README.md +++ b/README.md @@ -150,54 +150,6 @@ ws.run(handle_msg=handle_msg) ``` Check out more detailed examples [here](https://github.com/polygon-io/client-python/tree/master/examples/websocket). -## Launchpad REST API Client -Users of the Launchpad product will need to pass in certain headers in order to make API requests using the RequestOptionBuilder. -Example can be found [here](./examples/launchpad). - -Import classes -```python -from polygon import RESTClient -from polygon.rest.models.request import RequestOptionBuilder -``` -### Using the client -Create client and set options -```python -# create client -c = RESTClient(api_key="API_KEY") - -# create request options -options = RequestOptionBuilder().edge_headers( - edge_id="YOUR_EDGE_ID", # required - edge_ip_address="IP_ADDRESS", # required -) -``` -Request data using client methods. -```python -# get response -res = c.get_aggs("AAPL", 1, "day", "2022-04-04", "2022-04-04", options=options) - -# do something with response -``` -Checkout Launchpad readme for more details on RequestOptionBuilder [here](./examples/launchpad) - - -## Launchpad WebSocket Client - -```python -from polygon import WebSocketClient -from polygon.websocket.models import WebSocketMessage -from polygon.websocket.models.common import Feed, Market -from typing import List - -ws = WebSocketClient(api_key="API_KEY",feed=Feed.Launchpad,market=Market.Stocks, subscriptions=["AM.AAPL"]) - -def handle_msg(msg: List[WebSocketMessage]): - for m in msg: - print(m) - -ws.run(handle_msg=handle_msg) -``` - ## Contributing If you found a bug or have an idea for a new feature, please first discuss it with us by diff --git a/examples/rest/demo_correlation_matrix.py b/examples/rest/demo_correlation_matrix.py index f056ab6d..df939590 100644 --- a/examples/rest/demo_correlation_matrix.py +++ b/examples/rest/demo_correlation_matrix.py @@ -40,6 +40,7 @@ essential to do your own research or consult a financial advisor for personalized advice when investing. 
""" + import pandas as pd # type: ignore import numpy as np # type: ignore import seaborn as sns # type: ignore diff --git a/examples/tools/flatfiles-stock-trades/exchange-heatmap.py b/examples/tools/flatfiles-stock-trades/exchange-heatmap.py new file mode 100644 index 00000000..060b6350 --- /dev/null +++ b/examples/tools/flatfiles-stock-trades/exchange-heatmap.py @@ -0,0 +1,68 @@ +# We can use a Python script that aggregates trades by exchange into 30-minute +# chunks, setting the stage for a visual analysis. This approach will highlight +# trade flows, including opening hours and peak activity times, across the +# exchanges. Please see https://polygon.io/blog/insights-from-trade-level-data +# +import pandas as pd # type: ignore +import seaborn as sns # type: ignore +import matplotlib.pyplot as plt # type: ignore +import numpy as np # type: ignore +import pytz # type: ignore + +# Replace '2024-04-05.csv' with the path to your actual file +file_path = "2024-04-05.csv" + +# Load the CSV file into a pandas DataFrame +df = pd.read_csv(file_path) + +# Convert 'participant_timestamp' to datetime (assuming nanoseconds Unix timestamp) +df["participant_timestamp"] = pd.to_datetime( + df["participant_timestamp"], unit="ns", utc=True +) + +# Convert to Eastern Time (ET), accounting for both EST and EDT +df["participant_timestamp"] = df["participant_timestamp"].dt.tz_convert( + "America/New_York" +) + +# Create a new column for 30-minute time intervals, now in ET +df["time_interval"] = df["participant_timestamp"].dt.floor("30T").dt.time + +# Ensure full 24-hour coverage by generating all possible 30-minute intervals +all_intervals = pd.date_range(start="00:00", end="23:59", freq="30T").time +all_exchanges = df["exchange"].unique() +full_index = pd.MultiIndex.from_product( + [all_exchanges, all_intervals], names=["exchange", "time_interval"] +) + +# Group by 'exchange' and 'time_interval', count trades, and reset index +grouped = ( + df.groupby(["exchange", "time_interval"]) + .size() + .reindex(full_index, fill_value=0) + .reset_index(name="trade_count") +) + +# Pivot the DataFrame for the heatmap, ensuring all intervals and exchanges are represented +pivot_table = grouped.pivot("exchange", "time_interval", "trade_count").fillna(0) + +# Apply a log scale transformation to the trade counts + 1 to handle zero trades correctly +log_scale_data = np.log1p(pivot_table.values) + +# Plotting the heatmap using the log scale data +plt.figure(figsize=(20, 10)) +sns.heatmap( + log_scale_data, + annot=False, + cmap="Reds", + linewidths=0.5, + cbar=False, + xticklabels=[t.strftime("%H:%M") for t in all_intervals], + yticklabels=pivot_table.index, +) +plt.title("Trade Count Heatmap by Exchange and Time Interval (Log Scale, ET)") +plt.ylabel("Exchange") +plt.xlabel("Time Interval (ET)") +plt.xticks(rotation=45) +plt.tight_layout() # Adjust layout to not cut off labels +plt.show() diff --git a/examples/tools/flatfiles-stock-trades/exchanges-seen.py b/examples/tools/flatfiles-stock-trades/exchanges-seen.py new file mode 100644 index 00000000..70fb5081 --- /dev/null +++ b/examples/tools/flatfiles-stock-trades/exchanges-seen.py @@ -0,0 +1,23 @@ +# Here's a Python script for analyzing the dataset, that identifies the +# distribution of trades across different exchanges and calculates their +# respective percentages of the total trades. 
Please see
+# https://polygon.io/blog/insights-from-trade-level-data
+#
+import pandas as pd  # type: ignore
+
+# Replace '2024-04-05.csv' with the path to your actual file
+file_path = "2024-04-05.csv"
+
+# Load the CSV file into a pandas DataFrame
+df = pd.read_csv(file_path)
+
+# Count the number of trades for each exchange
+exchange_counts = df["exchange"].value_counts()
+
+# Calculate the total number of trades
+total_trades = exchange_counts.sum()
+
+# Print out all exchanges and their percentage of total trades
+for exchange, count in exchange_counts.items():
+    percentage = (count / total_trades) * 100
+    print(f"Exchange {exchange}: {count} trades, {percentage:.2f}% of total trades")
diff --git a/examples/tools/flatfiles-stock-trades/heatmap.png b/examples/tools/flatfiles-stock-trades/heatmap.png
new file mode 100644
index 00000000..9cf4c0ac
Binary files /dev/null and b/examples/tools/flatfiles-stock-trades/heatmap.png differ
diff --git a/examples/tools/flatfiles-stock-trades/histogram.png b/examples/tools/flatfiles-stock-trades/histogram.png
new file mode 100644
index 00000000..1ccb62dd
Binary files /dev/null and b/examples/tools/flatfiles-stock-trades/histogram.png differ
diff --git a/examples/tools/flatfiles-stock-trades/readme.md b/examples/tools/flatfiles-stock-trades/readme.md
new file mode 100644
index 00000000..c794b3ba
--- /dev/null
+++ b/examples/tools/flatfiles-stock-trades/readme.md
@@ -0,0 +1,86 @@
+# Polygon.io Flat Files Stock Trades Analysis Scripts
+
+This repository contains Python scripts for analyzing stock market trading data using Flat Files from Polygon.io. These scripts demonstrate various ways to dissect and visualize trade data for comprehensive market analysis.
+
+Please see the tutorial: [Deep Dive into Trade-Level Data with Flat Files](https://polygon.io/blog/insights-from-trade-level-data)
+
+## Scripts Overview
+
+### **exchange-heatmap.py**
+This script aggregates trades by exchange into 30-minute chunks and creates a heatmap visualization. It highlights the flow of trades and peak activity times across different exchanges, providing insights into how each exchange operates throughout the day.
+
+![Heatmap Visualization](./heatmap.png)
+
+### **exchanges-seen.py**
+Analyzes the distribution of trades across different exchanges and calculates their respective percentages of total trades. This script helps identify which exchanges handle the most trading volume, offering a perspective on market structure. 
+ +``` +Exchange 4: 25,570,324 trades, 36.32% of total trades +Exchange 12: 15,147,689 trades, 21.52% of total trades +Exchange 11: 6,877,306 trades, 9.77% of total trades +Exchange 19: 5,098,852 trades, 7.24% of total trades +Exchange 10: 4,006,611 trades, 5.69% of total trades +Exchange 8: 3,686,168 trades, 5.24% of total trades +Exchange 15: 2,446,340 trades, 3.47% of total trades +Exchange 21: 2,173,744 trades, 3.09% of total trades +Exchange 7: 1,509,083 trades, 2.14% of total trades +Exchange 20: 1,296,811 trades, 1.84% of total trades +Exchange 18: 674,553 trades, 0.96% of total trades +Exchange 13: 527,767 trades, 0.75% of total trades +Exchange 2: 417,295 trades, 0.59% of total trades +Exchange 3: 393,919 trades, 0.56% of total trades +Exchange 17: 230,210 trades, 0.33% of total trades +Exchange 1: 183,010 trades, 0.26% of total trades +Exchange 9: 159,020 trades, 0.23% of total trades +Exchange 14: 1,211 trades, 0.00% of total trades +``` + +### **top-10-tickers.py** +Identifies the top 10 most traded stocks and calculates their respective percentages of the total trades. This script provides a clear view of the market's most active stocks, highlighting where the most trading activity is concentrated. + +``` +TSLA: 1,549,605 trades, 2.20% of total trades +NVDA: 788,331 trades, 1.12% of total trades +SPY: 669,762 trades, 0.95% of total trades +AMD: 587,140 trades, 0.83% of total trades +MDIA: 561,698 trades, 0.80% of total trades +AAPL: 540,870 trades, 0.77% of total trades +SOXL: 533,511 trades, 0.76% of total trades +QQQ: 508,822 trades, 0.72% of total trades +CADL: 466,604 trades, 0.66% of total trades +AMZN: 465,526 trades, 0.66% of total trades +``` + +### **trades-histogram.py** +Creates a histogram that aggregates trades into 30-minute intervals throughout the day. This visualization helps understand the distribution of trading volume across different times, including pre-market, regular trading hours, and after-hours. + +![Treemap Visualization](./histogram.png) + +## Download the Data + +First, let's download an actual file and explore the data and see what we can learn. We start by downloading the trades for 2024-04-05 via the [File Browser](https://polygon.io/flat-files/stocks-trades/2024/04). The `us_stocks_sip/trades_v1/2024/04/2024-04-05.csv.gz` file is about 1.35GB and is in a compressed gzip format. + +``` +gunzip 2024-04-05.csv.gz +``` + +## Getting Started + +To run these scripts, you will need Python 3 and several dependencies installed, including pandas, matplotlib, seaborn, and pytz. Ensure that you have the trading data file available and modify the `file_path` variable in each script to point to your data file location. + +``` +pip install pandas matplotlib seaborn pytz +``` + +## Usage + +Each script is designed to be run independently: + +```bash +python exchange-heatmap.py +python exchanges-seen.py +python top-10-tickers.py +python trades-histogram.py +``` + +Adjust the script parameters as necessary to fit your specific analysis needs or to accommodate different datasets. \ No newline at end of file diff --git a/examples/tools/flatfiles-stock-trades/top-10-tickers.py b/examples/tools/flatfiles-stock-trades/top-10-tickers.py new file mode 100644 index 00000000..ec046e0b --- /dev/null +++ b/examples/tools/flatfiles-stock-trades/top-10-tickers.py @@ -0,0 +1,25 @@ +# Here's a Python script for analyzing the dataset, that identifies the top 10 +# most traded stocks and calculates their respective percentages of the total +# trades. 
Please see https://polygon.io/blog/insights-from-trade-level-data +# +import pandas as pd # type: ignore + +# Replace '2024-04-05.csv' with the path to your actual file +file_path = "2024-04-05.csv" + +# Load the CSV file into a pandas DataFrame +df = pd.read_csv(file_path) + +# Count the number of trades for each ticker +trade_counts = df["ticker"].value_counts() + +# Calculate the total number of trades +total_trades = trade_counts.sum() + +# Get the top 10 traded stocks +top_10_traded = trade_counts.head(10) + +# Print out the top 10 traded stocks and their percentage of total trades +for ticker, count in top_10_traded.items(): + percentage = (count / total_trades) * 100 + print(f"{ticker}: {count} trades, {percentage:.2f}% of total trades") diff --git a/examples/tools/flatfiles-stock-trades/trades-histogram.py b/examples/tools/flatfiles-stock-trades/trades-histogram.py new file mode 100644 index 00000000..6651978d --- /dev/null +++ b/examples/tools/flatfiles-stock-trades/trades-histogram.py @@ -0,0 +1,63 @@ +# To visualize these dynamics, we can use a Python script to create a histogram +# aggregating trades into 30-minute intervals, providing a clear view of when +# trading activity concentrates during the day. This analysis aims to highlight +# the distribution of trading volume across the day, from pre-market to after- +# hours. Please see https://polygon.io/blog/insights-from-trade-level-data +# +import pandas as pd # type: ignore +import matplotlib.pyplot as plt # type: ignore + +# Replace '2024-04-05.csv' with the path to your actual file +file_path = "2024-04-05.csv" + +# Load the CSV file into a pandas DataFrame +df = pd.read_csv(file_path) + +# Convert 'participant_timestamp' to datetime (assuming nanoseconds Unix timestamp) +df["participant_timestamp"] = pd.to_datetime( + df["participant_timestamp"], unit="ns", utc=True +) + +# Convert to Eastern Time (ET), accounting for both EST and EDT +df["participant_timestamp"] = df["participant_timestamp"].dt.tz_convert( + "America/New_York" +) + +# Create a new column for 30-minute time intervals, now in ET +df["time_interval"] = df["participant_timestamp"].dt.floor("30T") + +# Aggregate trades into 30-minute intervals for the entire dataset +trade_counts_per_interval = df.groupby("time_interval").size() + +# Prepare the plot +plt.figure(figsize=(15, 7)) + +# Plotting the histogram/bar chart +bars = plt.bar( + trade_counts_per_interval.index, trade_counts_per_interval.values, width=0.02 +) + +# Adding trade count annotations on each bar +for bar in bars: + height = bar.get_height() + plt.annotate( + f"{int(height)}", + xy=(bar.get_x() + bar.get_width() / 2, height), + xytext=(0, 3), # 3 points vertical offset + textcoords="offset points", + ha="center", + va="bottom", + ) + +plt.title("Trade Counts Aggregated by 30-Minute Intervals (ET)") +plt.xlabel("Time Interval (ET)") +plt.ylabel("Number of Trades") +plt.xticks(rotation=45, ha="right") + +# Ensure that every 30-minute interval is represented on the x-axis +plt.gca().set_xticklabels( + [t.strftime("%Y-%m-%d %H:%M") for t in trade_counts_per_interval.index], rotation=90 +) + +plt.tight_layout() +plt.show() diff --git a/poetry.lock b/poetry.lock index 66b3bfd2..494086e8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -44,33 +44,33 @@ pytz = ">=2015.7" [[package]] name = "black" -version = "23.12.1" +version = "24.4.2" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = 
"black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -312,38 +312,38 @@ files = [ [[package]] name = "mypy" -version = "1.9.0" +version = "1.10.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - 
{file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = 
"mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = 
"sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, ] [package.dependencies] @@ -384,62 +384,62 @@ six = ">=1.8.0" [[package]] name = "orjson" -version = "3.10.0" +version = "3.10.1" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, - {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, - {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, - {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, - {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, - {file = 
"orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, - {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, - {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, - {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, - {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, - {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, - {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, - {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, - {file = 
"orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, - {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, - {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, - {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, + {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, + {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, + {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, + {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, + {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, + {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, + {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, + {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, + {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, + {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, + {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, + {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, + {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, + {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, + {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, + {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, + {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, + {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, + {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, + {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, + {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, + {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, + {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, + {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, + {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, + {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, ] [[package]] @@ -642,22 +642,22 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-autodoc-typehints" -version = "2.0.0" +version = "2.0.1" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = false python-versions = ">=3.8" files = [ - {file = "sphinx_autodoc_typehints-2.0.0-py3-none-any.whl", hash = "sha256:12c0e161f6fe191c2cdfd8fa3caea271f5387d9fbc67ebcd6f4f1f24ce880993"}, - {file = "sphinx_autodoc_typehints-2.0.0.tar.gz", hash = "sha256:7f2cdac2e70fd9787926b6e9e541cd4ded1e838d2b46fda2a1bb0a75ec5b7f3a"}, + {file = "sphinx_autodoc_typehints-2.0.1-py3-none-any.whl", hash = "sha256:f73ae89b43a799e587e39266672c1075b2ef783aeb382d3ebed77c38a3fc0149"}, + {file = "sphinx_autodoc_typehints-2.0.1.tar.gz", hash = "sha256:60ed1e3b2c970acc0aa6e877be42d48029a9faec7378a17838716cacd8c10b12"}, ] [package.dependencies] sphinx = ">=7.1.2" [package.extras] -docs = ["furo (>=2023.9.10)"] +docs = ["furo (>=2024.1.29)"] numpy = ["nptyping (>=2.5)"] -testing = ["covdefaults (>=2.3)", 
"coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.8)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.4.2)", "diff-cover (>=8.0.3)", "pytest (>=8.0.1)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.9)"] [[package]] name = "sphinx-rtd-theme" @@ -805,13 +805,13 @@ files = [ [[package]] name = "types-setuptools" -version = "69.2.0.20240317" +version = "69.5.0.20240423" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-69.2.0.20240317.tar.gz", hash = "sha256:b607c4c48842ef3ee49dc0c7fe9c1bad75700b071e1018bb4d7e3ac492d47048"}, - {file = "types_setuptools-69.2.0.20240317-py3-none-any.whl", hash = "sha256:cf91ff7c87ab7bf0625c3f0d4d90427c9da68561f3b0feab77977aaf0bbf7531"}, + {file = "types-setuptools-69.5.0.20240423.tar.gz", hash = "sha256:a7ba908f1746c4337d13f027fa0f4a5bcad6d1d92048219ba792b3295c58586d"}, + {file = "types_setuptools-69.5.0.20240423-py3-none-any.whl", hash = "sha256:a4381e041510755a6c9210e26ad55b1629bc10237aeb9cb8b6bd24996b73db48"}, ] [[package]] @@ -962,4 +962,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "8da0244cb90aff64d2af412a331650e52939bbabafdfd0ddb4837fdcce83bf4b" +content-hash = "2cf0c53839df9409c9e91972ef3a7d08c7b98de8fcbdadb5f329d44f6b227b47" \ No newline at end of file diff --git a/polygon/rest/models/conditions.py b/polygon/rest/models/conditions.py index 3fc0d776..98baa261 100644 --- a/polygon/rest/models/conditions.py +++ b/polygon/rest/models/conditions.py @@ -47,12 +47,16 @@ class UpdateRules: @staticmethod def from_dict(d): return UpdateRules( - consolidated=None - if "consolidated" not in d - else Consolidated.from_dict(d["consolidated"]), - market_center=None - if "market_center" not in d - else MarketCenter.from_dict(d["market_center"]), + consolidated=( + None + if "consolidated" not in d + else Consolidated.from_dict(d["consolidated"]) + ), + market_center=( + None + if "market_center" not in d + else MarketCenter.from_dict(d["market_center"]) + ), ) @@ -82,11 +86,15 @@ def from_dict(d): id=d.get("id", None), legacy=d.get("legacy", None), name=d.get("name", None), - sip_mapping=None - if "sip_mapping" not in d - else SipMapping.from_dict(d["sip_mapping"]), + sip_mapping=( + None + if "sip_mapping" not in d + else SipMapping.from_dict(d["sip_mapping"]) + ), type=d.get("type", None), - update_rules=None - if "update_rules" not in d - else UpdateRules.from_dict(d["update_rules"]), + update_rules=( + None + if "update_rules" not in d + else UpdateRules.from_dict(d["update_rules"]) + ), ) diff --git a/polygon/rest/models/contracts.py b/polygon/rest/models/contracts.py index dc69f614..469779b6 100644 --- a/polygon/rest/models/contracts.py +++ b/polygon/rest/models/contracts.py @@ -32,9 +32,11 @@ class OptionsContract: @staticmethod def from_dict(d): return OptionsContract( - additional_underlyings=None - if "additional_underlyings" not in d - else [Underlying.from_dict(u) for u in d["additional_underlyings"]], + additional_underlyings=( + None + if "additional_underlyings" not in d + else [Underlying.from_dict(u) for u in d["additional_underlyings"]] + ), cfi=d.get("cfi", None), contract_type=d.get("contract_type", None), correction=d.get("correction", None), diff --git a/polygon/rest/models/financials.py b/polygon/rest/models/financials.py index 85a63e37..1a480c48 
100644 --- a/polygon/rest/models/financials.py +++ b/polygon/rest/models/financials.py @@ -74,16 +74,22 @@ class CashFlowStatement: @staticmethod def from_dict(d): return CashFlowStatement( - exchange_gains_losses=None - if "exchange_gains_losses" not in d - else ExchangeGainsLosses.from_dict(d["exchange_gains_losses"]), - net_cash_flow=None - if "net_cash_flow" not in d - else NetCashFlow.from_dict(d["net_cash_flow"]), - net_cash_flow_from_financing_activities=None - if "net_cash_flow_from_financing_activities" not in d - else NetCashFlowFromFinancingActivities.from_dict( - d["net_cash_flow_from_financing_activities"] + exchange_gains_losses=( + None + if "exchange_gains_losses" not in d + else ExchangeGainsLosses.from_dict(d["exchange_gains_losses"]) + ), + net_cash_flow=( + None + if "net_cash_flow" not in d + else NetCashFlow.from_dict(d["net_cash_flow"]) + ), + net_cash_flow_from_financing_activities=( + None + if "net_cash_flow_from_financing_activities" not in d + else NetCashFlowFromFinancingActivities.from_dict( + d["net_cash_flow_from_financing_activities"] + ) ), ) @@ -145,18 +151,24 @@ class ComprehensiveIncome: @staticmethod def from_dict(d): return ComprehensiveIncome( - comprehensive_income_loss=None - if "comprehensive_income_loss" not in d - else ComprehensiveIncomeLoss.from_dict(d["comprehensive_income_loss"]), - comprehensive_income_loss_attributable_to_parent=None - if "comprehensive_income_loss_attributable_to_parent" not in d - else ComprehensiveIncomeLossAttributableToParent.from_dict( - d["comprehensive_income_loss_attributable_to_parent"] + comprehensive_income_loss=( + None + if "comprehensive_income_loss" not in d + else ComprehensiveIncomeLoss.from_dict(d["comprehensive_income_loss"]) + ), + comprehensive_income_loss_attributable_to_parent=( + None + if "comprehensive_income_loss_attributable_to_parent" not in d + else ComprehensiveIncomeLossAttributableToParent.from_dict( + d["comprehensive_income_loss_attributable_to_parent"] + ) ), - other_comprehensive_income_loss=None - if "other_comprehensive_income_loss" not in d - else OtherComprehensiveIncomeLoss.from_dict( - d["other_comprehensive_income_loss"] + other_comprehensive_income_loss=( + None + if "other_comprehensive_income_loss" not in d + else OtherComprehensiveIncomeLoss.from_dict( + d["other_comprehensive_income_loss"] + ) ), ) @@ -248,18 +260,26 @@ class IncomeStatement: @staticmethod def from_dict(d): return IncomeStatement( - basic_earnings_per_share=None - if "basic_earnings_per_share" not in d - else BasicEarningsPerShare.from_dict(d["basic_earnings_per_share"]), - cost_of_revenue=None - if "cost_of_revenue" not in d - else CostOfRevenue.from_dict(d["cost_of_revenue"]), - gross_profit=None - if "gross_profit" not in d - else GrossProfit.from_dict(d["gross_profit"]), - operating_expenses=None - if "operating_expenses" not in d - else OperatingExpenses.from_dict(d["operating_expenses"]), + basic_earnings_per_share=( + None + if "basic_earnings_per_share" not in d + else BasicEarningsPerShare.from_dict(d["basic_earnings_per_share"]) + ), + cost_of_revenue=( + None + if "cost_of_revenue" not in d + else CostOfRevenue.from_dict(d["cost_of_revenue"]) + ), + gross_profit=( + None + if "gross_profit" not in d + else GrossProfit.from_dict(d["gross_profit"]) + ), + operating_expenses=( + None + if "operating_expenses" not in d + else OperatingExpenses.from_dict(d["operating_expenses"]) + ), revenues=None if "revenues" not in d else Revenues.from_dict(d["revenues"]), ) @@ -275,18 +295,28 @@ class Financials: 
@staticmethod def from_dict(d): return Financials( - balance_sheet=None - if "balance_sheet" not in d - else {k: DataPoint.from_dict(v) for (k, v) in d["balance_sheet"].items()}, - cash_flow_statement=None - if "cash_flow_statement" not in d - else CashFlowStatement.from_dict(d["cash_flow_statement"]), - comprehensive_income=None - if "comprehensive_income" not in d - else ComprehensiveIncome.from_dict(d["comprehensive_income"]), - income_statement=None - if "income_statement" not in d - else IncomeStatement.from_dict(d["income_statement"]), + balance_sheet=( + None + if "balance_sheet" not in d + else { + k: DataPoint.from_dict(v) for (k, v) in d["balance_sheet"].items() + } + ), + cash_flow_statement=( + None + if "cash_flow_statement" not in d + else CashFlowStatement.from_dict(d["cash_flow_statement"]) + ), + comprehensive_income=( + None + if "comprehensive_income" not in d + else ComprehensiveIncome.from_dict(d["comprehensive_income"]) + ), + income_statement=( + None + if "income_statement" not in d + else IncomeStatement.from_dict(d["income_statement"]) + ), ) @@ -311,9 +341,9 @@ def from_dict(d): company_name=d.get("company_name", None), end_date=d.get("end_date", None), filing_date=d.get("filing_date", None), - financials=None - if "financials" not in d - else Financials.from_dict(d["financials"]), + financials=( + None if "financials" not in d else Financials.from_dict(d["financials"]) + ), fiscal_period=d.get("fiscal_period", None), fiscal_year=d.get("fiscal_year", None), source_filing_file_url=d.get("source_filing_file_url", None), diff --git a/polygon/rest/models/markets.py b/polygon/rest/models/markets.py index 509caa86..4e68abd4 100644 --- a/polygon/rest/models/markets.py +++ b/polygon/rest/models/markets.py @@ -30,6 +30,7 @@ class MarketIndices: "Contains indices market status data." 
s_and_p: Optional[str] = None societe_generale: Optional[str] = None + cgi: Optional[str] = None msci: Optional[str] = None ftse_russell: Optional[str] = None mstar: Optional[str] = None @@ -73,16 +74,22 @@ class MarketStatus: def from_dict(d): return MarketStatus( after_hours=d.get("afterHours", None), - currencies=None - if "currencies" not in d - else MarketCurrencies.from_dict(d["currencies"]), + currencies=( + None + if "currencies" not in d + else MarketCurrencies.from_dict(d["currencies"]) + ), early_hours=d.get("earlyHours", None), - exchanges=None - if "exchanges" not in d - else MarketExchanges.from_dict(d["exchanges"]), - indicesGroups=None - if "indicesGroups" not in d - else MarketIndices.from_dict(d["indicesGroups"]), + exchanges=( + None + if "exchanges" not in d + else MarketExchanges.from_dict(d["exchanges"]) + ), + indicesGroups=( + None + if "indicesGroups" not in d + else MarketIndices.from_dict(d["indicesGroups"]) + ), market=d.get("market", None), server_time=d.get("serverTime", None), ) diff --git a/polygon/rest/models/snapshot.py b/polygon/rest/models/snapshot.py index d97f17c3..ceb5f7f8 100644 --- a/polygon/rest/models/snapshot.py +++ b/polygon/rest/models/snapshot.py @@ -70,9 +70,9 @@ def from_dict(d): type=d.get("type", None), ticker=d.get("ticker", None), market_status=d.get("market_status", None), - session=None - if "session" not in d - else IndicesSession.from_dict(d["session"]), + session=( + None if "session" not in d else IndicesSession.from_dict(d["session"]) + ), error=d.get("error", None), message=d.get("message", None), ) @@ -96,12 +96,12 @@ class TickerSnapshot: def from_dict(d): return TickerSnapshot( day=None if "day" not in d else Agg.from_dict(d["day"]), - last_quote=None - if "lastQuote" not in d - else LastQuote.from_dict(d["lastQuote"]), - last_trade=None - if "lastTrade" not in d - else LastTrade.from_dict(d["lastTrade"]), + last_quote=( + None if "lastQuote" not in d else LastQuote.from_dict(d["lastQuote"]) + ), + last_trade=( + None if "lastTrade" not in d else LastTrade.from_dict(d["lastTrade"]) + ), min=None if "min" not in d else MinuteSnapshot.from_dict(d["min"]), prev_day=None if "prevDay" not in d else Agg.from_dict(d["prevDay"]), ticker=d.get("ticker", None), @@ -223,24 +223,32 @@ class OptionContractSnapshot: def from_dict(d): return OptionContractSnapshot( break_even_price=d.get("break_even_price", None), - day=None - if "day" not in d - else DayOptionContractSnapshot.from_dict(d["day"]), - details=None - if "details" not in d - else OptionDetails.from_dict(d["details"]), + day=( + None + if "day" not in d + else DayOptionContractSnapshot.from_dict(d["day"]) + ), + details=( + None if "details" not in d else OptionDetails.from_dict(d["details"]) + ), greeks=None if "greeks" not in d else Greeks.from_dict(d["greeks"]), implied_volatility=d.get("implied_volatility", None), - last_quote=None - if "last_quote" not in d - else LastQuoteOptionContractSnapshot.from_dict(d["last_quote"]), - last_trade=None - if "last_trade" not in d - else LastTradeOptionContractSnapshot.from_dict(d["last_trade"]), + last_quote=( + None + if "last_quote" not in d + else LastQuoteOptionContractSnapshot.from_dict(d["last_quote"]) + ), + last_trade=( + None + if "last_trade" not in d + else LastTradeOptionContractSnapshot.from_dict(d["last_trade"]) + ), open_interest=d.get("open_interest", None), - underlying_asset=None - if "underlying_asset" not in d - else UnderlyingAsset.from_dict(d["underlying_asset"]), + underlying_asset=( + None + if "underlying_asset" 
not in d + else UnderlyingAsset.from_dict(d["underlying_asset"]) + ), fair_market_value=d.get("fmv", None), ) @@ -274,12 +282,16 @@ class SnapshotTickerFullBook: def from_dict(d): return SnapshotTickerFullBook( ticker=d.get("ticker", None), - bids=None - if "bids" not in d - else [OrderBookQuote.from_dict(o) for o in d["bids"]], - asks=None - if "asks" not in d - else [OrderBookQuote.from_dict(o) for o in d["asks"]], + bids=( + None + if "bids" not in d + else [OrderBookQuote.from_dict(o) for o in d["bids"]] + ), + asks=( + None + if "asks" not in d + else [OrderBookQuote.from_dict(o) for o in d["asks"]] + ), bid_count=d.get("bidCount", None), ask_count=d.get("askCount", None), spread=d.get("spread", None), @@ -404,22 +416,32 @@ def from_dict(d): return UniversalSnapshot( ticker=d.get("ticker", None), type=d.get("type", None), - session=None - if "session" not in d - else UniversalSnapshotSession.from_dict(d["session"]), - last_quote=None - if "last_quote" not in d - else UniversalSnapshotLastQuote.from_dict(d["last_quote"]), - last_trade=None - if "last_trade" not in d - else UniversalSnapshotLastTrade.from_dict(d["last_trade"]), + session=( + None + if "session" not in d + else UniversalSnapshotSession.from_dict(d["session"]) + ), + last_quote=( + None + if "last_quote" not in d + else UniversalSnapshotLastQuote.from_dict(d["last_quote"]) + ), + last_trade=( + None + if "last_trade" not in d + else UniversalSnapshotLastTrade.from_dict(d["last_trade"]) + ), greeks=None if "greeks" not in d else Greeks.from_dict(d["greeks"]), - underlying_asset=None - if "underlying_asset" not in d - else UniversalSnapshotUnderlyingAsset.from_dict(d["underlying_asset"]), - details=None - if "details" not in d - else UniversalSnapshotDetails.from_dict(d["details"]), + underlying_asset=( + None + if "underlying_asset" not in d + else UniversalSnapshotUnderlyingAsset.from_dict(d["underlying_asset"]) + ), + details=( + None + if "details" not in d + else UniversalSnapshotDetails.from_dict(d["details"]) + ), break_even_price=d.get("break_even_price", None), implied_volatility=d.get("implied_volatility", None), open_interest=d.get("open_interest", None), diff --git a/polygon/rest/models/tickers.py b/polygon/rest/models/tickers.py index 1c2ea947..f7ff2bed 100644 --- a/polygon/rest/models/tickers.py +++ b/polygon/rest/models/tickers.py @@ -108,9 +108,9 @@ class TickerDetails: def from_dict(d): return TickerDetails( active=d.get("active", None), - address=None - if "address" not in d - else CompanyAddress.from_dict(d["address"]), + address=( + None if "address" not in d else CompanyAddress.from_dict(d["address"]) + ), branding=None if "branding" not in d else Branding.from_dict(d["branding"]), cik=d.get("cik", None), composite_figi=d.get("composite_figi", None), @@ -169,9 +169,9 @@ def from_dict(d): image_url=d.get("image_url", None), keywords=d.get("keywords", None), published_utc=d.get("published_utc", None), - publisher=None - if "publisher" not in d - else Publisher.from_dict(d["publisher"]), + publisher=( + None if "publisher" not in d else Publisher.from_dict(d["publisher"]) + ), tickers=d.get("tickers", None), title=d.get("title", None), ) diff --git a/polygon/websocket/__init__.py b/polygon/websocket/__init__.py index b9f45a2e..77865d3f 100644 --- a/polygon/websocket/__init__.py +++ b/polygon/websocket/__init__.py @@ -127,9 +127,9 @@ async def connect( self.schedule_resub = False try: - cmsg: Union[ - List[WebSocketMessage], Union[str, bytes] - ] = await asyncio.wait_for(s.recv(), timeout=1) + cmsg: 
Union[List[WebSocketMessage], Union[str, bytes]] = ( + await asyncio.wait_for(s.recv(), timeout=1) + ) except asyncio.TimeoutError: continue diff --git a/pyproject.toml b/pyproject.toml index 198487e9..149ec167 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,17 +30,17 @@ websockets = ">=10.3,<13.0" certifi = ">=2022.5.18,<2025.0.0" [tool.poetry.dev-dependencies] -black = "^23.12.1" -mypy = "^1.9" +black = "^24.4.2" +mypy = "^1.10" types-urllib3 = "^1.26.25" Sphinx = "^7.1.2" sphinx-rtd-theme = "^2.0.0" # keep this in sync with docs/requirements.txt for readthedocs.org -sphinx-autodoc-typehints = "^2.0.0" +sphinx-autodoc-typehints = "^2.0.1" types-certifi = "^2021.10.8" -types-setuptools = "^69.2.0" +types-setuptools = "^69.5.0" pook = "^1.4.3" -orjson = "^3.10.0" +orjson = "^3.10.1" [build-system] requires = ["poetry-core>=1.0.0"]
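
One usage note for reviewers: the new `cgi` field added to `MarketIndices` above is parsed like the other optional index groups, so it is `None` whenever the API response omits `indicesGroups` or the `cgi` key. A minimal sketch, assuming the existing `RESTClient.get_market_status()` accessor and a placeholder API key:

    # Hedged sketch: reading the new MarketIndices.cgi field through the
    # existing market-status endpoint. "YOUR_API_KEY" is a placeholder.
    from polygon import RESTClient

    client = RESTClient("YOUR_API_KEY")
    status = client.get_market_status()

    # MarketStatus.from_dict only builds MarketIndices when "indicesGroups"
    # is present in the payload, so guard against None before reading cgi.
    if status.indicesGroups is not None:
        print("CGI indices status:", status.indicesGroups.cgi)

The `None` guard mirrors the `None if "indicesGroups" not in d else MarketIndices.from_dict(...)` pattern that the reformatted `from_dict` methods in this diff use for every optional nested model.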