diff --git a/CHANGELOG.md b/CHANGELOG.md
index f61b27a75c4..2adf01e7bd4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [1.9.0] - 2020-12-04
+
+### Added
+- **Utilities**: Added Kinesis, S3, CloudWatch Logs, Application Load Balancer, and SES support in `Parser`
+- **Docs**: Sidebar menu is now always expanded
+
+### Fixed
+- **Docs**: Broken link to GitHub from the homepage
+
 ## [1.8.0] - 2020-11-20
 
 ### Added
diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py
index 4be73363b0f..d9d820aede0 100644
--- a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py
@@ -1,7 +1,17 @@
 from .base import BaseEnvelope
+from .cloudwatch import CloudWatchLogsEnvelope
 from .dynamodb import DynamoDBStreamEnvelope
 from .event_bridge import EventBridgeEnvelope
+from .kinesis import KinesisDataStreamEnvelope
 from .sns import SnsEnvelope
 from .sqs import SqsEnvelope
 
-__all__ = ["DynamoDBStreamEnvelope", "EventBridgeEnvelope", "SnsEnvelope", "SqsEnvelope", "BaseEnvelope"]
+__all__ = [
+    "CloudWatchLogsEnvelope",
+    "DynamoDBStreamEnvelope",
+    "EventBridgeEnvelope",
+    "KinesisDataStreamEnvelope",
+    "SnsEnvelope",
+    "SqsEnvelope",
+    "BaseEnvelope",
+]
diff --git a/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py b/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py
new file mode 100644
index 00000000000..e4ecdd8b5ac
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py
@@ -0,0 +1,42 @@
+import logging
+from typing import Any, Dict, List, Optional, Union
+
+from ..models import CloudWatchLogsModel
+from ..types import Model
+from .base import BaseEnvelope
+
+logger = logging.getLogger(__name__)
+
+
+class CloudWatchLogsEnvelope(BaseEnvelope):
+    """CloudWatch Envelope to extract a List of log records.
+
+    The record's body parameter is a string (after being base64 decoded and decompressed),
+    though it can also be a JSON encoded string.
+    Regardless of its type it'll be parsed into a BaseModel object.
+
+    Note: The record will be parsed the same way so if model is str, all items in the list will be parsed as str and not as JSON (and vice versa)
+    """
+
+    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> List[Optional[Model]]:
+        """Parses records found with model provided
+
+        Parameters
+        ----------
+        data : Dict
+            Lambda event to be parsed
+        model : Model
+            Data model provided to parse after extracting data using envelope
+
+        Returns
+        -------
+        List
+            List of records parsed with model provided
+        """
+        logger.debug(f"Parsing incoming data with CloudWatch Logs model {CloudWatchLogsModel}")
+        parsed_envelope = CloudWatchLogsModel.parse_obj(data)
+        logger.debug(f"Parsing CloudWatch records in `message` with {model}")
+        output = []
+        for record in parsed_envelope.awslogs.decoded_data.logEvents:
+            output.append(self._parse(data=record.message, model=model))
+        return output
diff --git a/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py b/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py
new file mode 100644
index 00000000000..97ad7bffec7
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py
@@ -0,0 +1,43 @@
+import logging
+from typing import Any, Dict, List, Optional, Union
+
+from ..models import KinesisDataStreamModel
+from ..types import Model
+from .base import BaseEnvelope
+
+logger = logging.getLogger(__name__)
+
+
+class KinesisDataStreamEnvelope(BaseEnvelope):
+    """Kinesis Data Stream Envelope to extract array of Records
+
+    The record's data parameter is a base64 encoded string which is parsed into a bytes array,
+    though it can also be a JSON encoded string.
+    Regardless of its type it'll be parsed into a BaseModel object.
+
+    Note: Records will be parsed the same way so if model is str,
+    all items in the list will be parsed as str and not as JSON (and vice versa)
+    """
+
+    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> List[Optional[Model]]:
+        """Parses records found with model provided
+
+        Parameters
+        ----------
+        data : Dict
+            Lambda event to be parsed
+        model : Model
+            Data model provided to parse after extracting data using envelope
+
+        Returns
+        -------
+        List
+            List of records parsed with model provided
+        """
+        logger.debug(f"Parsing incoming data with Kinesis model {KinesisDataStreamModel}")
+        parsed_envelope: KinesisDataStreamModel = KinesisDataStreamModel.parse_obj(data)
+        output = []
+        logger.debug(f"Parsing Kinesis records in `data` with {model}")
+        for record in parsed_envelope.Records:
+            output.append(self._parse(data=record.kinesis.data.decode("utf-8"), model=model))
+        return output
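Editor's note: for context on how these two envelopes are consumed, here is a minimal usage sketch (not part of this patch) against the public `event_parser` API introduced in earlier releases. The `Order` model and handler are hypothetical; the imports match what this PR exposes:

```python
from typing import List

from aws_lambda_powertools.utilities.parser import BaseModel, envelopes, event_parser
from aws_lambda_powertools.utilities.typing import LambdaContext


class Order(BaseModel):  # hypothetical business model carried in each record
    order_id: str
    amount: float


# The envelope base64-decodes each Kinesis record's `data` payload,
# then parses it into Order; the handler receives the resulting list.
@event_parser(model=Order, envelope=envelopes.KinesisDataStreamEnvelope)
def handler(event: List[Order], context: LambdaContext):
    for order in event:
        print(order.order_id)
```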
"CloudWatchLogsDecode", + "CloudWatchLogsLogEvent", + "CloudWatchLogsModel", + "AlbModel", + "AlbRequestContext", + "AlbRequestContextData", "DynamoDBStreamModel", "EventBridgeModel", "DynamoDBStreamChangedRecordModel", "DynamoDBStreamRecordModel", + "KinesisDataStreamModel", + "KinesisDataStreamRecord", + "KinesisDataStreamRecordPayload", + "S3Model", + "S3RecordModel", + "SesModel", + "SesRecordModel", "SnsModel", "SnsNotificationModel", "SnsRecordModel", diff --git a/aws_lambda_powertools/utilities/parser/models/alb.py b/aws_lambda_powertools/utilities/parser/models/alb.py new file mode 100644 index 00000000000..d4ea5fde2a1 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/alb.py @@ -0,0 +1,21 @@ +from typing import Dict + +from pydantic import BaseModel + + +class AlbRequestContextData(BaseModel): + targetGroupArn: str + + +class AlbRequestContext(BaseModel): + elb: AlbRequestContextData + + +class AlbModel(BaseModel): + httpMethod: str + path: str + body: str + isBase64Encoded: bool + headers: Dict[str, str] + queryStringParameters: Dict[str, str] + requestContext: AlbRequestContext diff --git a/aws_lambda_powertools/utilities/parser/models/cloudwatch.py b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py new file mode 100644 index 00000000000..26eeef5b56f --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/cloudwatch.py @@ -0,0 +1,44 @@ +import base64 +import json +import logging +import zlib +from datetime import datetime +from typing import List + +from pydantic import BaseModel, Field, validator + +logger = logging.getLogger(__name__) + + +class CloudWatchLogsLogEvent(BaseModel): + id: str # noqa AA03 VNE003 + timestamp: datetime + message: str + + +class CloudWatchLogsDecode(BaseModel): + messageType: str + owner: str + logGroup: str + logStream: str + subscriptionFilters: List[str] + logEvents: List[CloudWatchLogsLogEvent] + + +class CloudWatchLogsData(BaseModel): + decoded_data: CloudWatchLogsDecode = Field(None, alias="data") + + @validator("decoded_data", pre=True) + def prepare_data(cls, value): + try: + logger.debug("Decoding base64 cloudwatch log data before parsing") + payload = base64.b64decode(value) + logger.debug("Decompressing cloudwatch log data before parsing") + uncompressed = zlib.decompress(payload, zlib.MAX_WBITS | 32) + return json.loads(uncompressed.decode("utf-8")) + except Exception: + raise ValueError("unable to decompress data") + + +class CloudWatchLogsModel(BaseModel): + awslogs: CloudWatchLogsData diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis.py b/aws_lambda_powertools/utilities/parser/models/kinesis.py new file mode 100644 index 00000000000..d2852e9f4a8 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/kinesis.py @@ -0,0 +1,41 @@ +import base64 +import logging +from binascii import Error as BinAsciiError +from typing import List + +from pydantic import BaseModel, validator +from pydantic.types import PositiveInt +from typing_extensions import Literal + +logger = logging.getLogger(__name__) + + +class KinesisDataStreamRecordPayload(BaseModel): + kinesisSchemaVersion: str + partitionKey: str + sequenceNumber: PositiveInt + data: bytes # base64 encoded str is parsed into bytes + approximateArrivalTimestamp: float + + @validator("data", pre=True) + def data_base64_decode(cls, value): + try: + logger.debug("Decoding base64 Kinesis data record before parsing") + return base64.b64decode(value) + except (BinAsciiError, TypeError): + raise ValueError("base64 decode failed") + + +class 
diff --git a/aws_lambda_powertools/utilities/parser/models/kinesis.py b/aws_lambda_powertools/utilities/parser/models/kinesis.py
new file mode 100644
index 00000000000..d2852e9f4a8
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/models/kinesis.py
@@ -0,0 +1,41 @@
+import base64
+import logging
+from binascii import Error as BinAsciiError
+from typing import List
+
+from pydantic import BaseModel, validator
+from pydantic.types import PositiveInt
+from typing_extensions import Literal
+
+logger = logging.getLogger(__name__)
+
+
+class KinesisDataStreamRecordPayload(BaseModel):
+    kinesisSchemaVersion: str
+    partitionKey: str
+    sequenceNumber: PositiveInt
+    data: bytes  # base64 encoded str is parsed into bytes
+    approximateArrivalTimestamp: float
+
+    @validator("data", pre=True)
+    def data_base64_decode(cls, value):
+        try:
+            logger.debug("Decoding base64 Kinesis data record before parsing")
+            return base64.b64decode(value)
+        except (BinAsciiError, TypeError):
+            raise ValueError("base64 decode failed")
+
+
+class KinesisDataStreamRecord(BaseModel):
+    eventSource: Literal["aws:kinesis"]
+    eventVersion: str
+    eventID: str
+    eventName: Literal["aws:kinesis:record"]
+    invokeIdentityArn: str
+    awsRegion: str
+    eventSourceARN: str
+    kinesis: KinesisDataStreamRecordPayload
+
+
+class KinesisDataStreamModel(BaseModel):
+    Records: List[KinesisDataStreamRecord]
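Editor's note: analogously for Kinesis, `data` arrives base64 encoded and the validator yields raw bytes. A sketch with a synthetic single-record event, not part of the patch; identifiers and ARNs are placeholders:

```python
import base64

from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamModel

event = {
    "Records": [
        {
            "eventSource": "aws:kinesis",
            "eventVersion": "1.0",
            "eventID": "shardId-000000000000:49590338271490256608559692538361571095921575989136588898",
            "eventName": "aws:kinesis:record",
            "invokeIdentityArn": "arn:aws:iam::123456789012:role/lambda-kinesis-role",
            "awsRegion": "us-east-2",
            "eventSourceARN": "arn:aws:kinesis:us-east-2:123456789012:stream/lambda-stream",
            "kinesis": {
                "kinesisSchemaVersion": "1.0",
                "partitionKey": "1",
                "sequenceNumber": "49590338271490256608559692538361571095921575989136588898",
                "data": base64.b64encode(b"Hello, this is a test."),
                "approximateArrivalTimestamp": 1607497475.0,
            },
        }
    ]
}

parsed = KinesisDataStreamModel.parse_obj(event)
assert parsed.Records[0].kinesis.data == b"Hello, this is a test."
```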
diff --git a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py
new file mode 100644
index 00000000000..14ea250b35b
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/models/s3.py
@@ -0,0 +1,72 @@
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+from pydantic.fields import Field
+from pydantic.networks import IPvAnyNetwork
+from pydantic.types import PositiveInt
+from typing_extensions import Literal
+
+
+class S3EventRecordGlacierRestoreEventData(BaseModel):
+    lifecycleRestorationExpiryTime: datetime
+    lifecycleRestoreStorageClass: str
+
+
+class S3EventRecordGlacierEventData(BaseModel):
+    restoreEventData: S3EventRecordGlacierRestoreEventData
+
+
+class S3Identity(BaseModel):
+    principalId: str
+
+
+class S3RequestParameters(BaseModel):
+    sourceIPAddress: IPvAnyNetwork
+
+
+class S3ResponseElements(BaseModel):
+    x_amz_request_id: str = Field(None, alias="x-amz-request-id")
+    x_amz_id_2: str = Field(None, alias="x-amz-id-2")
+
+
+class S3OwnerIdentify(BaseModel):
+    principalId: str
+
+
+class S3Bucket(BaseModel):
+    name: str
+    ownerIdentity: S3OwnerIdentify
+    arn: str
+
+
+class S3Object(BaseModel):
+    key: str
+    size: PositiveInt
+    eTag: str
+    sequencer: str
+    versionId: Optional[str]
+
+
+class S3Message(BaseModel):
+    s3SchemaVersion: str
+    configurationId: str
+    bucket: S3Bucket
+    object: S3Object  # noqa: A003,VNE003
+
+
+class S3RecordModel(BaseModel):
+    eventVersion: str
+    eventSource: Literal["aws:s3"]
+    awsRegion: str
+    eventTime: datetime
+    eventName: str
+    userIdentity: S3Identity
+    requestParameters: S3RequestParameters
+    responseElements: S3ResponseElements
+    s3: S3Message
+    glacierEventData: Optional[S3EventRecordGlacierEventData]
+
+
+class S3Model(BaseModel):
+    Records: List[S3RecordModel]
diff --git a/aws_lambda_powertools/utilities/parser/models/ses.py b/aws_lambda_powertools/utilities/parser/models/ses.py
new file mode 100644
index 00000000000..c82ae03a6c6
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/models/ses.py
@@ -0,0 +1,71 @@
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+from pydantic.networks import EmailStr
+from pydantic.types import PositiveInt
+from typing_extensions import Literal
+
+
+class SesReceiptVerdict(BaseModel):
+    status: Literal["PASS", "FAIL", "GRAY", "PROCESSING_FAILED"]
+
+
+class SesReceiptAction(BaseModel):
+    type: Literal["Lambda"]  # noqa A003,VNE003
+    invocationType: Literal["Event"]
+    functionArn: str
+
+
+class SesReceipt(BaseModel):
+    timestamp: datetime
+    processingTimeMillis: PositiveInt
+    recipients: List[EmailStr]
+    spamVerdict: SesReceiptVerdict
+    virusVerdict: SesReceiptVerdict
+    spfVerdict: SesReceiptVerdict
+    dmarcVerdict: SesReceiptVerdict
+    action: SesReceiptAction
+
+
+class SesMailHeaders(BaseModel):
+    name: str
+    value: str
+
+
+class SesMailCommonHeaders(BaseModel):
+    header_from: List[str] = Field(None, alias="from")
+    to: List[str]
+    cc: Optional[List[str]]
+    bcc: Optional[List[str]]
+    sender: Optional[List[str]]
+    reply_to: Optional[List[str]] = Field(None, alias="reply-to")
+    returnPath: EmailStr
+    messageId: str
+    date: str
+    subject: str
+
+
+class SesMail(BaseModel):
+    timestamp: datetime
+    source: EmailStr
+    messageId: str
+    destination: List[EmailStr]
+    headersTruncated: bool
+    headers: List[SesMailHeaders]
+    commonHeaders: SesMailCommonHeaders
+
+
+class SesMessage(BaseModel):
+    mail: SesMail
+    receipt: SesReceipt
+
+
+class SesRecordModel(BaseModel):
+    eventSource: Literal["aws:ses"]
+    eventVersion: str
+    ses: SesMessage
+
+
+class SesModel(BaseModel):
+    Records: List[SesRecordModel]
diff --git a/docs/content/core/logger.mdx b/docs/content/core/logger.mdx
index 43ef93f8a85..baabd9c2faf 100644
--- a/docs/content/core/logger.mdx
+++ b/docs/content/core/logger.mdx
@@ -353,6 +353,8 @@ logger = Logger(stream=stdout, log_record_order=["message"]) # highlight-line
 logger = Logger(stream=stdout, log_record_order=["level","location","message","timestamp"]) # highlight-line
 ```
 
+Some keys cannot be suppressed in the Log records: `sampling_rate` is part of the specification and cannot be suppressed; `xray_trace_id` is suppressed automatically if X-Ray is not enabled in the Lambda function, and added automatically if it is.
+
 ### Logging exceptions
 
 When logging exceptions, Logger will add a new key named `exception`, and will serialize the full traceback as a string.
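Editor's note: the suppression behaviour above is easier to see in action. A hedged sketch, separate from the patch, assuming only the documented `Logger` keyword arguments shown in the surrounding docs; the `service` name is arbitrary:

```python
from io import StringIO

from aws_lambda_powertools import Logger

stream = StringIO()
# Even though log_record_order only lists "message", sampling_rate is part of
# the logging specification and is still emitted; xray_trace_id appears only
# when X-Ray is enabled for the function.
logger = Logger(service="payment", stream=stream, log_record_order=["message"])
logger.info("hello world")
print(stream.getvalue())
```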
diff --git a/docs/content/utilities/parser.mdx b/docs/content/utilities/parser.mdx
index 0c571bc257a..9b7457ef21a 100644
--- a/docs/content/utilities/parser.mdx
+++ b/docs/content/utilities/parser.mdx
@@ -156,6 +156,10 @@ Model name | Description
 **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams
 **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge
 **SqsModel** | Lambda Event Source payload for Amazon SQS
+**AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer
+**CloudWatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs
+**S3Model** | Lambda Event Source payload for Amazon S3
+**KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams
 
 You can extend them to include your own models, and yet have all other known fields parsed along the way.
 
@@ -292,6 +296,8 @@ Envelope name | Behaviour | Return
 **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`. <br/> 2. Parses records in `NewImage` and `OldImage` keys using your model. <br/> 3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]`
 **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`. <br/> 2. Parses `detail` key using your model and returns it. | `Model`
 **SqsEnvelope** | 1. Parses data using `SqsModel`. <br/> 2. Parses records in `body` key using your model and return them in a list. | `List[Model]`
+**CloudWatchLogsEnvelope** | 1. Parses data using `CloudWatchLogsModel` which will base64 decode and decompress it. <br/> 2. Parses records in `message` key using your model and return them in a list. | `List[Model]`
+**KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it. <br/> 2. Parses records in `Records` key using your model and returns them in a list. | `List[Model]`
 
 ### Bringing your own envelope
 
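Editor's note: the "Bringing your own envelope" section referenced above is elided from this hunk, so for quick reference here is the minimal shape, as a sketch rather than the documented example: a `BaseEnvelope` subclass whose `parse` pulls your records out and runs each through the same `self._parse` helper the new envelopes use. The `custom_key` below is hypothetical:

```python
from typing import Any, Dict, List, Optional, Union

from aws_lambda_powertools.utilities.parser.envelopes import BaseEnvelope
from aws_lambda_powertools.utilities.parser.types import Model


class CustomKeyEnvelope(BaseEnvelope):
    """Extracts a list of records under a hypothetical `custom_key` and parses each with your model."""

    def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> List[Optional[Model]]:
        records = (data or {}).get("custom_key", [])
        return [self._parse(data=record, model=model) for record in records]
```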
diff --git a/docs/src/gatsby-theme-apollo-docs/components/multi-code-block.js b/docs/src/gatsby-theme-apollo-docs/components/multi-code-block.js
new file mode 100644
index 00000000000..c9346ebb52e
--- /dev/null
+++ b/docs/src/gatsby-theme-apollo-docs/components/multi-code-block.js
@@ -0,0 +1,127 @@
+import PropTypes from 'prop-types';
+import React, {createContext, useContext, useMemo} from 'react';
+import styled from '@emotion/styled';
+import {trackCustomEvent} from 'gatsby-plugin-google-analytics';
+
+export const GA_EVENT_CATEGORY_CODE_BLOCK = 'Code Block';
+export const MultiCodeBlockContext = createContext({});
+export const SelectedLanguageContext = createContext();
+
+const Container = styled.div({
+  position: 'relative'
+});
+
+const langLabels = {
+  js: 'JavaScript',
+  ts: 'TypeScript',
+  'hooks-js': 'Hooks (JS)',
+  'hooks-ts': 'Hooks (TS)'
+};
+
+function getUnifiedLang(language) {
+  switch (language) {
+    case 'js':
+    case 'jsx':
+    case 'javascript':
+      return 'js';
+    case 'ts':
+    case 'tsx':
+    case 'typescript':
+      return 'ts';
+    default:
+      return language;
+  }
+}
+
+function getLang(child) {
+  return getUnifiedLang(child.props['data-language']);
+}
+
+export function MultiCodeBlock(props) {
+  const {codeBlocks, titles} = useMemo(() => {
+    const defaultState = {
+      codeBlocks: {},
+      titles: {}
+    };
+
+    if (!Array.isArray(props.children)) {
+      return defaultState;
+    }
+
+    return props.children.reduce((acc, child, index, array) => {
+      const lang = getLang(child);
+      if (lang) {
+        return {
+          ...acc,
+          codeBlocks: {
+            ...acc.codeBlocks,
+            [lang]: child
+          }
+        };
+      }
+
+      if (child.props.className === 'gatsby-code-title') {
+        const nextNode = array[index + 1];
+        const title = child.props.children;
+        const lang = getLang(nextNode);
+        if (nextNode && title && lang) {
+          return {
+            ...acc,
+            titles: {
+              ...acc.titles,
+              [lang]: title
+            }
+          };
+        }
+      }
+
+      return acc;
+    }, defaultState);
+  }, [props.children]);
+
+  const languages = useMemo(() => Object.keys(codeBlocks), [codeBlocks]);
+  const [selectedLanguage, setSelectedLanguage] = useContext(
+    SelectedLanguageContext
+  );
+
+  if (!languages.length) {
+    return props.children;
+  }
+
+  function handleLanguageChange(language) {
+    setSelectedLanguage(language);
+    trackCustomEvent({
+      category: GA_EVENT_CATEGORY_CODE_BLOCK,
+      action: 'Change language',
+      label: language
+    });
+  }
+
+  const defaultLanguage = languages[0];
+  const renderedLanguage =
+    selectedLanguage in codeBlocks ? selectedLanguage : defaultLanguage;
+
+  return (
+    <Container>
+      <MultiCodeBlockContext.Provider
+        value={{
+          selectedLanguage: renderedLanguage,
+          languages: languages.map(lang => ({
+            lang,
+            label:
+              // try to find a label or capitalize the provided lang
+              langLabels[lang] || lang.charAt(0).toUpperCase() + lang.slice(1)
+          })),
+          onLanguageChange: handleLanguageChange
+        }}
+      >
+        {titles[renderedLanguage]}
+        {codeBlocks[renderedLanguage]}
+      </MultiCodeBlockContext.Provider>
+    </Container>
+  );
+}
+
+MultiCodeBlock.propTypes = {
+  children: PropTypes.node.isRequired
+};
diff --git a/docs/src/gatsby-theme-apollo-docs/components/page-content.js b/docs/src/gatsby-theme-apollo-docs/components/page-content.js
index 3c752c4782f..30c26c00752 100644
--- a/docs/src/gatsby-theme-apollo-docs/components/page-content.js
+++ b/docs/src/gatsby-theme-apollo-docs/components/page-content.js
@@ -179,7 +179,10 @@ export default function PageContent(props) {
     );
   });
 
-  const githubUrl = props.githubUrl.replace("master", "master/docs")
+  const githubUrl = props.githubUrl.replace("tree/", "blob/")
+    .replace("/content/", "/docs/content/")
+  const sourceUrl = /.+?(?=tree)/.exec(props.githubUrl)
+
   const editLink = githubUrl && (
     <AsideLink href={githubUrl}>
       Edit on GitHub
@@ -208,6 +211,9 @@
         />
       )}
       {editLink}
+      <AsideLink href={sourceUrl}>
+        Source code
+      </AsideLink>
     </Aside>
   );
 }
@@ -216,11 +222,11 @@
 PageContent.propTypes = {
   children: PropTypes.node.isRequired,
   pathname: PropTypes.string.isRequired,
-  githubUrl: PropTypes.string,
+  githubUrl: PropTypes.string.isRequired,
   pages: PropTypes.array.isRequired,
   hash: PropTypes.string.isRequired,
   title: PropTypes.string.isRequired,
-  graphManagerUrl: PropTypes.string.isRequired,
+  graphManagerUrl: PropTypes.string,
   headings: PropTypes.array.isRequired,
   spectrumUrl: PropTypes.string
 };
diff --git a/docs/src/gatsby-theme-apollo-docs/components/page-layout.js b/docs/src/gatsby-theme-apollo-docs/components/page-layout.js
new file mode 100644
index 00000000000..7c97481ab7f
--- /dev/null
+++ b/docs/src/gatsby-theme-apollo-docs/components/page-layout.js
@@ -0,0 +1,312 @@
+import '../prism.less';
+import 'prismjs/plugins/line-numbers/prism-line-numbers.css';
+import DocsetSwitcher from './docset-switcher';
+import Header from './header';
+import HeaderButton from './header-button';
+import PropTypes from 'prop-types';
+import React, {createContext, useMemo, useRef, useState} from 'react';
+import Search from './search';
+import styled from '@emotion/styled';
+import useLocalStorage from 'react-use/lib/useLocalStorage';
+import {Button} from '@apollo/space-kit/Button';
+import {
+  FlexWrapper,
+  Layout,
+  MenuButton,
+  Sidebar,
+  SidebarNav,
+  breakpoints,
+  colors,
+  useResponsiveSidebar
+} from 'gatsby-theme-apollo-core';
+import {Helmet} from 'react-helmet';
+import {IconLayoutModule} from '@apollo/space-kit/icons/IconLayoutModule';
+import {Link, graphql, navigate, useStaticQuery} from 'gatsby';
+import {MobileLogo} from './mobile-logo';
+import {Select} from './select';
+import {SelectedLanguageContext} from './multi-code-block';
+import {getSpectrumUrl, getVersionBasePath} from '../utils';
+import {groupBy} from 'lodash';
+import {size} from 'polished';
+import {trackCustomEvent} from 'gatsby-plugin-google-analytics';
+
+const Main = styled.main({
+  flexGrow: 1
+});
+
+const ButtonWrapper = styled.div({
+  flexGrow: 1
+});
+
+const StyledButton = styled(Button)({
+  width: '100%',
+  ':not(:hover)': {
+    backgroundColor: colors.background
+  }
+});
+
+const StyledIcon = styled(IconLayoutModule)(size(16), {
+  marginLeft: 'auto'
+});
+
+const MobileNav = styled.div({
+  display: 'none',
+  [breakpoints.md]: {
+    display: 'flex',
+    alignItems: 'center',
+    marginRight: 32,
+    color: colors.text1
+  }
+});
+
+const HeaderInner = styled.span({
+  display: 'flex',
+  alignItems: 'center',
+  justifyContent: 'space-between',
+  marginBottom: 32
+});
+
+const Eyebrow = styled.div({
+  flexShrink: 0,
+  padding: '8px 56px',
+  backgroundColor: colors.background,
+  color: colors.primary,
+  fontSize: 14,
+  position: 'sticky',
+  top: 0,
+  a: {
+    color: 'inherit',
+    fontWeight: 600
+  },
+  [breakpoints.md]: {
+    padding: '8px 24px'
+  }
+});
+
+function getVersionLabel(version) {
+  return `v${version}`;
+}
+
+const GA_EVENT_CATEGORY_SIDEBAR = 'Sidebar';
+
+function handleToggleAll(expanded) {
+  trackCustomEvent({
+    category: GA_EVENT_CATEGORY_SIDEBAR,
+    action: 'Toggle all',
+    label: expanded ? 'expand' : 'collapse'
+  });
+}
+
+function handleToggleCategory(label, expanded) {
+  trackCustomEvent({
+    category: GA_EVENT_CATEGORY_SIDEBAR,
+    action: 'Toggle category',
+    label,
+    value: Number(expanded)
+  });
+}
+
+export const NavItemsContext = createContext();
+
+export default function PageLayout(props) {
+  const data = useStaticQuery(
+    graphql`
+      {
+        site {
+          siteMetadata {
+            title
+            siteName
+          }
+        }
+      }
+    `
+  );
+
+  const {
+    sidebarRef,
+    openSidebar,
+    sidebarOpen,
+    handleWrapperClick,
+    handleSidebarNavLinkClick
+  } = useResponsiveSidebar();
+
+  const buttonRef = useRef(null);
+  const [menuOpen, setMenuOpen] = useState(false);
+  const selectedLanguageState = useLocalStorage('docs-lang');
+
+  function openMenu() {
+    setMenuOpen(true);
+  }
+
+  function closeMenu() {
+    setMenuOpen(false);
+  }
+
+  const {pathname} = props.location;
+  const {siteName, title} = data.site.siteMetadata;
+  const {
+    subtitle,
+    sidebarContents,
+    versions,
+    versionDifference,
+    versionBasePath,
+    defaultVersion
+  } = props.pageContext;
+  const {
+    spectrumHandle,
+    twitterHandle,
+    youtubeUrl,
+    navConfig = {},
+    footerNavConfig,
+    logoLink,
+    algoliaApiKey,
+    algoliaIndexName,
+    menuTitle
+  } = props.pluginOptions;
+
+  const {navItems, navCategories} = useMemo(() => {
+    const navItems = Object.entries(navConfig).map(([title, navItem]) => ({
+      ...navItem,
+      title
+    }));
+    return {
+      navItems,
+      navCategories: Object.entries(groupBy(navItems, 'category'))
+    };
+  }, [navConfig]);
+
+  const hasNavItems = navItems.length > 0;
+  const sidebarTitle = (
+    <span className="title-sidebar">{subtitle || siteName}</span>
+  );
+
+  return (
+    <Layout>
+      <Helmet
+        titleTemplate={['%s', subtitle, title].filter(Boolean).join(' - ')}
+      />
+      <FlexWrapper onClick={handleWrapperClick}>
+        <Sidebar
+          responsive
+          className="sidebar"
+          ref={sidebarRef}
+          open={sidebarOpen}
+          title={siteName}
+          logoLink={logoLink}
+        >
+          <HeaderInner>
+            {hasNavItems ? (
+              <ButtonWrapper ref={buttonRef}>
+                <StyledButton
+                  feel="flat"
+                  color={colors.primary}
+                  size="small"
+                  onClick={openMenu}
+                  style={{display: 'flex'}}
+                >
+                  {sidebarTitle}
+                  <StyledIcon />
+                </StyledButton>
+              </ButtonWrapper>
+            ) : (
+              sidebarTitle
+            )}
+            {versions && versions.length > 0 && (
+              <Select