diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..9c28f47 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,47 @@ +{ + "extends": [ + "airbnb-base", + "prettier" + ], + "rules": { + "no-console": [ + "error", + { + "allow": [ + "warn", + "error", + "info" + ] + } + ], + "camelcase": 0 + }, + "overrides": [ + { + "files": [ + "**/*.test.js" + ], + "env": { + "jest": true, + "jasmine": true + }, + "plugins": [ + "jest", + "chai-expect", + "chai-friendly" + ], + "rules": { + "no-unused-expressions": 0, + "chai-friendly/no-unused-expressions": 2, + "chai-expect/missing-assertion": 2, + "chai-expect/terminating-properties": 2, + "chai-expect/no-inner-compare": 2, + "jest/no-disabled-tests": "warn", + "jest/no-focused-tests": "error", + "jest/no-identical-title": "error", + "jest/prefer-to-have-length": "warn", + "jest/valid-expect": 0 + } + } + ] +} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4d8bd51 --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +node_modules +coverage +pacts +dist-web +dist-lambda +logs + +config.sh +*.key +*.key.pub +serverless-output.yml +package-lock.json diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000..544138b --- /dev/null +++ b/.prettierrc @@ -0,0 +1,3 @@ +{ + "singleQuote": true +} diff --git a/.snyk b/.snyk new file mode 100644 index 0000000..7fc3100 --- /dev/null +++ b/.snyk @@ -0,0 +1,13 @@ +# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. 
+version: v1.13.5 +ignore: {} +# patches apply the minimum changes required to fix a vulnerability +patch: + SNYK-JS-AXIOS-174505: + - axios: + patched: '2019-05-06T06:05:06.784Z' + SNYK-JS-HTTPSPROXYAGENT-469131: + - snyk > proxy-agent > https-proxy-agent: + patched: '2019-10-08T04:45:37.194Z' + - snyk > proxy-agent > pac-proxy-agent > https-proxy-agent: + patched: '2019-10-08T04:45:37.194Z' diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..62e7330 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,21 @@ +language: node_js +cache: + directories: + - "node_modules" +node_js: + - "node" +env: + global: + - CC_TEST_REPORTER_ID=da46c045c47b340b3044989b44d42517cd36fd17c3e21ede55c45c49c7abe420 +dist: trusty +install: + - npm install +before_script: + - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter + - chmod +x ./cc-test-reporter + - ./cc-test-reporter before-build +script: + - npm run lint + - npm run test +after_script: + - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..944f2e3 --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2018, Timothy Jones +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md index 670d48b..290ecc8 100644 --- a/README.md +++ b/README.md @@ -1 +1,306 @@ -# linkedin-cognito-openid-wrapper +# GitHub OpenID Connect Wrapper for Cognito + +Do you want to add GitHub as an OIDC (OpenID Connect) provider to an AWS Cognito User Pool? Have you run in to trouble because GitHub only provides OAuth2.0 endpoints, and doesn't support OpenID Connect? + +This project allows you to wrap your GitHub OAuth App in an OpenID Connect layer, allowing you to use it with AWS Cognito. + +Here are some questions you may immediately have: + +- **Why does Cognito not support federation with OAuth?** Because OAuth provides + no standard way of requesting user identity data. (see the [background](#background) + section below for more details). + +- **Why has no one written a shim to wrap general OAuth implementations with an + OpenID Connect layer?** Because OAuth provides no standard way of requesting + user identity data, any shim must be custom written for the particular OAuth + implementation that's wrapped. + +- **GitHub is very popular, has someone written this specific custom wrapper + before?** As far as I can tell, if it has been written, it has not been open + sourced. 
Until now! + +## Project overview + +When deployed, this project sits between Cognito and GitHub: + +![Overview](docs/overview.png) + +This allows you to use GitHub as an OpenID Identity Provider (IdP) for federation with a Cognito User Pool. + +The project implements everything needed by the [OIDC User Pool IdP authentication flow](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-oidc-flow.html) used by Cognito. + +It implements the following endpoints from the +[OpenID Connect Core Spec](https://openid.net/specs/openid-connect-core-1_0.html): + +- Authorization - used to start the authorisation process ([spec](https://openid.net/specs/openid-connect-core-1_0.html#AuthorizationEndpoint)) +- Token - used to exchange an authorisation code for an access and ID token ([spec](https://openid.net/specs/openid-connect-core-1_0.html#TokenEndpoint)) +- UserInfo - used to exchange an access token for information about the user ([spec](https://openid.net/specs/openid-connect-core-1_0.html#UserInfo)) +- jwks - used to describe the keys used to sign ID tokens ([implied by spec](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata)) + +It also implements the following [OpenID Connect Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html) endpoint: + +- Configuration - used to discover configuration of this OpenID implementation's + endpoints and capabilities. ([spec](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig)) + +Out of the box, you can deploy it as a CloudFormation stack, or run it as a web server with node. + +## Getting Started + +This project is intended to be deployed as a series of lambda functions alongside +an API Gateway. This means it's easy to use in conjunction with Cognito, and +should be cheap to host and run. + +You can also deploy it as a http server running as a node app. 
This is useful +for testing, exposing it to Cognito using something like [ngrok](https://ngrok.com/). + +### 1: Setup + +You will need to: + +- Create a Cognito User Pool ([instructions](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pool-as-user-directory.html)). +- Configure App Integration for your User Pool ([instructions](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-configuring-app-integration.html)). Note down the domain name. +- Create a GitHub OAuth App ([instructions](https://developer.github.com/apps/building-oauth-apps/creating-an-oauth-app/), with the following settings: + - Authorization callback URL: `https:///oauth2/idpresponse` + - Note down the Client ID and secret + +(If you use GitHub Enterprise, you need the API & Login URL. This is usually `https:///api/v3` and `https://`.) + +Next you need to decide if you'd like to deploy with lambda/API Gateway (follow Step 2a), or as a node server (follow Step 2b) + +### 2a: Deployment with lambda and API Gateway + +- Install the `aws` and `sam` CLIs from AWS: + + - `aws` ([install instructions](https://docs.aws.amazon.com/cli/latest/userguide/installing.html)) and configured + - `sam` ([install instructions](https://docs.aws.amazon.com/lambda/latest/dg/sam-cli-requirements.html)) + +- Run `aws configure` and set appropriate access keys etc +- Set environment variables for the OAuth App client/secret, callback url, stack name, etc: + + cp example-config.sh config.sh + vim config.sh # Or whatever your favourite editor is + +- Run `npm install` and `npm run deploy` +- Note down the DNS of the deployed API Gateway (available in the AWS console). 
+ +### 2b: Running the node server + +- Set environment variables for the OAuth App client/secret, callback url, and + port to run the server on: + + cp example-config.sh config.sh + vim config.sh # Or whatever your favourite editor is + +- Source the config file: + +``` + source config.sh +``` + +- Run `npm run start` to fire up an auto-refreshing development build of the + server (production deployment is out of scope for this repository, but you can expose it using something like [ngrok](https://ngrok.com/) for easy development and testing with Cognito). + +### 3: Finalise Cognito configuration + +- Configure the OIDC integration in AWS console for Cognito (described below, but following [these instructions](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-oidc-idp.html)). The following settings are required: + - Client ID: The GitHub Client ID above + - Authorize scope: `openid read:user user:email` + - Issuer: `https:///${Stage_Name}` or `https:///` (for the node server). + - If you have deployed the web app: Run discovery (big blue button next to Issuer). + - If you have deployed the lambda/Gateway: For some reason, Cognito is unable to + do OpenID Discovery. You will need to configure the endpoints manually. They are: + - Authorization endpoint: `https:///${Stage_Name}/authorize` + - Token endpoint: `https:///${Stage_Name}/token` + - Userinfo endpoint: `https:///${Stage_Name}/userinfo` + - JWKS uri: `https:///${Stage_Name}/.well-known/jwks.json` +- Configure the Attribute Mapping in the AWS console: + +![Attribute mapping](docs/attribute-mapping.png) + +- Ensure that your new provider is enabled under **Enabled Identity Providers** on the App Client Settings screen under App Integration. + +That's it! If you need to redeploy the lambda/API gateway solution, all you need to do is run `npm run deploy` again. + +### Logging + +This shim also supports logging with Winston. By default, all logging goes to +STDOUT. 
Beware that if you set the log level to DEBUG, then sensitive user +information may be logged. + +If you're using the node server, you can also use Splunk for logging. +Environment variables configuring splunk are commented in `example-config.sh`. The Splunk HEC URL and access +token are required, and you can also set the source, sourcetype & index for all logged events. + +## The details + +### Background + +There are two important concepts for identity federation: + +- Authentication: Is this user who they say they are? +- Authorisation: Is the user allowed to use a particular resource? + +#### OAuth + +[OAuth2.0](https://tools.ietf.org/html/rfc6749) is an _authorisation_ framework, +used for determining whether a user is allowed to access a resource (like +private user profile data). In order to do this, it's usually necessary for +_authentication_ of the user to happen before authorisation. + +This means that most OAuth2.0 implementations (including GitHub) [include authentication in a step of the authorisation process](https://medium.com/@darutk/new-architecture-of-oauth-2-0-and-openid-connect-implementation-18f408f9338d). +For all practical purposes, most OAuth2.0 implementations (including GitHub)can +be thought of as providing both authorisation and authentication. + +Below is a diagram of the authentication code flow for OAuth: + +![OAuth flow](docs/oauth-flow.svg) + +(The solid lines are http requests from the browser, and then dashed lines are +back-channel requests). + +As you can see in the diagram, a drawback of OAuth is that it provides no +standard way of finding out user data such as name, avatar picture, email +address(es), etc. This is one of the problems that is solved by OpenID. + +#### OpenID Connect + +To provide a standard way of learning about users, +[OpenID Connect](https://openid.net/connect/) is an identity layer built on top +of OAuth2.0. 
It extends the `token` endpoint from OAuth to include an ID Token +alongside the access token, and provides a `userinfo` endpoint, where information +describing the authenticated user can be accessed. + +![OpenID Connect](docs/openid-flow.svg) + +OpenID Connect describes a standard way to get user data, and is therefore a good choice +for identity federation. + +### A custom shim for GitHub + +This project provides the OpenID shim to wrap GitHub's OAuth implementation, by combining the two +diagrams: + +![GitHub Shim](docs/shim.svg) + +The userinfo request is handled by joining two GitHub API requests: `/user` and `/user/emails`. + +You can compare this workflow to the documented Cognito workflow [here](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-oidc-flow.html) + +#### Code layout + + ├── scripts # Bash scripts for deployment and key generation + ├── src # Source code + │ ├── __mocks__ # Mock private key data for tests + │ └── connectors # Common code for both lambda and web handlers + │ ├── lambda # AWS lambda handlers + │ │ └── util # Helper functions for lambdas + │ └── web # Express.js webserver (useful for local deployment) + ├── docs # Documentation images + ├── config # Configuration for tests + ├── dist-web # Dist folder for web server deployment + └-- dist-lambda # Dist folder for lambda deployment + +#### npm targets + +- `build` and `build-dist`: create packages in the `dist-lambda` folder (for the lambda + deployment) and the `dist-web` folder (for the node web server). +- `test`: Run unit tests with Jest +- `lint`: Run `eslint` to check code style +- `test-dev`: Run unit tests continuously, watching the file system for changes + (useful for development) +- `deploy`: This script builds the project, then creates and deploys the + cloudformation stack with the API gateway and the endpoints as lambdas + +#### Scripts + +- `scripts/create-key.sh`: If the private key is missing, generate a new one. 
+ This is run as a preinstall script before `npm install` +- `scripts/deploy.sh`: This is the deploy part of `npm run deploy`. It uploads + the dist folder to S3, and then creates the cloudformation stack that contains + the API gateway and lambdas + +#### Tests + +Tests are provided with [Jest](https://jestjs.io/) using +[`chai`'s `expect`](http://www.chaijs.com/api/bdd/), included by a shim based on [this blog post](https://medium.com/@RubenOostinga/combining-chai-and-jest-matchers-d12d1ffd0303). + +#### Private key + +The private key used to make ID tokens is stored in `./jwtRS256.key` once +`scripts/create-key.sh` is run (either manually, or as part of `npm install`). +You may optionally replace it with your own key - if you do this, you will need +to redeploy. + +#### Missing features + +This is a near-complete implementation of [OpenID Connect Core](https://openid.net/specs/openid-connect-core-1_0.html). +However, since the focus was on enabling Cognito's [authentication flow](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-oidc-flow.html), +you may run in to some missing features if you wish to use it with a different +client. + +**Missing Connect Core Features:** + +- Private key rotation ([spec](https://openid.net/specs/openid-connect-core-1_0.html#RotateSigKeys)) +- Refresh tokens ([spec](https://openid.net/specs/openid-connect-core-1_0.html#RefreshTokens)) +- Passing request parameters as JWTs ([spec](https://openid.net/specs/openid-connect-core-1_0.html#JWTRequests)) + +If you don't know what these things are, you are probably ok to use this project. 
+ +**Missing non-core features:** + +A full OpenID implementation would also include: + +- [The Dynamic client registration spec](https://openid.net/specs/openid-connect-registration-1_0.html) +- The [OpenID Connect Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html) endpoints beyond `openid-configuration` + +**Known issues** + +See [the issue tracker](https://github.com/TimothyJones/github-cognito-openid-wrapper/issues) for an up to date list. + +## Extending + +This section contains pointers if you would like to extend this shim. + +### Using other OAuth providers + +If you want to use a provider other than GitHub, you'll need to change the contents of `userinfo` in `src/openid.js`. + +### Using a custom GitHub location + +If you're using an on-site GitHub install, you will need to change the API +endpoints used when the `github` object is initialised. + +### Including additional user information + +If you want to include custom claims based on other GitHub data, +you can extend `userinfo` in `src/openid.js`. You may need to add extra API +client calls in `src/github.js` + +## Contributing + +Contributions are welcome, especially for the missing features! Pull requests and issues are very welcome. + +## FAQ + +### How do I use this to implement Cognito logins in my app? + +Login requests from your app go directly to Cognito, rather than this shim. +This is because the shim sits only between Cognito and GitHub, not between your +app and GitHub. See the [Cognito app integration instructions](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-app-integration.html) +for more details. + +### Can I use this shim to connect to GitHub directly from another OpenID client? + +Yes. This implementation isn't complete, as it focusses exclusively on +Cognito's requirements. However, it does follow the OpenID spec, and is +complete enough to be able to use it as an OpenID connect provider. 
See the +missing features section above for one or two caveats. + +### How do I contact you to tell you that I built something cool with this code? + +If you build anything cool, ping me [@JonesTim](https://twitter.com/JonesTim) on +twitter (or open an issue if you have any problems). + +## License + +[BSD 3-Clause License](LICENSE) diff --git a/babel.config.js b/babel.config.js new file mode 100644 index 0000000..248fc0d --- /dev/null +++ b/babel.config.js @@ -0,0 +1,19 @@ +module.exports = function(api) { + const presets = [ + [ + '@babel/preset-env', + { + targets: { node: '8.10' } + } + ] + ]; + const plugins = []; + + // Cache the returned value forever and don't call this function again. + api.cache(true); + + return { + presets, + plugins + }; +}; diff --git a/config/setup-test-framework-script.js b/config/setup-test-framework-script.js new file mode 100644 index 0000000..fcd6384 --- /dev/null +++ b/config/setup-test-framework-script.js @@ -0,0 +1,35 @@ +const chai = require('chai'); +const chaiAsPromised = require('chai-as-promised'); + +// use chai as promised to get awesome promise handlers +chai.use(chaiAsPromised); + +// Allow chai to print diffs like Jest would +const chaiJestDiff = require('chai-jest-diff').default; +chai.use(chaiJestDiff()); + +// Make sure chai and jasmine ".not" play nice together +const originalNot = Object.getOwnPropertyDescriptor( + chai.Assertion.prototype, + 'not' +).get; +Object.defineProperty(chai.Assertion.prototype, 'not', { + get() { + Object.assign(this, this.assignedNot); + return originalNot.apply(this); + }, + set(newNot) { + this.assignedNot = newNot; + return newNot; + } +}); + +// Combine both jest and chai matchers on expect +const originalExpect = global.expect; + +global.expect = actual => { + const originalMatchers = originalExpect(actual); + const chaiMatchers = chai.expect(actual); + const combinedMatchers = Object.assign(chaiMatchers, originalMatchers); + return combinedMatchers; +}; diff --git 
a/docs/attribute-mapping.png b/docs/attribute-mapping.png new file mode 100644 index 0000000..06ec375 Binary files /dev/null and b/docs/attribute-mapping.png differ diff --git a/docs/oauth-flow.svg b/docs/oauth-flow.svg new file mode 100644 index 0000000..ea7e6f6 --- /dev/null +++ b/docs/oauth-flow.svg @@ -0,0 +1,7 @@ +Client->OAuth Server: Authenticate\n(to get profile) +Note right of OAuth Server: OAuth server\nmay do authorisation here +OAuth Server->Client: Authentication Code +Client-->OAuth Server: Authentication Code +Note right of OAuth Server: Token endpoint uses the code\nto get an access token +OAuth Server-->Client: Access Token +Note over Client,OAuth Server: The access token is now\n used to access custom \nendpoints on the server\n(eg to get user profile data)ClientClientOAuth ServerOAuth ServerAuthenticate(to get profile)OAuth servermay do authorisation hereAuthentication CodeAuthentication CodeToken endpoint uses the codeto get an access tokenAccess TokenThe access token is nowused to access customendpoints on the server(eg to get user profile data) \ No newline at end of file diff --git a/docs/oauth-flow.txt b/docs/oauth-flow.txt new file mode 100644 index 0000000..25e43fd --- /dev/null +++ b/docs/oauth-flow.txt @@ -0,0 +1,7 @@ +Client->OAuth Server: Authenticate\n(to get profile) +Note right of OAuth Server: OAuth server\nmay do authorisation here +OAuth Server->Client: Authentication Code +Client-->OAuth Server: Authentication Code +Note right of OAuth Server: Token endpoint uses the code\nto get an access token +OAuth Server-->Client: Access Token +Note over Client,OAuth Server: The access token is now\n used to access custom \nendpoints on the server\n(eg to get user profile data) diff --git a/docs/openid-flow.svg b/docs/openid-flow.svg new file mode 100644 index 0000000..042a908 --- /dev/null +++ b/docs/openid-flow.svg @@ -0,0 +1,8 @@ +Client->OpenID Connect: Authenticate\n(to get profile) +Note right of OpenID Connect: OpenID Connect 
server\nmay do authorisation here +OpenID Connect->Client: Authentication Code +Client-->OpenID Connect: Authentication Code +Note right of OpenID Connect: Token endpoint uses the code\nto get an access token\n and an ID token +OpenID Connect-->Client: Tokens +Client-->OpenID Connect: Access token +OpenID Connect-->Client: User informationClientClientOpenID ConnectOpenID ConnectAuthenticate(to get profile)OpenID Connect servermay do authorisation hereAuthentication CodeAuthentication CodeToken endpoint uses the codeto get an access tokenand an ID tokenTokensAccess tokenUser information \ No newline at end of file diff --git a/docs/openid-flow.txt b/docs/openid-flow.txt new file mode 100644 index 0000000..8f00626 --- /dev/null +++ b/docs/openid-flow.txt @@ -0,0 +1,8 @@ +Client->OpenID Connect: Authenticate\n(to get profile) +Note right of OpenID Connect: OpenID Connect server\nmay do authorisation here +OpenID Connect->Client: Authentication Code +Client-->OpenID Connect: Authentication Code +Note right of OpenID Connect: Token endpoint uses the code\nto get an access token\n and an ID token +OpenID Connect-->Client: Tokens +Client-->OpenID Connect: Access token +OpenID Connect-->Client: User information diff --git a/docs/overview.graphml b/docs/overview.graphml new file mode 100644 index 0000000..0ab353b --- /dev/null +++ b/docs/overview.graphml @@ -0,0 +1,180 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + github-cognito-openid-wrapper + + + + + + + + + + + Your App + + + + + + + + + + + AWS Cognito +User Pool + + + + + + + + + + + GitHub OAuth + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <?xml version="1.0" encoding="utf-8"?> +<!-- Generator: Adobe Illustrator 22.1.0, SVG Export Plug-In . 
SVG Version: 6.00 Build 0) --> +<svg version="1.2" baseProfile="tiny" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" + x="0px" y="0px" viewBox="0 0 2350 2314.8" xml:space="preserve"> +<path d="M1175,0C525.8,0,0,525.8,0,1175c0,552.2,378.9,1010.5,890.1,1139.7c-5.9-14.7-8.8-35.3-8.8-55.8v-199.8H734.4 + c-79.3,0-152.8-35.2-185.1-99.9c-38.2-70.5-44.1-179.2-141-246.8c-29.4-23.5-5.9-47,26.4-44.1c61.7,17.6,111.6,58.8,158.6,120.4 + c47,61.7,67.6,76.4,155.7,76.4c41.1,0,105.7-2.9,164.5-11.8c32.3-82.3,88.1-155.7,155.7-190.9c-393.6-47-581.6-240.9-581.6-505.3 + c0-114.6,49.9-223.3,132.2-317.3c-26.4-91.1-61.7-279.1,11.8-352.5c176.3,0,282,114.6,308.4,143.9c88.1-29.4,185.1-47,284.9-47 + c102.8,0,196.8,17.6,284.9,47c26.4-29.4,132.2-143.9,308.4-143.9c70.5,70.5,38.2,261.4,8.8,352.5c82.3,91.1,129.3,202.7,129.3,317.3 + c0,264.4-185.1,458.3-575.7,499.4c108.7,55.8,185.1,214.4,185.1,331.9V2256c0,8.8-2.9,17.6-2.9,26.4 + C2021,2123.8,2350,1689.1,2350,1175C2350,525.8,1824.2,0,1175,0L1175,0z"/> +</svg> + + <svg width="2140" height="2500" viewBox="0 0 256 299" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid"><path d="M208.752 58.061l25.771-6.636.192.283.651 155.607-.843.846-5.31.227-20.159-3.138-.302-.794V58.061M59.705 218.971l.095.007 68.027 19.767.173.133.296.236-.096 59.232-.2.252-68.295-33.178v-46.449" fill="#7A3E65"/><path d="M208.752 204.456l-80.64 19.312-40.488-9.773-27.919 4.976L128 238.878l105.405-28.537 1.118-2.18-25.771-3.705" fill="#CFB2C1"/><path d="M196.295 79.626l-.657-.749-66.904-19.44-.734.283-.672-.343L22.052 89.734l-.575.703.845.463 24.075 3.53.851-.289 80.64-19.311 40.488 9.773 27.919-4.977" fill="#512843"/><path d="M47.248 240.537l-25.771 6.221-.045-.149-1.015-155.026 1.06-1.146 25.771 3.704v146.396" fill="#C17B9E"/><path d="M82.04 180.403l45.96 5.391.345-.515.187-71.887-.532-.589-45.96 5.392v62.208" fill="#7A3E65"/><path d="M173.96 180.403L128 185.794v-72.991l45.96 5.392v62.208M196.295 79.626L128 59.72V0l68.295 
33.177v46.449" fill="#C17B9E"/><path d="M128 0L0 61.793v175.011l21.477 9.954V90.437L128 59.72V0" fill="#7A3E65"/><path d="M234.523 51.425v156.736L128 238.878v59.72l128-61.794V61.793l-21.477-10.368" fill="#C17B9E"/></svg> + + + diff --git a/docs/overview.png b/docs/overview.png new file mode 100644 index 0000000..e6d40c4 Binary files /dev/null and b/docs/overview.png differ diff --git a/docs/shim.svg b/docs/shim.svg new file mode 100644 index 0000000..a3d0856 --- /dev/null +++ b/docs/shim.svg @@ -0,0 +1,17 @@ +Cognito->OpenID Shim: Authenticate\n(to get profile) +OpenID Shim->GitHub: Authenticate +Note right of GitHub: GitHub does authorisation\n here if necessary +GitHub->OpenID Shim: Authentication Code +OpenID Shim->Cognito: Authentication Code +Cognito-->OpenID Shim: Authentication Code +OpenID Shim-->GitHub: Authentication Code +GitHub-->OpenID Shim: Access Token +Note over OpenID Shim: also generates an ID token +OpenID Shim-->Cognito: Access and ID tokens +Cognito-->OpenID Shim: Request public key +Note right of OpenID Shim: Public key is used \nby cognito to validate\n the ID token +OpenID Shim-->Cognito: Public Key +Cognito-->OpenID Shim: Access token +Note over OpenID Shim, GitHub: Various API calls to\n get user profile data +OpenID Shim-->Cognito: User information +CognitoCognitoOpenID ShimOpenID ShimGitHubGitHubAuthenticate(to get profile)AuthenticateGitHub does authorisationhere if necessaryAuthentication CodeAuthentication CodeAuthentication CodeAuthentication CodeAccess Tokenalso generates an ID tokenAccess and ID tokensRequest public keyPublic key is usedby cognito to validatethe ID tokenPublic KeyAccess tokenVarious API calls toget user profile dataUser information \ No newline at end of file diff --git a/docs/shim.txt b/docs/shim.txt new file mode 100644 index 0000000..21b3b79 --- /dev/null +++ b/docs/shim.txt @@ -0,0 +1,16 @@ +Cognito->OpenID Shim: Authenticate\n(to get profile) +OpenID Shim->GitHub: Authenticate +Note right of GitHub: GitHub 
does authorisation\n here if necessary +GitHub->OpenID Shim: Authentication Code +OpenID Shim->Cognito: Authentication Code +Cognito-->OpenID Shim: Authentication Code +OpenID Shim-->GitHub: Authentication Code +GitHub-->OpenID Shim: Access Token +Note over OpenID Shim: also generates an ID token +OpenID Shim-->Cognito: Access and ID tokens +Cognito-->OpenID Shim: Request public key +Note right of OpenID Shim: Public key is used \nby cognito to validate\n the ID token +OpenID Shim-->Cognito: Public Key +Cognito-->OpenID Shim: Access token +Note over OpenID Shim, GitHub: Various API calls to\n get user profile data +OpenID Shim-->Cognito: User information diff --git a/example-config.sh b/example-config.sh new file mode 100755 index 0000000..c5f709d --- /dev/null +++ b/example-config.sh @@ -0,0 +1,15 @@ +#!/bin/bash -eu + +# Variables always required +export LINKEDIN_CLIENT_ID=# +export LINKEDIN_CLIENT_SECRET=# +export COGNITO_REDIRECT_URI=# https:///oauth2/idpresponse + +# Variables required if deploying with API Gateway / Lambda +export BUCKET_NAME=# An S3 bucket name to use as the deployment pipeline +export STACK_NAME=# The name of the stack to create +export REGION=# AWS region to deploy the stack and bucket in +export STAGE_NAME=# Stage name to create and deploy to in API gateway + +# Variables required if deploying a node http server +export PORT=# diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..9ef0b75 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,180 @@ +// For a detailed explanation regarding each configuration property, visit: +// https://jestjs.io/docs/en/configuration.html + +module.exports = { + // All imported modules in your tests should be mocked automatically + // automock: false, + + // Stop running tests after the first failure + // bail: false, + + // Respect "browser" field in package.json when resolving modules + // browser: false, + + // The directory where Jest should store its cached dependency information + 
// cacheDirectory: "/var/folders/35/09t_zsws5d9dnb6jvrwd92rw0000gp/T/jest_dy", + + // Automatically clear mock calls and instances between every test + clearMocks: true, + + // Indicates whether the coverage information should be collected while executing the test + // collectCoverage: false, + + // An array of glob patterns indicating a set of files for which coverage information should be collected + // collectCoverageFrom: null, + + // The directory where Jest should output its coverage files + coverageDirectory: 'coverage', + + // An array of regexp pattern strings used to skip coverage collection + coveragePathIgnorePatterns: ['/node_modules/', '/config/'], + + // A list of reporter names that Jest uses when writing coverage reports + // coverageReporters: [ + // "json", + // "text", + // "lcov", + // "clover" + // ], + + // An object that configures minimum threshold enforcement for coverage results + // coverageThreshold: null, + + // Make calling deprecated APIs throw helpful error messages + // errorOnDeprecated: false, + + // Force coverage collection from ignored files usin a array of glob patterns + // forceCoverageMatch: [], + + // A path to a module which exports an async function that is triggered once before all test suites + // globalSetup: null, + + // A path to a module which exports an async function that is triggered once after all test suites + // globalTeardown: null, + + // A set of global variables that need to be available in all test environments + // globals: {}, + + // An array of directory names to be searched recursively up from the requiring module's location + // moduleDirectories: [ + // "node_modules" + // ], + + // An array of file extensions your modules use + // moduleFileExtensions: [ + // "js", + // "json", + // "jsx", + // "node" + // ], + + // A map from regular expressions to module names that allow to stub out resources with a single module + moduleNameMapper: { + '\\.key$': '/src/__mocks__/privateKeyMock.js', + 
'\\.key\\.pub$': '/src/__mocks__/publicKeyMock.js' + }, + + // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader + // modulePathIgnorePatterns: [], + + // Activates notifications for test results + // notify: false, + + // An enum that specifies notification mode. Requires { notify: true } + // notifyMode: "always", + + // A preset that is used as a base for Jest's configuration + // preset: null, + + // Run tests from one or more projects + // projects: null, + + // Use this configuration option to add custom reporters to Jest + // reporters: undefined, + + // Automatically reset mock state between every test + // resetMocks: false, + + // Reset the module registry before running each individual test + // resetModules: false, + + // A path to a custom resolver + // resolver: null, + + // Automatically restore mock state between every test + // restoreMocks: false, + + // The root directory that Jest should scan for tests and modules within + // rootDir: null, + + // A list of paths to directories that Jest should use to search for files in + // roots: [ + // "" + // ], + + // Allows you to use a custom runner instead of Jest's default test runner + // runner: "jest-runner", + + // The paths to modules that run some code to configure or set up the testing environment before each test + // setupFiles: [], + + // The path to a module that runs some code to configure or set up the testing framework before each test + setupFilesAfterEnv: ['./config/setup-test-framework-script.js'], + + // A list of paths to snapshot serializer modules Jest should use for snapshot testing + // snapshotSerializers: [], + + // The test environment that will be used for testing + testEnvironment: 'node', + + // Options that will be passed to the testEnvironment + // testEnvironmentOptions: {}, + + // Adds a location field to test results + // testLocationInResults: false, + + // The glob patterns Jest uses to detect test 
files + testMatch: ['**/__tests__/**/*.js?(x)', '**/?(*.)+(spec|test).js?(x)'], + + // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped + // testPathIgnorePatterns: [ + // "/node_modules/" + // ], + + // The regexp pattern Jest uses to detect test files + // testRegex: "", + + // This option allows the use of a custom results processor + // testResultsProcessor: null, + + // This option allows use of a custom test runner + // testRunner: "jasmine2", + + // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href + // testURL: "http://localhost", + + // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" + // timers: "real", + + // A map from regular expressions to paths to transformers + transform: { + '^.+\\.js$': 'babel-jest' + } + + // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation + // transformIgnorePatterns: [ + // "/node_modules/" + // ], + + // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them + // unmockedModulePathPatterns: undefined, + + // Indicates whether each individual test should be reported during the run + // verbose: null, + + // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode + // watchPathIgnorePatterns: [], + + // Whether to use watchman for file crawling + // watchman: true, +}; diff --git a/package.json b/package.json new file mode 100644 index 0000000..20b5c18 --- /dev/null +++ b/package.json @@ -0,0 +1,61 @@ +{ + "name": "linkedin-cognito-openid-wrapper", + "version": "1.1.0", + "description": "A wrapper to enable AWS Cognito User Pools (which requires OpenID Connect) to talk to LinkedIn (which only provides OAuth2.0)", + "main": "dist-web/server.js", + "repository": { + 
"type": "git", + "url": "https://github.com/isotoma/linkedin-cognito-openid-wrapper.git" + }, + "scripts": { + "build": "webpack", + "test": "jest --runInBand --coverage", + "test-dev": "jest --runInBand --watch", + "start": "webpack --watch --display errors-only", + "lint": "eslint 'src/**' --ext .js", + "preinstall": "./scripts/create-key.sh", + "prebuild-dist": "npm run lint && npm run test", + "build-dist": "npm run build", + "coverage": "jest --runInBand --coverage" + }, + "author": "", + "license": "ISC", + "dependencies": { + "axios": "^0.19.0", + "body-parser": "^1.18.3", + "colors": "^1.3.2", + "express": "^4.16.3", + "json-web-key": "^0.3.0", + "jsonwebtoken": "^8.3.0", + "winston": "^3.2.1", + "winston-splunk-httplogger": "^2.2.0" + }, + "devDependencies": { + "@babel/core": "^7.3.4", + "@babel/preset-env": "^7.3.4", + "babel-jest": "^24.1.0", + "babel-loader": "^8.0.2", + "chai": "^4.1.2", + "chai-as-promised": "^7.1.1", + "chai-jest-diff": "^1.0.2", + "eslint": "^5.15.1", + "eslint-config-airbnb-base": "^13.1.0", + "eslint-config-prettier": "^3.0.1", + "eslint-plugin-chai-expect": "^1.1.1", + "eslint-plugin-chai-friendly": "^0.4.1", + "eslint-plugin-import": "^2.14.0", + "eslint-plugin-jest": "^21.22.0", + "jest": "^24.1.0", + "nodemon": "^1.18.7", + "nodemon-webpack-plugin": "^4.0.6", + "prettier": "^1.14.2", + "raw-loader": "^0.5.1", + "to": "^0.2.9", + "webpack": "^4.41.2", + "webpack-cli": "^3.1.0", + "webpack-node-externals": "^1.7.2" + }, + "engines": { + "node": ">=10" + } +} diff --git a/scripts/create-key.sh b/scripts/create-key.sh new file mode 100755 index 0000000..bf74432 --- /dev/null +++ b/scripts/create-key.sh @@ -0,0 +1,15 @@ +#!/bin/bash -eu +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)" # Figure out where the script is running +. "$SCRIPT_DIR"/lib-robust-bash.sh # load the robust bash library +PROJECT_ROOT="$SCRIPT_DIR"/.. 
# Figure out where the project directory is + +require_binary ssh-keygen +require_binary openssl + +KEY_FILE="$PROJECT_ROOT"/jwtRS256.key + +if [ ! -f "$KEY_FILE" ]; then + echo " --- Creating private key, as it does not exist ---" + ssh-keygen -t rsa -b 4096 -m PEM -f "$KEY_FILE" -N '' + openssl rsa -in "$KEY_FILE" -pubout -outform PEM -out "$KEY_FILE".pub +fi diff --git a/scripts/lib-robust-bash.sh b/scripts/lib-robust-bash.sh new file mode 100644 index 0000000..cb0ec8e --- /dev/null +++ b/scripts/lib-robust-bash.sh @@ -0,0 +1,14 @@ +#!/bin/bash -eu + +# Check to see that we have a required binary on the path +function require_binary { + if [ -z "${1:-}" ]; then + error "${FUNCNAME[0]} requires an argument" + exit 1 + fi + + if ! [ -x "$(command -v "$1")" ]; then + error "The required executable '$1' is not on the path." + exit 1 + fi +} diff --git a/src/__mocks__/privateKeyMock.js b/src/__mocks__/privateKeyMock.js new file mode 100644 index 0000000..8ffbb25 --- /dev/null +++ b/src/__mocks__/privateKeyMock.js @@ -0,0 +1,53 @@ +// These keys exist for testing outside webpack. +// They should not be used in production. 
+module.exports = `-----BEGIN RSA PRIVATE KEY----- +MIIJJwIBAAKCAgEAvT8Gp2s+dau70ANkGC992iALb7b+o5LJvEQhgfLF3AA8qLbZ +ekFobWHr2oKGEl/MFMD4lGoKB1zgfqfPjwNFYkEV2Hr4K0c5ULTP4nTVB5c9h66Z +g4V3VeatR6UwG5f5O3HPRn3zKAJq+3qfde3ZphEGgMVmhaouXzT3c9gHzZFbrMNO +RM/VfyvMhco760mDo/Z0mCvV1qvEi/GVnnst9KLnp1ZF6Qgq9LF9oBfsnahKRGLU +q5X29/onWi/CDQehf8npNI78ypbpqSay2FsG2WXS05LSfE9KgbwFjSaY/npFk8gu +TBx7xidFWrCMQZLxP3EKgfK1ehfkiNkdTB288g0jhtnPv0t2yhnUoOIRQN4kH2iy +lkwo1zjqysPwmQRATDnVwvRzrr1f9phV8hYEj1N33xVpMJ999vlSlBMcYkC5MDE/ +onI+cJD1uJw3qk2znMN+JOCSu55gn5mk2k4aBhMujhyTliM7yikYYpscB/+Qo37N +DzSEgVZkzu/cBo9lyAshB2oHLMpBzyjVCpE4+A92QvNKLouAjvG6LNioLpBj89Jm +yr+jqSNDGhgOlIZVzmxJPWktKI00O1BkxrHMWZayND8CqZnkITHGJGOv2Yh7v7V+ +I8GuD1knnKdBvrC3dCjh45/T7jAC6eyIyq9u0L9itaNdDRhTx1TIIPwTBKsCAwEA +AQKCAgBmElxwaHOj3CDMrUeLk/H5eFcyxizJ5R5SIR1BjcQSST5ucVETk7vTY4Fm +tgfCWVEl0H9C7X7DQeED71cP99+wgxJZVNoN3biYQy3tIr7T/Ur+p7m9KnuXJXhI +sFwmRe3zlgsKJlzEM6moQhdH8CX4YC51vgLyDTw9tb/SB68NvV0I7b6FduPcV69U +aiOvYFNUWFXCzcHkq3izUnY0qJO84zC3HN1QN0uT9zee5ciFsIN+JyA1/Ajc71vS +VPgBNyBwYGMcKhNG4ebdiTXlZBNppnX0D7jfu5SgLN2BqFPUzfZBIzN1cdeApSXP +KHI93Beq9DmKpXIkjEaDWTdoGpIRvkoYsKUTbmiYIeG3HWp+tyNkoJyGRhvBcTpb +wuXkhTjraZfdpRZZG2aczJIlqDX/XDzp2Un8H1CfdCT0gZSjjka3yKB/2JprACar +4DMO2VqlmGHaE9eU+2DuEzCK22EIpjgBt8g8+FP4wy6VuzLyWMf/jHsumwgsPKyo +Fp5jjVkhTnaMAH3T12MkNBQssPAwC4+NXg728ikxq7Aotp3vlnHY+AGdG4fGHFvb +R2JWUZcvWanQAbrKR43RFU2Lt4OiiWWJ6yh1C3VMfOUJuvRIKQ3g25c1MPjnr2Vf +BilzBrHfa3/NFgd1J6MDClhcSV9J1/pDnBkFVEX91K7cYIgUoQKCAQEA5UJ4+b1U +ZLFCu2mik93QIJ63EEaIU2yWM5ZUSeriumLgF2TbAgadvd5aoj0u7Gn0xdvl1oEp ++Fjp14syijLC46vgCh/Xc9ESrWOu0jNyBX+VtJIZ+MMxwGYjoeTdv5J9PcZKbCkj +wUFs7b9w2Twogt4Cs7pj9FxmWGnF4NGrrt+NOZJqkOYLqOsUtQNtafSAY+XMriP/ +cdA5vLAcVImNNgITUwwwoldmkSCNmjYtZ5N5ceopeskq3jt7CgeT7jgtzoIWyyWn +x+DaMlBr8ukAzfnERAc09in+hE0ZrAUOhK++W6OizsDj3pfFas+39iXXgbffShF/ +Jbc7Yt2oMQBKlwKCAQEA01HHx3bhw/NW6h9gdYeB/nvKozZEW0YIyN9zDJgRdfIL +NvTWPW1XFzR99Io/Ry8S0ROLAJhSXmXdNiQip0r6vWpvEU7zg36xcjswTBhyel3s 
+rKmvfubLde/BUaaug7mI9HUZIpGi9bavOHSLmW3AoxCZ+2cQBKJNa9MyhM+XBNFT +0mXzgHEU7Ol9IWYiBtDQz1gU1djx8DulgM8q2527Eyp2mhpIU4z3E43/ck3NYbIh +o3wQV6ThthjuXv9q/D8/amKrzT6pFq58MsDKFTDZ2UIXaq9yI72X+FyuaNLiZ0Pz +WeTSQeBK1ruvsYa8F/WMWfC+JHrZx4oT/laxWGj9DQKCAQBS7XqJC70tNxlmZU2T +oxaX1hFt57WER7EFNAmFO1uMBBv/GlJGJ1KDzZyHNw50IdeSgnpe3xXpaXAcsZM0 +fiwU4qUFxILQt/3Djl08V7OU3ZOvX3HZk/G5ILke5IR5uWloIQPmn/L5Ast+LFOL +oMEepWPg0zk4uPukW45iRjWN6ftRqe62PyBUl8RDvdukCfcvK36gNxE9gA6CfEmj +IqZbtOB8l9o3vtmxAU93SsWdw7CYThV2/rFs9aMJ+7e84cFgA5pvHU3VdTY4IPML +SNErsH8YBGtZ59LS1HjGyoV35YI777MCiq9iYw/cFQr8FLAhkftI9Y9Ce0cV+gvO +vcY9AoIBADZs1qZRwP3Fu3nbEO3UM6/GVD9K57oWRIbvveLde4nECA1ka5UwhwCr +/VCxFnEP96mvfdpuYLB9Tcb28ZHipseIFQkBK4iPZcZE5kCC/2DytdUWcuOdL4O1 +dzW/Vy0H6PUE/68+kRj1rxU8NwQSF04oJXBxb9exsXz2zQkVqhCMlSkYJunKthf6 +XsbuVg8pUs5EIkGdeVplElAliyU674aVJiy0XjJemPgJV2QqE5540V8YweDAz30l +2KbQ484JwBx3Q0Y+QDBeShCMRL/GcCUbd4p7m0sHNo+51xWaUsND5fYeD+T1jnDV +r/9p8yG8lSRI87/TgCl7L4EO9OgPSjUCggEAeMINDX6wynPuO8GPDVcotwvOqvJq +tWv7IJJdFGRkKsoY7GAXtbV33pGEx2lpwpYmDWoxFEZgJ1hj47ox6VeodPy+2bkP +WlZEIDcEzYMV3aUjnMXPYhzkveboBP2okssqJxqzbNCgcxdHQH4UPbogqMC0u1Tw +WjMM9pubazOVy7/QSTTcAXLzPOiPQwbB98Yp5chsJeoMoWo3XQ+f+tF3nuGnVRjr +PtqVvTGZ/B4pOjtdD1qQ0KeN4w+vjgwV8986Mw7bSkd/aV3rrrzJQQUKW1+lKIdc +F5piXD8il+9ZNGP9HGBfjfFsjyQWG60lh+CTSamKeFuOb9Ko5l8ARIHgTA== +-----END RSA PRIVATE KEY-----`; diff --git a/src/__mocks__/publicKeyMock.js b/src/__mocks__/publicKeyMock.js new file mode 100644 index 0000000..2201fff --- /dev/null +++ b/src/__mocks__/publicKeyMock.js @@ -0,0 +1,16 @@ +// These keys exist for testing outside webpack. +// They should not be used in production. 
+module.exports = `-----BEGIN PUBLIC KEY----- +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvT8Gp2s+dau70ANkGC99 +2iALb7b+o5LJvEQhgfLF3AA8qLbZekFobWHr2oKGEl/MFMD4lGoKB1zgfqfPjwNF +YkEV2Hr4K0c5ULTP4nTVB5c9h66Zg4V3VeatR6UwG5f5O3HPRn3zKAJq+3qfde3Z +phEGgMVmhaouXzT3c9gHzZFbrMNORM/VfyvMhco760mDo/Z0mCvV1qvEi/GVnnst +9KLnp1ZF6Qgq9LF9oBfsnahKRGLUq5X29/onWi/CDQehf8npNI78ypbpqSay2FsG +2WXS05LSfE9KgbwFjSaY/npFk8guTBx7xidFWrCMQZLxP3EKgfK1ehfkiNkdTB28 +8g0jhtnPv0t2yhnUoOIRQN4kH2iylkwo1zjqysPwmQRATDnVwvRzrr1f9phV8hYE +j1N33xVpMJ999vlSlBMcYkC5MDE/onI+cJD1uJw3qk2znMN+JOCSu55gn5mk2k4a +BhMujhyTliM7yikYYpscB/+Qo37NDzSEgVZkzu/cBo9lyAshB2oHLMpBzyjVCpE4 ++A92QvNKLouAjvG6LNioLpBj89Jmyr+jqSNDGhgOlIZVzmxJPWktKI00O1BkxrHM +WZayND8CqZnkITHGJGOv2Yh7v7V+I8GuD1knnKdBvrC3dCjh45/T7jAC6eyIyq9u +0L9itaNdDRhTx1TIIPwTBKsCAwEAAQ== +-----END PUBLIC KEY-----`; diff --git a/src/config.js b/src/config.js new file mode 100644 index 0000000..7c37e0d --- /dev/null +++ b/src/config.js @@ -0,0 +1,16 @@ +module.exports = { + LINKEDIN_CLIENT_ID: process.env.LINKEDIN_CLIENT_ID, + LINKEDIN_CLIENT_SECRET: process.env.LINKEDIN_CLIENT_SECRET, + COGNITO_REDIRECT_URI: process.env.COGNITO_REDIRECT_URI, + LINKEDIN_API_URL: process.env.LINKEDIN_API_URL, + LINKEDIN_LOGIN_URL: process.env.LINKEDIN_LOGIN_URL, + PORT: parseInt(process.env.PORT, 10) || undefined, + LINKEDIN_SCOPE: process.env.LINKEDIN_SCOPE, + + // Splunk logging variables + SPLUNK_URL: process.env.SPLUNK_URL, + SPLUNK_TOKEN: process.env.SPLUNK_TOKEN, + SPLUNK_SOURCE: process.env.SPLUNK_SOURCE, + SPLUNK_SOURCETYPE: process.env.SPLUNK_SOURCETYPE, + SPLUNK_INDEX: process.env.SPLUNK_INDEX +}; diff --git a/src/connectors/controllers.js b/src/connectors/controllers.js new file mode 100644 index 0000000..fec56d2 --- /dev/null +++ b/src/connectors/controllers.js @@ -0,0 +1,80 @@ +const logger = require('./logger'); +const openid = require('../openid'); + +module.exports = respond => ({ + authorize: (client_id, scope, state, response_type) => { + const authorizeUrl = 
openid.getAuthorizeUrl( + client_id, + scope, + state, + response_type + ); + logger.info('Redirecting to authorizeUrl'); + logger.debug('Authorize Url is: %s', authorizeUrl, {}); + respond.redirect(authorizeUrl); + }, + userinfo: tokenPromise => { + tokenPromise + .then(token => openid.getUserInfo(token)) + .then(userInfo => { + logger.debug('Resolved user infos:', userInfo, {}); + respond.success(userInfo); + }) + .catch(error => { + logger.error( + 'Failed to provide user info: %s', + error.message || error, + {} + ); + respond.error(error); + }); + }, + token: (code, state, host) => { + if (code) { + openid + .getTokens(code, state, host) + .then(tokens => { + logger.debug( + 'Token for (%s, %s, %s) provided', + code, + state, + host, + {} + ); + respond.success(tokens); + }) + .catch(error => { + logger.error( + 'Token for (%s, %s, %s) failed: %s', + code, + state, + host, + error.message || error, + {} + ); + respond.error(error); + }); + } else { + const error = new Error('No code supplied'); + logger.error( + 'Token for (%s, %s, %s) failed: %s', + code, + state, + host, + error.message || error, + {} + ); + respond.error(error); + } + }, + jwks: () => { + const jwks = openid.getJwks(); + logger.info('Providing access to JWKS: %j', jwks, {}); + respond.success(jwks); + }, + openIdConfiguration: host => { + const config = openid.getConfigFor(host); + logger.info('Providing configuration for %s: %j', host, config, {}); + respond.success(config); + } +}); diff --git a/src/connectors/lambda/authorize.js b/src/connectors/lambda/authorize.js new file mode 100644 index 0000000..4891494 --- /dev/null +++ b/src/connectors/lambda/authorize.js @@ -0,0 +1,18 @@ +const responder = require('./util/responder'); +const controllers = require('../controllers'); + +module.exports.handler = (event, context, callback) => { + const { + client_id, + scope, + state, + response_type + } = event.queryStringParameters; + + controllers(responder(callback)).authorize( + client_id, + 
scope, + state, + response_type + ); +}; diff --git a/src/connectors/lambda/jwks.js b/src/connectors/lambda/jwks.js new file mode 100644 index 0000000..9df2c35 --- /dev/null +++ b/src/connectors/lambda/jwks.js @@ -0,0 +1,6 @@ +const responder = require('./util/responder'); +const controllers = require('../controllers'); + +module.exports.handler = (event, context, callback) => { + controllers(responder(callback)).jwks(); +}; diff --git a/src/connectors/lambda/open-id-configuration.js b/src/connectors/lambda/open-id-configuration.js new file mode 100644 index 0000000..097bda6 --- /dev/null +++ b/src/connectors/lambda/open-id-configuration.js @@ -0,0 +1,12 @@ +const responder = require('./util/responder'); +const auth = require('./util/auth'); +const controllers = require('../controllers'); + +module.exports.handler = (event, context, callback) => { + controllers(responder(callback)).openIdConfiguration( + auth.getIssuer( + event.headers.Host, + event.requestContext && event.requestContext.stage + ) + ); +}; diff --git a/src/connectors/lambda/token.js b/src/connectors/lambda/token.js new file mode 100644 index 0000000..3074e3f --- /dev/null +++ b/src/connectors/lambda/token.js @@ -0,0 +1,34 @@ +const qs = require('querystring'); +const responder = require('./util/responder'); +const auth = require('./util/auth'); +const controllers = require('../controllers'); + +const parseBody = event => { + const contentType = event.headers['Content-Type']; + if (event.body) { + if (contentType.startsWith('application/x-www-form-urlencoded')) { + return qs.parse(event.body); + } + if (contentType.startsWith('application/json')) { + return JSON.parse(event.body); + } + } + return {}; +}; + +module.exports.handler = (event, context, callback) => { + const body = parseBody(event); + const query = event.queryStringParameters || {}; + + const code = body.code || query.code; + const state = body.state || query.state; + + controllers(responder(callback)).token( + code, + state, + 
auth.getIssuer( + event.headers.Host, + event.requestContext && event.requestContext.stage + ) + ); +}; diff --git a/src/connectors/lambda/userinfo.js b/src/connectors/lambda/userinfo.js new file mode 100644 index 0000000..8e8293b --- /dev/null +++ b/src/connectors/lambda/userinfo.js @@ -0,0 +1,7 @@ +const responder = require('./util/responder'); +const auth = require('./util/auth'); +const controllers = require('../controllers'); + +module.exports.handler = (event, context, callback) => { + controllers(responder(callback)).userinfo(auth.getBearerToken(event)); +}; diff --git a/src/connectors/lambda/util/auth.js b/src/connectors/lambda/util/auth.js new file mode 100644 index 0000000..883b7b6 --- /dev/null +++ b/src/connectors/lambda/util/auth.js @@ -0,0 +1,44 @@ +const logger = require('../../logger'); + +module.exports = { + getBearerToken: req => + new Promise((resolve, reject) => { + // This method implements https://tools.ietf.org/html/rfc6750 + const authHeader = req.headers.Authorization; + logger.debug('Detected authorization header %s', authHeader); + if (authHeader) { + // Section 2.1 Authorization request header + // Should be of the form 'Bearer ' + // We can ignore the 'Bearer ' bit + const authValue = authHeader.split(' ')[1]; + logger.debug('Section 2.1 Authorization bearer header: %s', authValue); + resolve(authValue); + } else if (req.queryStringParameters.access_token) { + // Section 2.3 URI query parameter + const accessToken = req.queryStringParameters.access_token; + logger.debug( + 'Section 2.3 Authorization query parameter: %s', + accessToken + ); + resolve(req.queryStringParameters.access_token); + } else if ( + req.headers['Content-Type'] === 'application/x-www-form-urlencoded' && + req.body + ) { + // Section 2.2 form encoded body parameter + const body = JSON.parse(req.body); + logger.debug('Section 2.2. 
Authorization form encoded body: %s', body); + resolve(body.access_token); + } else { + const msg = 'No token specified in request'; + logger.warn(msg); + reject(new Error(msg)); + } + }), + + getIssuer: (host, stage) => { + const lStage = stage; + const issuer = `${host}/${lStage}`; + return issuer; + } +}; diff --git a/src/connectors/lambda/util/responder.js b/src/connectors/lambda/util/responder.js new file mode 100644 index 0000000..9271f94 --- /dev/null +++ b/src/connectors/lambda/util/responder.js @@ -0,0 +1,35 @@ +const logger = require('../../logger'); + +module.exports = callback => ({ + success: response => { + logger.info('Success response'); + logger.debug('Response was: ', response); + callback(null, { + statusCode: 200, + body: JSON.stringify(response), + headers: { + 'Content-Type': 'application/json' + } + }); + }, + error: err => { + logger.error('Error response: ', err.message || err); + callback(null, { + statusCode: 400, + body: JSON.stringify(err.message), + headers: { + 'Content-Type': 'application/json' + } + }); + }, + redirect: url => { + logger.info('Redirect response'); + logger.debug('Redirect response to %s', url, {}); + callback(null, { + statusCode: 302, + headers: { + Location: url + } + }); + } +}); diff --git a/src/connectors/logger.js b/src/connectors/logger.js new file mode 100644 index 0000000..28bcae6 --- /dev/null +++ b/src/connectors/logger.js @@ -0,0 +1,48 @@ +const winston = require('winston'); +const { + SPLUNK_URL, + SPLUNK_TOKEN, + SPLUNK_SOURCE, + SPLUNK_SOURCETYPE, + SPLUNK_INDEX +} = require('../config'); + +const logger = winston.createLogger({ + level: 'info' +}); + +// Activate Splunk logging if Splunk's env variables are set +if (SPLUNK_URL) { + const SplunkStreamEvent = require('winston-splunk-httplogger'); // eslint-disable-line global-require + + const splunkSettings = { + url: SPLUNK_URL || 'localhost', + token: SPLUNK_TOKEN, + source: SPLUNK_SOURCE || '/var/log/GHOIdShim.log', + sourcetype: SPLUNK_SOURCETYPE 
|| 'linkedin-cognito-openid-wrapper', + index: SPLUNK_INDEX || 'main', + maxBatchCount: 1 + }; + + logger.add( + new SplunkStreamEvent({ + splunk: splunkSettings, + format: winston.format.combine( + winston.format.splat(), + winston.format.timestamp() + ) + }) + ); +} else { + // STDOUT logging for dev/regular servers + logger.add( + new winston.transports.Console({ + format: winston.format.combine( + winston.format.splat(), + winston.format.simple() + ) + }) + ); +} + +module.exports = logger; diff --git a/src/connectors/web/app.js b/src/connectors/web/app.js new file mode 100644 index 0000000..782a8b3 --- /dev/null +++ b/src/connectors/web/app.js @@ -0,0 +1,24 @@ +const express = require('express'); +const bodyParser = require('body-parser'); +const routes = require('./routes'); +const { PORT } = require('../../config'); +const validateConfig = require('../../validate-config'); + +require('colors'); + +const app = express(); + +try { + validateConfig(); +} catch (e) { + console.error('Failed to start:'.red, e.message); + console.error(' See the readme for configuration information'); + process.exit(1); +} +console.info('Config is valid'.cyan); + +app.use(bodyParser.json()); +app.use(bodyParser.urlencoded({ extended: true })); +routes(app); +app.listen(PORT); +console.info(`Listening on ${PORT}`.cyan); diff --git a/src/connectors/web/auth.js b/src/connectors/web/auth.js new file mode 100644 index 0000000..75b288b --- /dev/null +++ b/src/connectors/web/auth.js @@ -0,0 +1,24 @@ +module.exports = { + getBearerToken: req => + new Promise((resolve, reject) => { + // This method implements https://tools.ietf.org/html/rfc6750 + const authHeader = req.get('Authorization'); + if (authHeader) { + // Section 2.1 Authorization request header + // Should be of the form 'Bearer ' + // We can ignore the 'Bearer ' bit + resolve(authHeader.split(' ')[1]); + } else if (req.query.access_token) { + // Section 2.3 URI query parameter + resolve(req.query.access_token); + } else if ( + 
req.get('Content-Type') === 'application/x-www-form-urlencoded' + ) { + // Section 2.2 form encoded body parameter + resolve(req.body.access_token); + } + reject(new Error('No token specified in request')); + }), + + getIssuer: host => `${host}` +}; diff --git a/src/connectors/web/handlers.js b/src/connectors/web/handlers.js new file mode 100644 index 0000000..7fba00d --- /dev/null +++ b/src/connectors/web/handlers.js @@ -0,0 +1,29 @@ +const responder = require('./responder'); +const auth = require('./auth'); +const controllers = require('../controllers'); + +module.exports = { + userinfo: (req, res) => { + controllers(responder(res)).userinfo(auth.getBearerToken(req)); + }, + token: (req, res) => { + const code = req.body.code || req.query.code; + const state = req.body.state || req.query.state; + + controllers(responder(res)).token(code, state, req.get('host')); + }, + jwks: (req, res) => controllers(responder(res)).jwks(), + authorize: (req, res) => + responder(res).redirect( + `https://www.linkedin.com/oauth/v2/authorization?client_id=${ + req.query.client_id + }&scope=${req.query.scope}&state=${req.query.state}&response_type=${ + req.query.response_type + }` + ), + openIdConfiguration: (req, res) => { + controllers(responder(res)).openIdConfiguration( + auth.getIssuer(req.get('host')) + ); + } +}; diff --git a/src/connectors/web/responder.js b/src/connectors/web/responder.js new file mode 100644 index 0000000..c5c0f58 --- /dev/null +++ b/src/connectors/web/responder.js @@ -0,0 +1,21 @@ +const util = require('util'); + +require('colors'); + +module.exports = res => ({ + success: data => { + res.format({ + 'application/json': () => { + res.json(data); + }, + default: () => { + res.status(406).send('Not Acceptable'); + } + }); + }, + error: error => { + res.statusCode = 400; + res.end(`Failure: ${util.inspect(error.message)}`); + }, + redirect: url => res.redirect(url) +}); diff --git a/src/connectors/web/routes.js b/src/connectors/web/routes.js new file mode 
100644 index 0000000..819b7c4 --- /dev/null +++ b/src/connectors/web/routes.js @@ -0,0 +1,13 @@ +const handlers = require('./handlers'); + +module.exports = app => { + app.get('/userinfo', handlers.userinfo); + app.post('/userinfo', handlers.userinfo); + app.get('/token', handlers.token); + app.post('/token', handlers.token); + app.get('/authorize', handlers.authorize); + app.post('/authorize', handlers.authorize); + app.get('/jwks.json', handlers.jwks); + app.get('/.well-known/jwks.json', handlers.jwks); + app.get('/.well-known/openid-configuration', handlers.openIdConfiguration); +}; diff --git a/src/crypto.js b/src/crypto.js new file mode 100644 index 0000000..61a7248 --- /dev/null +++ b/src/crypto.js @@ -0,0 +1,30 @@ +const JSONWebKey = require('json-web-key'); +const jwt = require('jsonwebtoken'); +const { LINKEDIN_CLIENT_ID } = require('./config'); +const logger = require('./connectors/logger'); + +const KEY_ID = 'jwtRS256'; +const cert = require('../jwtRS256.key'); +const pubKey = require('../jwtRS256.key.pub'); + +module.exports = { + getPublicKey: () => ({ + alg: 'RS256', + kid: KEY_ID, + ...JSONWebKey.fromPEM(pubKey).toJSON() + }), + + makeIdToken: (payload, host) => { + const enrichedPayload = { + ...payload, + iss: `https://${host}`, + aud: LINKEDIN_CLIENT_ID + }; + logger.debug('Signing payload %j', enrichedPayload, {}); + return jwt.sign(enrichedPayload, cert, { + expiresIn: '1h', + algorithm: 'RS256', + keyid: KEY_ID + }); + } +}; diff --git a/src/helpers.js b/src/helpers.js new file mode 100644 index 0000000..a4e43ac --- /dev/null +++ b/src/helpers.js @@ -0,0 +1,3 @@ +module.exports = { + NumericDate: date => Math.floor(date / 1000) +}; diff --git a/src/helpers.test.js b/src/helpers.test.js new file mode 100644 index 0000000..7169ec3 --- /dev/null +++ b/src/helpers.test.js @@ -0,0 +1,16 @@ +const { NumericDate } = require('./helpers'); + +describe('NumericDate function', () => { + describe('with a date that could be rounded up', () => { + const date 
= new Date(1233999); + it('Returns the date in whole seconds since epoch', () => { + expect(NumericDate(date)).to.equal(1233); + }); + }); + describe('with a date that would not be rounded up', () => { + const date = new Date(1234000); + it('Returns the date in whole seconds since epoch', () => { + expect(NumericDate(date)).to.equal(1234); + }); + }); +}); diff --git a/src/linkedin.js b/src/linkedin.js new file mode 100644 index 0000000..716e200 --- /dev/null +++ b/src/linkedin.js @@ -0,0 +1,102 @@ +const axios = require('axios'); +const qs = require('qs'); +const { + LINKEDIN_CLIENT_ID, + LINKEDIN_CLIENT_SECRET, + COGNITO_REDIRECT_URI, + LINKEDIN_API_URL, + LINKEDIN_LOGIN_URL, + LINKEDIN_SCOPE, +} = require('./config'); +const logger = require('./connectors/logger'); + +const getApiEndpoints = ( + apiBaseUrl = LINKEDIN_API_URL, + loginBaseUrl = LINKEDIN_LOGIN_URL +) => ({ + userDetails: `${apiBaseUrl}/v2/me`, + userEmails: `${apiBaseUrl}/v2/clientAwareMemberHandles?q=members&projection=(elements*(primary,type,handle~))`, + oauthToken: `${apiBaseUrl}/oauth/v2/accessToken`, + oauthAuthorize: `${loginBaseUrl}/oauth/v2/authorization`, +}); + +const check = response => { + logger.debug('Checking response: %j', response, {}); + if (response.data) { + if (response.data.error) { + throw new Error( + `LinkedIn API responded with a failure: ${response.data.error}, ${ + response.data.error_description + }` + ); + } else if (response.status === 200) { + return response.data; + } + } + throw new Error( + `LinkedIn API responded with a failure: ${response.status} (${ + response.statusText + })` + ); +}; + +const linkedinGet = (url, accessToken) => + axios({ + method: 'get', + url, + headers: { + Authorization: `Bearer ${accessToken}` + } + }); + +module.exports = (apiBaseUrl, loginBaseUrl) => { + const urls = getApiEndpoints(apiBaseUrl, loginBaseUrl || apiBaseUrl); + return { + getAuthorizeUrl: (client_id, scope, state, response_type) => { + const scopesToSend = scope.split(' 
').filter(s => s !== 'openid').join(' '); + return `${urls.oauthAuthorize}?client_id=${client_id}&scope=${encodeURIComponent( + scopesToSend + )}&state=${state}&response_type=${response_type}&redirect_uri=${COGNITO_REDIRECT_URI}`; + }, + getUserDetails: accessToken => + linkedinGet(urls.userDetails, accessToken).then(check), + getUserEmails: accessToken => + linkedinGet(urls.userEmails, accessToken).then(check), + getToken: (code, state) => { + const data = { + // OAuth required fields + grant_type: 'authorization_code', + redirect_uri: COGNITO_REDIRECT_URI, + client_id: LINKEDIN_CLIENT_ID, + response_type: 'code', + client_secret: LINKEDIN_CLIENT_SECRET, + code, + // State may not be present, so we conditionally include it + ...(state && { state }) + }; + + logger.debug( + 'Getting token from %s with data: %j', + urls.oauthToken, + data, + {} + ); + return axios.post( + urls.oauthToken, + qs.stringify(data), + { + headers: { + Accept: 'application/json', + // 'Content-Type': 'application/json' + }, + } + ).then(check) + .then(data => { + // Because LinkedIn doesn't return the scopes + data.scope = LINKEDIN_SCOPE; + data.token_type = 'bearer'; + return data; + }) + } + }; +}; diff --git a/src/openid.js b/src/openid.js new file mode 100644 index 0000000..9ee79cf --- /dev/null +++ b/src/openid.js @@ -0,0 +1,128 @@ +const logger = require('./connectors/logger'); +const { NumericDate } = require('./helpers'); +const crypto = require('./crypto'); +const linkedin = require('./linkedin'); + +const getJwks = () => ({ keys: [crypto.getPublicKey()] }); + +const getUserInfo = accessToken => + Promise.all([ + linkedin() + .getUserDetails(accessToken) + .then(userDetails => { + logger.debug('Fetched user details: %j', userDetails, {}); + // Here we map the linkedin user response to the standard claims from + // OpenID. 
The mapping was constructed by following + // https://docs.microsoft.com/en-us/linkedin/shared/integrations/people/profile-api?context=linkedin/consumer/context + // and + // https://docs.microsoft.com/en-us/linkedin/shared/references/v2/profile/lite-profile + const claims = { + sub: `${userDetails.id}`, // OpenID requires a string + name: `${userDetails.firstName.localized} ${userDetails.lastName.localized}`, + }; + logger.debug('Resolved claims: %j', claims, {}); + return claims; + }), + linkedin() + .getUserEmails(accessToken) + .then(userEmails => { + logger.debug('Fetched user emails: %j', userEmails, {}); + const primaryEmail = userEmails.elements.find(email => email.primary && (email.type === 'EMAIL')); + if (primaryEmail === undefined) { + throw new Error('User did not have a primary email address'); + } + const emailAddress = primaryEmail['handle~'].emailAddress; + const claims = { + email: emailAddress, + email_verified: true, + }; + logger.debug('Resolved claims: %j', claims, {}); + return claims; + }) + ]).then(claims => { + const mergedClaims = claims.reduce( + (acc, claim) => ({ ...acc, ...claim }), + {} + ); + logger.debug('Resolved combined claims: %j', mergedClaims, {}); + return mergedClaims; + }); + +const getAuthorizeUrl = (client_id, scope, state, response_type) => + linkedin().getAuthorizeUrl(client_id, scope, state, response_type); + +const getTokens = (code, state, host) => + linkedin() + .getToken(code, state) + .then(linkedinToken => { + logger.debug('Got token: %s', linkedinToken, {}); + // ** JWT ID Token required fields ** + // iss - issuer https url + // aud - audience that this token is valid for (LINKEDIN_CLIENT_ID) + // sub - subject identifier - must be unique + // ** Also required, but provided by jsonwebtoken ** + // exp - expiry time for the id token (seconds since epoch in UTC) + // iat - time that the JWT was issued (seconds since epoch in UTC) + + return new Promise(resolve => { + const payload = { + // This was commented 
because Cognito times out in under a second + // and generating the userInfo takes too long. + // It means the ID token is empty except for metadata. + // ...userInfo, + }; + + const idToken = crypto.makeIdToken(payload, host); + const tokenResponse = { + ...linkedinToken, + id_token: idToken + }; + + logger.debug('Resolved token response: %j', tokenResponse, {}); + + resolve(tokenResponse); + }); + }); + +const getConfigFor = host => ({ + issuer: `https://${host}`, + authorization_endpoint: `https://${host}/authorize`, + token_endpoint: `https://${host}/token`, + token_endpoint_auth_methods_supported: [ + 'client_secret_basic', + 'private_key_jwt' + ], + token_endpoint_auth_signing_alg_values_supported: ['RS256'], + userinfo_endpoint: `https://${host}/userinfo`, + // check_session_iframe: 'https://server.example.com/connect/check_session', + // end_session_endpoint: 'https://server.example.com/connect/end_session', + jwks_uri: `https://${host}/.well-known/jwks.json`, + // registration_endpoint: 'https://server.example.com/connect/register', + scopes_supported: ['openid', 'read:user', 'user:email'], + response_types_supported: [ + 'code', + 'code id_token', + 'id_token', + 'token id_token' + ], + + subject_types_supported: ['public'], + userinfo_signing_alg_values_supported: ['none'], + id_token_signing_alg_values_supported: ['RS256'], + request_object_signing_alg_values_supported: ['none'], + display_values_supported: ['page', 'popup'], + claims_supported: [ + 'sub', + 'name', + 'email', + 'email_verified', + ] +}); + +module.exports = { + getTokens, + getUserInfo, + getJwks, + getConfigFor, + getAuthorizeUrl +}; diff --git a/src/openid.test.js b/src/openid.test.js new file mode 100644 index 0000000..79c5ba2 --- /dev/null +++ b/src/openid.test.js @@ -0,0 +1,196 @@ +const openid = require('./openid'); +const github = require('./github'); +const crypto = require('./crypto'); + +jest.mock('./github'); +jest.mock('./crypto'); + +const MOCK_TOKEN = 'MOCK_TOKEN'; +const 
const openid = require('./openid');
const github = require('./github');
const crypto = require('./crypto');

jest.mock('./github');
jest.mock('./crypto');

const MOCK_TOKEN = 'MOCK_TOKEN';
const MOCK_CODE = 'MOCK_CODE';

describe('openid domain layer', () => {
  // One shared mock connector instance, re-wired before every test.
  const mockApi = {
    getUserEmails: jest.fn(),
    getUserDetails: jest.fn(),
    getToken: jest.fn(),
    getAuthorizeUrl: jest.fn()
  };

  beforeEach(() => {
    github.mockImplementation(() => mockApi);
  });

  describe('userinfo function', () => {
    // Stub the email list; `primaryFlag` controls whether a primary address
    // is present.
    const stubEmails = primaryFlag => {
      mockApi.getUserEmails.mockResolvedValue([
        {
          primary: false,
          email: 'not-this-email@example.com',
          verified: false
        },
        { primary: primaryFlag, email: 'email@example.com', verified: true }
      ]);
    };

    describe('with a good token', () => {
      describe('with complete user details', () => {
        beforeEach(() => {
          mockApi.getUserDetails.mockResolvedValue({
            sub: 'Some sub',
            name: 'some name',
            login: 'username',
            html_url: 'some profile',
            avatar_url: 'picture.jpg',
            blog: 'website',
            updated_at: '2008-01-14T04:33:35Z'
          });
        });
        describe('with a primary email', () => {
          beforeEach(() => {
            stubEmails(true);
          });
          it('Returns the aggregated complete object', async () => {
            const response = await openid.getUserInfo(MOCK_TOKEN);
            expect(response).to.deep.equal({
              email: 'email@example.com',
              email_verified: true,
              name: 'some name',
              picture: 'picture.jpg',
              preferred_username: 'username',
              profile: 'some profile',
              sub: 'undefined',
              updated_at: 1200285215,
              website: 'website'
            });
          });
        });
        describe('without a primary email', () => {
          beforeEach(() => {
            stubEmails(false);
          });
          it('fails', () =>
            expect(openid.getUserInfo('MOCK_TOKEN')).to.eventually.be
              .rejected);
        });
      });
    });
    describe('with a bad token', () => {
      beforeEach(() => {
        mockApi.getUserDetails.mockRejectedValue(new Error('Bad token'));
        mockApi.getUserEmails.mockRejectedValue(new Error('Bad token'));
      });
      it('fails', () =>
        expect(openid.getUserInfo('bad token')).to.eventually.be.rejected);
    });
  });

  describe('token function', () => {
    describe('with the correct code', () => {
      beforeEach(() => {
        mockApi.getToken.mockResolvedValue({
          access_token: 'SOME_TOKEN',
          token_type: 'bearer',
          scope: 'scope1,scope2'
        });
        crypto.makeIdToken.mockReturnValue('ENCODED TOKEN');
      });

      it('returns a token', async () => {
        const token = await openid.getTokens(
          MOCK_CODE,
          'some state',
          'somehost.com'
        );
        expect(token).to.deep.equal({
          access_token: 'SOME_TOKEN',
          id_token: 'ENCODED TOKEN',
          scope: 'openid scope1 scope2',
          token_type: 'bearer'
        });
      });
    });
    describe('with a bad code', () => {
      beforeEach(() => {
        mockApi.getToken.mockRejectedValue(new Error('Bad code'));
      });
      it('fails', () =>
        expect(openid.getUserInfo('bad token', 'two', 'three')).to.eventually
          .be.rejected);
    });
  });

  describe('jwks', () => {
    it('Returns the right structure', () => {
      const mockKey = { key: 'mock' };
      crypto.getPublicKey.mockReturnValue(mockKey);
      expect(openid.getJwks()).to.deep.equal({ keys: [mockKey] });
    });
  });

  describe('authorization', () => {
    beforeEach(() => {
      mockApi.getAuthorizeUrl.mockImplementation(
        (client_id, scope, state, response_type) =>
          `https://not-a-real-host.com/authorize?client_id=${client_id}&scope=${scope}&state=${state}&response_type=${response_type}`
      );
    });
    it('Redirects to the authorization URL', () => {
      expect(
        openid.getAuthorizeUrl('client_id', 'scope', 'state', 'response_type')
      ).to.equal(
        'https://not-a-real-host.com/authorize?client_id=client_id&scope=scope&state=state&response_type=response_type'
      );
    });
  });

  describe('openid-configuration', () => {
    describe('with a supplied hostname', () => {
      it('returns the correct response', () => {
        expect(openid.getConfigFor('not-a-real-host.com')).to.deep.equal({
          authorization_endpoint: 'https://not-a-real-host.com/authorize',
          claims_supported: [
            'sub',
            'name',
            'preferred_username',
            'profile',
            'picture',
            'website',
            'email',
            'email_verified',
            'updated_at',
            'iss',
            'aud'
          ],
          display_values_supported: ['page', 'popup'],
          id_token_signing_alg_values_supported: ['RS256'],
          issuer: 'https://not-a-real-host.com',
          jwks_uri: 'https://not-a-real-host.com/.well-known/jwks.json',
          request_object_signing_alg_values_supported: ['none'],
          response_types_supported: [
            'code',
            'code id_token',
            'id_token',
            'token id_token'
          ],
          scopes_supported: ['openid', 'read:user', 'user:email'],
          subject_types_supported: ['public'],
          token_endpoint: 'https://not-a-real-host.com/token',
          token_endpoint_auth_methods_supported: [
            'client_secret_basic',
            'private_key_jwt'
          ],
          token_endpoint_auth_signing_alg_values_supported: ['RS256'],
          userinfo_endpoint: 'https://not-a-real-host.com/userinfo',
          userinfo_signing_alg_values_supported: ['none']
        });
      });
    });
  });
});
authorization_endpoint: 'https://not-a-real-host.com/authorize', + claims_supported: [ + 'sub', + 'name', + 'preferred_username', + 'profile', + 'picture', + 'website', + 'email', + 'email_verified', + 'updated_at', + 'iss', + 'aud' + ], + display_values_supported: ['page', 'popup'], + id_token_signing_alg_values_supported: ['RS256'], + issuer: 'https://not-a-real-host.com', + jwks_uri: 'https://not-a-real-host.com/.well-known/jwks.json', + request_object_signing_alg_values_supported: ['none'], + response_types_supported: [ + 'code', + 'code id_token', + 'id_token', + 'token id_token' + ], + scopes_supported: ['openid', 'read:user', 'user:email'], + subject_types_supported: ['public'], + token_endpoint: 'https://not-a-real-host.com/token', + token_endpoint_auth_methods_supported: [ + 'client_secret_basic', + 'private_key_jwt' + ], + token_endpoint_auth_signing_alg_values_supported: ['RS256'], + userinfo_endpoint: 'https://not-a-real-host.com/userinfo', + userinfo_signing_alg_values_supported: ['none'] + }); + }); + }); + }); +}); diff --git a/src/validate-config.js b/src/validate-config.js new file mode 100644 index 0000000..5646bcf --- /dev/null +++ b/src/validate-config.js @@ -0,0 +1,30 @@ +const config = require('./config'); + +const ensureString = variableName => { + if (typeof config[variableName] !== 'string') { + throw new Error( + `Environment variable ${variableName} must be set and be a string` + ); + } +}; + +const ensureNumber = variableName => { + if (typeof config[variableName] !== 'number') { + throw new Error( + `Environment variable ${variableName} must be set and be a number` + ); + } +}; + +const requiredStrings = [ + 'LINKEDIN_CLIENT_ID', + 'LINKEDIN_CLIENT_SECRET', + 'COGNITO_REDIRECT_URI' +]; + +const requiredNumbers = ['PORT']; + +module.exports = () => { + requiredStrings.forEach(ensureString); + requiredNumbers.forEach(ensureNumber); +}; diff --git a/webpack.config.js b/webpack.config.js new file mode 100644 index 0000000..2f5eddb --- 
const NodemonPlugin = require('nodemon-webpack-plugin');
const nodeExternals = require('webpack-node-externals');

// Settings shared by the lambda and web bundles.
const baseConfig = {
  mode: 'development',
  target: 'node',
  devtool: 'source-map',
  module: {
    rules: [
      {
        // Transpile our sources (but not dependencies) with babel.
        test: /\.js$/,
        exclude: /(node_modules)/,
        use: {
          loader: 'babel-loader'
        }
      },
      {
        // Inline the RSA key pair files as raw strings.
        test: /\.(key|key.pub)$/,
        use: [
          {
            loader: 'raw-loader'
          }
        ]
      }
    ]
  }
};

// Build a commonjs2 output section targeting the given directory.
const outputTo = dir => ({
  libraryTarget: 'commonjs2',
  path: `${__dirname}/${dir}`,
  filename: '[name].js'
});

// One bundle per lambda handler.
const lambdaConfig = {
  ...baseConfig,
  output: outputTo('dist-lambda'),
  entry: {
    openIdConfiguration: './src/connectors/lambda/open-id-configuration.js',
    token: './src/connectors/lambda/token.js',
    userinfo: './src/connectors/lambda/userinfo.js',
    jwks: './src/connectors/lambda/jwks.js',
    authorize: './src/connectors/lambda/authorize.js'
  }
};

// Single bundle for the express web server, with nodemon reloading and
// node_modules left external.
const webConfig = {
  ...baseConfig,
  output: outputTo('dist-web'),
  entry: {
    server: './src/connectors/web/app.js'
  },
  externals: [nodeExternals()],
  plugins: [new NodemonPlugin()]
};

module.exports = [lambdaConfig, webConfig];