added prettier, changed some files to mdx

This commit is contained in:
jordan-violet-sp
2022-09-15 22:25:38 -04:00
parent 1e0941c09a
commit 09202da00d
101 changed files with 4563 additions and 1905 deletions

View File

@@ -1,56 +1,56 @@
// @ts-check
// Note: type annotations allow type checking and IDEs autocompletion
const lightCodeTheme = require('prism-react-renderer/themes/github');
const darkCodeTheme = require('prism-react-renderer/themes/dracula');
const lightCodeTheme = require("prism-react-renderer/themes/github");
const darkCodeTheme = require("prism-react-renderer/themes/dracula");
const footer = require('./footer');
const navbar = require('./navbar');
const plugins = require('./plugins');
const footer = require("./footer");
const navbar = require("./navbar");
const plugins = require("./plugins");
/** @type {import('@docusaurus/types').Config} */
const config = {
title: 'SailPoint Developer Community',
url: 'https://developer.sailpoint.com',
baseUrl: '/developer.sailpoint.com/',
favicon: 'img/SailPoint-Logo-Icon.ico',
onBrokenLinks: 'warn',
onBrokenMarkdownLinks: 'warn',
onDuplicateRoutes: 'warn',
title: "SailPoint Developer Community",
url: "https://developer.sailpoint.com",
baseUrl: "/developer.sailpoint.com/",
favicon: "img/SailPoint-Logo-Icon.ico",
onBrokenLinks: "throw",
onBrokenMarkdownLinks: "throw",
onDuplicateRoutes: "warn",
i18n: {
defaultLocale: 'en',
locales: ['en'],
defaultLocale: "en",
locales: ["en"],
},
presets: [
[
'classic',
"classic",
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
editUrl: "https://github.com/sailpoint-oss/developer-community-site/edit/main/",
editUrl:
"https://github.com/sailpoint-oss/developer-community-site/edit/main/",
showLastUpdateAuthor: true,
showLastUpdateTime: true,
sidebarCollapsible: true,
sidebarPath: require.resolve('./sidebars.js'),
sidebarPath: require.resolve("./sidebars.js"),
docLayoutComponent: "@theme/DocPage",
docItemComponent: "@theme/ApiItem" // Derived from docusaurus-theme-openapi
docItemComponent: "@theme/ApiItem", // Derived from docusaurus-theme-openapi
},
theme: {
customCss: require.resolve('./src/css/custom.css'),
customCss: require.resolve("./src/css/custom.css"),
},
}),
],
],
themeConfig:
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
({
algolia: {
appId: 'TB01H1DFAM',
apiKey: '726952a7a9389c484b6c96808a3e0010',
indexName: 'prod_DEVELOPER_SAILPOINT_COM',
appId: "TB01H1DFAM",
apiKey: "726952a7a9389c484b6c96808a3e0010",
indexName: "prod_DEVELOPER_SAILPOINT_COM",
searchPagePath: false,
placeholder: "Search the Developer Community",
},
@@ -62,7 +62,7 @@ const config = {
},
colorMode: {
defaultMode: "light",
respectPrefersColorScheme: true
respectPrefersColorScheme: true,
},
navbar: navbar,
footer: footer,
@@ -73,9 +73,9 @@ const config = {
},
}),
plugins: plugins,
themes: ["docusaurus-theme-openapi-docs"]
plugins: plugins,
themes: ["docusaurus-theme-openapi-docs"],
};
module.exports = config;

View File

@@ -6,36 +6,59 @@ sidebar_label: Authentication
sidebar_position: 2
sidebar_class_name: authentication
keywords: ["authentication"]
description: The quickest way to authenticate and start using SailPoint APIs is to generate a personal access token.
description:
The quickest way to authenticate and start using SailPoint APIs is to generate
a personal access token.
slug: /api/authentication
tags: ["Authentication"]
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem';
## Overview
The quickest way to authenticate and start using SailPoint APIs is to generate a [personal access token](#personal-access-tokens). If you are interested in using OAuth2 for authentication, then please continue to read this document.
The quickest way to authenticate and start using SailPoint APIs is to generate a
[personal access token](#personal-access-tokens). If you are interested in using
OAuth2 for authentication, then please continue to read this document.
In order to use the IdentityNow REST API, you must first authenticate with IdentityNow and get an `access_token`. This `access_token` will need to be provided in the `Authorization` header of each API request. The steps of the flow are as follows:
In order to use the IdentityNow REST API, you must first authenticate with
IdentityNow and get an `access_token`. This `access_token` will need to be
provided in the `Authorization` header of each API request. The steps of the
flow are as follows:
![Flow](./img/http-client-identity-now.png)
1. **Access Token Request** - The HTTP client (a script, application, Postman, cURL, etc.) makes a request to IdentityNow to get an `access_token`. The details of this are described in the [Authentication Details](#authentication-details) section.
2. **Access Token Response** - Assuming the request is valid, IdentityNow will issue an `access_token` to the HTTP client in response.
3. **API Request** - The HTTP client makes a request to an IdentityNow API endpoint. Included in that request is the header `Authorization: Bearer {access_token}`.
4. **API Response** - Assuming the request and the `access_token` are valid, IdentityNow will return a response to the client. If unexpected errors occur, see the [Troubleshooting](#troubleshooting) section of this document.
1. **Access Token Request** - The HTTP client (a script, application, Postman,
cURL, etc.) makes a request to IdentityNow to get an `access_token`. The
details of this are described in the
[Authentication Details](#authentication-details) section.
2. **Access Token Response** - Assuming the request is valid, IdentityNow will
issue an `access_token` to the HTTP client in response.
3. **API Request** - The HTTP client makes a request to an IdentityNow API
endpoint. Included in that request is the header
`Authorization: Bearer {access_token}`.
4. **API Response** - Assuming the request and the `access_token` are valid,
IdentityNow will return a response to the client. If unexpected errors occur,
see the [Troubleshooting](#troubleshooting) section of this document.
The SailPoint authentication/authorization model is fully [OAuth 2.0](https://oauth.net/2/) compliant, with issued `access_tokens` leveraging the [JSON Web Token (JWT)](https://jwt.io/) standard. This document provides the necessary information for interacting with SailPoint's OAuth2 services.
The SailPoint authentication/authorization model is fully
[OAuth 2.0](https://oauth.net/2/) compliant, with issued `access_tokens`
leveraging the [JSON Web Token (JWT)](https://jwt.io/) standard. This document
provides the necessary information for interacting with SailPoint's OAuth2
services.
## Find Your Tenant's OAuth Details
This document assumes your IDN instance is using the domain name supplied by SailPoint. If your instance is using a vanity URL, then you will need to open the following URL in your browser to get your OAuth info. See [finding your org/tenant name](./getting-started.md#finding-your-orgtenant-name) in the [getting started guide](./getting-started.md) to get your `{tenant}`.
This document assumes your IDN instance is using the domain name supplied by
SailPoint. If your instance is using a vanity URL, then you will need to open
the following URL in your browser to get your OAuth info. See
[finding your org/tenant name](./getting-started.md#finding-your-orgtenant-name)
in the [getting started guide](./getting-started.md) to get your `{tenant}`.
`https://{tenant}.api.identitynow.com/oauth/info`
This page will present you with your `authorizeEndpoint` and `tokenEndpoint`, which you will need to follow along with the examples in this document.
This page will present you with your `authorizeEndpoint` and `tokenEndpoint`,
which you will need to follow along with the examples in this document.
```json
{
@@ -51,47 +74,98 @@ This page will present you with your `authorizeEndpoint` and `tokenEndpoint`, wh
## Personal Access Tokens
A personal access token is a method of authenticating to an API as a user without needing to supply a username and password. The primary use case for personal access tokens is in scripts or programs that don't have an easy way to implement an OAuth 2.0 flow and that need to call API endpoints that require a user context. Personal access tokens are also convenient when using Postman to explore and test APIs.
A personal access token is a method of authenticating to an API as a user
without needing to supply a username and password. The primary use case for
personal access tokens is in scripts or programs that don't have an easy way to
implement an OAuth 2.0 flow and that need to call API endpoints that require a
user context. Personal access tokens are also convenient when using Postman to
explore and test APIs.
:::info Update
Previously, only users with the `Admin` or `Source Admin` role were allowed to generate personal access tokens. Now, all users are able to generate personal access tokens!
Previously, only users with the `Admin` or `Source Admin` role were allowed to
generate personal access tokens. Now, all users are able to generate personal
access tokens!
:::
To generate a personal access token from the IdentityNow UI, perform the following steps after logging into your IdentityNow instance:
To generate a personal access token from the IdentityNow UI, perform the
following steps after logging into your IdentityNow instance:
1. Select **Preferences** from the drop-down menu under your username, then **Personal Access Tokens** on the left. You can also go straight to the page using this URL, replacing `{tenant}` with your IdentityNow tenant: `https://{tenant}.identitynow.com/ui/d/user-preferences/personal-access-tokens`.
1. Select **Preferences** from the drop-down menu under your username, then
**Personal Access Tokens** on the left. You can also go straight to the page
using this URL, replacing `{tenant}` with your IdentityNow tenant:
`https://{tenant}.identitynow.com/ui/d/user-preferences/personal-access-tokens`.
2. Click **New Token** and enter a meaningful description to help differentiate the token from others.
2. Click **New Token** and enter a meaningful description to help differentiate
the token from others.
:::caution
The **New Token** button will be disabled when you've reached the limit of 10 personal access tokens per user. To avoid reaching this limit, we recommend you delete any tokens that are no longer needed.
:::
:::caution
3. Click **Create Token** to generate and view the two components that comprise the token: the `Secret` and the `Client ID`.
The **New Token** button will be disabled when you've reached the limit of 10
personal access tokens per user. To avoid reaching this limit, we recommend you
delete any tokens that are no longer needed.
:::danger Important
After you create the token, the value of the `Client ID` will be visible in the Personal Access Tokens list, but the corresponding `Secret` will not be visible after you close the window. You will need to store the `Secret` somewhere secure.
:::
:::
4. Copy both values somewhere that will be secure and accessible to you when you need to use the token.
3. Click **Create Token** to generate and view the two components that comprise
the token: the `Secret` and the `Client ID`.
To generate a personal access token from the API, use the [create personal access token endpoint](/idn/api/beta/create-personal-access-token).
:::danger Important
To use a personal access token to generate an `access_token` that can be used to authenticate requests to the API, follow the [Client Credentials Grant Flow](#client-credentials-grant-flow), using the `Client ID` and `Client Secret` obtained from the personal access token.
After you create the token, the value of the `Client ID` will be visible in the
Personal Access Tokens list, but the corresponding `Secret` will not be visible
after you close the window. You will need to store the `Secret` somewhere
secure.
:::
4. Copy both values somewhere that will be secure and accessible to you when you
   need to use the token.
To generate a personal access token from the API, use the
[create personal access token endpoint](/idn/api/beta/create-personal-access-token).
To use a personal access token to generate an `access_token` that can be used to
authenticate requests to the API, follow the
[Client Credentials Grant Flow](#client-credentials-grant-flow), using the
`Client ID` and `Client Secret` obtained from the personal access token.
## OAuth 2.0
[OAuth 2.0](https://oauth.net/2/) is an industry-standard protocol for authorization, and provides a variety of authorization flows for web applications, desktop applications, mobile phones, and devices. This specification and its extensions are developed within the [IETF OAuth Working Group](https://www.ietf.org/mailman/listinfo/oauth).
[OAuth 2.0](https://oauth.net/2/) is an industry-standard protocol for
authorization, and provides a variety of authorization flows for web
applications, desktop applications, mobile phones, and devices. This
specification and its extensions are developed within the
[IETF OAuth Working Group](https://www.ietf.org/mailman/listinfo/oauth).
There are several different authorization flows that OAuth 2.0 supports, and each of these has a grant-type which defines the different use cases. Some of the common ones which might be used with IdentityNow are as follows:
There are several different authorization flows that OAuth 2.0 supports, and
each of these has a grant-type which defines the different use cases. Some of
the common ones which might be used with IdentityNow are as follows:
1. [**Authorization Code**](https://oauth.net/2/grant-types/authorization-code/) - This grant type is used by clients to exchange an authorization code for an `access_token`. This is mainly used for web applications as there is a login into IdentityNow, with a subsequent redirect back to the web application / client.
2. [**Client Credentials**](https://oauth.net/2/grant-types/client-credentials/) - This grant type is used by clients to obtain an `access_token` outside the context of a user. Because this is outside of a user context, only a subset of IdentityNow REST APIs may be accessible with this kind of grant type.
3. [**Refresh Token**](https://oauth.net/2/grant-types/refresh-token/) - This grant type is used by clients in order to exchange a refresh token for a new `access_token` when the existing `access_token` has expired. This allows clients to continue using the API without having to re-authenticate as frequently. This grant type is commonly used together with `Authorization Code` to prevent a user from having to log in several times per day.
1. [**Authorization Code**](https://oauth.net/2/grant-types/authorization-code/) -
This grant type is used by clients to exchange an authorization code for an
`access_token`. This is mainly used for web applications as there is a login
into IdentityNow, with a subsequent redirect back to the web application /
client.
2. [**Client Credentials**](https://oauth.net/2/grant-types/client-credentials/) -
This grant type is used by clients to obtain an `access_token` outside the
context of a user. Because this is outside of a user context, only a subset
of IdentityNow REST APIs may be accessible with this kind of grant type.
3. [**Refresh Token**](https://oauth.net/2/grant-types/refresh-token/) - This
grant type is used by clients in order to exchange a refresh token for a new
`access_token` when the existing `access_token` has expired. This allows
clients to continue using the API without having to re-authenticate as
frequently. This grant type is commonly used together with
`Authorization Code` to prevent a user from having to log in several times
per day.
## JSON Web Token (JWT)
[JSON Web Token (JWT)](https://jwt.io) is an industry-standard protocol for creating access tokens which assert various claims about the resource who has authenticated. The tokens have a specific structure consisting of a header, payload, and signature.
[JSON Web Token (JWT)](https://jwt.io) is an industry-standard protocol for
creating access tokens which assert various claims about the resource who has
authenticated. The tokens have a specific structure consisting of a header,
payload, and signature.
A raw JWT might look like this:
@@ -106,36 +180,33 @@ If you were to decode the access token data, it might look something like this:
```json
{
"alg": "HS256",
"typ": "JWT"
"alg": "HS256",
"typ": "JWT"
}
```
</TabItem>
<TabItem value="payload" label="Payload">
```json
{
"tenant_id": "58eb06a4-dcd7-4e96-8fac-cca2afc03e61",
"internal": true,
"pod": "cook",
"org": "example",
"identity_id": "ff80818155fe8c080155fe8d925b0316",
"user_name": "slpt.services",
"strong_auth": true,
"authorities": [
"ORG_ADMIN"
],
"client_id": "nKBPOwjJH8LXSjIl",
"strong_auth_supported": true,
"user_id": "595826",
"scope": [
"read",
"write"
],
"exp": 1565888319,
"jti": "c98d1236-4513-4c89-a0d0-0cb9f3276b56"
"tenant_id": "58eb06a4-dcd7-4e96-8fac-cca2afc03e61",
"internal": true,
"pod": "cook",
"org": "example",
"identity_id": "ff80818155fe8c080155fe8d925b0316",
"user_name": "slpt.services",
"strong_auth": true,
"authorities": ["ORG_ADMIN"],
"client_id": "nKBPOwjJH8LXSjIl",
"strong_auth_supported": true,
"user_id": "595826",
"scope": ["read", "write"],
"exp": 1565888319,
"jti": "c98d1236-4513-4c89-a0d0-0cb9f3276b56"
}
```
</TabItem>
<TabItem value="signature" label="Signature">
@@ -146,6 +217,7 @@ base64UrlEncode(payload),
{secret}
)
```
</TabItem>
</Tabs>
@@ -153,29 +225,43 @@ You can check the JWT access token data online at [jwt.io](https://jwt.io).
## Authentication Details
This section details how to call the SailPoint Platform OAuth 2.0 token endpoints to get an `access_token`.
This section details how to call the SailPoint Platform OAuth 2.0 token
endpoints to get an `access_token`.
### Prerequisites
Before any OAuth 2.0 token requests can be initiated, a Client ID and secret are necessary. As an `ORG_ADMIN`, browse to your API Management Admin Page at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` and create an API client with the appropriate grant types for your use case. If you are not an admin of your org, you can ask an admin to create this for you. Be sure to save your `Client Secret` somewhere secure, as you will not be able to view or change it later.
Before any OAuth 2.0 token requests can be initiated, a Client ID and secret are
necessary. As an `ORG_ADMIN`, browse to your API Management Admin Page at
`https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel`
and create an API client with the appropriate grant types for your use case. If
you are not an admin of your org, you can ask an admin to create this for you.
Be sure to save your `Client Secret` somewhere secure, as you will not be able
to view or change it later.
### OAuth 2.0 Token Request
When authenticating to IdentityNow, the OAuth 2.0 token endpoint resides on the IdentityNow API Gateway at:
When authenticating to IdentityNow, the OAuth 2.0 token endpoint resides on the
IdentityNow API Gateway at:
```text
POST https://{tenant}.api.identitynow.com/oauth/token
```
How you call this endpoint to get your token depends largely on the OAuth 2.0 flow and grant type you wish to implement. The details for each grant type within IdentityNow are described in the following sections.
How you call this endpoint to get your token depends largely on the OAuth 2.0
flow and grant type you wish to implement. The details for each grant type
within IdentityNow are described in the following sections.
### Authorization Code Grant Flow
Further Reading: [https://oauth.net/2/grant-types/authorization-code/](https://oauth.net/2/grant-types/authorization-code/)
Further Reading:
[https://oauth.net/2/grant-types/authorization-code/](https://oauth.net/2/grant-types/authorization-code/)
This grant type is used by clients to exchange an authorization code for an `access_token`. This is mainly used for web apps as there is a login into IdentityNow, with a subsequent redirect back to the web app / client.
This grant type is used by clients to exchange an authorization code for an
`access_token`. This is mainly used for web apps as there is a login into
IdentityNow, with a subsequent redirect back to the web app / client.
The OAuth 2.0 client you are using must have `AUTHORIZATION_CODE` as one of its grant types. The redirect URLs must also match the list in the client as well:
The OAuth 2.0 client you are using must have `AUTHORIZATION_CODE` as one of its
grant types. The redirect URLs must also match the list in the client as well:
```json
{
@@ -194,6 +280,7 @@ The OAuth 2.0 client you are using must have `AUTHORIZATION_CODE` as one of its
...
}
```
<br></br>
### Authorization Flow
@@ -204,37 +291,45 @@ The OAuth 2.0 client you are using must have `AUTHORIZATION_CODE` as one of its
2. The web app sends an authorization request to IdentityNow in the form:
```Text
GET https://{tenant}.identitynow.com/oauth/authorize?client_id={client-id}&client_secret={client-secret}&response_type=code&redirect_uri={redirect-url}
```
```Text
GET https://{tenant}.identitynow.com/oauth/authorize?client_id={client-id}&client_secret={client-secret}&response_type=code&redirect_uri={redirect-url}
```
3. IdentityNow redirects the user to a login prompt to authenticate to IdentityNow.
3. IdentityNow redirects the user to a login prompt to authenticate to
IdentityNow.
4. The user authenticates to IdentityNow.
5. Once authentication is successful, IdentityNow issues an authorization code back to the web app.
5. Once authentication is successful, IdentityNow issues an authorization code
back to the web app.
6. The web app submits an **OAuth 2.0 Token Request** to IdentityNow in the form:
6. The web app submits an **OAuth 2.0 Token Request** to IdentityNow in the
form:
```text
POST https://{tenant}.api.identitynow.com/oauth/token?grant_type=authorization_code&client_id={client-id}&client_secret={client-secret}&code={code}&redirect_uri={redirect-url}
```
```text
POST https://{tenant}.api.identitynow.com/oauth/token?grant_type=authorization_code&client_id={client-id}&client_secret={client-secret}&code={code}&redirect_uri={redirect-url}
```
>**Note**: the token endpoint URL is `{tenant}.api.identitynow.com`, while the authorize URL is `{tenant}.identitynow.com`. Be sure to use the correct URL when setting up your webapp to use this flow.
> **Note**: the token endpoint URL is `{tenant}.api.identitynow.com`, while the
> authorize URL is `{tenant}.identitynow.com`. Be sure to use the correct URL
> when setting up your webapp to use this flow.
7. IdentityNow validates the token request and submits a response. If successful, the response will contain a JWT `access_token`.
7. IdentityNow validates the token request and submits a response. If
successful, the response will contain a JWT `access_token`.
The query parameters in the OAuth 2.0 token request for the Authorization Code grant are as follows:
The query parameters in the OAuth 2.0 token request for the Authorization Code
grant are as follows:
| Key | Description |
| ------------- | ------------------------------------------------------------ |
| `grant_type` | Set to `authorization_code` for the authorization code grant type. |
| `client_id` | This is the client ID for the API client (e.g. `b61429f5-203d-494c-94c3-04f54e17bc5c`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` |
| `client_secret `| This is the client secret for the API client (e.g. `c924417c85b19eda40e171935503d8e9747ca60ddb9b48ba4c6bb5a7145fb6c5`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` |
| `code` | This is a code returned by `/oauth/authorize`. |
| `redirect_uri` | This is a URL of the application to redirect to once the token has been granted. |
| Key | Description |
| ---------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `grant_type` | Set to `authorization_code` for the authorization code grant type. |
| `client_id` | This is the client ID for the API client (e.g. `b61429f5-203d-494c-94c3-04f54e17bc5c`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` |
| `client_secret ` | This is the client secret for the API client (e.g. `c924417c85b19eda40e171935503d8e9747ca60ddb9b48ba4c6bb5a7145fb6c5`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` |
| `code` | This is a code returned by `/oauth/authorize`. |
| `redirect_uri` | This is a URL of the application to redirect to once the token has been granted. |
Here is an example OAuth 2.0 token request for the Authorization Code grant type.
Here is an example OAuth 2.0 token request for the Authorization Code grant
type.
```bash
curl -X POST \
@@ -244,11 +339,20 @@ curl -X POST \
### Client Credentials Grant Flow
Further Reading: [https://oauth.net/2/grant-types/client-credentials/](https://oauth.net/2/grant-types/client-credentials/)
Further Reading:
[https://oauth.net/2/grant-types/client-credentials/](https://oauth.net/2/grant-types/client-credentials/)
This grant type is used by clients to obtain an access token outside the context of a user. This is probably the simplest authentication flow, but comes with a major drawback: API endpoints that require [user level permissions](https://documentation.sailpoint.com/saas/help/common/users/user_level_matrix.html) will not work. [Personal Access Tokens](#personal-access-tokens) are a form of Client Credentials that have a user context, so they do not share this drawback. However, the APIs that can be invoked with a personal access token depend on the permissions of the user that generated it.
This grant type is used by clients to obtain an access token outside the context
of a user. This is probably the simplest authentication flow, but comes with a
major drawback: API endpoints that require
[user level permissions](https://documentation.sailpoint.com/saas/help/common/users/user_level_matrix.html)
will not work. [Personal Access Tokens](#personal-access-tokens) are a form of
Client Credentials that have a user context, so they do not share this drawback.
However, the APIs that can be invoked with a personal access token depend on the
permissions of the user that generated it.
An OAuth 2.0 client using the Client Credentials flow must have `CLIENT_CREDENTIALS` as one of its grantTypes:
An OAuth 2.0 client using the Client Credentials flow must have
`CLIENT_CREDENTIALS` as one of its grantTypes:
```json
{
@@ -265,27 +369,31 @@ An OAuth 2.0 client using the Client Credentials flow must have `CLIENT_CREDENTI
}
```
[Personal Access Tokens](#personal-access-tokens) are implicitly granted a `CLIENT_CREDENTIALS` grant type.
[Personal Access Tokens](#personal-access-tokens) are implicitly granted a
`CLIENT_CREDENTIALS` grant type.
The overall authorization flow looks like this:
The overall authorization flow looks like this:
1. The client submits an **OAuth 2.0 Token Request** to IdentityNow in the form:
```text
POST https://{tenant}.api.identitynow.com/oauth/token?grant_type=client_credentials&client_id={client-id}&client_secret={client-secret}
```
```text
POST https://{tenant}.api.identitynow.com/oauth/token?grant_type=client_credentials&client_id={client-id}&client_secret={client-secret}
```
2. IdentityNow validates the token request and submits a response. If successful, the response will contain a JWT access token.
2. IdentityNow validates the token request and submits a response. If
successful, the response will contain a JWT access token.
The query parameters in the OAuth 2.0 Token Request for the Client Credentials grant are as follows:
The query parameters in the OAuth 2.0 Token Request for the Client Credentials
grant are as follows:
| Key | Description |
| ------------- | ------------------------------------------------------------ |
| `grant_type`  | Set to `CLIENT_CREDENTIALS` for the client credentials grant type. |
| `client_id`  | This is the client ID for the API client (e.g. `b61429f5-203d-494c-94c3-04f54e17bc5c`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` or by [creating a personal access token](#personal-access-tokens). |
| Key | Description |
| --------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `grant_type`    | Set to `CLIENT_CREDENTIALS` for the client credentials grant type.                                                                                                                                                                                                                                                    |
| `client_id`     | This is the client ID for the API client (e.g. `b61429f5-203d-494c-94c3-04f54e17bc5c`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` or by [creating a personal access token](#personal-access-tokens).                                              |
| `client_secret` | This is the client secret for the API client (e.g. `c924417c85b19eda40e171935503d8e9747ca60ddb9b48ba4c6bb5a7145fb6c5`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel` or by [creating a personal access token](#personal-access-tokens).              |
Here is an example request to generate an `access_token` using Client Credentials.
Here is an example request to generate an `access_token` using Client
Credentials.
```bash
curl -X POST \
@@ -295,11 +403,17 @@ curl -X POST \
### Refresh Token Grant Flow
Further Reading: [https://oauth.net/2/grant-types/refresh-token/](https://oauth.net/2/grant-types/refresh-token/)
Further Reading:
[https://oauth.net/2/grant-types/refresh-token/](https://oauth.net/2/grant-types/refresh-token/)
This grant type is used by clients in order to exchange a refresh token for a new `access_token` once the existing `access_token` has expired. This allows clients to continue to have a valid `access_token` without the need for the user to login as frequently.
This grant type is used by clients in order to exchange a refresh token for a
new `access_token` once the existing `access_token` has expired. This allows
clients to continue to have a valid `access_token` without the need for the user
to login as frequently.
The OAuth 2.0 client you are using must have `REFRESH_TOKEN` as one of its grant types, and is typically used in conjunction with another grant type, like `CLIENT_CREDENTIALS` or `AUTHORIZATION_CODE`:
The OAuth 2.0 client you are using must have `REFRESH_TOKEN` as one of its grant
types, and is typically used in conjunction with another grant type, like
`CLIENT_CREDENTIALS` or `AUTHORIZATION_CODE`:
```json
{
@@ -317,26 +431,31 @@ The OAuth 2.0 client you are using must have `REFRESH_TOKEN` as one of its grant
}
```
The overall authorization flow looks like this:
The overall authorization flow looks like this:
1. The client application receives an `access_token` and a `refresh_token` via one of the other OAuth grant flows, like `AUTHORIZATION_CODE`.
2. The client application notices that the `access_token` is about to expire, based on the `expires_in` attribute contained within the JWT token.
1. The client application receives an `access_token` and a `refresh_token` via
one of the other OAuth grant flows, like `AUTHORIZATION_CODE`.
2. The client application notices that the `access_token` is about to expire,
based on the `expires_in` attribute contained within the JWT token.
3. The client submits an **OAuth 2.0 Token Request** to IdentityNow in the form:
```text
POST https://{tenant}.api.identitynow.com/oauth/token?grant_type=refresh_token&client_id={client_id}&client_secret={client_secret}&refresh_token={refresh_token}
```
```text
POST https://{tenant}.api.identitynow.com/oauth/token?grant_type=refresh_token&client_id={client_id}&client_secret={client_secret}&refresh_token={refresh_token}
```
4. IdentityNow validates the token request and submits a response. If successful, the response will contain a new `access_token` and `refresh_token`.
4. IdentityNow validates the token request and submits a response. If
successful, the response will contain a new `access_token` and
`refresh_token`.
The query parameters in the OAuth 2.0 Token Request for the Refresh Token grant are as follows:
The query parameters in the OAuth 2.0 Token Request for the Refresh Token grant
are as follows:
| Key | Description |
| ------------- | ------------------------------------------------------------ |
| `grant_type` | Set to `refresh_token` for the authorization code grant type. |
| `client_id` | This is the client ID for the API client (e.g. `b61429f5-203d-494c-94c3-04f54e17bc5c`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel`. |
| Key | Description |
| --------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `grant_type` | Set to `refresh_token` for the authorization code grant type. |
| `client_id` | This is the client ID for the API client (e.g. `b61429f5-203d-494c-94c3-04f54e17bc5c`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel`. |
| `client_secret` | This is the client secret for the API client (e.g. `c924417c85b19eda40e171935503d8e9747ca60ddb9b48ba4c6bb5a7145fb6c5`). This can be generated at `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel`. |
| `refresh_token` | This is the `refresh_token` that was provided along with the now expired `access_token`. |
| `refresh_token` | This is the `refresh_token` that was provided along with the now expired `access_token`. |
Here is an example call OAuth 2.0 Token Request for the Refresh Token grant.
@@ -348,7 +467,8 @@ curl -X POST \
## OAuth 2.0 Token Response
A successful request to `https://{tenant}.api.identitynow.com/oauth/token` will contain a response body similar to this:
A successful request to `https://{tenant}.api.identitynow.com/oauth/token` will
contain a response body similar to this:
```json
{
@@ -371,7 +491,10 @@ A successful request to `https://{tenant}.api.identitynow.com/oauth/token` will
}
```
The `access_token` contains the JSON Web Token which is subsequently used in any further REST API calls through the IdentityNow API gateway. To use the `access_token`, simply include it in the `Authorization` header as a `Bearer` token. For example:
The `access_token` contains the JSON Web Token which is subsequently used in any
further REST API calls through the IdentityNow API gateway. To use the
`access_token`, simply include it in the `Authorization` header as a `Bearer`
token. For example:
```bash
curl -X GET \
@@ -380,42 +503,76 @@ curl -X GET \
-H 'cache-control: no-cache'
```
The `expires_in` describes the lifetime, in seconds, of the `access_token`. For example, the value 749 means that the `access_token` will expire in 12.5 minutes from the time the response was generated. The exact expiration date is also contained within the `access_token`. You can view this expiration time by decoding the JWT `access_token` using a tool like [jwt.io](https://jwt.io/).
The `expires_in` describes the lifetime, in seconds, of the `access_token`. For
example, the value 749 means that the `access_token` will expire in 12.5 minutes
from the time the response was generated. The exact expiration date is also
contained within the `access_token`. You can view this expiration time by
decoding the JWT `access_token` using a tool like [jwt.io](https://jwt.io/).
The `refresh_token` contains a JSON Web Token for use in a [Refresh Token](#refresh-token-grant-flow) grant flow. The `refresh_token` will only be present if the API client has the `REFRESH_TOKEN` grant flow.
The `refresh_token` contains a JSON Web Token for use in a
[Refresh Token](#refresh-token-grant-flow) grant flow. The `refresh_token` will
only be present if the API client has the `REFRESH_TOKEN` grant flow.
The `user_id` and `identity_id` define the identity context of the person that authenticated. This is not set for the Client Credentials grant type since it doesn't have a user context.
The `user_id` and `identity_id` define the identity context of the person that
authenticated. This is not set for the Client Credentials grant type since it
doesn't have a user context.
## Which OAuth 2.0 Grant Flow Should I use
Deciding which OAuth 2.0 grant flow you should use largely depends on your use case.
Deciding which OAuth 2.0 grant flow you should use largely depends on your use
case.
### Daily Work or Quick Actions
For daily work or short, quick administrative actions, you may not really need to worry about grant types, as an access token can easily be obtained in the user interface. In order to see this:
For daily work or short, quick administrative actions, you may not really need
to worry about grant types, as an access token can easily be obtained in the
user interface. In order to see this:
1. Login to IdentityNow.
2. Go to `https://{tenant}.identitynow.com/ui/session`.
3. The `accessToken` is visible in the user interface.
4. Use this access token in the `Authorization` header when making API calls. If the access token expires, log back into IdentityNow and retrieve the new access token.
4. Use this access token in the `Authorization` header when making API calls. If
   the access token expires, log back into IdentityNow and retrieve the new
access token.
While this is very simple to use, this is only valid for a short period of time (a few minutes).
While this is very simple to use, this is only valid for a short period of time
(a few minutes).
### Postman
If you are using the popular HTTP client, [Postman](https://www.getpostman.com), you have a couple of options on how you might set up your authorization. You can just leverage the accessToken as mentioned above, or you can also configure Postman to use OAuth 2.0 directly.
If you are using the popular HTTP client, [Postman](https://www.getpostman.com),
you have a couple of options on how you might set up your authorization. You can
just leverage the accessToken as mentioned above, or you can also configure
Postman to use OAuth 2.0 directly.
### Web Applications
If you are making a web application, the best grant flow to use is the [Authorization Code](#authorization-code-grant-flow) grant flow. This will allow users to be directed to IdentityNow to login, and then redirected back to the web application via a URL redirect. This also works well with SSO, strong authentication, or pass-through authentication mechanisms.
If you are making a web application, the best grant flow to use is the
[Authorization Code](#authorization-code-grant-flow) grant flow. This will allow
users to be directed to IdentityNow to login, and then redirected back to the
web application via a URL redirect. This also works well with SSO, strong
authentication, or pass-through authentication mechanisms.
SailPoint does not recommend using a password grant flow for web applications as it would involve entering IdentityNow credentials in the web application. This flow also doesn't allow you to work with SSO, strong authentication, or pass-through authentication.
SailPoint does not recommend using a password grant flow for web applications as
it would involve entering IdentityNow credentials in the web application. This
flow also doesn't allow you to work with SSO, strong authentication, or
pass-through authentication.
### Scripts or Programs
If you are writing scripts or programs that leverage the IdentityNow APIs, which OAuth 2.0 grant flow you should use typically depends on what you are doing, and which user context you need to operate under.
If you are writing scripts or programs that leverage the IdentityNow APIs, which
OAuth 2.0 grant flow you should use typically depends on what you are doing, and
which user context you need to operate under.
Because scripts, code, or programs do not have an interactive web-interface it is difficult, but not impossible, to implement a working [Authorization Code](#authorization-code-grant-flow) flow. Most scripts or programs typically run as a [Client Credentials](#client-credentials-grant-flow). If your APIs can work under an API context without a user, then [Client Credentials](#client-credentials-grant-flow) is ideal. However, if your APIs need a user or admin context, then the [Personal Access Token](#personal-access-tokens) approach will be more suitable.
Because scripts, code, or programs do not have an interactive web-interface it
is difficult, but not impossible, to implement a working
[Authorization Code](#authorization-code-grant-flow) flow. Most scripts or
programs typically run as a
[Client Credentials](#client-credentials-grant-flow). If your APIs can work
under an API context without a user, then
[Client Credentials](#client-credentials-grant-flow) is ideal. However, if your
APIs need a user or admin context, then the
[Personal Access Token](#personal-access-tokens) approach will be more suitable.
## Troubleshooting
@@ -423,63 +580,90 @@ Having issues? Follow these steps.
1. **Verify the API End Point Calls**
1. Verify the structure of the API call:
1. Verify that the API calls are going through the API gateway:
`https://{tenant}.api.identitynow.com`
2. Verify you are calling their version correctly:
- Private APIs: `https://{tenant}.api.identitynow.com/cc/api/{endpoint}`
- V2 APIs: `https://{tenant}.api.identitynow.com/v2/{endpoint}`
- V3 APIs: `https://{tenant}.api.identitynow.com/v3/{endpoint}`
- Beta APIs: `https://{tenant}.api.identitynow.com/beta/{endpoint}`
3. Verify that the API calls have the correct headers (e.g., `content-type`), query parameters, and body data.
2. If the HTTP response is **401 Unauthorized** , this is an indication that either there is no `Authorization` header or the `access_token` is invalid. Verify that the API calls are supplying the `access_token` in the `Authorization` header correctly (ex. `Authorization: Bearer {access_token}`) and that the `access_token` has not expired.
3. If the HTTP response is **403 Forbidden**, this is an indication that the `access_token` is valid, but the user you are running as doesn't have access to this endpoint. Check the access rights which are associated with the user.
:::info
This can also be due to calling an API which expects a user, but your authorization grant type might not have a user context. Calling most administrative APIs with a CLIENT_CREDENTIAL grant will often produce this result.
:::
1. Verify the structure of the API call:
1. Verify that the API calls are going through the API gateway:
`https://{tenant}.api.identitynow.com`
1. Verify you are calling their version correctly:
- Private APIs: `https://{tenant}.api.identitynow.com/cc/api/{endpoint}`
- V2 APIs: `https://{tenant}.api.identitynow.com/v2/{endpoint}`
- V3 APIs: `https://{tenant}.api.identitynow.com/v3/{endpoint}`
- Beta APIs: `https://{tenant}.api.identitynow.com/beta/{endpoint}`
1. Verify that the API calls have the correct headers (e.g., `content-type`),
query parameters, and body data.
1. If the HTTP response is **401 Unauthorized** , this is an indication that
either there is no `Authorization` header or the `access_token` is invalid.
Verify that the API calls are supplying the `access_token` in the
`Authorization` header correctly (ex. `Authorization: Bearer {access_token}`)
and that the `access_token` has not expired.
1. If the HTTP response is **403 Forbidden**, this is an indication that the
`access_token` is valid, but the user you are running as doesn't have access
to this endpoint. Check the access rights which are associated with the user.
:::info
This can also be due to calling an API which expects a user, but your
authorization grant type might not have a user context. Calling most
administrative APIs with a CLIENT_CREDENTIAL grant will often produce this
result.
:::
2. **Verify the OAuth 2.0 Client**
1. Verify that the OAuth 2.0 Client is not a Legacy OAuth client. Legacy OAuth clients will not work.
This is very apparent by looking at the Client ID, as OAuth 2.0 Client IDs have dashes. Here is an example:
Legacy Client ID: `G6xLlBBOKIcOAQuK`
OAuth 2.0 Client ID: `b61429f5-203d-494c-94c3-04f54e17bc5c`
1. Verify that the OAuth 2.0 Client is not a Legacy OAuth client. Legacy OAuth
clients will not work. This is very apparent by looking at the Client ID, as
OAuth 2.0 Client IDs have dashes. Here is an example: Legacy Client ID:
`G6xLlBBOKIcOAQuK` OAuth 2.0 Client ID:
`b61429f5-203d-494c-94c3-04f54e17bc5c`
2. Verify the OAuth 2.0 Client ID exists. This can be verified by calling:
1. Verify the OAuth 2.0 Client ID exists. This can be verified by calling:
```text
GET /beta/oauth-clients/{client-id}
```
```text
GET /beta/oauth-clients/{client-id}
```
or
or
```text
GET /beta/oauth-clients/
```
```text
GET /beta/oauth-clients/
```
You can also view all of the active clients in the UI by going to `https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel`.
You can also view all of the active clients in the UI by going to
`https://{tenant}.identitynow.com/ui/admin/#admin:global:security:apimanagementpanel`.
3. Verify that the OAuth 2.0 Client grant types match the OAuth 2.0 grant type flow you are trying to use. For instance, this client will work with [Authorization Code](#authorization-code-grant-flow) and [Client Credentials](#client-credentials-grant-flow) flows, but not [Refresh Token](#refresh-token-grant-flow) flows:
3. Verify that the OAuth 2.0 Client grant types match the OAuth 2.0 grant type
flow you are trying to use. For instance, this client will work with
[Authorization Code](#authorization-code-grant-flow) and
   [Client Credentials](#client-credentials-grant-flow) flows, but not
[Refresh Token](#refresh-token-grant-flow) flows:
```json
{
"enabled": true,
"created": "2019-05-23T02:06:20.685Z",
"name": "My Application",
"description": "My Application",
"id": "b61429f5-203d-494c-94c3-04f54e17bc5c",
"secret": null,
"grantTypes": [
"AUTHORIZATION_CODE",
"CLIENT_CREDENTIALS"
],
...
}
```
```json
{
"enabled": true,
"created": "2019-05-23T02:06:20.685Z",
"name": "My Application",
"description": "My Application",
"id": "b61429f5-203d-494c-94c3-04f54e17bc5c",
"secret": null,
"grantTypes": [
"AUTHORIZATION_CODE",
"CLIENT_CREDENTIALS"
],
...
}
```
4. If using an [Authorization Code](#authorization-code-grant-flow) flow, verify the redirect URL(s) for your application match the `redirectUris` value in the client. You can check this using the [oauth-clients endpoint](/idn/api/beta/list-oauth-client).
4. If using an [Authorization Code](#authorization-code-grant-flow) flow,
verify the redirect URL(s) for your application match the `redirectUris`
value in the client. You can check this using the
[oauth-clients endpoint](/idn/api/beta/list-oauth-client).
3. **Verify the OAuth 2.0 Calls**
5. **Verify the OAuth 2.0 Calls**
1. Verify that the OAuth call flow is going to the right URLs, with the correct query parameters and data values. A common source of errors is using the wrong host for authorization and token API calls. The token endpoint URL is `{tenant}.api.identitynow.com`, while the authorize URL is `{tenant}.identitynow.com`.
6. Verify that the OAuth call flow is going to the right URLs, with the correct
query parameters and data values. A common source of errors is using the
wrong host for authorization and token API calls. The token endpoint URL is
`{tenant}.api.identitynow.com`, while the authorize URL is
`{tenant}.identitynow.com`.

View File

@@ -13,55 +13,100 @@ tags: ["Getting Started"]
## Find Your Tenant Name
To form the proper URL for an API request, you must know your tenant name. To find your tenant name, log into IdentityNow, navigate to Admin, select the Dashboard dropdown, and select Overview. The org name is displayed within the Org Details section of the dashboard. If you do not have admin access, you can still find your tenant name and the API base URL you will use for API calls. To do so, view your session details when you are logged into your IdentityNow instance. Change your URL to the following: `https://{your-IdentityNow-hostname}.com/ui/session`, where `{your-IdentityNow-hostname}` is your company's domain name for accessing IdentityNow. The session detail you want is the `baseUrl`, which has the form of `https://{tenant}.api.identitynow.com`.
To form the proper URL for an API request, you must know your tenant name. To
find your tenant name, log into IdentityNow, navigate to Admin, select the
Dashboard dropdown, and select Overview. The org name is displayed within the
Org Details section of the dashboard. If you do not have admin access, you can
still find your tenant name and the API base URL you will use for API calls. To
do so, view your session details when you are logged into your IdentityNow
instance. Change your URL to the following:
`https://{your-IdentityNow-hostname}.com/ui/session`, where
`{your-IdentityNow-hostname}` is your company's domain name for accessing
IdentityNow. The session detail you want is the `baseUrl`, which has the form of
`https://{tenant}.api.identitynow.com`.
## Make Your First API Call
To get started, create a [personal access token](./authentication.md#personal-access-tokens), which can then be used to generate access tokens to authenticate your API calls. To generate a personal access token from IdentityNow, do the following after logging into your IdentityNow instance:
To get started, create a
[personal access token](./authentication.md#personal-access-tokens), which can
then be used to generate access tokens to authenticate your API calls. To
generate a personal access token from IdentityNow, do the following after
logging into your IdentityNow instance:
1. Select **Preferences** from the drop-down menu under your username. Then select **Personal Access Tokens** on the left. You can also go straight to the page using this URL, replacing `{tenant}` with your IdentityNow tenant: `https://{tenant}.identitynow.com/ui/d/user-preferences/personal-access-tokens`.
1. Select **Preferences** from the drop-down menu under your username. Then
select **Personal Access Tokens** on the left. You can also go straight to
the page using this URL, replacing `{tenant}` with your IdentityNow tenant:
`https://{tenant}.identitynow.com/ui/d/user-preferences/personal-access-tokens`.
2. Select **New Token** and enter a meaningful description to differentiate the token from others.
2. Select **New Token** and enter a meaningful description to differentiate the
token from others.
:::caution
The **New Token** button will be disabled when you reach the limit of 10 personal access tokens per user. To avoid reaching this limit, delete any tokens that are no longer needed.
:::
:::caution
3. Select **Create Token** to generate and view two components the token comprises: the `Secret` and the `Client ID`.
The **New Token** button will be disabled when you reach the limit of 10
personal access tokens per user. To avoid reaching this limit, delete any tokens
that are no longer needed.
:::danger Important
After you create the token, the value of the `Client ID` will be visible in the Personal Access Tokens list, but the corresponding `Secret` will not be visible after you close the window. Store the `Secret` somewhere secure.
:::
:::
4. Copy both values somewhere that will be secure and accessible to you when you need to use the token.
3. Select **Create Token** to generate and view two components the token
comprises: the `Secret` and the `Client ID`.
5. To create an `access_token` that can be used to authenticate API requests, use the following cURL command, replacing `{tenant}` with your IdentityNow tenant. The response body will contain an `access_token`, which will look like a long string of random characters.
:::danger Important
```bash
curl --location --request POST 'https://{tenant}.api.identitynow.com/oauth/token?grant_type=client_credentials&client_id={client_id}&client_secret={secret}'
```
After you create the token, the value of the `Client ID` will be visible in
the Personal Access Tokens list, but the corresponding `Secret` will not be
visible after you close the window. Store the `Secret` somewhere secure.
6. To test your `access_token`, execute the following cURL command, replacing `{tenant}` with your IdentityNow tenant and `access_token` with the token you generated in the previous step. If this is successful, you should get a JSON representation of an identity in your tenant.
:::
```bash
curl --request GET --url 'https://{tenant}.api.identitynow.com/v3/public-identities?limit=1' --header 'authorization: Bearer {access_token}'
```
4. Copy both values somewhere that will be secure and accessible to you when you
   need to use the token.
For more information about SailPoint Platform authentication, see [API Authentication](./authentication.md)
5. To create an `access_token` that can be used to authenticate API requests,
use the following cURL command, replacing `{tenant}` with your IdentityNow
tenant. The response body will contain an `access_token`, which will look
like a long string of random characters.
```bash
curl --location --request POST 'https://{tenant}.api.identitynow.com/oauth/token?grant_type=client_credentials&client_id={client_id}&client_secret={secret}'
```
6. To test your `access_token`, execute the following cURL command, replacing
`{tenant}` with your IdentityNow tenant and `access_token` with the token you
generated in the previous step. If this is successful, you should get a JSON
representation of an identity in your tenant.
```bash
curl --request GET --url 'https://{tenant}.api.identitynow.com/v3/public-identities?limit=1' --header 'authorization: Bearer {access_token}'
```
For more information about SailPoint Platform authentication, see
[API Authentication](./authentication.md)
## Rate Limits
There is a rate limit of 100 requests per `access_token` per 10 seconds for V3 API calls through the API gateway. If you exceed the rate limit, expect the following response from the API:
There is a rate limit of 100 requests per `access_token` per 10 seconds for V3
API calls through the API gateway. If you exceed the rate limit, expect the
following response from the API:
**HTTP Status Code**: 429 Too Many Requests
**Headers**:
* **Retry-After**: {seconds to wait before rate limit resets}
- **Retry-After**: {seconds to wait before rate limit resets}
## Authorization
Each API resource requires a specific level of authorization attached to your `access_token`. You can view these levels of authorization in the [user level access matrix](https://documentation.sailpoint.com/saas/help/common/users/user_level_matrix.html). Review the authorization constraints for each API endpoint to understand the user level needed to invoke the endpoint. Tokens generated outside of a user context, like the [Client Credentials](./authentication.md#client-credentials-grant-flow) grant type, are limited in the endpoints that it can call. If your token does not have permission to call an endpoint, you will receive the following response:
Each API resource requires a specific level of authorization attached to your
`access_token`. You can view these levels of authorization in the
[user level access matrix](https://documentation.sailpoint.com/saas/help/common/users/user_level_matrix.html).
Review the authorization constraints for each API endpoint to understand the
user level needed to invoke the endpoint. Tokens generated outside of a user
context, like the
[Client Credentials](./authentication.md#client-credentials-grant-flow) grant
type, are limited in the endpoints that it can call. If your token does not have
permission to call an endpoint, you will receive the following response:
**HTTP Status Code**: 403 Forbidden
@@ -83,4 +128,11 @@ Each API resource requires a specific level of authorization attached to your `a
## API Tools
There are several API tools that make exploring and testing APIs easier than using the command line or a programming language. One tool is [Postman](https://www.postman.com/downloads/). To import the SailPoint REST APIs into a tool like Postman, you must first download the REST specification. Navigate to the [V3 IDN APIs](/idn/api/v3) and select "Download OpenAPI specification". You can then import the JSON file in Postman by using the [import wizard](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/) within Postman.
There are several API tools that make exploring and testing APIs easier than
using the command line or a programming language. One tool is
[Postman](https://www.postman.com/downloads/). To import the SailPoint REST APIs
into a tool like Postman, you must first download the REST specification.
Navigate to the [V3 IDN APIs](/idn/api/v3) and select "Download OpenAPI
specification". You can then import the JSON file in Postman by using the
[import wizard](https://learning.postman.com/docs/getting-started/importing-and-exporting-data/)
within Postman.

View File

@@ -6,16 +6,20 @@ sidebar_label: Rate Limiting
sidebar_position: 4
sidebar_class_name: rateLimit
keywords: ["rate limit"]
description: There is a rate limit of 100 requests per access_token per 10 seconds for V3 API calls through the API gateway.
description:
There is a rate limit of 100 requests per access_token per 10 seconds for V3
API calls through the API gateway.
tags: ["Rate Limit"]
---
## Rate Limits
There is a rate limit of 100 requests per `access_token` per 10 seconds for V3 API calls through the API gateway. If you exceed the rate limit, expect the following response from the API:
There is a rate limit of 100 requests per `access_token` per 10 seconds for V3
API calls through the API gateway. If you exceed the rate limit, expect the
following response from the API:
**HTTP Status Code**: 429 Too Many Requests
**Headers**:
* **Retry-After**: {seconds to wait before rate limit resets}
- **Retry-After**: {seconds to wait before rate limit resets}

View File

@@ -6,83 +6,96 @@ sidebar_label: Standard Collection Parameters
sidebar_position: 3
sidebar_class_name: standardCollectionParameters
keywords: ["standard collection parameters"]
description: Many endpoints in the IdentityNow API support a generic syntax for paginating, filtering and sorting the results.
description:
Many endpoints in the IdentityNow API support a generic syntax for paginating,
filtering and sorting the results.
tags: ["Standard Collection Parameters"]
---
Many endpoints in the IdentityNow API support a generic syntax for paginating, filtering and sorting the results. A collection endpoint has the following characteristics:
Many endpoints in the IdentityNow API support a generic syntax for paginating,
filtering and sorting the results. A collection endpoint has the following
characteristics:
* The HTTP verb is always GET.
* The last component in the URL is a plural noun (ex. `/v3/public-identities`).
* The return value from a successful request is always an array of JSON objects. This array may be empty if there are no results.
- The HTTP verb is always GET.
- The last component in the URL is a plural noun (ex. `/v3/public-identities`).
- The return value from a successful request is always an array of JSON objects.
This array may be empty if there are no results.
## Paginating Results
Use the following optional query parameters to achieve pagination:
|Name|Description|Default|Constraints|
|---|---|---|---|
|`limit`|Integer specifying the maximum number of records to return in a single API call. If it is not specified, a default limit is used.|`250`|Maximum of 250 records per page|
|`offset`|Integer specifying the offset of the first result from the beginning of the collection. The **offset** value is record-based, not page-based, and the index starts at 0. For example, **offset=0** and **limit=20** returns records 0-19, but **offset=1** and **limit=20** returns records 1-20.|`0`|Between 0 and the last record index.|
|`count`|Boolean indicating whether a total count is returned, factoring in any filter parameters, in the **X-Total-Count** response header. The value is the total size of the collection that would be returned if **limit** and **offset** were ignored. For example, if the total number of records is 1000, then count=true would return 1000 in the **X-Total-Count** header. Because requesting a total count can have performance impact, do not send **count=true** if that value is not being used.|`false`|Must be `true` or `false`|
| Name | Description | Default | Constraints |
| -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | ------------------------------------ |
| `limit`  | Integer specifying the maximum number of records to return in a single API call. If it is not specified, a default limit is used. | `250` | Maximum of 250 records per page |
| `offset` | Integer specifying the offset of the first result from the beginning of the collection. The **offset** value is record-based, not page-based, and the index starts at 0. For example, **offset=0** and **limit=20** returns records 0-19, but **offset=1** and **limit=20** returns records 1-20. | `0` | Between 0 and the last record index. |
| `count` | Boolean indicating whether a total count is returned, factoring in any filter parameters, in the **X-Total-Count** response header. The value is the total size of the collection that would be returned if **limit** and **offset** were ignored. For example, if the total number of records is 1000, then count=true would return 1000 in the **X-Total-Count** header. Because requesting a total count can have performance impact, do not send **count=true** if that value is not being used. | `false` | Must be `true` or `false` |
Examples:
* GET `/v3/public-identities?limit=2`
* GET `/v3/public-identities?limit=20&offset=4`
* GET `/v3/public-identities?count=true`
- GET `/v3/public-identities?limit=2`
- GET `/v3/public-identities?limit=20&offset=4`
- GET `/v3/public-identities?count=true`
## Filtering Results
Any collection with a `filters` parameter supports filtering. This means that an item is only included in the returned array if the filters expression evaluates to true for that item. Check the available request parameters for the collection endpoint you are using to see if it supports filtering.
Any collection with a `filters` parameter supports filtering. This means that an
item is only included in the returned array if the filters expression evaluates
to true for that item. Check the available request parameters for the collection
endpoint you are using to see if it supports filtering.
### Data Types
Filter expressions are applicable to fields of the following types:
* Numeric
* Boolean: either **true** or **false**
* Strings. Enumerated values are a special case of this.
* Date-time. In V3, all date time values are in ISO-8601 format, as specified in [RFC 3339 - Date and Time on the Internet: Timestamps](https://tools.ietf.org/html/rfc3339).
- Numeric
- Boolean: either **true** or **false**
- Strings. Enumerated values are a special case of this.
- Date-time. In V3, all date time values are in ISO-8601 format, as specified in
[RFC 3339 - Date and Time on the Internet: Timestamps](https://tools.ietf.org/html/rfc3339).
### Filter Syntax
The V3 filter syntax is similar to, but not exactly the same as, that specified by the SCIM standard. These are some key differences:
The V3 filter syntax is similar to, but not exactly the same as, that specified
by the SCIM standard. These are some key differences:
* A slightly different set of supported operators
* Case-sensitivity of operators. All V3 filter operators are in lowercase; specifying "EQ" instead of "eq" is not allowed.
- A slightly different set of supported operators
- Case-sensitivity of operators. All V3 filter operators are in lowercase;
specifying "EQ" instead of "eq" is not allowed.
### Primitive Operators
These filter operators apply directly to fields and their values:
|Operator|Description|Example|
|---|---|---|
|`ca`|True if the collection-valued field contains all the listed values.|groups ca ("Venezia","Firenze")|
|`co`|True if the value of the field contains the specified value as a substring.(Applicable to string-valued fields only.)|name co "Rajesh"|
|`eq`|True if the value of the field indicated by the first operand is equal to the value specified by the second operand.|identitySummary.id eq "2c9180846e85e4b8016eafeba20c1314"|
|`ge`|True if the value of the field indicated by the first operand is greater or equal to the value specified by the second operand.|daysUntilEscalation ge 7 name ge "Genaro"|
|`gt`|True if the value of the field indicated by the first operand is greater than the value specified by the second operand.|daysUntilEscalation gt 7 name gt "Genaro" created gt 2018-12-18T23:05:55Z|
|`in`|True if the field value is in the list of values.|accountActivityItemId in ("2c9180846b0a0583016b299f210c1314","2c9180846b0a0581016b299e82560c1314")|
|`le`|True if the value of the field indicated by the first operand is less or equal to the value specified by the second operand.|daysUntilEscalation le 7 name le "Genaro"|
|`lt`|True if the value of the field indicated by the first operand is less than the value specified by the second operand.|daysUntilEscalation lt 7 name lt "Genaro" created lt 2018-12-18T23:05:55Z|
|`ne`|True if the value of the field indicated by the first operand is not equal to the value specified by the second operand.|type ne "ROLE"|
|`pr`|True if the field is present, that is, not null.|pr accountRequestInfo|
|`sw`|True if the value of the field starts with the specified value.(Applicable to string-valued fields only.)|name sw "Rajesh"|
| Operator | Description | Example |
| -------- | ------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- |
| `ca` | True if the collection-valued field contains all the listed values. | groups ca ("Venezia","Firenze") |
| `co` | True if the value of the field contains the specified value as a substring.(Applicable to string-valued fields only.) | name co "Rajesh" |
| `eq` | True if the value of the field indicated by the first operand is equal to the value specified by the second operand. | identitySummary.id eq "2c9180846e85e4b8016eafeba20c1314" |
| `ge` | True if the value of the field indicated by the first operand is greater or equal to the value specified by the second operand. | daysUntilEscalation ge 7 name ge "Genaro" |
| `gt` | True if the value of the field indicated by the first operand is greater than the value specified by the second operand. | daysUntilEscalation gt 7 name gt "Genaro" created gt 2018-12-18T23:05:55Z |
| `in` | True if the field value is in the list of values. | accountActivityItemId in ("2c9180846b0a0583016b299f210c1314","2c9180846b0a0581016b299e82560c1314") |
| `le` | True if the value of the field indicated by the first operand is less or equal to the value specified by the second operand. | daysUntilEscalation le 7 name le "Genaro" |
| `lt` | True if the value of the field indicated by the first operand is less than the value specified by the second operand. | daysUntilEscalation lt 7 name lt "Genaro" created lt 2018-12-18T23:05:55Z |
| `ne` | True if the value of the field indicated by the first operand is not equal to the value specified by the second operand. | type ne "ROLE" |
| `pr` | True if the field is present, that is, not null. | pr accountRequestInfo |
| `sw` | True if the value of the field starts with the specified value.(Applicable to string-valued fields only.) | name sw "Rajesh" |
### Composite Operators
These operators are applied to other filter expressions:
|Operator|Description|Example|
|---|---|---|
|`and`|True if both the filter-valued operands are true.|startDate gt 2018 and name sw "Genaro"|
|`not`|True if the filter-valued operand is false.|not groups ca ("Venezia","Firenze")|
|`or`|True if either of the filter-valued operands are true.|startDate gt 2018 or name sw "Genaro"|
| Operator | Description | Example |
| -------- | ------------------------------------------------------ | -------------------------------------- |
| `and` | True if both the filter-valued operands are true. | startDate gt 2018 and name sw "Genaro" |
| `not` | True if the filter-valued operand is false. | not groups ca ("Venezia","Firenze") |
| `or` | True if either of the filter-valued operands are true. | startDate gt 2018 or name sw "Genaro" |
### Escaping Special Characters in a Filter
Certain characters must be escaped before they can be used in a filter expression. For example, the following filter expression attempting to find all sources with the name `#Employees` will produce a 400 error:
Certain characters must be escaped before they can be used in a filter
expression. For example, the following filter expression attempting to find all
sources with the name `#Employees` will produce a 400 error:
`/v3/sources?filters=name eq "#Employees"`
@@ -90,52 +103,86 @@ To properly escape this filter, do the following:
`/v3/sources?filters=name eq "%23Employees"`
If you are searching for a string containing double quotes, use the following escape sequence:
If you are searching for a string containing double quotes, use the following
escape sequence:
`/v3/sources/?filters=name eq "\"Employees\""`
The following table lists the special characters that are incompatible with `filters` and how to escape them.
The following table lists the special characters that are incompatible with
`filters` and how to escape them.
|Character|Escape Sequence|
|---|---|
|#|%23|
|%|%25|
|&|%26|
| \\ | \\\\ |
|"| \\" |
| Character | Escape Sequence |
| --------- | --------------- |
| # | %23 |
| % | %25 |
| & | %26 |
| \\ | \\\\ |
| " | \\" |
### Known Limitations
Although filter expressions are a very general mechanism, individual API endpoints will only support filtering on a specific set of fields that are relevant to that endpoint, and will frequently only support a subset of operations for each field. For example, an endpoint might allow filtering on the name field but not support use of the co operator on that field. Consult the documentation for each API endpoint to determine what fields and operators can be used. Attempts to use an unsupported filter expression will result in a 400 Bad Request response.
Although filter expressions are a very general mechanism, individual API
endpoints will only support filtering on a specific set of fields that are
relevant to that endpoint, and will frequently only support a subset of
operations for each field. For example, an endpoint might allow filtering on the
name field but not support use of the co operator on that field. Consult the
documentation for each API endpoint to determine what fields and operators can
be used. Attempts to use an unsupported filter expression will result in a 400
Bad Request response.
Examples:
* `/v3/public-identities?filters=email eq "john.doe@example.com"`
* `/v3/public-identities?filters=firstname sw "john" or email sw "joe"`
* `not prop1 eq val1 or prop2 eq val2 and prop3 eq val3` is equivalent to `(not (prop1 eq val1)) or ((prop2 eq val2) and (prop3 eq val3))`
* `not (prop1 eq val1 or prop2 eq val2) and prop3 eq val3` is equivalent to `(not ((prop1 eq val1) or (prop2 eq val2))) and (prop3 eq val3)`
- `/v3/public-identities?filters=email eq "john.doe@example.com"`
- `/v3/public-identities?filters=firstname sw "john" or email sw "joe"`
- `not prop1 eq val1 or prop2 eq val2 and prop3 eq val3` is equivalent to
`(not (prop1 eq val1)) or ((prop2 eq val2) and (prop3 eq val3))`
- `not (prop1 eq val1 or prop2 eq val2) and prop3 eq val3` is equivalent to
`(not ((prop1 eq val1) or (prop2 eq val2))) and (prop3 eq val3)`
:::info
* Spaces in URLs must be escaped with `%20`. Most programming languages, frameworks, libraries, and tools will do this for you, but some won't. In the event that your tool doesn't escape spaces, you will need to format your query as `/v3/public-identities?filters=email%20eq%20"john.doe@example.com"`
* You must escape spaces in URLs with `%20`. Most programming languages, frameworks, libraries, and tools do this for you, but some do not. In the event that your tool does not escape spaces, you must format your query as `/v3/public-identities?filters=email%20eq%20"john.doe@example.com"`
- Spaces in URLs must be escaped with `%20`. Most programming languages,
frameworks, libraries, and tools will do this for you, but some won't. In the
event that your tool doesn't escape spaces, you will need to format your query
as `/v3/public-identities?filters=email%20eq%20"john.doe@example.com"`
* Unless explicitly noted otherwise, strings are compared lexicographically. Most comparisons are not case sensitive. Any situations where the comparisons are case sensitive will be called out.
- You must escape spaces in URLs with `%20`. Most programming languages,
frameworks, libraries, and tools do this for you, but some do not. In the
event that your tool does not escape spaces, you must format your query as
`/v3/public-identities?filters=email%20eq%20"john.doe@example.com"`
* Date-times are compared temporally; an earlier date-time is less than a later date-time.
- Unless explicitly noted otherwise, strings are compared lexicographically.
Most comparisons are not case sensitive. Any situations where the comparisons
are case sensitive will be called out.
- Date-times are compared temporally; an earlier date-time is less than a later
date-time.
- The usual precedence and associativity of the composite operators applies,
with **not** having higher priority than **and**, which in turn has higher
priority than **or**. You can use parentheses to override this precedence.
* The usual precedence and associativity of the composite operators applies, with **not** having higher priority than **and**, which in turn has higher priority than **or**. You can use parentheses to override this precedence.
:::
### Sorting Results
Result sorting is supported with the standard `sorters` parameter. Its syntax is a set of comma-separated field names. You may optionally prefix each field name with a "-" character, indicating that the sort is descending based on the value of that field. Otherwise, the sort is ascending.
Result sorting is supported with the standard `sorters` parameter. Its syntax is
a set of comma-separated field names. You may optionally prefix each field name
with a "-" character, indicating that the sort is descending based on the value
of that field. Otherwise, the sort is ascending.
For example, to sort primarily by **type** in ascending order, and secondarily by **modified date** in descending order, use `sorters=type,-modified`
For example, to sort primarily by **type** in ascending order, and secondarily
by **modified date** in descending order, use `sorters=type,-modified`
## Putting it all Together
Pagination, filters, and sorters can be mixed and matched to achieve the desired output for a given collection endpoint. Here are some examples:
Pagination, filters, and sorters can be mixed and matched to achieve the desired
output for a given collection endpoint. Here are some examples:
* `/v3/public-identities?limit=20&filters=firstname eq "john"&sorters=-name` returns the first 20 identities that have a first name of John and are sorted in descending order by full name.
* `/v3/account-activities?limit=10&offset=2&sorters=-created` sorts the results by descending created time, so the most recent activities appear first. The limit and offset returns the 3rd page of this sorted response with 10 records displayed.
- `/v3/public-identities?limit=20&filters=firstname eq "john"&sorters=-name`
returns the first 20 identities that have a first name of John and are sorted
in descending order by full name.
- `/v3/account-activities?limit=10&offset=2&sorters=-created` sorts the results
by descending created time, so the most recent activities appear first. The
limit and offset returns the 3rd page of this sorted response with 10 records
displayed.

View File

@@ -4,7 +4,8 @@ title: Access Request Dynamic Approval
pagination_label: Access Request Dynamic Approval
sidebar_label: Access Request Dynamic Approval
sidebar_class_name: accessRequestDynamicApproval
keywords: ["event", "trigger", "access", "request", "dynamic", "approval", "available"]
keywords:
["event", "trigger", "access", "request", "dynamic", "approval", "available"]
description: Fires after an access request is submitted.
slug: /docs/event-triggers/triggers/access-request-dynamic-approval
tags: ["Event Triggers", "Available Event Triggers", "Request Response"]
@@ -12,18 +13,29 @@ tags: ["Event Triggers", "Available Event Triggers", "Request Response"]
## Event Context
The Access Request Dynamic Approval event trigger provides a way to route a request to an additional approval step by an identity or a governance group.
The Access Request Dynamic Approval event trigger provides a way to route a
request to an additional approval step by an identity or a governance group.
When an access request is submitted, the Access Request Dynamic Approval trigger does the following:
When an access request is submitted, the Access Request Dynamic Approval trigger
does the following:
- Sends data about the access request and expects a response including the ID of an existing identity or workgroup (i.e. governance group) to add to the approval workflow.
- Based on the ID received, an approval task is assigned to the identity or governance group in IdentityNow for a decision as an additional step after other configured approval requirements are met.
- If the new approver is also the target identity for this request, the manager is assigned instead. If the identity has no manager, a random org admin is assigned.
- If the ID of the additional approver is wrong, then a random org admin is assigned.
- You can choose to **NOT** add an additional approver by providing an empty object as the
response to the triggered REST request.
- Sends data about the access request and expects a response including the ID of
an existing identity or workgroup (i.e. governance group) to add to the
approval workflow.
- Based on the ID received, an approval task is assigned to the identity or
governance group in IdentityNow for a decision as an additional step after
other configured approval requirements are met.
- If the new approver is also the target identity for this request, the manager
is assigned instead. If the identity has no manager, a random org admin is
assigned.
- If the ID of the additional approver is wrong, then a random org admin is
assigned.
- You can choose to **NOT** add an additional approver by providing an empty
object as the response to the triggered REST request.
You can use this trigger to develop logic outside of IdentityNow's out-of-the-box offerings to route an approval step to users such as the following:
You can use this trigger to develop logic outside of IdentityNow's
out-of-the-box offerings to route an approval step to users such as the
following:
- The recipient's department head
- The recipient's cost center
@@ -32,9 +44,16 @@ You can use this trigger to develop logic outside of IdentityNows out-of-the-
## Configuration
This is a `REQUEST_RESPONSE` trigger type. For more information about how to respond to a `REQUEST_RESPONSE` type trigger, see [responding to a request response type trigger](../responding-to-a-request-response-trigger.md) . This trigger intercepts newly submitted access requests and allows the subscribing service to add one additional identity or governance group as the last step in the approver list for the access request.
This is a `REQUEST_RESPONSE` trigger type. For more information about how to
respond to a `REQUEST_RESPONSE` type trigger, see
[responding to a request response type trigger](../responding-to-a-request-response-trigger.mdx)
. This trigger intercepts newly submitted access requests and allows the
subscribing service to add one additional identity or governance group as the
last step in the approver list for the access request.
The subscribing service will receive the following input from the trigger
service.
The subscribing service will receive the following input from the trigger service.
<!-- The input schema can be found in the [API specification](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Dynamic-Approver-Event-Trigger-Input): -->
```json
@@ -63,9 +82,13 @@ The subscribing service will receive the following input from the trigger servic
}
```
The subscribing service can use this information to make a decision about whether to add additional approvers to the access request.
The subscribing service can use this information to make a decision about
whether to add additional approvers to the access request.
<!-- The output schema can be found in the [API specification](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Dynamic-Approver-Event-Trigger-Output). -->
To add an identity to the approver list, the subscribing service responds to the event trigger with the following payload:
To add an identity to the approver list, the subscribing service responds to the
event trigger with the following payload:
```json
{
@@ -75,7 +98,8 @@ To add an identity to the approver list, the subscribing service responds to the
}
```
To add a governance group to the approver list, the subscribing service responds to the event trigger with the following payload:
To add a governance group to the approver list, the subscribing service responds
to the event trigger with the following payload:
```json
{
@@ -85,7 +109,8 @@ To add a governance group to the approver list, the subscribing service responds
}
```
If no identity or group should be added to a particular access request, then the subscribing service responds with an empty object:
If no identity or group should be added to a particular access request, then the
subscribing service responds with an empty object:
```json
{}
@@ -96,4 +121,4 @@ If no identity or group should be added to a particular access request, then the
- **Trigger Type**: [REQUEST_RESPONSE](../trigger-types.md#request-response)
<!-- [Input Schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Dynamic-Approver-Event-Trigger-Input)
[Output Schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Dynamic-Approver-Event-Trigger-Output) -->
- [How to respond to a REQUEST_RESPONSE trigger](../responding-to-a-request-response-trigger.md)
- [How to respond to a REQUEST_RESPONSE trigger](../responding-to-a-request-response-trigger.mdx)

View File

@@ -4,7 +4,17 @@ title: Access Request Postapproval
pagination_label: Access Request Postapproval
sidebar_label: Access Request Postapproval
sidebar_class_name: accessRequestPostapproval
keywords: ["event", "trigger", "access", "request", "postapproval", "post", "approval", "available"]
keywords:
[
"event",
"trigger",
"access",
"request",
"postapproval",
"post",
"approval",
"available",
]
description: Fires after an access request is approved.
slug: /docs/event-triggers/triggers/access-request-postapproval
tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
@@ -12,17 +22,25 @@ tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
## Event Context
The SailPoint IdentityNow platform now includes event triggers within the Access Request Approval workflow. The Access Request Postapproval event trigger provides more proactive governance and ensures users can quickly obtain needed access.
The SailPoint IdentityNow platform now includes event triggers within the Access
Request Approval workflow. The Access Request Postapproval event trigger
provides more proactive governance and ensures users can quickly obtain needed
access.
![Flow](./img/access-request-postapproval-path.png)
When an access request is approved, some use cases for this trigger include the following:
When an access request is approved, some use cases for this trigger include the
following:
- Notify the requester that the access request has been approved or denied.
- Notify the administrator or system to take the appropriate provisioning actions for the requested access.
- Notify a third party system to trigger another action (e.g. customer feedback survey, initiate another business process), or it can be used for auditing once an access request decision has been made.
- Notify the administrator or system to take the appropriate provisioning
actions for the requested access.
- Notify a third party system to trigger another action (e.g. customer feedback
survey, initiate another business process), or it can be used for auditing
once an access request decision has been made.
The Access Request event trigger is a flexible way to extend the Access Request workflow after access is approved for the requester.
The Access Request event trigger is a flexible way to extend the Access Request
workflow after access is approved for the requester.
This is an example input from this trigger:
@@ -70,4 +88,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Post-Approval-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Post-Approval-Event-Trigger-Input) -->

View File

@@ -12,28 +12,45 @@ tags: ["Event Triggers", "Available Event Triggers", "Request Response"]
## Event Context
The platform now includes event triggers within the Access Request approval workflow. The Access Request Submitted event trigger provides more proactive governance, ensures users can quickly obtain needed access, and helps with more preventative measures towards unintended access.
The platform now includes event triggers within the Access Request approval
workflow. The Access Request Submitted event trigger provides more proactive
governance, ensures users can quickly obtain needed access, and helps with more
preventative measures towards unintended access.
![Flow](./img/access-request-preapproval-path.png)
When an access request is submitted, some use cases for this trigger include the following:
When an access request is submitted, some use cases for this trigger include
the following:
- Provide the approver with additional context about the access request, like any Separation of Duties (SOD) policy violations, for example.
- Notify the approver through a different medium, such as Slack or Outlook Actionable Messages.
- Send a Terms of Agreement form of the requested Application to be signed by the access requester.
- On average, you can expect about 1 access request for every 4 identities within your org per day. On average you can expect about 1 to 2 access requests within a 10 second period.
- Provide the approver with additional context about the access request, like
any Separation of Duties (SOD) policy violations, for example.
- Notify the approver through a different medium, such as Slack or Outlook
Actionable Messages.
- Send a Terms of Agreement form of the requested Application to be signed by
the access requester.
- On average, you can expect about 1 access request for every 4 identities
within your org per day. On average you can expect about 1 to 2 access
requests within a 10 second period.
Additional use cases include the following:
- Send a Slack Notification to the approver or an approval channel and approve the request within Slack.
- Send a Slack Notification to the approver or an approval channel and approve
the request within Slack.
- Create an Outlook Actionable Message.
- Create a Google Doc for the requester to fill out and submit.
## Configuration
This is a `REQUEST_RESPONSE` trigger type. For more information about how to respond to a `REQUEST_RESPONSE` type trigger, see [responding to a request response type trigger](../responding-to-a-request-response-trigger.md). This trigger intercepts newly submitted access requests and allows the subscribing service to perform a preliminary approval/denial before the access request moves to the next approver in the chain.
This is a `REQUEST_RESPONSE` trigger type. For more information about how to
respond to a `REQUEST_RESPONSE` type trigger, see
[responding to a request response type trigger](../responding-to-a-request-response-trigger.mdx).
This trigger intercepts newly submitted access requests and allows the
subscribing service to perform a preliminary approval/denial before the access
request moves to the next approver in the chain.
The subscribing service will receive the following input from the trigger
service.
The subscribing service will receive the following input from the trigger service.
<!-- The input schema can be found in the [API specification](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Pre-Approval-Event-Trigger-Input): -->
```json
@@ -62,9 +79,13 @@ The subscribing service will receive the following input from the trigger servic
}
```
The subscribing service can use this information to make a decision about whether to approve or deny the request.
The subscribing service can use this information to make a decision about
whether to approve or deny the request.
<!-- The output schema can be found in the [API specification](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Pre-Approval-Event-Trigger-Output). -->
To approve an access request, the subscribing service responds to the event trigger with the following payload:
To approve an access request, the subscribing service responds to the event
trigger with the following payload:
```json
{
@@ -74,7 +95,8 @@ To approve an access request, the subscribing service responds to the event trig
}
```
To deny an access request, the subscribing service responds to the event trigger with the following payload:
To deny an access request, the subscribing service responds to the event trigger
with the following payload:
```json
{
@@ -84,13 +106,18 @@ To deny an access request, the subscribing service responds to the event trigger
}
```
This event trigger interrupts the normal workflow for access requests. Access requests can only proceed if the subscribing service responds within the allotted time by approving the request. If the subscribing service is non-responsive or it is responding with an incorrect payload, access requests will fail after the **Separation of Duties** check. If you see numerous access requests failing at this stage, verify that your subscribing service itself is operating correctly.
This event trigger interrupts the normal workflow for access requests. Access
requests can only proceed if the subscribing service responds within the allotted
time by approving the request. If the subscribing service is non-responsive or
it is responding with an incorrect payload, access requests will fail after the
**Separation of Duties** check. If you see numerous access requests failing at
this stage, verify that your subscribing service itself is operating correctly.
![AR failed](./img/access-request-preapproval-failure.png)
## Additional Information and Links
- **Trigger Type**: [REQUEST_RESPONSE](../trigger-types.md#request-response)
<!-- [Input Schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Pre-Approval-Event-Trigger-Input)
[Output Schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Pre-Approval-Event-Trigger-Output) -->
- [How to respond to a REQUEST_RESPONSE trigger](../responding-to-a-request-response-trigger.md)
<!-- [Input Schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Pre-Approval-Event-Trigger-Input)
[Output Schema](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Pre-Approval-Event-Trigger-Output) -->
- [How to respond to a REQUEST_RESPONSE trigger](../responding-to-a-request-response-trigger.mdx)

View File

@@ -4,26 +4,42 @@ title: Account Aggregation Completed
pagination_label: Account Aggregation Completed
sidebar_label: Account Aggregation Completed
sidebar_class_name: accountAggregationCompleted
keywords: ["event", "trigger", "account", "aggregation", "completed", "available"]
description: Fires after an account aggregation completed, terminated, or failed.
keywords:
["event", "trigger", "account", "aggregation", "completed", "available"]
description:
Fires after an account aggregation completed, terminated, or failed.
slug: /docs/event-triggers/triggers/account-aggregation-completed
tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
---
## Event Context
The platform has introduced an event trigger within the Source Aggregation workflow to provide additional monitoring capabilities. This trigger helps ensure account aggregations are performing as expected and identity data always reflects current source account information for better identity governance. Aggregations connect to a source and collect account information from the source to discover the number of accounts that have been added, changed, or removed. For more information about account aggregation see [Account Aggregation Data flow](https://community.sailpoint.com/t5/Technical-White-Papers/Account-Aggregation-Data-Flow/ta-p/79914#toc-hId-1367430234)
The platform has introduced an event trigger within the Source Aggregation
workflow to provide additional monitoring capabilities. This trigger helps
ensure account aggregations are performing as expected and identity data always
reflects current source account information for better identity governance.
Aggregations connect to a source and collect account information from the source
to discover the number of accounts that have been added, changed, or removed.
For more information about account aggregation see
[Account Aggregation Data flow](https://community.sailpoint.com/t5/Technical-White-Papers/Account-Aggregation-Data-Flow/ta-p/79914#toc-hId-1367430234)
![Flow](./img/aggregation-diagram.png)
After the initial collection of accounts in the source system during aggregation completes, some use cases for this trigger include the following:
After the initial collection of accounts in the source system during aggregation
completes, some use cases for this trigger include the following:
- Notify an administrator that IdentityNow was able to successfully connect to the source system and collect source accounts.
- Notify an administrator when the aggregation is terminated manually during the account collection phase.
- Notify an administrator or system (e.g. PagerDuty) that IdentityNow failed to collect accounts during aggregation and indicate required remediation for the source system.
- Notify an administrator that IdentityNow was able to successfully connect to
the source system and collect source accounts.
- Notify an administrator when the aggregation is terminated manually during the
account collection phase.
- Notify an administrator or system (e.g. PagerDuty) that IdentityNow failed to
collect accounts during aggregation and indicate required remediation for the
source system.
:::info
This event trigger does not include entitlement aggregations.
:::
This is an example input from this trigger:
@@ -38,12 +54,8 @@ This is an example input from this trigger:
"status": "Success",
"started": "2020-06-29T22:01:50.474Z",
"completed": "2020-06-29T22:02:04.090Z",
"errors": [
"Accounts unable to be aggregated."
],
"warnings": [
"Account Skipped"
],
"errors": ["Accounts unable to be aggregated."],
"warnings": ["Account Skipped"],
"stats": {
"scanned": 200,
"unchanged": 190,
@@ -66,19 +78,32 @@ The source account activity is summarized in `stats`, as seen in this example:
}
```
In this example, there are 10 changed accounts (`scanned` (200) - `unchanged` - (190)). Changed accounts include accounts that are `added` (6) and accounts that are `changed` (4), equaling 10 accounts. Removed accounts may or may not be included in the changed account total depending on the sources. For this example, `removed` (3) may be considered a changed account in some sources and would show a `scanned` count of 203 instead of 200.
In this example, there are 10 changed accounts (`scanned` (200) - `unchanged` -
(190)). Changed accounts include accounts that are `added` (6) and accounts that
are `changed` (4), equaling 10 accounts. Removed accounts may or may not be
included in the changed account total depending on the sources. For this
example, `removed` (3) may be considered a changed account in some sources and
would show a `scanned` count of 203 instead of 200.
> This event trigger fires even without changed accounts. The unchanged count will match the scanned accounts in the response.
> This event trigger fires even without changed accounts. The unchanged count
> will match the scanned accounts in the response.
The status of the aggregation can be one of three possible values:
- **Success**: Account collection was successful and aggregation can move to the next step.
- **Error**: There is a failure in account collection or an issue connecting to the source. The `errors` vary by source.
- **Termination**: The aggregation was terminated during the account collection phase. Aggregation can be terminated when the account deletion threshold is exceeded. For example, an account delete threshold of 10% is set by default for the source, and if the number of `removed` accounts for the above example is 21 (more than 10% of `scanned` accounts (200)), the aggregation is cancelled.
- **Success**: Account collection was successful and aggregation can move to the
next step.
- **Error**: There is a failure in account collection or an issue connecting to
the source. The `errors` vary by source.
- **Termination**: The aggregation was terminated during the account collection
phase. Aggregation can be terminated when the account deletion threshold is
exceeded. For example, an account delete threshold of 10% is set by default
for the source, and if the number of `removed` accounts for the above example
is 21 (more than 10% of `scanned` accounts (200)), the aggregation is
cancelled.
![Account_Delete_Threshold](./img/aggregation-delete-threshold.png)
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Account-Aggregation-Completed-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Account-Aggregation-Completed-Event-Trigger-Input) -->

View File

@@ -14,14 +14,22 @@ tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
![Flow](./img/trigger-path.png)
Identity Attribute Changed events occur when any attributes aggregated from an authoritative source differ from the current attributes for an identity during an identity refresh. See [Configuring Correlation](https://community.sailpoint.com/t5/Connectors/Configuring-Correlation/ta-p/74045) for more information.
Identity Attribute Changed events occur when any attributes aggregated from an
authoritative source differ from the current attributes for an identity during
an identity refresh. See
[Configuring Correlation](https://community.sailpoint.com/t5/Connectors/Configuring-Correlation/ta-p/74045)
for more information.
This event trigger provides a flexible way to extend Joiner-Mover-Leaver processes. This provides more proactive governance and ensures users can quickly get necessary access when they enter your organization.
This event trigger provides a flexible way to extend Joiner-Mover-Leaver
processes. This provides more proactive governance and ensures users can quickly
get necessary access when they enter your organization.
Some use cases for this trigger include the following:
- Notify an administrator or system to take the appropriate provisioning actions as part of the Mover workflow.
- Notify a system to trigger another action, like triggering a certification campaign when an identity's manager changes, for example.
- Notify an administrator or system to take the appropriate provisioning actions
as part of the Mover workflow.
- Notify a system to trigger another action, like triggering a certification
campaign when an identity's manager changes, for example.
This is an example input from this trigger:
@@ -45,4 +53,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Identity-Attributes-Changed-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Identity-Attributes-Changed-Event-Trigger-Input) -->

View File

@@ -14,14 +14,25 @@ tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
![Flow](./img/identity-created-path.png)
Identity Created events occur when a new identity is detected during an aggregation and refresh from an authoritative source. New identities are detected when an account from the authoritative source is not correlated to an existing identity. For more information, see [Configuring Correlation](https://community.sailpoint.com/t5/Connectors/Configuring-Correlation/ta-p/74045). The Identity Created event contains all of the identity attributes as they are configured in the identity profile. For more information, see [Mapping Identity Profiles](https://community.sailpoint.com/t5/Admin-Help/Mapping-Identity-Profiles/ta-p/77877).
Identity Created events occur when a new identity is detected during an
aggregation and refresh from an authoritative source. New identities are
detected when an account from the authoritative source is not correlated to an
existing identity. For more information, see
[Configuring Correlation](https://community.sailpoint.com/t5/Connectors/Configuring-Correlation/ta-p/74045).
The Identity Created event contains all of the identity attributes as they are
configured in the identity profile. For more information, see
[Mapping Identity Profiles](https://community.sailpoint.com/t5/Admin-Help/Mapping-Identity-Profiles/ta-p/77877).
This event trigger provides a flexible way to extend Joiner-Mover-Leaver processes. This provides more proactive governance and ensures users can quickly get necessary access when they enter your organization.
This event trigger provides a flexible way to extend Joiner-Mover-Leaver
processes. This provides more proactive governance and ensures users can quickly
get necessary access when they enter your organization.
Some use cases for this trigger include the following:
- Notify an administrator or system to take the appropriate birthright provisioning actions as part of the Joiner workflow.
- Notify a third party system to trigger another action (e.g. create an onboarding experience for a new hire).
- Notify an administrator or system to take the appropriate birthright
provisioning actions as part of the Joiner workflow.
- Notify a third party system to trigger another action (e.g. create an
onboarding experience for a new hire).
This is an example input from this trigger:
@@ -41,4 +52,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Identity-Created-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Identity-Created-Event-Trigger-Input) -->

View File

@@ -11,9 +11,12 @@ slug: /docs/event-triggers/available
tags: ["Event Triggers", "Available Event Triggers"]
---
import DocCardList from '@theme/DocCardList';
import {useCurrentSidebarCategory} from '@docusaurus/theme-common';
import DocCardList from "@theme/DocCardList";
import { useCurrentSidebarCategory } from "@docusaurus/theme-common";
The event triggers in this section are generally available to all IDN tenants. Event triggers currently in development are considered [Early Access](../early-access/index.md) and require a support ticket to be enabled in a tenant.
The event triggers in this section are generally available to all IDN tenants.
Event triggers currently in development are considered
[Early Access](../early-access/index.mdx) and require a support ticket to be
enabled in a tenant.
<DocCardList items={useCurrentSidebarCategory().items}/>
<DocCardList items={useCurrentSidebarCategory().items} />

View File

@@ -4,7 +4,8 @@ title: Provisioning Action Completed
pagination_label: Provisioning Action Completed
sidebar_label: Provisioning Action Completed
sidebar_class_name: provisioningActionCompleted
keywords: ["event", "trigger", "provisioning", "action", "completed", "available"]
keywords:
["event", "trigger", "provisioning", "action", "completed", "available"]
description: Fires after a provisioning action completed on a source.
slug: /docs/event-triggers/triggers/provisioning-action-completed
tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
@@ -14,14 +15,21 @@ tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
![Flow](./img/provisioning-action.png)
The Provisioning Action Completed event trigger notifies subscribed applications after the action is completed. This event trigger provides a flexible way to extend the Provisioning workflow after access has changed for an identity within SailPoint. This provides more proactive governance and ensures users can quickly get necessary access.
The Provisioning Action Completed event trigger notifies subscribed applications
after the action is completed. This event trigger provides a flexible way to
extend the Provisioning workflow after access has changed for an identity within
SailPoint. This provides more proactive governance and ensures users can quickly
get necessary access.
Some use cases for this trigger include the following:
- Notify the requester that the access request has been fulfilled.
- Notify an application user and/or access certifier that access has been revoked.
- Notify an application user and/or access certifier that access has been
revoked.
- Notify an administrator or system that provisioning has been completed.
- Notify a third party system to trigger another action, like continuing additional provisioning actions or auditing of provisioning activities, for example.
- Notify a third party system to trigger another action, like continuing
additional provisioning actions or auditing of provisioning activities, for
example.
This is an example input from this trigger:
@@ -30,12 +38,8 @@ This is an example input from this trigger:
"trackingNumber": "4b4d982dddff4267ab12f0f1e72b5a6d",
"sources": "Corp AD, Corp LDAP, Corp Salesforce",
"action": "IdentityRefresh",
"errors": [
"Connector AD Failed"
],
"warnings": [
"Notification Skipped due to invalid email"
],
"errors": ["Connector AD Failed"],
"warnings": ["Notification Skipped due to invalid email"],
"recipient": {
"type": "IDENTITY",
"id": "2c91808568c529c60168cca6f90c1313",
@@ -75,20 +79,26 @@ This is an example input from this trigger:
Before consuming this event trigger, the following prerequisites must be met:
- An oAuth Client configured with authority as `ORG_ADMIN`.
- An org enabled with the `ARSENAL_ALLOW_POSTPROVISIONING_TRIGGERS` feature flag.
- An org enabled with the `ARSENAL_ALLOW_POSTPROVISIONING_TRIGGERS` feature
flag.
- Configure connectors for provisioning into target applications.
- An org configured for automated provisioning. See the Event Context section for specific setup.
- An org configured for automated provisioning. See the Event Context section
for specific setup.
To provision to a target application, the connector for the source must support the following connector features:
To provision to a target application, the connector for the source must support
the following connector features:
- `ENABLE` - Can enable or disable accounts.
- `UNLOCK` - Can lock or unlock accounts.
- `PROVISIONING` - Can write to accounts. Currently, the trigger does not include attribute synchronization.
- `PROVISIONING` - Can write to accounts. Currently, the trigger does not
include attribute synchronization.
- `PASSWORD` - Can update password for accounts.
For a list of supported connectors and features, see [Supported Connectors for IdentityNow](https://community.sailpoint.com/t5/Connectors/Supported-Sources-Connectors-for-IdentityNow/ta-p/80019).
For a list of supported connectors and features, see
[Supported Connectors for IdentityNow](https://community.sailpoint.com/t5/Connectors/Supported-Sources-Connectors-for-IdentityNow/ta-p/80019).
For information about configuring sources for provisioning, see [How can I edit the Create Profile on a source?](https://community.sailpoint.com/t5/Connectors/How-can-I-edit-the-Create-Profile-on-a-source/ta-p/74429).
For information about configuring sources for provisioning, see
[How can I edit the Create Profile on a source?](https://community.sailpoint.com/t5/Connectors/How-can-I-edit-the-Create-Profile-on-a-source/ta-p/74429).
Provisioning events occur in these workflows:
@@ -100,34 +110,47 @@ Provisioning events occur in these workflows:
### Access Request
When an Access Request approval process has completed with all positive approvals, the access request is fulfilled with provisioning to the target application with requested access.
When an Access Request approval process has completed with all positive
approvals, the access request is fulfilled with provisioning to the target
application with requested access.
![Flow](./img/provisioning-access-request.png)
Access acquired through a role request can also be revoked, and those changes can be provisioned to an account.
Access acquired through a role request can also be revoked, and those changes
can be provisioned to an account.
The following steps must be completed:
- Source Connector configured for `PROVISIONING`.
Access requests in SailPoint SaaS currently do not support `ACCOUNT_ONLY_REQUEST` or `ADDITIONAL_ACCOUNT_REQUEST`.
- Source Connector configured for `PROVISIONING`. Access requests in SailPoint
SaaS currently do not support `ACCOUNT_ONLY_REQUEST` or
`ADDITIONAL_ACCOUNT_REQUEST`.
- Source entitlements mapped in Account Schema.
- Access profile using source entitlements. Role setup is optional.
- Application enabled for Access Request.
> **NOTE:** There is no indication to the approver in the IdentityNow UI that the approval is for a revoke action. This must be considered for all usage of these APIs.
> **NOTE:** There is no indication to the approver in the IdentityNow UI that
> the approval is for a revoke action. This must be considered for all usage of
> these APIs.
![Flow](./img/provisioning-access-request-2.png)
### Certification
Provisioning removal of accounts acquired through Access Request occurs through certifications.
> **Note:** Certifications cannot revoke access acquired via role membership or lifecycle changes.
Provisioning removal of accounts acquired through Access Request occurs through
certifications.
> **Note:** Certifications cannot revoke access acquired via role membership or
> lifecycle changes.
![Flow](./img/provisioning-access-request-certification.png)
### Role Membership
Access defined in access profiles can be grouped into roles, and roles can be assigned to identities using `COMPLEX_CRITERION` or `IDENTITY_LIST`. See [Admin UI](https://community.sailpoint.com/t5/Admin-Help/Standard-Role-Membership-Criteria-Options/ta-p/74392) for information on how to set `COMPLEX_CRITERION`.
Access defined in access profiles can be grouped into roles, and roles can be
assigned to identities using `COMPLEX_CRITERION` or `IDENTITY_LIST`. See
[Admin UI](https://community.sailpoint.com/t5/Admin-Help/Standard-Role-Membership-Criteria-Options/ta-p/74392)
for information on how to set `COMPLEX_CRITERION`.
> **Note:** `CUSTOM` role membership through rules is no longer supported.
Roles can also be mapped from an authoritative source.
@@ -142,7 +165,8 @@ This trigger fires when an account has been provisioned, enabled, or disabled.
To provision access with lifecycle states, the prerequisites must be met:
- Source connector configured for `ENABLE` to enable/disable accounts and/or `PROVISIONING` to create/update/delete accounts.
- Source connector configured for `ENABLE` to enable/disable accounts and/or
`PROVISIONING` to create/update/delete accounts.
- Source entitlements mapped from an authoritative source.
- Source entitlements mapped to access profiles.
- Identity profile using an authoritative source.
@@ -150,14 +174,17 @@ To provision access with lifecycle states, the prerequisites must be met:
### Password Management
Password changes can be provisioned to target applications through password reset or password interception. Also, unlocking of accounts can be provisioned via password change within SailPoint SaaS.
Password changes can be provisioned to target applications through password
reset or password interception. Also, unlocking of accounts can be provisioned
via password change within SailPoint SaaS.
For password management setup, you must configure the following:
- Source connector configured for `PASSWORD` for password changes and/or `UNLOCK` for unlocking changes.
- Source connector configured for `PASSWORD` for password changes and/or
`UNLOCK` for unlocking changes.
- Password sync group
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Provisioning-Completed-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Provisioning-Completed-Event-Trigger-Input) -->

View File

@@ -14,16 +14,31 @@ tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
![Flow](./img/saved-search-path.png)
Users can subscribe to Saved Searches and receive an email of a report generated from the saved search. For example, a user can save a search query called "Identities with upcoming end dates" and create a subscription to receive a daily report showing identities with an end date within 10 days from the current date. This event trigger can also notify an external HTTP application that a report generated from a saved search subscription is available to be processed.
Users can subscribe to Saved Searches and receive an email of a report generated
from the saved search. For example, a user can save a search query called
"Identities with upcoming end dates" and create a subscription to receive a
daily report showing identities with an end date within 10 days from the current
date. This event trigger can also notify an external HTTP application that a
report generated from a saved search subscription is available to be processed.
Saved Search Completed events occur based on the schedules set for saved search subscriptions. For example, if you have a scheduled saved search for Monday, Tuesday, Wednesday, Thursday, Friday at 6:00 GMT, your HTTP endpoint will also receive a notification at those times. This can be set using the `schedule` object in the [create scheduled search endpoint](/idn/api/v3/scheduled-search-create).
Saved Search Completed events occur based on the schedules set for saved search
subscriptions. For example, if you have a scheduled saved search for Monday,
Tuesday, Wednesday, Thursday, Friday at 6:00 GMT, your HTTP endpoint will also
receive a notification at those times. This can be set using the `schedule`
object in the
[create scheduled search endpoint](/idn/api/v3/scheduled-search-create).
To receive this event when a saved search query does not have any results, set `emailEmptyResults` to `TRUE`. You can also set the expiration date in the `expiration` field within the `schedule` object. Your HTTP endpoint will stop receiving these events when the scheduled search expires.
To receive this event when a saved search query does not have any results, set
`emailEmptyResults` to `TRUE`. You can also set the expiration date in the
`expiration` field within the `schedule` object. Your HTTP endpoint will stop
receiving these events when the scheduled search expires.
Some use cases for this trigger include the following:
- Perform quality control, such as continuously checking for Separation of Duties (SOD) violations.
- Respond to upcoming joiner-mover-leaver scenarios, such as deprovisioning access before an employee's separation date.
- Perform quality control, such as continuously checking for Separation of
Duties (SOD) violations.
- Respond to upcoming joiner-mover-leaver scenarios, such as deprovisioning
access before an employee's separation date.
This is an example input from this trigger:
@@ -38,23 +53,17 @@ This is an example input from this trigger:
"Account": {
"count": 3,
"noun": "accounts",
"preview": [
[]
]
"preview": [[]]
},
"Entitlement": {
"count": 2,
"noun": "entitlements",
"preview": [
[]
]
"preview": [[]]
},
"Identity": {
"count": 2,
"noun": "identities",
"preview": [
[]
]
"preview": [[]]
}
},
"signedS3Url": "https://sptcbu-org-data-useast1.s3.amazonaws.com/arsenal-john/reports/Events%20Export.2020-05-06%2018%2759%20GMT.3e580592-86e4-4953-8aea-49e6ef20a086.zip?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20200506T185919Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&X-Amz-Credential=AKIAV5E54XOGTS4Q4L7A%2F20200506%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=2e732bb97a12a1fd8a215613e3c31fcdae8ba1fb6a25916843ab5b51d2ddefbc"
@@ -64,4 +73,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Saved-Search-Complete-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Saved-Search-Complete-Event-Trigger-Input) -->

View File

@@ -5,16 +5,18 @@ pagination_label: Source Created
sidebar_label: Source Created
sidebar_class_name: sourceCreated
keywords: ["event", "trigger", "source", "created", "available"]
description: Fires after a source is created.
description: Fires after a source is created.
slug: /docs/event-triggers/triggers/source-created
tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
---
## Event Context
Source Created events occur when a new source is successfully created via the API or the Admin UI. Some use cases for this trigger include the following:
Source Created events occur when a new source is successfully created via the
API or the Admin UI. Some use cases for this trigger include the following:
- Provide evidence to show auditors connector logic and sources are not manipulated outside of proper change control processes.
- Provide evidence to show auditors connector logic and sources are not
manipulated outside of proper change control processes.
- Auto-configure new sources with proper owners using external data sources.
This is an example input from this trigger:
@@ -37,4 +39,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Created-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Created-Event-Trigger-Input) -->

View File

@@ -5,16 +5,18 @@ pagination_label: Source Deleted
sidebar_label: Source Deleted
sidebar_class_name: sourceDeleted
keywords: ["event", "trigger", "source", "deleted", "available"]
description: Fires after a source is deleted.
description: Fires after a source is deleted.
slug: /docs/event-triggers/triggers/source-deleted
tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
---
## Event Context
Source Deleted events occur when a source is successfully deleted via the API or the Admin UI. Some use cases for this trigger include the following:
Source Deleted events occur when a source is successfully deleted via the API or
the Admin UI. Some use cases for this trigger include the following:
- Provide evidence to show auditors that connector logic and sources are not manipulated outside of proper change control processes.
- Provide evidence to show auditors that connector logic and sources are not
manipulated outside of proper change control processes.
- Alert admins when a source was deleted incorrectly.
This is an example input from this trigger:
@@ -37,4 +39,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Deleted-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Deleted-Event-Trigger-Input) -->

View File

@@ -5,16 +5,18 @@ pagination_label: Source Updated
sidebar_label: Source Updated
sidebar_class_name: sourceUpdated
keywords: ["event", "trigger", "source", "updated", "available"]
description: Fires after a source is updated.
description: Fires after a source is updated.
slug: /docs/event-triggers/triggers/source-updated
tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
---
## Event Context
Source Updated events occur when configuration changes are made to a source. Some use cases for this trigger include the following:
Source Updated events occur when configuration changes are made to a source.
Some use cases for this trigger include the following:
- Provide evidence to show auditors connector logic and sources are not manipulated outside of proper change control processes.
- Provide evidence to show auditors connector logic and sources are not
manipulated outside of proper change control processes.
- Trigger review of an updated source.
This is an example input from this trigger:
@@ -37,4 +39,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Updated-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Updated-Event-Trigger-Input) -->

View File

@@ -12,18 +12,25 @@ tags: ["Event Triggers", "Available Event Triggers", "Fire and Forget"]
## Event Context
VA (Virtual Appliance) Cluster Status Change Events occur when a health check is run on a VA cluster and the health status is different from the previous health check. Customers can use this trigger to monitor all the health status changes of their VA clusters.
VA (Virtual Appliance) Cluster Status Change Events occur when a health check is
run on a VA cluster and the health status is different from the previous health
check. Customers can use this trigger to monitor all the health status changes
of their VA clusters.
Some use cases for this trigger include the following:
- Create real-time health dashboards for VA clusters.
- Notify an administrator or system to take the appropriate actions when a health status changes.
- Notify an administrator or system to take the appropriate actions when a
health status changes.
Additional notes about VA Cluster Status Changes:
- VA cluster health checks run every 30 minutes.
- This trigger will invoke on any VA cluster health status change (i.e. healthy -> unhealthy, unhealthy -> healthy).
- See [troubleshooting virtual appliances](https://community.sailpoint.com/t5/IdentityNow-Connectors/Virtual-Appliance-Troubleshooting-Guide/ta-p/78735) for more information.
- This trigger will invoke on any VA cluster health status change (i.e. healthy
-> unhealthy, unhealthy -> healthy).
- See
[troubleshooting virtual appliances](https://community.sailpoint.com/t5/IdentityNow-Connectors/Virtual-Appliance-Troubleshooting-Guide/ta-p/78735)
for more information.
Healthy Cluster Source

View File

@@ -10,23 +10,36 @@ slug: /docs/event-triggers/triggers/identity-deleted
tags: ["Event Triggers", "Early Access Event Triggers", "Fire and Forget"]
---
:::info
This is an early access event trigger. Please contact support to have it enabled in your tenant.
This is an early access event trigger. Please contact support to have it enabled
in your tenant.
:::
## Event Context
![Flow](./img/identity-deleted-path.png)
Identity deleted events occur when an identity's associated account is deleted from the identity's authoritative source. After accounts are aggregated and the identity refresh process finds an identity that is not correlated to an account, the associated identity is deleted from IdentityNow. For more information, see [Configuring Correlation](https://community.sailpoint.com/t5/Connectors/Configuring-Correlation/ta-p/74045). The Identity deleted event contains any identity attributes as they are configured in the identity profile. For more information, see [Mapping Identity Profiles](https://community.sailpoint.com/t5/Admin-Help/Mapping-Identity-Profiles/ta-p/77877).
Identity deleted events occur when an identity's associated account is deleted
from the identity's authoritative source. After accounts are aggregated and the
identity refresh process finds an identity that is not correlated to an account,
the associated identity is deleted from IdentityNow. For more information, see
[Configuring Correlation](https://community.sailpoint.com/t5/Connectors/Configuring-Correlation/ta-p/74045).
The Identity deleted event contains any identity attributes as they are
configured in the identity profile. For more information, see
[Mapping Identity Profiles](https://community.sailpoint.com/t5/Admin-Help/Mapping-Identity-Profiles/ta-p/77877).
This event trigger provides a flexible way to extend joiner-mover-leaver processes. This provides more proactive governance and ensures users can quickly get necessary access when they enter your organization.
This event trigger provides a flexible way to extend joiner-mover-leaver
processes. This provides more proactive governance and ensures users can quickly
get necessary access when they enter your organization.
Some use cases for this trigger include the following:
- Notify an administrator or system to take the appropriate provisioning actions as part of the leaver workflow.
- Notify a system to trigger another action (e.g. deactivate an employee's badge upon termination).
- Notify an administrator or system to take the appropriate provisioning actions
as part of the leaver workflow.
- Notify a system to trigger another action (e.g. deactivate an employee's badge
upon termination).
This is an example input from this trigger:
@@ -46,4 +59,4 @@ This is an example input from this trigger:
## Additional Information and Links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Identity-Deleted-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Identity-Deleted-Event-Trigger-Input) -->

View File

@@ -11,9 +11,12 @@ slug: /docs/event-triggers/early-access
tags: ["Event Triggers", "Early Access Event Triggers"]
---
import DocCardList from '@theme/DocCardList';
import {useCurrentSidebarCategory} from '@docusaurus/theme-common';
import DocCardList from "@theme/DocCardList";
import { useCurrentSidebarCategory } from "@docusaurus/theme-common";
New event triggers undergoing active development may appear in the early access event trigger list. You can use these triggers by submitting a support ticket to have them enabled in your tenant. Because these triggers are early access, they are subject to change at any time.
New event triggers undergoing active development may appear in the early access
event trigger list. You can use these triggers by submitting a support ticket to
have them enabled in your tenant. Because these triggers are early access, they
are subject to change at any time.
<DocCardList items={useCurrentSidebarCategory().items}/>
<DocCardList items={useCurrentSidebarCategory().items} />

View File

@@ -11,14 +11,21 @@ tags: ["Event Triggers", "Early Access Event Triggers", "Fire and Forget"]
---
:::info
This is an early access event trigger. Please contact support to have it enabled in your tenant.
This is an early access event trigger. Please contact support to have it enabled
in your tenant.
:::
## Event Context
Source Account Created events occur after a new account is detected during an account aggregation and refresh from a source. This trigger cannot determine whether account creation happened on a source or in IdentityNow. It omits events related to IdentityNow accounts, such as the IdentityNow Admin.
Source Account Created events occur after a new account is detected during an
account aggregation and refresh from a source. This trigger cannot determine
whether account creation happened on a source or in IdentityNow. It omits events
related to IdentityNow accounts, such as the IdentityNow Admin.
Use this event trigger to watch for new accounts with highly privileged access, such as an account created in Active Directory Domain Admins.
Use this event trigger to watch for new accounts with highly privileged access,
such as an account created in Active Directory Domain Admins.
This is an example input from this trigger:
@@ -50,4 +57,4 @@ This is an example input from this trigger:
## Additional information and links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Account-Created-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Account-Created-Event-Trigger-Input) -->

View File

@@ -11,14 +11,23 @@ tags: ["Event Triggers", "Early Access Event Triggers", "Fire and Forget"]
---
:::info
This is an early access event trigger. Please contact support to have it enabled in your tenant.
This is an early access event trigger. Please contact support to have it enabled
in your tenant.
:::
## Event Context
Source Account Deleted events occur whenever an account is deleted from its source during an account aggregation operation. The account may have been manually removed or deleted as the result of a provisioning event. The trigger cannot determine whether the account deletion happened on a source or in IdentityNow. It omits events related to IdentityNow accounts, such as the IdentityNow Admin.
Source Account Deleted events occur whenever an account is deleted from its
source during an account aggregation operation. The account may have been
manually removed or deleted as the result of a provisioning event. The trigger
cannot determine whether the account deletion happened on a source or in
IdentityNow. It omits events related to IdentityNow accounts, such as the
IdentityNow Admin.
Use this event trigger to watch for deletions of authoritative accounts, such as an account deleted on Workday.
Use this event trigger to watch for deletions of authoritative accounts, such as
an account deleted on Workday.
This is an example input from this trigger:
@@ -50,4 +59,4 @@ This is an example input from this trigger:
## Additional information and links
- **Trigger Type**: [FIRE_AND_FORGET](../trigger-types.md#fire-and-forget)
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Account-Deleted-Event-Trigger-Input) -->
<!-- [Input schema](https://developer.sailpoint.com/apis/beta/#section/Source-Account-Deleted-Event-Trigger-Input) -->

View File

@@ -12,19 +12,28 @@ tags: ["Event Triggers", "Early Access Event Triggers", "Fire and Forget"]
---
:::info
This is an early access event trigger. Please contact support to have it enabled in your tenant.
This is an early access event trigger. Please contact support to have it enabled
in your tenant.
:::
## Event Context
Source Account Updated events occur whenever one or more account attributes change on a single account during an account aggregation operation. The trigger cannot determine whether the account update happened on a source or in IdentityNow. It omits events related to IdentityNow accounts, such as the IdentityNow Admin. The following actions are considered updates:
Source Account Updated events occur whenever one or more account attributes
change on a single account during an account aggregation operation. The trigger
cannot determine whether the account update happened on a source or in
IdentityNow. It omits events related to IdentityNow accounts, such as the
IdentityNow Admin. The following actions are considered updates:
- Update account attributes
- Enable or disable an account
- Lock or unlock source accounts
- Change source account password
Use this event trigger to watch for updates to accounts that add highly privileged access, such as an account that is granted privileged access on a sensitive source.
Use this event trigger to watch for updates to accounts that add highly
privileged access, such as an account that is granted privileged access on a
sensitive source.
This is an example input from this trigger:

View File

@@ -6,125 +6,109 @@ sidebar_label: Filtering Events
sidebar_position: 4
sidebar_class_name: filteringEvents
keywords: ["filtering", "events"]
description: Many triggers can produce a staggering amount of events if left unfiltered. Event filtering helps you solve this problem.
description:
Many triggers can produce a staggering amount of events if left unfiltered.
Event filtering helps you solve this problem.
slug: /docs/event-triggers/filtering-events
tags: ["Event Triggers"]
---
## What is a Filter
Many triggers can produce a staggering amount of events if left unfiltered, resulting in more network traffic and more processing time on a subscribing service. Your subscribing service usually only needs to be notified of events containing a key attribute or value you want to process. For example, the Identity Attributes Changed trigger emits an event whenever an identity has a change in attributes. This can occur during the mover process when an identity changes departments or a manager is promoted, resulting in several identities receiving a new manager. Rather than inundate your subscribing service with every identity change, you can use an event trigger filter to specify which events your service is interested in processing.
Many triggers can produce a staggering amount of events if left unfiltered,
resulting in more network traffic and more processing time on a subscribing
service. Your subscribing service usually only needs to be notified of events
containing a key attribute or value you want to process. For example, the
Identity Attributes Changed trigger emits an event whenever an identity has a
change in attributes. This can occur during the mover process when an identity
changes departments or a manager is promoted, resulting in several identities
receiving a new manager. Rather than inundate your subscribing service with
every identity change, you can use an event trigger filter to specify which
events your service is interested in processing.
## Benefits of Using Filters
Network bandwidth and processing power come at a cost, especially when you are using managed solutions like AWS or no-code providers like Zapier. Without filtering, a subscribing service would be sent every single event that the trigger receives. The first thing any subscriber must do in this scenario is inspect each event to figure out which ones it must process and which ones it can ignore. Taking this approach with managed providers that charge per invocation, like AWS Lambda, can become expensive. Furthermore, some no-code providers may put a limit on the total number of invocations that a service can make in a given month, which would be quickly exhausted with this approach. Trigger filters take the filtering logic out of your subscribing service and place it on the event trigger within SailPoint, so you only receive the events matching your filter criteria.
Network bandwidth and processing power come at a cost, especially when you are
using managed solutions like AWS or no-code providers like Zapier. Without
filtering, a subscribing service would be sent every single event that the
trigger receives. The first thing any subscriber must do in this scenario is
inspect each event to figure out which ones it must process and which ones it
can ignore. Taking this approach with managed providers that charge per
invocation, like AWS Lambda, can become expensive. Furthermore, some no-code
providers may put a limit on the total number of invocations that a service can
make in a given month, which would be quickly exhausted with this approach.
Trigger filters take the filtering logic out of your subscribing service and
place it on the event trigger within SailPoint, so you only receive the events
matching your filter criteria.
## Constructing a Filter
Filters are constructed using a [Goessner JSONpath expression](https://goessner.net/articles/JsonPath/).
Filters are constructed using a
[Goessner JSONpath expression](https://goessner.net/articles/JsonPath/).
### Expressions
JSONPath expressions specify a path to an element or array of elements in a JSON structure. Expressions are used to select data in a JSON structure to check for the existence of attributes or to narrow down the data where the filter logic is applied.
JSONPath expressions specify a path to an element or array of elements in a JSON
structure. Expressions are used to select data in a JSON structure to check for
the existence of attributes or to narrow down the data where the filter logic is
applied.
| Expression | Description | Example |
| --- | --- | --- |
| $ | **Root** - The root object / element. | $ |
| @ | **Current** - The current object / element of an array. | $.changes[?(@.attribute == "department")] |
| . | **Child operator** - Selects a child element of an object. | $.identity |
| .. | **Recursive descent** - JSONPath borrows this syntax from E4X. | $..id |
| * | **Wildcard** - All objects / elements regardless of their names. | $.changes[*]|
| [] | **Subscript** - In Javascript and JSON, it is the native array operator. | $.changes[1].attribute |
| [,] | **Union** - Selects elements of an array. | $.changes[0,1,2] |
| [start:stop:step] | **Array slice** - Selects elements of an array. | $.changes[0:2:1] |
| [:n] | **Array slice** - Selects the first `n` elements of an array. | $.changes[:2] |
| [-n:] | **Array slice** - Selects the last `n` elements of an array. | $.changes[-1:] |
| ?() | **Filter expression** - Applies a filter expression. | $[?($.identity.name == "john.doe")] |
| () | **Script expression** - Applies a script expression. | $.changes[(@.length-1)] |
| Expression | Description | Example |
| ----------------- | ------------------------------------------------------------------------ | ----------------------------------------- |
| $ | **Root** - The root object / element. | $ |
| @ | **Current** - The current object / element of an array. | $.changes[?(@.attribute == "department")] |
| . | **Child operator** - Selects a child element of an object. | $.identity |
| .. | **Recursive descent** - JSONPath borrows this syntax from E4X. | $..id |
| \* | **Wildcard** - All objects / elements regardless of their names. | $.changes[*] |
| [] | **Subscript** - In Javascript and JSON, it is the native array operator. | $.changes[1].attribute |
| [,] | **Union** - Selects elements of an array. | $.changes[0,1,2] |
| [start:stop:step] | **Array slice** - Selects elements of an array. | $.changes[0:2:1] |
| [:n] | **Array slice** - Selects the first `n` elements of an array. | $.changes[:2] |
| [-n:] | **Array slice** - Selects the last `n` elements of an array. | $.changes[-1:] |
| ?() | **Filter expression** - Applies a filter expression. | $[?($.identity.name == "john.doe")] |
| () | **Script expression** - Applies a script expression. | $.changes[(@.length-1)] |
### Operators
JSONPath operators provide more options to filter JSON structures.
| Operator | Description | Example |
| --- | --- | --- |
| == | **Equals to** - Evaluates to `true` if operands match. | $[?($.identity.name == "john.doe")] |
| != | **Not equal to** - Evaluates to `true` if operands do not match. | $[?($.identity.name != "george.washington")] |
| > | **Greater than** - Evaluates to `true` if the left operand is greater than the right operand. It works on strings and numbers. | $[?($.attributes.created > '2020-04-27T16:48:33.200Z')] |
| >= | **Greater than or equal to** - Evaluates to `true` if the left operand is greater than or equal to the right operand. | $[?($.attributes.created >= '2020-04-27T16:48:33.597Z')] |
| < | **Less than** - Evaluates to `true` if the left operand is less than the right operand. | $[?($.attributes.created < '2020-04-27T16:48:33.200Z')] |
| <= | **Less than or equal to** - Evaluates to `true` if the left operand is less than or equal to the right operand. | $[?($.attributes.created <= '2020-04-27T16:48:33.200Z')] |
| && | Logical **AND** operator that evaluates `true` only if both conditions are `true`. | $.changes[?(@.attribute == "cloudLifecycleState" && @.newValue == "terminated")] |
| ! | **Not** - Negates the boolean expression. | $.identity.attributes[?(!@.alternateEmail)] |
| \|\| | Logical **OR** operator that evaluates `true` if at least one condition is `true`. | $.changes[?(@.attribute == "cloudLifecycleState" \|\| @.attribute == "department")] |
| contains | **Contains** - Checks whether a string contains the specified substring (case sensitive). | $[?($.identity.name contains "john")] |
| Operator | Description | Example |
| -------- | ------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------- |
| == | **Equals to** - Evaluates to `true` if operands match. | $[?($.identity.name == "john.doe")] |
| != | **Not equal to** - Evaluates to `true` if operands do not match. | $[?($.identity.name != "george.washington")] |
| > | **Greater than** - Evaluates to `true` if the left operand is greater than the right operand. It works on strings and numbers. | $[?($.attributes.created > '2020-04-27T16:48:33.200Z')] |
| >= | **Greater than or equal to** - Evaluates to `true` if the left operand is greater than or equal to the right operand. | $[?($.attributes.created >= '2020-04-27T16:48:33.597Z')] |
| < | **Less than** - Evaluates to `true` if the left operand is less than the right operand. | $[?($.attributes.created < '2020-04-27T16:48:33.200Z')] |
| <= | **Less than or equal to** - Evaluates to `true` if the left operand is less than or equal to the right operand. | $[?($.attributes.created <= '2020-04-27T16:48:33.200Z')] |
| && | Logical **AND** operator that evaluates `true` only if both conditions are `true`. | $.changes[?(@.attribute == "cloudLifecycleState" && @.newValue == "terminated")] |
| ! | **Not** - Negates the boolean expression. | $.identity.attributes[?(!@.alternateEmail)] |
| \|\| | Logical **OR** operator that evaluates `true` if at least one condition is `true`. | $.changes[?(@.attribute == "cloudLifecycleState" \|\| @.attribute == "department")] |
| contains | **Contains** - Checks whether a string contains the specified substring (case sensitive). | $[?($.identity.name contains "john")] |
### Developing Filters
Developing a filter can be faster when you use a tool like an online [JSONpath editor](https://jsonpath.herokuapp.com/). These tools can provide quick feedback on your filter, allowing you to focus on the exact filter expression you want before testing it in IdentityNow. Be aware, however, that these online tools may have subtle differences compared with SailPoint's implementation of Goessner JSONpath. Always test your JSONpath filter in IdentityNow before using it in production.
Developing a filter can be faster when you use a tool like an online
[JSONpath editor](https://jsonpath.herokuapp.com/). These tools can provide
quick feedback on your filter, allowing you to focus on the exact filter
expression you want before testing it in IdentityNow. Be aware, however, that
these online tools may have subtle differences compared with SailPoint's
implementation of Goessner JSONpath. Always test your JSONpath filter in
IdentityNow before using it in production.
Start by opening a [JSONpath editor](https://jsonpath.herokuapp.com/) in your browser. Make sure that the correct implementation is selected if there is more than one option. You can then paste in an example trigger input and start crafting your JSONpath expression.
Start by opening a [JSONpath editor](https://jsonpath.herokuapp.com/) in your
browser. Make sure that the correct implementation is selected if there is more
than one option. You can then paste in an example trigger input and start
crafting your JSONpath expression.
![JSONPath editor](./img/jsonpath-editor.png)
Most of the examples provided in the operator tables above can be used against the Identity Attributes Changed event trigger input, as seen below. You can find all of the input/output schemas for the other available triggers in our [API specification](/idn/api/beta/triggers#available-event-triggers).
Most of the examples provided in the operator tables above can be used against
the Identity Attributes Changed event trigger input, as seen below. You can find
all of the input/output schemas for the other available triggers in our
[API specification](/idn/api/beta/triggers#available-event-triggers).
```json
{
"identity": {
"id": "ee769173319b41d19ccec6cea52f237b",
"name": "john.doe",
"type": "IDENTITY"
},
"changes": [
{
"attribute": "department",
"oldValue": "Sales",
"newValue": "Marketing"
},
{
"attribute": "manager",
"oldValue": {
"id": "ee769173319b41d19ccec6c235423237b",
"name": "robert.brown",
"type": "IDENTITY"
},
"newValue": {
"id": "ee769173319b41d19ccec6c235423236c",
"name": "mary.johnson",
"type": "IDENTITY"
}
},
{
"attribute": "cloudLifecycleState",
"oldValue": "active",
"newValue": "terminated"
}
]
}
```
## Validating Filters
When you are finished developing your JSONpath filter, you must validate it with SailPoint's trigger service. There are two ways to do this: use the UI or the API.
### Validating Filters Using the UI
To validate a filter using the UI, subscribe to a new event trigger or edit an existing one. In the configuration options, paste your JSONpath expression in the `Filter` input box and select `Update`. If you do not receive an error message, then your filter expression is valid with SailPoint.
![UI filter](./img/ui-filter.png)
### Validating Filters Using the API
You can validate a trigger filter by using the [validate filter](/idn/api/beta/validate-filter) API endpoint. You must escape any double quotes, as seen in the example payload in the API description. Also, you must provide a sample input for the validation engine to run against. It is best to use the input example included in the input/output schemas for the event trigger you want to apply your filter to. Refer to [this table](/idn/api/beta/triggers#available-event-triggers) to find the schema of your event trigger. This is an example request:
```text
POST https://{tenant}.api.identitynow.com/beta/trigger-subscriptions/validate-filter
```
```json
{
"input": {
"identity": {
"id": "ee769173319b41d19ccec6cea52f237b",
"name": "john.doe",
@@ -155,17 +139,98 @@ POST https://{tenant}.api.identitynow.com/beta/trigger-subscriptions/validate-fi
"newValue": "terminated"
}
]
},
"filter": "$[?($.identity.name == \"john.doe\")]"
}
```
## Validating Filters
When you are finished developing your JSONpath filter, you must validate it with
SailPoint's trigger service. There are two ways to do this: use the UI or the
API.
### Validating Filters Using the UI
To validate a filter using the UI, subscribe to a new event trigger or edit an
existing one. In the configuration options, paste your JSONpath expression in
the `Filter` input box and select `Update`. If you do not receive an error
message, then your filter expression is valid with SailPoint.
![UI filter](./img/ui-filter.png)
### Validating Filters Using the API
You can validate a trigger filter by using the
[validate filter](/idn/api/beta/validate-filter) API endpoint. You must escape
any double quotes, as seen in the example payload in the API description. Also,
you must provide a sample input for the validation engine to run against. It is
best to use the input example included in the input/output schemas for the event
trigger you want to apply your filter to. Refer to
[this table](/idn/api/beta/triggers#available-event-triggers) to find the schema
of your event trigger. This is an example request:
```text
POST https://{tenant}.api.identitynow.com/beta/trigger-subscriptions/validate-filter
```
```json
{
"input": {
"identity": {
"id": "ee769173319b41d19ccec6cea52f237b",
"name": "john.doe",
"type": "IDENTITY"
},
"changes": [
{
"attribute": "department",
"oldValue": "Sales",
"newValue": "Marketing"
},
{
"attribute": "manager",
"oldValue": {
"id": "ee769173319b41d19ccec6c235423237b",
"name": "robert.brown",
"type": "IDENTITY"
},
"newValue": {
"id": "ee769173319b41d19ccec6c235423236c",
"name": "mary.johnson",
"type": "IDENTITY"
}
},
{
"attribute": "cloudLifecycleState",
"oldValue": "active",
"newValue": "terminated"
}
]
},
"filter": "$[?($.identity.name == \"john.doe\")]"
}
```
## Testing Filters
If SailPoint accepts your trigger filter, you must test whether it actually works. You must configure your trigger subscription to point to the URL of your testing service. [webhook.site](https://webhook.site) is an easy to use testing service. Just copy the unique URL it generates and paste it into your subscription's integration URL field. The easiest way to test a trigger subscription is to use the UI to fire off a test event.
If SailPoint accepts your trigger filter, you must test whether it actually
works. You must configure your trigger subscription to point to the URL of your
testing service. [webhook.site](https://webhook.site) is an easy to use testing
service. Just copy the unique URL it generates and paste it into your
subscription's integration URL field. The easiest way to test a trigger
subscription is to use the UI to fire off a test event.
![test subscription](./img/test-subscription.png)
Once you fire off a test event, monitor your webhook.site webpage for an incoming event. If the filter matches the test input, you will see an event come in. If the filter does not match the input, then it will not fire. Test both scenarios to make sure your filter is not always evaluating to `true`, and that it will indeed evaluate to `false` under the correct circumstances. For example, the filter `$[?($.identity.name contains "john")]` will match the test event for Identity Attributes Changed and you will see an event in webhook.site, but you also want to make sure that `$[?($.identity.name contains "archer")]` doesn't fire because the test input is always the same.
Once you fire off a test event, monitor your webhook.site webpage for an
incoming event. If the filter matches the test input, you will see an event come
in. If the filter does not match the input, then it will not fire. Test both
scenarios to make sure your filter is not always evaluating to `true`, and that
it will indeed evaluate to `false` under the correct circumstances. For example,
the filter `$[?($.identity.name contains "john")]` will match the test event for
Identity Attributes Changed and you will see an event in webhook.site, but you
also want to make sure that `$[?($.identity.name contains "archer")]` doesn't
fire because the test input is always the same.
If you want to control the test input to validate your filter against a more robust set of data, use the [test invocation](/idn/api/beta/start-test-invocation) API endpoint.
If you want to control the test input to validate your filter against a more
robust set of data, use the
[test invocation](/idn/api/beta/start-test-invocation) API endpoint.

View File

@@ -6,23 +6,52 @@ sidebar_label: Event Triggers
sidebar_position: 2
sidebar_class_name: eventTriggers
keywords: ["event", "triggers", "webhooks"]
description: The result of any action performed in a service is called an event. Services like IdentityNow constantly generate events like an update to a setting or the completion of an account aggregation.
description:
The result of any action performed in a service is called an event. Services
like IdentityNow constantly generate events like an update to a setting or the
completion of an account aggregation.
slug: /docs/event-triggers
tags: ["Event Triggers"]
---
## What Are Triggers
The result of any action performed in a service is called an **event**. Services like IdentityNow constantly generate events like an update to a setting or the completion of an account aggregation. Most events a service generates are of little value to clients, so services create event triggers, also known as webhooks, that allow clients to subscribe to specific events they are interested in. Similar to newsletters or RSS feeds, each subscription tells the service what event a client is interested in and where to send the client the notification.
The result of any action performed in a service is called an **event**. Services
like IdentityNow constantly generate events like an update to a setting or the
completion of an account aggregation. Most events a service generates are of
little value to clients, so services create event triggers, also known as
webhooks, that allow clients to subscribe to specific events they are interested
in. Similar to newsletters or RSS feeds, each subscription tells the service
what event a client is interested in and where to send the client the
notification.
## How Are Triggers Different from APIs
The biggest difference between event triggers and APIs is how data is accessed. Requesting data with an API is an active process, but receiving data from an event trigger is a passive process. Clients who want to get the latest data with an API must initiate the request. Clients who subscribe to an event trigger do not need to initiate a request. They are notified when the event occurs. This is similar to keeping up with the latest world news on the internet. You can initiate the request for data by opening a news website in your browser, or you can subscribe to a mail list to receive the latest news as it happens.
The biggest difference between event triggers and APIs is how data is accessed.
Requesting data with an API is an active process, but receiving data from an
event trigger is a passive process. Clients who want to get the latest data with
an API must initiate the request. Clients who subscribe to an event trigger do
not need to initiate a request. They are notified when the event occurs. This is
similar to keeping up with the latest world news on the internet. You can
initiate the request for data by opening a news website in your browser, or you
can subscribe to a mail list to receive the latest news as it happens.
## When to Use Triggers
It is best to use event triggers when you need to react to an event in real-time. Although you can set up a polling mechanism using APIs, polling uses more bandwidth and resources, and if you poll too quickly, you can reach an API's rate limits. Event triggers use less bandwidth, they do not affect your API rate limit, and they are as close as you can get to real-time. However, event triggers have downsides to consider. They must be accessible from the public internet so the trigger service knows where to send the notification, and they can be harder to configure and operate than APIs are.
It is best to use event triggers when you need to react to an event in
real-time. Although you can set up a polling mechanism using APIs, polling uses
more bandwidth and resources, and if you poll too quickly, you can reach an
API's rate limits. Event triggers use less bandwidth, they do not affect your
API rate limit, and they are as close as you can get to real-time. However,
event triggers have downsides to consider. They must be accessible from the
public internet so the trigger service knows where to send the notification, and
they can be harder to configure and operate than APIs are.
## How to Get Started With Event Triggers
Event triggers require different setup and testing steps than APIs do, so you should follow each document to better understand event triggers and the necessary steps to configure one. If this is your first time using event triggers, then you should use the [webhook testing service](./preparing-a-subscriber-service.md#webhook-testing-service) as you follow along.
Event triggers require different setup and testing steps than APIs do, so you
should follow each document to better understand event triggers and the
necessary steps to configure one. If this is your first time using event
triggers, then you should use the
[webhook testing service](./preparing-a-subscriber-service.md#webhook-testing-service)
as you follow along.

View File

@@ -6,33 +6,69 @@ sidebar_label: Preparing a Subscriber Service
sidebar_position: 2
sidebar_class_name: preparingSubscriberService
keywords: ["event", "triggers", "subscriber"]
description: Before you can subscribe to an event trigger, you must prepare a service that can accept incoming HTTP requests from the event trigger service.
description:
Before you can subscribe to an event trigger, you must prepare a service that
can accept incoming HTTP requests from the event trigger service.
slug: /docs/event-triggers/preparing-subscriber-service
tags: ["Event Triggers"]
---
Before you can subscribe to an event trigger, you must prepare a service that can accept incoming HTTP requests from the event trigger service. More specifically, your client service must accept a POST request to an endpoint of its choosing, with the ability to parse the JSON data sent by the trigger. There are many ways to accomplish this, but this guide covers four of the most common types of client services you can build to handle event triggers.
Before you can subscribe to an event trigger, you must prepare a service that
can accept incoming HTTP requests from the event trigger service. More
specifically, your client service must accept a POST request to an endpoint of
its choosing, with the ability to parse the JSON data sent by the trigger. There
are many ways to accomplish this, but this guide covers four of the most common
types of client services you can build to handle event triggers.
## Webhook Testing Service
There are many webhook testing websites that generate a unique URL you can use to subscribe to an event trigger and explore the data sent by the trigger. One site is https://webhook.site. This site generates a unique URL whenever you open it, which you can copy and paste into the subscription configuration in IdentityNow. Any events that the trigger generates will be sent to this website for you to analyze.
There are many webhook testing websites that generate a unique URL you can use
to subscribe to an event trigger and explore the data sent by the trigger. One
site is https://webhook.site. This site generates a unique URL whenever you open
it, which you can copy and paste into the subscription configuration in
IdentityNow. Any events that the trigger generates will be sent to this website
for you to analyze.
![Webhook.site](./img/webhook-site.png)
The purpose of webhook testing services is to make it easy to set up a trigger and see the data of the events that will eventually be sent to your production service. This can help in the early development process when you explore the data the event trigger sends and how to best access the data you need.
The purpose of webhook testing services is to make it easy to set up a trigger
and see the data of the events that will eventually be sent to your production
service. This can help in the early development process when you explore the
data the event trigger sends and how to best access the data you need.
## Native SaaS Workflows
Some SaaS vendors provide built-in workflow builders in their products so you do not have to use a no-code provider. Slack, for example, has a premium [workflow builder](https://slack.com/help/articles/360035692513-Guide-to-Workflow-Builder) feature that generates a unique URL you can use to configure your subscription. Slack's workflow builder can then listen for events sent by your trigger and perform Slack specific actions on the data, like sending a user a message when his or her access request is approved.
Some SaaS vendors provide built-in workflow builders in their products so you do
not have to use a no-code provider. Slack, for example, has a premium
[workflow builder](https://slack.com/help/articles/360035692513-Guide-to-Workflow-Builder)
feature that generates a unique URL you can use to configure your subscription.
Slack's workflow builder can then listen for events sent by your trigger and
perform Slack specific actions on the data, like sending a user a message when
his or her access request is approved.
![Slack workflow](./img/slack-workflow.png)
## No-code Provider
No-code/low-code providers, like Zapier and Microsoft Power Automate, make it easy to consume event triggers and perform actions based on the event data. They are popular solutions for those looking to prototype or quickly create automated business processes, and they cater to novices and advanced users alike. Each no-code provider has documentation about how to create a new workflow and subscribe to an event trigger or webhook, so you must find the relevant documentation for your no-code provider to learn how to set one up. Zapier has the ability to configure a webhook action that generates a unique URL you can configure in your event trigger subscription.
No-code/low-code providers, like Zapier and Microsoft Power Automate, make it
easy to consume event triggers and perform actions based on the event data. They
are popular solutions for those looking to prototype or quickly create automated
business processes, and they cater to novices and advanced users alike. Each
no-code provider has documentation about how to create a new workflow and
subscribe to an event trigger or webhook, so you must find the relevant
documentation for your no-code provider to learn how to set one up. Zapier has
the ability to configure a webhook action that generates a unique URL you can
configure in your event trigger subscription.
![Zapier webhook](./img/zapier-webhook.png)
## Custom Application
A custom application is one you write in a language of your choosing and host in your own infrastructure, cloud, or on-premise. This is the most advanced option for implementing an event trigger client service. Although it requires a great deal of skill and knowledge to build, deploy, and operate your own service that can consume requests over HTTP, a custom application offers the most power and flexibility to implement your use cases. You can learn more about custom applications by checking out our [Event Trigger Example Application](https://github.com/sailpoint-oss/event-trigger-examples).
A custom application is one you write in a language of your choosing and host in
your own infrastructure, cloud, or on-premise. This is the most advanced option
for implementing an event trigger client service. Although it requires a great
deal of skill and knowledge to build, deploy, and operate your own service that
can consume requests over HTTP, a custom application offers the most power and
flexibility to implement your use cases. You can learn more about custom
applications by checking out our
[Event Trigger Example Application](https://github.com/sailpoint-oss/event-trigger-examples).

View File

@@ -1,190 +0,0 @@
---
id: responding-to-request-response-trigger
title: Responding To Request Response Triggers
pagination_label: Responding To Request Response Triggers
sidebar_label: Responding To Request Response Triggers
sidebar_position: 6
sidebar_class_name: respondingRequestResponseTriggers
keywords: ["event", "trigger", "request response"]
description: You can specify how your application interacts with a REQUEST_RESPONSE type trigger service by selecting an invocation response mode in the Response Type dropdown when editing or creating a REQUEST_RESPONSE subscription.
slug: /docs/event-triggers/responding-request-response-trigger
tags: ["Event Triggers"]
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
## Invocation Response Modes for REQUEST_RESPONSE Type Triggers
You can specify how your application interacts with a `REQUEST_RESPONSE` type trigger service by selecting an invocation response mode in the **Response Type** dropdown when editing or creating a `REQUEST_RESPONSE` subscription. There are three response modes to choose from: `SYNC`, `ASYNC`, and `DYNAMIC`. These response modes are only available when the subscription type is set to `HTTP`.
| Response Modes | Description |
|----------|:-------------:|
| `SYNC` | This type of response creates a *synchronous* flow between the trigger service and the custom application. Once a trigger has been invoked, the custom application is expected to respond within 10 seconds. If the application takes longer than 10 seconds to respond, the trigger invocation will terminate without making any decisions. |
| `ASYNC` | This type of response creates an *asynchronous* flow between the trigger service and the custom application. When a trigger is invoked, the custom application does not need to respond immediately. The trigger service will provide a URL and a secret that the custom application can use to complete the invocation at a later time. The application must complete the invocation before the configured deadline on the subscription.|
| `DYNAMIC` | This type of response gives the custom application the ability to choose whether it handles the invocation request synchronously or asynchronously on a per-event basis. In some cases, the application may choose `SYNC` mode because it is able to respond quickly to the invocation. In other cases, it may choose `ASYNC` because it needs to run a long running task before responding to the invocation.|
## Responding to REQUEST_RESPONSE Trigger
<Tabs>
<TabItem value="sync" label="SYNC Response" default>
<!-- Uncomment this once the model definition links are fixed
The custom application responds to the trigger invocation with an appropriate payload. For example, the application may receive a request from the [Access Request Dynamic Approver](https://developer.sailpoint.com/apis/beta/#tag/Event-Trigger-Models) trigger. The application will have **10 seconds** to analyze the event details and respond with a 200 (OK) status code and a [response payload](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Dynamic-Approver-Event-Trigger-Output) that contains the identity to add to the approval chain. -->
The custom application responds to the trigger invocation with an appropriate payload. For example, the application may receive a request from the Access Request Dynamic Approver trigger. The application will have **10 seconds** to analyze the event details and respond with a 200 (OK) status code and a response payload that contains the identity to add to the approval chain. For example, the response may look like this:
200 (OK)
```json
{
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
```
</TabItem>
<TabItem value="async" label="ASYNC Response">
<!-- Uncomment this once the model definition links are fixed
The custom application only needs to acknowledge that it has received the trigger invocation request by returning an HTTP status of 200 (OK) with an empty JSON object (ex. `{}`) in the response body within **10 seconds** of receiving the event. It then has until the configured deadline on the subscription to provide a full response to the invocation. For example, the application may receive a request from the [Access Request Dynamic Approver](https://developer.sailpoint.com/apis/beta/#tag/Event-Trigger-Models) trigger. An example of the request payload that the application might receive is as follows: -->
The custom application only needs to acknowledge that it has received the trigger invocation request by returning an HTTP status of 200 (OK) with an empty JSON object (ex. `{}`) in the response body within **10 seconds** of receiving the event. It then has until the configured deadline on the subscription to provide a full response to the invocation. For example, the application may receive a request from the Access Request Dynamic Approver trigger. An example of the request payload that the application might receive is as follows:
```json
{
"_metadata": {
"callbackURL": "https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete",
"responseMode": "async",
"secret": "c1c60493-3347-4550-9c00-123cdde",
"triggerId": "idn:access-request-dynamic-approver",
"triggerType": "requestResponse"
},
"accessRequestId": "4b4d982dddff4267ab12f0f1e72b5a6d",
"requestedBy": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Admin",
"type": "IDENTITY"
},
"requestedFor": {
"id": "2c91808b6ef1d43e016efba0ce470909",
"name": "Ed Engineer",
"type": "IDENTITY"
},
"requestedItems": [
{
"comment": "Ed needs this access for his day to day job activities",
"description": "Engineering Access",
"id": "2c91808b6ef1d43e016efba0ce470904",
"name": "Engineering Access",
"operation": "Add",
"type": "ACCESS_PROFILE"
}
]
}
```
The application will immediately respond to the invocation with a 200 (OK) status code and an empty JSON object.
200 (OK)
```json
{}
```
Once the application has made a decision on how to respond, it will use the `callbackURL` and `secret` provided in the `_metadata` object from the original request to complete the invocation. An example response might look like the following:
POST `https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete`
```json
{
"secret": "0f11f2a4-7c94-4bf3-a2bd-742580fe3bde",
"output": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
}
```
</TabItem>
<TabItem value="dynamic" label="DYNAMIC Response">
The custom application determines arbitrarily whether to respond to the trigger invocation as `SYNC` or `ASYNC`. If the application wishes to respond as `SYNC`, it should follow the directions for a `SYNC` response type, responding within **10 seconds** of the invocation. In the case of `ASYNC`, the custom application only needs to acknowledge that it has received the trigger invocation request with a 202 (Accepted) within **10 seconds** of receiving the event and complete the invocation at a later time using the `callbackURL` and `secret` provided in the `_metadata` object.
An example of the request payload that the application might receive is as follows:
```json
{
"_metadata": {
"callbackURL": "https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete",
"responseMode": "async",
"secret": "c1c60493-3347-4550-9c00-123cdde",
"triggerId": "idn:access-request-dynamic-approver",
"triggerType": "requestResponse"
},
"accessRequestId": "4b4d982dddff4267ab12f0f1e72b5a6d",
"requestedBy": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Admin",
"type": "IDENTITY"
},
"requestedFor": {
"id": "2c91808b6ef1d43e016efba0ce470909",
"name": "Ed Engineer",
"type": "IDENTITY"
},
"requestedItems": [
{
"comment": "Ed needs this access for his day to day job activities",
"description": "Engineering Access",
"id": "2c91808b6ef1d43e016efba0ce470904",
"name": "Engineering Access",
"operation": "Add",
"type": "ACCESS_PROFILE"
}
]
}
```
To respond as `SYNC`, simply respond to the invocation within 10 seconds.
200 (OK)
```json
{
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
```
To respond as `ASYNC`, start by responding to the invocation with a 202 (Accepted).
202 (Accepted)
```json
{}
```
Then, use the `callbackURL` and `secret` to send a POST request to the invocation with the decision.
POST `https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete`
```json
{
"secret": "0f11f2a4-7c94-4bf3-a2bd-742580fe3bde",
"output": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
}
```
</TabItem>
</Tabs>
---
## Trigger Invocation Status
To check the status of a particular trigger invocation, you can use the [list invocation statuses](/idn/api/beta/list-invocation-status) endpoint. The status endpoint works for both `REQUEST_RESPONSE` and `FIRE_AND_FORGET` triggers. However, the status of `FIRE_AND_FORGET` trigger invocations will contain null values in their `completeInvocationInput` since `FIRE_AND_FORGET` triggers don't need a response to complete.

View File

@@ -0,0 +1,233 @@
---
id: responding-to-request-response-trigger
title: Responding To Request Response Triggers
pagination_label: Responding To Request Response Triggers
sidebar_label: Responding To Request Response Triggers
sidebar_position: 6
sidebar_class_name: respondingRequestResponseTriggers
keywords: ["event", "trigger", "request response"]
description:
You can specify how your application interacts with a REQUEST_RESPONSE type
trigger service by selecting an invocation response mode in the Response Type
dropdown when editing or creating a REQUEST_RESPONSE subscription.
slug: /docs/event-triggers/responding-request-response-trigger
tags: ["Event Triggers"]
---
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
## Invocation Response Modes for REQUEST_RESPONSE Type Triggers
You can specify how your application interacts with a `REQUEST_RESPONSE` type
trigger service by selecting an invocation response mode in the **Response
Type** dropdown when editing or creating a `REQUEST_RESPONSE` subscription.
There are three response modes to choose from: `SYNC`, `ASYNC`, and `DYNAMIC`.
These response modes are only available when the subscription type is set to
`HTTP`.
| Response Modes | Description |
| -------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
| `SYNC` | This type of response creates a _synchronous_ flow between the trigger service and the custom application. Once a trigger has been invoked, the custom application is expected to respond within 10 seconds. If the application takes longer than 10 seconds to respond, the trigger invocation will terminate without making any decisions. |
| `ASYNC` | This type of response creates an _asynchronous_ flow between the trigger service and the custom application. When a trigger is invoked, the custom application does not need to respond immediately. The trigger service will provide a URL and a secret that the custom application can use to complete the invocation at a later time. The application must complete the invocation before the configured deadline on the subscription. |
| `DYNAMIC` | This type of response gives the custom application the ability to choose whether it handles the invocation request synchronously or asynchronously on a per-event basis. In some cases, the application may choose `SYNC` mode because it is able to respond quickly to the invocation. In other cases, it may choose `ASYNC` because it needs to run a long running task before responding to the invocation. |
## Responding to REQUEST_RESPONSE Trigger
<Tabs>
<TabItem value="sync" label="SYNC Response" default>
<!-- Uncomment this once the model definition links are fixed
The custom application responds to the trigger invocation with an appropriate payload. For example, the application may receive a request from the [Access Request Dynamic Approver](https://developer.sailpoint.com/apis/beta/#tag/Event-Trigger-Models) trigger. The application will have **10 seconds** to analyze the event details and respond with a 200 (OK) status code and a [response payload](https://developer.sailpoint.com/apis/beta/#section/Access-Request-Dynamic-Approver-Event-Trigger-Output) that contains the identity to add to the approval chain. -->
The custom application responds to the trigger invocation with an appropriate
payload. For example, the application may receive a request from the Access
Request Dynamic Approver trigger. The application will have **10 seconds** to
analyze the event details and respond with a 200 (OK) status code and a response
payload that contains the identity to add to the approval chain. For example,
the response may look like this:
200 (OK)
```json
{
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
```
</TabItem>
<TabItem value="async" label="ASYNC Response">
<!-- Uncomment this once the model definition links are fixed
The custom application only needs to acknowledge that it has received the trigger invocation request by returning an HTTP status of 200 (OK) with an empty JSON object (ex. `{}`) in the response body within **10 seconds** of receiving the event. It then has until the configured deadline on the subscription to provide a full response to the invocation. For example, the application may receive a request from the [Access Request Dynamic Approver](https://developer.sailpoint.com/apis/beta/#tag/Event-Trigger-Models) trigger. An example of the request payload that the application might receive is as follows: -->
The custom application only needs to acknowledge that it has received the
trigger invocation request by returning an HTTP status of 200 (OK) with an empty
JSON object (ex. `{}`) in the response body within **10 seconds** of receiving
the event. It then has until the configured deadline on the subscription to
provide a full response to the invocation. For example, the application may
receive a request from the Access Request Dynamic Approver trigger. An example
of the request payload that the application might receive is as follows:
```json
{
"_metadata": {
"callbackURL": "https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete",
"responseMode": "async",
"secret": "c1c60493-3347-4550-9c00-123cdde",
"triggerId": "idn:access-request-dynamic-approver",
"triggerType": "requestResponse"
},
"accessRequestId": "4b4d982dddff4267ab12f0f1e72b5a6d",
"requestedBy": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Admin",
"type": "IDENTITY"
},
"requestedFor": {
"id": "2c91808b6ef1d43e016efba0ce470909",
"name": "Ed Engineer",
"type": "IDENTITY"
},
"requestedItems": [
{
"comment": "Ed needs this access for his day to day job activities",
"description": "Engineering Access",
"id": "2c91808b6ef1d43e016efba0ce470904",
"name": "Engineering Access",
"operation": "Add",
"type": "ACCESS_PROFILE"
}
]
}
```
The application will immediately respond to the invocation with a 200 (OK)
status code and an empty JSON object.
200 (OK)
```json
{}
```
Once the application has made a decision on how to respond, it will use the
`callbackURL` and `secret` provided in the `_metadata` object from the original
request to complete the invocation. An example response might look like the
following:
POST
`https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete`
```json
{
"secret": "0f11f2a4-7c94-4bf3-a2bd-742580fe3bde",
"output": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
}
```
</TabItem>
<TabItem value="dynamic" label="DYNAMIC Response">
The custom application determines arbitrarily whether to respond to the trigger
invocation as `SYNC` or `ASYNC`. If the application wishes to respond as `SYNC`,
it should follow the directions for a `SYNC` response type, responding within
**10 seconds** of the invocation. In the case of `ASYNC`, the custom application
only needs to acknowledge that it has received the trigger invocation request
with a 202 (Accepted) within **10 seconds** of receiving the event and complete
the invocation at a later time using the `callbackURL` and `secret` provided in
the `_metadata` object.
An example of the request payload that the application might receive is as
follows:
```json
{
"_metadata": {
"callbackURL": "https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete",
"responseMode": "async",
"secret": "c1c60493-3347-4550-9c00-123cdde",
"triggerId": "idn:access-request-dynamic-approver",
"triggerType": "requestResponse"
},
"accessRequestId": "4b4d982dddff4267ab12f0f1e72b5a6d",
"requestedBy": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Admin",
"type": "IDENTITY"
},
"requestedFor": {
"id": "2c91808b6ef1d43e016efba0ce470909",
"name": "Ed Engineer",
"type": "IDENTITY"
},
"requestedItems": [
{
"comment": "Ed needs this access for his day to day job activities",
"description": "Engineering Access",
"id": "2c91808b6ef1d43e016efba0ce470904",
"name": "Engineering Access",
"operation": "Add",
"type": "ACCESS_PROFILE"
}
]
}
```
To respond as `SYNC`, simply respond to the invocation within 10 seconds.
200 (OK)
```json
{
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
```
To respond as `ASYNC`, start by responding to the invocation with a 202
(Accepted).
202 (Accepted)
```json
{}
```
Then, use the `callbackURL` and `secret` to send a POST request to the
invocation with the decision.
POST
`https://{tenant}.api.identitynow.com/beta/trigger-invocations/e9103ca9-02c4-bb0f-9441-94b3af012345/complete`
```json
{
"secret": "0f11f2a4-7c94-4bf3-a2bd-742580fe3bde",
"output": {
"id": "2c91808b6ef1d43e016efba0ce470906",
"name": "Adam Adams",
"type": "IDENTITY"
}
}
```
</TabItem>
</Tabs>
---
## Trigger Invocation Status
To check the status of a particular trigger invocation, you can use the
[list invocation statuses](/idn/api/beta/list-invocation-status) endpoint. The
status endpoint works for both `REQUEST_RESPONSE` and `FIRE_AND_FORGET`
triggers. However, the status of `FIRE_AND_FORGET` trigger invocations will
contain null values in their `completeInvocationInput` since `FIRE_AND_FORGET`
triggers don't need a response to complete.

View File

@@ -6,26 +6,48 @@ sidebar_label: Subscribing to a Trigger
sidebar_position: 3
sidebar_class_name: subscribingToTrigger
keywords: ["event", "trigger", "subscribing"]
description: Usually, you will subscribe to event triggers using the user interface in IDN. Refer to subscribing to event triggers to learn how to subscribe to an event trigger through the IDN UI.
description:
Usually, you will subscribe to event triggers using the user interface in IDN.
Refer to subscribing to event triggers to learn how to subscribe to an event
trigger through the IDN UI.
slug: /docs/event-triggers/subscribing-to-trigger
tags: ["Event Triggers"]
---
## View the Available Triggers
SailPoint is continuously developing new event triggers to satisfy different use cases. Some of these triggers are considered **early access** and are only available in an IDN tenant upon request. To see a list of available event triggers in your tenant, go to the **Event Triggers** tab in the **Admin** section of IdentityNow. The first page is a list of your tenant's available event triggers. You can select each trigger to learn more about its type, what causes it to fire, and what the payload will look like.
SailPoint is continuously developing new event triggers to satisfy different use
cases. Some of these triggers are considered **early access** and are only
available in an IDN tenant upon request. To see a list of available event
triggers in your tenant, go to the **Event Triggers** tab in the **Admin**
section of IdentityNow. The first page is a list of your tenant's available
event triggers. You can select each trigger to learn more about its type, what
causes it to fire, and what the payload will look like.
![Available triggers](./img/available-triggers.png)
## Subscribe to a Trigger from the UI
Usually, you will subscribe to event triggers using the user interface in IDN.
Refer to [subscribing to event triggers](https://documentation.sailpoint.com/saas/help/common/event_triggers.html#subscribing-to-event-triggers) to learn how to subscribe to an event trigger through the IDN UI.
Refer to
[subscribing to event triggers](https://documentation.sailpoint.com/saas/help/common/event_triggers.html#subscribing-to-event-triggers)
to learn how to subscribe to an event trigger through the IDN UI.
## Subscribe to a Trigger from the API
Sometimes, you may need to use the API to subscribe to event triggers. This can occur when you want to programmatically subscribe/unsubscribe from event triggers in a custom application or no-code solution that does not have a native integration with SailPoint.
Sometimes, you may need to use the API to subscribe to event triggers. This can
occur when you want to programmatically subscribe/unsubscribe from event triggers
in a custom application or no-code solution that does not have a native
integration with SailPoint.
If this is your first time calling a SailPoint API, refer to the [getting started guide](../../../api/getting-started.md) to learn how to generate a token and call the APIs.
If this is your first time calling a SailPoint API, refer to the
[getting started guide](../../../api/getting-started.md) to learn how to
generate a token and call the APIs.
Start by reviewing the list of [available event triggers](/idn/api/beta/triggers#available-event-triggers), and take note of the **ID** of the trigger you want to subscribe to (ex `idn:access-request-dynamic-approver`). Use the [create subscription](/idn/api/beta/create-subscription) endpoint to subscribe to an event trigger of your choosing. See the API docs for the latest details about how to craft a subscription request.
Start by reviewing the list of
[available event triggers](/idn/api/beta/triggers#available-event-triggers), and
take note of the **ID** of the trigger you want to subscribe to (ex
`idn:access-request-dynamic-approver`). Use the
[create subscription](/idn/api/beta/create-subscription) endpoint to subscribe
to an event trigger of your choosing. See the API docs for the latest details
about how to craft a subscription request.

View File

@@ -6,22 +6,40 @@ sidebar_label: Testing Triggers
sidebar_position: 5
sidebar_class_name: testingTriggers
keywords: ["event", "trigger", "testing"]
description: It is important to test your trigger subscription configuration with your actual subscribing service before enabling your subscription for production use.
description:
It is important to test your trigger subscription configuration with your
actual subscribing service before enabling your subscription for production
use.
slug: /docs/event-triggers/testing-triggers
tags: ["Event Triggers"]
---
It is important to test your trigger subscription configuration with your actual subscribing service (not a test site like [webhook.site](https://webhook.site)) before enabling your subscription for production use. Testing subscriptions ensures that your subscribing service can successfully receive events and that you are receiving the correct events based on the filter you have provided.
It is important to test your trigger subscription configuration with your actual
subscribing service (not a test site like [webhook.site](https://webhook.site))
before enabling your subscription for production use. Testing subscriptions
ensures that your subscribing service can successfully receive events and that
you are receiving the correct events based on the filter you have provided.
## Sending Test Invocations
The easiest way to send a test event to your subscribing service is to use the **Test Subscription** command. Go to your subscription in the Event Trigger UI, select **Options** to the right of the subscription, and select **Test Subscription**.
The easiest way to send a test event to your subscribing service is to use the
**Test Subscription** command. Go to your subscription in the Event Trigger UI,
select **Options** to the right of the subscription, and select **Test
Subscription**.
![test subscription](./img/test-subscription.png)
Doing so sends a test event to your subscribing service, using the default example payload for the specific trigger you are subscribing to. This is an easy way to validate that your service can receive events, but it lacks the ability to modify the event payload to test your filter against different payloads. However, there is an API endpoint you can use to modify the test payload.
Doing so sends a test event to your subscribing service, using the default
example payload for the specific trigger you are subscribing to. This is an easy
way to validate that your service can receive events, but it lacks the ability
to modify the event payload to test your filter against different payloads.
However, there is an API endpoint you can use to modify the test payload.
If you want to control the test input to validate your filter against a more robust set of data, you can use the [test invocation](/idn/api/beta/start-test-invocation) API endpoint. You can use this API to send an input payload with any values that you want. This is an example of an invocation of this API:
If you want to control the test input to validate your filter against a more
robust set of data, you can use the
[test invocation](/idn/api/beta/start-test-invocation) API endpoint. You can use
this API to send an input payload with any values that you want. This is an
example of an invocation of this API:
```text
POST `https://{tenant}.api.identitynow.com/beta/trigger-invocations/test`
@@ -29,31 +47,31 @@ POST `https://{tenant}.api.identitynow.com/beta/trigger-invocations/test`
```json
{
"triggerId": "idn:access-request-pre-approval",
"input": {
"accessRequestId": "2c91808b6ef1d43e016efba0ce470904",
"requestedFor": {
"type": "IDENTITY",
"id": "2c91808568c529c60168cca6f90c1313",
"name": "William Wilson"
},
"requestedItems": [
{
"id": "2c91808b6ef1d43e016efba0ce470904",
"name": "Engineering Access",
"description": "Access to engineering database",
"type": "ACCESS_PROFILE",
"operation": "Add",
"comment": "William needs this access to do his job."
"triggerId": "idn:access-request-pre-approval",
"input": {
"accessRequestId": "2c91808b6ef1d43e016efba0ce470904",
"requestedFor": {
"type": "IDENTITY",
"id": "2c91808568c529c60168cca6f90c1313",
"name": "William Wilson"
},
"requestedItems": [
{
"id": "2c91808b6ef1d43e016efba0ce470904",
"name": "Engineering Access",
"description": "Access to engineering database",
"type": "ACCESS_PROFILE",
"operation": "Add",
"comment": "William needs this access to do his job."
}
],
"requestedBy": {
"type": "IDENTITY",
"id": "2c91808568c529c60168cca6f90c1314",
"name": "Rob Robertson"
}
],
"requestedBy": {
"type": "IDENTITY",
"id": "2c91808568c529c60168cca6f90c1314",
"name": "Rob Robertson"
}
},
"contentJson": {}
"contentJson": {}
}
```
@@ -61,23 +79,38 @@ POST `https://{tenant}.api.identitynow.com/beta/trigger-invocations/test`
### Trigger Service Issues
If your subscribing service is not receiving your test invocations, you have a couple of options to debug the issue. Start by viewing the activity log for the subscription in the UI to ensure your test events are actually being sent.
If your subscribing service is not receiving your test invocations, you have a
couple of options to debug the issue. Start by viewing the activity log for the
subscription in the UI to ensure your test events are actually being sent.
![activity log](./img/activity-log.png)
Check the **Created** date with the time you sent the test events. If they are being sent, check the event details. Look for any errors being reported, and ensure your subscribing service's subscription ID is in the `subscriptionId` the event was sent to.
Check the **Created** date with the time you sent the test events. If they are
being sent, check the event details. Look for any errors being reported, and
ensure your subscribing service's subscription ID is in the `subscriptionId` the
event was sent to.
![debug connection](./img/debug-connection.png)
You can also view the activity log by using the [list latest invocation statuses](/idn/api/beta/list-invocation-status) endpoint.
You can also view the activity log by using the
[list latest invocation statuses](/idn/api/beta/list-invocation-status)
endpoint.
### Filter Issues
If you do not see your events in the activity log, it may be a filtering issue. If the filter you configured on the subscription is not matching the test event data, no event will be sent. Double check your filter expression with the test payload in a JSONPath editor to ensure the filter is valid and matches your data. See [Filtering Events](./filtering-events.md) for more information.
If you do not see your events in the activity log, it may be a filtering issue.
If the filter you configured on the subscription is not matching the test event
data, no event will be sent. Double check your filter expression with the test
payload in a JSONPath editor to ensure the filter is valid and matches your
data. See [Filtering Events](./filtering-events.md) for more information.
### Misconfigured Subscription
Double check that your subscription configuration is correct.
Double check that your subscription configuration is correct.
- Ensure the URL you provided is accessible from the public internet. If your subscribing service is hosted internally in your company's intranet, you may be able to access it from your computer, but the trigger service may not be able to.
- Verify that the authentication details are correct. Verify that the username/password or bearer token is valid.
- Ensure the URL you provided is accessible from the public internet. If your
subscribing service is hosted internally in your company's intranet, you may
be able to access it from your computer, but the trigger service may not be
able to.
- Verify that the authentication details are correct. Verify that the
username/password or bearer token is valid.

View File

@@ -6,23 +6,43 @@ sidebar_label: Trigger Types
sidebar_position: 1
sidebar_class_name: triggerTypes
keywords: ["event", "trigger", "types"]
description: Different types of triggers exist, and those types of triggers do different things depending on their type.
description:
Different types of triggers exist, and those types of triggers do different
things depending on their type.
slug: /docs/event-triggers/trigger-types
tags: ["Event Triggers"]
---
## Fire and Forget
A fire and forget trigger only supports one-way communication with subscribers. Its only job is to forward each event it receives to each subscribing service. This trigger type does not wait for a response from subscribers. It has no way of knowing whether subscribers actually receive the event, and it does not have any mechanism for resending events. Think of this trigger type as live television. You can only see what is happening in real-time. You cannot rewind the live feed or interact with the broadcast in any way. This trigger type is the simplest and most common trigger type among SailPoint's event triggers.
A fire and forget trigger only supports one-way communication with subscribers.
Its only job is to forward each event it receives to each subscribing service.
This trigger type does not wait for a response from subscribers. It has no way
of knowing whether subscribers actually receive the event, and it does not have
any mechanism for resending events. Think of this trigger type as live
television. You can only see what is happening in real-time. You cannot rewind
the live feed or interact with the broadcast in any way. This trigger type is
the simplest and most common trigger type among SailPoint's event triggers.
:::caution
Fire and forget triggers can have a maximum of 50 subscribers per event.
:::
## Request Response
A request response trigger allows two-way communication between the trigger service and the subscriber. The main difference with this trigger type is that it expects a response from the subscriber with directions about how to proceed with the event. For example, the access request dynamic approval event trigger will send the subscriber details about the access request, and the subscriber may respond to the trigger with the identity ID to include in the approval process for an access request. This trigger type allows subscribers to not only receive events in real-time, but to act on them as well.
A request response trigger allows two-way communication between the trigger
service and the subscriber. The main difference with this trigger type is that
it expects a response from the subscriber with directions about how to proceed
with the event. For example, the access request dynamic approval event trigger
will send the subscriber details about the access request, and the subscriber
may respond to the trigger with the identity ID to include in the approval
process for an access request. This trigger type allows subscribers to not only
receive events in real-time, but to act on them as well.
:::caution
Request response triggers can only have one subscriber per event.
:::

View File

@@ -1,127 +0,0 @@
---
id: docs
title: IdentityNow
pagination_label: Introduction
sidebar_label: IdentityNow
sidebar_position: 1
sidebar_class_name: IdentityNow
hide_title: true
keywords: ["IdentityNow", "development", "developer", "portal", "getting started", "docs", "documentation"]
description: This is the introduction documentation to development on the IdentityNow platform.
slug: /docs
tags: ["Introduction", "Getting Started"]
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
🧭 There are many different ways in which you are able to extend the IdentityNow platform beyond what comes out of the box. Please, explore our documentation and see what is possible! This documentation assumes that you are a current customer or partner and already have access to the IdentityNow application.
:::info Looking to become a partner?
If you are interested in becoming a partner, be it an ISV or Channel/Implementation partner, [click here](https://www.sailpoint.com/partners/become-partner/).
:::
## Before You Get Started
Please read this introduction carefully, as it contains recommendations and need-to-know information pertaining to all features of the IdentityNow platform.
### Authentication
Many of the interactions you have through our various features will have you interacting with our APIs either directly or indirectly. It would be valuable to familiarize yourself with [Authentication](../../api/authentication.md) on our platform.
### Understanding JSON
JSON (JavaScript Object Notation) is a lightweight data-interchange format. It is easy for humans to read and write. It is easy for machines to parse and generate. JSON is at the heart of every API and development feature that SailPoint offers in IdentityNow—usually either inputs or outputs to/from a system. [Learn more about JSON here](https://www.w3schools.com/js/js_json_intro.asp).
### Understanding Webhooks
A webhook in web development is a method of augmenting or altering the behavior of a web page or web application with custom callbacks. These callbacks may be maintained, modified, and managed by third-party users and developers who may not necessarily be affiliated with the originating website or application. Our [Event Triggers](docs/identity-now/event-triggers) are a form of webhook, for example. [Learn more about webhooks here](https://zapier.com/blog/what-are-webhooks/).
## Recommended Technologies
While you can use whichever development tools you are most comfortable with or find most useful, we will recommend tools here for those that are new to development.
:::tip
Our team, when developing documentation, example code/applications, videos, etc. will almost always use one of the tools/languages listed below.
:::
### IDEs (Integrated Development Environments)
IDEs are great for consolidating different aspects of programming into one tool. They're great for not only writing code, but managing your code as well. While you can use any IDE you feel is best fit for you and the task, here is what we use:
<Tabs groupId="operating-systems">
<TabItem value="win" label="Windows">
| IDE | Description |
|---|---|
| [VS Code](https://code.visualstudio.com/) | VS Code is a lightweight IDE that we believe is perfect for development on our IdentityNow platform. We also have great plug-in support from our community, like [this one](https://marketplace.visualstudio.com/items?itemName=yannick-beot-sp.vscode-sailpoint-identitynow)! |
| [IntelliJ](https://www.jetbrains.com/idea/) | If you happen to be writing in Java or developing Rules on our platform, we typically recommend IntelliJ. While Java development can be done in VS Code, you will have an easier time using an IDE that was purpose-built for Java. |
</TabItem>
<TabItem value="mac" label="Mac">
| IDE | Description |
|---|---|
| [VS Code](https://code.visualstudio.com/) | VS Code is a lightweight IDE that we believe is perfect for development on our IdentityNow platform. We also have great plug-in support from our community, like [this one](https://marketplace.visualstudio.com/items?itemName=yannick-beot-sp.vscode-sailpoint-identitynow)! |
| [IntelliJ](https://www.jetbrains.com/idea/) | If you happen to be writing in Java or developing Rules on our platform, we typically recommend IntelliJ. While Java development can be done in VS Code, you will have an easier time using an IDE that was purpose-built for Java. |
</TabItem>
<TabItem value="linux" label="Linux">
| IDE | Description |
|---|---|
| [VS Code](https://code.visualstudio.com/) | VS Code is a lightweight IDE that we believe is perfect for development on our IdentityNow platform. We also have great plug-in support from our community, like [this one](https://marketplace.visualstudio.com/items?itemName=yannick-beot-sp.vscode-sailpoint-identitynow)! |
| [IntelliJ](https://www.jetbrains.com/idea/) | If you happen to be writing in Java or developing Rules on our platform, we typically recommend IntelliJ. While Java development can be done in VS Code, you will have an easier time using an IDE that was purpose-built for Java. |
</TabItem>
</Tabs>
---
### CLI Environments
When interacting with our platform or writing code related to IdentityNow, we often use the CLI. While you can use any CLI that you feel is best fit for you and your job, here are the CLI environments we use and recommend:
<Tabs groupId="operating-systems">
<TabItem value="win" label="Windows">
| CLI Tool | Description |
|---|---|
| Windows PowerShell | Windows PowerShell is a modern terminal on windows (also available on Mac/Linux) that offers versatile CLI, task automation, and configuration management options. |
| [Windows Terminal](https://apps.microsoft.com/store/detail/windows-terminal/9N0DX20HK701?hl=en-us&gl=us) | The Windows Terminal is a modern, fast, efficient, powerful, and productive terminal application for users of command-line tools and shells like Command Prompt, PowerShell, and WSL. Its main features include multiple tabs, panes, Unicode and UTF-8 character support, a GPU accelerated text rendering engine, and custom themes, styles, and configurations. Terminal is just a more beautiful version of PowerShell 😁 |
</TabItem>
<TabItem value="mac" label="Mac">
| CLI Tool | Description |
|---|---|
| Mac Terminal (default) | On Mac, we recommend using the default terminal. |
</TabItem>
<TabItem value="linux" label="Linux">
| CLI Tool | Description |
|---|---|
| Linux Terminal (default) | On Linux, we recommend using the default terminal. |
</TabItem>
</Tabs>
---
### Version Control
Writing code typically requires version control to adequately track changes in sets of files. While you can use any version control that you feel is best fit for you and your job, here are the version control tools that we use and recommend:
| Version Control Tool | Description |
|---|---|
| [git](https://git-scm.com/) | Git is a free and open-source, distributed version control system designed to handle everything from small to very large projects. Git runs locally on your machine. |
| [GitHub](https://github.com) | GitHub is an internet hosting service for managing git in the cloud. We use GitHub on our team to collaborate amongst the other developers on our team, as well as with our community. |
---
### Programming Languages
| Language | Primary Platform Uses | Description |
|---|---|---|
| Typescript | <ul><li>SaaS Connectivity</li><li>Test</li></ul> | This is a description of why Typescript. |
---
### API Clients
API clients make it easy to call APIs without having to first write code. API clients are great for testing and getting familiar with APIs to get a better understanding of what the inputs/outputs are and how they work.
| API Client | Description |
|---|---|
| [Postman](https://www.postman.com/downloads/) | Postman is an API platform for building and using APIs. Postman simplifies each step of the API lifecycle and streamlines collaboration so you can create better APIs—faster. |
## Glossary
Identity is a complex topic and there are many terms used, and quite often! Please [refer to our glossary](https://documentation.sailpoint.com/saas/help/common/glossary.html) whenever possible if you aren't sure what something means.

View File

@@ -0,0 +1,192 @@
---
id: docs
title: IdentityNow
pagination_label: Introduction
sidebar_label: IdentityNow
sidebar_position: 1
sidebar_class_name: IdentityNow
hide_title: true
keywords:
[
"IdentityNow",
"development",
"developer",
"portal",
"getting started",
"docs",
"documentation",
]
description:
This is the introduction documentation to development on the IdentityNow
platform.
slug: /docs
tags: ["Introduction", "Getting Started"]
---
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
🧭 There are many different ways in which you are able to extend the IdentityNow
platform beyond what comes out of the box. Please, explore our documentation and
see what is possible! This documentation assumes that you are a current customer
or partner and already have access to the IdentityNow application.
:::info
Looking to become a partner? If you are interested in becoming a partner, be it
an ISV or Channel/Implementation partner,
[click here](https://www.sailpoint.com/partners/become-partner/).
:::
## Before You Get Started
Please read this introduction carefully, as it contains recommendations and
need-to-know information pertaining to all features of the IdentityNow platform.
### Authentication
Many of the interactions you have through our various features will have you
interacting with our APIs either directly or indirectly. It would be valuable to
familiarize yourself with [Authentication](../../api/authentication.md) on our
platform.
### Understanding JSON
JSON (JavaScript Object Notation) is a lightweight data-interchange format. It
is easy for humans to read and write. It is easy for machines to parse and
generate. JSON is at the heart of every API and development feature that
SailPoint offers in IdentityNow—usually either inputs or outputs to/from a
system.
[Learn more about JSON here](https://www.w3schools.com/js/js_json_intro.asp).
### Understanding Webhooks
A webhook in web development is a method of augmenting or altering the behavior
of a web page or web application with custom callbacks. These callbacks may be
maintained, modified, and managed by third-party users and developers who may
not necessarily be affiliated with the originating website or application. Our
[Event Triggers](docs/identity-now/event-triggers) are a form of webhook, for
example.
[Learn more about webhooks here](https://zapier.com/blog/what-are-webhooks/).
## Recommended Technologies
While you can use whichever development tools you are most comfortable with or
find most useful, we will recommend tools here for those that are new to
development.
:::tip
Our team, when developing documentation, example code/applications, videos, etc.
will almost always use one of the tools/languages listed below.
:::
### IDEs (Integrated Development Environments)
IDEs are great for consolidating different aspects of programming into one tool.
They're great for not only writing code, but managing your code as well. While
you can use any IDE you feel is best fit for you and the task, here is what we
use:
<Tabs groupId="operating-systems">
<TabItem value="win" label="Windows">
| IDE | Description |
| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| [VS Code](https://code.visualstudio.com/) | VS Code is a lightweight IDE that we believe is perfect for development on our IdentityNow platform. We also have great plug-in support from our community, like [this one](https://marketplace.visualstudio.com/items?itemName=yannick-beot-sp.vscode-sailpoint-identitynow)! |
| [IntelliJ](https://www.jetbrains.com/idea/) | If you happen to be writing in Java or developing Rules on our platform, we typically recommend IntelliJ. While Java development can be done in VS Code, you will have an easier time using an IDE that was purpose-built for Java. |
</TabItem>
<TabItem value="mac" label="Mac">
| IDE | Description |
| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| [VS Code](https://code.visualstudio.com/) | VS Code is a lightweight IDE that we believe is perfect for development on our IdentityNow platform. We also have great plug-in support from our community, like [this one](https://marketplace.visualstudio.com/items?itemName=yannick-beot-sp.vscode-sailpoint-identitynow)! |
| [IntelliJ](https://www.jetbrains.com/idea/) | If you happen to be writing in Java or developing Rules on our platform, we typically recommend IntelliJ. While Java development can be done in VS Code, you will have an easier time using an IDE that was purpose-built for Java. |
</TabItem>
<TabItem value="linux" label="Linux">
| IDE | Description |
| ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| [VS Code](https://code.visualstudio.com/) | VS Code is a lightweight IDE that we believe is perfect for development on our IdentityNow platform. We also have great plug-in support from our community, like [this one](https://marketplace.visualstudio.com/items?itemName=yannick-beot-sp.vscode-sailpoint-identitynow)! |
| [IntelliJ](https://www.jetbrains.com/idea/) | If you happen to be writing in Java or developing Rules on our platform, we typically recommend IntelliJ. While Java development can be done in VS Code, you will have an easier time using an IDE that was purpose-built for Java. |
</TabItem>
</Tabs>
---
### CLI Environments
When interacting with our platform or writing code related to IdentityNow, we
often use the CLI. While you can use any CLI that you feel is best fit for you
and your job, here are the CLI environments we use and recommend:
<Tabs groupId="operating-systems">
<TabItem value="win" label="Windows">
| CLI Tool | Description |
| -------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| Windows PowerShell | Windows PowerShell is a modern terminal on windows (also available on Mac/Linux) that offers versatile CLI, task automation, and configuration management options. |
| [Windows Terminal](https://apps.microsoft.com/store/detail/windows-terminal/9N0DX20HK701?hl=en-us&gl=us) | The Windows Terminal is a modern, fast, efficient, powerful, and productive terminal application for users of command-line tools and shells like Command Prompt, PowerShell, and WSL. Its main features include multiple tabs, panes, Unicode and UTF-8 character support, a GPU accelerated text rendering engine, and custom themes, styles, and configurations. Terminal is just a more beautiful version of PowerShell 😁 |
</TabItem>
<TabItem value="mac" label="Mac">
| CLI Tool | Description |
| ---------------------- | ------------------------------------------------ |
| Mac Terminal (default) | On Mac, we recommend using the default terminal. |
</TabItem>
<TabItem value="linux" label="Linux">
| CLI Tool | Description |
| ------------------------ | -------------------------------------------------- |
| Linux Terminal (default) | On Linux, we recommend using the default terminal. |
</TabItem>
</Tabs>
---
### Version Control
Writing code typically requires version control to adequately track changes in
sets of files. While you can use any version control that you feel is best fit
for you and your job, here are the version control tools that we use and
recommend:
| Version Control Tool | Description |
| ---------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| [git](https://git-scm.com/) | Git is a free and open-source, distributed version control system designed to handle everything from small to very large projects. Git runs locally on your machine. |
| [GitHub](https://github.com) | GitHub is an internet hosting service for managing git in the cloud. We use GitHub on our team to collaborate amongst the other developers on our team, as well as with our community. |
---
### Programming Languages
| Language | Primary Platform Uses | Description |
| ---------- | ------------------------------------------------ | ---------------------------------------- |
| Typescript | <ul><li>SaaS Connectivity</li><li>Test</li></ul> | This is a description of why Typescript. |
---
### API Clients
API clients make it easy to call APIs without having to first write code. API
clients are great for testing and getting familiar with APIs to get a better
understanding of what the inputs/outputs are and how they work.
| API Client | Description |
| --------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| [Postman](https://www.postman.com/downloads/) | Postman is an API platform for building and using APIs. Postman simplifies each step of the API lifecycle and streamlines collaboration so you can create better APIs—faster. |
## Glossary
Identity is a complex topic and there are many terms used, and quite often!
Please
[refer to our glossary](https://documentation.sailpoint.com/saas/help/common/glossary.html)
whenever possible if you aren't sure what something means.

View File

@@ -10,33 +10,47 @@ slug: /docs/saas-configuration
tags: ["SaaS Configuration"]
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
This is a guide about using the SailPoint SaaS Configuration APIs to import configurations into and export configurations from the SailPoint SaaS system. Use these APIs to get configurations in bulk in support of environmental promotion, go-live, or tenant-to-tenant configuration management processes and pipelines.
This is a guide about using the SailPoint SaaS Configuration APIs to import
configurations into and export configurations from the SailPoint SaaS system.
Use these APIs to get configurations in bulk in support of environmental
promotion, go-live, or tenant-to-tenant configuration management processes and
pipelines.
For more details around how to manage configurations, refer to [SailPoint SaaS Change Management and Deployment Best Practices](https://community.sailpoint.com/t5/IdentityNow-Articles/SailPoint-SaaS-Change-Management-and-Deployment-Best-Practices/ta-p/189871).
For more details around how to manage configurations, refer to
[SailPoint SaaS Change Management and Deployment Best Practices](https://community.sailpoint.com/t5/IdentityNow-Articles/SailPoint-SaaS-Change-Management-and-Deployment-Best-Practices/ta-p/189871).
## Audience
This document is intended for technically proficient administrators, implementers, integrators or even developers. No coding experience is necessary, but being able to understand JSON data structures and make REST API web-service calls is necessary to fully understand this guide.
This document is intended for technically proficient administrators,
implementers, integrators or even developers. No coding experience is necessary,
but being able to understand JSON data structures and make REST API web-service
calls is necessary to fully understand this guide.
## Supported Objects
| **Object** | **Object Type** | **Export** | **Import** |
| :-------------------------- | :--------------------- | :----------------------------------------------------------- | :----------------------------------------------------------- |
| **Object** | **Object Type** | **Export** | **Import** |
| :-------------------------- | :--------------------- | :------------------------------------------------------------------------------------------------------------ | :------------------------------------------------------------------------------------------------------------ |
| Event Trigger Subscriptions | `TRIGGER_SUBSCRIPTION` | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) |
| Identity Profiles | `IDENTITY_PROFILE` | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) |
| Rules | `RULE` | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) |
| Sources | `SOURCE` | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) |
| Transforms | `TRANSFORM` | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) | ![:check_mark:](https://pf-emoji-service--cdn.us-east-1.prod.public.atl-paas.net/atlassian/check_mark_32.png) |
:::tip
The available supported objects are also available via REST API! See List Configuration Objects in the **API Reference** section of this document.
The available supported objects are also available via REST API! See List
Configuration Objects in the **API Reference** section of this document.
:::
**Rule Import and Export -** Rules can be exported from one tenant and imported into another. Cloud rules have already been reviewed and installed in other tenants, and connector rules do not require a rule review. During the import and export process, rules cannot be changed in the migration process because these are validated by the usage of `jwsHeader` and `jwsSignature` in the object.
**Rule Import and Export -** Rules can be exported from one tenant and imported
into another. Cloud rules have already been reviewed and installed in other
tenants, and connector rules do not require a rule review. During the import and
export process, rules cannot be changed in the migration process because these
are validated by the usage of `jwsHeader` and `jwsSignature` in the object.
## Exporting Configurations
@@ -49,12 +63,27 @@ The available supported objects are also available via REST API! See List Confi
![img](./img/sp-config-export.png)
1. **Start Export** - Start the export process by configuring a JSON payload for the export options. This payload will be sent to `POST /beta/sp-config/export`.
2. **Response with Export Status** - An export status will be given in response. This contains a `jobId` and a `status` to be used to subsequently monitor the process. Initially, this may have a status of `NOT_STARTED`.
3. **Get Export Status** - Using the `jobId` from the previous status, call `GET /beta/sp-config/export/{id}` where the `{id}` is the `jobId`.
4. **Response with Export Status** - An export status will be given in response. This contains a `jobId` and a `status` to be used to subsequently monitor the process. After a period of time, the process `status` should move to either `COMPLETE` or `FAILED`. Depending on the number of objects being exported, this could take a while. It may be necessary to iterate over steps 3 and 4 until the status reflects a completion. If it takes too long, the export process may expire.
5. **Get Export Results** - Once the status is `COMPLETE`, download the export results by calling `GET /beta/sp-config/export/{id}/download` where the `{id}` is the `jobId`.
6. **Response with Export Results** - In response, the export process will produce a set of JSON objects you can download as an export result set. These will reflect the objects that were selected in the export options earlier.
1. **Start Export** - Start the export process by configuring a JSON payload for
the export options. This payload will be sent to
`POST /beta/sp-config/export`.
2. **Response with Export Status** - An export status will be given in response.
This contains a `jobId` and a `status` to be used to subsequently monitor the
process. Initially, this may have a status of `NOT_STARTED`.
3. **Get Export Status** - Using the `jobId` from the previous status, call
`GET /beta/sp-config/export/{id}` where the `{id}` is the `jobId`.
4. **Response with Export Status** - An export status will be given in response.
This contains a `jobId` and a `status` to be used to subsequently monitor the
process. After a period of time, the process `status` should move to either
   `COMPLETE` or `FAILED`. Depending on the number of objects being exported,
   this could take a while. It may be necessary to iterate over steps 3 and 4
until the status reflects a completion. If it takes too long, the export
process may expire.
5. **Get Export Results** - Once the status is `COMPLETE`, download the export
results by calling `GET /beta/sp-config/export/{id}/download` where the
`{id}` is the `jobId`.
6. **Response with Export Results** - In response, the export process will
produce a set of JSON objects you can download as an export result set. These
will reflect the objects that were selected in the export options earlier.
## Importing Configurations
@@ -67,12 +96,27 @@ The available supported objects are also available via REST API! See List Confi
![img](./img/sp-config-import.png)
1. **Start Import** - Start the import process by configuring a JSON payload for the import options. This will then be sent to `POST /beta/sp-config/import`.
2. **Response with Import Status** - An import status will be given in response. This contains a `jobId` and a `status` to be used to subsequently monitor the process. Initially this might have a status of `NOT_STARTED`.
3. **Get Import Status** - Using the `jobId` from the previous status, call `GET /beta/sp-config/import/{id}` where the `{id}` is the `jobId`.
4. **Response with Import Status** - An import status will be given in response. This contains a `jobId` and a `status` to be used to subsequently monitor the process. After a period of time, the process `status` will move to either `COMPLETE` or `FAILED`. Depending on the number of objects being imported, this could take a while. It may be necessary to iterate over steps 3 and 4 until the status reflects a completion. If it takes too long, the import process may expire.
5. **Get Import Results** - Once the status is `COMPLETE`, download the import results by calling `GET /beta/sp-config/import/{id}/download` where the `{id}` is the `jobId`.
6. **Response with Import Results** - In response, the import process should produce a listing of objects that were successfully imported, as well as any errors, warnings, or information about the import process. This result set will reflect the objects that were selected to be imported earlier.
1. **Start Import** - Start the import process by configuring a JSON payload for
the import options. This will then be sent to `POST /beta/sp-config/import`.
2. **Response with Import Status** - An import status will be given in response.
This contains a `jobId` and a `status` to be used to subsequently monitor the
process. Initially this might have a status of `NOT_STARTED`.
3. **Get Import Status** - Using the `jobId` from the previous status, call
`GET /beta/sp-config/import/{id}` where the `{id}` is the `jobId`.
4. **Response with Import Status** - An import status will be given in response.
This contains a `jobId` and a `status` to be used to subsequently monitor the
process. After a period of time, the process `status` will move to either
   `COMPLETE` or `FAILED`. Depending on the number of objects being imported,
   this could take a while. It may be necessary to iterate over steps 3 and 4
until the status reflects a completion. If it takes too long, the import
process may expire.
5. **Get Import Results** - Once the status is `COMPLETE`, download the import
results by calling `GET /beta/sp-config/import/{id}/download` where the
`{id}` is the `jobId`.
6. **Response with Import Results** - In response, the import process should
   produce a listing of objects that were successfully imported, as well as any
   errors,
warnings, or information about the import process. This result set will
reflect the objects that were selected to be imported earlier.
## API Reference Guide
@@ -99,6 +143,7 @@ Lists all available objects that can be imported and exported into the system.
GET /beta/sp-config/config-objects
Authorization: Bearer {token}
```
</TabItem>
<TabItem value="response" label="Response">
@@ -185,6 +230,7 @@ Content-Type: application/json
}
]
```
</TabItem>
</Tabs>
@@ -240,6 +286,7 @@ Content-Type: application/json
}
}
```
</TabItem>
<TabItem value="response" label="Response">
@@ -259,6 +306,7 @@ Content-Type: application/json
"completed": null
}
```
</TabItem>
</Tabs>
@@ -275,6 +323,7 @@ Gets the status of an export process.
GET /beta/sp-config/export/{id}
Authorization: Bearer {token}
```
</TabItem>
<TabItem value="response" label="Response">
@@ -294,6 +343,7 @@ Content-Type: application/json
"completed": "2021-08-27T15:55:37.583Z"
}
```
</TabItem>
</Tabs>
@@ -310,6 +360,7 @@ Gets the results of an export process.
GET /beta/sp-config/export/{id}/download
Authorization: Bearer {token}
```
</TabItem>
<TabItem value="response" label="Response">
@@ -452,6 +503,7 @@ Content-Type: application/json
]
}
```
</TabItem>
</Tabs>
@@ -471,6 +523,7 @@ Content-Type: multipart/form-data
data: (File) data.json
```
</TabItem>
<TabItem value="response" label="Response">
@@ -490,13 +543,19 @@ Content-Type: application/json
"completed": null
}
```
</TabItem>
</Tabs>
:::tip
Import also has a “preview” option you can use to see what an import will look like without actually having to import and change your tenant. Any errors discovered during reference or resource resolution will be provided. To use this, simply set query option `preview` to `true`.
Import also has a “preview” option you can use to see what an import will look
like without actually having to import and change your tenant. Any errors
discovered during reference or resource resolution will be provided. To use
this, simply set query option `preview` to `true`.
Example: POST /beta/sp-config/import?preview=true
:::
### Import Status
@@ -512,6 +571,7 @@ Gets the status of an import process.
GET /beta/sp-config/import/{id}
Authorization: Bearer {token}
```
</TabItem>
<TabItem value="response" label="Response">
@@ -531,6 +591,7 @@ Content-Type: application/json
"completed": "2021-06-04T02:59:57.563Z"
}
```
</TabItem>
</Tabs>
@@ -547,6 +608,7 @@ Gets the results of an import process.
GET /beta/sp-config/import/{id}/download
Authorization: Bearer {token}
```
</TabItem>
<TabItem value="response" label="Response">
@@ -582,5 +644,6 @@ Content-Type: application/json
}
}
```
</TabItem>
</Tabs>

View File

@@ -0,0 +1,310 @@
{
"info": {
"_postman_id": "c8ba3692-1d77-410e-b908-7eded1822fa7",
"name": "SaaS Connectivity",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "19269819"
},
"item": [
{
"name": "Test local stdTestConnection",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:test-connection\",\r\n \"input\": {},\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountList",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:list\",\r\n \"input\": {},\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountRead",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:read\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountCreate",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:create\",\r\n \"input\": {\r\n \"attributes\": {\r\n \"id\": \"suzie.baker\",\r\n \"email\": \"suzie.baker@domain.com\",\r\n \"department\": \"external\",\r\n \"displayName\": \"Suzie Baker\",\r\n \"password\": \"test\",\r\n \"entitlements\": [\r\n \"user\",\r\n \"administrator\"\r\n ]\r\n }\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountUpdate",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:update\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\",\r\n \"changes\": [\r\n {\r\n \"op\": \"Remove\",\r\n \"attribute\": \"entitlements\",\r\n \"value\": \"user\"\r\n }\r\n ]\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountDelete",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:delete\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdEntitlementList",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:entitlement:list\",\r\n \"input\": {\r\n \"type\": \"group\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdEntitlementRead",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:entitlement:read\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\",\r\n \"type\": \"group\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountDisable",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:disable\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountEnable",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:enable\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountUnlock",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:unlock\",\r\n \"input\": {\r\n \"key\": {\"simple\": { \"id\": \"john.doe\"}},\r\n \"identity\": \"john.doe\"\r\n },\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
},
{
"name": "Test local stdAccountDiscoverSchema",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"type\": \"std:account:discover-schema\",\r\n \"input\": {},\r\n \"config\": {\r\n \"token\": \"apikey\"\r\n }\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "http://localhost:3000",
"protocol": "http",
"host": [
"localhost"
],
"port": "3000"
}
},
"response": []
}
]
}

View File

@@ -6,7 +6,8 @@ sidebar_label: Common CLI Commands
sidebar_position: 3
sidebar_class_name: commonCliCommands
keywords: ["connectivity", "connectors", "commands", "cli"]
description: These are the CLI commands most commonly used when building SaaS Connectors.
description:
These are the CLI commands most commonly used when building SaaS Connectors.
slug: /docs/saas-connectivity/common-cli-commands
tags: ["Connectivity"]
---
@@ -14,19 +15,24 @@ tags: ["Connectivity"]
Below is a list of commands and their usages:
- **Development**
- Create a project on your local system: ```sp conn init "my-project"```
- Test your connector locally: ```npm run dev```
- Create a project on your local system: `sp conn init "my-project"`
- Test your connector locally: `npm run dev`
- **Deployment**
- Create an empty connector in your IDN Org (used to get id so you can upload): ```sp conn create "my-project"```
- Build a project: ```npm run pack-zip```
- Upload your connector to your IDN Org: ```sp conn upload -c [connectorID | connectorAlias] -f dist/[connector filename].zip```
- Create an empty connector in your IDN Org (used to get id so you can
upload): `sp conn create "my-project"`
- Build a project: `npm run pack-zip`
- Upload your connector to your IDN Org:
`sp conn upload -c [connectorID | connectorAlias] -f dist/[connector filename].zip`
- **Exploring**
- List connectors in your IDN Org: ```sp conn list```
- List your connector tags: ```sp conn tags list -c [connectorID | connectorAlias]```
- List connectors in your IDN Org: `sp conn list`
- List your connector tags:
`sp conn tags list -c [connectorID | connectorAlias]`
- **Testing and Debugging**
- Test your connector on the IDN Org: ```sp connectors invoke [action] -c [connectorID | connectorAlias] -p config.json```
- Get a list of actions: ```sp conn invoke -h```
- Run read-only integration tests against your connector: ```sp conn validate -p config.json -c [connectorID | connectorAlias] -r```
- Tail IDN Org connector logs: ```sp conn logs tail```
- Test your connector on the IDN Org:
`sp connectors invoke [action] -c [connectorID | connectorAlias] -p config.json`
- Get a list of actions: `sp conn invoke -h`
- Run read-only integration tests against your connector:
`sp conn validate -p config.json -c [connectorID | connectorAlias] -r`
- Tail IDN Org connector logs: `sp conn logs tail`
- **Delete**
- Delete a connector: ```sp conn delete -c [connectorID | connectorAlias]```
- Delete a connector: `sp conn delete -c [connectorID | connectorAlias]`

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-create
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:---------------------------:|
| Input | StdAccountCreateInput |
| Output | StdAccountCreateOutput |
| Input/Output | Data Type |
| :----------- | :--------------------: |
| Input | StdAccountCreateInput |
| Output | StdAccountCreateOutput |
### Example StdAccountCreateInput
@@ -57,17 +57,32 @@ tags: ["Connectivity", "Connector Command"]
## Description
The account create command triggers whenever IDN is told to provision entitlements for an identity on the target source, but no account for the identity on the target source exists yet. For example, if you create an access profile that grants a group on the target source and then add that access profile to a role, any identity matching that role's membership criteria will be granted to the group. IDN determines which identities do not have accounts on the target source and triggers the account create command for each identity. If an identity already has an account, then it invokes the account update command.
The account create command triggers whenever IDN is told to provision
entitlements for an identity on the target source, but no account for the
identity on the target source exists yet. For example, if you create an access
profile that grants a group on the target source and then add that access
profile to a role, any identity matching that role's membership criteria will be
granted to the group. IDN determines which identities do not have accounts on
the target source and triggers the account create command for each identity. If
an identity already has an account, then it invokes the account update command.
## The Provisioning Plan
The account create command accepts a provisioning plan from IDN and creates the corresponding account(s) in the target source. When you configure your source in IDN, you must set up Create Profile to tell IDN how to provision new accounts for your source.
The account create command accepts a provisioning plan from IDN and creates the
corresponding account(s) in the target source. When you configure your source in
IDN, you must set up Create Profile to tell IDN how to provision new accounts
for your source.
You can create the provisioning plan through the ```accountCreateTemplate``` in the ```connector-spec.json``` file, and you can also modify its behavior in IDN using the create profile screen:
You can create the provisioning plan through the `accountCreateTemplate` in the
`connector-spec.json` file, and you can also modify its behavior in IDN using
the create profile screen:
![Account Create](./img/account_create_idn.png)
Create Profile provides the instructions for the provisioning plan and determines which attributes are sent to your connector code. For example, if all the account attributes in the preceding image are configured for a value, then the following JSON payload is sent to your connector:
Create Profile provides the instructions for the provisioning plan and
determines which attributes are sent to your connector code. For example, if all
the account attributes in the preceding image are configured for a value, then
the following JSON payload is sent to your connector:
```javascript
{
@@ -90,7 +105,9 @@ You can create the provisioning plan through the ```accountCreateTemplate``` in
}
```
The provisioning plan does not include any disabled attributes. In the earlier image, ```password``` is disabled, so the payload to your connector does not include a field for ```password```:
The provisioning plan does not include any disabled attributes. In the earlier
image, `password` is disabled, so the payload to your connector does not
include a field for `password`:
```javascript
{
@@ -112,9 +129,10 @@ The provisioning plan does not include any disabled attributes. In the earlier i
}
```
The provisioning plan presents multi-valued entitlements in two different ways:
The provisioning plan presents multi-valued entitlements in two different ways:
If a multi-valued entitlement, like groups, has only one value, then the provisioning plan represents it as a string value:
If a multi-valued entitlement, like groups, has only one value, then the
provisioning plan represents it as a string value:
```javascript
{
@@ -133,7 +151,8 @@ If a multi-valued entitlement, like groups, has only one value, then the provisi
}
```
If a multi-valued entitlement has more than one value, then the plan represents it as an array:
If a multi-valued entitlement has more than one value, then the plan represents
it as an array:
```javascript
{
@@ -155,7 +174,10 @@ If a multi-valued entitlement has more than one value, then the plan represents
}
```
Your connector code must handle the possibility of both cases. The following code example from [AirtableAccount.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/models/AirtableAccount.ts) shows how to handle a multi-valued attribute:
Your connector code must handle the possibility of both cases. The following
code example from
[AirtableAccount.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/models/AirtableAccount.ts)
shows how to handle a multi-valued attribute:
```javascript
public static createWithStdAccountCreateInput(record: StdAccountCreateInput): AirtableAccount {
@@ -177,27 +199,53 @@ public static createWithStdAccountCreateInput(record: StdAccountCreateInput): Ai
## The return object
When the account is returned to IDN, any values you set are updated in IDN. So if an account ID is auto-generated on the source system, you must send the account ID back to IDN so IDN is aware of it for future account update activities. This is useful for the compound key type.
When the account is returned to IDN, any values you set are updated in IDN. So
if an account ID is auto-generated on the source system, you must send the
account ID back to IDN so IDN is aware of it for future account update
activities. This is useful for the compound key type.
## Password Handling
There are three main ways to handle passwords on a source:
1. SSO, LDAP, or other federated authentication mechanisms are the preferred means of providing user login on a target source. If your source can integrate with a federated login service, use that service. If your source requires you to provide a password when you create accounts, even with a federated login, it is best to create a strong, random password. Your users will use the federated login, so they never need to know this password.
1. SSO, LDAP, or other federated authentication mechanisms are the preferred
means of providing user login on a target source. If your source can
integrate with a federated login service, use that service. If your source
requires you to provide a password when you create accounts, even with a
federated login, it is best to create a strong, random password. Your users
will use the federated login, so they never need to know this password.
2. If your source has a password reset feature at login, it is best to initially create the account with a strong, random password the user does not have access to. Once the account is created, make the user request a password reset to set their own password. This method is the safest alternative to federated authentication because the initial password is strong and never known to anyone, and the user can generate his or her own password through secure channels.
2. If your source has a password reset feature at login, it is best to initially
create the account with a strong, random password the user does not have
access to. Once the account is created, make the user request a password
reset to set their own password. This method is the safest alternative to
federated authentication because the initial password is strong and never
known to anyone, and the user can generate his or her own password through
secure channels.
3. The least secure method is setting a static password in the create profile that is well known among your users. This approach is not recommended. It does not require any automated communications with your users.
3. The least secure method is setting a static password in the create profile
that is well known among your users. This approach is not recommended. It
does not require any automated communications with your users.
There are two ways you can generate random passwords:
1. Use the “Create Password” generator in Create Profile. (This can also be configured in the ```accountCreateTemplate```)
1. Use the “Create Password” generator in Create Profile. (This can also be
configured in the `accountCreateTemplate`)
![Create Password](./img/create_password_idn.png)
2. Disable the 'password' field.
Use Create Profile and generate a random password in code. There are some JavaScript libraries that can generate random strings suitable for passwords, like [random-string](https://www.npmjs.com/package/random-string) and [crypto-random-string](https://www.npmjs.com/package/crypto-random-string). Import either one of these libraries into your code to use them. The following example from [airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts) uses a ternary operator to ensure the password is always provided. If the provisioning plan provides a password, use that value. If the provisioning plan does not provide a password, generate a random one.
Use Create Profile and generate a random password in code. There are some
JavaScript libraries that can generate random strings suitable for passwords,
like [random-string](https://www.npmjs.com/package/random-string) and
[crypto-random-string](https://www.npmjs.com/package/crypto-random-string).
Import either one of these libraries into your code to use them. The following
example from
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts)
uses a ternary operator to ensure the password is always provided. If the
provisioning plan provides a password, use that value. If the provisioning plan
does not provide a password, generate a random one.
```javascript
async createAccount(input: StdAccountCreateInput): Promise<AirtableAccount> {
@@ -213,7 +261,7 @@ async createAccount(input: StdAccountCreateInput): Promise<AirtableAccount> {
"lastName": account.lastName,
"locked": account.locked ? 'true' : 'false',
"password": account.password ? account.password : crypto.randomBytes(20).toString('hex'),
"entitlements": account.entitlments.join(',')
"entitlements": account.entitlments.join(',')
}).then(record => {
const airtableRecord = AirtableAccount.createWithRecords(record)
return airtableRecord
@@ -226,7 +274,10 @@ async createAccount(input: StdAccountCreateInput): Promise<AirtableAccount> {
## Testing in IdentityNow
One way to test whether the account create code works in IDN is to set up an access profile and role that grants members an entitlement from the connector's target source. Start by creating an access profile that grants one or more entitlements from the target source.
One way to test whether the account create code works in IDN is to set up an
access profile and role that grants members an entitlement from the connector's
target source. Start by creating an access profile that grants one or more
entitlements from the target source.
![Testing 1](./img/testing1.png)
@@ -234,8 +285,11 @@ Next, create a role that uses the access profile created in the previous step.
![Testing 2](./img/testing2.png)
Modify the role membership to use Identity List and select one or more users that do not have accounts in the target source yet.
Modify the role membership to use Identity List and select one or more users
that do not have accounts in the target source yet.
![Testing 3](./img/testing3.png)
Click the Update button in the upper right corner to initiate the account provisioning process. Doing so creates the account(s) on the target source once the process is complete.
Click the Update button in the upper right corner to initiate the account
provisioning process. Doing so creates the account(s) on the target source once
the process is complete.

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-delete
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:---------------------------:|
| Input | StdAccountDeleteInput |
| Output | StdAccountDeleteOutput |
| Input/Output | Data Type |
| :----------- | :--------------------: |
| Input | StdAccountDeleteInput |
| Output | StdAccountDeleteOutput |
### Example StdAccountDeleteInput
@@ -29,14 +29,22 @@ tags: ["Connectivity", "Connector Command"]
### Example StdAccountDeleteOutput
```javascript
{}
{
}
```
## Description
The account delete command sends one attribute from IDN, the identity to delete. This can be passed to your connector to delete the account from the source system.
The account delete command sends one attribute from IDN, the identity to delete.
This can be passed to your connector to delete the account from the source
system.
Enable account delete in IDN through a BeforeProvisioning rule. The connector honors whichever operation the provisioning plan sends. For more information, see the [documentation](https://community.sailpoint.com/t5/IdentityNow-Articles/IdentityNow-Rule-Guide/ta-p/76665) and an [example implementation](https://community.sailpoint.com/t5/IdentityNow-Wiki/IdentityNow-Rule-Guide-Before-Provisioning-Rule/ta-p/77415).
Enable account delete in IDN through a BeforeProvisioning rule. The connector
honors whichever operation the provisioning plan sends. For more information,
see the
[documentation](https://community.sailpoint.com/t5/IdentityNow-Articles/IdentityNow-Rule-Guide/ta-p/76665)
and an
[example implementation](https://community.sailpoint.com/t5/IdentityNow-Wiki/IdentityNow-Rule-Guide-Before-Provisioning-Rule/ta-p/77415).
The following snippet shows an example of account delete command implementation:

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-discover
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:----------------------------:|
| Input | undefined |
| Output | StdTestConnectionOutput |
| Input/Output | Data Type |
| :----------- | :---------------------: |
| Input | undefined |
| Output | StdTestConnectionOutput |
### Example StdTestConnectionOutput
@@ -51,11 +51,27 @@ tags: ["Connectivity", "Connector Command"]
## Description
The account discover schema command tells IDN to dynamically create the account schema for the source rather than use the account schema provided by the connector in connector-spec.json. It is often ideal to statically define the account schema because it is generally more performant and easier to develop and reason about the code. However, some sources have schemas that can be different for each customer deployment. It can also be difficult to determine which account attributes to statically expose, which requires the schema to be dynamically generated. SalesForce is an example of a source that can have thousands of account attributes, which makes it impractical to statically define a set of attributes that satisfies all connector users. Although the SalesForce connector defines a standard set of account attributes out of the box, it also allows schema discovery for users looking for more attributes.
The account discover schema command tells IDN to dynamically create the account
schema for the source rather than use the account schema provided by the
connector in connector-spec.json. It is often ideal to statically define the
account schema because it is generally more performant and easier to develop and
reason about the code. However, some sources have schemas that can be different
for each customer deployment. It can also be difficult to determine which
account attributes to statically expose, which requires the schema to be
dynamically generated. SalesForce is an example of a source that can have
thousands of account attributes, which makes it impractical to statically define
a set of attributes that satisfies all connector users. Although the SalesForce
connector defines a standard set of account attributes out of the box, it also
allows schema discovery for users looking for more attributes.
## Implementation
If your connector requires dynamic schema discovery, you must add std:account:discover-schema to the list of commands in connector-spec.json. Because the account schema is dynamic, you do not need to specify an accountSchema or an accountCreateTemplate object in connector-spec.json. Your connector-spec.json file will look similar to this example from the [Airtable connector](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/connector-spec.json).
If your connector requires dynamic schema discovery, you must add
std:account:discover-schema to the list of commands in connector-spec.json.
Because the account schema is dynamic, you do not need to specify an
accountSchema or an accountCreateTemplate object in connector-spec.json. Your
connector-spec.json file will look similar to this example from the
[Airtable connector](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/connector-spec.json).
```javascript
{
@@ -204,7 +220,10 @@ If your connector requires dynamic schema discovery, you must add std:account:di
## Programmatically build an account schema
There are many ways to programmatically build the account schema for a source. This section will cover one such method. To start, register your command in the main connector file, [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts).
There are many ways to programmatically build the account schema for a source.
This section will cover one such method. To start, register your command in the
main connector file,
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts).
```javascript
export const connector = async () => {
@@ -229,7 +248,13 @@ export const connector = async () => {
}
```
Next, implement the ```discoverSchema()``` function in your client code. The following function calls the necessary endpoints to get the full schema of the user account you want to represent in IDN. After you receive a response from your call, you must build your account schema object that will return to IDN. The response has a structure like the accountSchema property in the connector-spec.json file. The following is an example from [airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts).
Next, implement the `discoverSchema()` function in your client code. The
following function calls the necessary endpoints to get the full schema of the
user account you want to represent in IDN. After you receive a response from
your call, you must build your account schema object that will return to IDN.
The response has a structure like the accountSchema property in the
connector-spec.json file. The following is an example from
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts).
```javascript
async getAccountSchema(): Promise<StdAccountDiscoverSchemaOutput> {
@@ -341,15 +366,22 @@ This code produces the following payload that will be sent back to IDN.
}
```
There are many properties in this payload, so you may want to remove some, but it can be hard to determine which properties to keep in a dynamic way. If you can programmatically determine which properties to remove, you can alter the ```discoverSchema()``` function to remove them.
There are many properties in this payload, so you may want to remove some, but
it can be hard to determine which properties to keep in a dynamic way. If you
can programmatically determine which properties to remove, you can alter the
`discoverSchema()` function to remove them.
## Test in IdentityNow
To test the account discover schema command in IDN, ensure that you upload your latest connector code and create a new source in IDN. After you configure and test your source connection, go to the Account Schema page. You will see an empty schema.
To test the account discover schema command in IDN, ensure that you upload your
latest connector code and create a new source in IDN. After you configure and
test your source connection, go to the Account Schema page. You will see an
empty schema.
![Discover Schema 1](./img/discover_schema_idn1.png)
To discover the schema for this source, click the Options dropdown in the upper right and select Discover Schema.
To discover the schema for this source, click the Options dropdown in the
upper right and select Discover Schema.
![Discover Schema 2](./img/discover_schema_idn2.png)
@@ -357,6 +389,8 @@ IDN then asks you to assign attributes to Account ID and 'Account Name.'
![Discover Schema 3](./img/discover_schema_idn3.png)
Save the schema. You now have a populated account schema. A user of this source must provide further details, like descriptions and identifying which attributes are entitlements.
Save the schema. You now have a populated account schema. A user of this source
must provide further details, like descriptions and identifying which attributes
are entitlements.
![Discover Schema 4](./img/discover_schema_idn4.png)

View File

@@ -9,12 +9,12 @@ slug: /docs/saas-connectivity/commands/account-enable
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:------------------------------------:|
| Input - Enable | StdAccountEnableInput |
| Output - Enable | StdAccountEnableOutput |
| Input - Disable | StdAccountDisableInput |
| Output -Disable | StdAccountDisableOutput |
| Input/Output     | Data Type               |
| :--------------- | :---------------------: |
| Input - Enable   | StdAccountEnableInput   |
| Output - Enable  | StdAccountEnableOutput  |
| Input - Disable  | StdAccountDisableInput  |
| Output - Disable | StdAccountDisableOutput |
### Example StdAccountEnableInput/StdAccountDisableInput
@@ -50,13 +50,29 @@ tags: ["Connectivity", "Connector Command"]
```
## Description
You typically invoke the account enable and account disable commands during the joiner, mover, leaver (JML) lifecycle. An identitys leaving from the organization or change to a role that does not require access to one or more accounts triggers the account disable command. An identitys rejoining the organization or move to a role that grants access to a previously disabled account triggers the account enable command.
Disabling accounts is generally preferred if the source supports account disabling so the account data remains for later reactivation or inspection. If the source does not support account disabling or deleting accounts is preferred when an identity leaves the organization, the connector performs the necessary steps to delete an account with the account disable function.
You typically invoke the account enable and account disable commands during the
joiner, mover, leaver (JML) lifecycle. An identitys leaving from the
organization or change to a role that does not require access to one or more
accounts triggers the account disable command. An identitys rejoining the
organization or move to a role that grants access to a previously disabled
account triggers the account enable command.
>🚧 It is important to note that although SaaS Connectivity supports the account delete command, IDN never sends the account delete command, only the account enable command. The connectors developer determines the appropriate action for account disable on the source.
Disabling accounts is generally preferred if the source supports account
disabling so the account data remains for later reactivation or inspection. If
the source does not support account disabling or deleting accounts is preferred
when an identity leaves the organization, the connector performs the necessary
steps to delete an account with the account disable function.
Account enable/disable is similar to implementing the account update command. If you have implemented your source call to modify any of the values on your source, then you can use the same method to implement the command. The following code implements enable and disable:
> 🚧 It is important to note that although SaaS Connectivity supports the
> account delete command, IDN never sends the account delete command, only the
> account enable command. The connectors developer determines the appropriate
> action for account disable on the source.
Account enable/disable is similar to implementing the account update command. If
you have implemented your source call to modify any of the values on your
source, then you can use the same method to implement the command. The following
code implements enable and disable:
```javascript
.stdAccountDisable(async (context: Context, input: StdAccountDisableInput, res: Response<StdAccountDisableOutput>) => {
@@ -80,4 +96,4 @@ Account enable/disable is similar to implementing the account update command. If
account = await airtable.changeAccount(account, change)
res.send(account.toStdAccountEnableOutput())
})
```
```

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-list
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:-------------------------:|
| Input | undefined |
| Output | StdAccountListOutput |
| Input/Output | Data Type |
| :----------- | :------------------: |
| Input | undefined |
| Output | StdAccountListOutput |
### Example StdAccountListOutput
@@ -39,13 +39,24 @@ tags: ["Connectivity", "Connector Command"]
## Description
The account list command aggregates all accounts from the target source into IdentityNow. IDN calls this command during a manual or scheduled account aggregation.
The account list command aggregates all accounts from the target source into
IdentityNow. IDN calls this command during a manual or scheduled account
aggregation.
![Account List](./img/account_list_idn.png)
## Implementation
For you to be able to implement this endpoint, the web service must expose an API for listing user accounts and entitlements (i.e. roles or groups). Sometimes, a target sources API has a single endpoint providing all the attributes and entitlements a source account contains. However, some APIs may break these attributes and entitlements into separate API endpoints, requiring you to make multiple calls to gather all an account's necessary data. The following code from [airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts) shows the necessary steps to create a complete account from the various endpoints the API offers:
For you to be able to implement this endpoint, the web service must expose an
API for listing user accounts and entitlements (i.e. roles or groups).
Sometimes, a target sources API has a single endpoint providing all the
attributes and entitlements a source account contains. However, some APIs may
break these attributes and entitlements into separate API endpoints, requiring
you to make multiple calls to gather all an account's necessary data. The
following code from
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts)
shows the necessary steps to create a complete account from the various
endpoints the API offers:
```javascript
async getAllAccounts(): Promise<AirtableAccount[]> {
@@ -63,7 +74,9 @@ async getAllAccounts(): Promise<AirtableAccount[]> {
}
```
The following code snippet from [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts) shows how to register the account list command on the connector object:
The following code snippet from
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts)
shows how to register the account list command on the connector object:
```javascript
export const connector = async () => {
@@ -85,7 +98,9 @@ export const connector = async () => {
...
```
IDN expects each user in the target source to be converted into a format IDN understands. The specific attributes the web service returns depend on what your source provides.
IDN expects each user in the target source to be converted into a format IDN
understands. The specific attributes the web service returns depend on what your
source provides.
```javascript
public toStdAccountListOutput(): StdAccountListOutput {
@@ -110,7 +125,9 @@ private buildStandardObject(): StdAccountListOutput | StdAccountCreateOutput | S
}
```
The result of the account list command is not an array of objects but several individual objects. This is the format IDN expects, so if you see something like the following result while testing, it is normal:
The result of the account list command is not an array of objects but several
individual objects. This is the format IDN expects, so if you see something like
the following result while testing, it is normal:
```javascript
{

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-read
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:-------------------------:|
| Input | StdAccountReadInput |
| Output | StdAccountReadOutput |
| Input/Output | Data Type |
| :----------- | :------------------: |
| Input | StdAccountReadInput |
| Output | StdAccountReadOutput |
### Example StdAccountReadInput
@@ -49,13 +49,18 @@ tags: ["Connectivity", "Connector Command"]
## Description
The account read command aggregates a single account from the target source into IdentityNow. IDN can call this command during a “one-off” account refresh, which you can trigger by aggregating an individual account in IDN.
The account read command aggregates a single account from the target source into
IdentityNow. IDN can call this command during a “one-off” account refresh, which
you can trigger by aggregating an individual account in IDN.
![Account Read](./img/account_read_idn.png)
## Implementation
Implementation of account read is similar to account list's implementation, except the code only needs to get one account, not all the accounts. The following snippet is from [airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts):
Implementation of account read is similar to account list's implementation,
except the code only needs to get one account, not all the accounts. The
following snippet is from
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts):
```javascript
async getAccount(identity: SimpleKeyType | CompoundKeyType): Promise<AirtableAccount> {
@@ -83,9 +88,14 @@ async getAccount(identity: SimpleKeyType | CompoundKeyType): Promise<AirtableAcc
}
```
One special case of this command is the ```NotFound``` type. On line 20, if an account is not found, the ```ConnectorError``` is thrown with the ```ConnectorErrorType.NotFound``` type. This tells IDN the account does not exist, and IDN then triggers the account create logic to generate the account.
One special case of this command is the `NotFound` type. On line 20, if an
account is not found, the `ConnectorError` is thrown with the
`ConnectorErrorType.NotFound` type. This tells IDN the account does not exist,
and IDN then triggers the account create logic to generate the account.
The following code snippet from [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts) shows how to register the account read command on the connector object:
The following code snippet from
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts)
shows how to register the account read command on the connector object:
```javascript
// Connector must be exported as module property named connector

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-unlock
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:-------------------------:|
| Input | StdAccountUnlockInput |
| Output | StdAccountUnlockOutput |
| Input/Output | Data Type |
| :----------- | :--------------------: |
| Input | StdAccountUnlockInput |
| Output | StdAccountUnlockOutput |
### Example StdAccountUnlockInput
@@ -49,9 +49,13 @@ tags: ["Connectivity", "Connector Command"]
## Description
The account lock and account unlock commands provide ways to temporarily prevent access to an account. IDN only supports the unlock command, so accounts must be locked on the source level, but they can be unlocked through IDN, and IDN can store the account's status.
The account lock and account unlock commands provide ways to temporarily prevent
access to an account. IDN only supports the unlock command, so accounts must be
locked on the source level, but they can be unlocked through IDN, and IDN can
store the account's status.
Implementing account unlock is similar to the other commands that update attributes on an account. The following code unlocks an account:
Implementing account unlock is similar to the other commands that update
attributes on an account. The following code unlocks an account:
```javascript
.stdAccountUnlock(async (context: Context, input: StdAccountUnlockInput, res: Response<StdAccountUnlockOutput>) => {

View File

@@ -9,10 +9,10 @@ slug: /docs/saas-connectivity/commands/account-update
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:---------------------------:|
| Input | StdAccountUpdateInput |
| Output | StdAccountUpdateOutput |
| Input/Output | Data Type |
| :----------- | :--------------------: |
| Input | StdAccountUpdateInput |
| Output | StdAccountUpdateOutput |
### Example StdAccountUpdateInput
@@ -58,19 +58,39 @@ tags: ["Connectivity", "Connector Command"]
## Description
The account update command triggers whenever IDN is told to modify an identity's attributes or entitlements on the target source. For example, granting an identity a new entitlement through a role, changing an identitys lifecycle state, or modifying an identity attribute tied to an account attribute all trigger the account update command.
The account update command triggers whenever IDN is told to modify an identity's
attributes or entitlements on the target source. For example, granting an
identity a new entitlement through a role, changing an identitys lifecycle
state, or modifying an identity attribute tied to an account attribute all
trigger the account update command.
## Input Schema
The payload from IDN contains the ID of the identity to modify, the configuration items the connector needs to call the source API, and one or more change operations to apply to the identity. Each operation has the following special considerations:
The payload from IDN contains the ID of the identity to modify, the
configuration items the connector needs to call the source API, and one or more
change operations to apply to the identity. Each operation has the following
special considerations:
- **Set:** Set tells the connector to overwrite the current value of the attribute or entitlement with the new value provided in the payload. The entire entitlement array resets if there are multi-valued entitlements.
- **Set:** Set tells the connector to overwrite the current value of the
attribute or entitlement with the new value provided in the payload. The
entire entitlement array resets if there are multi-valued entitlements.
- **Add:** Add only works for multi-valued entitlements. Add tells the connector to add one or more values to the entitlement. Add is often useful for group entitlements when new groups are added to the identity. If only one entitlement is added, it is represented as a ```string```. If more than one entitlement is added, it represented as an ```array of strings```.
- **Add:** Add only works for multi-valued entitlements. Add tells the connector
to add one or more values to the entitlement. Add is often useful for group
entitlements when new groups are added to the identity. If only one
entitlement is added, it is represented as a `string`. If more than one
entitlement is added, it is represented as an `array of strings`.
- **Remove:** Remove is similar to add, but it also works for attributes or single-valued entitlements. If you apply remove to multi-valued entitlements, doing so tells the connector to remove the value(s) from the entitlement. If only one entitlement is removed, it is represented as a ```string```. If more than one entitlement is removed, it is represented as an ```array of strings```. If you apply remove to a single-valued entitlement or account attribute, doing so tells the connector to set the value to ```null``` or ```empty```.
- **Remove:** Remove is similar to add, but it also works for attributes or
single-valued entitlements. If you apply remove to multi-valued entitlements,
doing so tells the connector to remove the value(s) from the entitlement. If
only one entitlement is removed, it is represented as a `string`. If more than
one entitlement is removed, it is represented as an `array of strings`. If you
apply remove to a single-valued entitlement or account attribute, doing so
tells the connector to set the value to `null` or `empty`.
The following example payload tells the connector to perform the following update actions:
The following example payload tells the connector to perform the following
update actions:
- Set the title of the account to “Developer Advocate.”
@@ -94,10 +114,28 @@ The following example payload tells the connector to perform the following updat
## Response Schema
After the connector applies the operations defined in the input payload, the connector must respond to IDN with the changes to the account so IDN can update the identity accordingly. If an account update operation results in no changes to the account, the connector responds with an empty object ```{}```. If the update operation results in one or more changes to the account, the connector responds with the complete account as it exists in the source, just like an account read response. IDN can parse the response and apply the differences accordingly.
After the connector applies the operations defined in the input payload, the
connector must respond to IDN with the changes to the account so IDN can update
the identity accordingly. If an account update operation results in no changes
to the account, the connector responds with an empty object `{}`. If the update
operation results in one or more changes to the account, the connector responds
with the complete account as it exists in the source, just like an account read
response. IDN can parse the response and apply the differences accordingly.
## Testing in IdentityNow
You can test the account update command the way you test the [Account Create](./account-create.md) command. Follow the steps in “Testing in IdentityNow” from “Account Create” to set up an access profile and role. Be sure to run the aggregation so the account(s) are created in the target source. Once the account(s) are created in the target source, modify the access profile to grant an additional entitlement. Return to the role and click the Update button in the upper right corner. Doing so triggers the account update command because the accounts are already created in the target source. Once the update is complete, ensure the account(s) have the additional entitlement.
You can test the account update command the way you test the
[Account Create](./account-create.md) command. Follow the steps in “Testing in
IdentityNow” from “Account Create” to set up an access profile and role. Be sure
to run the aggregation so the account(s) are created in the target source. Once
the account(s) are created in the target source, modify the access profile to
grant an additional entitlement. Return to the role and click the Update
button in the upper right corner. Doing so triggers the account update command
because the accounts are already created in the target source. Once the update
is complete, ensure the account(s) have the additional entitlement.
Note: Testing the account update command for removing entitlements using this method does not work. You can remove the entitlement from the access profile and run an update, but IDN will not send an update command to the connector to remove the entitlement. We are looking for suggestions on how to test the removal of entitlements.
Note: Testing the account update command for removing entitlements using this
method does not work. You can remove the entitlement from the access profile and
run an update, but IDN will not send an update command to the connector to
remove the entitlement. We are looking for suggestions on how to test the
removal of entitlements.

View File

@@ -9,16 +9,16 @@ slug: /docs/saas-connectivity/commands/entitlement-list
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:---------------------------:|
| Input | StdEntitlementListInput |
| Output | StdEntitlementListOutput |
| Input/Output | Data Type |
| :----------- | :----------------------: |
| Input | StdEntitlementListInput |
| Output | StdEntitlementListOutput |
### Example StdEntitlementListInput
```javascript
{
"type": "group"
"type": "group"
}
```
@@ -41,13 +41,23 @@ tags: ["Connectivity", "Connector Command"]
## Description
The entitlement list command triggers during a manual or scheduled entitlement aggregation operation within IDN. This operation gathers a list of all entitlements available on the target source, usually multi-valued entitlements like groups or roles. This operation provides IDN administrators with a list of entitlements available on the source so they can create access profiles and roles accordingly, and it provides IDN with more details about the entitlements. The entitlement schemas minimum requirements are name and ID, but you can add other values, such as created date, updated date, status, etc.
The entitlement list command triggers during a manual or scheduled entitlement
aggregation operation within IDN. This operation gathers a list of all
entitlements available on the target source, usually multi-valued entitlements
like groups or roles. This operation provides IDN administrators with a list of
entitlements available on the source so they can create access profiles and
roles accordingly, and it provides IDN with more details about the entitlements.
The entitlement schemas minimum requirements are name and ID, but you can add
other values, such as created date, updated date, status, etc.
![Discover Schema 4](./img/entitlement_list_idn.png)
## Defining the Schema
The entitlement schema is defined in the [connector-spec.json](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/connector-spec.json) file. Currently, only the multi-valued “group” type is supported. The following values are the minimum requirements, but you can add more attributes.
The entitlement schema is defined in the
[connector-spec.json](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/connector-spec.json)
file. Currently, only the multi-valued “group” type is supported. The following
values are the minimum requirements, but you can add more attributes.
```javascript
...
@@ -75,7 +85,8 @@ The entitlement schema is defined in the [connector-spec.json](https://github.co
## Implementation
This can be implemented in the main connector file, [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts):
This can be implemented in the main connector file,
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts):
```javascript
...

View File

@@ -10,13 +10,17 @@ tags: ["Connectivity", "Connector Command"]
---
:::note
At this time Entitlement Read is not triggered from IDN for any specific workflow and as such it is not necessary to implement this in order to have a fully functional connector.
At this time Entitlement Read is not triggered from IDN for any specific
workflow and as such it is not necessary to implement this in order to have a
fully functional connector.
:::
| Input/Output | Data Type |
|:-------------|:---------------------------:|
| Input | StdEntitlementReadInput |
| Output | StdEntitlementReadOutput |
| Input/Output | Data Type |
| :----------- | :----------------------: |
| Input | StdEntitlementReadInput |
| Output | StdEntitlementReadOutput |
### Example StdEntitlementReadInput
@@ -50,7 +54,10 @@ At this time Entitlement Read is not triggered from IDN for any specific workflo
## Response Schema
Entitlement read fetches a single entitlements attributes and returns the resulting object to IDN, similar to how entitlement list does. You can implement this in the main connector file, [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts):
Entitlement read fetches a single entitlements attributes and returns the
resulting object to IDN, similar to how entitlement list does. You can implement
this in the main connector file,
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts):
```javascript
...

View File

@@ -9,28 +9,42 @@ slug: /docs/saas-connectivity/commands/test-connection
tags: ["Connectivity", "Connector Command"]
---
| Input/Output | Data Type |
|:-------------|:-------------------------:|
| Input | undefined |
| Output | StdTestConnectionOutput |
| Input/Output | Data Type |
| :----------- | :---------------------: |
| Input | undefined |
| Output | StdTestConnectionOutput |
### Example StdTestConnectionOutput
```javascript
{}
{
}
```
## Summary
The test connection command ensures the connector can communicate with the target web service. It validates API credentials, host names, ports, and other configuration items. To implement this command, look for either a health endpoint or a simple GET endpoint. Some web services implement a health endpoint that returns status information about the service, which can be useful to test a connection. If no health endpoint exists, use a simple GET endpoint that takes few to no parameters to ensure the connector can make a successful call to the web service.
The test connection command ensures the connector can communicate with the
target web service. It validates API credentials, host names, ports, and other
configuration items. To implement this command, look for either a health
endpoint or a simple GET endpoint. Some web services implement a health endpoint
that returns status information about the service, which can be useful to test a
connection. If no health endpoint exists, use a simple GET endpoint that takes
few to no parameters to ensure the connector can make a successful call to the
web service.
Use Test Connection in the IDN UI after an admin has finished entering configuration information for a new instance of the connector.
Use Test Connection in the IDN UI after an admin has finished entering
configuration information for a new instance of the connector.
![Test Connection](./img/test_command_idn.png)
## Implementation
In [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts), add the test connection function handler to your connector. Within this function, send a simple request to your web service to ensure the connection works. The web service this connector targets has a JavaScript SDK, so define your own function like the following example to test the connection:
In
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts),
add the test connection function handler to your connector. Within this
function, send a simple request to your web service to ensure the connection
works. The web service this connector targets has a JavaScript SDK, so define
your own function like the following example to test the connection:
```javascript
export const connector = async () => {
@@ -50,11 +64,13 @@ export const connector = async () => {
}
```
To implement the ```testConnection()``` function, use the following function created in the web service client code, [airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts).
To implement the `testConnection()` function, use the following function created
in the web service client code,
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts).
```javascript
/**
* Test connection by listing users from the Airtable instance.
* Test connection by listing users from the Airtable instance.
* This will make sure the apiKey has the correct access.
* @returns empty struct if response is 2XX
*/
@@ -69,4 +85,7 @@ To implement the ```testConnection()``` function, use the following function cre
}
```
This function calls an endpoint on the target web service to list all users. If the call is successful, the web service returns an empty object, which is okay because you do not need to do anything with the data. Your only goal is to ensure that you can make API calls with the provided configuration.
This function calls an endpoint on the target web service to list all users. If
the call is successful, the web service returns an empty object, which is okay
because you do not need to do anything with the data. Your only goal is to
ensure that you can make API calls with the provided configuration.

View File

@@ -6,71 +6,139 @@ sidebar_label: Connector Spec File
sidebar_position: 4
sidebar_class_name: connectorSpecFile
keywords: ["connectivity", "connectors", "spec", "specification"]
description: The connector spec file tells IDN how the connector should interact between IDN and the custom connector. It is the glue between IDN and the connector, so understanding the different sections is key to understanding how to build a custom connector.
description:
The connector spec file tells IDN how the connector should interact between
IDN and the custom connector. It is the glue between IDN and the connector, so
understanding the different sections is key to understanding how to build a
custom connector.
slug: /docs/saas-connectivity/connector-spec
tags: ["Connectivity"]
---
## Summary
The connector spec file tells IDN how the connector should interact between IDN and the custom connector. It is the glue between IDN and the connector, so understanding the different sections is key to understanding how to build a custom connector.
The connector spec file tells IDN how the connector should interact between IDN
and the custom connector. It is the glue between IDN and the connector, so
understanding the different sections is key to understanding how to build a
custom connector.
## Sample File
To see a sample spec file, see this link: [connector-spec.json](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/connector-spec.json)
To see a sample spec file, see this link:
[connector-spec.json](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/connector-spec.json)
## Description of Fields
The following describes in detail the different fields in the connector spec:
- **name:** The name of the connector as it appears in IDN. Tags can be appended to this name.
- **name:** The name of the connector as it appears in IDN. Tags can be appended
to this name.
- **keyType:** Either “simple” or “compound.” This determines which type of key your connector expects to receive and send back for each of the commands. This must always be indicated in your connector spec - the connector returns the correct type for each command that returns a key type.
- For example, the stdAccountRead command input is the StdAccountReadInput. if you select keyType as “simple,” then the StdAccountReadInput.key will be the type SimpleKey.
- **keyType:** Either “simple” or “compound.” This determines which type of key
your connector expects to receive and send back for each of the commands. This
must always be indicated in your connector spec - the connector returns the
correct type for each command that returns a key type.
- **commands:** The list of commands the connector supports. A full list of available commands can be found here.
- For example, the stdAccountRead command input is the StdAccountReadInput. if
you select keyType as “simple,” then the StdAccountReadInput.key will be the
type SimpleKey.
- **sourceConfig** A list of configuration items you must provide when you create a source in IDN. The order of these items is preserved in the UI.
- **type:** This is always “section” - it indicates a new section.
- **key:** The name of the configuration item as it is referenced in code.
- **label:** The name of the configuration item as it appears in the UI.
- **required** (Optional): Set to 'false' by default. Valid values are 'true' or 'false.' You must populate required configuration items in the IDN source configuration wizard before continuing.
- **type:** The configuration items' types. The following types are valid:
- text
- password
- url
- email
- number
- checkbox
- json
- **accountSchema:** The schema for an account in IDN populated by data from the source.
- **displayAttribute:** Identifies the attribute (defined below) used to map to ```Account Name``` in the IdentityNow account schema. This should be a unique value even though it is not required because the connector will use this value to correlate accounts in IDN to accounts in the source system.
- **identityAttribute:** Identifies the attribute (defined below) used to map to ```Account ID``` in the IdentityNow account schema. This must be a globally unique identifier, such as email address, employee ID, etc.
- **groupAttribute:** Identifies the attribute used to map accounts to entitlements. For example, a web service can define ```groups``` that users are members of, and the ```groups``` grant entitlements to each user. In this case, **groupAttribute** is “groups,” and there is also an account attribute called “groups”.
- **attributes:** One or more attributes that map to a users attribute on the target source. Each attribute defines the following:
- **name:** The attributes name as it appears in IDN.
- **type:** The attributes type. Possible values are ```string```, ```boolean```, ```long```, and ```int```.
- **description:** A helpful description of the attribute. This is useful to source owners when they are trying to understand the account schema.
- **managed:** This indicates whether the entitlements are manageable through IDN or read-only.
- **entitlement:** This boolean indicates whether the attribute is an entitlement. Entitlements give identities privileges on the source system. Use this indication to determine which fields to synchronize with accounts in IDN for tasks such as separation of duties and role assignment. The boolean indicates whether the attribute is an entitlement.
- **multi:** This indicates entitlements that are stored in an array format. This one field can store multiple entitlements for a single account.
- **entitlementSchemas:** A list of entitlement schemas in IDN populated by data from the source.
- **type:** The entitlements type. Currently, only ```group``` is supported.
- **displayAttribute:** The entitlement attributes name. This can be the ```name``` or another human friendly identifier for a group.
- **identityAttribute:** The entitlement attributes unique ID. This can be the ```id``` or another unique key for a group.
- **attributes:** The entitlements list of attributes. This list of attributes is an example: ```id```, ```name```, and ```description```.
- **name:** The name of the attribute as it appears in IDN.
- **type:** The attributes type. Possible values are ```string```, ```boolean```, ```long```, and ```int```.
- **description:** A helpful description of the attribute. This is useful to source owners when they are trying to understand the account schema.
- **accountCreateTemplate:** A map of identity attributes IDN will pass to the connector to create an account in the target source.
- **key:** The unique identifier of the attribute. This is also the name that is presented in the Create Profile screen in IDN.
- **label:** A friendly name for presentation purposes.
- **type:** The attributes type. Possible values are ```string```, ```boolean```, ```long```, and ```int```.
- **initialValue (Optional):** Use this to specify identityAttribute mapping, generator or default values.
- **type:** The initial value type. Possible values are ```identityAttribute```, ```generator```, ```static```.
- **attributes:** Attributes change depending on the type selected.
- **name:** Use this to identify the mapping for identityAttribute type, or the generator to use (```Create Password```, ```Create Unique Account ID```).
- **value:** Use this as the default value for the static type.
- **maxSize:** Use this for the Create Unique Account ID generator type. This value specifies the maximum size of the username to be generated.
- **maxUniqueChecks:** Use this for the Create Unique Account ID generator type. This value specifies the maximum retries in case a unique ID is not found with the first random generated user.
- **template:** Use this for the Create Unique Account ID generator type. This value specifies the template used for generation. Example: ```"$(firstname).$(lastname)$(uniqueCounter)"```.
- **required (Optional):** Determines whether the account create operation requires this attribute. It defaults to ```false```. If it is ```true``` and IdentityNow encounters an identity missing this attribute, IDN does not send the account to the connector for account creation.
- **commands:** The list of commands the connector supports. A full list of
available commands can be found here.
- **sourceConfig** A list of configuration items you must provide when you
create a source in IDN. The order of these items is preserved in the UI.
- **type:** This is always “section” - it indicates a new section.
- **key:** The name of the configuration item as it is referenced in code.
- **label:** The name of the configuration item as it appears in the UI.
- **required** (Optional): Set to 'false' by default. Valid values are
'true' or 'false.' You must populate required configuration items in the
IDN source configuration wizard before continuing.
- **type:** The configuration items' types. The following types are valid:
- text
- password
- url
- email
- number
- checkbox
- json
- **accountSchema:** The schema for an account in IDN populated by data from
the source.
- **displayAttribute:** Identifies the attribute (defined below) used to
map to `Account Name` in the IdentityNow account schema. This should be
a unique value even though it is not required because the connector will
use this value to correlate accounts in IDN to accounts in the source
system.
- **identityAttribute:** Identifies the attribute (defined below) used to
map to `Account ID` in the IdentityNow account schema. This must be a
globally unique identifier, such as email address, employee ID, etc.
- **groupAttribute:** Identifies the attribute used to map accounts to
entitlements. For example, a web service can define `groups` that users
are members of, and the `groups` grant entitlements to each user. In
this case, **groupAttribute** is “groups,” and there is also an account
attribute called “groups”.
- **attributes:** One or more attributes that map to a users attribute on
the target source. Each attribute defines the following:
- **name:** The attributes name as it appears in IDN.
- **type:** The attributes type. Possible values are `string`,
`boolean`, `long`, and `int`.
- **description:** A helpful description of the attribute. This is
useful to source owners when they are trying to understand the account
schema.
- **managed:** This indicates whether the entitlements are manageable
through IDN or read-only.
- **entitlement:** This boolean indicates whether the attribute is an
entitlement. Entitlements give identities privileges on the source
system. Use this indication to determine which fields to synchronize
with accounts in IDN for tasks such as separation of duties and role
assignment. The boolean indicates whether the attribute is an
entitlement.
- **multi:** This indicates entitlements that are stored in an array
format. This one field can store multiple entitlements for a single
account.
- **entitlementSchemas:** A list of entitlement schemas in IDN populated
by data from the source.
- **type:** The entitlements type. Currently, only `group` is
supported.
- **displayAttribute:** The entitlement attributes name. This can be
the `name` or another human friendly identifier for a group.
- **identityAttribute:** The entitlement attributes unique ID. This can
be the `id` or another unique key for a group.
- **attributes:** The entitlements list of attributes. This list of
attributes is an example: `id`, `name`, and `description`.
- **name:** The name of the attribute as it appears in IDN.
- **type:** The attributes type. Possible values are `string`,
`boolean`, `long`, and `int`.
- **description:** A helpful description of the attribute. This is useful
to source owners when they are trying to understand the account
schema.
- **accountCreateTemplate:** A map of identity attributes IDN will pass to
the connector to create an account in the target source.
- **key:** The unique identifier of the attribute. This is also the name
that is presented in the Create Profile screen in IDN.
- **label:** A friendly name for presentation purposes.
- **type:** The attributes type. Possible values are `string`,
`boolean`, `long`, and `int`.
- **initialValue (Optional):** Use this to specify identityAttribute
mapping, generator or default values.
- **type:** The initial value type. Possible values are
`identityAttribute`, `generator`, `static`.
- **attributes:** Attributes change depending on the type selected.
- **name:** Use this to identify the mapping for identityAttribute
type, or the generator to use (`Create Password`,
`Create Unique Account ID`).
- **value:** Use this as the default value for the static type.
- **maxSize:** Use this for the Create Unique Account ID generator
type. This value specifies the maximum size of the username to be
generated.
- **maxUniqueChecks:** Use this for the Create Unique Account ID
generator type. This value specifies the maximum retries in case a
unique ID is not found with the first random generated user.
- **template:** Use this for the Create Unique Account ID generator
type. This value specifies the template used for generation.
Example: `"$(firstname).$(lastname)$(uniqueCounter)"`.
- **required (Optional):** Determines whether the account create
operation requires this attribute. It defaults to `false`. If it is
`true` and IdentityNow encounters an identity missing this
attribute, IDN does not send the account to the connector for
account creation.

View File

@@ -6,11 +6,18 @@ sidebar_label: Example Connectors
sidebar_position: 5
sidebar_class_name: exampleConnectors
keywords: ["connectivity", "connectors", "example"]
description: Here are a few example connectors that were built for you to download and learn from.
description:
Here are a few example connectors that were built for you to download and
learn from.
slug: /docs/saas-connectivity/example-connectors
tags: ["Connectivity"]
---
- [Airtable connector](https://github.com/sailpoint-oss/airtable-example-connector) is a real connector that works like a flat file data source and is great for demonstrating how a connector works.
- [Airtable connector](https://github.com/sailpoint-oss/airtable-example-connector)
is a real connector that works like a flat file data source and is great for
demonstrating how a connector works.
- [Discourse Connector](https://github.com/sailpoint-oss/discourse-connector-2) is a real connector that works with the [Discourse service](https://www.discourse.org/). The documentation for each command references code from this example application.
- [Discourse Connector](https://github.com/sailpoint-oss/discourse-connector-2)
is a real connector that works with the
[Discourse service](https://www.discourse.org/). The documentation for each
command references code from this example application.

View File

@@ -6,12 +6,24 @@ sidebar_label: API Calls
sidebar_position: 1
sidebar_class_name: apiCalls
keywords: ["connectivity", "connectors", "api calls"]
description: Calling API endpoints sequentially for hundreds or thousands of accounts is slow. If several API calls are required to build a users account, then it is recommended that you use asynchronous functions to speed up this task.
description:
Calling API endpoints sequentially for hundreds or thousands of accounts is
slow. If several API calls are required to build a users account, then it is
recommended that you use asynchronous functions to speed up this task.
slug: /docs/saas-connectivity/in-depth/api-calls
tags: ["Connectivity"]
---
Calling API endpoints sequentially for hundreds or thousands of accounts is slow. If several API calls are required to build a users account, then it is recommended that you use asynchronous functions to speed up this task. Asynchronous functions allow your program to execute several commands at once, which is especially important for high latency commands like calling API endpoints - each call to an endpoint can take anywhere from several milliseconds to several seconds. The following code snippet from [discourse-client.ts](https://github.com/sailpoint-oss/discourse-connector-2/blob/main/Discourse/src/discourse-client.ts) shows how you can use asynchronous functions to quickly build a list of account profiles for your sources users:
Calling API endpoints sequentially for hundreds or thousands of accounts is
slow. If several API calls are required to build a users account, then it is
recommended that you use asynchronous functions to speed up this task.
Asynchronous functions allow your program to execute several commands at once,
which is especially important for high latency commands like calling API
endpoints - each call to an endpoint can take anywhere from several milliseconds
to several seconds. The following code snippet from
[discourse-client.ts](https://github.com/sailpoint-oss/discourse-connector-2/blob/main/Discourse/src/discourse-client.ts)
shows how you can use asynchronous functions to quickly build a list of account
profiles for your sources users:
```javascript
async getUsers(): Promise<User[]> {
@@ -37,12 +49,22 @@ async getUsers(): Promise<User[]> {
}
```
- Line 3 gets all the user IDs for a default group to which all the users you want to track are assigned.
- Line 6 gets more attributes for each user present in the group. There can be hundreds of users who need their attributes fetched, so use Promise.all to build and execute the API calls asynchronously, speeding up the operations completion time.
- Line 3 gets all the user IDs for a default group to which all the users you
want to track are assigned.
- Line 9 uses the same strategy as Line 6, except it calls another endpoint that will get each users email address, which isnt present in the previous API call. Use Promise.all again to speed up the operation.
- Line 6 gets more attributes for each user present in the group. There can be
hundreds of users who need their attributes fetched, so use Promise.all to
build and execute the API calls asynchronously, speeding up the operations
completion time.
- Line 12-14 combines the data you gathered from the preceding calls to complete your user accounts.
- Line 9 uses the same strategy as Line 6, except it calls another endpoint that
will get each users email address, which isnt present in the previous API
call. Use Promise.all again to speed up the operation.
>📘 As a general guideline, any time you must execute several API calls that all call the same endpoint, it is recommended that you use Promise.all to speed up the operation.
- Line 12-14 combines the data you gathered from the preceding calls to complete
your user accounts.
> 📘 As a general guideline, any time you must execute several API calls that
> all call the same endpoint, it is recommended that you use Promise.all to
> speed up the operation.

View File

@@ -6,83 +6,103 @@ sidebar_label: Debugging
sidebar_position: 2
sidebar_class_name: debugging
keywords: ["connectivity", "connectors", "debugging"]
description: An easy way to debug locally is to use console.log() to print debug information to your console.
description:
An easy way to debug locally is to use console.log() to print debug
information to your console.
slug: /docs/saas-connectivity/in-depth/debugging
tags: ["Connectivity"]
---
## Debug locally
An easy way to debug locally is to use ```console.log()``` to print debug information to your console. You can add ```console.log()``` statements anywhere, and the messages they print can contain static text or variables. For example, to see the contents of an input object when you are invoking the ```stdAccountCreate``` command, you can craft the following debug logic:
An easy way to debug locally is to use `console.log()` to print debug
information to your console. You can add `console.log()` statements anywhere,
and the messages they print can contain static text or variables. For example,
to see the contents of an input object when you are invoking the
`stdAccountCreate` command, you can craft the following debug logic:
```javascript
export const connector = async () => {
return createConnector()
.stdAccountCreate(async (context: Context, input: StdAccountCreateInput, res: Response<StdAccountCreateOutput>) => {
// Print the contents of input to the console. Must use
// JSON.stringify() to print the contents of an object.
console.log(`Input received for account create: ${JSON.stringify(input)}`)
if (!input.attributes.id) {
throw new ConnectorError('identity cannot be null')
}
const user = await airtable.createAccount(input)
logger.info(user, "created user in Airtable")
res.send(user.toStdAccountCreateOutput())
})
}
return createConnector().stdAccountCreate(
async (
context: Context,
input: StdAccountCreateInput,
res: Response<StdAccountCreateOutput>
) => {
// Print the contents of input to the console. Must use
// JSON.stringify() to print the contents of an object.
console.log(
`Input received for account create: ${JSON.stringify(input)}`
);
if (!input.attributes.id) {
throw new ConnectorError("identity cannot be null");
}
const user = await airtable.createAccount(input);
logger.info(user, "created user in Airtable");
res.send(user.toStdAccountCreateOutput());
}
);
};
```
```console.log()``` statements work anywhere, and they work when you deploy your connector to IDN. However, these statements can create clutter in your code. You will often have to clean up debug statements once you are done.
`console.log()` statements work anywhere, and they work when you deploy your
connector to IDN. However, these statements can create clutter in your code. You
will often have to clean up debug statements once you are done.
If your IDE supports debugging JavaScript, then your IDEs built-in debugger can be a powerful and easy way to debug your code.
If your IDE supports debugging JavaScript, then your IDEs built-in debugger can
be a powerful and easy way to debug your code.
## Debug in VS Code
### Debug through the javascript debug terminal
In VS Code, open a javascript debug terminal window and run the npm run dev command.
In VS Code, open a javascript debug terminal window and run the npm run dev
command.
```npm run dev```
`npm run dev`
Now you can set breakpoints in your typescript files in VS Code:
![debugging 1](./img/debugging1.png)
### Debug through the VS Code Debug configuration
To simplify the debugging process, you can consolidate the debugging steps into a VS Code launch configuration. The following snippet is an example of how you would do so:
To simplify the debugging process, you can consolidate the debugging steps into
a VS Code launch configuration. The following snippet is an example of how you
would do so:
**Launch.json:**
```json
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Program via NPM",
"request": "launch",
"runtimeArgs": [
"run-script",
"dev"
],
"windows": {
"runtimeExecutable": "npm.cmd",
},
"linux": {
"runtimeExecutable": "npm",
},
"skipFiles": [
"<node_internals>/**"
],
"type": "node"
}
]
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Program via NPM",
"request": "launch",
"runtimeArgs": ["run-script", "dev"],
"windows": {
"runtimeExecutable": "npm.cmd"
},
"linux": {
"runtimeExecutable": "npm"
},
"skipFiles": ["<node_internals>/**"],
"type": "node"
}
]
}
```
With these configurations set, you can run the debugger by selecting the options shown in the following image:
With these configurations set, you can run the debugger by selecting the options
shown in the following image:
![debugging 2](./img/debugging2.png)
## Debug in IdentityNow
You can use the ```sp conn logs``` command to gain insight into how your connector is performing while running in IDN. See the section on logging for more information.
You can use the `sp conn logs` command to gain insight into how your connector
is performing while running in IDN. See the section on logging for more
information.

View File

@@ -6,15 +6,23 @@ sidebar_label: Error Handling
sidebar_position: 3
sidebar_class_name: errorHandling
keywords: ["connectivity", "connectors", "error handling"]
description: Any time code can fail due to validation issues, connectivity or configuration errors, handle the error and provide information back to the user about what went wrong.
description:
Any time code can fail due to validation issues, connectivity or configuration
errors, handle the error and provide information back to the user about what
went wrong.
slug: /docs/saas-connectivity/in-depth/error-handling
tags: ["Connectivity"]
---
Any time code can fail due to validation issues, connectivity or configuration errors, handle the error and provide information back to the user about what went wrong. If you handle your errors properly, it will be easier to debug and pinpoint what happened in your connector when something goes wrong.
Any time code can fail due to validation issues, connectivity or configuration
errors, handle the error and provide information back to the user about what
went wrong. If you handle your errors properly, it will be easier to debug and
pinpoint what happened in your connector when something goes wrong.
## Connector Errors
The connector SDK has a built-in ConnectorError to use in your project to handle most generic errors:
The connector SDK has a built-in ConnectorError to use in your project to handle
most generic errors:
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts)
@@ -43,32 +51,39 @@ export class AirtableClient {
```
## Custom Errors
You can also create custom errors and use them in your code to give more meaningful and specific responses to error states. For example, when you are configuring your connector, it is recommended that you throw an ```InvalidConfigurationError``` instead of a generic ConnectorError. To do this, create the custom error:
You can also create custom errors and use them in your code to give more
meaningful and specific responses to error states. For example, when you are
configuring your connector, it is recommended that you throw an
`InvalidConfigurationError` instead of a generic ConnectorError. To do this,
create the custom error:
[invalid-configuration-error.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/errors/invalid-configuration-error.ts)
```javascript
import { ConnectorError, ConnectorErrorType } from '@sailpoint/connector-sdk'
import { ConnectorError, ConnectorErrorType } from "@sailpoint/connector-sdk";
/**
* Thrown when an application is missing configuration during initialization
*/
export class InvalidConfigurationError extends ConnectorError {
/**
* Constructor
* @param message Error message
* @param type ConnectorErrorType they type of error
*/
constructor(message: string, type?: ConnectorErrorType) {
super(message, type)
this.name = 'InvalidConfigurationError'
}
/**
* Constructor
* @param message Error message
* @param type ConnectorErrorType they type of error
*/
constructor(message: string, type?: ConnectorErrorType) {
super(message, type);
this.name = "InvalidConfigurationError";
}
}
```
Then throw the error in your code:
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts)
```javascript
import { InvalidConfigurationError } from "./errors/invalid-configuration-error"
@@ -90,4 +105,4 @@ export class AirtableClient {
...
}
```
```

View File

@@ -6,17 +6,18 @@ sidebar_label: Linting
sidebar_position: 4
sidebar_class_name: linting
keywords: ["connectivity", "connectors", "linting"]
description: Automatically check your connector source code for programmatic and stylistic errors.
description:
Automatically check your connector source code for programmatic and stylistic
errors.
slug: /docs/saas-connectivity/in-depth/linting
tags: ["Connectivity"]
---
To add linting to your project, simply install the linter using NPM:
``npm install --save-dev eslint @typescript-eslint/parser @typescript-eslint/eslint-plugin`
`npm install --save-dev eslint @typescript-eslint/parser @typescript-eslint/eslint-plugin`
Then add the ```.eslintrc.yml``` file on the project root:
Then add the `.eslintrc.yml` file on the project root:
```yaml
env:
@@ -25,11 +26,11 @@ env:
extends:
- eslint:recommended
- plugin:@typescript-eslint/recommended
parser: '@typescript-eslint/parser'
parser: "@typescript-eslint/parser"
parserOptions:
ecmaVersion: latest
sourceType: module
plugins:
- '@typescript-eslint'
- "@typescript-eslint"
rules: {}
```
```

View File

@@ -12,7 +12,9 @@ tags: ["Connectivity"]
---
## Printing Logs with the CLI
Fetch logs from IDN by issuing the ```sp conn logs``` command:
Fetch logs from IDN by issuing the `sp conn logs` command:
```bash
$ sp conn logs
@@ -25,9 +27,12 @@ $ sp conn logs
[2022-07-14T11:04:24.890-04:00] INFO | invokeCommand ▶︎ Command invocation complete : std:test-connection, for connector version: 8. Elapsed time 125.749µs
[2022-07-14T11:04:24.941-04:00] INFO | commandOutcome ▶︎ {"commandType":"std:test-connection","completed":true,"elapsed":49,"message":"command completed","requestId":"cca732a2-084d-4433-9bd5-ed22fa397d8d","version":8}
```
To tail the logs to see output as it happens, execute the ```sp conn logs tail``` command.
It can also be helpful to execute the logs command along with grep to filter your results to a specific connector or text:
To tail the logs to see output as it happens, execute the `sp conn logs tail`
command.
It can also be helpful to execute the logs command along with grep to filter
your results to a specific connector or text:
```bash
$ sp conn logs | grep 'connector version 29'
@@ -35,7 +40,9 @@ $ sp conn logs | grep 'connector version 29'
```
## Logging with console.log
Anywhere that you use console.log in your code, the output will be exposed to the logs. The following example has a printed statement in the index.ts file:
Anywhere that you use console.log in your code, the output will be exposed to
the logs. The following example has a printed statement in the index.ts file:
```javascript
// Connector must be exported as module property named connector
@@ -54,7 +61,10 @@ export const connector = async () => {
...
```
When you run the ```sp conn logs``` command, you will see the following in the output:
When you run the `sp conn logs` command, you will see the following in the
output:
```bash
$ sp conn logs tail
@@ -64,20 +74,23 @@ $ sp conn logs tail
## Logging using the SDK
Use the built-in logging tool to simplify the logging process and enhance your logger's capabilities. To start, import the logger from the SDK:
Use the built-in logging tool to simplify the logging process and enhance your
logger's capabilities. To start, import the logger from the SDK:
```import { logger as SDKLogger } from '@sailpoint/connector-sdk'```
`import { logger as SDKLogger } from '@sailpoint/connector-sdk'`
Next, add a simple configuration for the logger to use throughout your application.
Next, add a simple configuration for the logger to use throughout your
application.
[logger.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/logger/logger.ts)
```javascript
import { logger as SDKLogger } from '@sailpoint/connector-sdk'
import { logger as SDKLogger } from "@sailpoint/connector-sdk";
export const logger = SDKLogger.child(
// specify your connector name
{ connectorName: 'Airtable' }
)
// specify your connector name
{ connectorName: "Airtable" }
);
```
Now you can import your logger into your project and start logging.
@@ -108,36 +121,42 @@ export const connector = async () => {
## Configuring the SDK to Mask Sensitive Values
The SDK Logger uses [Pino](https://github.com/pinojs/pino) under the hood, which has the built-in capability to search and remove json paths that can contain sensitive information.
The SDK Logger uses [Pino](https://github.com/pinojs/pino) under the hood, which
has the built-in capability to search and remove json paths that can contain
sensitive information.
>🚧 Never expose any Personally Identifiable Information in any logging operations.
> 🚧 Never expose any Personally Identifiable Information in any logging
> operations.
Start by looking at line 116 to 122 in your logger configuration, which looks like the one below:
Start by looking at line 116 to 122 in your logger configuration, which looks
like the one below:
```javascript
import { logger as SDKLogger } from '@sailpoint/connector-sdk'
import { logger as SDKLogger } from "@sailpoint/connector-sdk";
export const logger = SDKLogger.child(
// specify your connector name
{ connectorName: 'Airtable' },
// This is optional for removing specific information you might not want to be logged
{
redact: {
paths: [
'*.password',
'*.username',
'*.email',
'*.id',
'*.firstName',
'*.lastName',
'*.displayName'
],
censor: '****',
},
}
)
// specify your connector name
{ connectorName: "Airtable" },
// This is optional for removing specific information you might not want to be logged
{
redact: {
paths: [
"*.password",
"*.username",
"*.email",
"*.id",
"*.firstName",
"*.lastName",
"*.displayName",
],
censor: "****",
},
}
);
```
Now compare that with the object you want to remove information from while still logging information in it:
Now compare that with the object you want to remove information from while still
logging information in it:
[AirtableAccount.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/models/AirtableAccount.ts)
@@ -156,7 +175,11 @@ export class AirtableAccount {
entitlments!: Array<string>
}
```
Now when you log the contents of an ```AirtableAccount``` object, you will see all the fields redacted. For example, in [index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts) we log the ```accounts``` in the following code snippet:
Now when you log the contents of an `AirtableAccount` object, you will see all
the fields redacted. For example, in
[index.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/index.ts)
we log the `accounts` in the following code snippet:
```javascript
.stdAccountList(async (context: Context, input: undefined, res: Response<StdAccountListOutput>) => {
@@ -170,6 +193,7 @@ Now when you log the contents of an ```AirtableAccount``` object, you will see a
```
which results in the following log output:
```bash
$ sp conn logs
@@ -178,4 +202,5 @@ $ sp conn logs
[2022-07-14T11:19:30.678-04:00] INFO | commandOutcome ▶︎ {"commandType":"std:account:list","completed":true,"elapsed":1290,"message":"command completed","requestId":"379a8a4510944daf9d02b51a29ae863e","version":8}
```
You can see that any of the PII information has now been transformed into "****"
You can see that any of the PII information has now been transformed into
"\*\*\*\*"

View File

@@ -11,9 +11,25 @@ slug: /docs/saas-connectivity/in-depth/handling-rate-limits
tags: ["Connectivity"]
---
APIs often implement rate limits to prevent any one user from abusing the API or using an unfair amount of resources, limiting what other users of the API can do. The rate limits can manifest in many ways, but one of the most common ways is the 429 (Too Many Requests) HTTP status code. You must check the documentation of the API you are using to see whether it enforces rate limits and how it notifies you when you reach that limit. An example of rate limit documentation for Stripes API can be found [here](https://stripe.com/docs/rate-limits).
APIs often implement rate limits to prevent any one user from abusing the API or
using an unfair amount of resources, limiting what other users of the API can
do. The rate limits can manifest in many ways, but one of the most common ways
is the 429 (Too Many Requests) HTTP status code. You must check the
documentation of the API you are using to see whether it enforces rate limits
and how it notifies you when you reach that limit. An example of rate limit
documentation for Stripe's API can be found
[here](https://stripe.com/docs/rate-limits).
If you are using a vendor supplied client library for the API, check the documentation for that client library to see whether it handles rate limits for you. If it does, you do not need to worry about rate limits. If it does not or if you have to implement your own library for interacting with the target API, you must handle rate limiting yourself. If you are implementing your own library for the target API, the easiest way to handle rate limits is to use the [axios-retry](https://www.npmjs.com/package/axios-retry) NPM package in conjunction with the [axios](https://www.npmjs.com/package/axios) HTTP request library. Start by including both packages in the dependencies section of your ```package.json``` file:
If you are using a vendor supplied client library for the API, check the
documentation for that client library to see whether it handles rate limits for
you. If it does, you do not need to worry about rate limits. If it does not or
if you have to implement your own library for interacting with the target API,
you must handle rate limiting yourself. If you are implementing your own library
for the target API, the easiest way to handle rate limits is to use the
[axios-retry](https://www.npmjs.com/package/axios-retry) NPM package in
conjunction with the [axios](https://www.npmjs.com/package/axios) HTTP request
library. Start by including both packages in the dependencies section of your
`package.json` file:
```json
...
@@ -25,7 +41,15 @@ If you are using a vendor supplied client library for the API, check the documen
...
```
Next, run ```npm install``` in your project directory to install the packages. Once they are installed, go to the section of your code that handles API calls to your source and wrap your Axios HTTP client object in an Axios retry object. In the following snippet, the code automatically retries an API call that fails with a 429 error code three times, using exponential back-off between each API call. You can configure this better to suit your APIs rate limit. The following code snippet from [discourse-client.ts](https://github.com/sailpoint-oss/discourse-connector-2/blob/main/src/discourse-client.ts) shows the code necessary to set up the retry logic:
Next, run `npm install` in your project directory to install the packages. Once
they are installed, go to the section of your code that handles API calls to
your source and wrap your Axios HTTP client object in an Axios retry object. In
the following snippet, the code automatically retries an API call that fails
with a 429 error code three times, using exponential back-off between each API
call. You can configure this better to suit your API's rate limit. The following
code snippet from
[discourse-client.ts](https://github.com/sailpoint-oss/discourse-connector-2/blob/main/src/discourse-client.ts)
shows the code necessary to set up the retry logic:
```javascript
import { ConnectorError } from "@sailpoint/connector-sdk"
@@ -39,7 +63,7 @@ export class DiscourseClient {
private readonly apiKey?: string
private readonly apiUsername?: string
private readonly baseUrl?: string
httpClient: AxiosInstance
constructor(config: any) {
@@ -58,7 +82,7 @@ export class DiscourseClient {
if (this.baseUrl == null) {
throw new ConnectorError('baseUrl must be provided from config')
}
this.httpClient = axios.create({
baseURL: this.baseUrl,
headers: {
@@ -69,7 +93,7 @@ export class DiscourseClient {
// Wrap our Axios HTTP client in an Axios retry object to automatically
// handle rate limiting. By default, this logic will retry a given
// API call 3 times before failing. Read the documentation for
// API call 3 times before failing. Read the documentation for
// axios-retry on NPM to see more configuration options.
axiosRetry(this.httpClient, {
retryDelay: axiosRetry.exponentialDelay,
@@ -83,7 +107,8 @@ export class DiscourseClient {
...
```
Because ```axios-retry``` wraps an ```axios``` object, you can make API calls like you normally would with Axios without any special options or configuration.
Because `axios-retry` wraps an `axios` object, you can make API calls like you
normally would with Axios without any special options or configuration.
```javascript
private async getUserEmailAddress(username: string): Promise<string> {

View File

@@ -13,7 +13,9 @@ tags: ["Connectivity"]
## Getting Started
When you set up a new project, the following test files are created: ```index.spec.ts``` and ```my-client.spec.ts```. Execute the tests immediately using npm test.
When you set up a new project, the following test files are created:
`index.spec.ts` and `my-client.spec.ts`. Execute the tests immediately using npm
test.
```bash
$ npm run test
@@ -24,11 +26,11 @@ $ npm run test
PASS src/my-client.spec.ts
PASS src/index.spec.ts
--------------|---------|----------|---------|---------|-------------------
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s
--------------|---------|----------|---------|---------|-------------------
All files | 72 | 100 | 75 | 70.83 |
index.ts | 56.25 | 100 | 50 | 53.33 | 29-56
my-client.ts | 100 | 100 | 100 | 100 |
All files | 72 | 100 | 75 | 70.83 |
index.ts | 56.25 | 100 | 50 | 53.33 | 29-56
my-client.ts | 100 | 100 | 100 | 100 |
--------------|---------|----------|---------|---------|-------------------
Test Suites: 2 passed, 2 total
@@ -38,60 +40,74 @@ Time: 1.937 s
Ran all test suites.
{"level":"INFO","message":"Running test connection"}
```
You can also view the results in an html report by viewing the ```index.html``` file inside the ```coverage/lcov-report``` folder:
![Account List](./img/testing1.png)
![Account List](./img/testing2.png)
You can also view the results in an html report by viewing the `index.html` file
inside the `coverage/lcov-report` folder:
![Account List](./img/testing1.png) ![Account List](./img/testing2.png)
## Testing Techniques
[Jest](https://jestjs.io/docs/getting-started) is a testing framework provided for javascript that focuses on simplicity. CLI includes it when it generates the project. It is recommended to use Jest to test your code.
[Jest](https://jestjs.io/docs/getting-started) is a testing framework provided
for javascript that focuses on simplicity. CLI includes it when it generates the
project. It is recommended to use Jest to test your code.
Testing your code is important because it can highlight implementation issues before they get into production. If your tests are setup with good descriptions, the tests can also help explain why certain conditions are important in the code, so if a new developer breaks a test, he or she will know what broke and why the functionality is important.
Testing your code is important because it can highlight implementation issues
before they get into production. If your tests are set up with good descriptions,
the tests can also help explain why certain conditions are important in the
code, so if a new developer breaks a test, he or she will know what broke and
why the functionality is important.
If you have good tests set up, then you can quickly identify and fix changes or updates that occur in dependent sources.
If you have good tests set up, then you can quickly identify and fix changes or
updates that occur in dependent sources.
Jest provides [many different ways to test your code](https://jestjs.io/docs/using-matchers). Some techniques are listed below:
Jest provides
[many different ways to test your code](https://jestjs.io/docs/using-matchers).
Some techniques are listed below:
### Test a method and evaluate the response using `expect`
### Test a method and evaluate the response using ```expect```
```javascript
it('get users populates correct fields', async () => {
// Execute the method
let res = await discourseClient.getUsers()
it("get users populates correct fields", async () => {
// Execute the method
let res = await discourseClient.getUsers();
// Check the response, and make sure it is an array with exactly 2 elements
expect(res.length).toBe(2)
// Evaluate the response email and ensure it matches the expected result
expect(res[0].email === 'test.test@test.com')
})
// Check the response, and make sure it is an array with exactly 2 elements
expect(res.length).toBe(2);
// Evaluate the response email and ensure it matches the expected result
expect(res[0].email === "test.test@test.com");
});
```
- Line 4 executes the method.
- Line 7 asserts that the response is an array with 2 elements.
- Line 10 evaluates the email field in the array to ensure it matches the expected result.
- Line 10 evaluates the email field in the array to ensure it matches the
expected result.
### Test a method to ensure it calls another method using ```spyOn```
### Test a method to ensure it calls another method using `spyOn`
```javascript
it('password is generated when not provided', async () => {
// Create the spy for later use. We want to know details about this method.
const spy = jest.spyOn(DiscourseClient.prototype as any, "generateRandomPassword")
// Execute the method
let res = await discourseClient.createUser({ "email": "", "username": "test" })
// Validate that the internal method "generateRandomPassword" was called
expect(spy).toBeCalled();
// Validate the email field matches the expected result
expect(res.email === 'test.test@test.com')
})
```
- Line 4 sets up the spy. “generateRandomPassword” is an internal method that gets called when the password is not provided.
- Line 4 sets up the spy. “generateRandomPassword” is an internal method that
gets called when the password is not provided.
- Line 7 executes the method.
@@ -99,14 +115,24 @@ Jest provides [many different ways to test your code](https://jestjs.io/docs/usi
## Setting up Mock Services
The easiest way to mock your client is to set up a mock service that returns data just like your service would in production so you can test all your functions and data manipulation in your unit tests.
The easiest way to mock your client is to set up a mock service that returns
data just like your service would in production so you can test all your
functions and data manipulation in your unit tests.
Mocks help test your code without actually invoking your service and allow you to simulate the kind of response your client expects to receive. They can also help you pinpoint where failures occur in case something changes on your service. By using a mock service, you can test your entire application without connecting to your service.
Mocks help test your code without actually invoking your service and allow you
to simulate the kind of response your client expects to receive. They can also
help you pinpoint where failures occur in case something changes on your
service. By using a mock service, you can test your entire application without
connecting to your service.
Create a mock file
Jest provides a way to set up a mock service. It stores your mock files in a folder called \_\_mocks__. If you name your typescript files the exact same as the files they are mocking, those mock implementations will be called instead when your unit tests are running. In the following example, a mock has been created to simulate calls to the airtable client:
### Create a mock file

Jest provides a way to set up a mock service. It stores your
mock files in a folder called \_\_mocks\_\_. If you name your typescript files
the exact same as the files they are mocking, those mock implementations will be
called instead when your unit tests are running. In the following example, a
mock has been created to simulate calls to the airtable client:
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/__mocks__/airtable.ts)
```javascript
import { AttributeChange, CompoundKeyType, ConnectorError, ConnectorErrorType, SimpleKeyType, StdAccountCreateInput, StdAccountDiscoverSchemaOutput } from "@sailpoint/connector-sdk"
import { AirtableAccount } from "../models/AirtableAccount"
@@ -128,7 +154,7 @@ export class AirtableClient {
}
}
async getAllAccounts(): Promise<AirtableAccount[]> {
async getAllAccounts(): Promise<AirtableAccount[]> {
const recordArray: Array<AirtableAccount> = []
const account = Object.assign(new AirtableAccount(), accountJson)
recordArray.push(account)
@@ -159,7 +185,7 @@ export class AirtableClient {
} else {
throw new ConnectorError("Account not found", ConnectorErrorType.NotFound)
}
}
async getAccountSchema(): Promise<StdAccountDiscoverSchemaOutput> {
@@ -184,37 +210,52 @@ export class AirtableClient {
}
```
The method signatures are exactly the same on this mock file as the signature sin the "real" [airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts). The only difference is that the response objects from all the calls are made without actually calling any external dependencies, so it can be run quickly in a unit test without having to make api calls to a real client
The method signatures are exactly the same on this mock file as the signatures
in the "real"
[airtable.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/airtable.ts).
The only difference is that the response objects from all the calls are made
without actually calling any external dependencies, so it can be run quickly in
a unit test without having to make API calls to a real client.
### Define json mock objects
The responses are stored in directly imported json files. This helps keep the code focused on the logic and allows the response objects to be more easily generated directly from a tool like Postman without requiring any major formatting of the response. Enable this situation by setting ```"resolveJsonModule": true``` in your ```tsconfig.json```. The following response file is an example:
The responses are stored in directly imported json files. This helps keep the
code focused on the logic and allows the response objects to be more easily
generated directly from a tool like Postman without requiring any major
formatting of the response. Enable this situation by setting
`"resolveJsonModule": true` in your `tsconfig.json`. The following response file
is an example:
[account.json](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/src/__mocks__/account.json)
```json
{
"airtableId": "1234",
"displayName": "Test User",
"email": "test@test.com",
"id": "1234",
"enabled": true,
"locked": false,
"department": "accounting",
"firstName": "test",
"lastName": "user",
"password": "password1234",
"entitlments": ["ent1", "ent2"]
"airtableId": "1234",
"displayName": "Test User",
"email": "test@test.com",
"id": "1234",
"enabled": true,
"locked": false,
"department": "accounting",
"firstName": "test",
"lastName": "user",
"password": "password1234",
"entitlments": ["ent1", "ent2"]
}
```
### Use the mock in your tests
The mock is defined in the test file, and Jest does the rest. Jest overrides all the calls to use the methods in the ```__mocks__``` folder.
The mock is defined in the test file, and Jest does the rest. Jest overrides all
the calls to use the methods in the `__mocks__` folder.
[index.spec.ts](https://github.com/sailpoint-oss/airtable-example-connector/blob/main/test/index.spec.ts)
```javascript
import { connector } from '../src/index'
import { StandardCommand } from '@sailpoint/connector-sdk'
import { PassThrough } from 'stream'
import { connector } from "../src/index";
import { StandardCommand } from "@sailpoint/connector-sdk";
import { PassThrough } from "stream";
// setup your mock object
jest.mock('../src/airtable')
```
jest.mock("../src/airtable");
```

View File

@@ -6,29 +6,56 @@ sidebar_label: SaaS Connectivity
sidebar_position: 4
sidebar_class_name: saasConnectivity
keywords: ["connectivity", "connectors"]
description: SaaS Connectivity is a cloud based connector runtime that makes developing and deploying web service connectors easy.
description:
SaaS Connectivity is a cloud based connector runtime that makes developing and
deploying web service connectors easy.
slug: /docs/saas-connectivity
tags: ["Connectivity"]
---
SaaS Connectivity is a cloud based connector runtime that makes developing and deploying web service connectors easier than Connector 1.0 does. However, because the cloud hosts SaaS Connectivity, not a Virtual Appliance (VA), SaaS Connectivity is limited in the types of applications it can connect to. For example, you cannot use SaaS Connectivity to connect to on-prem services that can only communicate within an intranet (no public internet access). This excludes JDBC and Mainframe applications, to name a few.
SaaS Connectivity is a cloud based connector runtime that makes developing and
deploying web service connectors easier than Connector 1.0 does. However,
because the cloud hosts SaaS Connectivity, not a Virtual Appliance (VA), SaaS
Connectivity is limited in the types of applications it can connect to. For
example, you cannot use SaaS Connectivity to connect to on-prem services that
can only communicate within an intranet (no public internet access). This
excludes JDBC and Mainframe applications, to name a few.
## What Are Connectors
Connectors are the bridges between the SailPoint Identity Now (IDN) SaaS platform and the source systems that IDN needs to communicate with and aggregate data from. An example of a source system IDN may need to communicate with would be an Oracle HR system or GitHub. In these cases, IDN synchronizes data between systems to ensure account entitlements and state are correct through the organization.
Connectors are the bridges between the SailPoint IdentityNow (IDN) SaaS
platform and the source systems that IDN needs to communicate with and aggregate
data from. An example of a source system IDN may need to communicate with would
be an Oracle HR system or GitHub. In these cases, IDN synchronizes data between
systems to ensure account entitlements and state are correct through the
organization.
## Why Are We Introducing a New Connector
VA connectors always communicate with external sources through the Virtual Appliance (VA) as seen in the diagram below:
VA connectors always communicate with external sources through the Virtual
Appliance (VA) as seen in the diagram below:
![Old Connectivity](./img/old_connectivity_diagram.png)
VA connectors can be disadvantageous because you need an on-prem virtual appliance to have any external connectivity with them, even when that connectivity is a SaaS service like Salesforce.com.
VA connectors can be disadvantageous because you need an on-prem virtual
appliance to have any external connectivity with them, even when that
connectivity is a SaaS service like Salesforce.com.
It is also challenging to create a custom connector in the VA Connector framework. Therefore, there are generic connectors available such as flat file, JDBC and webservice connectors. These options provide flexibility in configuring almost any source, but this configuration can be complex. For example, when you create a JDBC connector, you must use SQL to define the data model.
It is also challenging to create a custom connector in the VA Connector
framework. Therefore, there are generic connectors available such as flat file,
JDBC and webservice connectors. These options provide flexibility in configuring
almost any source, but this configuration can be complex. For example, when you
create a JDBC connector, you must use SQL to define the data model.
The new Cloud connectors work differently - they run on the IDN platform instead (see diagram below).
The new Cloud connectors work differently - they run on the IDN platform instead
(see diagram below).
![New Connectivity](./img/new_connectivity_diagram.png)
With this process, you can run an entire IDN instance without a VA. The new connector also includes a CLI tool to manage cloud connectors and an SDK to create custom connectors. Because it is simpler to create a custom connector, you can create specific connectors for a variety of sources, and the connectors' configuration can be much simpler. For example, you can now configure a formerly complicated webservice connector by providing two parameters (Base URL and API Key) in a custom cloud connector.
With this process, you can run an entire IDN instance without a VA. The new
connector also includes a CLI tool to manage cloud connectors and an SDK to
create custom connectors. Because it is simpler to create a custom connector,
you can create specific connectors for a variety of sources, and the connectors'
configuration can be much simpler. For example, you can now configure a formerly
complicated webservice connector by providing two parameters (Base URL and API
Key) in a custom cloud connector.

View File

@@ -6,11 +6,14 @@ sidebar_label: Postman Collection
sidebar_position: 6
sidebar_class_name: postmanCollection
keywords: ["connectivity", "connectors", "postman"]
description: Use the following Postman Collection file to run tests for each of the commands locally.
description:
Use the following Postman Collection file to run tests for each of the
commands locally.
slug: /docs/saas-connectivity/postman-collection
tags: ["Connectivity", "Postman"]
---
Use the following Postman Collection file to run tests for each of the commands locally.
Use the following Postman Collection file to run tests for each of the commands
locally.
[Postman Collection](../../../files/SaaS_Connectivity.postman_collection)
[Postman Collection](../../../files/SaaS_Connectivity.postman_collection)

View File

@@ -6,26 +6,39 @@ sidebar_label: Prerequisites
sidebar_position: 1
sidebar_class_name: prerequisites
keywords: ["connectivity", "connectors", "prerequisites"]
description: These are some prerequisites you must have before you start building SaaS Connectors.
description:
These are some prerequisites you must have before you start building SaaS
Connectors.
slug: /docs/saas-connectivity/prerequisites
tags: ["Connectivity"]
---
## Packages
To build the CLI, the following packages are required:
- Golang >= 1.17
- Make >= 3.81
To develop a connector, the following packages are required:
- Node >= 14.17.3
## IDE
Although you can develop connectors in a text editor, use an Integrated Development Environment (IDE) for a better experience. There are many IDEs that support Javascript/Typescript, including [Visual Sudio Code](https://code.visualstudio.com/Download), a free IDE with native support for Javascript/Typescript. VS Code provides syntax highlight, debugging, hints, code completion, and other helpful options.
Although you can develop connectors in a text editor, use an Integrated
Development Environment (IDE) for a better experience. There are many IDEs that
support Javascript/Typescript, including
[Visual Studio Code](https://code.visualstudio.com/Download), a free IDE with
native support for Javascript/Typescript. VS Code provides syntax highlighting,
debugging, hints, code completion, and other helpful options.
## Install CLI
SailPoint provides a CLI tool to manage the connectors' lifecycles. To install and set up the CLI, follow the instructions in this repository's README file (TBD. This repository is not public yet): [SailPoint CLI on GitHub](https://github.com/sailpoint-oss/sp-connector-cli)
SailPoint provides a CLI tool to manage the connectors' lifecycles. To install
and set up the CLI, follow the instructions in this repository's README file
(TBD. This repository is not public yet):
[SailPoint CLI on GitHub](https://github.com/sailpoint-oss/sp-connector-cli)
## Create New Project
@@ -35,11 +48,15 @@ To create an empty connector project, run the following command:
sp conn init my-first-project
```
The CLI init command creates a new folder with your project name in the location where you run the command.
The CLI init command creates a new folder with your project name in the location
where you run the command.
Run npm install to change the directory to the project folder and install the dependencies. You may need to provide your GitHub credentials because the CLI tool depends on a SailPoint internal GitHub repository.
Run npm install to change the directory to the project folder and install the
dependencies. You may need to provide your GitHub credentials because the CLI
tool depends on a SailPoint internal GitHub repository.
### Source Files
The earlier command creates the initial project source directory below:
```
@@ -55,15 +72,32 @@ my-first-project
```
This directory contains three main files:
- **index.ts:** Use this file to register all the available commands the connector supports, provide the necessary configuration options to the client code implementing the API for the source, and pass data the client code obtains to IdentityNow. This file can either use a vendor supplied client Software Development Kit (SDK) to interact with the web service or reference custom client code within the project.
- **my-client.ts:** Use this template to create custom client code to interact with a web service's APIs. If the web service does not provide an SDK, you can modify this file to implement the necessary API calls to interact with the source web service.
- **index.ts:** Use this file to register all the available commands the
connector supports, provide the necessary configuration options to the client
code implementing the API for the source, and pass data the client code
obtains to IdentityNow. This file can either use a vendor supplied client
Software Development Kit (SDK) to interact with the web service or reference
custom client code within the project.
- **connector-spec.ts** This file describes how the connector works to IDN. More information about the connector spec is available in the next section. At a high level, it has the information for the following:
- What commands the connector supports
- What config values the user must provide when creating the connector
- Defining the account schema
- Defining the entitlement schema
- Defining the account create template that maps fields from IDN to the connector
- **my-client.ts:** Use this template to create custom client code to interact
with a web service's APIs. If the web service does not provide an SDK, you can
modify this file to implement the necessary API calls to interact with the
source web service.
These files are templates that provide guidance to begin implementing the connector on the target web service. Although you can implement a connector's entire functionality within these three files (or even just one if the web service provides an SDK), you can implement your own code architecture, like breaking out common utility functions into a separate file or creating separate files for each operation.
- **connector-spec.ts** This file describes how the connector works to IDN. More
information about the connector spec is available in the next section. At a
high level, it has the information for the following:
- What commands the connector supports
- What config values the user must provide when creating the connector
- Defining the account schema
- Defining the entitlement schema
- Defining the account create template that maps fields from IDN to the
connector
These files are templates that provide guidance to begin implementing the
connector on the target web service. Although you can implement a connector's
entire functionality within these three files (or even just one if the web
service provides an SDK), you can implement your own code architecture, like
breaking out common utility functions into a separate file or creating separate
files for each operation.

View File

@@ -6,18 +6,30 @@ sidebar_label: Test, Build, and Deploy
sidebar_position: 2
sidebar_class_name: testBuildDeploy
keywords: ["connectivity", "connectors", "test", "build", "deploy"]
description: As you implement command handlers, you must test them. The connector SDK provides some utility methods to locally run your connector to test, build, and deploy.
description:
As you implement command handlers, you must test them. The connector SDK
provides some utility methods to locally run your connector to test, build,
and deploy.
slug: /docs/saas-connectivity/test-build-deploy
tags: ["Connectivity"]
---
## Testing Your Connector
You can use the following Postman Collection file to locally run tests for each of the commands.
You can use the following Postman Collection file to locally run tests for each
of the commands.
[Postman Collection](./broken-link.md)
[Postman Collection](./assets/SaaS_Connectivity.postman_collection)
As you implement command handlers, you must test them. The connector SDK provides some utility methods to locally run your connector. To start, run ```npm run dev``` within the connector project folder. This script locally starts an Express server on port 3000, which can be used to invoke a command against the connector. You do not need to restart this process after making changes to connector code. Once the Express server is started, you can send ```POST``` requests to ```localhost:3000``` and test your command handlers. For example, you can run ```POST localhost:3000``` with the following payload to run the stdAccountRead handler method.
As you implement command handlers, you must test them. The connector SDK
provides some utility methods to locally run your connector. To start, run
`npm run dev` within the connector project folder. This script locally starts an
Express server on port 3000, which can be used to invoke a command against the
connector. You do not need to restart this process after making changes to
connector code. Once the Express server is started, you can send `POST` requests
to `localhost:3000` and test your command handlers. For example, you can run
`POST localhost:3000` with the following payload to run the stdAccountRead
handler method.
```json
{
@@ -30,22 +42,35 @@ As you implement command handlers, you must test them. The connector SDK provide
}
}
```
- **type:** The command handler's name. It also refers to the operation being performed.
- **type:** The command handler's name. It also refers to the operation being
performed.
- **input:** Input to provide to the command handler.
- **config:** The configuration values required to test locally. A ```token``` value is not required, but the default project specifies ```token```, so you must include it in your request to begin.
- **config:** The configuration values required to test locally. A `token` value
is not required, but the default project specifies `token`, so you must
include it in your request to begin.
## Create and Upload Connector Bundle
Follow these steps to use the CLI to package a connector bundle, create it in your IdentityNow org, and upload it to IdentityNow.
Follow these steps to use the CLI to package a connector bundle, create it in
your IdentityNow org, and upload it to IdentityNow.
### Package Connector Files
You must compress the files in the connector project into a zip file before uploading them to IdentityNow.
Use the CLI to run ```npm run pack-zip``` to build and package the connector bundle. Put the resulting zip file in the ```dist``` folder.
You must compress the files in the connector project into a zip file before
uploading them to IdentityNow.
Use the CLI to run `npm run pack-zip` to build and package the connector bundle.
Put the resulting zip file in the `dist` folder.
### Create Connector In Your Org
Before uploading the zip file, you must create an entry for the connector in your IdentityNow org. Run ```sp conn create "my-project"``` to create a connector entry.
The response to this command contains a connector ID you can use to manage this connector.
Before uploading the zip file, you must create an entry for the connector in
your IdentityNow org. Run `sp conn create "my-project"` to create a connector
entry.
The response to this command contains a connector ID you can use to manage this
connector.
```bash
$ sp conn create "example-connector"
@@ -73,10 +98,13 @@ $ sp conn list
```
### Upload Connector Zip File to IdentityNow
Run ```sp conn upload -c [connectorID | connectorAlias] -f dist/[connector filename].zip``` to upload the zip file built from the previous step to IdentityNow.
Run
`sp conn upload -c [connectorID | connectorAlias] -f dist/[connector filename].zip`
to upload the zip file built from the previous step to IdentityNow.
```bash
$ sp conn upload -c example-connector -f dist/example-connector-0.1.0.zip
$ sp conn upload -c example-connector -f dist/example-connector-0.1.0.zip
+--------------------------------------+---------+
| CONNECTOR ID | VERSION |
+--------------------------------------+---------+
@@ -84,8 +112,10 @@ $ sp conn upload -c example-connector -f dist/example-connector-0.1.0.zip
+--------------------------------------+---------+
```
The first version upload of connector zip file also creates the ```latest``` tag, pointing to the latest version of the connector file. After uploading the connector bundle zip file, you can run ```sp conn tags list -c example-connector``` to see the connector tags.
The first version upload of connector zip file also creates the `latest` tag,
pointing to the latest version of the connector file. After uploading the
connector bundle zip file, you can run `sp conn tags list -c example-connector`
to see the connector tags.
```bash
$ sp conn tags list -c example-connector
@@ -97,10 +127,17 @@ $ sp conn tags list -c example-connector
```
## Test Your Connector in IdentityNow
Follow these steps to test a connector bundle in both IdentityNow and the IdentityNow user interface (UI).
Follow these steps to test a connector bundle in both IdentityNow and the
IdentityNow user interface (UI).
### Test Your Connector Bundle In IdentityNow
The connector CLI provides ways to test invoking commands with any connector upload version. Before running a command, create a file, **config.json**, in the root project folder. Include any configuration items required to interact with the target web service in this file, such as API token, username, password, organization, version, etc. The following snippet is an example:
The connector CLI provides ways to test invoking commands with any connector
upload version. Before running a command, create a file, **config.json**, in the
root project folder. Include any configuration items required to interact with
the target web service in this file, such as API token, username, password,
organization, version, etc. The following snippet is an example:
```json
{
@@ -108,15 +145,17 @@ The connector CLI provides ways to test invoking commands with any connector upl
}
```
This file is required and requires at least one key value even if your connector does not require anything.
This file is required and requires at least one key value even if your connector
does not require anything.
Next, invoke the command using the connector ID and config.json. For example, this command invokes std:account:list command on the connector:
Next, invoke the command using the connector ID and config.json. For example,
this command invokes std:account:list command on the connector:
```
sp connectors invoke account-list -c example-connector -p config.json
```
You will receive a list of JSON objects for each account the connector contains.
You will receive a list of JSON objects for each account the connector contains.
```bash
$ sp connectors invoke account-list -c example-connector -p config.json
@@ -124,11 +163,16 @@ $ sp connectors invoke account-list -c example-connector -p config.json
{"key":{"simple":{"id":"john.doe"}},"disabled":false,"locked":false,"attributes":{"id":"john.doe","displayName":"John Doe","entitlements":["administrator","sailpoint"]}}
```
>⚠️ Sensitive information!
> ⚠️ Sensitive information!
>
> Ensure that you add config.json to your .gitignore file so you do not accidentally store secrets in your code repository.
> Ensure that you add config.json to your .gitignore file so you do not
> accidentally store secrets in your code repository.
## Test Your Connector from IdentityNow UI
Go to your IdentityNow org's source section. Create a source from the connector you just uploaded. This connector will display in the dropdown list: **example-connector (tag: latest)**
After creating a source, you can test connection, aggregate account, etc. from the IdentityNow UI.
Go to your IdentityNow org's source section. Create a source from the connector
you just uploaded. This connector will display in the dropdown list:
**example-connector (tag: latest)**
After creating a source, you can test connection, aggregate account, etc.
from the IdentityNow UI.

View File

@@ -12,11 +12,12 @@ tags: ["Transforms", "Guides"]
# Transform Guides
Not sure how to use transforms yet? Read these guides to see how you can use transforms and learn how to get started!
Not sure how to use transforms yet? Read these guides to see how you can use
transforms and learn how to get started!
```mdx-code-block
import DocCardList from '@theme/DocCardList';
import {useCurrentSidebarCategory} from '@docusaurus/theme-common';
<DocCardList items={useCurrentSidebarCategory().items}/>
```
```

View File

@@ -13,22 +13,28 @@ tags: ["Transforms", "Guides", "Password"]
## Overview
In this guide, you will learn how to create a nested transform in order to generate a temporary password from a user's attributes.
In this guide, you will learn how to create a nested transform in order to
generate a temporary password from a user's attributes.
- The authoritative source's data feed includes both a first_name and a last_name field for every worker.
- The authoritative source's data feed includes both a first_name and a
last_name field for every worker.
- A hire date is provided within the authoritative source data feed: the hire_date field is provided for every worker and is in the format of YYYY-MM-DD.
- A hire date is provided within the authoritative source data feed: the
hire_date field is provided for every worker and is in the format of
YYYY-MM-DD.
For an initial (temporary) password, set a static value driven off a formula that can be communicated to the new hire by email. This is the formula:
For an initial (temporary) password, set a static value driven off a formula
that can be communicated to the new hire by email. This is the formula:
- The first character is the user's first initial in lowercase.
- The user's last name comes next with the first character in uppercase.
- The user's last name comes next with the first character in uppercase.
- The user's two-digit start month comes next (from the user's hire date).
- The last part of the password is a static string: "RstP\*!7".
## Create the Example Source from a Delimited File
This is the CSV file you will upload to create your source for testing this transform:
This is the CSV file you will upload to create your source for testing this
transform:
| id | email | first_name | last_name | hire_date |
| ------ | ---------------------------- | ---------- | --------- | ---------- |
@@ -36,29 +42,37 @@ This is the CSV file you will upload to create your source for testing this tran
| 100011 | frank.williams@sailpoint.com | Frank | Williams | 2020-07-10 |
| 100012 | paddy.lowe@sailpoint.com | Paddy | Lowe | 2020-09-20 |
To upload your CSV source, go to **Admin** > **Connections** > **Sources** and select **Create New**.
To upload your CSV source, go to **Admin** > **Connections** > **Sources** and
select **Create New**.
Fill in the form to create a source:
![Create Source](./img/create_source.png)
The source configuration workflow will appear. Keep all the default settings and under **Review and Finish** on the left hand side, select **Exit Configuration**.
The source configuration workflow will appear. Keep all the default settings and
under **Review and Finish** on the left hand side, select **Exit
Configuration**.
## Upload Schema and Accounts
In your newly created source, go to **Import Data** > **Account Schema**. Under **Options**, select **Upload Schema**. Locate the CSV file from earlier in this document.
In your newly created source, go to **Import Data** > **Account Schema**. Under
**Options**, select **Upload Schema**. Locate the CSV file from earlier in this
document.
Once your account schema is uploaded, you will see your available attributes to use within the transform.
Once your account schema is uploaded, you will see your available attributes to
use within the transform.
![Create Source](./img/account_schema.png)
Now you can upload your accounts. Go to **Import Data** > **Import Accounts** > **Import Data**. Locate the CSV file from earlier in this document.
Now you can upload your accounts. Go to **Import Data** > **Import Accounts** >
**Import Data**. Locate the CSV file from earlier in this document.
![Account Summary](./img/account_summary.png)
## Create an Identity Profile for the Source
Create an identity profile for your source. Go to **Admin** > **Identities** > **Identity Profiles** and select **New**.
Create an identity profile for your source. Go to **Admin** > **Identities** >
**Identity Profiles** and select **New**.
![Identity Profile](./img/account_summary.png)
@@ -66,11 +80,21 @@ Fill out the form and select the source you created earlier.
## Create the Transform
To create the transform for generating the user's temporary password, you will use multiple different operations. You are going to break it out into pieces and then put it all together at the end. The [static transform](../operations/static.md) will be your main transform. You will use nested transforms to create each part of the password and then use those variables created in the final value.
To create the transform for generating the user's temporary password, you will
use multiple different operations. You are going to break it out into pieces and
then put it all together at the end. The
[static transform](../operations/static.md) will be your main transform. You
will use nested transforms to create each part of the password and then use
those variables created in the final value.
### The First Character is the User's First Initial in Lowercase
The first part of the password is the user's first initial in lowercase. You can create that attribute by using the [substring operation](../operations/substring.md) to get the first initial and then passing that attribute as input into the [lower operation](../operations/lower.md). In this example, the variable is `firstInitialLowercase`, and you will use it later in your static string.
The first part of the password is the user's first initial in lowercase. You
can create that attribute by using the
[substring operation](../operations/substring.md) to get the first initial and
then passing that attribute as input into the
[lower operation](../operations/lower.md). In this example, the variable is
`firstInitialLowercase`, and you will use it later in your static string.
**First Initial Variable**
@@ -129,7 +153,12 @@ The first part of the password is the user's first intitial in lowercase. You ca
### The User's Last Name Comes Next with the First Character in Uppercase
Adding to the transform, you can create a variable for the first character of the last name. You can do so by using the [substring operation](/idn/docs/transforms/operations/substring) and the [upper operation](/idn/docs/transforms/operations/upper). Once you have the variable `lastInitialUppercase` created, you can add that variable to the end of the static string in the value key.
Adding to the transform, you can create a variable for the first character of
the last name. You can do so by using the
[substring operation](/idn/docs/transforms/operations/substring) and the
[upper operation](/idn/docs/transforms/operations/upper). Once you have the
variable `lastInitialUppercase` created, you can add that variable to the end of
the static string in the value key.
**Last Initial Variable**
@@ -155,7 +184,7 @@ Adding to the transform, you can create a variable for the first character of th
}
```
**Transform Body**
**Transform Body**
```json
{
@@ -205,7 +234,10 @@ Adding to the transform, you can create a variable for the first character of th
}
```
You also need the end of the last name without the first character you already have capitalized from the last step. You can get that by using the substring method and providing only the begin key, which will return everything after the index you specify.
You also need the end of the last name without the first character you already
have capitalized from the last step. You can get that by using the substring
method and providing only the begin key, which will return everything after the
index you specify.
**Last Name Variable**
@@ -290,7 +322,10 @@ You also need the end of the last name without the first character you already h
### The User's Two-Digit Start Month Comes Next, Taken from the Hire_Date
To get the two-digit start month, use the [split operation](/idn/docs/transforms/operations/split). The `hire_date` is in the format of `YYYY-MM-DD`. To get the month, split on `-` and set the index to return as 1.
To get the two-digit start month, use the
[split operation](/idn/docs/transforms/operations/split). The `hire_date` is in
the format of `YYYY-MM-DD`. To get the month, split on `-` and set the index
to return as 1.
**Hire Date Month Variable**
@@ -390,7 +425,8 @@ To get the two-digit start month, use the [split operation](/idn/docs/transforms
### The Last Part of the Password is a Static String: "RstP\*!7"
To add the final part of the password, which is the static string `RstP\*!7`, use the static operation.
To add the final part of the password, which is the static string `RstP\*!7`,
use the static operation.
**Static String Variable**
@@ -492,7 +528,12 @@ To add the final part of the password, which is the static string `RstP\*!7`, us
To verify your transform is working, create the transform through the REST API.
To call the APIs for transforms, you need a personal access token and your tenant's name to provide with the request. For more information about how to get a personal access token, see [Personal Access Tokens](/idn/docs/getting-started/authentication#personal-access-tokens). For more information about how to get the name of your tenant, see [Finding Your Organization Tenant Name](/idn/docs/getting-started#finding-your-orgtenant-name).
To call the APIs for transforms, you need a personal access token and your
tenant's name to provide with the request. For more information about how to get
a personal access token, see
[Personal Access Tokens](/idn/docs/getting-started/authentication#personal-access-tokens).
For more information about how to get the name of your tenant, see
[Finding Your Organization Tenant Name](/idn/docs/getting-started#finding-your-orgtenant-name).
```bash
curl --location --request POST 'https://{tenant}.api.identitynow.com/v3/transforms' \
@@ -578,22 +619,31 @@ curl --location --request POST 'https://{tenant}.api.identitynow.com/v3/transfor
}'
```
Once you have created the transform successfully, you can apply the new transform and preview what the password will look like for each user.
Once you have created the transform successfully, you can apply the new
transform and preview what the password will look like for each user.
Log in to your IdentityNow tenant and go to **Admin** > **Identities** > **Identity Profiles**. Select the name of the profile you created earlier, Transform Example. Select the **Mappings** tab, scroll to the bottom and select **Add New Attribute**. Name the attribute `Temporary Password`. To save the new mappings, you must fill out the id, email, first name and last name mappings.
Log in to your IdentityNow tenant and go to **Admin** > **Identities** >
**Identity Profiles**. Select the name of the profile you created earlier,
Transform Example. Select the **Mappings** tab, scroll to the bottom and select
**Add New Attribute**. Name the attribute `Temporary Password`. To save the new
mappings, you must fill out the id, email, first name and last name mappings.
![Attribute Mapping](./img/temporary_password_attribute_mapping.png)
Once you have saved the mappings, select **Preview** in the upper right of the page and select the Lewis Hamilton identity under **Identity to Preview**. The temporaryPassword shows up as `lHamilton12RstP*!7`.
Once you have saved the mappings, select **Preview** in the upper right of the
page and select the Lewis Hamilton identity under **Identity to Preview**. The
temporaryPassword shows up as `lHamilton12RstP*!7`.
This is an example table of values with the temporary password for each user:
| id | email | first_name | last_name | hire_date | temporaryPassword |
| ------ | ---------------------------- | ---------- | --------- | ---------- | ------------------ |
| 100010 | lewis.hamilton@sailpoint.com | Lewis | hamilton | 2020-12-12 | lHamilton12RstP*!7 |
| 100011 | frank.williams@sailpoint.com | Frank | Williams | 2020-07-10 | fWilliams07RstP*!7 |
| 100012 | paddy.lowe@sailpoint.com | Paddy | Lowe | 2020-09-20 | pLowe09RstP*!7 |
| id | email | first_name | last_name | hire_date | temporaryPassword |
| ------ | ---------------------------- | ---------- | --------- | ---------- | ------------------- |
| 100010 | lewis.hamilton@sailpoint.com | Lewis | hamilton | 2020-12-12 | lHamilton12RstP\*!7 |
| 100011 | frank.williams@sailpoint.com | Frank | Williams | 2020-07-10 | fWilliams07RstP\*!7 |
| 100012 | paddy.lowe@sailpoint.com | Paddy | Lowe | 2020-09-20 | pLowe09RstP\*!7 |
## Next Steps
Looking for more examples or having trouble with one of your complex transforms? Reach out in the [Developer Community Forum](https://developer.sailpoint.com/discuss/).
Looking for more examples or having trouble with one of your complex transforms?
Reach out in the
[Developer Community Forum](https://developer.sailpoint.com/discuss/).

View File

@@ -13,32 +13,44 @@ tags: ["Transforms", "Guides", "First"]
## Overview
In this guide, you will learn how to use [IdentityNow's Transform REST APIs](/idn/api/v3/transforms) to do the following:
In this guide, you will learn how to use
[IdentityNow's Transform REST APIs](/idn/api/v3/transforms) to do the following:
* [List Transforms in Your IdentityNow Tenant](#list-transforms-in-your-identitynow-tenant)
* [Create a Transform](#create-a-transform)
* [Get Transform by ID](#get-transform-by-id)
* [Update a Transform](#update-a-transform)
* [Delete a Transform](#delete-a-transform)
- [List Transforms in Your IdentityNow Tenant](#list-transforms-in-your-identitynow-tenant)
- [Create a Transform](#create-a-transform)
- [Get Transform by ID](#get-transform-by-id)
- [Update a Transform](#update-a-transform)
- [Delete a Transform](#delete-a-transform)
## List Transforms in your IdentityNow Tenant
To call the APIs for transforms, you need a personal access token and your tenant's name to provide with the request. For more information about how to get a personal access token, see [Personal Access Tokens](../../../../api/authentication.md#personal-access-tokens). For more information about how to get the name of your tenant, see [Finding Your Organization Tenant Name](../../../../api/getting-started.md#finding-your-orgtenant-name).
To call the APIs for transforms, you need a personal access token and your
tenant's name to provide with the request. For more information about how to get
a personal access token, see
[Personal Access Tokens](../../../../api/authentication.md#personal-access-tokens).
For more information about how to get the name of your tenant, see
[Finding Your Organization Tenant Name](../../../../api/getting-started.md#finding-your-orgtenant-name).
Before you create your first custom transform, see what transforms are already in the tenant. You can get this information by calling the [List Transforms API](/idn/api/v3/get-transforms-list).
Before you create your first custom transform, see what transforms are already
in the tenant. You can get this information by calling the
[List Transforms API](/idn/api/v3/get-transforms-list).
```bash
curl --location --request GET 'https://{tenant}.api.identitynow.com/v3/transforms' --header 'Authorization: Bearer {token}'
```
The response body contains an array of transform objects containing the following values:
The response body contains an array of transform objects containing the
following values:
- **id** - The id of the transform
- **name** - The name of the transform
- **type** - The type of transform, see [Transform Operations](../operations/index.md)
- **type** - The type of transform, see
[Transform Operations](../operations/index.md)
- **attributes** - Object of attributes related to the transform
- **internal** - A `true` or `false` attribute to determine whether the transform is internal or custom
- **true** - The transform is internal and cannot be modified without contacting SailPoint.
- **internal** - A `true` or `false` attribute to determine whether the
transform is internal or custom
- **true** - The transform is internal and cannot be modified without
contacting SailPoint.
- **false** - The transform is custom and can be modified with the API.
```json
@@ -79,7 +91,11 @@ The response body contains an array of transform objects containing the followin
## Create a Transform
This [lookup transform](../operations/lookup.md) takes the input value of an attribute, locates it in the table provided, and returns its corresponding value. If the transform does not find your input value in the lookup table, it returns the default value. Replace `{tenant}` and `{token}` with the values you got earlier.
This [lookup transform](../operations/lookup.md) takes the input value of an
attribute, locates it in the table provided, and returns its corresponding
value. If the transform does not find your input value in the lookup table, it
returns the default value. Replace `{tenant}` and `{token}` with the values you
got earlier.
```bash
curl --location --request POST 'https://{tenant}.api.identitynow.com/v3/transforms' \
@@ -103,30 +119,34 @@ curl --location --request POST 'https://{tenant}.api.identitynow.com/v3/transfor
```json
{
"id": "b23788a0-41a2-453b-89ae-0d670fa0cb6a",
"name": "Country Code To Timezone",
"type": "lookup",
"attributes": {
"table": {
"EN-US": "CST",
"ES-MX": "CST",
"EN-GB": "GMT",
"default": "GMT"
}
},
"internal": false
"id": "b23788a0-41a2-453b-89ae-0d670fa0cb6a",
"name": "Country Code To Timezone",
"type": "lookup",
"attributes": {
"table": {
"EN-US": "CST",
"ES-MX": "CST",
"EN-GB": "GMT",
"default": "GMT"
}
},
"internal": false
}
```
Once you have created the transform, you can find it in IdentityNow by going to **Admin** > **Identities** > **Identity Profiles** > (An Identity Profile) > **Mappings** (tab).
Once you have created the transform, you can find it in IdentityNow by going to
**Admin** > **Identities** > **Identity Profiles** > (An Identity Profile) >
**Mappings** (tab).
![Mappings Tab](./img/mappings_tab.png)
For more information about creating transforms, see [Create Transform](/idn/api/v3/create-transform).
For more information about creating transforms, see
[Create Transform](/idn/api/v3/create-transform).
## Get Transform by ID
To get the transform created with the API, call the `GET` endpoint, using the `id` returned by the create API response.
To get the transform created with the API, call the `GET` endpoint, using the
`id` returned by the create API response.
```bash
curl --location --request GET 'https://{tenant}.api.identitynow.com/v3/transforms/b23788a0-41a2-453b-89ae-0d670fa0cb6a' \
@@ -137,26 +157,28 @@ curl --location --request GET 'https://{tenant}.api.identitynow.com/v3/transform
```json
{
"id": "b23788a0-41a2-453b-89ae-0d670fa0cb6a",
"name": "Country Code To Timezone",
"type": "lookup",
"attributes": {
"table": {
"EN-US": "CST",
"ES-MX": "CST",
"EN-GB": "GMT",
"default": "GMT"
}
},
"internal": false
"id": "b23788a0-41a2-453b-89ae-0d670fa0cb6a",
"name": "Country Code To Timezone",
"type": "lookup",
"attributes": {
"table": {
"EN-US": "CST",
"ES-MX": "CST",
"EN-GB": "GMT",
"default": "GMT"
}
},
"internal": false
}
```
For more information about getting a transform by its `id`, see the API [Transform by ID](/idn/api/v3/get-transform).
For more information about getting a transform by its `id`, see the API
[Transform by ID](/idn/api/v3/get-transform).
## Update a Transform
To update a transform, call the `PUT` endpoint with the updated transform body. This example adds another item to the lookup table, `EN-CA.`
To update a transform, call the `PUT` endpoint with the updated transform body.
This example adds another item to the lookup table, `EN-CA.`
:::caution
@@ -187,35 +209,42 @@ curl --location --request PUT 'https://{tenant}.api.identitynow.com/v3/transform
```json
{
"id": "b23788a0-41a2-453b-89ae-0d670fa0cb6a",
"name": "Country Code To Timezone",
"type": "lookup",
"attributes": {
"table": {
"EN-US": "CST",
"ES-MX": "CST",
"EN-GB": "GMT",
"EN-CA": "MST",
"default": "GMT"
}
},
"internal": false
"id": "b23788a0-41a2-453b-89ae-0d670fa0cb6a",
"name": "Country Code To Timezone",
"type": "lookup",
"attributes": {
"table": {
"EN-US": "CST",
"ES-MX": "CST",
"EN-GB": "GMT",
"EN-CA": "MST",
"default": "GMT"
}
},
"internal": false
}
```
For more information about updating transforms, see [Update a transform](/idn/api/v3/update-transform).
For more information about updating transforms, see
[Update a transform](/idn/api/v3/update-transform).
## Delete a Transform
To delete the transform, call the DELETE endpoint with the `id` of the transform to delete. The server responds with a 204 when the transform is successfully removed.
To delete the transform, call the DELETE endpoint with the `id` of the transform
to delete. The server responds with a 204 when the transform is successfully
removed.
```bash
curl --location --request DELETE 'https://{tenant}.api.identitynow.com/v3/transforms/b23788a0-41a2-453b-89ae-0d670fa0cb6a' \
--header 'Authorization: Bearer {token}'
```
For more information about deleting transforms, see the API [Delete Transform](/idn/api/v3/delete-transform).
For more information about deleting transforms, see the API
[Delete Transform](/idn/api/v3/delete-transform).
## Next Steps
Congratulations on creating your first transform! Now that you understand the lifecycle of transforms, see [complex use case](./temporary-password.md) to learn how to use a nested transform structure to create a temporary password that can be sent to each user.
Congratulations on creating your first transform! Now that you understand the
lifecycle of transforms, see [complex use case](./temporary-password.md) to learn
how to use a nested transform structure to create a temporary password that can
be sent to each user.

View File

@@ -11,122 +11,182 @@ slug: /docs/transforms
tags: ["Transforms"]
---
# Building Transforms in IdentityNow
In SailPoint's cloud services, transforms allow you to manipulate attribute values while aggregating from or provisioning to a source. This guide provides a reference to help you understand the purpose, configuration, and usage of transforms.
In SailPoint's cloud services, transforms allow you to manipulate attribute
values while aggregating from or provisioning to a source. This guide provides a
reference to help you understand the purpose, configuration, and usage of
transforms.
## What Are Transforms
Transforms are configurable objects that define easy ways to manipulate attribute data without requiring you to write code. Transforms are configurable building blocks with sets of inputs and outputs:
Transforms are configurable objects that define easy ways to manipulate
attribute data without requiring you to write code. Transforms are configurable
building blocks with sets of inputs and outputs:
![What are Transforms 1](./img/what_are_transforms_1.png)
Because there is no code to write, an administrator can configure these by using a JSON object structure and uploading them into IdentityNow using [IdentityNow's Transform REST APIs](/idn/api/v3/transforms).
Because there is no code to write, an administrator can configure these by using
a JSON object structure and uploading them into IdentityNow using
[IdentityNow's Transform REST APIs](/idn/api/v3/transforms).
:::info
Sometimes transforms are referred to as Seaspray, the codename for transforms. IdentityNow Transforms and Seaspray are essentially the same.
Sometimes transforms are referred to as Seaspray, the codename for transforms.
IdentityNow Transforms and Seaspray are essentially the same.
:::
## How Transforms Work
Transforms typically have an input(s) and output(s). The way the transformation occurs mainly depends on the type of transform. Refer to [Operations in IdentityNow Transforms](./operations/index.md) for more information.
Transforms typically have an input(s) and output(s). The way the transformation
occurs mainly depends on the type of transform. Refer to
[Operations in IdentityNow Transforms](./operations/index.md) for more
information.
For example, a [Lower transform](./operations/lower.md) transforms any input text strings into lowercase versions as output. So if the input were `Foo`, the lowercase output of the transform would be `foo`:
For example, a [Lower transform](./operations/lower.md) transforms any input
text strings into lowercase versions as output. So if the input were `Foo`, the
lowercase output of the transform would be `foo`:
![How Transforms Work 1](./img/how_transforms_work_1.png)
There are other types of transforms too. For example, an [E.164 Phone transform](./operations/e164-phone.md) transforms any input phone number strings into an E.164 formatted version as output. So if the input were `(512) 346-2000`, the output would be `+1 5123462000`:
There are other types of transforms too. For example, an
[E.164 Phone transform](./operations/e164-phone.md) transforms any input phone
number strings into an E.164 formatted version as output. So if the input were
`(512) 346-2000`, the output would be `+1 5123462000`:
![How Transforms Work 2](./img/how_transforms_work_2.png)
### Multiple Transform Inputs
In the previous examples, each transform had a single input. Some transforms can specify more than one input. For example, the [Concat transform](./operations/concatenation.md) concatenates one or more strings together. If `Foo` and `Bar` were inputs, the transformed output would be `FooBar`:
In the previous examples, each transform had a single input. Some transforms can
specify more than one input. For example, the
[Concat transform](./operations/concatenation.md) concatenates one or more
strings together. If `Foo` and `Bar` were inputs, the transformed output would
be `FooBar`:
![How Transforms Work 3](./img/how_transforms_work_3.png)
### Complex Nested Transforms
For more complex use cases, a single transform may not be enough. It is possible to link several transforms together. IdentityNow calls these 'nested' transforms because they are transform objects within other transform objects.
For more complex use cases, a single transform may not be enough. It is possible
to link several transforms together. IdentityNow calls these 'nested' transforms
because they are transform objects within other transform objects.
An example of a nested transform would be using the previous [Concat transform](./operations/concatenation.md) and passing its output as an input to another [Lower transform](./operations/lower.md). If the inputs `Foo` and `Bar` were passed into the transforms, the ultimate output would be `foobar`, concatenated and in lowercase.
An example of a nested transform would be using the previous
[Concat transform](./operations/concatenation.md) and passing its output as an
input to another [Lower transform](./operations/lower.md). If the inputs `Foo`
and `Bar` were passed into the transforms, the ultimate output would be
`foobar`, concatenated and in lowercase.
![How Transforms Work 4](./img/how_transforms_work_4.png)
There is no hard limit for the number of transforms that can be nested. However, the more transforms applied, the more complex the nested transform will be, which can make it difficult to understand and maintain.
There is no hard limit for the number of transforms that can be nested. However,
the more transforms applied, the more complex the nested transform will be,
which can make it difficult to understand and maintain.
## Configuring Transform Behavior
Some transforms can specify an attributes map that configures the transform behavior. Each transform type has different configuration attributes and different uses. To better understand what is configurable per transform, refer to the Transform Types section and the associated Transform guide(s) that cover each transform.
Some transforms can specify an attributes map that configures the transform
behavior. Each transform type has different configuration attributes and
different uses. To better understand what is configurable per transform, refer
to the Transform Types section and the associated Transform guide(s) that cover
each transform.
It is possible to extend the earlier complex nested transform example. If a Replace transform, which replaces certain strings with replacement text, were added, and the transform were configured to replace `Bar` with `Baz` the output would be added as an input to the Concat and Lower transforms:
It is possible to extend the earlier complex nested transform example. If a
Replace transform, which replaces certain strings with replacement text, were
added, and the transform were configured to replace `Bar` with `Baz` the output
would be added as an input to the Concat and Lower transforms:
![Configuring Transform Behavior 1](./img/configuring_transform_behavior_1.png)
The output of the Replace transform would be `Baz` which is then passed as an input to the Concat transform along with `Foo` producing an output of `FooBaz`. This is then passed as an input into the Lower transform, producing a final output of `foobaz`.
The output of the Replace transform would be `Baz` which is then passed as an
input to the Concat transform along with `Foo` producing an output of `FooBaz`.
This is then passed as an input into the Lower transform, producing a final
output of `foobaz`.
## Transform Syntax
Transforms are JSON objects. Prior to this, the transforms have been shown as flows of building blocks to help illustrate basic transform ideas. However at the simplest level, a transform looks like this:
Transforms are JSON objects. Prior to this, the transforms have been shown as
flows of building blocks to help illustrate basic transform ideas. However at
the simplest level, a transform looks like this:
```json
{
"name": "Lowercase Department",
"type": "lower",
"attributes": {
"transform-attribute-1": "attribute-1-value",
"transform-attribute-2": "attribute-2-value"
}
"name": "Lowercase Department",
"type": "lower",
"attributes": {
"transform-attribute-1": "attribute-1-value",
"transform-attribute-2": "attribute-2-value"
}
}
```
There are three main components of a transform object:
1. `name` - This specifies the name of the transform. It refers to a transform in the IdentityNow API or User Interface (UI). Only provide a name on the root-level transform. Nested transforms do not have names.
1. `name` - This specifies the name of the transform. It refers to a transform
in the IdentityNow API or User Interface (UI). Only provide a name on the
root-level transform. Nested transforms do not have names.
2. `type` - This specifies the transform type, which ultimately determines the transform's behavior.
2. `type` - This specifies the transform type, which ultimately determines the
transform's behavior.
3. `attributes` - This specifies any attributes or configurations for controlling how the transform works. As mentioned earlier in [Configuring Transform Behavior](#configuring-transform-behavior), each transform type has different sets of attributes available.
3. `attributes` - This specifies any attributes or configurations for
controlling how the transform works. As mentioned earlier in
[Configuring Transform Behavior](#configuring-transform-behavior), each
transform type has different sets of attributes available.
## Template Engine
Seaspray ships with the Apache Velocity template engine that allows a transform to reference, transform, and render values passed into the transform context. Every string value in a Seaspray transform can contain templated text and will run through the template engine.
Seaspray ships with the Apache Velocity template engine that allows a transform
to reference, transform, and render values passed into the transform context.
Every string value in a Seaspray transform can contain templated text and will
run through the template engine.
### Example
In the following string, the text `$firstName` is replaced by the value of firstName in the template context. The same goes for `$lastName`.
In the following string, the text `$firstName` is replaced by the value of
firstName in the template context. The same goes for `$lastName`.
If $firstName=John and $lastName=Doe then the string `$firstName.$lastName` would render as `John.Doe`.
If $firstName=John and $lastName=Doe, then the string `$firstName.$lastName` would render as `John.Doe`.
### Identity Attribute Context
The following variables are available to the Apache Velocity template engine when a transform is used to source an identity attribute.
The following variables are available to the Apache Velocity template engine
when a transform is used to source an identity attribute.
| Variable | Type | Description |
|---|---|---|
| identity | sailpoint.object.Identity | This is the identity the attribute promotion is performed on. |
| oldValue | Object | This is the attribute's previous value. |
| attributeDefinition | sailpoint.object.ObjectAttribute | This is the definition of the attribute being promoted. |
| Variable | Type | Description |
| ------------------- | -------------------------------- | ------------------------------------------------------------- |
| identity            | sailpoint.object.Identity        | This is the identity the attribute promotion is performed on. |
| oldValue            | Object                           | This is the attribute's previous value.                       |
| attributeDefinition | sailpoint.object.ObjectAttribute | This is the definition of the attribute being promoted.       |
### Account Profile Context
The following variables are available to the Apache Velocity template engine when a transform is used in an account profile.
The following variables are available to the Apache Velocity template engine
when a transform is used in an account profile.
| Variable | Type | Description |
|---|---|---|
| field | sailpoint.object.Field | This is the field definition backing the account profile attribute. |
| identity | sailpoint.object.Identity | This is the identity the account profile is generating for. |
| Variable | Type | Description |
| ----------- | ---------------------------- | ------------------------------------------------------------------------- |
| field | sailpoint.object.Field | This is the field definition backing the account profile attribute. |
| identity | sailpoint.object.Identity | This is the identity the account profile is generating for. |
| application | sailpoint.object.Application | This is the application backing the source that owns the account profile. |
| current | Object | This is the attribute's current value. |
| current | Object | This is the attribute's current value. |
## Implicit vs Explicit Input
A special configuration attribute available to all transforms is input. If the input attribute is not specified, this is referred to as implicit input, and the system determines the input based on what is configured. If the input attribute is specified, then this is referred to as explicit input, and the system's input is ignored in favor of whatever the transform explicitly specifies. A good way to understand this concept is to walk through an example. Imagine that IdentityNow has the following:
A special configuration attribute available to all transforms is input. If the
input attribute is not specified, this is referred to as implicit input, and the
system determines the input based on what is configured. If the input attribute
is specified, then this is referred to as explicit input, and the system's input
is ignored in favor of whatever the transform explicitly specifies. A good way
to understand this concept is to walk through an example. Imagine that
IdentityNow has the following:
- An account on Source 1 with department set to `Services`.
- An account on Source 2 with department set to `Engineering`.
The following two examples explain how a transform with an implicit or explicit input would work with those sources.
The following two examples explain how a transform with an implicit or explicit
input would work with those sources.
### Implicit Input
@@ -134,7 +194,8 @@ An identity profile is configured the following way:
![Configuring Transform Behavior 2](./img/configuring_transform_behavior_2.png)
As an example, the "Lowercase Department" transform being used is written the following way:
As an example, the "Lowercase Department" transform being used is written the
following way:
```json
{
@@ -144,9 +205,13 @@ As an example, the "Lowercase Department" transform being used is written the fo
}
```
Notice that the attributes map has no input. This is an implicit input example. The transform uses the input provided by the attribute you mapped on the identity profile.
Notice that the attributes map has no input. This is an implicit input example. The
transform uses the input provided by the attribute you mapped on the identity
profile.
In this example, the transform would produce `services` when the source is aggregated because Source 1 is providing a department of `Services` which the transform then lowercases.
In this example, the transform would produce `services` when the source is
aggregated because Source 1 is providing a department of `Services` which the
transform then lowercases.
### Explicit Input
@@ -168,25 +233,39 @@ As an example, the `Lowercase Department` has been changed the following way:
}
```
Notice that there is an `input` in the attributes. This is an explicit input example. The transform uses the value Source 2 provides for the `department` attribute, ignoring your configuration in the identity profile.
Notice that there is an `input` in the attributes. This is an explicit input
example. The transform uses the value Source 2 provides for the `department`
attribute, ignoring your configuration in the identity profile.
In this example, the transform would produce "engineering" because Source 2 is providing a department of `Engineering` which the transform then lowercases. Though the system is still providing an implicit input of Source 1's department attribute, the transform ignores this and uses the explicit input specified as Source 2's department attribute.
In this example, the transform would produce "engineering" because Source 2 is
providing a department of `Engineering` which the transform then lowercases.
Though the system is still providing an implicit input of Source 1's department
attribute, the transform ignores this and uses the explicit input specified as
Source 2's department attribute.
:::tip
This is also an example of a nested transform.
:::
### Account Transforms
Account attribute transforms are configured on the account create profiles. They determine the templates for new accounts created during provisioning events.
Account attribute transforms are configured on the account create profiles. They
determine the templates for new accounts created during provisioning events.
#### Configuration
These can be configured in IdentityNow by going to **Admin** > **Sources** > (A Source) > **Accounts** (tab) > **Create Profile**. These can also be configured with IdentityNow REST APIs.
These can be configured in IdentityNow by going to **Admin** > **Sources** > (A
Source) > **Accounts** (tab) > **Create Profile**. These can also be configured
with IdentityNow REST APIs.
You can select the installed, available transforms from this interface. Alternately, you can add more complex transforms with REST APIs.
You can select the installed, available transforms from this interface.
Alternately, you can add more complex transforms with REST APIs.
In the following example, we can call the [Create Provisioning Policy API](/idn/api/v3/create-provisioning-policy) to create a full name field using the first and last name identity attributes.
In the following example, we can call the
[Create Provisioning Policy API](/idn/api/v3/create-provisioning-policy) to
create a full name field using the first and last name identity attributes.
```bash
curl --location --request POST 'https://{tenant}.api.identitynow.com/v3/sources/{source_id}/provisioning-policies' \
@@ -255,58 +334,107 @@ curl --location --request POST 'https://{tenant}.api.identitynow.com/v3/sources/
}'
```
For more information on the IdentityNow REST API endpoints used to manage transform objects in APIs, refer to [IdentityNow Transform REST APIs](/idn/api/v3/transforms).
For more information on the IdentityNow REST API endpoints used to manage
transform objects in APIs, refer to
[IdentityNow Transform REST APIs](/idn/api/v3/transforms).
:::tip
For details about authentication against REST APIs, refer to the [authentication docs](../../../api/authentication.md).
For details about authentication against REST APIs, refer to the
[authentication docs](../../../api/authentication.md).
:::
#### Testing Transforms on Account Create
To test a transform for an account create profile, you must generate a new account creation provisioning event. This involves granting access to an identity who does not already have an account on this source; an account is created as a byproduct of the access assignment. This can be initiated with access request or even role assignment.
To test a transform for an account create profile, you must generate a new
account creation provisioning event. This involves granting access to an
identity who does not already have an account on this source; an account is
created as a byproduct of the access assignment. This can be initiated with
access request or even role assignment.
#### Applying Transforms on Account Create
Once the transforms are saved to the account profile, they are automatically applied for any subsequent provisioning events.
Once the transforms are saved to the account profile, they are automatically
applied for any subsequent provisioning events.
## Testing Transforms
**Testing Transforms in Identity Profile Mappings**
To test a transform for identity data, go to **Identities** > **Identity Profiles** and select **Mappings**. Select the transform to map one of your identity attributes, select **Save**, and preview your identity data.
To test a transform for identity data, go to **Identities** > **Identity
Profiles** and select **Mappings**. Select the transform to map one of your
identity attributes, select **Save**, and preview your identity data.
**Testing Transforms for Account Attributes**
To test a transform for account data, you must provision a new account on that source. For example, you can create an access request that would result in a new account on that source, or you can assign a new role.
To test a transform for account data, you must provision a new account on that
source. For example, you can create an access request that would result in a new
account on that source, or you can assign a new role.
## Transform Best Practices
- **Designing Complex Transforms** - Start with small transform *building blocks* and add to them. It can be helpful to diagram out the inputs and outputs if you are using many transforms.
- **Designing Complex Transforms** - Start with small transform _building
blocks_ and add to them. It can be helpful to diagram out the inputs and
outputs if you are using many transforms.
- **JSON Editor** - Because transforms are JSON objects, it is recommended that you use a good JSON editor. Atom, Sublime Text, and Microsoft Code work well because they have JSON formatting and plugins that can do JSON validation, completion, formatting, and folding. This is very useful for large complex JSON objects.
- **JSON Editor** - Because transforms are JSON objects, it is recommended that
you use a good JSON editor. Atom, Sublime Text, and Microsoft Code work well
because they have JSON formatting and plugins that can do JSON validation,
completion, formatting, and folding. This is very useful for large complex
JSON objects.
- **Leverage Examples** - Many implementations use similar sets of transforms, and a lot of common solutions can be found in examples. Feel free to share your own transform examples on the [Developer Community forum](https://developer.sailpoint.com/discuss)!
- **Leverage Examples** - Many implementations use similar sets of transforms,
and a lot of common solutions can be found in examples. Feel free to share
your own transform examples on the
[Developer Community forum](https://developer.sailpoint.com/discuss)!
- **Same Problem, Multiple Solutions** - There can be multiple ways to solve the same problem, but use the solution that makes the most sense to your implementation and is easiest to administer and understand.
- **Same Problem, Multiple Solutions** - There can be multiple ways to solve the
same problem, but use the solution that makes the most sense to your
implementation and is easiest to administer and understand.
- **Encapsulate Repetition** - If you are copying and pasting the same transforms over and over, it can be useful to make a transform a standalone transform and make other transforms reference it by using the reference type.
- **Encapsulate Repetition** - If you are copying and pasting the same
transforms over and over, it can be useful to make a transform a standalone
transform and make other transforms reference it by using the reference type.
- **Plan for Bad Data** - Data will not always be perfect, so plan for data failures and try to ensure transforms still produce workable results in case data is missing, malformed, or there are incorrect values.
- **Plan for Bad Data** - Data will not always be perfect, so plan for data
failures and try to ensure transforms still produce workable results in case
data is missing, malformed, or there are incorrect values.
## Transforms vs. Rules
Sometimes it can be difficult to decide when to implement a transform and when to implement a rule. Both transforms and rules can calculate values for identity or account attributes.
Sometimes it can be difficult to decide when to implement a transform and when
to implement a rule. Both transforms and rules can calculate values for identity
or account attributes.
Despite their functional similarity, transforms and rules have very different implementations. Transforms are JSON-based configurations, editable with IdentityNow's transform REST APIs. Rules are implemented with code (typically BeanShell, a Java-like syntax), so they must follow the [IdentityNow Rule Guidelines](https://community.sailpoint.com/docs/DOC-12122), and they must be reviewed by SailPoint and installed into the tenant. Rules, however, can do things that transforms cannot in some cases.
Despite their functional similarity, transforms and rules have very different
implementations. Transforms are JSON-based configurations, editable with
IdentityNow's transform REST APIs. Rules are implemented with code (typically
BeanShell, a Java-like syntax), so they must follow the
[IdentityNow Rule Guidelines](https://community.sailpoint.com/docs/DOC-12122),
and they must be reviewed by SailPoint and installed into the tenant. Rules,
however, can do things that transforms cannot in some cases.
Because transforms have easier and more accessible implementations, they are generally recommended. With transforms, any IdentityNow administrator can view, create, edit, and delete transforms directly with REST API without SailPoint involvement.
Because transforms have easier and more accessible implementations, they are
generally recommended. With transforms, any IdentityNow administrator can view,
create, edit, and delete transforms directly with REST API without SailPoint
involvement.
If something cannot be done with a transform, then consider using a rule. When you are transitioning from a transform to a rule, you must take special consideration when you decide where the rule executes.
If something cannot be done with a transform, then consider using a rule. When
you are transitioning from a transform to a rule, you must take special
consideration when you decide where the rule executes.
- If you are calculating identity attributes, you can use [Identity Attribute rules](https://community.sailpoint.com/docs/DOC-12616) instead of identity transforms.
- If you are calculating identity attributes, you can use
[Identity Attribute rules](https://community.sailpoint.com/docs/DOC-12616)
instead of identity transforms.
- If you are calculating account attributes (during provisioning), you can use [Attribute Generator rules](https://community.sailpoint.com/docs/DOC-12645) instead of account transforms.
- If you are calculating account attributes (during provisioning), you can use
[Attribute Generator rules](https://community.sailpoint.com/docs/DOC-12645)
instead of account transforms.
- All rules you build must follow the [IdentityNow Rule Guidelines](https://community.sailpoint.com/docs/DOC-12122).
- All rules you build must follow the
[IdentityNow Rule Guidelines](https://community.sailpoint.com/docs/DOC-12122).
If you use a rule, make note of it for administrative purposes. The best practice is to check in these types of artifacts into some sort of version control (e.g., GitHub, et al.) for records.
If you use a rule, make note of it for administrative purposes. The best
practice is to check in these types of artifacts into some sort of version
control (e.g., GitHub, et al.) for records.

View File

@@ -12,19 +12,30 @@ tags: ["Transforms", "Operations", "Account", "Attribute"]
## Overview
Use the account attribute transform to look up an account for a particular source on an identity and return a specific attribute value from that account.
Use the account attribute transform to look up an account for a particular
source on an identity and return a specific attribute value from that account.
:::note Other Considerations
- If there are multiple accounts, then IdentityNow by default takes the value from the oldest account (based on the account created date). You can configure this behavior by specifying `accountSortAttribute` and `accountSortDescending` attributes.
- If there are multiple accounts and the oldest account has a null attribute value, by default IdentityNow moves to the next account that can have a value (if there are any). You can override this behavior with the `accountReturnFirstLink` property.
- You can filter the multiple accounts returned based on the data they contain so that you can target specific accounts. This is often used to target accounts that are "active" instead of those that are not.
- If there are multiple accounts, then IdentityNow by default takes the value
from the oldest account (based on the account created date). You can configure
this behavior by specifying `accountSortAttribute` and `accountSortDescending`
attributes.
- If there are multiple accounts and the oldest account has a null attribute
value, by default IdentityNow moves to the next account that can have a value
(if there are any). You can override this behavior with the
`accountReturnFirstLink` property.
- You can filter the multiple accounts returned based on the data they contain
so that you can target specific accounts. This is often used to target
accounts that are "active" instead of those that are not.
:::
## Transform Structure
The account attribute transform's configuration can take several attributes as inputs. The following example shows a fully configured transform with all required and optional attributes.
The account attribute transform's configuration can take several attributes as
inputs. The following example shows a fully configured transform with all
required and optional attributes.
```json
{
@@ -46,54 +57,95 @@ The account attribute transform's configuration can take several attributes as i
- **Required Attributes**
- **type** - This must always be set to `accountAttribute`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **sourceName** - This is a reference to the source to search for accounts.
- This is a reference by a source's display name attribute (e.g., Active Directory). If the display name is updated, this reference must also be updated.
- As an alternative, you can provide an `applicationId` or `applicationName` instead.
- `applicationId` - This is a reference by a source's external GUID/ID attribute (e.g., "ff8081815a8b3925015a8b6adac901ff").
- `applicationName` - This is a reference by a source's immutable name attribute (e.g., "Active Directory \[source\]").
- **attributeName** - The name of the attribute on the account to return. This matches the name of the account attribute name visible in the user interface or on the source schema.
- This is a reference by a source's display name attribute (e.g., Active
Directory). If the display name is updated, this reference must also be
updated.
- As an alternative, you can provide an `applicationId` or `applicationName`
instead.
- `applicationId` - This is a reference by a source's external GUID/ID
attribute (e.g., "ff8081815a8b3925015a8b6adac901ff").
- `applicationName` - This is a reference by a source's immutable name
attribute (e.g., "Active Directory \[source\]").
- **attributeName** - The name of the attribute on the account to return. This
matches the name of the account attribute name visible in the user interface
or on the source schema.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This is a `true` or `false` value indicating whether the transform logic must be reevaluated every evening as part of the identity refresh process.
- **accountSortAttribute** - This configuration's value is a string name of the attribute to use when determining the ordering of returned accounts when there are multiple entries.
- **requiresPeriodicRefresh** - This is a `true` or `false` value indicating
whether the transform logic must be reevaluated every evening as part of the
identity refresh process.
- **accountSortAttribute** - This configuration's value is a string name of
the attribute to use when determining the ordering of returned accounts when
there are multiple entries.
- Accounts can be sorted by any schema attribute.
- If no sort attribute is defined, the transform will default to "created" (ascending sort on created date - oldest object wins).
- **accountSortDescending** - This configuration's value is a boolean (true/false). It controls the sort order when there are multiple accounts.
- If no sort attribute is defined, the transform will default to "created"
(ascending sort on created date - oldest object wins).
- **accountSortDescending** - This configuration's value is a boolean
(true/false). It controls the sort order when there are multiple accounts.
- If not defined, the transform will default to false (ascending order)
- **accountReturnFirstLink** - This configuration's value is a boolean (true/false). It controls which account to source a value from for an attribute. If this flag is set to true, the transform returns the value from the first account in the list, even if it is null. If this flag is set to false, the transform returns the first non-null value.
- If the configuration's value is not defined, the transform will default to the false setting.
- **accountFilter** - This expression queries the database to narrow search results. This configuration's value is a `sailpoint.object.Filter` expression for searching against the database. The default filter always includes the source and identity, and any subsequent expressions are combined in an AND operation with the existing search criteria.
- **accountReturnFirstLink** - This configuration's value is a boolean
(true/false). It controls which account to source a value from for an
attribute. If this flag is set to true, the transform returns the value from
the first account in the list, even if it is null. If this flag is set to
false, the transform returns the first non-null value.
- If the configuration's value is not defined, the transform will default to
the false setting.
- **accountFilter** - This expression queries the database to narrow search
results. This configuration's value is a `sailpoint.object.Filter`
expression for searching against the database. The default filter always
includes the source and identity, and any subsequent expressions are
combined in an AND operation with the existing search criteria.
- Only certain searchable attributes are available:
- `nativeIdentity` - This is the account ID.
- `displayName` - This is the account name.
- `entitlements` - This boolean value determines whether the account has entitlements.
- **accountPropertyFilter** - Use this expression to search and filter accounts in memory. This configuration's value is a `sailpoint.object.Filter` expression for searching against the returned resultset.
- All account attributes are available for filtering because this operation is performed in memory.
- `entitlements` - This boolean value determines whether the account has
entitlements.
- **accountPropertyFilter** - Use this expression to search and filter
accounts in memory. This configuration's value is a
`sailpoint.object.Filter` expression for searching against the returned
resultset.
- All account attributes are available for filtering because this operation
is performed in memory.
- Examples:
- `(status != "terminated")`
- `(department == "Engineering")`
- `(groups.containsAll({"Admin"}) || location == "Austin")`
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
HR systems can have multiple HR records for a person, especially in rehire and conversion scenarios. In order to get the correct identity data, you must get data from only the latest active accounts.
HR systems can have multiple HR records for a person, especially in rehire and
conversion scenarios. In order to get the correct identity data, you must get
data from only the latest active accounts.
- `sourceName` is "Corporate HR" because that is the name of the authoritative source.
- `sourceName` is "Corporate HR" because that is the name of the authoritative
source.
- `attributeName` is "HIREDATE" because that is the attribute you want from the authoritative source.
- `attributeName` is "HIREDATE" because that is the attribute you want from the
authoritative source.
- `accountSortAttribute` is "created" because you want to sort on created dates in case there are multiple accounts.
- `accountSortAttribute` is "created" because you want to sort on created dates
in case there are multiple accounts.
- `accountSortDescending` is true because you want to sort based on the newest or latest account from the HR system.
- `accountSortDescending` is true because you want to sort based on the newest
or latest account from the HR system.
- `accountReturnFirstLink` is true because you want to return the value of HIREDATE, even if it is null.
- `accountReturnFirstLink` is true because you want to return the value of
  HIREDATE, even if it is null.
- `accountPropertyFilter` is filtering the accounts to look at only active accounts. Terminated accounts will not appear (assuming there are no data issues).
- `accountPropertyFilter` is filtering the accounts to look at only active
accounts. Terminated accounts will not appear (assuming there are no data
issues).
:::info
You cannot use `accountFilter` here because WORKER_STATUS\_\_c is not a searchable attribute, but `accountPropertyFilter` works instead.
You cannot use `accountFilter` here because WORKER_STATUS\_\_c is not a
searchable attribute, but `accountPropertyFilter` works instead.
:::
@@ -118,11 +170,15 @@ You cannot use `accountFilter` here because WORKER_STATUS\_\_c is not a searchab
<p>&nbsp;</p>
When you are mapping values like a username, focus on primary accounts from a particular source or accounts that are not service accounts.
When you are mapping values like a username, focus on primary accounts from a
particular source or accounts that are not service accounts.
- `sourceName` is "Active Directory" because that is the source this data is coming from.
- `attributeName` is "sAMAccountName" because you are mapping the username of the user.
- `accountFilter` is an expression filtering the accounts to make sure they are not service accounts.
- `sourceName` is "Active Directory" because that is the source this data is
coming from.
- `attributeName` is "sAMAccountName" because you are mapping the username of
the user.
- `accountFilter` is an expression filtering the accounts to make sure they are
not service accounts.
:::info

File diff suppressed because one or more lines are too long

View File

@@ -12,13 +12,19 @@ tags: ["Transforms", "Operations", "Base64", "Encode"]
## Overview
Base64 is mostly used to encode binary data like images so that the data can be represented as a string within HTML, email or other text documents. Base64 is also commonly used to encode data that can be unsupported or damaged during transfer, storage, or output.
Base64 is mostly used to encode binary data like images so that the data can be
represented as a string within HTML, email or other text documents. Base64 is
also commonly used to encode data that can be unsupported or damaged during
transfer, storage, or output.
The base64 encode transform allows you to take incoming data and encode it using a Base64-based text encoding scheme. The output of the transform is a string comprising 64 basic ASCII characters.
The base64 encode transform allows you to take incoming data and encode it using
a Base64-based text encoding scheme. The output of the transform is a string
comprising 64 basic ASCII characters.
:::note Other Considerations
- If the input to the Base64 encode transform is null, the transform returns a null value.
- If the input to the Base64 encode transform is null, the transform returns a
null value.
:::
@@ -37,13 +43,17 @@ The Base64 encode transform only requires the `type` and `name` attributes:
- **Required Attributes**
- **type** - This must be set to `base64Encode`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
This example takes the incoming attribute configured in the identity profile attribute UI and returns it as a Base64 encoded string.
This example takes the incoming attribute configured in the identity profile
attribute UI and returns it as a Base64 encoded string.
Input:
@@ -70,7 +80,8 @@ MTIzNA==
<p>&nbsp;</p>
This example takes a binary image as an input and returns it as a Base64 encoded string.
This example takes a binary image as an input and returns it as a Base64 encoded
string.
Input:

View File

@@ -12,11 +12,15 @@ tags: ["Transforms", "Operations", "Concatenation"]
## Overview
Use the concatenation transform to join two or more string values into a combined output. The concatenation transform often joins elements such as first and last name into a full display name, but it has many other uses.
Use the concatenation transform to join two or more string values into a
combined output. The concatenation transform often joins elements such as first
and last name into a full display name, but it has many other uses.
## Transform Structure
The concatenation transform requires an array list of `values` that need to be joined. These values can be static strings or the return values of other nested transforms.
The concatenation transform requires an array list of `values` that need to be
joined. These values can be static strings or the return values of other nested
transforms.
```json
{
@@ -32,14 +36,19 @@ The concatenation transform requires an array list of `values` that need to be j
- **Required Attributes**
- **type** - This must always be set to `concat`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **values** - This is the array of items to join.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
This transform joins the user's first name from the "HR Source" with his/her last name, adds a space between them, and then adds a parenthetical note that the user is a contractor at the end.
This transform joins the user's first name from the "HR Source" with his/her
last name, adds a space between them, and then adds a parenthetical note that
the user is a contractor at the end.
**Transform Request Body**:
@@ -74,7 +83,8 @@ This transform joins the user's first name from the "HR Source" with his/her las
<p>&nbsp;</p>
This transform joins the user's job title with his/her job code value and adds a hyphen between those two pieces of data.
This transform joins the user's job title with his/her job code value and adds a
hyphen between those two pieces of data.
**Transform Request Body**:

View File

@@ -12,19 +12,28 @@ tags: ["Transforms", "Operations", "Conditional"]
## Overview
Use the conditional transform to output different values depending on simple conditional logic. This is a convenient transform - the same capability can be implemented with a "static" transform, but this transform has greater simplicity and null-safe error checking.
Use the conditional transform to output different values depending on simple
conditional logic. This is a convenient transform - the same capability can be
implemented with a "static" transform, but this transform has greater simplicity
and null-safe error checking.
:::note Other Considerations
- The two operands within the transform cannot be null; if they are, an IllegalArgumentException is thrown.
- The `expression` attribute must be "eq," or the transform will throw an IllegalArgumentException.
- All attribute string values are case-sensitive, so differently cased strings (e.g., "engineering" and "Engineering") will not return as matched.
- The two operands within the transform cannot be null; if they are, an
IllegalArgumentException is thrown.
- The `expression` attribute must be "eq," or the transform will throw an
IllegalArgumentException.
- All attribute string values are case-sensitive, so differently cased strings
(e.g., "engineering" and "Engineering") will not return as matched.
:::
## Transform Structure
In addition to the `type` and `name` attributes, the conditional transform requires an `expression`, a `positiveCondition`, and a `negativeCondition`. If the expression evaluates to false, the transform returns the negative condition; otherwise it returns the positive condition.
In addition to the `type` and `name` attributes, the conditional transform
requires an `expression`, a `positiveCondition`, and a `negativeCondition`. If
the expression evaluates to false, the transform returns the negative condition;
otherwise it returns the positive condition.
```json
{
@@ -42,16 +51,25 @@ In addition to the `type` and `name` attributes, the conditional transform requi
- **Required Attributes**
- **type** - This must always be set to `conditional`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **expression** - This comparison statement follows the structure of `ValueA eq ValueB` where `ValueA` and `ValueB` are static strings or outputs of other transforms; the `eq` operator is the only valid comparison.
- **positiveCondition** - This is the output of the transform if the expression evaluates to true.
- **negativeCondition** - This is the output of the transform if the expression evaluates to false.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **expression** - This comparison statement follows the structure of
`ValueA eq ValueB` where `ValueA` and `ValueB` are static strings or outputs
of other transforms; the `eq` operator is the only valid comparison.
- **positiveCondition** - This is the output of the transform if the
expression evaluates to true.
- **negativeCondition** - This is the output of the transform if the
expression evaluates to false.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
This transform takes the user's HR-defined department attribute and compares it to the value of "Science". If this is the user's department, the transform returns `true`. Otherwise, it returns `false`.
This transform takes the user's HR-defined department attribute and compares it
to the value of "Science". If this is the user's department, the transform
returns `true`. Otherwise, it returns `false`.
**Transform Request Body**:
@@ -78,7 +96,10 @@ This transform takes the user's HR-defined department attribute and compares it
<p>&nbsp;</p>
This transform extends the previous one by returning the output of another Seaspray transform depending on the result of the expression. You can assign Seaspray transforms' outputs to variables and then reference them within the `positiveCondition` and `negativeCondition` attributes.
This transform extends the previous one by returning the output of another
Seaspray transform depending on the result of the expression. You can assign
Seaspray transforms' outputs to variables and then reference them within the
`positiveCondition` and `negativeCondition` attributes.
**Transform Request Body**:

View File

@@ -12,18 +12,31 @@ tags: ["Transforms", "Operations", "Date", "Compare"]
## Overview
Use the date compare transform to compare two dates and, depending on the comparison result, return one value if one date is after the other or return a different value if it is before the other. A common use case is to calculate lifecycle states (e.g., the user is "active" if the current date is greater than or equal to the user's hire date, etc.).
Use the date compare transform to compare two dates and, depending on the
comparison result, return one value if one date is after the other or return a
different value if it is before the other. A common use case is to calculate
lifecycle states (e.g., the user is "active" if the current date is greater than
or equal to the user's hire date, etc.).
:::note Other Considerations
- In addition to explicit date values, the transform recognizes the "now" keyword that always evaluates to the exact date and time when the transform is evaluating.
- All dates **must** be in [ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601) in order for the date compare transform to evaluate properly.
- In addition to explicit date values, the transform recognizes the "now"
keyword that always evaluates to the exact date and time when the transform is
evaluating.
- All dates **must** be in
[ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601) in order for the date
compare transform to evaluate properly.
:::
## Transform Structure
The date compare transform takes as an input the two dates to compare, denoted as `firstDate` and `secondDate`. The transform also requires an `operator` designation so it knows which condition to evaluate for. Lastly, the transform requires both a `positiveCondition` and a `negativeCondition` -- the former returns if the comparison evaluates to `true`; the latter returns if the comparison evaluates to `false`.
The date compare transform takes as an input the two dates to compare, denoted
as `firstDate` and `secondDate`. The transform also requires an `operator`
designation so it knows which condition to evaluate for. Lastly, the transform
requires both a `positiveCondition` and a `negativeCondition` -- the former
returns if the comparison evaluates to `true`; the latter returns if the
comparison evaluates to `false`.
```json
{
@@ -49,22 +62,33 @@ The date compare transform takes as an input the two dates to compare, denoted a
- **Required Attributes**
- **type** - This must always be set to `dateCompare`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **firstDate** - This is the first date to consider (i.e., the date that would be on the left hand side of the comparison operation).
- **secondDate** - This is the second date to consider (i.e., the date that would be on the right hand side of the comparison operation).
- **operator** - This is the comparison to perform. The following values are valid:
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **firstDate** - This is the first date to consider (i.e., the date that
would be on the left hand side of the comparison operation).
- **secondDate** - This is the second date to consider (i.e., the date that
would be on the right hand side of the comparison operation).
- **operator** - This is the comparison to perform. The following values are
valid:
- **LT**: Strictly less than: firstDate < secondDate
- **LTE**: Less than or equal to: firstDate <= secondDate
- **GT**: Strictly greater than: firstDate > secondDate
- **GTE**: Greater than or equal to: firstDate >= secondDate
- **positiveCondition** - This is the value to return if the comparison is true.
- **negativeCondition** - This is the value to return if the comparison is false.
- **positiveCondition** - This is the value to return if the comparison is
true.
- **negativeCondition** - This is the value to return if the comparison is
false.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
This transform accomplishes a basic lifecycle state calculation. It compares the user's termination date with his/her HR record. If the current datetime (denoted by `now`) is less than that date, the transform returns "active". If the current datetime is greater than that date, the transform returns "terminated".
This transform accomplishes a basic lifecycle state calculation. It compares the
user's termination date with his/her HR record. If the current datetime (denoted
by `now`) is less than that date, the transform returns "active". If the current
datetime is greater than that date, the transform returns "terminated".
**Transform Request Body**:
@@ -92,7 +116,9 @@ This transform accomplishes a basic lifecycle state calculation. It compares the
<p>&nbsp;</p>
This transform compares the user's hire date to a fixed date in the past. If the user was hired prior to January 1, 1996, the transform returns "legacy". If the user was hired later than January 1, 1996, it returns "regular".
This transform compares the user's hire date to a fixed date in the past. If the
user was hired prior to January 1, 1996, the transform returns "legacy". If the
user was hired later than January 1, 1996, it returns "regular".
**Transform Request Body**:

View File

@@ -12,24 +12,35 @@ tags: ["Transforms", "Operations", "Date", "Format"]
## Overview
Use the date format transform to convert datetime strings from one format to another. This is often useful when you are syncing data from one system to another, where each application uses a different format for date and time data.
Use the date format transform to convert datetime strings from one format to
another. This is often useful when you are syncing data from one system to
another, where each application uses a different format for date and time data.
This transform leverages the Java SimpleDateFormat syntax; see the [References](#references) section for more information on this standard.
This transform leverages the Java SimpleDateFormat syntax; see the
[References](#references) section for more information on this standard.
:::note Other Considerations
- In addition to explicit SimpleDateFormat syntax, the date format transform also recognizes several built-in "named" constructs:
- **ISO8601:** This is the date format corresponding to the ISO8601 standard. The exact format is expressed as yyyy-MM-dd'T'HH:mm:ss.SSSX.
- **LDAP:** This is the date format corresponding to the LDAP date format standard, also expressed as yyyyMMddHHmmss.Z.
- **PEOPLE_SOFT:** This is the date format used by People Soft, also expressed as MM/dd/yyyy.
- **EPOCH_TIME_JAVA:** This represents the incoming date value as the elapsed time in milliseconds from midnight, January 1st, 1970.
- **EPOCH_TIME_WIN32:** This represents the incoming date value as the elapsed time in 100-nanosecond intervals from midnight, January 1st, 1601.
- In addition to explicit SimpleDateFormat syntax, the date format transform
also recognizes several built-in "named" constructs:
- **ISO8601:** This is the date format corresponding to the ISO8601 standard.
The exact format is expressed as yyyy-MM-dd'T'HH:mm:ss.SSSX.
- **LDAP:** This is the date format corresponding to the LDAP date format
standard, also expressed as yyyyMMddHHmmss.Z.
- **PEOPLE_SOFT:** This is the date format used by People Soft, also
expressed as MM/dd/yyyy.
- **EPOCH_TIME_JAVA:** This represents the incoming date value as the elapsed
time in milliseconds from midnight, January 1st, 1970.
- **EPOCH_TIME_WIN32:** This represents the incoming date value as the elapsed
time in 100-nanosecond intervals from midnight, January 1st, 1601.
:::
## Transform Structure
The date format transform takes whatever value provided as the input, parses the datetime based on the `inputFormat` provided, and then reformats it into the desired `outputFormat`.
The date format transform takes whatever value provided as the input, parses the
datetime based on the `inputFormat` provided, and then reformats it into the
desired `outputFormat`.
```json
{
@@ -46,18 +57,30 @@ The date format transform takes whatever value provided as the input, parses the
- **Required Attributes**
- **type** - This must always be set to `dateFormat`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **inputFormat** - This string value indicates either the explicit SimpleDateFormat or the built-in named format of the incoming data.
- If no inputFormat is provided, the transform assumes that it is in [ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601).
- **outputFormat** - This string value indicates either the explicit SimpleDateFormat or the built-in named format that the data is formatted into.
- If no outputFormat is provided, the transform assumes that it is in [ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601).
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **inputFormat** - This string value indicates either the explicit
SimpleDateFormat or the built-in named format of the incoming data.
- If no inputFormat is provided, the transform assumes that it is in
[ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601).
- **outputFormat** - This string value indicates either the explicit
SimpleDateFormat or the built-in named format that the data is formatted
into.
- If no outputFormat is provided, the transform assumes that it is in
[ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601).
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform takes the incoming Java epoch-based timestamp and formats it as an ISO8601 compatible string.
This transform takes the incoming Java epoch-based timestamp and formats it as
an ISO8601 compatible string.
```bash
Input: 144642632190
@@ -81,11 +104,12 @@ Output: 1974-08-02T02:30:32.190-00
<p>&nbsp;</p>
This transform takes the incoming date, formatted as a common US date string, and formats it to match the date structure of most database systems.
This transform takes the incoming date, formatted as a common US date string,
and formats it to match the date structure of most database systems.
```bash
Input: 4/1/1975
Output: 1975-04-01
Output: 1975-04-01
```
**Transform Request Body**:

View File

@@ -5,34 +5,53 @@ pagination_label: Date Math
sidebar_label: Date Math
sidebar_class_name: dateMath
keywords: ["transforms", "operations", "date", "math"]
description: Add, subtract, and round components of a timestamp's incoming value.
description:
Add, subtract, and round components of a timestamp's incoming value.
slug: /docs/transforms/operations/date-math
tags: ["Transforms", "Operations", "Date", "Math"]
---
## Overview
Use the date math transform to add, subtract, and round components of a timestamp's incoming value. It also allows you to work with a referential value of "now" to run operations against the current date and time instead of a fixed value.
Use the date math transform to add, subtract, and round components of a
timestamp's incoming value. It also allows you to work with a referential value
of "now" to run operations against the current date and time instead of a fixed
value.
The output format for the DateMath transform is "yyyy-MM-dd'T'HH:mm." When you use this transform inside another transform (e.g., [dateCompare](./date-compare.md)), make sure to convert to [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) first.
The output format for the DateMath transform is "yyyy-MM-dd'T'HH:mm." When you
use this transform inside another transform (e.g.,
[dateCompare](./date-compare.md)), make sure to convert to
[ISO8601](https://en.wikipedia.org/wiki/ISO_8601) first.
:::note Other Considerations
- The input datetime value must always be in [ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601), in UTC time zone:
- The input datetime value must always be in
[ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601), in UTC time zone:
- yyyy-MM-ddThh:mm:ss:nnnZ
- 2020-10-28T12:00:00.000Z, as an example
- The dateFormat transform can help get data into this format.
- The industry standard for rounding is actually date/time truncation. When rounding down, the fractional value is truncated from the incoming data. When rounding up, the fractional value is truncated and the next unit of time is added. Refer to the Transform Structure section below for examples.
- When you are rounding, the "week" unit of time is not supported as a metric, and attempting to round up or down a week will result in an error.
- If you are using the "now" keyword and an input date is also applied as the implicitly or explicitly defined input parameter, the transform prefers using "now" and ignores the data in the `input` attribute.
- The industry standard for rounding is actually date/time truncation. When
rounding down, the fractional value is truncated from the incoming data. When
rounding up, the fractional value is truncated and the next unit of time is
added. Refer to the Transform Structure section below for examples.
- When you are rounding, the "week" unit of time is not supported as a metric,
and attempting to round up or down a week will result in an error.
- If you are using the "now" keyword and an input date is also applied as the
implicitly or explicitly defined input parameter, the transform prefers
using "now" and ignores the data in the `input` attribute.
:::
## Transform Structure
The date math transform takes the input value and executes addition, subtraction and/or rounding operations to that value based on an `expression` configuration value. As indicated earlier, the input datetime must be in [ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601). The `expression` value leverages the following abbreviations to indicate which date or time component to evaluate:
The date math transform takes the input value and executes addition, subtraction
and/or rounding operations to that value based on an `expression` configuration
value. As indicated earlier, the input datetime must be in
[ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601). The `expression` value
leverages the following abbreviations to indicate which date or time component
to evaluate:
> - "y" - year
> - "M" - month
@@ -52,10 +71,14 @@ Also, the operational logic is defined by usage of one of the following symbols:
Some examples of expressions are:
> - `"expression": "now"` returns the current date and time.
> - `"expression": "now/h"` returns the current date and time, rounded to the hour.
> - `"expression": "now/h"` returns the current date and time, rounded to the
> hour.
> - `"expression": "now+1w"` returns one week from the current date and time.
> - `"expression": "now+1y+1M+2d-4h+1m-3s/s"` returns the current date and time plus one year, one month, two days, minus four hours, plus one minute and minus three seconds, rounded to the second.
> - `"expression": "+3M"` returns the date and time that would be three months more than the value provided as an input to the transform.
> - `"expression": "now+1y+1M+2d-4h+1m-3s/s"` returns the current date and time
> plus one year, one month, two days, minus four hours, plus one minute and
> minus three seconds, rounded to the second.
> - `"expression": "+3M"` returns the date and time that would be three months
> more than the value provided as an input to the transform.
```json
{
@@ -80,21 +103,34 @@ Some examples of expressions are:
- **Required Attributes**
- **type** - This must always be set to `dateMath.`
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **expression** - A string value of the date and time components to operate on, along with the math operations to execute. Multiple operations on multiple components are supported.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **expression** - A string value of the date and time components to operate
on, along with the math operations to execute. Multiple operations on
multiple components are supported.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **roundUp** - This `true` or `false` value indicates whether the transform rounds up or down when the `expression` defines a rounding ("/") operation. If this value is not provided, the transform defaults to `false`.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **roundUp** - This `true` or `false` value indicates whether the transform
rounds up or down when the `expression` defines a rounding ("/") operation.
If this value is not provided, the transform defaults to `false`.
- `true` indicates the transform rounds up (i.e., truncate the fractional date/time component indicated and then add one unit of that component).
- `false` indicates the transform rounds down (i.e., truncate the fractional date/time component indicated).
- `input` - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- `true` indicates the transform rounds up (i.e., truncate the fractional
date/time component indicated and then add one unit of that component).
- `false` indicates the transform rounds down (i.e., truncate the fractional
date/time component indicated).
- `input` - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform takes the current date, subtracts five days from it, and rounds down to the lowest day.
This transform takes the current date, subtracts five days from it, and rounds
down to the lowest day.
**Transform Request Body**:
@@ -113,7 +149,10 @@ This transform takes the current date, subtracts five days from it, and rounds d
<p>&nbsp;</p>
This transform takes the `startDate` attribute from a user's record in the "HR Source," converts it from its native format to an [ISO8601-formatted](https://en.wikipedia.org/wiki/ISO_8601) string, and then adds twelve hours to it. The final value is then rounded up to the next second.
This transform takes the `startDate` attribute from a user's record in the "HR
Source," converts it from its native format to an
[ISO8601-formatted](https://en.wikipedia.org/wiki/ISO_8601) string, and then
adds twelve hours to it. The final value is then rounded up to the next second.
**Transform Request Body**:
@@ -146,7 +185,12 @@ This transform takes the `startDate` attribute from a user's record in the "HR S
<p>&nbsp;</p>
This transform takes the `HIREDATE` from Workday and converts it to [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) to be used in the Date Math transform. The Date Math transform then creates a new Date of `HIREDATE + 1`. Since that is then outputted in the format "yyyy-MM-dd'T'HH:mm," you can then use it in a [dateFormat](/idn/docs/transforms/operations/date-format) transform to give a WIN32 formatted date.
This transform takes the `HIREDATE` from Workday and converts it to
[ISO8601](https://en.wikipedia.org/wiki/ISO_8601) to be used in the Date Math
transform. The Date Math transform then creates a new Date of `HIREDATE + 1`.
Since that is then outputted in the format "yyyy-MM-dd'T'HH:mm," you can then
use it in a [dateFormat](/idn/docs/transforms/operations/date-format) transform
to give a WIN32 formatted date.
**Transform Request Body**:

View File

@@ -12,7 +12,10 @@ tags: ["Transforms", "Operations", "Diacritical"]
## Overview
Use the decompose diacritical marks transform to clean or standardize symbols used within language to inform the reader how to say or pronounce a letter. These symbols are often incompatible with downstream applications and must be standardized to another character set such as ASCII.
Use the decompose diacritical marks transform to clean or standardize symbols
used within language to inform the reader how to say or pronounce a letter.
These symbols are often incompatible with downstream applications and must be
standardized to another character set such as ASCII.
The following are examples of diacritical marks:
@@ -23,7 +26,8 @@ The following are examples of diacritical marks:
## Transform Structure
The transform for decompose diacritical marks requires only the transform's `type` and `name` attributes:
The transform for decompose diacritical marks requires only the transform's
`type` and `name` attributes:
```json
{
@@ -35,13 +39,20 @@ The transform for decompose diacritical marks requires only the transform's `typ
## Attributes
- **Required Attributes**
- **type** - This must always be set to `decomposeDiacriticalMarks`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
@@ -63,7 +74,8 @@ Output: "Aric"
<p>&nbsp;</p>
This transform takes the user's "LastName" attribute from the "HR Source" and replaces any diacritical marks with ASCII-compatible values.
This transform takes the user's "LastName" attribute from the "HR Source" and
replaces any diacritical marks with ASCII-compatible values.
```bash
Input: "Dubçek"

View File

@@ -12,17 +12,20 @@ tags: ["Transforms", "Operations", "Phone"]
## Overview
Use the E.164 phone transform to convert an incoming phone number string into an E.164-compatible number.
Use the E.164 phone transform to convert an incoming phone number string into an
E.164-compatible number.
:::note Other Considerations
- If the input string to the transform does not represent a valid phone number, the transform returns null.
- If the input string to the transform does not represent a valid phone number,
the transform returns null.
:::
## Transform Structure
The E.164 phone transform only requires the transform's `type` and `name` attributes:
The E.164 phone transform only requires the transform's `type` and `name`
attributes:
```json
{
@@ -36,16 +39,26 @@ The E.164 phone transform only requires the transform's `type` and `name` attrib
- **Required Attributes**
- **type** - This must always be set to `E.164phone.`
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **defaultRegion** - This is an optional attribute used to define the phone number region to format into. If no defaultRegion is provided, the transform takes US as the default country. The format of the country code must be in [ISO 3166-1 alpha-2 format](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2).
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
- **defaultRegion** - This is an optional attribute used to define the phone
number region to format into. If no defaultRegion is provided, the transform
takes US as the default country. The format of the country code must be in
[ISO 3166-1 alpha-2 format](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2).
## Examples
This transform transforms a phone number separated by `-` into the E.164 Phone format.
This transform transforms a phone number separated by `-` into the E.164 Phone
format.
```bash
Input: "512-777-2222"
@@ -73,7 +86,8 @@ Output: "+1512459222"
<p>&nbsp;</p>
This transform transforms a phone number separated by `.` into the E.164 Phone format.
This transform transforms a phone number separated by `.` into the E.164 Phone
format.
```bash
Input: "779.284.2727"
@@ -101,7 +115,8 @@ Output: "+17792842727"
<p>&nbsp;</p>
This transform transforms a phone number and country region code into the E.164 Phone format.
This transform transforms a phone number and country region code into the E.164
Phone format.
```bash
Input: "0412345678"

View File

@@ -12,11 +12,23 @@ tags: ["Transforms", "Operations", "First", "Valid"]
## Overview
Use the first valid transform to perform if/then/else operations on multiple different data points to return the first piece of data that is not null. This is often useful for the SailPoint username (uid) attribute in which case each identity requires a value, but the desired information is not available yet (e.g., Active Directory username). In these cases, you can use a first valid transform to populate the uid attribute with the user's linked Active Directory (AD) account information if the uid attribute is not null. If the attribute is null, use a different attribute from a source that the user does have, like his/her employee number.
Use the first valid transform to perform if/then/else operations on multiple
different data points to return the first piece of data that is not null. This
is often useful for the SailPoint username (uid) attribute in which case each
identity requires a value, but the desired information is not available yet
(e.g., Active Directory username). In these cases, you can use a first valid
transform to populate the uid attribute with the user's linked Active Directory
(AD) account information if the uid attribute is not null. If the attribute is
null, use a different attribute from a source that the user does have, like
his/her employee number.
## Transform Structure
The first valid transform requires an array list of `values` that you must consider. These can be static strings or other nested transforms' return values. Remember that the transform returns the first entry in the array that evaluates to a non-null value, so you are recommended to provide the entries in the array in descending order of preference.
The first valid transform requires an array list of `values` that you must
consider. These can be static strings or other nested transforms' return values.
Remember that the transform returns the first entry in the array that evaluates
to a non-null value, so you are recommended to provide the entries in the array
in descending order of preference.
```json
{
@@ -54,15 +66,22 @@ The first valid transform requires an array list of `values` that you must consi
- **Required Attributes**
- **type** - This must always be set to `firstValid`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **values** - This is an array of attributes to evaluate for existence.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **ignoreErrors** - This `true` or `false` value indicates whether to proceed to the next option if an error (like an NPE) occurs.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **ignoreErrors** - This `true` or `false` value indicates whether to proceed
to the next option if an error (like an NPE) occurs.
## Examples
This transform first attempts to return the user's `sAMAccountName` from his/her AD account. In the event that the user does not have an AD account, the transform then attempts to return the user's Okta login. If the Okta login is also blank, the transform returns the user's employee ID from his/her HR record.
This transform first attempts to return the user's `sAMAccountName` from his/her
AD account. In the event that the user does not have an AD account, the
transform then attempts to return the user's Okta login. If the Okta login is
also blank, the transform returns the user's employee ID from his/her HR record.
**Transform Request Body**:
@@ -102,7 +121,12 @@ This transform first attempts to return the user's `sAMAccountName` from his/her
<p>&nbsp;</p>
This transform is often useful for populating the work email identity attribute. Since the work email attribute is a required field for a valid identity, it cannot be blank. However, often new hires do not have an AD account and/or email provisioned until after the user has been provisioned. A common practice in this situation is to return a static string of "none" to ensure that this required attribute does not remain empty.
This transform is often useful for populating the work email identity attribute.
Since the work email attribute is a required field for a valid identity, it
cannot be blank. However, often new hires do not have an AD account and/or email
provisioned until after the user has been provisioned. A common practice in this
situation is to return a static string of "none" to ensure that this required
attribute does not remain empty.
**Transform Request Body**:
@@ -134,7 +158,16 @@ This transform is often useful for populating the work email identity attribute.
<p>&nbsp;</p>
This transform is often useful for populating an attribute called "Manager DN". It pulls the manager of the identity and then gets the identity attribute "Network DN" for the manager. "Network DN" pulls directly from `distinguishedName` in AD. With this transform, you can set a user's manager's DN as an identity attribute to allow for attribute sync down to AD. Without `ignoreErrors` set to `true`, this transform throws a null pointer exception (NPE) for any user without a manager. With `ignoreErrors` set to true, the first value in the `firstValid` throws an error for users without managers, but the error is ignored, and the transform selects the empty string to set the "Manager DN" identity attribute to.
This transform is often useful for populating an attribute called "Manager DN".
It pulls the manager of the identity and then gets the identity attribute
"Network DN" for the manager. "Network DN" pulls directly from
`distinguishedName` in AD. With this transform, you can set a user's manager's
DN as an identity attribute to allow for attribute sync down to AD. Without
`ignoreErrors` set to `true`, this transform throws a null pointer exception
(NPE) for any user without a manager. With `ignoreErrors` set to true, the first
value in the `firstValid` throws an error for users without managers, but the
error is ignored, and the transform selects the empty string to set the "Manager
DN" identity attribute to.
**Transform Request Body**:

View File

@@ -12,17 +12,31 @@ tags: ["Transforms", "Operations", "Generate", "Random"]
## Overview
Use the generate random string transform as an out-of-the-box rule transform provided through SailPoint's Cloud Services Utility rule. The transform allows you to generate a random string of any length, using true/false flags to denote whether the string includes numbers and/or special characters.
Use the generate random string transform as an out-of-the-box rule transform
provided through SailPoint's Cloud Services Utility rule. The transform allows
you to generate a random string of any length, using true/false flags to denote
whether the string includes numbers and/or special characters.
:::note Other Considerations
- The generate random string transform shares some common features with two other transforms: [random numeric](./random-numeric.md) and [random alphanumeric](./random-alphanumeric.md). In most cases, either of these other two out-of-the-box transforms are recommended. However, the one advantage of the generate random string transform is its support for special characters, so a common use for this transform is generating random passwords that meet basic complexity requirements.
- The generate random string transform shares some common features with two
other transforms: [random numeric](./random-numeric.md) and
[random alphanumeric](./random-alphanumeric.md). In most cases, either of
these other two out-of-the-box transforms are recommended. However, the one
advantage of the generate random string transform is its support for special
characters, so a common use for this transform is generating random passwords
that meet basic complexity requirements.
:::
## Transform Structure
The structure of a generate random string transform requires the `name` of the referenced rule to be the "Cloud Services Deployment Utility" rule built by SailPoint. You must also set `operation` to `generateRandomString`, provide a `length`, and provide the true/false attributes for `includeNumbers` and `includeSpecialChars`. Last, you must include the `type` and `name` attributes required for all transforms:
The structure of a generate random string transform requires the `name` of the
referenced rule to be the "Cloud Services Deployment Utility" rule built by
SailPoint. You must also set `operation` to `generateRandomString`, provide
a `length`, and provide the true/false attributes for `includeNumbers` and
`includeSpecialChars`. Last, you must include the `type` and `name` attributes
required for all transforms:
```json
{
@@ -42,11 +56,16 @@ The structure of a generate random string transform requires the `name` of the r
- **Required Attributes**
- **type** - This must always be set to `rule`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This must always be set to `Cloud Services Deployment Utility`.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This must always be set to
`Cloud Services Deployment Utility`.
- **operation** - This must always be set to `generateRandomString`.
- **includeNumbers** - You must set this value to `true` or `false` to indicate whether the generator logic includes numbers.
- **includeSpecialChars** - You must set this value to `true` or `false` to indicate whether the generator logic includes the following special characters:
- **includeNumbers** - You must set this value to `true` or `false` to
indicate whether the generator logic includes numbers.
- **includeSpecialChars** - You must set this value to `true` or `false` to
indicate whether the generator logic includes the following special
characters:
- !
- @
- \#
@@ -61,13 +80,17 @@ The structure of a generate random string transform requires the `name` of the r
- \>
- ?
- **length** - This is the required length of the randomly generated string.
> **Note** Due to identity attribute data constraints, the maximum allowable value is 450 characters.
> **Note** Due to identity attribute data constraints, the maximum allowable
> value is 450 characters.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
This transform generates a 16-character random string containing letters, numbers and special characters.
This transform generates a 16-character random string containing letters,
numbers and special characters.
**Transform Request Body**:
@@ -89,7 +112,8 @@ This transform generates a 16-character random string containing letters, number
<p>&nbsp;</p>
This transform generates an 8-character random string containing only letters and numbers.
This transform generates an 8-character random string containing only letters
and numbers.
**Transform Request Body**:

View File

@@ -12,11 +12,17 @@ tags: ["Transforms", "Operations", "End"]
## Overview
Use the get end of string transform as an out-of-the-box rule transform provided through SailPoint's Cloud Services Deployment Utility rule. The transform allows you to get the rightmost N characters of a string.
Use the get end of string transform as an out-of-the-box rule transform provided
through SailPoint's Cloud Services Deployment Utility rule. The transform allows
you to get the rightmost N characters of a string.
## Transform Structure
The structure of a get end of string transform requires the `name` of the referenced rule to be the `Cloud Services Deployment Utility` rule built by SailPoint. You must also set `operation` to `getEndOfString,` and provide a `numChars` value. Last, you must include the `type` and `name` attributes required for all transforms:
The structure of a get end of string transform requires the `name` of the
referenced rule to be the `Cloud Services Deployment Utility` rule built by
SailPoint. You must also set `operation` to `getEndOfString,` and provide a
`numChars` value. Last, you must include the `type` and `name` attributes
required for all transforms:
```json
{
@@ -33,15 +39,25 @@ The structure of a get end of string transform requires the `name` of the refere
## Attributes
- **Required Attributes**
- **type** - This must always be set to `rule`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This must always be set to `Cloud Services Deployment Utility`.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This must always be set to
`Cloud Services Deployment Utility`.
- **operation** - This must always be set to `getEndOfString`.
- **numChars** - This specifies how many of the rightmost characters within the incoming string the transform returns. If the value of numChars is greater than the string length, the transform returns null.
- **numChars** - This specifies how many of the rightmost characters within
the incoming string the transform returns. If the value of numChars is
greater than the string length, the transform returns null.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
@@ -70,7 +86,8 @@ Output: "1234"
<p>&nbsp;</p>
This transform returns a null value because the incoming string length is only 15 characters long, but the transform requests the rightmost 16 characters.
This transform returns a null value because the incoming string length is only
15 characters long, but the transform requests the rightmost 16 characters.
**Transform Request Body**:
@@ -79,7 +96,7 @@ This transform returns a null value because the incoming string length is only 1
"attributes": {
"name": "Cloud Services Deployment Utility",
"operation": "getEndOfString",
"numChars": "16",
"numChars": "16",
"input": "This is a test."
},
"type": "rule",

View File

@@ -4,7 +4,8 @@ title: Get Reference Identity Attribute
pagination_label: Get Reference Identity Attribute
sidebar_label: Get Reference Identity Attribute
sidebar_class_name: getReferenceIdentityAttribute
keywords: ["transforms", "operations", "get", "reference", "identity", "attribute"]
keywords:
["transforms", "operations", "get", "reference", "identity", "attribute"]
description: Get another user's identity attribute.
slug: /docs/transforms/operations/get-reference-identity-attribute
tags: ["Transforms", "Operations", "Identity", "Attribute"]
@@ -12,11 +13,20 @@ tags: ["Transforms", "Operations", "Identity", "Attribute"]
## Overview
Use the get reference identity attribute transform as an out-of-the-box rule provided through SailPoint's Cloud Services Deployment Utility rule. The transform allows you to get the identity attribute of another user from within a given identity's calculation. For your convenience, the transform allows you to use "manager" as a referential lookup to the target identity.
Use the get reference identity attribute transform as an out-of-the-box rule
provided through SailPoint's Cloud Services Deployment Utility rule. The
transform allows you to get the identity attribute of another user from within a
given identity's calculation. For your convenience, the transform allows you to
use "manager" as a referential lookup to the target identity.
## Transform Structure
The structure of a get reference identity transform requires the `name` of the referenced rule to be the `Cloud Services Deployment Utility` rule built by SailPoint. Additionally, you must set the `operation` to `getReferenceIdentityAttribute` and specify a `uid` attribute that correlates to the identity whose attribute is desired. Last, you must include the `type` and `name` attributes required for all transforms:
The structure of a get reference identity transform requires the `name` of the
referenced rule to be the `Cloud Services Deployment Utility` rule built by
SailPoint. Additionally, you must set the `operation` to
`getReferenceIdentityAttribute` and specify a `uid` attribute that correlates to
the identity whose attribute is desired. Last, you must include the `type` and
`name` attributes required for all transforms:
```json
{
@@ -34,15 +44,22 @@ The structure of a get reference identity transform requires the `name` of the r
## Attributes
- **Required Attributes**
- **type** - This must always be set to `rule`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This must always be set to `Cloud Services Deployment Utility`.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This must always be set to
`Cloud Services Deployment Utility`.
- **operation** - This must always be set to `getReferenceIdentityAttribute`.
- **uid** - This is the SailPoint User Name (uid) value of the identity whose attribute is desired.
- For your convenience, you can use the "manager" keyword to look up the user's manager and then get that manager's identity attribute.
- **uid** - This is the SailPoint User Name (uid) value of the identity whose
attribute is desired.
- For your convenience, you can use the "manager" keyword to look up the
user's manager and then get that manager's identity attribute.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
@@ -67,7 +84,8 @@ This transform gets the user's manager's email address.
<p>&nbsp;</p>
This transform gets the alternate phone number for the user identified as "corporate.admin".
This transform gets the alternate phone number for the user identified as
"corporate.admin".
**Transform Request Body**:

View File

@@ -12,17 +12,25 @@ tags: ["Transforms", "Operations", "Identity", "Attribute"]
## Overview
Use the identity attribute transform to get the value of a user's identity attribute. This transform is often useful within a source's account create or disable profile.
Use the identity attribute transform to get the value of a user's identity
attribute. This transform is often useful within a source's account create or
disable profile.
:::note Other Considerations
- This transform is **not** intended for use within another identity profile attribute's calculation. Identity attribute calculations are multi-threaded processes, and there is no guarantee that a specific attribute has current data, or even exists, at the time of calculation within any given transform. *Referencing identity attributes within another identity attribute's calculation can lead to identity exceptions.*
- This transform is **not** intended for use within another identity profile
attribute's calculation. Identity attribute calculations are multi-threaded
processes, and there is no guarantee that a specific attribute has current
data, or even exists, at the time of calculation within any given transform.
_Referencing identity attributes within another identity attribute's
calculation can lead to identity exceptions._
:::
## Transform Structure
The transform for identity attributes requires the desired identity attribute's system `name`, along with the `type` and `name` attributes:
The transform for identity attributes requires the desired identity attribute's
system `name`, along with the `type` and `name` attributes:
```json
{
@@ -37,13 +45,21 @@ The transform for identity attributes requires the desired identity attribute's
## Attributes
- **Required Attributes**
- **type** - This must always be set to `identityAttribute`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - The system (camel-cased) name of the identity attribute to bring in.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - The system (camel-cased) name of the identity
attribute to bring in.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples

View File

@@ -12,17 +12,26 @@ tags: ["Transforms", "Operations", "Index"]
## Overview
Use the index of transform to get the location of a specific substring within an incoming value. This transform is often useful in conjunction with the substring transform for getting parts of strings that can be dynamic in length or composition. If the substring you are searching for does not occur within the data, the transform returns -1.
Use the index of transform to get the location of a specific substring within an
incoming value. This transform is often useful in conjunction with the substring
transform for getting parts of strings that can be dynamic in length or
composition. If the substring you are searching for does not occur within the
data, the transform returns -1.
:::note Other Considerations
- If the substring you are searching for occurs multiple times within the incoming data, the transform returns the location of the first occurrence. If you want the last occurrence of a substring, use the [Last Index Of](./last-index-of.md) transform. If you want an occurrence that is neither first nor last, use the [Substring](./substring.md) transform.
- If the substring you are searching for occurs multiple times within the
incoming data, the transform returns the location of the first occurrence. If
you want the last occurrence of a substring, use the
[Last Index Of](./last-index-of.md) transform. If you want an occurrence that
is neither first nor last, use the [Substring](./substring.md) transform.
:::
## Transform Structure
The indexOf transform requires only the substring which you want to search for, along with the `type` and `name` attributes:
The indexOf transform requires only the substring which you want to search for,
along with the `type` and `name` attributes:
```json
{
@@ -37,17 +46,26 @@ The indexOf transform requires only the substring which you want to search for,
## Attributes
- **Required Attributes**
- **type** - This must always be set to `indexOf`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **substring** - This is the string whose beginning location within the incoming data you want to find.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **substring** - This is the string whose beginning location within the
incoming data you want to find.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
The "admin_" substring occurs at the very beginning of the input string, so this transform returns 0.
The "admin\_" substring occurs at the very beginning of the input string, so
this transform returns 0.
```bash
Input: "admin_jsmith"
@@ -70,7 +88,9 @@ Output: "0"
<p>&nbsp;</p>
Though the letter "b" occurs multiple times throughout the input string, the first time it occurs is within the index location 1, so the transform returns that value.
Though the letter "b" occurs multiple times throughout the input string, the
first time it occurs is within the index location 1, so the transform returns
that value.
```bash
Input: "abcabcabc"

View File

@@ -10,11 +10,14 @@ slug: /docs/transforms/operations
tags: ["Transforms", "Operations"]
---
This document lists each type of operation you can perform in a transform. Sometimes you will hear these transforms referred to as **Seaspray**, the codename for transforms.
This document lists each type of operation you can perform in a transform.
Sometimes you will hear these transforms referred to as **Seaspray**, the
codename for transforms.
## Transform Operations
Seaspray ships out of the box with a number of primitive operations. The following sections describe the operations.
Seaspray ships out of the box with a number of primitive operations. The
following sections describe the operations.
```mdx-code-block
import DocCardList from '@theme/DocCardList';

View File

@@ -12,7 +12,9 @@ tags: ["Transforms", "Operations", "ISO3166"]
## Overview
Use the ISO3166 transform to convert an incoming string into an [ISO 3166](https://en.wikipedia.org/wiki/ISO_3166) country code value. The incoming data must be either a recognized country name or country code:
Use the ISO3166 transform to convert an incoming string into an
[ISO 3166](https://en.wikipedia.org/wiki/ISO_3166) country code value. The
incoming data must be either a recognized country name or country code:
- The alpha2 country code (e.g. "ES")
- The alpha3 country code (e.g. "ESP")
@@ -28,13 +30,15 @@ The output value can be any of these three values:
:::note Other Considerations
If the input string to the transform does not represent a valid country code or country name, the transform returns null.
If the input string to the transform does not represent a valid country code or
country name, the transform returns null.
:::
## Transform Structure
The transform for iso3166 only requires the transform's `type` and `name` attributes:
The transform for iso3166 only requires the transform's `type` and `name`
attributes:
```json
{
@@ -48,19 +52,28 @@ The transform for iso3166 only requires the transform's `type` and `name` attrib
- **Required Attributes**
- **type** - This must always be set to `iso3166`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **format** - Use this optional value to denote which ISO 3166 format to return. The following values are valid:
- `alpha2` - Two-character country code (e.g., "US"). This is the default value if you do not provide a format.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **format** - Use this optional value to denote which ISO 3166 format to
return. The following values are valid:
- `alpha2` - Two-character country code (e.g., "US"). This is the default
value if you do not provide a format.
- `alpha3` - Three-character country code (e.g., "USA")
- `numeric` - The numeric country code (e.g., "840")
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
Because no specific format is provided, this transform defaults to the alpha2 output and returns "US".
Because no specific format is provided, this transform defaults to the alpha2
output and returns "US".
```bash
Input: "United States of America"
@@ -80,7 +93,8 @@ Output: "US"
<p>&nbsp;</p>
Because the desired format is specified as numeric, the output of this transform returns "724".
Because the desired format is specified as numeric, the output of this transform
returns "724".
```bash
Input: "ES"

View File

@@ -12,17 +12,26 @@ tags: ["Transforms", "Operations", "Last", "Index"]
## Overview
Use the last index of transform to get the last location of a specific substring within an incoming value. This transform is often useful in conjunction with the substring transform for getting parts of strings that can be dynamic in length or composition. If the substring you are searching for does not occur within the data, the transform returns -1.
Use the last index of transform to get the last location of a specific substring
within an incoming value. This transform is often useful in conjunction with the
substring transform for getting parts of strings that can be dynamic in length
or composition. If the substring you are searching for does not occur within the
data, the transform returns -1.
:::note Other Considerations
If the substring you are searching for occurs multiple times within the incoming data, the transform returns the location of the last occurrence. If you want the first occurrence of a substring, use the [Index Of](./index-of.md) transform. If you want an occurrence that is neither first nor last, use the [Substring](./substring.md) transform.
If the substring you are searching for occurs multiple times within the incoming
data, the transform returns the location of the last occurrence. If you want the
first occurrence of a substring, use the [Index Of](./index-of.md) transform. If
you want an occurrence that is neither first nor last, use the
[Substring](./substring.md) transform.
:::
## Transform Structure
The lastIndexOf transform requires only the substring you want to search for, along with the transform's `type` and `name` attributes:
The lastIndexOf transform requires only the substring you want to search for,
along with the transform's `type` and `name` attributes:
```json
{
@@ -37,17 +46,26 @@ The lastIndexOf transform requires only the substring you want to search for, al
## Attributes
- **Required Attributes**
- **type** - This must always be set to `lastIndexOf`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **substring** - This is the string whose beginning location within the incoming data you want to find.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **substring** - This is the string whose beginning location within the
incoming data you want to find.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
The "admin_" substring only occurs once at the very beginning of the input string, so this transform returns 0.
The "admin\_" substring only occurs once at the very beginning of the input
string, so this transform returns 0.
```bash
Input: "admin_jsmith"
@@ -70,7 +88,8 @@ Output: "0"
<p>&nbsp;</p>
While the letter "b" occurs multiple times throughout the input string, the last time it occurs is within index location 7, so this transform returns that value.
While the letter "b" occurs multiple times throughout the input string, the last
time it occurs is within index location 7, so this transform returns that value.
**Transform Request Body**:

View File

@@ -12,17 +12,23 @@ tags: ["Transforms", "Operations", "Left", "Pad"]
## Overview
Use the left pad transform to pad an incoming string with a user-supplied character out to a specific number of characters. This transform is often useful for data normalization situations in which data such as employee IDs are not uniform in length but need to be for downstream systems.
Use the left pad transform to pad an incoming string with a user-supplied
character out to a specific number of characters. This transform is often useful
for data normalization situations in which data such as employee IDs are not
uniform in length but need to be for downstream systems.
:::note Other Considerations
- If the input to the left pad transform is null, the transform returns a null value.
- If the input to the left pad transform is null, the transform returns a null
value.
:::
## Transform Structure
In addition to the standard `type` and `name` attributes, the left pad transform requires the `length` attribute, which tells the transform how many characters to pad the incoming string to.
In addition to the standard `type` and `name` attributes, the left pad transform
requires the `length` attribute, which tells the transform how many characters
to pad the incoming string to.
```json
{
@@ -38,19 +44,31 @@ In addition to the standard `type` and `name` attributes, the left pad transform
## Attributes
- **Required Attributes**
- **type** - This must always be set to `leftPad`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **length** - This is an integer value for the final output string's desired length.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **length** - This is an integer value for the final output string's desired
length.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **padding** - This string value represents the character the transform will pad the incoming data with to get to the desired length.
- If no padding value is provided, the transform defaults to a single space (" ") character for padding.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **padding** - This string value represents the character the transform will
    pad the incoming data with to get to the desired length.
- If no padding value is provided, the transform defaults to a single space
(" ") character for padding.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform takes the incoming attribute configured in the identity profile attribute UI and ensures it is padded out to 8 characters in length by adding "0"s to the left.
This transform takes the incoming attribute configured in the identity profile
attribute UI and ensures it is padded out to 8 characters in length by adding
"0"s to the left.
```bash
Input: "1234"
@@ -74,7 +92,8 @@ Output: "00001234"
<p>&nbsp;</p>
This transform takes the user's `employeeID` attribute from the HR source and ensures it is padded out to 7 characters in length by adding "x"s to the left.
This transform takes the user's `employeeID` attribute from the HR source and
ensures it is padded out to 7 characters in length by adding "x"s to the left.
```bash
Input: "1234"

View File

@@ -12,26 +12,32 @@ tags: ["Transforms", "Operations", "Lookup"]
## Overview
Use the lookup transform to take in an incoming string value and compare it to a list of key-value pairs to determine which output to return. If the incoming data matches a key, the transform returns the corresponding value. If the incoming key does not match a key, the transform returns the table's optional default value.
Use the lookup transform to take in an incoming string value and compare it to a
list of key-value pairs to determine which output to return. If the incoming
data matches a key, the transform returns the corresponding value. If the
incoming key does not match a key, the transform returns the table's optional
default value.
:::note Other Considerations
- If the input does not match any key value within the table and no default value is provided, the transform returns null.
- If the input does not match any key value within the table and no default
value is provided, the transform returns null.
:::
## Transform Structure
In addition to the `type` and `name` attributes, the structure of a lookup transform involves a `table` entry of key-value pairs:
In addition to the `type` and `name` attributes, the structure of a lookup
transform involves a `table` entry of key-value pairs:
```json
{
"attributes": {
"table": {
"USA": "Americas",
"FRA": "EMEA",
"AUS": "APAC",
"default": "Unknown Region"
"USA": "Americas",
"FRA": "EMEA",
"AUS": "APAC",
"default": "Unknown Region"
}
},
"type": "lookup",
@@ -42,18 +48,32 @@ In addition to the `type` and `name` attributes, the structure of a lookup trans
## Attributes
- **Required Attributes**
- **type** - This must always be set to `lookup`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **table** - This is a JSON object of key-value pairs. The key is the string the transform tries to match to the input, and the value is the output string the transform returns if it matches the key.
> **Note** This is a use for the optional default key value here: if none of the three countries in the earlier example matches the input string, the transform returns "Unknown Region" for the attribute mapped to this transform.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **table** - This is a JSON object of key-value pairs. The key is the string
the transform tries to match to the input, and the value is the output
string the transform returns if it matches the key.
> **Note** This is a use for the optional default key value here: if none of
> the three countries in the earlier example matches the input string, the
> transform returns "Unknown Region" for the attribute mapped to this
> transform.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform tries to map a telephone area code to a city in Texas. There is no default entry in the table map, so the transform returns null if the provided area code is not one of the four values.
This transform tries to map a telephone area code to a city in Texas. There is
no default entry in the table map, so the transform returns null if the provided
area code is not one of the four values.
**Transform Request Body**:
@@ -61,10 +81,10 @@ This transform tries to map a telephone area code to a city in Texas. There is n
{
"attributes": {
"table": {
"512": "Austin",
"281": "Houston",
"214": "Dallas",
"210": "San Antonio"
"512": "Austin",
"281": "Houston",
"214": "Dallas",
"210": "San Antonio"
}
},
"type": "lookup",
@@ -76,7 +96,9 @@ This transform tries to map a telephone area code to a city in Texas. There is n
<p>&nbsp;</p>
This transform extends the previous one to show how multiple key values can be mapped to the same output value. However, duplicate key values are not allowed, so this will throw an error.
This transform extends the previous one to show how multiple key values can be
mapped to the same output value. However, duplicate key values are not allowed,
so this will throw an error.
**Transform Request Body**:
@@ -84,12 +106,12 @@ This transform extends the previous one to show how multiple key values can be m
{
"attributes": {
"table": {
"512": "Austin",
"281": "Houston",
"713": "Houston",
"832": "Houston",
"214": "Dallas",
"210": "San Antonio"
"512": "Austin",
"281": "Houston",
"713": "Houston",
"832": "Houston",
"214": "Dallas",
"210": "San Antonio"
}
},
"type": "lookup",

View File

@@ -29,10 +29,16 @@ The lower transform only requires the transform's `type` and `name` attributes:
- **Required Attributes**
- **type** - This must always be set to `lower`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
![Lowercase Transform Input Screenshot](./img/lower_transform_input.png)
@@ -66,7 +72,8 @@ Output:"active"
<p>&nbsp;</p>
This transform takes in the input "All-Access" and produces "all-access" as the output.
This transform takes in the input "All-Access" and produces "all-access" as the
output.
```bash
Input:"All-Access"

View File

@@ -5,33 +5,44 @@ pagination_label: Name Normalizer
sidebar_label: Name Normalizer
sidebar_class_name: nameNormalizer
keywords: ["transforms", "operations", "name normalizer"]
description: Clean or standardize the spelling of strings coming in from source systems.
description:
Clean or standardize the spelling of strings coming in from source systems.
slug: /docs/transforms/operations/name-normalizer
tags: ["Transforms", "Operations", "Name"]
---
## Overview
Use the name normalizer transform to clean or standardize the spelling of strings coming in from source systems. The most common use for this transform is for names and other proper nouns, but the transform is not necessarily limited to those data elements.
Use the name normalizer transform to clean or standardize the spelling of
strings coming in from source systems. The most common use for this transform is
for names and other proper nouns, but the transform is not necessarily limited
to those data elements.
The normalization logic within the transform handles a wide range of use cases:
- Proper casing/capitalization of names
- Any string containing either a space, a hyphen or an apostrophe - the transform splits these by that character and capitalizes the first character of each resulting substring.
- Special replacements of patterns that include "MC" and "MAC" (or case-based variations of those two strings)
- The transform automatically converts "MC" to "Mc" and "MAC" to "Mac" when they are part of a patronymic last name.
- Consistent capitalization of strings that are part of a toponymic surname or a generational suffix:
- Any string containing either a space, a hyphen or an apostrophe - the
transform splits these by that character and capitalizes the first character
of each resulting substring.
- Special replacements of patterns that include "MC" and "MAC" (or case-based
variations of those two strings)
- The transform automatically converts "MC" to "Mc" and "MAC" to "Mac" when
they are part of a patronymic last name.
- Consistent capitalization of strings that are part of a toponymic surname or a
generational suffix:
- Convert "VON" to "von"
- Convert "DEL" to "del"
- Convert "OF" to "of"
- Convert "DE" to "de"
- Convert "LA" to "la"
- Convert "Y" to "y"
- Convert Roman numeral suffixes to all capitalized letters (e.g., "iii" becomes "III")
- Convert Roman numeral suffixes to all capitalized letters (e.g., "iii"
becomes "III")
## Transform Structure
The name normalizer transform only requires the transform's `type` and `name` attributes:
The name normalizer transform only requires the transform's `type` and `name`
attributes:
```json
{
@@ -45,11 +56,17 @@ The name normalizer transform only requires the transform's `type` and `name` at
- **Required Attributes**
- **type** - This must always be set to `normalizeNames`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
@@ -79,7 +96,8 @@ Output: "John von Smith"
<p>&nbsp;</p>
This transform takes the user's "LastName" attribute from the "HR Source" and normalizes the name to a consistent format.
This transform takes the user's "LastName" attribute from the "HR Source" and
normalizes the name to a consistent format.
```bash
Input: "Dr. JOHN D. O'BRIEN"

View File

@@ -12,11 +12,13 @@ tags: ["Transforms", "Operations", "Random", "Alphanumeric"]
## Overview
Use the random alphanumeric transform to generate a random string of any length, comprising both numbers and letters (both lowercase and uppercase).
Use the random alphanumeric transform to generate a random string of any length,
comprising both numbers and letters (both lowercase and uppercase).
## Transform Structure
The random alphanumeric transform only requires the standard `type` and `name` attributes:
The random alphanumeric transform only requires the standard `type` and `name`
attributes:
```json
{
@@ -28,19 +30,26 @@ The random alphanumeric transform only requires the standard `type` and `name` a
## Attributes
- **Required Attributes**
- **type** - This must always be set to `randomAlphaNumeric`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **length** - This is the integer value specifying the required size/number of characters the random string must contain.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **length** - This is the integer value specifying the required size/number
of characters the random string must contain.
- This value must be a positive number and cannot be blank.
- If no length is provided, the transform defaults to a value of 32.
- Due to identity attribute data constraints, the maximum allowable value is 450 characters.
- Due to identity attribute data constraints, the maximum allowable value is
450 characters.
## Examples
Since no explicit length is provided, this transform generates a 32-character random string, such as "VtPeE9WL56lMTlvfjr02KXqS3KtgDSuk".
Since no explicit length is provided, this transform generates a 32-character
random string, such as "VtPeE9WL56lMTlvfjr02KXqS3KtgDSuk".
**Transform Request Body**:
@@ -62,7 +71,7 @@ This transform generates a 10-character random string, such as "5GH2qsjU27".
```json
{
"attributes": {
"length": "10"
"length": "10"
},
"type": "randomAlphaNumeric",
"name": "Random Alphanumeric Transform"

View File

@@ -16,7 +16,8 @@ Use the random numeric transform to generate a random number of any length.
## Transform Structure
The random numeric transform only requires the standard `type` and `name` attributes:
The random numeric transform only requires the standard `type` and `name`
attributes:
```json
{
@@ -28,19 +29,26 @@ The random numeric transform only requires the standard `type` and `name` attrib
## Attributes
- **Required Attributes**
- **type** - This must always be set to `randomNumeric`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **length** - This is the integer value specifying the required size/number of digits the random number must contain.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **length** - This is the integer value specifying the required size/number
of digits the random number must contain.
- This value must be a positive number and cannot be blank.
- If no length is provided, the transform defaults to a value of 10.
- Due to identity attribute data constraints, the maximum allowable value is 450 characters.
- Due to identity attribute data constraints, the maximum allowable value is
450 characters.
## Examples
No explicit length is provided, so this transform generates a 10-digit random integer, such as "2334776774".
No explicit length is provided, so this transform generates a 10-digit random
integer, such as "2334776774".
**Transform Request Body**:
@@ -62,7 +70,7 @@ This transform generates a 6-digit random integer, such as "759931".
```json
{
"attributes": {
"length": "6"
"length": "6"
},
"type": "randomNumeric",
"name": "Random Numeric Transform"

View File

@@ -12,11 +12,16 @@ tags: ["Transforms", "Operations", "Reference"]
## Overview
Use the reference transform to reuse a transform that has already been written within another transform. This transform is often useful when you want to repeat the same logic multiple times within other transforms. This transform allows you to maintain only one transform and have it propagate through to other implementations of that logic.
Use the reference transform to reuse a transform that has already been written
within another transform. This transform is often useful when you want to repeat
the same logic multiple times within other transforms. This transform allows you
to maintain only one transform and have it propagate through to other
implementations of that logic.
## Transform Structure
In addition to the standard `type` and `name` attributes, the structure of a reference transform requires the `id` of the transform you want to reference:
In addition to the standard `type` and `name` attributes, the structure of a
reference transform requires the `id` of the transform you want to reference:
```json
{
@@ -31,17 +36,28 @@ In addition to the standard `type` and `name` attributes, the structure of a ref
## Attributes
- **Required Attributes**
- **type** - This must always be set to `reference`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **id** - This ID specifies the name of the pre-existing transform you want to use within your current transform.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **id** - This ID specifies the name of the pre-existing transform you want
to use within your current transform.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
If you had a "Get Worker Type" transform that evaluated multiple pieces of data to determine whether a user is an employee or a contractor, this transform would output the result of that transform just as if the logic were contained directly within this transform.
If you had a "Get Worker Type" transform that evaluated multiple pieces of data
to determine whether a user is an employee or a contractor, this transform would
output the result of that transform just as if the logic were contained directly
within this transform.
**Transform Request Body**:
@@ -59,7 +75,9 @@ If you had a "Get Worker Type" transform that evaluated multiple pieces of data
<p>&nbsp;</p>
This transform builds the user's display name, adds a hyphen to the end, and then adds the evaluated worker type from the earlier transform to build a string that would look something like "John Smith - Employee".
This transform builds the user's display name, adds a hyphen to the end, and
then adds the evaluated worker type from the earlier transform to build a string
that would look something like "John Smith - Employee".
**Transform Request Body**:

View File

@@ -12,19 +12,27 @@ tags: ["Transforms", "Operations", "Replace"]
## Overview
The replace all transform works like the replace transform, except that it can perform multiple replace operations on the incoming data instead of just one pattern. Use the replace all transform to find multiple patterns of characters within incoming data and replace all instances of those patterns with alternate values. The transform recognizes standard regex syntax. See the [References](#references) section for more information about regex.
The replace all transform works like the replace transform, except that it can
perform multiple replace operations on the incoming data instead of just one
pattern. Use the replace all transform to find multiple patterns of characters
within incoming data and replace all instances of those patterns with alternate
values. The transform recognizes standard regex syntax. See the
[References](#references) section for more information about regex.
## Transform Structure
The replace all transform takes a `table` attribute of key-value pairs as an argument. Each pair identifies the pattern to search for as its key and the replacement string as its value. The transform also requires the standard `type` and `name` attributes:
The replace all transform takes a `table` attribute of key-value pairs as an
argument. Each pair identifies the pattern to search for as its key and the
replacement string as its value. The transform also requires the standard `type`
and `name` attributes:
```json
{
"attributes": {
"table": {
"-": " ",
"\"": "'",
"ñ": "n"
"-": " ",
"\"": "'",
"ñ": "n"
}
},
"type": "replaceAll",
@@ -35,18 +43,27 @@ The replace transform takes a `table` attribute of key-value pairs as an argumen
## Attributes
- **Required Attributes**
- **type** - This must always be set to `replaceAll`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
  - **table** - This is the table of key-value pairs. Each key is the pattern you want to replace, and each value is the replacement string that replaces that pattern wherever it occurs.
  - **table** - This is the table of key-value pairs. Each key is the pattern
    you want to replace, and each value is the replacement string that replaces
    that pattern wherever it occurs.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform makes a simple set of special character replacements, exchanging a space for a hyphen and removing the Spanish tilde from the "n."
This transform makes a simple set of special character replacements, exchanging
a space for a hyphen and removing the Spanish tilde from the "n."
```bash
Input: "Enrique Jose-Piñon"
@@ -59,9 +76,9 @@ Output: "Enrique Jose Pinon"
{
"attributes": {
"table": {
".": "-",
"\"": "'",
"ñ": "n"
".": "-",
"\"": "'",
"ñ": "n"
}
},
"type": "replaceAll",
@@ -73,7 +90,8 @@ Output: "Enrique Jose Pinon"
<p>&nbsp;</p>
This example uses more complex regex patterns to remove any alphabet characters from the input string and replace periods with hyphens.
This example uses more complex regex patterns to remove any alphabet characters
from the input string and replace periods with hyphens.
```bash
Input: "ad512.777.1234"
@@ -92,7 +110,7 @@ Output: "512-777-1234"
}
},
"type": "replaceAll",
"name": "Replace All Transform",
"name": "Replace All Transform"
}
```

View File

@@ -12,11 +12,17 @@ tags: ["Transforms", "Operations", "Replace"]
## Overview
Use the replace transform to find a given pattern of characters within incoming data and replace all instances of that pattern with alternate values. The transform recognizes standard regex syntax. See the [References](#references) section for more information about regex.
Use the replace transform to find a given pattern of characters within incoming
data and replace all instances of that pattern with alternate values. The
transform recognizes standard regex syntax. See the [References](#references)
section for more information about regex.
## Transform Structure
The replace transform takes a `regex` attribute as an argument to identify which pattern to replace and a `replacement` attribute for the characters to replace the pattern with. The transform also requires the standard `type` and `name` attributes:
The replace transform takes a `regex` attribute as an argument to identify which
pattern to replace and a `replacement` attribute for the characters to replace
the pattern with. The transform also requires the standard `type` and `name`
attributes:
```json
{
@@ -32,18 +38,27 @@ The replace transform takes a `regex` attribute as an argument to identify which
## Attributes
- **Required Attributes**
- **type** - This must always be set to `replace`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **regex** - This is the pattern you want to replace.
- **replacement** - This is the replacement string that replaces the pattern wherever it occurs.
- **replacement** - This is the replacement string that replaces the pattern
wherever it occurs.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform makes a simple word replacement, exchanging "IIQ" for "IdentityNow".
This transform makes a simple word replacement, exchanging "IIQ" for
"IdentityNow".
```bash
Input: "Working with IIQ is fun"
@@ -67,7 +82,8 @@ Output: "Working with IdentityNow is fun"
<p>&nbsp;</p>
This example uses a more complex regex pattern to remove any non-alphabet characters from the input string.
This example uses a more complex regex pattern to remove any non-alphabet
characters from the input string.
```bash
Input: "The quick brown fox jumped over 10 lazy dogs"

View File

@@ -12,17 +12,23 @@ tags: ["Transforms", "Operations"]
## Overview
Use the right pad transform to pad an incoming string with a user-supplied character out to a specific number of characters. This transform is often useful for data normalization situations in which data such as employee IDs are not uniform in length but need to be for downstream systems.
Use the right pad transform to pad an incoming string with a user-supplied
character out to a specific number of characters. This transform is often useful
for data normalization situations in which data such as employee IDs are not
uniform in length but need to be for downstream systems.
:::note Other Considerations
- If the input to the right pad transform is null, the transform returns a null value.
- If the input to the right pad transform is null, the transform returns a null
value.
:::
## Transform Structure
In addition to the standard `type` and `name` attributes, the right pad transform requires the `length` attribute, which tells the transform how many characters to pad the incoming string to.
In addition to the standard `type` and `name` attributes, the right pad
transform requires the `length` attribute, which tells the transform how many
characters to pad the incoming string to.
```json
{
@@ -38,18 +44,27 @@ In addition to the standard `type` and `name` attributes, the right pad transfor
## Attributes
- **Required Attributes**
- **type** - This must always be set to `rightPad`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **length** - This is an integer value for the final output string's desired length.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **length** - This is an integer value for the final output string's desired
length.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
  - **padding** - This string value represents the character the transform will pad the incoming data with to get to the desired length.
- If no padding value is provided, the transform defaults to a single space (" ") character for padding.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
  - **padding** - This string value represents the character the transform will
    pad the incoming data with to get to the desired length.
- If no padding value is provided, the transform defaults to a single space
(" ") character for padding.
## Examples
This transform takes the incoming attribute configured in the Identity Profile attribute UI and ensures it is padded out to 8 characters in length by adding "0"s to the right.
This transform takes the incoming attribute configured in the Identity Profile
attribute UI and ensures it is padded out to 8 characters in length by adding
"0"s to the right.
```bash
Input: "1234"
@@ -73,7 +88,8 @@ Output: "12340000"
<p>&nbsp;</p>
This example takes the user's employeeID attribute from the HR source and ensures it is padded out to 7 characters in length by adding "x"s to the right.
This example takes the user's employeeID attribute from the HR source and
ensures it is padded out to 7 characters in length by adding "x"s to the right.
```bash
Input: "1234"

View File

@@ -5,18 +5,23 @@ pagination_label: Rule
sidebar_label: Rule
sidebar_class_name: rule
keywords: ["transforms", "operations", "rule"]
description: Reuse rule logic that has already been written for a previous use case.
description:
Reuse rule logic that has already been written for a previous use case.
slug: /docs/transforms/operations/rule
tags: ["Transforms", "Operations", "Rule"]
---
## Overview
Like the reference transform, the rule transform allows you to reuse logic that has already been written for a previous use case. However, you can use the rule transform to reuse code contained within a Generic rule that either is not possible through only transforms or is too complex to maintain with Seaspray.
Like the reference transform, the rule transform allows you to reuse logic that
has already been written for a previous use case. However, you can use the rule
transform to reuse code contained within a Generic rule that either is not
possible through only transforms or is too complex to maintain with Seaspray.
## Transform Structure
In addition to the standard `type` and `name` attributes, the structure of a rule transform requires the `name` of the rule you want to reference:
In addition to the standard `type` and `name` attributes, the structure of a
rule transform requires the `name` of the rule you want to reference:
```json
{
@@ -31,17 +36,27 @@ In addition to the standard `type` and `name` attributes, the structure of a rul
## Attributes
- **Required Attributes**
- **type** - This must always be set to `rule`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This is the name of the Generic rule the transform must invoke.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **attributes.name** - This is the name of the Generic rule the transform
must invoke.
- **Optional Attributes**
- The rule transform can implement variables within the attributes list. These variables can be defined as static string values or even as the results of other transforms. Any variables defined here are passed to the Generic rule and are available for calculation within that code.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- The rule transform can implement variables within the attributes list. These
variables can be defined as static string values or even as the results of
other transforms. Any variables defined here are passed to the Generic rule
and are available for calculation within that code.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
If you had a "Generate Random Number" rule that produced a random integer value, this transform would invoke that rule and return the output of the code contained within it.
If you had a "Generate Random Number" rule that produced a random integer value,
this transform would invoke that rule and return the output of the code
contained within it.
**Transform Request Body**:
@@ -59,7 +74,13 @@ If you had a "Generate Random Number" rule that produced a random integer value,
<p>&nbsp;</p>
This transform shows a more complex use case in which you have a Generic rule written to perform various string manipulation tasks. If the manner the rule code uses to determine which task to run is passed to it by the `operation` variable and the operation is intended to get the last n characters of a string, n can be provided to the rule via the `numChars` variable. This transform invokes rule code to get the last three characters of the string passed to it with the UI configuration.
This transform shows a more complex use case in which you have a Generic rule
written to perform various string manipulation tasks. If the manner the rule
code uses to determine which task to run is passed to it by the `operation`
variable and the operation is intended to get the last n characters of a string,
n can be provided to the rule via the `numChars` variable. This transform
invokes rule code to get the last three characters of the string passed to it
with the UI configuration.
**Transform Request Body**:

View File

@@ -12,11 +12,18 @@ tags: ["Transforms", "Operations", "Split"]
## Overview
Use the split transform to use a specific character or regex string as a delimiter and convert a single incoming string into an array of values. This transform then returns the Nth element of that array. This transform is often useful when you want to split combined names into their constituent parts or when you want to simplify an ordered list of values into a single attribute.
Use the split transform to use a specific character or regex string as a
delimiter and convert a single incoming string into an array of values. This
transform then returns the Nth element of that array. This transform is often
useful when you want to split combined names into their constituent parts or
when you want to simplify an ordered list of values into a single attribute.
## Transform Structure
In addition to the standard `type` and `name` attributes, the split transform requires the `delimiter` and `index` attributes. These parameters, respectively, tell the transform what to use as the pattern to split the string with and which entry in the resulting array of values you want it to return.
In addition to the standard `type` and `name` attributes, the split transform
requires the `delimiter` and `index` attributes. These parameters, respectively,
tell the transform what to use as the pattern to split the string with and which
entry in the resulting array of values you want it to return.
```json
{
@@ -32,21 +39,36 @@ In addition to the standard `type` and `name` attributes, the split transform re
## Attributes
- **Required Attributes**
- **type** - This must always be set to `split`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
  - **delimiter** - This can be either a single character or a regex expression. Transforms use it to identify the break point between two substrings in the incoming data.
- **index** - This is the integer value for the desired array element after the incoming data has been split into a list. The array is a 0-based object, so the first array element would be index 0, the second element would be index 1, etc.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
  - **delimiter** - This can be either a single character or a regex expression.
    Transforms use it to identify the break point between two substrings in the
    incoming data.
- **index** - This is the integer value for the desired array element after
the incoming data has been split into a list. The array is a 0-based object,
so the first array element would be index 0, the second element would be
index 1, etc.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **throws** - This boolean (true/false) value indicates whether an exception is thrown and returned as an output when an index is out of bounds with the resulting array (i.e., the provided `index` value is larger than the size of the array).
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **throws** - This boolean (true/false) value indicates whether an exception
is thrown and returned as an output when an index is out of bounds with the
resulting array (i.e., the provided `index` value is larger than the size of
the array).
- true - The transform returns "IndexOutOfBoundsException".
- false - The transform returns null.
    - If no throws value is provided, the transform defaults to false and returns a null.
    - If no throws value is provided, the transform defaults to false and
      returns a null.
## Examples
This transform takes the incoming attribute configured in the Identity Profile attribute UI, splits it based on the colon `:` character, and returns the second array element (array index 1).
This transform takes the incoming attribute configured in the Identity Profile
attribute UI, splits it based on the colon `:` character, and returns the second
array element (array index 1).
```bash
Input: "abc:123"
@@ -70,7 +92,8 @@ Output: "123"
<p>&nbsp;</p>
This transform takes the explicit string provided, splits it based on the space (" ") character, and returns the fourth word (array index 3).
This transform takes the explicit string provided, splits it based on the space
(" ") character, and returns the fourth word (array index 3).
```bash
Input: "The quick brown fox jumped over 10 lazy dogs"

View File

@@ -12,20 +12,34 @@ tags: ["Transforms", "Operations", "Static"]
## Overview
Use the static transform to return a fixed string value, or more commonly, to evaluate [Velocity Template Language](https://velocity.apache.org/engine/1.7/user-guide.html). The latter implementation is often useful when you are using if/then/else logic or iterating through a for loop. The static transform can also take other dynamically provided variables as inputs into the `value` attribute.
Use the static transform to return a fixed string value, or more commonly, to
evaluate
[Velocity Template Language](https://velocity.apache.org/engine/1.7/user-guide.html).
The latter implementation is often useful when you are using if/then/else logic
or iterating through a for loop. The static transform can also take other
dynamically provided variables as inputs into the `value` attribute.
:::note Other Considerations
- When you are using static transforms within the create profile for a source, you can enter the value directly in the input field if the user selects Static as the attribute type.
- Because IdentityNow supports the ability to order attributes in a create profile, you can use the Velocity template of an attribute higher in the list to generate a static value for an attribute that is lower in the list. For example, if you wanted to generate an email address based on a display name, you would need to do the following:
- Move the "displayName" attribute higher in the list so that it is created before the email address.
- In the "mail" attribute, select Static for the attribute type. In the Static Value field, enter `$displayName@yourdomain.com`.
- When you are using static transforms within the create profile for a source,
you can enter the value directly in the input field if the user selects Static
as the attribute type.
- Because IdentityNow supports the ability to order attributes in a create
profile, you can use the Velocity template of an attribute higher in the list
to generate a static value for an attribute that is lower in the list. For
example, if you wanted to generate an email address based on a display name,
  you would need to do the following:
- Move the "displayName" attribute higher in the list so that it is created
before the email address.
- In the "mail" attribute, select Static for the attribute type. In the Static
Value field, enter `$displayName@yourdomain.com`.
:::
## Transform Structure
In addition to the standard `type` and `name` attributes, the static transform requires a value attribute to be specified:
In addition to the standard `type` and `name` attributes, the static transform
requires a value attribute to be specified:
```json
{
@@ -40,17 +54,27 @@ In addition to the standard `type` and `name` attributes, the static transform r
## Attributes
- **Required Attributes**
- **type** - This must always be set to `static.`
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **value** - This must evaluate to a JSON string either through a fixed value or through conditional logic using Velocity Template Language.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **value** - This must evaluate to a JSON string either through a fixed value
or through conditional logic using Velocity Template Language.
- **Optional Attributes**
- The static transform can implement variables within the `value` expression. These variables can be defined as optional attributes within the transform and can themselves be the results of other transforms.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- The static transform can implement variables within the `value` expression.
These variables can be defined as optional attributes within the transform
and can themselves be the results of other transforms.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
## Examples
This transform uses a dynamic variable called `workerType`, which is set to the value of the user's HR record's empType value. The static transform then returns that value through the use of Velocity variable notation (i.e., `$<variableName>`).
This transform uses a dynamic variable called `workerType`, which is set to the
value of the user's HR record's empType value. The static transform then returns
that value through the use of Velocity variable notation (i.e.,
`$<variableName>`).
**Transform Request Body**:
@@ -75,7 +99,8 @@ This transform uses a dynamic variable called `workerType`, which is set to the
<p>&nbsp;</p>
This transform extends the previous one to show how you can use if/else logic to return data based on contingent logic.
This transform extends the previous one to show how you can use if/else logic to
return data based on contingent logic.
**Transform Request Body**:

View File

@@ -12,17 +12,23 @@ tags: ["Transforms", "Operations", "Substring"]
## Overview
Use the substring transform to get the inner portion of a string passed into the transform. You can use the substring transform to get the first n characters or get a set number of characters within the middle of a string.
Use the substring transform to get the inner portion of a string passed into the
transform. You can use the substring transform to get the first n characters or
get a set number of characters within the middle of a string.
:::note Other Considerations
- The substring transform does not currently provide an easy way to get the last n characters of a string. To do so, use the [Get End of String](./get-end-of-string.md) transform.
- The substring transform does not currently provide an easy way to get the last
n characters of a string. To do so, use the
[Get End of String](./get-end-of-string.md) transform.
:::
## Transform Structure
In addition to the standard `type` and `name` attributes, the substring transform requires you to provide the beginning location of the input, which indicates the start of the desired substring output:
In addition to the standard `type` and `name` attributes, the substring
transform requires you to provide the beginning location of the input, which
indicates the start of the desired substring output:
```json
{
@@ -37,22 +43,41 @@ In addition to the standard `type` and `name` attributes, the substring transfor
## Attributes
- **Required Attributes**
- **type** - This must always be set to `substring.`
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **begin** - This is the integer value for the location within the input data that contains the first character of the substring you want to return.
- If `begin` is set to -1, the transform begins at character 0 of the input data.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **begin** - This is the integer value for the location within the input data
that contains the first character of the substring you want to return.
- If `begin` is set to -1, the transform begins at character 0 of the input
data.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **beginOffset** - This integer value is the number of characters to add to the `begin` attribute when the transform returns a substring. The transform only uses this attribute if `begin` is not -1.
- **end** - This is the integer value for the location within the input data that no longer contains the substring you want to return.
- If `end` is -1 or not provided at all, the substring transform returns everything up to the end of the input string.
- **endOffset** - This integer value is the number of characters to add to the `end` attribute when the transform returns a substring. The transform only uses this attribute if `end` is provided and is not -1.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **beginOffset** - This integer value is the number of characters to add to
the `begin` attribute when the transform returns a substring. The transform
only uses this attribute if `begin` is not -1.
- **end** - This is the integer value for the location within the input data
that no longer contains the substring you want to return.
- If `end` is -1 or not provided at all, the substring transform returns
everything up to the end of the input string.
- **endOffset** - This integer value is the number of characters to add to the
`end` attribute when the transform returns a substring. The transform only
uses this attribute if `end` is provided and is not -1.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples
This transform uses a zero-based array to determine that the letter "c" is the begin index of the substring it will return because "c" is in index location 2. Index location 4 contains the letter "e," so the transform will return a substring consisting of anything between the letters "c" and "e," including "c" but excluding "e".
This transform uses a zero-based array to determine that the letter "c" is the
begin index of the substring it will return because "c" is in index location 2.
Index location 4 contains the letter "e," so the transform will return a
substring consisting of anything between the letters "c" and "e," including "c"
but excluding "e".
```bash
Input: "abcdef"
@@ -76,7 +101,13 @@ Output: "cd"
<p>&nbsp;</p>
This transform uses a zero-based array to determine that the substring transform must begin with the letter "c" because even though the letter "b" is in index location 1, the beginOffset value indicates one additional character must be skipped. Index location 3 contains the letter "d," so the transform would normally end at the character preceding "d." However, with the endOffset value set to 2, the transform must include an additional two characters, "d" and "e". Thus, the transform returns "cde".
This transform uses a zero-based array to determine that the substring transform
must begin with the letter "c" because even though the letter "b" is in index
location 1, the beginOffset value indicates one additional character must be
skipped. Index location 3 contains the letter "d," so the transform would
normally end at the character preceding "d." However, with the endOffset value
set to 2, the transform must include an additional two characters, "d" and "e".
Thus, the transform returns "cde".
```bash
Input: "abcdef"

View File

@@ -5,14 +5,16 @@ pagination_label: Trim
sidebar_label: Trim
sidebar_class_name: trim
keywords: ["transforms", "operations", "trim"]
description: Trim whitespaces from both the beginning and ending of input strings.
description:
Trim whitespaces from both the beginning and ending of input strings.
slug: /docs/transforms/operations/substring
tags: ["Transforms", "Operations", "Trim"]
---
## Overview
Use the trim transform to trim whitespaces from both the beginning and ending of input strings.
Use the trim transform to trim whitespaces from both the beginning and ending of
input strings.
## Transform Structure
@@ -30,11 +32,17 @@ The trim transform only requires the transform's `type` and `name` attributes:
- **Required Attributes**
- **type** - This must always be set to `trim`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples

View File

@@ -30,11 +30,17 @@ The upper transform only requires the transform's `type` and `name` attributes:
- **Required Attributes**
- **type** - This must always be set to `upper`.
- **name** - This is a required attribute for all transforms. It represents the name of the transform as it will appear in the UI's dropdown menus.
- **name** - This is a required attribute for all transforms. It represents
the name of the transform as it will appear in the UI's dropdown menus.
- **Optional Attributes**
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether the transform logic should be reevaluated every evening as part of the identity refresh process.
- **input** - This is an optional attribute that can explicitly define the input data passed into the transform logic. If no input is provided, the transform takes its input from the source and attribute combination configured with the UI.
- **requiresPeriodicRefresh** - This `true` or `false` value indicates whether
the transform logic should be reevaluated every evening as part of the
identity refresh process.
- **input** - This is an optional attribute that can explicitly define the
input data passed into the transform logic. If no input is provided, the
transform takes its input from the source and attribute combination
configured with the UI.
## Examples

View File

@@ -5,29 +5,55 @@ pagination_label: Username Generator
sidebar_label: Username Generator
sidebar_class_name: usernameGenerator
keywords: ["transforms", "operations", "username", "generator"]
description: Derive a unique value for an attribute in an account create profile.
description:
Derive a unique value for an attribute in an account create profile.
slug: /docs/transforms/operations/username-generator
tags: ["Transforms", "Operations", "Username","Generator"]
tags: ["Transforms", "Operations", "Username", "Generator"]
---
## Overview
Use the username generator transform to specify logic to use when it derives a unique value for an attribute in an account create profile. The generator's logic can be as simple as a combination of parts of a user's name and/or HR data (e.g., firstName.lastName), but sometimes generator logic such as a uniqueness counter can be necessary to find a unique value in the target system (e.g., firstName.lastName1 if firstName.lastName is already taken).
Use the username generator transform to specify logic to use when it derives a
unique value for an attribute in an account create profile. The generator's
logic can be as simple as a combination of parts of a user's name and/or HR data
(e.g., firstName.lastName), but sometimes generator logic such as a uniqueness
counter can be necessary to find a unique value in the target system (e.g.,
firstName.lastName1 if firstName.lastName is already taken).
:::note Other Considerations
- The transform allows you to use "uniqueCounter" as a reserved variable for numerically trying the next iteration of the pattern. Once the generator is active on a pattern with the "uniqueCounter," it will keep incrementing until it either has found a unique username candidate, or it exhausts the "cloudMaxUniqueChecks" value. This means that the generator does not process any patterns after one containing "uniqueCounter." The "uniqueCounter" **must always be last** in the pattern list.
- Within the account attribute definition structure, there is a field for "cloudMaxUniqueChecks" that identifies how many times the generator must invoke the uniqueness check logic before it stops executing. The maximum allowed value for this field is 50.
- The transform allows you to use "uniqueCounter" as a reserved variable for
numerically trying the next iteration of the pattern. Once the generator is
active on a pattern with the "uniqueCounter," it will keep incrementing until
it either has found a unique username candidate, or it exhausts the
"cloudMaxUniqueChecks" value. This means that the generator does not process
any patterns after one containing "uniqueCounter." The "uniqueCounter" **must
always be last** in the pattern list.
- Within the account attribute definition structure, there is a field for
"cloudMaxUniqueChecks" that identifies how many times the generator must
invoke the uniqueness check logic before it stops executing. The maximum
allowed value for this field is 50.
:::
## Transform Structure
The username generator transform is intended for use as a configuration within the account create profile for a source. Thus, this transform's structure is more extensive than a typical Seaspray implementation -- it must be assigned to a create profile attribute (designated by `name`) and provide certain uniqueness check attributes such as `cloudMaxSize`, `cloudMaxUniqueChecks`, and `cloudRequired`.
The username generator transform is intended for use as a configuration within
the account create profile for a source. Thus, this transform's structure is
more extensive than a typical Seaspray implementation -- it must be assigned to
a create profile attribute (designated by `name`) and provide certain uniqueness
check attributes such as `cloudMaxSize`, `cloudMaxUniqueChecks`, and
`cloudRequired`.
The `cloudMaxSize` attribute denotes the maximum length of generated data allowable as a result of the generator logic. The transform truncates any characters over the `cloudMaxSize`. The `cloudMaxUniqueChecks` attribute determines the maximum number of iterations the generator must attempt before failing to generate a value. The `cloudRequired` attribute is an internal flag required for the IdentityNow platform - leave it as `true`.
The `cloudMaxSize` attribute denotes the maximum length of generated data
allowable as a result of the generator logic. The transform truncates any
characters over the `cloudMaxSize`. The `cloudMaxUniqueChecks` attribute
determines the maximum number of iterations the generator must attempt before
failing to generate a value. The `cloudRequired` attribute is an internal flag
required for the IdentityNow platform - leave it as `true`.
Provide the username generator transform itself in the create profile attribute entry's `transform` parameter.
Provide the username generator transform itself in the create profile attribute
entry's `transform` parameter.
```json
{
@@ -95,20 +121,39 @@ Provide the username generator transform itself in the create profile attribute
## Attributes
- **Required Attributes**
- **type** - This must always be set to `usernameGenerator`.
- **patterns** - This is a JSON array of patterns for the generator to evaluate for uniqueness, in sequential order.
> **Note** that you can leverage `$uniqueCounter` here to automatically increment a counter if the generated value is not available and you want to try appending numeric values (i.e., 1, 2, 3, etc.) instead of progressing beyond the current pattern.
- **patterns** - This is a JSON array of patterns for the generator to
evaluate for uniqueness, in sequential order.
> **Note** that you can leverage `$uniqueCounter` here to automatically
> increment a counter if the generated value is not available and you want
> to try appending numeric values (i.e., 1, 2, 3, etc.) instead of
> progressing beyond the current pattern.
- **Optional Attributes**
- **sourceCheck** - This boolean value (true/false) indicates whether the generator must check only the IdentityNow database's representation of accounts for uniqueness, or whether it must query the target system directly. If no value is provided, the attribute defaults to `false`.
- `true` indicates the generator must check the target system directly. The generator only respects this setting if the system supports the `getObject` functionality. For systems that lack the ability to query for single account objects, the generator ignores this setting and defaults to `false`. The generator only checks the attribute identified in the account schema as the `accountID`.
- `false` indicates the generator must check only the IdentityNow database of accounts. The generator only checks the `accountID`.
- **sourceCheck** - This boolean value (true/false) indicates whether the
generator must check only the IdentityNow database's representation of
accounts for uniqueness, or whether it must query the target system
directly. If no value is provided, the attribute defaults to `false`.
- `true` indicates the generator must check the target system directly. The
generator only respects this setting if the system supports the
`getObject` functionality. For systems that lack the ability to query for
single account objects, the generator ignores this setting and defaults to
`false`. The generator only checks the attribute identified in the account
schema as the `accountID`.
- `false` indicates the generator must check only the IdentityNow database
of accounts. The generator only checks the `accountID`.
## Examples
This generator takes the user's first initial, appends the user's full last name, and then leverages a uniqueness counter to generate a unique value for userId. For example, if the user's name were John Doe, the username generator would first try `jdoe.` If that were not unique, it would progress to `jdoe1`, then `jdoe2`, until `jdoe25`.
This generator takes the user's first initial, appends the user's full last
name, and then leverages a uniqueness counter to generate a unique value for
userId. For example, if the user's name were John Doe, the username generator
would first try `jdoe.` If that were not unique, it would progress to `jdoe1`,
then `jdoe2`, until `jdoe25`.
If the generator does not find a unique value within the first 25 tries, it returns an IllegalStateException.
If the generator does not find a unique value within the first 25 tries, it
returns an IllegalStateException.
**Transform Request Body**:
@@ -126,9 +171,7 @@ If the generator does not find a unique value within the first 25 tries, it retu
"type": "usernameGenerator",
"attributes": {
"sourceCheck": true,
"patterns": [
"$fi$ln${uniqueCounter}"
],
"patterns": ["$fi$ln${uniqueCounter}"],
"ln": {
"type": "identityAttribute",
"attributes": {
@@ -158,9 +201,14 @@ If the generator does not find a unique value within the first 25 tries, it retu
<p>&nbsp;</p>
This generator takes the user's first name, appends a period and then the user's full last name, and then adds a uniqueness counter to generate a unique value for accountId. For example, if the user's name were Adam Smith, the username generator would first try "adam.smith". If that were not unique, it would progress to "adam.smith1", then "adam.smith2", until "adam.smith10".
This generator takes the user's first name, appends a period and then the user's
full last name, and then adds a uniqueness counter to generate a unique value
for accountId. For example, if the user's name were Adam Smith, the username
generator would first try "adam.smith". If that were not unique, it would
progress to "adam.smith1", then "adam.smith2", until "adam.smith10".
If the generator does not find a unique value within the first 10 tries, it returns an IllegalStateException.
If the generator does not find a unique value within the first 10 tries, it
returns an IllegalStateException.
**Transform Request Body**:
@@ -178,9 +226,7 @@ If the generator does not find a unique value within the first 10 tries, it retu
"type": "usernameGenerator",
"attributes": {
"sourceCheck": true,
"patterns": [
"$fn.$ln${uniqueCounter}"
],
"patterns": ["$fn.$ln${uniqueCounter}"],
"fn": {
"type": "identityAttribute",
"attributes": {

Some files were not shown because too many files have changed in this diff Show More