generate speakeasy sdk

Mike Lueders
2024-02-29 15:07:52 -06:00
parent e8d67cbb9c
commit b6bc5b703f
27 changed files with 2456 additions and 1 deletion

2
.gitattributes vendored Normal file

@@ -0,0 +1,2 @@
# This allows generated code to be indexed correctly
*.py linguist-generated=false

8
.gitignore vendored

@@ -1 +1,7 @@
.idea
venv/
src/*.egg-info/
__pycache__/
.pytest_cache/
.python-version
.DS_Store
.idea

38
.speakeasy/gen.lock Executable file

@@ -0,0 +1,38 @@
lockVersion: 2.0.0
id: f973dc21-d1bd-4e48-971a-a3918272099b
management:
docChecksum: 3851061474ad0b751f638e537d98fd25
docVersion: "1"
speakeasyVersion: internal
generationVersion: 2.275.2
releaseVersion: 0.0.1
configChecksum: 5be955b039369e57b218570e7f64d06f
features:
python:
constsAndDefaults: 0.1.2
core: 4.5.0
flattening: 2.81.1
globalServerURLs: 2.82.1
generatedFiles:
- src/shippo/sdkconfiguration.py
- src/shippo/sdk.py
- pylintrc
- setup.py
- src/shippo/__init__.py
- src/shippo/utils/__init__.py
- src/shippo/utils/retries.py
- src/shippo/utils/utils.py
- src/shippo/models/errors/sdkerror.py
- tests/helpers.py
- src/shippo/models/operations/example.py
- src/shippo/models/__init__.py
- src/shippo/models/errors/__init__.py
- src/shippo/models/operations/__init__.py
- docs/models/operations/examplerequest.md
- docs/models/operations/exampleresponse.md
- docs/sdks/shippo/README.md
- USAGE.md
- .gitattributes
- src/shippo/_hooks/sdkhooks.py
- src/shippo/_hooks/types.py
- src/shippo/_hooks/__init__.py

35
.speakeasy/gen.yaml Executable file

@@ -0,0 +1,35 @@
configVersion: 2.0.0
generation:
sdkClassName: shippo
maintainOpenAPIOrder: true
usageSnippets:
optionalPropertyRendering: withExample
useClassNamesForArrayFields: true
fixes:
nameResolutionDec2023: true
parameterOrderingFeb2024: true
requestResponseComponentNamesFeb2024: true
auth:
oAuth2ClientCredentialsEnabled: false
python:
version: 0.0.1
additionalDependencies:
dependencies: {}
extraDependencies:
dev: {}
author: Speakeasy
clientServerStatusCodesAsErrors: true
description: Python Client SDK Generated by Speakeasy
flattenGlobalSecurity: true
imports:
option: openapi
paths:
callbacks: models/callbacks
errors: models/errors
operations: models/operations
shared: models/components
webhooks: models/webhooks
inputModelSuffix: input
maxMethodParams: 4
outputModelSuffix: output
packageName: shippo-api-client

9
.speakeasy/workflow.yaml Normal file

@@ -0,0 +1,9 @@
workflowVersion: 1.0.0
sources:
openapi:
inputs:
- location: ./openapi.yaml
targets:
first-target:
target: python
source: openapi

164
README.md Normal file

@@ -0,0 +1,164 @@
# shippo-api-client
<div align="left">
<a href="https://speakeasyapi.dev/"><img src="https://custom-icon-badges.demolab.com/badge/-Built%20By%20Speakeasy-212015?style=for-the-badge&logoColor=FBE331&logo=speakeasy&labelColor=545454" /></a>
<a href="https://opensource.org/licenses/MIT">
<img src="https://img.shields.io/badge/License-MIT-blue.svg" style="width: 100px; height: 28px;" />
</a>
</div>
## 🏗 **Welcome to your new SDK!** 🏗
It has been generated successfully based on your OpenAPI spec. However, it is not yet ready for production use. Here are some next steps:
- [ ] 🛠 Make your SDK feel handcrafted by [customizing it](https://www.speakeasyapi.dev/docs/customize-sdks)
- [ ] ♻️ Refine your SDK quickly by iterating locally with the [Speakeasy CLI](https://github.com/speakeasy-api/speakeasy)
- [ ] 🎁 Publish your SDK to package managers by [configuring automatic publishing](https://www.speakeasyapi.dev/docs/productionize-sdks/publish-sdks)
- [ ] ✨ When ready to productionize, delete this section from the README
<!-- Start SDK Installation [installation] -->
## SDK Installation
```bash
pip install git+<UNSET>.git
```
<!-- End SDK Installation [installation] -->
<!-- Start SDK Example Usage [usage] -->
## SDK Example Usage
### Example
```python
import shippo
s = shippo.Shippo()
res = s.example(results_per_page=904965)
if res.status_code == 200:
    # handle response
    pass
```
<!-- End SDK Example Usage [usage] -->
<!-- Start Available Resources and Operations [operations] -->
## Available Resources and Operations
### [Shippo SDK](docs/sdks/shippo/README.md)
* [example](docs/sdks/shippo/README.md#example)
<!-- End Available Resources and Operations [operations] -->
<!-- Start Error Handling [errors] -->
## Error Handling
Handling errors in this SDK should largely match your expectations. All operations return a response object or raise an error. If Error objects are specified in your OpenAPI Spec, the SDK will raise the appropriate Error type.
| Error Object | Status Code | Content Type |
| --------------- | --------------- | --------------- |
| errors.SDKError | 4xx-5xx | */* |
### Example
```python
import shippo
from shippo.models import errors
s = shippo.Shippo()
res = None
try:
    res = s.example(results_per_page=904965)
except errors.SDKError as e:
    # handle exception
    raise(e)

if res.status_code == 200:
    # handle response
    pass
```
<!-- End Error Handling [errors] -->
<!-- Start Server Selection [server] -->
## Server Selection
### Select Server by Index
You can override the default server globally by passing a server index to the `server_idx: int` optional parameter when initializing the SDK client instance. The selected server will then be used as the default on the operations that use it. This table lists the indexes associated with the available servers:
| # | Server | Variables |
| - | ------ | --------- |
| 0 | `https://example.com` | None |
#### Example
```python
import shippo
s = shippo.Shippo(
    server_idx=0,
)

res = s.example(results_per_page=904965)

if res.status_code == 200:
    # handle response
    pass
```
### Override Server URL Per-Client
The default server can also be overridden globally by passing a URL to the `server_url: str` optional parameter when initializing the SDK client instance. For example:
```python
import shippo
s = shippo.Shippo(
    server_url="https://example.com",
)

res = s.example(results_per_page=904965)

if res.status_code == 200:
    # handle response
    pass
```
<!-- End Server Selection [server] -->
<!-- Start Custom HTTP Client [http-client] -->
## Custom HTTP Client
The Python SDK makes API calls using the [requests](https://pypi.org/project/requests/) HTTP library. In order to provide a convenient way to configure timeouts, cookies, proxies, custom headers, and other low-level configuration, you can initialize the SDK client with a custom `requests.Session` object.
For example, you could specify a header for every request that this SDK makes as follows:
```python
import shippo
import requests
http_client = requests.Session()
http_client.headers.update({'x-custom-header': 'someValue'})
s = shippo.Shippo(client=http_client)
```
<!-- End Custom HTTP Client [http-client] -->
<!-- Placeholder for Future Speakeasy SDK Sections -->
# Development
## Maturity
This SDK is in beta, and there may be breaking changes between versions without a major version update. Therefore, we recommend pinning usage
to a specific package version. This way, you can install the same version each time without breaking changes unless you are intentionally
looking for the latest version.
## Contributions
While we value open-source contributions to this SDK, this library is generated programmatically.
Feel free to open a PR or a GitHub issue as a proof of concept and we'll do our best to include it in a future release!
### SDK Created by [Speakeasy](https://docs.speakeasyapi.dev/docs/using-speakeasy/client-sdks)

14
USAGE.md Normal file

@@ -0,0 +1,14 @@
<!-- Start SDK Example Usage [usage] -->
```python
import shippo
s = shippo.Shippo()
res = s.example(results_per_page=904965)
if res.status_code == 200:
    # handle response
    pass
```
<!-- End SDK Example Usage [usage] -->

docs/models/operations/examplerequest.md Normal file

@@ -0,0 +1,8 @@
# ExampleRequest
## Fields
| Field | Type | Required | Description |
| -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- |
| `results_per_page` | *Optional[int]* | :heavy_minus_sign: | The number of results to return per page (max 100) |

docs/models/operations/exampleresponse.md Normal file

@@ -0,0 +1,10 @@
# ExampleResponse
## Fields
| Field | Type | Required | Description |
| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- |
| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation |
| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation |
| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing |

docs/sdks/shippo/README.md Normal file

@@ -0,0 +1,41 @@
# Shippo SDK
## Overview
### Available Operations
* [example](#example)
## example
### Example Usage
```python
import shippo
s = shippo.Shippo()
res = s.example(results_per_page=904965)
if res.status_code == 200:
    # handle response
    pass
```
### Parameters
| Parameter | Type | Required | Description |
| -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- |
| `results_per_page` | *Optional[int]* | :heavy_minus_sign: | The number of results to return per page (max 100) |
### Response
**[operations.ExampleResponse](../../models/operations/exampleresponse.md)**
### Errors
| Error Object | Status Code | Content Type |
| --------------- | --------------- | --------------- |
| errors.SDKError | 4xx-5xx | */* |

644
pylintrc Normal file

@@ -0,0 +1,644 @@
[MAIN]
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
# in a server-like mode.
clear-cache-post-run=no
# Load and enable all available extensions. Use --list-extensions to see a list
# all available extensions.
#enable-all-extensions=
# In error mode, messages with a category besides ERROR or FATAL are
# suppressed, and no reports are done by default. Error mode is compatible with
# disabling specific errors.
#errors-only=
# Always return a 0 (non-error) status code, even if lint errors are found.
# This is primarily useful in continuous integration scripts.
#exit-zero=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold under which the program will exit with error.
fail-under=10
# Interpret the stdin as a python script, whose filename needs to be passed as
# the module_or_package argument.
#from-stdin=
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS
# Add files or directories matching the regular expressions patterns to the
# ignore-list. The regex matches against paths and can be in Posix or Windows
# format. Because '\\' represents the directory delimiter on Windows systems,
# it can't be used as an escape character.
ignore-paths=
# Files or directories matching the regular expression patterns are skipped.
# The regex matches against base names, not paths. The default value ignores
# Emacs file locks
ignore-patterns=^\.#
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use, and will cap the count on Windows to
# avoid hangs.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.8
# Discover python modules and packages in the file system subtree.
recursive=no
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# In verbose mode, extra non-checker-related info will be displayed.
#verbose=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style. If left empty, argument names will be checked with the set
# naming style.
#argument-rgx=
# Naming style matching correct attribute names.
#attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style. If left empty, attribute names will be checked with the set naming
# style.
attr-rgx=[^\W\d][^\W]*|__.*__$
# Bad variable names which should always be refused, separated by a comma.
bad-names=
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style. If left empty, class attribute names will be checked
# with the set naming style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style. If left empty, class constant names will be checked with
# the set naming style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style. If left empty, class names will be checked with the set naming style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style. If left empty, constant names will be checked with the set naming
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style. If left empty, function names will be checked with the set
# naming style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
           j,
           k,
           ex,
           Run,
           _,
           e
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style. If left empty, inline iteration names will be checked
# with the set naming style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style. If left empty, method names will be checked with the set naming style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style. If left empty, module names will be checked with the set naming style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Regular expression matching correct type variable names. If left empty, type
# variable names will be checked with the set naming style.
#typevar-rgx=
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style. If left empty, variable names will be checked with the set
# naming style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp,
                      __post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
                  _fields,
                  _replace,
                  _source,
                  _make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[DESIGN]
# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=25
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when caught.
overgeneral-exceptions=builtins.BaseException,builtins.Exception
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow explicit reexports by alias from a package __init__.
allow-reexport-from-package=no
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
# UNDEFINED.
confidence=HIGH,
           CONTROL_FLOW,
           INFERENCE,
           INFERENCE_FAILURE,
           UNDEFINED
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=raw-checker-failed,
        bad-inline-option,
        locally-disabled,
        file-ignored,
        suppressed-message,
        useless-suppression,
        deprecated-pragma,
        use-symbolic-message-instead,
        trailing-whitespace,
        line-too-long,
        missing-class-docstring,
        missing-module-docstring,
        missing-function-docstring,
        too-many-instance-attributes,
        wrong-import-order,
        too-many-arguments,
        broad-exception-raised,
        too-few-public-methods,
        too-many-branches,
        chained-comparison,
        duplicate-code,
        trailing-newlines,
        too-many-public-methods,
        too-many-locals,
        too-many-lines,
        using-constant-test,
        too-many-statements,
        cyclic-import,
        too-many-nested-blocks,
        too-many-boolean-expressions,
        no-else-raise,
        bare-except
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[METHOD_ARGS]
# List of qualified names (i.e., library.method) which require a timeout
# parameter e.g. 'requests.api.get,requests.api.post'
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO
# Regular expression of note tags to take in consideration.
notes-rgx=
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
# 'convention', and 'info' which contain the number of messages in each
# category, as well as 'statement' which is the total number of statements
# analyzed. This score is used by the global evaluation report (RP0004).
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
#output-format=
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=yes
# Signatures are removed from the similarity computation
ignore-signatures=yes
# Minimum lines number of a similarity.
min-similarity-lines=4
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the 'python-enchant' package.
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of symbolic message names to ignore for Mixin members.
ignored-checks-for-mixins=no-member,
                          not-async-context-manager,
                          not-context-manager,
                          attribute-defined-outside-init
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# Regex pattern to define which classes are considered mixins.
mixin-class-rgx=.*[Mm]ixin
# List of decorators that change the signature of a decorated function.
signature-mutators=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=id,object
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io

43
setup.py Normal file

@@ -0,0 +1,43 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import setuptools
try:
    with open("README.md", "r") as fh:
        long_description = fh.read()
except FileNotFoundError:
    long_description = ""

setuptools.setup(
    name="shippo-api-client",
    version="0.0.1",
    author="Speakeasy",
    description="Python Client SDK Generated by Speakeasy",
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages(where="src"),
    install_requires=[
        "certifi>=2023.7.22",
        "charset-normalizer>=3.2.0",
        "dataclasses-json>=0.6.4",
        "idna>=3.4",
        "jsonpath-python>=1.0.6",
        "marshmallow>=3.19.0",
        "mypy-extensions>=1.0.0",
        "packaging>=23.1",
        "python-dateutil>=2.8.2",
        "requests>=2.31.0",
        "six>=1.16.0",
        "typing-inspect>=0.9.0",
        "typing_extensions>=4.7.1",
        "urllib3>=1.26.18",
    ],
    extras_require={
        "dev": [
            "pylint==2.16.2",
        ],
    },
    package_dir={'': 'src'},
    python_requires='>=3.8',
    package_data={"shippo-api-client": ["py.typed"]},
)

4
src/shippo/__init__.py Normal file

@@ -0,0 +1,4 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
from .sdk import *
from .sdkconfiguration import *

src/shippo/_hooks/__init__.py Normal file

@@ -0,0 +1,4 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
from .sdkhooks import *
from .types import *

src/shippo/_hooks/sdkhooks.py Normal file

@@ -0,0 +1,55 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import requests
from .types import SDKInitHook, BeforeRequestContext, BeforeRequestHook, AfterSuccessContext, AfterSuccessHook, AfterErrorContext, AfterErrorHook, Hooks
from typing import List, Optional, Tuple, Union
class SDKHooks(Hooks):
    sdk_init_hooks: List[SDKInitHook] = []
    before_request_hooks: List[BeforeRequestHook] = []
    after_success_hooks: List[AfterSuccessHook] = []
    after_error_hooks: List[AfterErrorHook] = []

    def __init__(self):
        pass

    def register_sdk_init_hook(self, hook: SDKInitHook) -> None:
        self.sdk_init_hooks.append(hook)

    def register_before_request_hook(self, hook: BeforeRequestHook) -> None:
        self.before_request_hooks.append(hook)

    def register_after_success_hook(self, hook: AfterSuccessHook) -> None:
        self.after_success_hooks.append(hook)

    def register_after_error_hook(self, hook: AfterErrorHook) -> None:
        self.after_error_hooks.append(hook)

    def sdk_init(self, base_url: str, client: requests.Session) -> Tuple[str, requests.Session]:
        for hook in self.sdk_init_hooks:
            base_url, client = hook.sdk_init(base_url, client)
        return base_url, client

    def before_request(self, hook_ctx: BeforeRequestContext, request: requests.PreparedRequest) -> Union[requests.PreparedRequest, Exception]:
        for hook in self.before_request_hooks:
            request = hook.before_request(hook_ctx, request)
            if isinstance(request, Exception):
                raise request
        return request

    def after_success(self, hook_ctx: AfterSuccessContext, response: requests.Response) -> requests.Response:
        for hook in self.after_success_hooks:
            response = hook.after_success(hook_ctx, response)
            if isinstance(response, Exception):
                raise response
        return response

    def after_error(self, hook_ctx: AfterErrorContext, response: Optional[requests.Response], error: Optional[Exception]) -> Tuple[Optional[requests.Response], Optional[Exception]]:
        for hook in self.after_error_hooks:
            result = hook.after_error(hook_ctx, response, error)
            if isinstance(result, Exception):
                raise result
            response, error = result
        return response, error

src/shippo/_hooks/types.py Normal file

@@ -0,0 +1,70 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import requests as requests_http
from abc import ABC, abstractmethod
from typing import Any, Callable, List, Optional, Tuple, Union
class HookContext:
    operation_id: str
    oauth2_scopes: Optional[List[str]] = None
    security_source: Optional[Union[Any, Callable[[], Any]]] = None

    def __init__(self, operation_id: str, oauth2_scopes: Optional[List[str]], security_source: Optional[Union[Any, Callable[[], Any]]]):
        self.operation_id = operation_id
        self.oauth2_scopes = oauth2_scopes
        self.security_source = security_source


class BeforeRequestContext(HookContext):
    pass


class AfterSuccessContext(HookContext):
    pass


class AfterErrorContext(HookContext):
    pass


class SDKInitHook(ABC):
    @abstractmethod
    def sdk_init(self, base_url: str, client: requests_http.Session) -> Tuple[str, requests_http.Session]:
        pass


class BeforeRequestHook(ABC):
    @abstractmethod
    def before_request(self, hook_ctx: BeforeRequestContext, request: requests_http.PreparedRequest) -> Union[requests_http.PreparedRequest, Exception]:
        pass


class AfterSuccessHook(ABC):
    @abstractmethod
    def after_success(self, hook_ctx: AfterSuccessContext, response: requests_http.Response) -> Union[requests_http.Response, Exception]:
        pass


class AfterErrorHook(ABC):
    @abstractmethod
    def after_error(self, hook_ctx: AfterErrorContext, response: Optional[requests_http.Response], error: Optional[Exception]) -> Union[Tuple[Optional[requests_http.Response], Optional[Exception]], Exception]:
        pass


class Hooks(ABC):
    @abstractmethod
    def register_sdk_init_hook(self, hook: SDKInitHook):
        pass

    @abstractmethod
    def register_before_request_hook(self, hook: BeforeRequestHook):
        pass

    @abstractmethod
    def register_after_success_hook(self, hook: AfterSuccessHook):
        pass

    @abstractmethod
    def register_after_error_hook(self, hook: AfterErrorHook):
        pass
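
The abstract interfaces above pair with the `SDKHooks` dispatcher in `sdkhooks.py`. As a minimal sketch (not part of this commit), a custom before-request hook could stamp a header onto every outgoing request; the `CustomHeaderHook` class and the `x-custom-header` name are hypothetical:

```python
import requests

from shippo._hooks import BeforeRequestContext, BeforeRequestHook, SDKHooks


class CustomHeaderHook(BeforeRequestHook):
    """Hypothetical hook that adds a header to every outgoing request."""

    def before_request(self, hook_ctx: BeforeRequestContext, request: requests.PreparedRequest):
        request.headers['x-custom-header'] = 'someValue'
        return request


hooks = SDKHooks()
hooks.register_before_request_hook(CustomHeaderHook())

# The registered hook now runs inside SDKHooks.before_request:
prepared = requests.Request('GET', 'https://example.com/example').prepare()
prepared = hooks.before_request(BeforeRequestContext('Example', [], None), prepared)
print(prepared.headers['x-custom-header'])  # someValue
```

Note that the generated `Shippo.__init__` builds its own `SDKHooks` instance and stores it on `sdk_configuration._hooks`, so attaching a hook to a live client would mean registering it on that private attribute.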

src/shippo/models/__init__.py Normal file

@@ -0,0 +1,4 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
# package

src/shippo/models/errors/__init__.py Normal file

@@ -0,0 +1,5 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
from .sdkerror import *
__all__ = ["SDKError"]

src/shippo/models/errors/sdkerror.py Normal file

@@ -0,0 +1,24 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import requests as requests_http
class SDKError(Exception):
    """Represents an error returned by the API."""
    message: str
    status_code: int
    body: str
    raw_response: requests_http.Response

    def __init__(self, message: str, status_code: int, body: str, raw_response: requests_http.Response):
        self.message = message
        self.status_code = status_code
        self.body = body
        self.raw_response = raw_response

    def __str__(self):
        body = ''
        if len(self.body) > 0:
            body = f'\n{self.body}'
        return f'{self.message}: Status {self.status_code}{body}'
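
For reference, the string form produced by `__str__` above looks like this; a small sketch using a synthetic `requests.Response` purely for illustration:

```python
import requests

from shippo.models.errors import SDKError

# Build a synthetic response for illustration only.
raw = requests.Response()
raw.status_code = 404

err = SDKError('API error occurred', 404, 'not found', raw)
print(str(err))
# API error occurred: Status 404
# not found
```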

src/shippo/models/operations/__init__.py Normal file

@@ -0,0 +1,5 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
from .example import *
__all__ = ["ExampleRequest","ExampleResponse"]

src/shippo/models/operations/example.py Normal file

@@ -0,0 +1,26 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
from __future__ import annotations
import dataclasses
import requests as requests_http
from typing import Optional
@dataclasses.dataclass
class ExampleRequest:
    results_per_page: Optional[int] = dataclasses.field(default=25, metadata={'query_param': { 'field_name': 'results_per_page', 'style': 'form', 'explode': True }})
    r"""The number of results to return per page (max 100)"""


@dataclasses.dataclass
class ExampleResponse:
    content_type: str = dataclasses.field()
    r"""HTTP response content type for this operation"""
    status_code: int = dataclasses.field()
    r"""HTTP response status code for this operation"""
    raw_response: requests_http.Response = dataclasses.field()
    r"""Raw HTTP response; suitable for custom response parsing"""

106
src/shippo/sdk.py Normal file

@@ -0,0 +1,106 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import requests as requests_http
from .sdkconfiguration import SDKConfiguration
from shippo import utils
from shippo._hooks import HookContext, SDKHooks
from shippo.models import errors, operations
from typing import Dict, Optional
class Shippo:
    sdk_configuration: SDKConfiguration

    def __init__(self,
                 server_idx: int = None,
                 server_url: str = None,
                 url_params: Dict[str, str] = None,
                 client: requests_http.Session = None,
                 retry_config: utils.RetryConfig = None
                 ) -> None:
        """Instantiates the SDK configuring it with the provided parameters.

        :param server_idx: The index of the server to use for all operations
        :type server_idx: int
        :param server_url: The server URL to use for all operations
        :type server_url: str
        :param url_params: Parameters to optionally template the server URL with
        :type url_params: Dict[str, str]
        :param client: The requests.Session HTTP client to use for all operations
        :type client: requests_http.Session
        :param retry_config: The utils.RetryConfig to use globally
        :type retry_config: utils.RetryConfig
        """
        if client is None:
            client = requests_http.Session()

        if server_url is not None:
            if url_params is not None:
                server_url = utils.template_url(server_url, url_params)

        self.sdk_configuration = SDKConfiguration(client, None, server_url, server_idx, retry_config=retry_config)

        hooks = SDKHooks()

        current_server_url, *_ = self.sdk_configuration.get_server_details()
        server_url, self.sdk_configuration.client = hooks.sdk_init(current_server_url, self.sdk_configuration.client)
        if current_server_url != server_url:
            self.sdk_configuration.server_url = server_url

        # pylint: disable=protected-access
        self.sdk_configuration._hooks = hooks

    def example(self, results_per_page: Optional[int] = None) -> operations.ExampleResponse:
        hook_ctx = HookContext(operation_id='Example', oauth2_scopes=[], security_source=None)
        request = operations.ExampleRequest(
            results_per_page=results_per_page,
        )

        base_url = utils.template_url(*self.sdk_configuration.get_server_details())

        url = base_url + '/example'
        headers = {}
        query_params = utils.get_query_params(operations.ExampleRequest, request)
        headers['Accept'] = '*/*'
        headers['user-agent'] = self.sdk_configuration.user_agent
        client = self.sdk_configuration.client

        try:
            req = self.sdk_configuration.get_hooks().before_request(
                hook_ctx,
                requests_http.Request('GET', url, params=query_params, headers=headers).prepare(),
            )
            http_res = client.send(req)
        except Exception as e:
            _, e = self.sdk_configuration.get_hooks().after_error(hook_ctx, None, e)
            raise e

        if utils.match_status_codes(['4XX', '5XX'], http_res.status_code):
            http_res, e = self.sdk_configuration.get_hooks().after_error(hook_ctx, http_res, None)
            if e:
                raise e
        else:
            result = self.sdk_configuration.get_hooks().after_success(hook_ctx, http_res)
            if isinstance(result, Exception):
                raise result
            http_res = result

        content_type = http_res.headers.get('Content-Type')

        res = operations.ExampleResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res)

        if http_res.status_code == 200:
            pass
        elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)

        return res

src/shippo/sdkconfiguration.py Normal file

@@ -0,0 +1,40 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import requests as requests_http
from ._hooks import SDKHooks
from .utils import utils
from .utils.retries import RetryConfig
from dataclasses import dataclass
from typing import Dict, Tuple
SERVERS = [
    'https://example.com',
]
"""Contains the list of servers available to the SDK"""


@dataclass
class SDKConfiguration:
    client: requests_http.Session
    server_url: str = ''
    server_idx: int = 0
    language: str = 'python'
    openapi_doc_version: str = '1'
    sdk_version: str = '0.0.1'
    gen_version: str = '2.275.2'
    user_agent: str = 'speakeasy-sdk/python 0.0.1 2.275.2 1 shippo-api-client'
    retry_config: RetryConfig = None
    _hooks: SDKHooks = None

    def get_server_details(self) -> Tuple[str, Dict[str, str]]:
        if self.server_url:
            return utils.remove_suffix(self.server_url, '/'), {}
        if self.server_idx is None:
            self.server_idx = 0

        return SERVERS[self.server_idx], {}

    def get_hooks(self) -> SDKHooks:
        return self._hooks
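
`get_server_details` feeds `utils.template_url` in `sdk.py`, where any `url_params` passed to the client are substituted into `{placeholders}` in the server URL. A small sketch; the `{region}` variable is hypothetical, since the default server list above defines no variables:

```python
from shippo import utils

# Hypothetical templated URL, shown only to illustrate the substitution.
url = utils.template_url('https://{region}.example.com', {'region': 'eu'})
print(url)  # https://eu.example.com
```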

src/shippo/utils/__init__.py Normal file

@@ -0,0 +1,4 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
from .retries import *
from .utils import *

120
src/shippo/utils/retries.py Normal file

@@ -0,0 +1,120 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import random
import time
from typing import List
import requests
class BackoffStrategy:
    initial_interval: int
    max_interval: int
    exponent: float
    max_elapsed_time: int

    def __init__(self, initial_interval: int, max_interval: int, exponent: float, max_elapsed_time: int):
        self.initial_interval = initial_interval
        self.max_interval = max_interval
        self.exponent = exponent
        self.max_elapsed_time = max_elapsed_time


class RetryConfig:
    strategy: str
    backoff: BackoffStrategy
    retry_connection_errors: bool

    def __init__(self, strategy: str, backoff: BackoffStrategy, retry_connection_errors: bool):
        self.strategy = strategy
        self.backoff = backoff
        self.retry_connection_errors = retry_connection_errors


class Retries:
    config: RetryConfig
    status_codes: List[str]

    def __init__(self, config: RetryConfig, status_codes: List[str]):
        self.config = config
        self.status_codes = status_codes


class TemporaryError(Exception):
    response: requests.Response

    def __init__(self, response: requests.Response):
        self.response = response


class PermanentError(Exception):
    inner: Exception

    def __init__(self, inner: Exception):
        self.inner = inner


def retry(func, retries: Retries):
    if retries.config.strategy == 'backoff':
        def do_request():
            res: requests.Response
            try:
                res = func()

                for code in retries.status_codes:
                    if "X" in code.upper():
                        code_range = int(code[0])

                        status_major = res.status_code / 100

                        if status_major >= code_range and status_major < code_range + 1:
                            raise TemporaryError(res)
                    else:
                        parsed_code = int(code)

                        if res.status_code == parsed_code:
                            raise TemporaryError(res)
            except requests.exceptions.ConnectionError as exception:
                if retries.config.retry_connection_errors:
                    raise
                raise PermanentError(exception) from exception
            except requests.exceptions.Timeout as exception:
                if retries.config.retry_connection_errors:
                    raise
                raise PermanentError(exception) from exception
            except TemporaryError:
                raise
            except Exception as exception:
                raise PermanentError(exception) from exception

            return res

        return retry_with_backoff(do_request, retries.config.backoff.initial_interval, retries.config.backoff.max_interval, retries.config.backoff.exponent, retries.config.backoff.max_elapsed_time)

    return func()


def retry_with_backoff(func, initial_interval=500, max_interval=60000, exponent=1.5, max_elapsed_time=3600000):
    start = round(time.time()*1000)
    retries = 0

    while True:
        try:
            return func()
        except PermanentError as exception:
            raise exception.inner
        except Exception as exception:  # pylint: disable=broad-exception-caught
            now = round(time.time()*1000)
            if now - start > max_elapsed_time:
                if isinstance(exception, TemporaryError):
                    return exception.response

                raise
            sleep = ((initial_interval/1000) *
                     exponent**retries + random.uniform(0, 1))
            if sleep > max_interval/1000:
                sleep = max_interval/1000
            time.sleep(sleep)
            retries += 1
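
None of the generated operations in this commit wire retries up yet, but the helpers above can be exercised directly. A minimal sketch, assuming a plain requests call as the retried function; the URL and status-code list are purely illustrative:

```python
import requests

from shippo.utils.retries import BackoffStrategy, Retries, RetryConfig, retry

# Exponential backoff: start at 500 ms, cap at 60 s, give up after 1 hour overall.
backoff = BackoffStrategy(initial_interval=500, max_interval=60000, exponent=1.5, max_elapsed_time=3600000)
config = RetryConfig(strategy='backoff', backoff=backoff, retry_connection_errors=True)


def do_call() -> requests.Response:
    return requests.get('https://example.com/example', timeout=30)


# Retries connection errors, timeouts, and any 5XX response until max_elapsed_time is exceeded.
res = retry(do_call, Retries(config=config, status_codes=['5XX']))
print(res.status_code)
```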

913
src/shippo/utils/utils.py Normal file

@@ -0,0 +1,913 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import base64
import json
import re
import sys
from dataclasses import Field, dataclass, fields, is_dataclass, make_dataclass
from datetime import date, datetime
from decimal import Decimal
from email.message import Message
from enum import Enum
from typing import (Any, Callable, Dict, List, Optional, Tuple, Union,
get_args, get_origin)
from xmlrpc.client import boolean
from typing_inspect import is_optional_type
import dateutil.parser
import requests
from dataclasses_json import DataClassJsonMixin
class SecurityClient:
client: requests.Session
query_params: Dict[str, str] = {}
headers: Dict[str, str] = {}
def __init__(self, client: requests.Session):
self.client = client
def send(self, request: requests.PreparedRequest, **kwargs):
request.prepare_url(url=request.url, params=self.query_params)
request.headers.update(self.headers)
return self.client.send(request, **kwargs)
def configure_security_client(client: requests.Session, security: dataclass):
client = SecurityClient(client)
if security is None:
return client
sec_fields: Tuple[Field, ...] = fields(security)
for sec_field in sec_fields:
value = getattr(security, sec_field.name)
if value is None:
continue
metadata = sec_field.metadata.get('security')
if metadata is None:
continue
if metadata.get('option'):
_parse_security_option(client, value)
return client
if metadata.get('scheme'):
# Special case for basic auth which could be a flattened struct
if metadata.get("sub_type") == "basic" and not is_dataclass(value):
_parse_security_scheme(client, metadata, security)
else:
_parse_security_scheme(client, metadata, value)
return client
def _parse_security_option(client: SecurityClient, option: dataclass):
opt_fields: Tuple[Field, ...] = fields(option)
for opt_field in opt_fields:
metadata = opt_field.metadata.get('security')
if metadata is None or metadata.get('scheme') is None:
continue
_parse_security_scheme(
client, metadata, getattr(option, opt_field.name))
def _parse_security_scheme(client: SecurityClient, scheme_metadata: Dict, scheme: any):
scheme_type = scheme_metadata.get('type')
sub_type = scheme_metadata.get('sub_type')
if is_dataclass(scheme):
if scheme_type == 'http' and sub_type == 'basic':
_parse_basic_auth_scheme(client, scheme)
return
scheme_fields: Tuple[Field, ...] = fields(scheme)
for scheme_field in scheme_fields:
metadata = scheme_field.metadata.get('security')
if metadata is None or metadata.get('field_name') is None:
continue
value = getattr(scheme, scheme_field.name)
_parse_security_scheme_value(
client, scheme_metadata, metadata, value)
else:
_parse_security_scheme_value(
client, scheme_metadata, scheme_metadata, scheme)
def _parse_security_scheme_value(client: SecurityClient, scheme_metadata: Dict, security_metadata: Dict, value: any):
scheme_type = scheme_metadata.get('type')
sub_type = scheme_metadata.get('sub_type')
header_name = security_metadata.get('field_name')
if scheme_type == "apiKey":
if sub_type == 'header':
client.headers[header_name] = value
elif sub_type == 'query':
client.query_params[header_name] = value
else:
raise Exception('not supported')
elif scheme_type == "openIdConnect":
client.headers[header_name] = _apply_bearer(value)
elif scheme_type == 'oauth2':
if sub_type != 'client_credentials':
client.headers[header_name] = _apply_bearer(value)
elif scheme_type == 'http':
if sub_type == 'bearer':
client.headers[header_name] = _apply_bearer(value)
else:
raise Exception('not supported')
else:
raise Exception('not supported')
def _apply_bearer(token: str) -> str:
return token.lower().startswith('bearer ') and token or f'Bearer {token}'
def _parse_basic_auth_scheme(client: SecurityClient, scheme: dataclass):
username = ""
password = ""
scheme_fields: Tuple[Field, ...] = fields(scheme)
for scheme_field in scheme_fields:
metadata = scheme_field.metadata.get('security')
if metadata is None or metadata.get('field_name') is None:
continue
field_name = metadata.get('field_name')
value = getattr(scheme, scheme_field.name)
if field_name == 'username':
username = value
if field_name == 'password':
password = value
data = f'{username}:{password}'.encode()
client.headers['Authorization'] = f'Basic {base64.b64encode(data).decode()}'
def generate_url(clazz: type, server_url: str, path: str, path_params: dataclass,
gbls: Dict[str, Dict[str, Dict[str, Any]]] = None) -> str:
path_param_fields: Tuple[Field, ...] = fields(clazz)
for field in path_param_fields:
request_metadata = field.metadata.get('request')
if request_metadata is not None:
continue
param_metadata = field.metadata.get('path_param')
if param_metadata is None:
continue
param = getattr(
path_params, field.name) if path_params is not None else None
param = _populate_from_globals(
field.name, param, 'pathParam', gbls)
if param is None:
continue
f_name = param_metadata.get("field_name", field.name)
serialization = param_metadata.get('serialization', '')
if serialization != '':
serialized_params = _get_serialized_params(
param_metadata, field.type, f_name, param)
for key, value in serialized_params.items():
path = path.replace(
'{' + key + '}', value, 1)
else:
if param_metadata.get('style', 'simple') == 'simple':
if isinstance(param, List):
pp_vals: List[str] = []
for pp_val in param:
if pp_val is None:
continue
pp_vals.append(_val_to_string(pp_val))
path = path.replace(
'{' + param_metadata.get('field_name', field.name) + '}', ",".join(pp_vals), 1)
elif isinstance(param, Dict):
pp_vals: List[str] = []
for pp_key in param:
if param[pp_key] is None:
continue
if param_metadata.get('explode'):
pp_vals.append(
f"{pp_key}={_val_to_string(param[pp_key])}")
else:
pp_vals.append(
f"{pp_key},{_val_to_string(param[pp_key])}")
path = path.replace(
'{' + param_metadata.get('field_name', field.name) + '}', ",".join(pp_vals), 1)
elif not isinstance(param, (str, int, float, complex, bool, Decimal)):
pp_vals: List[str] = []
param_fields: Tuple[Field, ...] = fields(param)
for param_field in param_fields:
param_value_metadata = param_field.metadata.get(
'path_param')
if not param_value_metadata:
continue
parm_name = param_value_metadata.get(
'field_name', field.name)
param_field_val = getattr(param, param_field.name)
if param_field_val is None:
continue
if param_metadata.get('explode'):
pp_vals.append(
f"{parm_name}={_val_to_string(param_field_val)}")
else:
pp_vals.append(
f"{parm_name},{_val_to_string(param_field_val)}")
path = path.replace(
'{' + param_metadata.get('field_name', field.name) + '}', ",".join(pp_vals), 1)
else:
path = path.replace(
'{' + param_metadata.get('field_name', field.name) + '}', _val_to_string(param), 1)
return remove_suffix(server_url, '/') + path
def is_optional(field):
return get_origin(field) is Union and type(None) in get_args(field)
def template_url(url_with_params: str, params: Dict[str, str]) -> str:
for key, value in params.items():
url_with_params = url_with_params.replace(
'{' + key + '}', value)
return url_with_params
def get_query_params(clazz: type, query_params: dataclass, gbls: Dict[str, Dict[str, Dict[str, Any]]] = None) -> Dict[
str, List[str]]:
params: Dict[str, List[str]] = {}
param_fields: Tuple[Field, ...] = fields(clazz)
for field in param_fields:
request_metadata = field.metadata.get('request')
if request_metadata is not None:
continue
metadata = field.metadata.get('query_param')
if not metadata:
continue
param_name = field.name
value = getattr(
query_params, param_name) if query_params is not None else None
value = _populate_from_globals(param_name, value, 'queryParam', gbls)
f_name = metadata.get("field_name")
serialization = metadata.get('serialization', '')
if serialization != '':
serialized_parms = _get_serialized_params(
metadata, field.type, f_name, value)
for key, value in serialized_parms.items():
if key in params:
params[key].extend(value)
else:
params[key] = [value]
else:
style = metadata.get('style', 'form')
if style == 'deepObject':
params = {**params, **_get_deep_object_query_params(
metadata, f_name, value)}
elif style == 'form':
params = {**params, **_get_delimited_query_params(
metadata, f_name, value, ",")}
elif style == 'pipeDelimited':
params = {**params, **_get_delimited_query_params(
metadata, f_name, value, "|")}
else:
raise Exception('not yet implemented')
return params
def get_headers(headers_params: dataclass) -> Dict[str, str]:
if headers_params is None:
return {}
headers: Dict[str, str] = {}
param_fields: Tuple[Field, ...] = fields(headers_params)
for field in param_fields:
metadata = field.metadata.get('header')
if not metadata:
continue
value = _serialize_header(metadata.get(
'explode', False), getattr(headers_params, field.name))
if value != '':
headers[metadata.get('field_name', field.name)] = value
return headers
def _get_serialized_params(metadata: Dict, field_type: type, field_name: str, obj: any) -> Dict[str, str]:
params: Dict[str, str] = {}
serialization = metadata.get('serialization', '')
if serialization == 'json':
params[metadata.get("field_name", field_name)
] = marshal_json(obj, field_type)
return params
def _get_deep_object_query_params(metadata: Dict, field_name: str, obj: any) -> Dict[str, List[str]]:
params: Dict[str, List[str]] = {}
if obj is None:
return params
if is_dataclass(obj):
obj_fields: Tuple[Field, ...] = fields(obj)
for obj_field in obj_fields:
obj_param_metadata = obj_field.metadata.get('query_param')
if not obj_param_metadata:
continue
obj_val = getattr(obj, obj_field.name)
if obj_val is None:
continue
if isinstance(obj_val, List):
for val in obj_val:
if val is None:
continue
if params.get(
f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]') is None:
params[
f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]'] = [
]
params[
f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]'].append(
_val_to_string(val))
else:
params[
f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]'] = [
_val_to_string(obj_val)]
elif isinstance(obj, Dict):
for key, value in obj.items():
if value is None:
continue
if isinstance(value, List):
for val in value:
if val is None:
continue
if params.get(f'{metadata.get("field_name", field_name)}[{key}]') is None:
params[f'{metadata.get("field_name", field_name)}[{key}]'] = [
]
params[
f'{metadata.get("field_name", field_name)}[{key}]'].append(_val_to_string(val))
else:
params[f'{metadata.get("field_name", field_name)}[{key}]'] = [
_val_to_string(value)]
return params
def _get_query_param_field_name(obj_field: Field) -> str:
obj_param_metadata = obj_field.metadata.get('query_param')
if not obj_param_metadata:
return ""
return obj_param_metadata.get("field_name", obj_field.name)
def _get_delimited_query_params(metadata: Dict, field_name: str, obj: any, delimiter: str) -> Dict[
str, List[str]]:
return _populate_form(field_name, metadata.get("explode", True), obj, _get_query_param_field_name, delimiter)
SERIALIZATION_METHOD_TO_CONTENT_TYPE = {
'json': 'application/json',
'form': 'application/x-www-form-urlencoded',
'multipart': 'multipart/form-data',
'raw': 'application/octet-stream',
'string': 'text/plain',
}
def serialize_request_body(request: dataclass, request_type: type, request_field_name: str, nullable: bool, optional: bool, serialization_method: str, encoder=None) -> Tuple[
str, any, any]:
if request is None:
if not nullable and optional:
return None, None, None
if not is_dataclass(request) or not hasattr(request, request_field_name):
return serialize_content_type(request_field_name, request_type, SERIALIZATION_METHOD_TO_CONTENT_TYPE[serialization_method],
request, encoder)
request_val = getattr(request, request_field_name)
if request_val is None:
if not nullable and optional:
return None, None, None
request_fields: Tuple[Field, ...] = fields(request)
request_metadata = None
for field in request_fields:
if field.name == request_field_name:
request_metadata = field.metadata.get('request')
break
if request_metadata is None:
raise Exception('invalid request type')
return serialize_content_type(request_field_name, request_type, request_metadata.get('media_type', 'application/octet-stream'),
request_val)
def serialize_content_type(field_name: str, request_type: any, media_type: str, request: dataclass, encoder=None) -> Tuple[str, any, List[List[any]]]:
if re.match(r'(application|text)\/.*?\+*json.*', media_type) is not None:
return media_type, marshal_json(request, request_type, encoder), None
if re.match(r'multipart\/.*', media_type) is not None:
return serialize_multipart_form(media_type, request)
if re.match(r'application\/x-www-form-urlencoded.*', media_type) is not None:
return media_type, serialize_form_data(field_name, request), None
if isinstance(request, (bytes, bytearray)):
return media_type, request, None
if isinstance(request, str):
return media_type, request, None
raise Exception(
f"invalid request body type {type(request)} for mediaType {media_type}")
def serialize_multipart_form(media_type: str, request: dataclass) -> Tuple[str, any, List[List[any]]]:
form: List[List[any]] = []
request_fields = fields(request)
for field in request_fields:
val = getattr(request, field.name)
if val is None:
continue
field_metadata = field.metadata.get('multipart_form')
if not field_metadata:
continue
if field_metadata.get("file") is True:
file_fields = fields(val)
file_name = ""
field_name = ""
content = bytes()
for file_field in file_fields:
file_metadata = file_field.metadata.get('multipart_form')
if file_metadata is None:
continue
if file_metadata.get("content") is True:
content = getattr(val, file_field.name)
else:
field_name = file_metadata.get(
"field_name", file_field.name)
file_name = getattr(val, file_field.name)
if field_name == "" or file_name == "" or content == bytes():
raise Exception('invalid multipart/form-data file')
form.append([field_name, [file_name, content]])
elif field_metadata.get("json") is True:
to_append = [field_metadata.get("field_name", field.name), [
None, marshal_json(val, field.type), "application/json"]]
form.append(to_append)
else:
field_name = field_metadata.get(
"field_name", field.name)
if isinstance(val, List):
for value in val:
if value is None:
continue
form.append(
[field_name + "[]", [None, _val_to_string(value)]])
else:
form.append([field_name, [None, _val_to_string(val)]])
return media_type, None, form
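# The returned form is a list of [field_name, parts] entries: file fields become
# [name, [file_name, content]], JSON fields become [name, [None, payload, "application/json"]],
# and plain fields become [name, [None, value]] (list fields get a "[]" suffix).
# This shape is what the SDK's HTTP client is expected to send as the multipart payload.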
def serialize_dict(original: Dict, explode: bool, field_name, existing: Optional[Dict[str, List[str]]]) -> Dict[
str, List[str]]:
    if existing is None:
        existing = {}
if explode is True:
for key, val in original.items():
if key not in existing:
existing[key] = []
existing[key].append(val)
else:
temp = []
for key, val in original.items():
temp.append(str(key))
temp.append(str(val))
if field_name not in existing:
existing[field_name] = []
existing[field_name].append(",".join(temp))
return existing
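# Example (illustrative) for original={"a": 1, "b": 2} and field_name="m":
#   explode=True  -> {"a": [1], "b": [2]}
#   explode=False -> {"m": ["a,1,b,2"]}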
def serialize_form_data(field_name: str, data: dataclass) -> Dict[str, any]:
form: Dict[str, List[str]] = {}
if is_dataclass(data):
for field in fields(data):
val = getattr(data, field.name)
if val is None:
continue
metadata = field.metadata.get('form')
if metadata is None:
continue
field_name = metadata.get('field_name', field.name)
if metadata.get('json'):
form[field_name] = [marshal_json(val, field.type)]
else:
if metadata.get('style', 'form') == 'form':
form = {**form, **_populate_form(
field_name, metadata.get('explode', True), val, _get_form_field_name, ",")}
else:
raise Exception(
f'Invalid form style for field {field.name}')
elif isinstance(data, Dict):
for key, value in data.items():
form[key] = [_val_to_string(value)]
else:
raise Exception(f'Invalid request body type for field {field_name}')
return form
def _get_form_field_name(obj_field: Field) -> str:
obj_param_metadata = obj_field.metadata.get('form')
if not obj_param_metadata:
return ""
return obj_param_metadata.get("field_name", obj_field.name)
def _populate_form(field_name: str, explode: bool, obj: any, get_field_name_func: Callable, delimiter: str) -> \
Dict[str, List[str]]:
params: Dict[str, List[str]] = {}
if obj is None:
return params
if is_dataclass(obj):
items = []
obj_fields: Tuple[Field, ...] = fields(obj)
for obj_field in obj_fields:
obj_field_name = get_field_name_func(obj_field)
if obj_field_name == '':
continue
val = getattr(obj, obj_field.name)
if val is None:
continue
if explode:
params[obj_field_name] = [_val_to_string(val)]
else:
items.append(
f'{obj_field_name}{delimiter}{_val_to_string(val)}')
if len(items) > 0:
params[field_name] = [delimiter.join(items)]
elif isinstance(obj, Dict):
items = []
for key, value in obj.items():
if value is None:
continue
if explode:
                params[key] = [_val_to_string(value)]
else:
items.append(f'{key}{delimiter}{_val_to_string(value)}')
if len(items) > 0:
params[field_name] = [delimiter.join(items)]
elif isinstance(obj, List):
items = []
for value in obj:
if value is None:
continue
if explode:
                if field_name not in params:
params[field_name] = []
params[field_name].append(_val_to_string(value))
else:
items.append(_val_to_string(value))
if len(items) > 0:
params[field_name] = [delimiter.join(
[str(item) for item in items])]
else:
params[field_name] = [_val_to_string(obj)]
return params
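# Example (illustrative) for obj=["a", "b"], field_name="ids", delimiter=",":
#   explode=True  -> {"ids": ["a", "b"]}
#   explode=False -> {"ids": ["a,b"]}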
def _serialize_header(explode: bool, obj: any) -> str:
if obj is None:
return ''
if is_dataclass(obj):
items = []
obj_fields: Tuple[Field, ...] = fields(obj)
for obj_field in obj_fields:
obj_param_metadata = obj_field.metadata.get('header')
if not obj_param_metadata:
continue
obj_field_name = obj_param_metadata.get(
'field_name', obj_field.name)
if obj_field_name == '':
continue
val = getattr(obj, obj_field.name)
if val is None:
continue
if explode:
items.append(
f'{obj_field_name}={_val_to_string(val)}')
else:
items.append(obj_field_name)
items.append(_val_to_string(val))
if len(items) > 0:
return ','.join(items)
elif isinstance(obj, Dict):
items = []
for key, value in obj.items():
if value is None:
continue
if explode:
items.append(f'{key}={_val_to_string(value)}')
else:
items.append(key)
items.append(_val_to_string(value))
if len(items) > 0:
return ','.join([str(item) for item in items])
elif isinstance(obj, List):
items = []
for value in obj:
if value is None:
continue
items.append(_val_to_string(value))
if len(items) > 0:
return ','.join(items)
else:
return f'{_val_to_string(obj)}'
return ''
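# Example (illustrative) for obj={"a": 1, "b": 2}:
#   explode=True  -> "a=1,b=2"
#   explode=False -> "a,1,b,2"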
def unmarshal_json(data, typ, decoder=None):
unmarshal = make_dataclass('Unmarshal', [('res', typ)],
bases=(DataClassJsonMixin,))
json_dict = json.loads(data)
try:
out = unmarshal.from_dict({"res": json_dict})
except AttributeError as attr_err:
raise AttributeError(
f'unable to unmarshal {data} as {typ} - {attr_err}') from attr_err
return out.res if decoder is None else decoder(out.res)
def marshal_json(val, typ, encoder=None):
if not is_optional_type(typ) and val is None:
raise ValueError(
f"Could not marshal None into non-optional type: {typ}")
marshal = make_dataclass('Marshal', [('res', typ)],
bases=(DataClassJsonMixin,))
marshaller = marshal(res=val)
json_dict = marshaller.to_dict()
val = json_dict["res"] if encoder is None else encoder(json_dict["res"])
return json.dumps(val, separators=(',', ':'), sort_keys=True)
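# Both unmarshal_json and marshal_json wrap the target type in a temporary
# single-field dataclass so dataclasses_json handles nested models, enums and
# dates uniformly. Example (illustrative): marshal_json(3, int) returns "3";
# unmarshal_json('{"x": 1}', SomeModel) returns a SomeModel instance
# (SomeModel being a hypothetical dataclasses_json dataclass).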
def match_content_type(content_type: str, pattern: str) -> bool:
if pattern in (content_type, "*", "*/*"):
return True
msg = Message()
msg['content-type'] = content_type
media_type = msg.get_content_type()
if media_type == pattern:
return True
parts = media_type.split("/")
if len(parts) == 2:
if pattern in (f'{parts[0]}/*', f'*/{parts[1]}'):
return True
return False
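# Example (illustrative):
#   match_content_type("application/json; charset=utf-8", "application/json") -> True
#   match_content_type("text/plain", "text/*") -> True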
def match_status_codes(status_codes: List[str], status_code: int) -> bool:
for code in status_codes:
if code == str(status_code):
return True
if code.endswith("XX") and code.startswith(str(status_code)[:1]):
return True
return False
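# Example (illustrative): match_status_codes(["4XX", "500"], 404) -> True,
# because "4XX" matches any status code whose first digit is "4".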
def datetimeisoformat(optional: bool):
def isoformatoptional(val):
if optional and val is None:
return None
return _val_to_string(val)
return isoformatoptional
def dateisoformat(optional: bool):
def isoformatoptional(val):
if optional and val is None:
return None
return date.isoformat(val)
return isoformatoptional
def datefromisoformat(date_str: str):
return dateutil.parser.parse(date_str).date()
def bigintencoder(optional: bool):
def bigintencode(val: int):
if optional and val is None:
return None
return str(val)
return bigintencode
def bigintdecoder(val):
if isinstance(val, float):
raise ValueError(f"{val} is a float")
return int(val)
def decimalencoder(optional: bool, as_str: bool):
def decimalencode(val: Decimal):
if optional and val is None:
return None
if as_str:
return str(val)
return float(val)
return decimalencode
def decimaldecoder(val):
return Decimal(str(val))
def map_encoder(optional: bool, value_encoder: Callable):
def map_encode(val: Dict):
if optional and val is None:
return None
encoded = {}
for key, value in val.items():
encoded[key] = value_encoder(value)
return encoded
return map_encode
def map_decoder(value_decoder: Callable):
def map_decode(val: Dict):
decoded = {}
for key, value in val.items():
decoded[key] = value_decoder(value)
return decoded
return map_decode
def list_encoder(optional: bool, value_encoder: Callable):
def list_encode(val: List):
if optional and val is None:
return None
encoded = []
for value in val:
encoded.append(value_encoder(value))
return encoded
return list_encode
def list_decoder(value_decoder: Callable):
def list_decode(val: List):
decoded = []
for value in val:
decoded.append(value_decoder(value))
return decoded
return list_decode
def union_encoder(all_encoders: Dict[str, Callable]):
def selective_encoder(val: any):
if type(val) in all_encoders:
return all_encoders[type(val)](val)
return val
return selective_encoder
def union_decoder(all_decoders: List[Callable]):
def selective_decoder(val: any):
decoded = val
for decoder in all_decoders:
try:
decoded = decoder(val)
break
except (TypeError, ValueError):
continue
return decoded
return selective_decoder
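# union_encoder dispatches on the value's exact type; union_decoder tries each
# decoder in order and returns the first result that does not raise
# TypeError/ValueError, falling back to the raw value.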
def get_field_name(name):
def override(_, _field_name=name):
return _field_name
return override
def _val_to_string(val):
if isinstance(val, bool):
return str(val).lower()
if isinstance(val, datetime):
return val.isoformat().replace('+00:00', 'Z')
if isinstance(val, Enum):
return str(val.value)
return str(val)
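# Example (illustrative):
#   _val_to_string(True) -> "true"
#   _val_to_string(datetime(2024, 1, 1, tzinfo=timezone.utc)) -> "2024-01-01T00:00:00Z"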
def _populate_from_globals(param_name: str, value: any, param_type: str, gbls: Dict[str, Dict[str, Dict[str, Any]]]):
if value is None and gbls is not None:
if 'parameters' in gbls:
if param_type in gbls['parameters']:
if param_name in gbls['parameters'][param_type]:
global_value = gbls['parameters'][param_type][param_name]
if global_value is not None:
value = global_value
return value
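# gbls is expected to look like {"parameters": {<param_type>: {<param_name>: value}}},
# e.g. {"parameters": {"queryParam": {"limit": 25}}} (the keys shown here are
# illustrative); the global value is only applied when no per-request value is given.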
def decoder_with_discriminator(field_name):
def decode_fx(obj):
kls = getattr(sys.modules['sdk.models.components'], obj[field_name])
return unmarshal_json(json.dumps(obj), kls)
return decode_fx
def remove_suffix(input_string, suffix):
if suffix and input_string.endswith(suffix):
return input_string[:-len(suffix)]
return input_string

61
tests/helpers.py Normal file
View File

@@ -0,0 +1,61 @@
"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT."""
import re
def sort_query_parameters(url):
parts = url.split("?")
if len(parts) == 1:
return url
query = parts[1]
params = query.split("&")
params.sort(key=lambda x: x.split('=')[0])
return parts[0] + "?" + "&".join(params)
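# Example (illustrative):
#   sort_query_parameters("https://example.com/path?b=2&a=1")
#   -> "https://example.com/path?a=1&b=2"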
def sort_serialized_maps(inp: any, regex: str, delim: str):
def sort_map(m):
entire_match = m.group(0)
groups = m.groups()
for group in groups:
pairs = []
if '=' in group:
pairs = group.split(delim)
pairs.sort(key=lambda x: x.split('=')[0])
else:
values = group.split(delim)
if len(values) == 1:
pairs = values
else:
pairs = [''] * int(len(values)/2)
                    # loop through every 2nd item
for i in range(0, len(values), 2):
pairs[int(i/2)] = values[i] + delim + values[i+1]
pairs.sort(key=lambda x: x.split(delim)[0])
entire_match = entire_match.replace(group, delim.join(pairs))
return entire_match
if isinstance(inp, str):
return re.sub(regex, sort_map, inp)
elif isinstance(inp, list):
for i, v in enumerate(inp):
inp[i] = sort_serialized_maps(v, regex, delim)
return inp
elif isinstance(inp, dict):
for k, v in inp.items():
inp[k] = sort_serialized_maps(v, regex, delim)
return inp
else:
raise Exception("Unsupported type")