diff --git a/CHANGELOG.md b/CHANGELOG.md index fe80957e..48a23634 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,49 +6,49 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Added async support for helpers that are merged from opensearch-dsl-py ([#329](https://github.com/opensearch-project/opensearch-py/pull/329)) - Added search.md to guides ([#356](https://github.com/opensearch-project/opensearch-py/pull/356)) - Added index lifecycle guide ([#362](https://github.com/opensearch-project/opensearch-py/pull/362)) -- Added 'point in time' APIs to the pyi files in sync and async client ([#378](https://github.com/opensearch-project/opensearch-py/pull/378)) +- Added point in time APIs to the pyi files in sync and async client ([#378](https://github.com/opensearch-project/opensearch-py/pull/378)) - Added MacOS and Windows CI workflows ([#390](https://github.com/opensearch-project/opensearch-py/pull/390)) - Added support for the security plugin ([#399](https://github.com/opensearch-project/opensearch-py/pull/399)) -- Compatibility with OpenSearch 2.1.0 - 2.6.0 ([#381](https://github.com/opensearch-project/opensearch-py/pull/381)) -- Added 'allow_redirects' parameter in perform_request function for RequestsHttpConnection ([#401](https://github.com/opensearch-project/opensearch-py/pull/401)) -- Enhanced YAML test runner to use OpenSearch rest-api-spec YAML tests ([#414](https://github.com/opensearch-project/opensearch-py/pull/414) +- Supports OpenSearch 2.1.0 - 2.6.0 ([#381](https://github.com/opensearch-project/opensearch-py/pull/381)) +- Added `allow_redirects` to `RequestsHttpConnection#perform_request` ([#401](https://github.com/opensearch-project/opensearch-py/pull/401)) +- Enhanced YAML test runner to use OpenSearch `rest-api-spec` YAML tests ([#414](https://github.com/opensearch-project/opensearch-py/pull/414)) - Added `Search#collapse` 
([#409](https://github.com/opensearch-project/opensearch-py/issues/409)) - Added support for the ISM API ([#398](https://github.com/opensearch-project/opensearch-py/pull/398)) - Added `trust_env` to `AIOHttpConnection` ([#398](https://github.com/opensearch-project/opensearch-py/pull/438)) - Added support for latest OpenSearch versions 2.7.0, 2.8.0 ([#445](https://github.com/opensearch-project/opensearch-py/pull/445)) - +- Added samples ([#447](https://github.com/opensearch-project/opensearch-py/pull/447)) +- Improved CI performance of integration with unreleased OpenSearch ([#318](https://github.com/opensearch-project/opensearch-py/pull/318)) ### Changed -- Upgrading pytest-asyncio to latest version - 0.21.0 ([#339](https://github.com/opensearch-project/opensearch-py/pull/339)) -- Fixed flaky CI tests by replacing httpbin with a simple http_server ([#395](https://github.com/opensearch-project/opensearch-py/pull/395)) -- Move security from plugins to clients ([#442](https://github.com/opensearch-project/opensearch-py/pull/442)) +- Moved security from `plugins` to `clients` ([#442](https://github.com/opensearch-project/opensearch-py/pull/442)) ### Deprecated ### Removed -- Removed tests against Python 2.7 in github workflows ([#421](https://github.com/opensearch-project/opensearch-py/pull/421)) +- Removed support for Python 2.7 ([#421](https://github.com/opensearch-project/opensearch-py/pull/421)) ### Fixed +- Fixed flaky CI tests by replacing httpbin with a simple http_server ([#395](https://github.com/opensearch-project/opensearch-py/pull/395)) - Fixed import cycle when importing async helpers ([#311](https://github.com/opensearch-project/opensearch-py/pull/311)) -- Fixed make docs with sphinx([#433](https://github.com/opensearch-project/opensearch-py/pull/433)) -- Fixed userguide for async client ([#340](https://github.com/opensearch-project/opensearch-py/pull/340)) -- Include parsed error info in TransportError in 
async connections (fixes #225) ([#226](https://github.com/opensearch-project/opensearch-py/pull/226)) +- Fixed `make docs` with sphinx ([#433](https://github.com/opensearch-project/opensearch-py/pull/433)) +- Fixed user guide for async client ([#340](https://github.com/opensearch-project/opensearch-py/pull/340)) +- Include parsed error info in `TransportError` in async connections ([#226](https://github.com/opensearch-project/opensearch-py/pull/226)) - Enhanced existing API generator to use OpenSearch OpenAPI spec ([#412](https://github.com/opensearch-project/opensearch-py/pull/412)) -- Fix crash when attempting to authenticate with an async connection (fixes #283)) ([#424](https://github.com/opensearch-project/opensearch-py/pull/424)) +- Fix crash when attempting to authenticate with an async connection ([#424](https://github.com/opensearch-project/opensearch-py/pull/424)) ### Security - Fixed CVE-2022-23491 reported in opensearch-dsl-py ([#295](https://github.com/opensearch-project/opensearch-py/pull/295)) -- Update ci workflows ([#318](https://github.com/opensearch-project/opensearch-py/pull/318)) ### Dependencies +- Bumps `pytest-asyncio` to 0.21.0 ([#339](https://github.com/opensearch-project/opensearch-py/pull/339)) - Bumps `sphinx` from <1.7 to <7.1 - Bumps `pytest-asyncio` from <=0.21.0 to <=0.21.1 ## [2.2.0] ### Added -- Merging opensearch-dsl-py into opensearch-py ([#287](https://github.com/opensearch-project/opensearch-py/pull/287)) -- Added upgrading.md file and updated it for opensearch-py 2.2.0 release ([#293](https://github.com/opensearch-project/opensearch-py/pull/293)) +- Merged opensearch-dsl-py into opensearch-py ([#287](https://github.com/opensearch-project/opensearch-py/pull/287)) +- Added UPGRADING.md and updated it for opensearch-py 2.2.0 release ([#293](https://github.com/opensearch-project/opensearch-py/pull/293)) ### Changed ### Deprecated ### Removed - Removed 'out/opensearchpy' folder which was 
produced while generating pyi files for plugins ([#288](https://github.com/opensearch-project/opensearch-py/pull/288)) - Removed low-level and high-level client terminology from guides ([#298](https://github.com/opensearch-project/opensearch-py/pull/298)) ### Fixed -- Fixed CVE - issue 86 mentioned in opensearch-dsl-py repo ([#295](https://github.com/opensearch-project/opensearch-py/pull/295)) +- Fixed CVE-2022-23491 ([#295](https://github.com/opensearch-project/opensearch-py/pull/295)) ### Security ## [2.1.1] @@ -72,31 +72,29 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [2.0.1] ### Added -- Added Point in time API rest API([#191](https://github.com/opensearch-project/opensearch-py/pull/191)) -- Added pool_maxsize for RequestsHttpConnection ([#216](https://github.com/opensearch-project/opensearch-py/pull/216)) -- Github workflow for changelog verification ([#218](https://github.com/opensearch-project/opensearch-py/pull/218)) -- Added overload decorators to helpers-actions.pyi-"bulk" ([#239](https://github.com/opensearch-project/opensearch-py/pull/239)) -- Document Keberos authenticaion ([214](https://github.com/opensearch-project/opensearch-py/pull/214)) -- Add release workflows ([#240](https://github.com/opensearch-project/opensearch-py/pull/240)) -- Added SigV4 support for Async Opensearch Client ([#254](https://github.com/opensearch-project/opensearch-py/pull/254)) +- Added point in time support ([#191](https://github.com/opensearch-project/opensearch-py/pull/191)) +- Added `pool_maxsize` for `RequestsHttpConnection` ([#216](https://github.com/opensearch-project/opensearch-py/pull/216)) +- Added GitHub workflow for CHANGELOG verification ([#218](https://github.com/opensearch-project/opensearch-py/pull/218)) +- Added overload decorators to `helpers-actions.pyi-bulk` ([#239](https://github.com/opensearch-project/opensearch-py/pull/239)) +- Documented Kerberos authentication 
([214](https://github.com/opensearch-project/opensearch-py/pull/214)) +- Added release workflows ([#240](https://github.com/opensearch-project/opensearch-py/pull/240)) +- Added SigV4 support for async ([#254](https://github.com/opensearch-project/opensearch-py/pull/254)) - Compatibility with OpenSearch 2.1.0 - 2.4.1 ([#257](https://github.com/opensearch-project/opensearch-py/pull/257)) -- Adding explicit parameters for AIOHttpConnection and AsyncTransport ([#276](https://github.com/opensearch-project/opensearch-py/pull/276)) +- Added explicit parameters for `AIOHttpConnection` and `AsyncTransport` ([#276](https://github.com/opensearch-project/opensearch-py/pull/276)) +- Added support for a custom signing service name for AWS SigV4 ([#268](https://github.com/opensearch-project/opensearch-py/pull/268)) ### Changed -- Updated getting started to user guide ([#233](https://github.com/opensearch-project/opensearch-py/pull/233)) +- Updated getting started in user guide ([#233](https://github.com/opensearch-project/opensearch-py/pull/233)) - Updated CA certificate handling to check OpenSSL environment variables before defaulting to certifi ([#196](https://github.com/opensearch-project/opensearch-py/pull/196)) -- Updates `master` to `cluster_manager` to be inclusive ([#242](https://github.com/opensearch-project/opensearch-py/pull/242)) -- Support a custom signing service name for AWS SigV4 ([#268](https://github.com/opensearch-project/opensearch-py/pull/268)) +- Updated `master` to `cluster_manager` to be inclusive ([#242](https://github.com/opensearch-project/opensearch-py/pull/242)) - Updated CI tests to make them work locally ([#275](https://github.com/opensearch-project/opensearch-py/pull/275)) -- Fix bug with validation of 'timeout' parameter ([#387](Do not escape the "timeout" parameter.)) +- Fixed bug with validation of `timeout` ([#387](https://github.com/opensearch-project/opensearch-py/issues/387)) ### Deprecated - ### 
Removed - Removed patch versions in integration tests for OpenSearch 1.0.0 - 2.3.0 to reduce Github Action jobs ([#262](https://github.com/opensearch-project/opensearch-py/pull/262)) ### Fixed - Fixed DeprecationWarning emitted from urllib3 1.26.13+ ([#246](https://github.com/opensearch-project/opensearch-py/pull/246)) ### Security - [Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...HEAD [2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1 [2.1.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.1...v2.1.0 diff --git a/USER_GUIDE.md b/USER_GUIDE.md index 19416d31..45b0ba20 100644 --- a/USER_GUIDE.md +++ b/USER_GUIDE.md @@ -1,43 +1,16 @@ -- [User guide of OpenSearch Python client](#user-guide-of-opensearch-python-client) +- [OpenSearch Python Client User Guide](#opensearch-python-client-user-guide) - [Setup](#setup) - - [Example](#example) - - [Creating a client](#creating-a-client) - - [Creating an index](#creating-an-index) - - [Adding a document to an index](#adding-a-document-to-an-index) - - [Adding documents in bulk](#adding-documents-in-bulk) - - [Adding documents in bulk using helper functions](#adding-documents-in-bulk-using-helper-functions) - - [Searching for a document](#searching-for-a-document) - - [Deleting a document](#deleting-a-document) - - [Deleting an index](#deleting-an-index) - - [Making API calls](#making-api-calls) - - [Point in time API](#point-in-time-api) - - [Using DSL features from opensearch-dsl-py](#using-dsl-features-from-opensearch-dsl-py) - - [Searching for documents with filters](#searching-for-documents-with-filters) - - [Using plugins](#using-plugins) - - [Alerting plugin](#alerting-plugin) - - [Searching for monitors](#searching-for-monitors) - - [Getting a monitor](#getting-a-monitor) - - [Creating a monitor](#creating-a-monitor) - - [Creating a destination](#creating-a-destination) - - [Getting alerts](#getting-alerts) - - 
[Acknowledge alerts](#acknowledge-alerts) - - [Index management plugin](#index-management-plugin) - - [Creating a policy](#creating-a-policy) - - [Getting a policy](#getting-a-policy) - - [Deleting a policy](#deleting-a-policy) - - [Security plugin](#security-plugin) - - [Creating a role](#creating-a-role) - - [Getting a role](#getting-a-role) - - [Creating a user](#creating-a-user) - - [Getting a user](#getting-a-user) - - [Using different authentication methods](#using-different-authentication-methods) - - [Using IAM credentials](#using-iam-credentials) - - [Pre-requisites to use `AWSV4SignerAuth`](#pre-requisites-to-use-awsv4signerauth) - - [Using IAM authentication with an async client](#using-iam-authentication-with-an-async-client) - - [Using Kerberos](#using-kerberos) - - [Using environment settings for proxy configuration](#using-environment-settings-for-proxy-configuration) - -# User guide of OpenSearch Python client + - [Basic Features](#basic-features) + - [Creating a Client](#creating-a-client) + - [Creating an Index](#creating-an-index) + - [Adding a Document to an Index](#adding-a-document-to-an-index) + - [Searching for a Document](#searching-for-a-document) + - [Deleting a Document](#deleting-a-document) + - [Deleting an Index](#deleting-an-index) + - [Advanced Features](#advanced-features) + - [Plugins](#plugins) + +# OpenSearch Python Client User Guide ## Setup @@ -53,20 +26,22 @@ Then import it like any other module: from opensearchpy import OpenSearch ``` -To add the async client to your project, install it using [pip](https://pip.pypa.io/): +For better performance we recommend the async client. To add the async client to your project, install it using [pip](https://pip.pypa.io/): ```bash pip install opensearch-py[async] ``` -If you prefer to add the client manually or just want to examine the source code, see [opensearch-py on GitHub](https://github.com/opensearch-project/opensearch-py). 
+In general, we recommend using a package manager, such as [poetry](https://python-poetry.org/docs/), for your projects. This is the package manager used for [samples](samples). +## Basic Features -## Example -In the example given below, we create a client, an index with non-default settings, insert a -document in the index, search for the document, delete the document and finally delete the index. +In the example below, we create a client, create an index with non-default settings, insert a +document into the index, search for the document, delete the document, and finally delete the index. -### Creating a client +You can find working versions of the code below that can be run with a local instance of OpenSearch in [samples](samples). + +### Creating a Client ```python from opensearchpy import OpenSearch @@ -75,37 +50,23 @@ host = 'localhost' port = 9200 auth = ('admin', 'admin') # For testing only. Don't store credentials in code. -# Provide a CA bundle if you use intermediate CAs with your root CA. -# If this is not given, the CA bundle is is discovered from the first available -# following options: -# - OpenSSL environment variables SSL_CERT_FILE and SSL_CERT_DIR -# - certifi bundle (https://pypi.org/project/certifi/) -# - default behavior of the connection backend (most likely system certs) -ca_certs_path = '/full/path/to/root-ca.pem' - -# Optional client certificates if you don't want to use HTTP basic authentication. -# client_cert_path = '/full/path/to/client.pem' -# client_key_path = '/full/path/to/client-key.pem' - -# Create the client with SSL/TLS enabled, but hostname verification disabled. 
client = OpenSearch( hosts = [{'host': host, 'port': port}], - http_compress = True, # enables gzip compression for request bodies http_auth = auth, - # client_cert = client_cert_path, - # client_key = client_key_path, use_ssl = True, - verify_certs = True, - ssl_assert_hostname = False, - ssl_show_warn = False, - ca_certs = ca_certs_path + verify_certs = False ) + +info = client.info() +print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") ``` -### Creating an index +See [hello.py](samples/hello/hello.py) for a working sample, and [guides/ssl](guides/ssl.md) for how to setup SSL certificates. + +### Creating an Index + ```python -# Create an index with non-default settings. -index_name = 'python-test-index3' +index_name = 'test-index' index_body = { 'settings': { 'index': { @@ -114,18 +75,23 @@ index_body = { } } -response = client.indices.create(index_name, body=index_body) -print('\nCreating index:') +response = client.indices.create( + index_name, + body=index_body +) + print(response) ``` -### Adding a document to an index +### Adding a Document to an Index + ```python document = { 'title': 'Moneyball', 'director': 'Bennett Miller', 'year': '2011' } + id = '1' response = client.index( @@ -135,45 +101,11 @@ response = client.index( refresh = True ) -print('\nAdding document:') print(response) ``` -### Adding documents in bulk -```python -docs = '''{"index": {"_index": "index-2022-06-08", "_id": "1"}} -{"name": "foo"} -{"index": {"_index": "index-2022-06-09", "_id": "2"}} -{"name": "bar"} -{"index": {"_index": "index-2022-06-10", "_id": "3"}} -{"name": "baz"}''' +### Searching for a Document -response = client.bulk(docs) - -print('\nAdding bulk documents:') -print(response) -``` - -### Adding documents in bulk using helper functions -```python -docs = [] -def generate_data(): - mywords = ['foo', 'bar', 'baz'] - for index, word in enumerate(mywords): - docs.append({ - "_index": "mywords", - "word": word, - "_id": index - }) - return 
docs - -response = helpers.bulk(client, generate_data(), max_retries=3) - -print('\nAdding bulk documents using helper:') -print(response) -``` - -### Searching for a document ```python q = 'miller' query = { @@ -190,530 +122,43 @@ response = client.search( body = query, index = index_name ) -print('\nSearch results:') + print(response) ``` -### Deleting a document +### Deleting a Document + ```python response = client.delete( index = index_name, id = id ) - -print('\nDeleting document:') -print(response) -``` - -### Deleting an index -```python -response = client.indices.delete( - index = index_name -) - -print('\nDeleting index:') -print(response) -``` -## Making API calls - -### Point in time API - -```python -# create a point in time on a index -index_name = "test-index" -response = client.create_point_in_time(index=index_name, - keep_alive="1m") - -pit_id = response.get("pit_id") -print('\n Point in time ID:') -print(pit_id) - -# To list all point in time which are alive in the cluster -response = client.list_all_point_in_time() -print('\n List of all Point in Time:') -print(response) - -# To delete point in time -pit_body = { - "pit_id": [pit_id] -} - -# To delete all point in time -# client.delete_point_in_time(body=None, all=True) -response = client.delete_point_in_time(body=pit_body) - -print('\n The deleted point in time:') -print(response) -``` - -## Using DSL features from opensearch-dsl-py -opensearch-dsl-py client is now merged into the opensearch-py client. Thus, opensearch-py supports creating and indexing documents, searching with and without filters, and updating documents using queries. See [opensearch-dsl-py client documentation](https://opensearch.org/docs/latest/clients/python-high-level/) for details. - -All the APIs newly added from opensearch-dsl-py are listed in [docs](https://github.com/opensearch-project/opensearch-py/tree/main/docs/source/api-ref). 
- -In the below example, [Search API](https://github.com/opensearch-project/opensearch-py/blob/main/opensearchpy/helpers/search.py) from opensearch-dsl-py client is used. - -### Searching for documents with filters - -```python -from opensearchpy import OpenSearch, Search - - # Use the above mentioned examples for creating client. - # Then,create an index - # Add a document to the index. - - # Search for the document. - s = Search(using=client, index=index_name) \ - .filter("term", category="search") \ - .query("match", title="python") - - response = s.execute() - - print('\nSearch results:') - for hit in response: - print(hit.meta.score, hit.title) - - # Delete the document. - # Delete the index. -``` - -## Using plugins - -Plugin client definitions can be found here -- - -### Alerting plugin - -#### Searching for monitors -[API definition](https://opensearch.org/docs/latest/monitoring-plugins/alerting/api/#search-monitors) -```python -print('\Searching for monitors:') - -query = { - "query": { - "match" : { - "monitor.name": "test-monitor" - } - } -} - -response = client.plugins.alerting.search_monitor(query) -print(response) -``` - -#### Getting a monitor -[API definition](https://opensearch.org/docs/latest/monitoring-plugins/alerting/api/#get-monitor) -```python -print('\Getting a monitor:') - -response = client.plugins.alerting.get_monitor("monitorID") -print(response) -``` - -#### Creating a monitor -[API definition](https://opensearch.org/docs/latest/monitoring-plugins/alerting/api/#create-a-bucket-level-monitor) -```python -print('\Creating a bucket level monitor:') - -query = { - "type": "monitor", - "name": "Demo bucket-level monitor", - "monitor_type": "bucket_level_monitor", - "enabled": True, - "schedule": { - "period": { - "interval": 1, - "unit": "MINUTES" - } - }, - "inputs": [ - { - "search": { - "indices": [ - "python-test-index3" - ], - "query": { - "size": 0, - "query": { - "bool": { - "filter": [ - { - "range": { - "order_date": { - "from": 
"||-1h", - "to": "", - "include_lower": True, - "include_upper": True, - "format": "epoch_millis" - } - } - } - ] - } - }, - "aggregations": { - "composite_agg": { - "composite": { - "sources": [ - { - "user": { - "terms": { - "field": "user" - } - } - } - ] - }, - "aggregations": { - "avg_products_base_price": { - "avg": { - "field": "products.base_price" - } - } - } - } - } - } - } - } - ], -} - -response = client.plugins.alerting.create_monitor(query) -print(response) -``` - -#### Creating a destination -[API definition](https://opensearch.org/docs/latest/monitoring-plugins/alerting/api/#create-destination) -```python -print('\Creating an email destination:') - -query = { - "type": "email", - "name": "my-email-destination", - "email": { - "email_account_id": "YjY7mXMBx015759_IcfW", - "recipients": [ - { - "type": "email_group", - "email_group_id": "YzY-mXMBx015759_dscs" - }, - { - "type": "email", - "email": "example@email.com" - } - ] - } -} - -response = client.plugins.alerting.create_destination(query) -print(response) -``` - -#### Getting alerts -[API definition](https://opensearch.org/docs/latest/monitoring-plugins/alerting/api/#get-alerts) -```python -print('\Getting alerts:') - -response = client.plugins.alerting.get_alerts() -print(response) -``` - -#### Acknowledge alerts -[API definition](https://opensearch.org/docs/latest/monitoring-plugins/alerting/api/#acknowledge-alert) -```python -print('\Acknowledge alerts:') - -query = { - "alerts": ["eQURa3gBKo1jAh6qUo49"] -} - -response = client.plugins.alerting.acknowledge_alert(query) -print(response) -``` - -### Index management plugin - -#### Creating a policy -[API definition](https://opensearch.org/docs/latest/im-plugin/ism/api/#create-policy) -```python -print('\Creating a policy:') - -policy_name = "test-policy" -policy_content = { - "policy": { - "description": "hot warm delete workflow", - "default_state": "hot", - "schema_version": 1, - "states": [ - { - "name": "hot", - "actions": [{"rollover": 
{"min_index_age": "1d"}}], - "transitions": [{"state_name": "warm"}], - }, - { - "name": "warm", - "actions": [{"replica_count": {"number_of_replicas": 5}}], - "transitions": [{"state_name": "delete", "conditions": {"min_index_age": "30d"}}], - }, - { - "name": "delete", - "actions": [ - { - "notification": { - "destination": {"chime": {"url": ""}}, - "message_template": {"source": "The index {{ctx.index}} is being deleted"}, - } - }, - {"delete": {}}, - ], - }, - ], - "ism_template": {"index_patterns": ["log*"], "priority": 100}, - } -} - -response = client.index_managment.put_policy(policy_name, body=policy_content) print(response) ``` -#### Getting a policy -[API definition](https://opensearch.org/docs/latest/im-plugin/ism/api/#get-policy) -```python -print('\Getting a policy:') - -policy_name = "test-policy" - -response = client.index_managment.get_policy(policy_name) -print(response) -``` +### Deleting an Index -#### Deleting a policy -[API definition](https://opensearch.org/docs/latest/index_managment/access-control/api/#create-user) ```python -print('\Deleting a policy:') - -policy_name = "test-policy" - -response = client.index_managment.delete_policy(policy_name) -print(response) -``` - -### Security plugin - -#### Creating a role -[API definition](https://opensearch.org/docs/latest/security/access-control/api/#create-role) -```python -print('\Creating a role:') - -role_name = "test-role" -role_content = { - "cluster_permissions": ["cluster_monitor"], - "index_permissions": [ - { - "index_patterns": ["index", "test-*"], - "allowed_actions": [ - "data_access", - "indices_monitor", - ], - } - ], -} - -response = client.security.put_role(role_name, body=role_content) -print(response) -``` - -#### Getting a role -[API definition](https://opensearch.org/docs/latest/security/access-control/api/#get-role) -```python -print('\Getting a role:') - -role_name = "test-role" - -response = client.security.get_role(role_name) -print(response) -``` - -#### Creating a user 
-[API definition](https://opensearch.org/docs/latest/security/access-control/api/#create-user) -```python -print('\Creating a user:') - -user_name = "test-user" -user_content = {"password": "test_password", "opendistro_security_roles": []} - -response = client.security.put_role(user_name, body=user_content) -print(response) -``` - -#### Getting a user -[API definition](https://opensearch.org/docs/latest/security/access-control/api/#get-user) -```python -print('\Getting a user:') - -user_name = "test-user" - -response = client.security.get_user(user_name) -print(response) -``` - -## Using different authentication methods - -It is possible to use different methods for the authentication to OpenSearch. The parameters of `connection_class` and `http_auth` can be used for this. The following examples show how to authenticate using IAM credentials and using Kerberos. - -### Using IAM credentials - -Refer the AWS documentation regarding usage of IAM credentials to sign requests to OpenSearch APIs - [Signing HTTP requests to Amazon OpenSearch Service.](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/request-signing.html#request-signing-python) - -Opensearch-py client library also provides an in-house IAM based authentication feature, `AWSV4SignerAuth` that will help users to connect to their opensearch clusters by making use of IAM roles. - -`AWSV4SignerAuth` uses RequestHttpConnection as transport class for communication with opensearch clusters. Opensearch-py client library provides `pool_maxsize` option to modify default connection-pool size. 
- -#### Pre-requisites to use `AWSV4SignerAuth` - - Python version 3.6 or above, - - Install [botocore](https://pypi.org/project/botocore/) using pip - - `pip install botocore` - -Here is the sample code that uses `AWSV4SignerAuth` - - -```python -from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth -import boto3 - -host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -region = 'us-west-2' -service = 'es' # 'aoss' for OpenSearch Serverless -credentials = boto3.Session().get_credentials() -auth = AWSV4SignerAuth(credentials, region, service) -index_name = 'python-test-index3' - -client = OpenSearch( - hosts = [{'host': host, 'port': 443}], - http_auth = auth, - use_ssl = True, - verify_certs = True, - connection_class = RequestsHttpConnection, - pool_maxsize = 20 -) - -q = 'miller' -query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } -} - -response = client.search( - body = query, +response = client.indices.delete( index = index_name ) -print('\nSearch results:') print(response) ``` -## Using IAM authentication with an async client - -Make sure to use `AsyncOpenSearch` with the `AsyncHttpConnection` connection class with the async `AWSV4SignerAsyncAuth` signer. 
+## Advanced Features -- Requires opensearch-py[async] +- [Authentication (IAM, SigV4)](guides/auth.md) +- [Configuring SSL](guides/ssl.md) +- [Bulk Indexing](guides/bulk.md) +- [High Level DSL](guides/dsl.md) +- [Index Lifecycle](guides/index_lifecycle.md) +- [Search](guides/search.md) +- [Point in Time](guides/point_in_time.md) +- [Using a Proxy](guides/proxy.md) +## Plugins -Here is the sample code that uses `AWSV4SignerAsyncAuth` - - -```python -from opensearchpy import AsyncOpenSearch, AsyncHttpConnection, AWSV4SignerAsyncAuth -import boto3 - -host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -region = 'us-west-2' -service = 'es' # 'aoss' for OpenSearch Serverless -credentials = boto3.Session().get_credentials() -auth = AWSV4SignerAsyncAuth(credentials, region, service) -index_name = 'python-test-index3' - -client = AsyncOpenSearch( - hosts = [{'host': host, 'port': 443}], - http_auth = auth, - use_ssl = True, - verify_certs = True, - connection_class = AsyncHttpConnection -) - -async def search(): - q = 'miller' - query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } - } - - response = await client.search( - body = query, - index = index_name - ) - - print('\nSearch results:') - print(response) - -search() -``` - -### Using Kerberos - -There are several python packages that provide Kerberos support over HTTP connections, such as [requests-kerberos](http://pypi.org/project/requests-kerberos) and [requests-gssapi](https://pypi.org/project/requests-gssapi). The following example shows how to setup the authentication. Note that some of the parameters, such as `mutual_authentication` might depend on the server settings. 
-```python - -from opensearchpy import OpenSearch, RequestsHttpConnection -from requests_kerberos import HTTPKerberosAuth, OPTIONAL - -client = OpenSearch( - ['htps://...'], - use_ssl=True, - verify_certs=True, - connection_class=RequestsHttpConnection, - http_auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL) -) - -health = client.cluster.health() -``` - -## Using environment settings for proxy configuration - -Tell connection to get proxy information from `HTTP_PROXY` / `HTTPS_PROXY` environment variables or `~/.netrc` file if present. - -```python -from opensearchpy import OpenSearch, RequestsHttpConnection - - -OpenSearch( - hosts=["htps://..."], - use_ssl=True, - verify_certs=True, - connection_class=RequestsHttpConnection, - trust_env=True, -) -``` - - -```python -from opensearchpy import AsyncOpenSearch, AIOHttpConnection - -client = AsyncOpenSearch( - hosts=["htps://..."], - use_ssl=True, - verify_certs=True, - connection_class=AIOHttpConnection, - trust_env=True, -) -``` +- [Security](guides/plugins/security.md) +- [Alerting](guides/plugins/alerting.md) +- [Index Management](guides/plugins/index_management.md) \ No newline at end of file diff --git a/guides/auth.md b/guides/auth.md new file mode 100644 index 00000000..4b314764 --- /dev/null +++ b/guides/auth.md @@ -0,0 +1,121 @@ +- [Authentication](#authentication) + - [IAM Authentication](#iam-authentication) + - [IAM Authentication with an Async Client](#iam-authentication-with-an-async-client) + - [Kerberos](#kerberos) + +# Authentication + +OpenSearch allows you to use different methods for authentication via `connection_class` and `http_auth` parameters. + +## IAM Authentication + +Opensearch-py supports IAM-based authentication via `AWSV4SignerAuth`, which uses `RequestsHttpConnection` as the transport class for communicating with OpenSearch clusters running in Amazon OpenSearch Service and OpenSearch Serverless, and works in conjunction with [botocore](https://pypi.org/project/botocore/). 
+ +```python +from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth +import boto3 + +host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com +region = 'us-west-2' +service = 'es' # 'aoss' for OpenSearch Serverless +credentials = boto3.Session().get_credentials() +auth = AWSV4SignerAuth(credentials, region, service) + +client = OpenSearch( + hosts = [{'host': host, 'port': 443}], + http_auth = auth, + use_ssl = True, + verify_certs = True, + connection_class = RequestsHttpConnection, + pool_maxsize = 20 +) + +index_name = 'test-index' + +q = 'miller' + +query = { + 'size': 5, + 'query': { + 'multi_match': { + 'query': q, + 'fields': ['title^2', 'director'] + } + } +} + +response = client.search( + body = query, + index = index_name +) + +print('\nSearch results:') +print(response) +``` + +## IAM Authentication with an Async Client + +Use `AsyncOpenSearch` with the `AsyncHttpConnection` connection class and the async `AWSV4SignerAsyncAuth` signer. 
+ +```python +from opensearchpy import AsyncOpenSearch, AsyncHttpConnection, AWSV4SignerAsyncAuth +import boto3 +import asyncio +host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com +region = 'us-west-2' +service = 'es' # 'aoss' for OpenSearch Serverless +credentials = boto3.Session().get_credentials() +auth = AWSV4SignerAsyncAuth(credentials, region, service) + +client = AsyncOpenSearch( + hosts = [{'host': host, 'port': 443}], + http_auth = auth, + use_ssl = True, + verify_certs = True, + connection_class = AsyncHttpConnection +) + +async def search(): + index_name = 'test-index' + + q = 'miller' + query = { + 'size': 5, + 'query': { + 'multi_match': { + 'query': q, + 'fields': ['title^2', 'director'] + } + } + } + + response = await client.search( + body = query, + index = index_name + ) + + print(response) + +asyncio.run(search()) +``` + +## Kerberos + +There are several Python packages that provide Kerberos support over HTTP, such as [requests-kerberos](http://pypi.org/project/requests-kerberos) and [requests-gssapi](https://pypi.org/project/requests-gssapi). The following example shows how to set up Kerberos authentication. + +Note that some of the parameters, such as `mutual_authentication` might depend on the server settings.
+ +```python +from opensearchpy import OpenSearch, RequestsHttpConnection +from requests_kerberos import HTTPKerberosAuth, OPTIONAL + +client = OpenSearch( + ['https://...'], + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + http_auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL) +) + +health = client.cluster.health() +``` diff --git a/guides/bulk.md b/guides/bulk.md new file mode 100644 index 00000000..8883a9fa --- /dev/null +++ b/guides/bulk.md @@ -0,0 +1,46 @@ +- [Bulk Indexing](#bulk-indexing) + - [Use a Helper](#use-a-helper) + +# Bulk Indexing + +The [Bulk API](https://opensearch.org/docs/latest/api-reference/document-apis/bulk/) lets you add, update, or delete multiple documents in a single request. + +```python +from opensearchpy import OpenSearch + +client = OpenSearch(...) + +docs = ''' +{"index": {"_index": "index-2022-06-08", "_id": "1"}} +{"name": "foo"} +{"index": {"_index": "index-2022-06-09", "_id": "2"}} +{"name": "bar"} +{"index": {"_index": "index-2022-06-10", "_id": "3"}} +{"name": "baz"} +''' + +response = client.bulk(docs) +print(response) +``` + +## Use a Helper + +```python +from opensearchpy import OpenSearch, helpers + +client = OpenSearch(...) + +docs = [] +def generate_data(): + mywords = ['foo', 'bar', 'baz'] + for index, word in enumerate(mywords): + docs.append({ + "_index": "mywords", + "word": word, + "_id": index + }) + return docs + +response = helpers.bulk(client, generate_data(), max_retries=3) +print(response) +``` diff --git a/guides/dsl.md b/guides/dsl.md new file mode 100644 index 00000000..8ca6c902 --- /dev/null +++ b/guides/dsl.md @@ -0,0 +1,25 @@ +- [High Level DSL](#high-level-dsl) + +## High Level DSL + +The opensearch-py client includes a high level interface called opensearch-py-dsl that supports creating and indexing documents, searching with and without filters, and updating documents using queries.
See [opensearch-dsl-py client documentation](https://opensearch.org/docs/latest/clients/python-high-level/) for details and [the API reference](https://github.com/opensearch-project/opensearch-py/tree/main/docs/source/api-ref). + +In the below example, [Search API](https://github.com/opensearch-project/opensearch-py/blob/main/opensearchpy/helpers/search.py) from opensearch-dsl-py client is used. + +```python +from opensearchpy import OpenSearch, Search + +client = OpenSearch(...) + +s = ( + Search(using=client, + index=index_name) + .filter("term", category="search") + .query("match", title="python") +) + +response = s.execute() + +for hit in response: + print(hit.meta.score, hit.title) +``` \ No newline at end of file diff --git a/guides/index_lifecycle.md b/guides/index_lifecycle.md index 3a9bf86d..6971ebce 100644 --- a/guides/index_lifecycle.md +++ b/guides/index_lifecycle.md @@ -1,3 +1,12 @@ +- [Index Lifecycle](#index-lifecycle) + - [Setup](#setup) + - [Index API Actions](#index-api-actions) + - [Create a New Index](#create-a-new-index) + - [Update an Index](#update-an-index) + - [Get Metadata for an Index](#get-metadata-for-an-index) + - [Delete an Index](#delete-an-index) + - [Cleanup](#cleanup) + # Index Lifecycle This guide covers OpenSearch Python Client API actions for Index Lifecycle. You'll learn how to create, read, update, and delete indices in your OpenSearch cluster. We will also leverage index templates to create default settings and mappings for indices of certain patterns. @@ -29,7 +38,7 @@ print(client.info()) # Check server info and make sure the client is connected ## Index API Actions -### Create a new index +### Create a New Index You can quickly create an index with default settings and mappings by using the `indices.create` API action.
The following example creates an index named `paintings` with default settings and mappings: diff --git a/guides/plugins/alerting.md b/guides/plugins/alerting.md new file mode 100644 index 00000000..da9bdd4c --- /dev/null +++ b/guides/plugins/alerting.md @@ -0,0 +1,149 @@ +- [Alerting Plugin](#alerting-plugin) + - [Creating a Monitor](#creating-a-monitor) + - [Get a Monitor](#get-a-monitor) + - [Search for a Monitor](#search-for-a-monitor) + - [Create an Email Destination](#create-an-email-destination) + - [Get Alerts](#get-alerts) + - [Acknowledge Alerts](#acknowledge-alerts) + +### Alerting Plugin + +You can use the [Alerting Plugin API](https://opensearch.org/docs/latest/observing-your-data/alerting/api/) to programmatically create, update, and manage monitors and alerts. + +#### Creating a Monitor + +Create a bucket-level monitor. + +```python +query = { + "type": "monitor", + "name": "Demo bucket-level monitor", + "monitor_type": "bucket_level_monitor", + "enabled": True, + "schedule": { + "period": { + "interval": 1, + "unit": "MINUTES" + } + }, + "inputs": [ + { + "search": { + "indices": [ + "test-index" + ], + "query": { + "size": 0, + "query": { + "bool": { + "filter": [ + { + "range": { + "order_date": { + "from": "||-1h", + "to": "", + "include_lower": True, + "include_upper": True, + "format": "epoch_millis" + } + } + } + ] + } + }, + "aggregations": { + "composite_agg": { + "composite": { + "sources": [ + { + "user": { + "terms": { + "field": "user" + } + } + } + ] + }, + "aggregations": { + "avg_products_base_price": { + "avg": { + "field": "products.base_price" + } + } + } + } + } + } + } + } + ], +} + +response = client.plugins.alerting.create_monitor(query) +print(response) +``` + +#### Get a Monitor + +```python +response = client.plugins.alerting.get_monitor("monitorID") +print(response) +``` + +#### Search for a Monitor + +```python +query = { + "query": { + "match" : { + "monitor.name": "test-monitor" + } + } +} + +response = 
client.plugins.alerting.search_monitor(query) +print(response) +``` + +#### Create an Email Destination + +```python +query = { + "type": "email", + "name": "my-email-destination", + "email": { + "email_account_id": "YjY7mXMBx015759_IcfW", + "recipients": [ + { + "type": "email_group", + "email_group_id": "YzY-mXMBx015759_dscs" + }, + { + "type": "email", + "email": "example@email.com" + } + ] + } +} + +response = client.plugins.alerting.create_destination(query) +print(response) +``` + +#### Get Alerts + +```python +response = client.plugins.alerting.get_alerts() +print(response) +``` + +#### Acknowledge Alerts + +```python +query = { + "alerts": ["eQURa3gBKo1jAh6qUo49"] +} + +response = client.plugins.alerting.acknowledge_alert(query) +print(response) +``` diff --git a/guides/plugins/index_management.md b/guides/plugins/index_management.md new file mode 100644 index 00000000..77f81a32 --- /dev/null +++ b/guides/plugins/index_management.md @@ -0,0 +1,68 @@ +- [Index Management Plugin](#index-management-plugin) + - [Create a Policy](#create-a-policy) + - [Get a Policy](#get-a-policy) + - [Delete a Policy](#delete-a-policy) + +### Index Management Plugin + +You can use the [Index Management Plugin (ISM) API](https://opensearch.org/docs/latest/im-plugin/ism/api) to programmatically automate periodic, administrative operations on indexes by triggering them based on changes in the index age, index size, or number of documents. 
+ +#### Create a Policy + +```python +policy_name = "test-policy" + +policy_content = { + "policy": { + "description": "hot warm delete workflow", + "default_state": "hot", + "schema_version": 1, + "states": [ + { + "name": "hot", + "actions": [{"rollover": {"min_index_age": "1d"}}], + "transitions": [{"state_name": "warm"}], + }, + { + "name": "warm", + "actions": [{"replica_count": {"number_of_replicas": 5}}], + "transitions": [{"state_name": "delete", "conditions": {"min_index_age": "30d"}}], + }, + { + "name": "delete", + "actions": [ + { + "notification": { + "destination": {"chime": {"url": ""}}, + "message_template": {"source": "The index {{ctx.index}} is being deleted"}, + } + }, + {"delete": {}}, + ], + }, + ], + "ism_template": {"index_patterns": ["log*"], "priority": 100}, + } +} + +response = client.index_management.put_policy(policy_name, body=policy_content) +print(response) +``` + +#### Get a Policy + +```python +policy_name = "test-policy" + +response = client.index_management.get_policy(policy_name) +print(response) +``` + +#### Delete a Policy + +```python +policy_name = "test-policy" + +response = client.index_management.delete_policy(policy_name) +print(response) +``` diff --git a/guides/plugins/security.md b/guides/plugins/security.md new file mode 100644 index 00000000..4a192692 --- /dev/null +++ b/guides/plugins/security.md @@ -0,0 +1,59 @@ +- [Security Plugin](#security-plugin) + - [Create a Role](#create-a-role) + - [Get a Role](#get-a-role) + - [Create a User](#create-a-user) + - [Get a User](#get-a-user) + +### Security Plugin + +The [Security Plugin API](https://opensearch.org/docs/latest/security/access-control/api/) lets you programmatically create and manage users, roles, role mappings, action groups, and tenants.
+ +#### Create a Role + +```python +role_name = "test-role" + +role_content = { + "cluster_permissions": ["cluster_monitor"], + "index_permissions": [ + { + "index_patterns": ["index", "test-*"], + "allowed_actions": [ + "data_access", + "indices_monitor", + ], + } + ], +} + +response = client.security.put_role(role_name, body=role_content) +print(response) +``` + +#### Get a Role + +```python +role_name = "test-role" + +response = client.security.get_role(role_name) +print(response) +``` + +#### Create a User + +```python +user_name = "test-user" +user_content = {"password": "test_password", "opendistro_security_roles": []} + +response = client.security.put_user(user_name, body=user_content) +print(response) +``` + +#### Get a User + +```python +user_name = "test-user" + +response = client.security.get_user(user_name) +print(response) +``` diff --git a/guides/point_in_time.md b/guides/point_in_time.md new file mode 100644 index 00000000..49c84c24 --- /dev/null +++ b/guides/point_in_time.md @@ -0,0 +1,43 @@ +- [Point-in-Time](#point-in-time) + +### Point-in-Time + +[Point in Time (PIT)](https://opensearch.org/docs/latest/search-plugins/point-in-time/) lets you run different queries against a dataset that is fixed in time. + +Create a point in time on an index. + +```python +index_name = "test-index" +response = client.create_point_in_time( + index=index_name, + keep_alive="1m" +) + +pit_id = response.get("pit_id") +print('\n Point in time ID:') +print(pit_id) +``` + +List all points in time that are alive in the cluster. + +```python +response = client.list_all_point_in_time() +print(response) +``` + +Delete a point in time. + +```python +pit_body = { + "pit_id": [pit_id] +} +response = client.delete_point_in_time(body=pit_body) +print(response) +``` + +Delete all point in time.
+ +```python +response = client.delete_point_in_time(body=None, all=True) +print(response) +``` diff --git a/guides/proxy.md b/guides/proxy.md new file mode 100644 index 00000000..5be7edf4 --- /dev/null +++ b/guides/proxy.md @@ -0,0 +1,33 @@ +- [Using a Proxy](#using-a-proxy) + - [Using a Proxy with a Sync Client](#using-a-proxy-with-a-sync-client) + - [Using a Proxy with an Async Client](#using-a-proxy-with-an-async-client) + +# Using a Proxy + +## Using a Proxy with a Sync Client + +```python +from opensearchpy import OpenSearch, RequestsHttpConnection + +OpenSearch( + hosts=["https://..."], + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + trust_env=True, +) +``` + +## Using a Proxy with an Async Client + +```python +from opensearchpy import AsyncOpenSearch, AIOHttpConnection + +client = AsyncOpenSearch( + hosts=["https://..."], + use_ssl=True, + verify_certs=True, + connection_class=AIOHttpConnection, + trust_env=True, +) +``` \ No newline at end of file diff --git a/guides/search.md b/guides/search.md index 62e697f9..2b015d0e 100644 --- a/guides/search.md +++ b/guides/search.md @@ -1,16 +1,29 @@ +- [Search](#search) + - [Setup](#setup) + - [Search API](#search-api) + - [Basic Search](#basic-search) + - [Basic Pagination](#basic-pagination) + - [Pagination with Scroll](#pagination-with-scroll) + - [Pagination with Point in Time](#pagination-with-point-in-time) + - [Cleanup](#cleanup) + # Search + OpenSearch provides a powerful search API that allows you to search for documents in an index. The search API supports a number of parameters that allow you to customize the search operation. In this guide, we will explore the search API and its parameters.
-# Setup +## Setup + Let's start by creating an index and adding some documents to it: ```python from opensearchpy import OpenSearch -# Create an OpenSearch client +# create an OpenSearch client client = OpenSearch(hosts=['localhost']) -# Create an index + +# create an index client.indices.create(index='movies') -# Add 10 documents to the index + +# add 10 documents to the index for i in range(10): client.index( index='movies', @@ -21,7 +34,8 @@ for i in range(10): 'year': 2008 + i } ) -# Add additional documents to the index + +# add additional documents to the index client.index( index='movies', body={ @@ -30,6 +44,7 @@ client.index( 'year': 1972 } ) + client.index( index='movies', body={ @@ -38,7 +53,8 @@ client.index( 'year': 1994 } ) -# Refresh the index to make the documents searchable + +# refresh the index to make the documents searchable client.indices.refresh(index='movies') ``` @@ -49,18 +65,20 @@ client.indices.refresh(index='movies') The search API allows you to search for documents in an index. The following example searches for ALL documents in the `movies` index: ```python -# Search for all documents in the 'movies' index +# search for all documents in the 'movies' index response = client.search(index='movies') -# Extract the count of hits from the response + +# extract the count of hits from the response hits_count = response['hits']['total']['value'] -# Print the count of hits + +# print the count of hits print("Total Hits: ", hits_count) ``` You can also search for documents that match a specific query. 
The following example searches for documents that match the query `dark knight`: ```python -# Define the query +# define the query query = { "query": { "match": { @@ -68,23 +86,26 @@ query = { } } } -# Search for documents in the 'movies' index with the given query + +# search for documents in the 'movies' index with the given query response = client.search(index='movies', body=query) -# Extract the hits from the response + +# extract the hits from the response hits = response['hits']['hits'] -# Print the hits + +# print the hits for hit in hits: print(hit) ``` -OpenSearch query DSL allows you to specify complex queries. Check out the [OpenSearch query DSL documentation](https://opensearch.org/docs/latest/query-dsl/) for more information. +OpenSearch query DSL allows you to specify more complex queries. Check out the [OpenSearch query DSL documentation](https://opensearch.org/docs/latest/query-dsl/) for more information. ### Basic Pagination The search API allows you to paginate through the search results. 
The following example searches for documents that match the query `dark knight`, sorted by `year` in ascending order, and returns the first 2 results after skipping the first 5 results: ```python -# Define the search query with sorting and pagination options +# define the search query with sorting and pagination options search_body = { "query": { "match": { @@ -99,17 +120,19 @@ search_body = { } ] } -# Perform the search operation on the 'movies' index with the defined query and pagination options + +# perform the search operation on the 'movies' index with the defined query and pagination options response = client.search( index='movies', size=2, from_=5, body=search_body ) -# Extract the hits from the response + +# extract the hits from the response hits = response['hits']['hits'] -# Print the hits +# print the hits for hit in hits: print(hit) ``` @@ -117,7 +140,7 @@ for hit in hits: With sorting, you can also use the `search_after` parameter to paginate through the search results. Let's say you have already displayed the first page of results, and you want to display the next page. You can use the `search_after` parameter to paginate through the search results. 
The following example will demonstrate how to get the first 3 pages of results using the search query of the previous example: ```python -# Define the search query with sorting and pagination options +# define the search query with sorting and pagination options search_body = { "query": { "match": { @@ -133,32 +156,39 @@ search_body = { ], "size": 2 } -# Perform the search operation on the 'movies' index with the defined query and pagination options + +# perform the search operation on the 'movies' index with the defined query and pagination options response = client.search( index='movies', body=search_body ) -# Extract the hits from the response + +# extract the hits from the response hits = response['hits']['hits'] -# Get the last sort value from the first page + +# get the last sort value from the first page search_after = hits[-1]['sort'] -# Fetch page 2 + +# fetch page 2 search_body["search_after"] = search_after response = client.search( index='movies', body=search_body ) hits_page_2 = response['hits']['hits'] -# Get the last sort value from page 2 + +# get the last sort value from page 2 search_after = hits_page_2[-1]['sort'] -# Fetch page 3 + +# fetch page 3 search_body["search_after"] = search_after response = client.search( index='movies', body=search_body ) + hits_page_3 = response['hits']['hits'] -# Print the hits from each page +# print the hits from each page print("Page 1:") for hit in hits: print(hit) @@ -170,13 +200,12 @@ for hit in hits_page_3: print(hit) ``` - -### Pagination with scroll +### Pagination with Scroll When retrieving large amounts of non-real-time data, you can use the `scroll` parameter to paginate through the search results. 
```python -# Define the search query with scroll and pagination options +# define the search query with scroll and pagination options search_body = { "query": { "match": { @@ -185,40 +214,44 @@ search_body = { }, "size": 2 } -# Perform the initial search operation on the 'movies' index with the defined query and scroll options + +# perform the initial search operation on the 'movies' index with the defined query and scroll options page_1 = client.search( index='movies', scroll='1m', body=search_body ) -# Extract the scroll_id from the response + +# extract the scroll_id from the response scroll_id = page_1['_scroll_id'] -# Perform the scroll operation to get the next page of results + +# perform the scroll operation to get the next page of results page_2 = client.scroll( scroll_id=scroll_id, scroll='1m' ) -# Extract the scroll_id from the response + +# extract the scroll_id from the response scroll_id = page_2['_scroll_id'] -# Perform another scroll operation to get the third page of results + +# perform another scroll operation to get the third page of results page_3 = client.scroll( scroll_id=scroll_id, scroll='1m' ) -# Extract the hits from each page of results + +# extract the hits from each page of results hits_page_1 = page_1['hits']['hits'] hits_page_2 = page_2['hits']['hits'] hits_page_3 = page_3['hits']['hits'] ``` - - ### Pagination with Point in Time The scroll example above has one weakness: if the index is updated while you are scrolling through the results, they will be paginated inconsistently. To avoid this, you should use the "Point in Time" feature. 
The following example demonstrates how to use the `point_in_time` and `pit_id` parameters to paginate through the search results: ```python -# Define the search query with sorting and pagination options +# define the search query with sorting and pagination options search_body = { "query": { "match": { @@ -233,20 +266,24 @@ search_body = { } ] } + # create a point in time pit = client.create_point_in_time( index = 'movies', keep_alive = '1m' ) -# Include pit info in the search body + +# include pit info in the search body search_body.update( - {'pit': { - 'id': pit['pit_id'], - 'keep_alive': '1m' - } - }) + { + 'pit': { + 'id': pit['pit_id'], + 'keep_alive': '1m' + } + } +) pit_search_body = search_body -# Get the first 3 pages of results +# get the first 3 pages of results page_1 = client.search( size = 2, body = pit_search_body @@ -261,10 +298,12 @@ page_3 = client.search( size = 2, body = pit_search_body )['hits']['hits'] -# Print out the titles of the first 3 pages of results + +# print out the titles of the first 3 pages of results print([hit['_source']['title'] for hit in page_1]) print([hit['_source']['title'] for hit in page_2]) print([hit['_source']['title'] for hit in page_3]) + # delete the point in time client.delete_point_in_time(body = { 'pit_id': pit['pit_id'] }) ``` diff --git a/guides/ssl.md b/guides/ssl.md new file mode 100644 index 00000000..8ee73941 --- /dev/null +++ b/guides/ssl.md @@ -0,0 +1,37 @@ +- [SSL](#ssl) + +# SSL + +```python +from opensearchpy import OpenSearch + +host = 'localhost' +port = 9200 +auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + +# Provide a CA bundle if you use intermediate CAs with your root CA. 
+# If this is not given, the CA bundle is discovered from the first available +following options: + - OpenSSL environment variables SSL_CERT_FILE and SSL_CERT_DIR + - certifi bundle (https://pypi.org/project/certifi/) + - default behavior of the connection backend (most likely system certs) +ca_certs_path = '/full/path/to/root-ca.pem' + +# Optional client certificates if you don't want to use HTTP basic authentication. +# client_cert_path = '/full/path/to/client.pem' +# client_key_path = '/full/path/to/client-key.pem' + +# Create the client with SSL/TLS enabled, but hostname verification disabled. +client = OpenSearch( + hosts = [{'host': host, 'port': port}], + http_compress = True, # enables gzip compression for request bodies + http_auth = auth, + # client_cert = client_cert_path, + # client_key = client_key_path, + use_ssl = True, + verify_certs = True, + ssl_assert_hostname = False, + ssl_show_warn = False, + ca_certs = ca_certs_path +) +``` diff --git a/samples/README.md b/samples/README.md new file mode 100644 index 00000000..ad431cd8 --- /dev/null +++ b/samples/README.md @@ -0,0 +1,19 @@ +# OpenSearch Python Samples + +Most samples can be run using OpenSearch installed locally with docker. + +``` +docker pull opensearchproject/opensearch:latest +docker run -d -p 9200:9200 -p 9600:9600 -e "discovery.type=single-node" opensearchproject/opensearch:latest +``` + +## Prerequisites + +Install [poetry](https://python-poetry.org/docs/). + +## Run Samples + +``` +poetry install +poetry run hello/hello.py +``` diff --git a/samples/hello/hello.py b/samples/hello/hello.py new file mode 100755 index 00000000..e02e9fce --- /dev/null +++ b/samples/hello/hello.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python + +# A basic OpenSearch "Hello World" sample that prints the version of the server. + +from opensearchpy import OpenSearch + +# connect to OpenSearch + +host = 'localhost' +port = 9200 +auth = ('admin', 'admin') # For testing only. Don't store credentials in code.
+ +client = OpenSearch( + hosts = [{'host': host, 'port': port}], + http_auth = auth, + use_ssl = True, + verify_certs = False, + ssl_show_warn = False +) + +info = client.info() +print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + +# create an index + +index_name = 'test-index' + +index_body = { + 'settings': { + 'index': { + 'number_of_shards': 4 + } + } +} + +response = client.indices.create( + index_name, + body=index_body +) + +print(response) + +# add a document to the index + +document = { + 'title': 'Moneyball', + 'director': 'Bennett Miller', + 'year': '2011' +} + +id = '1' + +response = client.index( + index = index_name, + body = document, + id = id, + refresh = True +) + +print(response) + +# search for a document + +q = 'miller' + +query = { + 'size': 5, + 'query': { + 'multi_match': { + 'query': q, + 'fields': ['title^2', 'director'] + } + } +} + +response = client.search( + body = query, + index = index_name +) + +print(response) + +# delete the document + +response = client.delete( + index = index_name, + id = id +) + +print(response) + +# delete the index + +response = client.indices.delete( + index = index_name +) + +print(response) diff --git a/samples/poetry.lock b/samples/poetry.lock new file mode 100644 index 00000000..136ad252 --- /dev/null +++ b/samples/poetry.lock @@ -0,0 +1,200 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "opensearch-py" +version = "2.2.0" +description = "Python client for OpenSearch" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +files = [] +develop = false + +[package.dependencies] +certifi = ">=2022.12.07" +python-dateutil = "*" +requests = ">=2.4.0,<3.0.0" +six = "*" +urllib3 = ">=1.21.1,<2" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +develop = ["black", "botocore", "coverage (<7.0.0)", 
"jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +kerberos = ["requests_kerberos"] + +[package.source] +type = "directory" +url = ".." + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "urllib3" +version = "1.26.16" +description 
= "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "791b2dfa2f8737e5e6b351a7861709318eec0a174638235078b442bdbed81b76" diff --git a/samples/poetry.toml b/samples/poetry.toml new file mode 100644 index 00000000..eadfd54b --- /dev/null +++ b/samples/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +create = true \ No newline at end of file diff --git a/samples/pyproject.toml b/samples/pyproject.toml new file mode 100644 index 00000000..dfc0775a --- /dev/null +++ b/samples/pyproject.toml @@ -0,0 +1,15 @@ +[tool.poetry] +name = "package" +version = "0.1.0" +description = "OpenSearch samples." +authors = ["Daniel Doubrovkine "] +license = "Apache 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.7" +opensearch-py = { path = "../" } + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api"