.config/coc/extensions/coc-python-data/languageServer.0.5.59/Typeshed/third_party/2and3/pynamodb/connection/table.pyi
from typing import Any, Optional

class TableConnection:
    table_name: Any
    connection: Any
    def __init__(self, table_name, region: Optional[Any] = ..., host: Optional[Any] = ..., session_cls: Optional[Any] = ..., request_timeout_seconds: Optional[Any] = ..., max_retry_attempts: Optional[Any] = ..., base_backoff_ms: Optional[Any] = ...) -> None: ...
    def delete_item(self, hash_key, range_key: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ...
    def update_item(self, hash_key, range_key: Optional[Any] = ..., attribute_updates: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ..., return_values: Optional[Any] = ...): ...
    def put_item(self, hash_key, range_key: Optional[Any] = ..., attributes: Optional[Any] = ..., expected: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., return_values: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ...
    def batch_write_item(self, put_items: Optional[Any] = ..., delete_items: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., return_item_collection_metrics: Optional[Any] = ...): ...
    def batch_get_item(self, keys, consistent_read: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., attributes_to_get: Optional[Any] = ...): ...
    def get_item(self, hash_key, range_key: Optional[Any] = ..., consistent_read: bool = ..., attributes_to_get: Optional[Any] = ...): ...
    def scan(self, attributes_to_get: Optional[Any] = ..., limit: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., scan_filter: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., segment: Optional[Any] = ..., total_segments: Optional[Any] = ..., exclusive_start_key: Optional[Any] = ...): ...
    def query(self, hash_key, attributes_to_get: Optional[Any] = ..., consistent_read: bool = ..., exclusive_start_key: Optional[Any] = ..., index_name: Optional[Any] = ..., key_conditions: Optional[Any] = ..., query_filters: Optional[Any] = ..., limit: Optional[Any] = ..., return_consumed_capacity: Optional[Any] = ..., scan_index_forward: Optional[Any] = ..., conditional_operator: Optional[Any] = ..., select: Optional[Any] = ...): ...
    def describe_table(self): ...
    def delete_table(self): ...
    def update_table(self, read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_index_updates: Optional[Any] = ...): ...
    def create_table(self, attribute_definitions: Optional[Any] = ..., key_schema: Optional[Any] = ..., read_capacity_units: Optional[Any] = ..., write_capacity_units: Optional[Any] = ..., global_secondary_indexes: Optional[Any] = ..., local_secondary_indexes: Optional[Any] = ..., stream_specification: Optional[Any] = ...): ...
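A minimal usage sketch of the API stubbed above, assuming pynamodb is installed and its runtime TableConnection matches these signatures; the table name, key, region, and attribute values are illustrative only, and the low-level attribute format is an assumption:

from pynamodb.connection.table import TableConnection

# Illustrative values; this low-level API is assumed to take attribute values
# in DynamoDB's typed wire format (e.g. {"S": ...} for strings).
conn = TableConnection("users", region="us-east-1")
conn.put_item("user-1", attributes={"email": {"S": "a@example.com"}})
item = conn.get_item("user-1", consistent_read=True)
print(item)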