app-server
Advanced tools
| import typing as t | ||
| from wsgiref.types import StartResponse, WSGIEnvironment | ||
| from werkzeug.wrappers import Request, Response | ||
class AppWrapper:
    """Fallback WSGI app that answers every request with a 404 response."""

    @staticmethod
    def wsgi_app(
        environ: WSGIEnvironment, start_response: StartResponse
    ) -> t.Iterable[bytes]:
        """Build and invoke a 404 response echoing the rejected path."""
        req = Request(environ)
        body = f'Path not found or invalid: {req.path}'
        return Response(body, status=404)(environ, start_response)

    def __call__(
        self, environ: WSGIEnvironment, start_response: StartResponse
    ) -> t.Iterable[bytes]:
        """Delegate straight to :meth:`wsgi_app`."""
        return self.wsgi_app(environ, start_response)
| import re | ||
| import typing as t | ||
| from wsgiref.types import StartResponse, WSGIEnvironment | ||
| from werkzeug.middleware.dispatcher import DispatcherMiddleware | ||
class Dispatcher(DispatcherMiddleware):
    """Dispatch to the first mounted app whose route regex matches.

    Unlike the stock ``DispatcherMiddleware`` (which matches by path
    prefix), the mount keys here are treated as regular expressions
    matched against ``PATH_INFO``.
    """

    def __call__(
        self, environ: WSGIEnvironment, start_response: StartResponse
    ) -> t.Iterable[bytes]:
        # Fall back to the wrapped app when no "/" mount was registered,
        # instead of raising a KeyError before matching even starts.
        app = self.mounts.get("/", self.app)
        for route, mounted_app in self.mounts.items():
            if re.match(route, environ["PATH_INFO"]):
                app = mounted_app
                break
        return app(environ, start_response)
| import typing as t | ||
| from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment | ||
| from werkzeug.middleware.http_proxy import ProxyMiddleware | ||
| from werkzeug.wsgi import get_path_info | ||
class Proxy(ProxyMiddleware):
    """``ProxyMiddleware`` variant that can redirect all routes to targets.

    The parent class normalizes target keys to ``/<prefix>/``; this variant
    keeps the keys as given, so a single ``"/"`` target can proxy everything.
    """

    def __init__(
        self,
        app: WSGIApplication,
        targets: t.Mapping[str, dict[str, t.Any]],
        chunk_size: int = 2 << 13,
        timeout: int = 10,
    ) -> None:
        super().__init__(app, targets, chunk_size, timeout)

        def _set_defaults(opts: dict[str, t.Any]) -> dict[str, t.Any]:
            # Same option defaults ProxyMiddleware applies, applied here
            # because we rebuild self.targets without its key rewriting.
            opts.setdefault("remove_prefix", False)
            opts.setdefault("host", "<auto>")
            opts.setdefault("headers", {})
            opts.setdefault("ssl_context", None)
            return opts

        self.targets = {
            f"{k}": _set_defaults(v) for k, v in targets.items()
        }

    def __call__(
        self, environ: WSGIEnvironment, start_response: StartResponse
    ) -> t.Iterable[bytes]:
        # werkzeug >= 3.0 removed the charset/errors parameters from
        # get_path_info(); it now always decodes UTF-8 with "replace",
        # which is exactly what the old call asked for.
        path = get_path_info(environ)
        app = self.app
        for prefix, opts in self.targets.items():
            if path.startswith(prefix):
                app = self.proxy_to(opts, path, prefix)
                break
        return app(environ, start_response)
| import time | ||
| import typing as t | ||
| from werkzeug._internal import _logger # noqa | ||
| from werkzeug.serving import WSGIRequestHandler, _ansi_style, \ | ||
| _log_add_style | ||
| from werkzeug.urls import uri_to_iri | ||
| import logging | ||
class CustomWSGIRequestHandler(WSGIRequestHandler):
    """Request handler with ISO-like timestamps and colored status codes."""

    @staticmethod
    def log_date_time_string():
        """Return the current time formatted for logging."""
        return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))

    def log_request(
        self,
        code: t.Union[int, str] = "-",
        size: t.Union[int, str] = "-",
    ) -> None:
        """Log the request line, coloring the status code by its class."""
        try:
            msg = f"[{self.command}] {uri_to_iri(self.path)}"
        except AttributeError:
            # path isn't set if the requestline was bad
            msg = self.requestline

        code = str(code)
        # Everything except a plain 200 goes out as "warning" so that
        # successful requests can be filtered away easily.
        log_type = "warning" if code != "200" else "info"

        if _log_add_style:
            if code == "200":            # 2xx - Success: leave uncolored
                pass
            elif code == "304":          # 304 - Resource Not Modified
                code = _ansi_style(code, "cyan")
            elif code == "404":          # 404 - Resource Not Found
                code = _ansi_style(code, "yellow")
            elif code.startswith("1"):   # 1xx - Informational
                code = _ansi_style(code, "bold")
            elif code.startswith("3"):   # 3xx - Redirection
                code = _ansi_style(code, "green")
            elif code.startswith("4"):   # 4xx - Client Error
                code = _ansi_style(code, "bold", "red")
            else:                        # 5xx, or any other response
                code = _ansi_style(code, "bold", "red")

        self.log(log_type, '[%s] %s', code, msg)

    def log(self, log_type: str, message: str, *args) -> None:
        """Emit via werkzeug's logger, creating it lazily on first use."""
        global _logger
        if _logger is None:
            # Mirror werkzeug's lazy logger setup so output appears even
            # when the application never configured logging itself.
            _logger = logging.getLogger("werkzeug")
            _logger.setLevel(logging.INFO)
            _logger.addHandler(logging.StreamHandler())
        formatted = message % args
        getattr(_logger, log_type)(f"[{self.log_date_time_string()}] {formatted}")
| import mimetypes | ||
| import re | ||
| import time | ||
| import typing as t | ||
| from wsgiref.types import StartResponse, WSGIApplication, WSGIEnvironment | ||
| from werkzeug.http import http_date, is_resource_modified | ||
| from werkzeug.middleware.shared_data import SharedDataMiddleware | ||
| from werkzeug.utils import get_content_type | ||
| from werkzeug.wsgi import get_path_info, wrap_file | ||
class SharedData(SharedDataMiddleware):
    """``SharedDataMiddleware`` variant that also matches exports by regex."""

    def __init__(
        self,
        app: WSGIApplication,
        exports: (
            dict[str, str | tuple[str, str]]
            | t.Iterable[tuple[str, str | tuple[str, str]]]
        ),
        disallow: None = None,
        cache: bool = True,
        cache_timeout: int = 60 * 60 * 12,
        fallback_mimetype: str = "application/octet-stream",
    ) -> None:
        # Keep the raw mapping: the parent class rewrites ``exports`` into
        # (path, loader) pairs, but the regex matching below needs the
        # original replacement targets.
        # NOTE(review): assumes ``exports`` is a dict here (``.copy()`` and
        # key lookup below) — confirm no caller passes a plain iterable.
        self.org_exports = exports.copy()
        super().__init__(app, exports, disallow, cache, cache_timeout,
                         fallback_mimetype)

    def __call__(
        self, environ: WSGIEnvironment, start_response: StartResponse
    ) -> t.Iterable[bytes]:
        path = get_path_info(environ)
        file_loader = None
        for search_path, loader in self.exports:
            # First treat the export key as a regex and inject the real path.
            if re.match(search_path, path):
                # ``count`` keyword: positional count is deprecated (3.13+).
                real_path = re.sub(search_path, self.org_exports[search_path],
                                   path, count=1)
                real_filename, file_loader = self.get_file_loader(real_path)(None)
                if file_loader is not None:
                    break
            # Then fall back to the parent class' exact / prefix matching.
            if search_path == path:
                real_filename, file_loader = loader(None)
                if file_loader is not None:
                    break
            if not search_path.endswith("/"):
                search_path += "/"
            if path.startswith(search_path):
                real_filename, file_loader = loader(path[len(search_path):])
                if file_loader is not None:
                    break
        if file_loader is None or not self.is_allowed(real_filename):  # noqa
            return self.app(environ, start_response)
        guessed_type = mimetypes.guess_type(real_filename)  # type: ignore
        mime_type = get_content_type(guessed_type[0] or self.fallback_mimetype,
                                     "utf-8")
        try:
            f, mtime, file_size = file_loader()
        except OSError:
            # File vanished or is unreadable: treat as a miss and let the
            # wrapped app produce the 404 (upstream werkzeug catches OSError
            # here too; a bare except would also swallow KeyboardInterrupt).
            return self.app(environ, start_response)
        headers = [("Date", http_date())]
        if self.cache:
            etag = self.generate_etag(mtime, file_size,
                                      real_filename)  # type: ignore
            headers += [
                ("Etag", f'"{etag}"'),
                ("Cache-Control", f"max-age={self.cache_timeout}, public"),
            ]
            if not is_resource_modified(environ, etag, last_modified=mtime):
                f.close()
                start_response("304 Not Modified", headers)
                return []
            headers.append(("Expires", http_date(time.time() + self.cache_timeout)))
        else:
            headers.append(("Cache-Control", "public"))
        headers.extend(
            (
                ("Content-Type", mime_type),
                ("Content-Length", str(file_size)),
                ("Last-Modified", http_date(mtime)),
            )
        )
        start_response("200 OK", headers)
        return wrap_file(environ, f)
| import argparse | ||
| import os | ||
| import time | ||
def patch_gunicorn():
    """Patch gunicorn's worker base so it also tolerates ``NameError``.

    Gunicorn only catches ``SyntaxError`` while loading the WSGI app on a
    reload; a ``NameError`` would kill the worker instead of rendering the
    error. The patch edits the installed gunicorn source in place and is
    idempotent.
    """
    import gunicorn.workers.base

    with open(gunicorn.workers.base.__file__, 'r+') as file:
        content = file.read()
        if "except (SyntaxError, NameError) as e:" in content:
            return 0  # already patched
        # NOTE(review): the leading whitespace of these needles must match
        # gunicorn's source indentation exactly — confirm against the
        # installed gunicorn version.
        patched = content.replace(
            ' except SyntaxError as e:',
            ' except (SyntaxError, NameError) as e:'
        )
        if patched == content:
            # Needle not found (unexpected gunicorn version): leave the
            # file untouched instead of rewriting identical content.
            return 0
        file.seek(0)
        file.write(patched)
        # Guard against a future shorter replacement leaving stale bytes.
        file.truncate()
def set_env_vars(application_id: str, args: argparse.Namespace, app_yaml: dict):
    """Set the environment variables needed by the local runtime.

    Precedence (lowest to highest): app.yaml ``env_variables``, the fixed
    runtime variables, then ``--env_var`` CLI parameters.

    :param application_id: project id exported as the CLOUDSDK/GOOGLE project.
    :param args: parsed CLI arguments; only ``env_var`` is read here.
    :param app_yaml: parsed app.yaml contents.
    :raises TypeError: if ``env_variables`` in app.yaml is not a dict.
    :raises ValueError: if an ``--env_var`` entry lacks a ``=`` separator.
    """
    # First, merge the app.yaml into the environment so that the variables
    # from the CLI can overwrite it.
    if env_vars := app_yaml.get("env_variables"):
        if not isinstance(env_vars, dict):
            raise TypeError(
                f"env_variables section in app.yaml must be a dict. Got {type(env_vars)}")
        os.environ |= {k: str(v) for k, v in env_vars.items()}

    os.environ["GAE_ENV"] = "localdev"
    os.environ["CLOUDSDK_CORE_PROJECT"] = application_id
    os.environ["GOOGLE_CLOUD_PROJECT"] = application_id
    # GAE_VERSION changes on every start so caches keyed on it are busted.
    os.environ["GAE_VERSION"] = str(time.time())
    os.environ["GRPC_ENABLE_FORK_SUPPORT"] = "0"

    # Merge environment variables from CLI parameter (highest precedence).
    if args.env_var:
        cli_vars = {}
        for entry in args.env_var:
            key, sep, value = entry.partition("=")
            if not sep:
                # Fail with a readable message instead of dict()'s opaque
                # "dictionary update sequence element" ValueError.
                raise ValueError(
                    f"Invalid --env_var {entry!r}: expected KEY=VALUE")
            cli_vars[key] = value
        os.environ |= cli_vars
+5
-7
| Metadata-Version: 2.1 | ||
| Name: app_server | ||
| Version: 0.9.11 | ||
| Version: 0.1.0 | ||
| Summary: a lightweight web application launcher for gunicorn and static files. | ||
| Home-page: https://github.com/viur-framework/viur-app_server | ||
| Author: Andreas H. Kelch | ||
| Author-email: ak@mausbrand.de | ||
| Author: Arne Gudermann | ||
| Author-email: ag@mausbrand.de | ||
| Project-URL: Bug Tracker, https://github.com/viur-framework/viur-app_server/issues | ||
@@ -14,5 +14,5 @@ Classifier: Programming Language :: Python :: 3 | ||
| License-File: LICENSE | ||
| Requires-Dist: werkzeug~=2.3.7 | ||
| Requires-Dist: werkzeug~=3.1.2 | ||
| Requires-Dist: pyyaml~=6.0 | ||
| Requires-Dist: gunicorn~=21.2.0 | ||
| Requires-Dist: gunicorn~=23.0.0 | ||
| Requires-Dist: fs~=2.4.16 | ||
@@ -30,4 +30,2 @@ Requires-Dist: grpcio>=1.58.0 | ||
| ### External APIs | ||
| App Server allows app code to connect to external APIs, eg [Google Cloud Datastore](https://cloud.google.com/datastore/docs/), like normal. To use the [local datastore emulator](https://cloud.google.com/datastore/docs/tools/datastore-emulator), first start it, then in a separate shell [set the appropriate environment variables](https://cloud.google.com/datastore/docs/tools/datastore-emulator#setting_environment_variables) (notably `DATASTORE_EMULATOR_HOST` and `DATASTORE_DATASET`) to point your app to it before you run App Server. | ||
@@ -34,0 +32,0 @@ ### Dependencies |
+0
-2
@@ -10,4 +10,2 @@ ## App Server | ||
| ### External APIs | ||
| App Server allows app code to connect to external APIs, eg [Google Cloud Datastore](https://cloud.google.com/datastore/docs/), like normal. To use the [local datastore emulator](https://cloud.google.com/datastore/docs/tools/datastore-emulator), first start it, then in a separate shell [set the appropriate environment variables](https://cloud.google.com/datastore/docs/tools/datastore-emulator#setting_environment_variables) (notably `DATASTORE_EMULATOR_HOST` and `DATASTORE_DATASET`) to point your app to it before you run App Server. | ||
@@ -14,0 +12,0 @@ ### Dependencies |
+4
-6
| [metadata] | ||
| name = app_server | ||
| version = attr: app_server.__version__ | ||
| author = Andreas H. Kelch | ||
| author_email = ak@mausbrand.de | ||
| author = Arne Gudermann | ||
| author_email = ag@mausbrand.de | ||
| description = a lightweight web application launcher for gunicorn and static files. | ||
@@ -22,5 +22,5 @@ long_description = file: README.md | ||
| install_requires = | ||
| werkzeug~=2.3.7 | ||
| werkzeug~=3.1.2 | ||
| pyyaml~=6.0 | ||
| gunicorn~=21.2.0 | ||
| gunicorn~=23.0.0 | ||
| fs~=2.4.16 | ||
@@ -35,4 +35,2 @@ grpcio>=1.58.0 | ||
| app_server = app_server:main | ||
| gcloud-storage-emulator = app_server.storage:main | ||
| gcloud-tasks-emulator = app_server.tasks:main | ||
@@ -39,0 +37,0 @@ [egg_info] |
| [console_scripts] | ||
| app_server = app_server:main | ||
| gcloud-storage-emulator = app_server.storage:main | ||
| gcloud-tasks-emulator = app_server.tasks:main |
| Metadata-Version: 2.1 | ||
| Name: app_server | ||
| Version: 0.9.11 | ||
| Version: 0.1.0 | ||
| Summary: a lightweight web application launcher for gunicorn and static files. | ||
| Home-page: https://github.com/viur-framework/viur-app_server | ||
| Author: Andreas H. Kelch | ||
| Author-email: ak@mausbrand.de | ||
| Author: Arne Gudermann | ||
| Author-email: ag@mausbrand.de | ||
| Project-URL: Bug Tracker, https://github.com/viur-framework/viur-app_server/issues | ||
@@ -14,5 +14,5 @@ Classifier: Programming Language :: Python :: 3 | ||
| License-File: LICENSE | ||
| Requires-Dist: werkzeug~=2.3.7 | ||
| Requires-Dist: werkzeug~=3.1.2 | ||
| Requires-Dist: pyyaml~=6.0 | ||
| Requires-Dist: gunicorn~=21.2.0 | ||
| Requires-Dist: gunicorn~=23.0.0 | ||
| Requires-Dist: fs~=2.4.16 | ||
@@ -30,4 +30,2 @@ Requires-Dist: grpcio>=1.58.0 | ||
| ### External APIs | ||
| App Server allows app code to connect to external APIs, eg [Google Cloud Datastore](https://cloud.google.com/datastore/docs/), like normal. To use the [local datastore emulator](https://cloud.google.com/datastore/docs/tools/datastore-emulator), first start it, then in a separate shell [set the appropriate environment variables](https://cloud.google.com/datastore/docs/tools/datastore-emulator#setting_environment_variables) (notably `DATASTORE_EMULATOR_HOST` and `DATASTORE_DATASET`) to point your app to it before you run App Server. | ||
@@ -34,0 +32,0 @@ ### Dependencies |
@@ -1,5 +0,5 @@ | ||
| werkzeug~=2.3.7 | ||
| werkzeug~=3.1.2 | ||
| pyyaml~=6.0 | ||
| gunicorn~=21.2.0 | ||
| gunicorn~=23.0.0 | ||
| fs~=2.4.16 | ||
| grpcio>=1.58.0 |
@@ -7,2 +7,8 @@ LICENSE | ||
| src/app_server/__init__.py | ||
| src/app_server/app_wrapper.py | ||
| src/app_server/dispatcher.py | ||
| src/app_server/proxy.py | ||
| src/app_server/request_handler.py | ||
| src/app_server/shared_data.py | ||
| src/app_server/utils.py | ||
| src/app_server.egg-info/PKG-INFO | ||
@@ -13,18 +19,2 @@ src/app_server.egg-info/SOURCES.txt | ||
| src/app_server.egg-info/requires.txt | ||
| src/app_server.egg-info/top_level.txt | ||
| src/app_server/storage/__init__.py | ||
| src/app_server/storage/exceptions.py | ||
| src/app_server/storage/server.py | ||
| src/app_server/storage/settings.py | ||
| src/app_server/storage/storage.py | ||
| src/app_server/storage/handlers/__init__.py | ||
| src/app_server/storage/handlers/buckets.py | ||
| src/app_server/storage/handlers/objects.py | ||
| src/app_server/tasks/__init__.py | ||
| src/app_server/tasks/server.py | ||
| src/app_server/tasks/proto/__init__.py | ||
| src/app_server/tasks/proto/cloudtasks_pb2.py | ||
| src/app_server/tasks/proto/cloudtasks_pb2_grpc.py | ||
| src/app_server/tasks/proto/queue_pb2.py | ||
| src/app_server/tasks/proto/target_pb2.py | ||
| src/app_server/tasks/proto/task_pb2.py | ||
| src/app_server.egg-info/top_level.txt |
+96
-342
| import argparse | ||
| import logging | ||
| import mimetypes | ||
| import os | ||
| from pathlib import Path | ||
| import re | ||
| import subprocess | ||
| import sys | ||
| import time | ||
| import typing as t | ||
| from pathlib import Path | ||
| import yaml | ||
| from werkzeug._internal import _logger # noqa | ||
| from werkzeug.http import http_date, is_resource_modified | ||
| from werkzeug.middleware.dispatcher import DispatcherMiddleware | ||
| from werkzeug.middleware.http_proxy import ProxyMiddleware | ||
| from werkzeug.middleware.shared_data import SharedDataMiddleware | ||
| from werkzeug.serving import run_simple, WSGIRequestHandler, _ansi_style, \ | ||
| _log_add_style | ||
| from werkzeug.urls import uri_to_iri | ||
| from werkzeug.utils import get_content_type | ||
| from werkzeug.wrappers import Request, Response | ||
| from werkzeug.wsgi import get_path_info, wrap_file | ||
| from werkzeug.serving import run_simple | ||
| __version__ = "0.9.11" | ||
| from . import utils | ||
| from .app_wrapper import AppWrapper | ||
| from .dispatcher import Dispatcher | ||
| from .proxy import Proxy | ||
| from .request_handler import CustomWSGIRequestHandler | ||
| from .shared_data import SharedData | ||
| __version__ = "0.1.0" | ||
| subprocesses = [] | ||
| class MainWSGIRequestHandler(WSGIRequestHandler): | ||
| def log_date_time_string(self): | ||
| """Return the current time formatted for logging.""" | ||
| now = time.time() | ||
| year, month, day, hh, mm, ss, x, y, z = time.localtime(now) | ||
| s = "%04d-%02d-%02d %02d:%02d:%02d" % ( | ||
| year, month, day, hh, mm, ss) | ||
| return s | ||
| def log_request( | ||
| self, | ||
| code: t.Union[int, str] = "-", | ||
| size: t.Union[int, str] = "-", | ||
| ) -> None: | ||
| """coloring the status code""" | ||
| try: | ||
| path = uri_to_iri(self.path) | ||
| msg = f"[{self.command}] {path}" | ||
| except AttributeError: | ||
| # path isn't set if the requestline was bad | ||
| msg = self.requestline | ||
| code = str(code) | ||
| log_type = "info" | ||
| if code != "200": # possibility to filter 200 requests | ||
| log_type = "warning" | ||
| if _log_add_style: | ||
| if code[0] == "1": # 1xx - Informational | ||
| code = _ansi_style(code, "bold") | ||
| elif code == "200": # 2xx - Success | ||
| pass | ||
| elif code == "304": # 304 - Resource Not Modified | ||
| code = _ansi_style(code, "cyan") | ||
| elif code[0] == "3": # 3xx - Redirection | ||
| code = _ansi_style(code, "green") | ||
| elif code == "404": # 404 - Resource Not Found | ||
| code = _ansi_style(code, "yellow") | ||
| elif code[0] == "4": # 4xx - Client Error | ||
| code = _ansi_style(code, "bold", "red") | ||
| else: # 5xx, or any other response | ||
| code = _ansi_style(code, "bold", "red") | ||
| self.log(log_type, '[%s] %s', code, msg) | ||
| def log(self, type: str, message: str, *args) -> None: | ||
| global _logger | ||
| if _logger is None: | ||
| _logger = logging.getLogger("werkzeug") | ||
| _logger.setLevel(logging.INFO) | ||
| _logger.addHandler(logging.StreamHandler()) | ||
| getattr(_logger, type)( | ||
| f"[{self.log_date_time_string()}] {message % args}") | ||
| class WrappingApp: | ||
| """simple wrapping app""" | ||
| def __init__(self, config): | ||
| pass | ||
| def wsgi_app(self, environ, start_response): | ||
| request = Request(environ) | ||
| response = Response(f'Path not found or invalid: {request.path}', | ||
| status=404) | ||
| return response(environ, start_response) | ||
| def __call__(self, environ, start_response): | ||
| return self.wsgi_app(environ, start_response) | ||
| class Proxy(ProxyMiddleware): | ||
| """this addition allows to redirect all routes to given targets""" | ||
| def __init__(self, app, targets, chunk_size=2 << 13, timeout=10): | ||
| super().__init__(app, targets, chunk_size, timeout) | ||
| def _set_defaults(opts): | ||
| opts.setdefault("remove_prefix", False) | ||
| opts.setdefault("host", "<auto>") | ||
| opts.setdefault("headers", {}) | ||
| opts.setdefault("ssl_context", None) | ||
| return opts | ||
| self.targets = { | ||
| f"{k}": _set_defaults(v) for k, v in targets.items() | ||
| } | ||
| def __call__(self, environ: "WSGIEnvironment", | ||
| start_response: "StartResponse") -> t.Iterable[bytes]: | ||
| path = get_path_info(environ, charset='utf-8', errors='replace') | ||
| app = self.app | ||
| for prefix, opts in self.targets.items(): | ||
| if path.startswith(prefix): | ||
| app = self.proxy_to(opts, path, prefix) | ||
| break | ||
| return app(environ, start_response) | ||
| class Dispatcher(DispatcherMiddleware): | ||
| """use regex to find a matching route""" | ||
| def __call__(self, environ, start_response): | ||
| app = self.mounts["/"] | ||
| for route, _app in self.mounts.items(): | ||
| if re.match(route, environ["PATH_INFO"]): | ||
| app = _app | ||
| break | ||
| return app(environ, start_response) | ||
| class SharedData(SharedDataMiddleware): | ||
| """use regex to find a matching files""" | ||
| def __init__( | ||
| self, | ||
| app, | ||
| exports, | ||
| disallow: None = None, | ||
| cache: bool = True, | ||
| cache_timeout: int = 60 * 60 * 12, | ||
| fallback_mimetype: str = "application/octet-stream", | ||
| ) -> None: | ||
| self.org_exports = exports.copy() | ||
| super().__init__(app, exports, disallow, cache, cache_timeout, | ||
| fallback_mimetype) | ||
| def __call__(self, environ, start_response): | ||
| path = get_path_info(environ) | ||
| file_loader = None | ||
| for search_path, loader in self.exports: | ||
| # lets check for regex, and inject real_path | ||
| if re.match(search_path, path): | ||
| real_path = re.sub(search_path, self.org_exports[search_path], | ||
| path, 1) | ||
| real_filename, file_loader = self.get_file_loader(real_path)( | ||
| None) | ||
| if file_loader is not None: | ||
| break | ||
| if search_path == path: | ||
| real_filename, file_loader = loader(None) | ||
| if file_loader is not None: | ||
| break | ||
| if not search_path.endswith("/"): | ||
| search_path += "/" | ||
| if path.startswith(search_path): | ||
| real_filename, file_loader = loader(path[len(search_path):]) | ||
| if file_loader is not None: | ||
| break | ||
| if file_loader is None or not self.is_allowed(real_filename): # noqa | ||
| return self.app(environ, start_response) | ||
| guessed_type = mimetypes.guess_type(real_filename) # type: ignore | ||
| mime_type = get_content_type(guessed_type[0] or self.fallback_mimetype, | ||
| "utf-8") | ||
| try: | ||
| f, mtime, file_size = file_loader() | ||
| except: | ||
| return self.app(environ, start_response) # 404 | ||
| headers = [("Date", http_date())] | ||
| if self.cache: | ||
| timeout = self.cache_timeout | ||
| etag = self.generate_etag(mtime, file_size, | ||
| real_filename) # type: ignore | ||
| headers += [ | ||
| ("Etag", f'"{etag}"'), | ||
| ("Cache-Control", f"max-age={timeout}, public"), | ||
| ] | ||
| if not is_resource_modified(environ, etag, last_modified=mtime): | ||
| f.close() | ||
| start_response("304 Not Modified", headers) | ||
| return [] | ||
| headers.append(("Expires", http_date(time.time() + timeout))) | ||
| else: | ||
| headers.append(("Cache-Control", "public")) | ||
| headers.extend( | ||
| ( | ||
| ("Content-Type", mime_type), | ||
| ("Content-Length", str(file_size)), | ||
| ("Last-Modified", http_date(mtime)), | ||
| ) | ||
| ) | ||
| start_response("200 OK", headers) | ||
| return wrap_file(environ, f) | ||
| def start_server( | ||
@@ -239,3 +28,3 @@ host: str, | ||
| gunicorn_port: int, | ||
| app_folder: str, | ||
| app_folder: Path, | ||
| app_yaml: dict, | ||
@@ -246,5 +35,4 @@ timeout: int, | ||
| """use the dispatcherMiddleware to connect SharedDataMiddleware and ProxyMiddleware with the wrapping app.""" | ||
| app = WrappingApp({}) | ||
| app = AppWrapper() | ||
| apps = {} | ||
| # make shared middlewares for static files as configured in app.yaml | ||
@@ -266,61 +54,22 @@ for route in app_yaml["handlers"]: | ||
| apps["/"] = Proxy(app.wsgi_app, { | ||
| "/": { | ||
| "target": f"{protocol}://{host}:{gunicorn_port}/", | ||
| "host": None | ||
| } | ||
| }, timeout=timeout) | ||
| apps["/"] = Proxy( | ||
| app.wsgi_app, | ||
| { | ||
| "/": { | ||
| "target": f"{protocol}://{host}:{gunicorn_port}/", | ||
| "host": None | ||
| } | ||
| }, | ||
| timeout=timeout | ||
| ) | ||
| app.wsgi_app = Dispatcher(app.wsgi_app, apps) | ||
| run_simple(host, port, app, use_debugger=False, use_reloader=True, | ||
| threaded=True, request_handler=MainWSGIRequestHandler) | ||
| threaded=True, request_handler=CustomWSGIRequestHandler) | ||
| def set_env_vars(application_id: str, args: argparse.Namespace, app_yaml: dict): | ||
| """set necessary environment variables""" | ||
| # First, merge the app.yaml into the environment so that the variables | ||
| # from the CLI can overwrite it. | ||
| if env_vars := app_yaml.get("env_variables"): | ||
| if not isinstance(env_vars, dict): | ||
| raise TypeError( | ||
| f"env_variables section in app.yaml must be a dict. Got {type(env_vars)}") | ||
| os.environ |= {k: str(v) for k, v in app_yaml["env_variables"].items()} | ||
| os.environ["GAE_ENV"] = "localdev" | ||
| os.environ["CLOUDSDK_CORE_PROJECT"] = application_id | ||
| os.environ["GOOGLE_CLOUD_PROJECT"] = application_id | ||
| os.environ["GAE_VERSION"] = str(time.time()) | ||
| os.environ["GRPC_ENABLE_FORK_SUPPORT"] = "0" | ||
| if args.storage: | ||
| os.environ["STORAGE_EMULATOR_HOST"] = \ | ||
| f"http://{args.host}:{args.storage_port}" | ||
| if args.tasks: | ||
| os.environ["TASKS_EMULATOR"] = f"{args.host}:{args.tasks_port}" | ||
| # Merge environment variables from CLI parameter | ||
| if args.env_var: | ||
| os.environ |= dict(v.split("=", 1) for v in args.env_var) | ||
| def patch_gunicorn(): | ||
| import gunicorn.workers.base | ||
| with open(gunicorn.workers.base.__file__, 'r+') as file: | ||
| content = file.read() | ||
| if "except (SyntaxError, NameError) as e:" in content: | ||
| return 0 | ||
| file.seek(0) | ||
| file.write(content.replace( | ||
| ' except SyntaxError as e:', | ||
| ' except (SyntaxError, NameError) as e:' | ||
| )) | ||
| def start_gunicorn( | ||
| args: argparse.Namespace, | ||
| app_yaml: dict, | ||
| app_folder: str, | ||
| app_folder: Path, | ||
| ) -> None: | ||
@@ -331,3 +80,3 @@ # Gunicorn call command | ||
| "entrypoint", | ||
| "gunicorn -b :$PORT --disable-redirect-access-to-syslog main:app" | ||
| f"gunicorn -b :$PORT --disable-redirect-access-to-syslog main:app" | ||
| ) | ||
@@ -361,3 +110,3 @@ entrypoint = entrypoint.replace(f"$PORT", str(args.gunicorn_port)) | ||
| """ | ||
| ap = argparse.ArgumentParser( | ||
| argument_parser = argparse.ArgumentParser( | ||
| description="alternative dev_appserver", | ||
@@ -367,42 +116,69 @@ epilog=f"Version: {__version__}" | ||
| ap.add_argument("distribution_folder", help="Path of the application") | ||
| ap.add_argument("--appyaml", default="app.yaml", | ||
| help="Path to app.yaml file (relative to the distribution_folder)") | ||
| ap.add_argument( | ||
| '-A', '--application', action='store', dest='app_id', required=True, | ||
| help='Set the application id') | ||
| ap.add_argument('--host', default="localhost", | ||
| help='host name to which application modules should bind') | ||
| ap.add_argument('--entrypoint', type=str, default=None, | ||
| help='The entrypoint is the basic gunicorn command. By default, it\'s taken from app.yaml. ' | ||
| 'This parameter can be used to set a different entrypoint. ' | ||
| 'To provide this parameter via ViUR-CLI, you have to double quote it: ' | ||
| ' --entrypoint "\'gunicorn -b :\$PORT --disable-redirect-access-to-syslog main:app\'"') | ||
| ap.add_argument('--port', type=int, default=8080, | ||
| help='port to which we bind the application') | ||
| ap.add_argument('--gunicorn_port', type=int, default=8090, | ||
| help='internal gunicorn port') | ||
| ap.add_argument('--workers', '--worker', type=int, default=1, | ||
| help='amount of gunicorn workers') | ||
| ap.add_argument('--threads', type=int, default=5, | ||
| help='amount of gunicorn threads') | ||
| ap.add_argument('--timeout', type=int, default=60, | ||
| help='Time is seconds before gunicorn abort a request') | ||
| ap.add_argument('-V', '--version', action='version', | ||
| version='%(prog)s ' + __version__) | ||
| argument_parser.add_argument( | ||
| "distribution_folder", | ||
| help="Path of the application" | ||
| ) | ||
| argument_parser.add_argument( | ||
| "--appyaml", | ||
| default="app.yaml", | ||
| help="Path to app.yaml file (relative to the distribution_folder)" | ||
| ) | ||
| argument_parser.add_argument( | ||
| '-A', '--application', | ||
| action='store', | ||
| dest='app_id', | ||
| required=True, | ||
| help='Set the application id' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--host', | ||
| default="localhost", | ||
| help='host name to which application modules should bind' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--entrypoint', | ||
| type=str, | ||
| default=None, | ||
| help='The entrypoint is the basic gunicorn command. By default, it\'s taken from app.yaml. ' | ||
| 'This parameter can be used to set a different entrypoint. ' | ||
| 'To provide this parameter via ViUR-CLI, you have to double quote it: ' | ||
| ' --entrypoint "\'gunicorn -b :$PORT --disable-redirect-access-to-syslog main:app\'"' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--port', | ||
| type=int, | ||
| default=8080, | ||
| help='port to which we bind the application' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--gunicorn_port', | ||
| type=int, | ||
| default=8090, | ||
| help='internal gunicorn port' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--workers', '--worker', | ||
| type=int, | ||
| default=1, | ||
| help='amount of gunicorn workers' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--threads', | ||
| type=int, | ||
| default=5, | ||
| help='amount of gunicorn threads' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '--timeout', | ||
| type=int, | ||
| default=60, | ||
| help='Time is seconds before gunicorn abort a request' | ||
| ) | ||
| argument_parser.add_argument( | ||
| '-V', '--version', | ||
| action='version', | ||
| version=f'%(prog)s {__version__}' | ||
| ) | ||
| ap.add_argument('--storage', default=False, action="store_true", | ||
| dest="storage", help="also start Storage Emulator") | ||
| ap.add_argument('--storage_port', type=int, default=8092, | ||
| help='internal Storage Emulator Port') | ||
| ap.add_argument('--tasks', default=False, action='store_true', dest="tasks", | ||
| help='also start Task-Queue Emulator') | ||
| ap.add_argument('--tasks_port', type=int, default=8091, | ||
| help='internal Task-Queue Emulator Port') | ||
| ap.add_argument('--cron', default=False, action='store_true', dest="cron", | ||
| help='also start Cron Emulator') | ||
| ap.add_argument( | ||
| argument_parser.add_argument( | ||
| '--env_var', metavar="KEY=VALUE", nargs="*", | ||
@@ -414,3 +190,3 @@ help="Set environment variable for the runtime. Each env_var is in " | ||
| args = ap.parse_args() | ||
| args = argument_parser.parse_args() | ||
@@ -423,5 +199,4 @@ app_folder = Path(args.distribution_folder) | ||
| set_env_vars(args.app_id, args, app_yaml) | ||
| patch_gunicorn() | ||
| utils.set_env_vars(args.app_id, args, app_yaml) | ||
| utils.patch_gunicorn() | ||
| # Check for correct runtime | ||
@@ -432,25 +207,4 @@ current_runtime = f"python{sys.version_info.major}{sys.version_info.minor}" | ||
| if "WERKZEUG_RUN_MAIN" in os.environ and os.environ["WERKZEUG_RUN_MAIN"]: | ||
| if os.environ.get("WERKZEUG_RUN_MAIN"): | ||
| # only start subprocesses wenn reloader starts | ||
| if args.storage: | ||
| storage_subprocess = subprocess.Popen( | ||
| f"gcloud-storage-emulator start --port={args.storage_port}" | ||
| f" --default-bucket={args.app_id}.appspot.com".split()) | ||
| subprocesses.append(storage_subprocess) | ||
| if args.tasks and os.path.exists( | ||
| os.path.join(app_folder, 'queue.yaml')): | ||
| cron = "" | ||
| if args.cron: | ||
| cron = f"--cron-yaml={os.path.join(app_folder, 'cron.yaml')}" | ||
| tasks_subprocess = subprocess.Popen( | ||
| f"gcloud-tasks-emulator start -p={args.tasks_port} -t={args.port} {cron}" | ||
| f" --queue-yaml={os.path.join(app_folder, 'queue.yaml')}" | ||
| f" --queue-yaml-project={args.app_id} --queue-yaml-location=local -r 50".split()) | ||
| subprocesses.append(tasks_subprocess) | ||
| start_gunicorn(args, app_yaml, app_folder) | ||
@@ -457,0 +211,0 @@ |
| #!/usr/bin/env python | ||
| """ | ||
| GOOGLE STORAGE EMULATOR | ||
| Code was taken from: | ||
| https://gitlab.com/potato-oss/google-cloud/gcloud-storage-emulator | ||
| If you're interested in commercial support, training, or consultancy | ||
| then go ahead and contact the creators at opensource@potatolondon.com | ||
This modified version is slightly patched to work with the ViUR framework:
- uploads are not received via PUT
- uploads may not have a content_type; in that case the raw data is used
| """ | ||
| import argparse | ||
| import logging | ||
| import sys | ||
| from .handlers.buckets import create_bucket | ||
| from .server import create_server | ||
| from .storage import Storage | ||
# One after gcloud-task-emulator one
DEFAULT_PORT = 9023
# Bind address used when --host / -H is not given on the command line.
DEFAULT_HOST = "localhost"
def run_server(host, port, memory=False, default_bucket=None):
    """Build the emulator server and block until it exits; returns its exit status."""
    return create_server(host, port, memory, default_bucket).run()
def wipe(keep_buckets=False):
    """Wipe all locally stored data, optionally keeping buckets; returns exit code 0."""
    print("Wiping...")
    emulator = create_server(None, None, False)
    emulator.wipe(keep_buckets=keep_buckets)
    print("Done.")
    return 0
def prepare_args_parser():
    """Build the CLI parser with the start / wipe / create_bucket subcommands.

    Returns:
        tuple -- (parser, subparsers) so the caller can validate the chosen
        subcommand against ``subparsers.choices``.
    """
    parser = argparse.ArgumentParser(description="Google Cloud Storage Emulator")
    subparsers = parser.add_subparsers(title="subcommands", dest="subcommand")

    start = subparsers.add_parser("start", help="start the emulator")
    start.add_argument(
        "--port", type=int, help="the port to run the server on", default=DEFAULT_PORT
    )
    start.add_argument(
        "-H", "--host", help="the host to run the server on", default=DEFAULT_HOST
    )
    start.add_argument(
        "--default-bucket", help="The default bucket. If provided, bucket will be created automatically"
    )
    start.add_argument("-q", "--quiet", action="store_true", default=False, help="only outputs critical level logging")
    start.add_argument("-M", "--no-store-on-disk", action="store_true", default=False, help="use in-memory storage")
    # NOTE(review): --data-dir is parsed but never forwarded by main() — confirm intent.
    start.add_argument("-D", "--data-dir", help="directory to use as the storage root")

    # Renamed local (was `wipe`) so it no longer shadows the module-level wipe().
    wipe_cmd = subparsers.add_parser("wipe", help="Wipe the local data")
    # Fix: this option is used as a yes/no flag, but was declared without
    # action="store_true", so `wipe --keep-buckets` errored with
    # "expected one argument".
    wipe_cmd.add_argument(
        "--keep-buckets", action="store_true", default=False,
        help="If provided the data will be wiped but the existing buckets are kept"
    )

    # Renamed local (was `create_bucket`) so it no longer shadows the imported handler.
    create_bucket_cmd = subparsers.add_parser("create_bucket", help="create bucket")
    create_bucket_cmd.add_argument(
        "-n", "--name",
        help="Name of the new bucket"
    )
    return parser, subparsers
def main():
    """CLI entry point: parse arguments and run the selected subcommand."""
    parser, subparsers = prepare_args_parser()
    args = parser.parse_args()
    # No subcommand given (dest is None) -> show usage and fail.
    if args.subcommand not in subparsers.choices.keys():
        parser.print_usage()
        sys.exit(1)
    if args.subcommand == "wipe":
        # Destructive operation: require explicit interactive confirmation.
        answer = input("This operation will IRREVERSIBLY DELETE all your data. Do you wish to proceed? [y/N] ").lower()
        if answer in ("y", "ye", "yes"):
            sys.exit(wipe(keep_buckets=args.keep_buckets))
        else:
            print("wipe command cancelled")
            sys.exit(1)
    if args.subcommand == "create_bucket":
        storage = Storage()
        create_bucket(args.name, storage)
        # NOTE(review): exits with status 1 even when the bucket was created —
        # looks like it should be 0 on success; confirm before changing.
        sys.exit(1)
    # Only the "start" subcommand reaches this point.
    root = logging.getLogger("")
    stream_handler = logging.StreamHandler()
    root.addHandler(stream_handler)
    if args.quiet:
        root.setLevel(logging.CRITICAL)
    else:
        root.setLevel(logging.DEBUG)
    # args.no_store_on_disk maps to the server's in-memory mode.
    sys.exit(run_server(args.host, args.port, args.no_store_on_disk, args.default_bucket))
if __name__ == "__main__":
    main()
class NotFound(Exception):
    """Raised when a requested bucket or object does not exist in the emulated storage."""
    pass
class Conflict(Exception):
    """Raised when an operation conflicts with existing state (e.g. deleting a non-empty bucket)."""
    pass
| import logging | ||
| from datetime import datetime | ||
| from http import HTTPStatus | ||
| from .. import settings | ||
| from ..exceptions import NotFound, Conflict | ||
logger = logging.getLogger("api.bucket")
# Canned GCS-style error body returned when a bucket name already exists (HTTP 409).
CONFLICT = {
    "error": {
        "errors": [
            {
                "domain": "global",
                "reason": "conflict",
                "message": "You already own this bucket. Please select another name."
            }
        ],
        "code": 409,
        "message": "You already own this bucket. Please select another name."
    }
}
# Canned GCS-style error body returned when no bucket name was supplied (HTTP 400).
BAD_REQUEST = {
    "error": {
        "errors": [
            {
                "domain": "global",
                "reason": "invalid",
                "message": "Empty bucket name"
            }
        ],
        "code": 400,
        "message": "Empty bucket name"
    }
}
def _make_bucket_resource(bucket_name):
    """Build a GCS-like Bucket resource dict for *bucket_name* with stubbed metadata."""
    timestamp = str(datetime.now())
    resource = {
        "kind": "storage#bucket",
        "id": bucket_name,
        "selfLink": "{}/b/{}".format(settings.API_ENDPOINT, bucket_name),
        "projectNumber": "1234",
        "name": bucket_name,
        "timeCreated": timestamp,
        "updated": timestamp,
        "metageneration": "1",
        "iamConfiguration": {
            "bucketPolicyOnly": {
                "enabled": False
            },
            "uniformBucketLevelAccess": {
                "enabled": False
            }
        },
        "location": "US",
        "locationType": "multi-region",
        "storageClass": "STANDARD",
        "etag": "CAE="
    }
    return resource
def get(request, response, storage, *args, **kwargs):
    """Return the bucket resource named in the route params, or 404 if unknown.

    Fix: the original looked the bucket up twice in ``storage.buckets``;
    a single lookup now serves both the existence check and the response.
    """
    name = request.params.get("bucket_name")
    bucket = storage.buckets.get(name) if name else None
    if bucket:
        response.json(bucket)
    else:
        response.status = HTTPStatus.NOT_FOUND
def ls(request, response, storage, *args, **kwargs):
    """Answer with every known bucket as a GCS-style buckets collection."""
    logger.info("[BUCKETS] List received")
    payload = {
        "kind": "storage#buckets",
        "items": list(storage.buckets.values()),
    }
    response.json(payload)
def create_bucket(name, storage):
    """Create *name* in *storage* and return its resource; False if it already exists."""
    if storage.get_bucket(name):
        return False
    resource = _make_bucket_resource(name)
    storage.create_bucket(name, resource)
    return resource
def insert(request, response, storage, *args, **kwargs):
    """Create a bucket from the request body; 409 if it exists, 400 without a name.

    Fix: on success the original created the bucket TWICE — once via
    ``create_bucket`` and then again with a fresh ``_make_bucket_resource`` +
    ``storage.create_bucket`` — overwriting the just-created resource. The
    resource returned by ``create_bucket`` is now used directly.
    """
    name = request.data.get("name")
    if not name:
        response.status = HTTPStatus.BAD_REQUEST
        response.json(BAD_REQUEST)
        return
    logger.debug("[BUCKETS] Received request to create bucket with name {}".format(name))
    bucket = create_bucket(name, storage)
    if not bucket:
        # create_bucket returns False when the bucket already exists.
        response.status = HTTPStatus.CONFLICT
        response.json(CONFLICT)
    else:
        response.json(bucket)
def delete(request, response, storage, *args, **kwargs):
    """Delete a bucket: 400 without a name, 404 when unknown, 409 when not empty."""
    bucket_name = request.params.get("bucket_name")
    if not bucket_name:
        response.status = HTTPStatus.BAD_REQUEST
        return response.json(BAD_REQUEST)
    try:
        storage.delete_bucket(bucket_name)
    except NotFound:
        response.status = HTTPStatus.NOT_FOUND
    except Conflict:
        response.status = HTTPStatus.CONFLICT
| import math | ||
| import time | ||
| import urllib.parse | ||
| from datetime import datetime | ||
| from http import HTTPStatus | ||
| from ..exceptions import NotFound | ||
| def _make_object_resource(base_url, bucket_name, object_name, content_type, content_length): | ||
| time_id = math.floor(time.time()) | ||
| now = str(datetime.now()) | ||
| return { | ||
| "kind": "storage#object", | ||
| "id": "{}/{}/{}".format(bucket_name, object_name, time_id), | ||
| "selfLink": "/storage/v1/b/{}/o/{}".format(bucket_name, object_name), | ||
| "name": object_name, | ||
| "bucket": bucket_name, | ||
| "generation": str(time_id), | ||
| "metageneration": "1", | ||
| "contentType": content_type, | ||
| "timeCreated": now, | ||
| "updated": now, | ||
| "storageClass": "STANDARD", | ||
| "timeStorageClassUpdated": now, | ||
| "size": content_length, | ||
| "md5Hash": "NOT_IMPLEMENTED", | ||
| "mediaLink": "{}/download/storage/v1/b/{}/o/{}?generation={}&alt=media".format( | ||
| base_url, | ||
| bucket_name, | ||
| object_name, | ||
| time_id, | ||
| ), | ||
| "crc32c": "lj+ong==", | ||
| "etag": "CO6Q4+qNnOcCEAE=" | ||
| } | ||
def _multipart_upload(request, response, storage):
    """Handle uploadType=multipart: persist the body and reply with its Object resource."""
    bucket_name = request.params["bucket_name"]
    object_name = request.data["meta"]["name"]
    content = request.data["content"]
    resource = _make_object_resource(
        request.base_url,
        bucket_name,
        object_name,
        request.data["content-type"],
        str(len(content)),
    )
    storage.create_file(bucket_name, object_name, content, resource)
    response.json(resource)
def _create_resumable_upload(request, response, storage):
    """Open a resumable-upload session and advertise its URL via the Location header.

    ViUR patch: when the body is already raw bytes (the client skipped the
    JSON session-initiation step), treat the call as the data upload itself.
    """
    content_type = request.get_header('x-upload-content-type', 'application/octet-stream')
    content_length = request.get_header('x-upload-content-length', None)
    if isinstance(request.data, bytes):
        return upload_partial(request, response, storage)
    bucket_name = request.params["bucket_name"]
    object_name = request.data["name"]
    resource = _make_object_resource(
        request.base_url,
        bucket_name,
        object_name,
        content_type,
        content_length,
    )
    upload_id = storage.create_resumable_upload(bucket_name, object_name, resource)
    session_param = urllib.parse.urlencode({'upload_id': upload_id})
    response["Location"] = request.full_url + "&{}".format(session_param)
def insert(request, response, storage, *args, **kwargs):
    """Dispatch an object-insert request on its uploadType query parameter."""
    upload_types = request.query.get("uploadType")
    if not upload_types:
        # Missing or empty uploadType -> 400, nothing else to do.
        response.status = HTTPStatus.BAD_REQUEST
        return
    dispatch = {
        "resumable": _create_resumable_upload,
        "multipart": _multipart_upload,
    }
    handler = dispatch.get(upload_types[0])
    if handler:
        return handler(request, response, storage)
def upload_partial(request, response, storage, *args, **kwargs):
    """Receive the data phase of a resumable upload; reply with the final Object resource."""
    upload_id = request.query.get("upload_id")[0]
    resource = storage.create_file_for_resumable_upload(upload_id, request.data)
    return response.json(resource)
def get(request, response, storage, *args, **kwargs):
    """Return an object's metadata, or its raw bytes when ?alt=media is given."""
    if request.query.get("alt") == ["media"]:
        return download(request, response, storage, *args, **kwargs)
    params = request.params
    try:
        response.json(storage.get_file_obj(params["bucket_name"], params["object_id"]))
    except NotFound:
        response.status = HTTPStatus.NOT_FOUND
def ls(request, response, storage, *args, **kwargs):
    """List the objects of a bucket, honouring optional prefix/delimiter filters."""
    bucket_name = request.params["bucket_name"]

    def _first(key):
        # Query values arrive as lists; only the first value is meaningful here.
        values = request.query.get(key)
        return values[0] if values else None

    try:
        files = storage.get_file_list(bucket_name, _first("prefix"), _first("delimiter"))
    except NotFound:
        response.status = HTTPStatus.NOT_FOUND
    else:
        response.json({
            "kind": "storage#object",
            "items": files
        })
def copy(request, response, storage, *args, **kwargs):
    """Server-side copy: duplicate an object's bytes and metadata to a destination."""
    src_bucket = request.params["bucket_name"]
    src_object = request.params["object_id"]
    try:
        src_meta = storage.get_file_obj(src_bucket, src_object)
    except NotFound:
        response.status = HTTPStatus.NOT_FOUND
        return
    dst_bucket = request.params["dest_bucket_name"]
    dst_object = request.params["dest_object_id"]
    dst_meta = _make_object_resource(
        request.base_url,
        dst_bucket,
        dst_object,
        src_meta["contentType"],
        src_meta["size"],
    )
    content = storage.get_file(src_bucket, src_object)
    storage.create_file(dst_bucket, dst_object, content, dst_meta)
    response.json(dst_meta)
def download(request, response, storage, *args, **kwargs):
    """Stream an object's raw content with its stored content type; 404 when missing."""
    bucket_name = request.params["bucket_name"]
    object_id = request.params["object_id"]
    try:
        content = storage.get_file(bucket_name, object_id)
        meta = storage.get_file_obj(bucket_name, object_id)
        response.write_file(content, content_type=meta.get("contentType"))
    except NotFound:
        response.status = HTTPStatus.NOT_FOUND
def delete(request, response, storage, *args, **kwargs):
    """Remove an object from its bucket; 404 when it does not exist."""
    params = request.params
    try:
        storage.delete_file(params["bucket_name"], params["object_id"])
    except NotFound:
        response.status = HTTPStatus.NOT_FOUND
| import json | ||
| import logging | ||
| import re | ||
| import threading | ||
| import time | ||
| from email.parser import BytesParser | ||
| from functools import partial | ||
| from http import server, HTTPStatus | ||
| from urllib.parse import parse_qs, urlparse, unquote | ||
| from . import settings | ||
| from .handlers import buckets, objects | ||
| from .storage import Storage | ||
logger = logging.getLogger(__name__)
# HTTP verb constants; used as keys of the per-route handler dicts in HANDLERS.
GET = "GET"
POST = "POST"
PUT = "PUT"
DELETE = "DELETE"
def _wipe_data(req, res, storage):
    """Internal endpoint: wipe all stored data, optionally keeping the bucket entries."""
    keep = bool(req.query.get('keep-buckets'))
    logger.debug("Wiping storage")
    if keep:
        logger.debug("...while keeping the buckets")
    storage.wipe(keep)
    logger.debug("Storage wiped")
    res.write("OK")
| def _health_check(req, res, storage): | ||
| res.write("OK") | ||
# Ordered route table: (regex over the URL path, {HTTP verb: handler}).
# The first regex whose fullmatch succeeds wins (see Router.handle); more
# specific routes therefore come before the generic object route.
HANDLERS = (
    (r"^{}/b$".format(settings.API_ENDPOINT), {GET: buckets.ls, POST: buckets.insert}),
    (
        r"^{}/b/(?P<bucket_name>[-.\w]+)$".format(settings.API_ENDPOINT),
        {GET: buckets.get, DELETE: buckets.delete}),
    (
        r"^{}/b/(?P<bucket_name>[-.\w]+)/o$".format(settings.API_ENDPOINT),
        {GET: objects.ls}
    ),
    (
        r"^{}/b/(?P<bucket_name>[-.\w]+)/o/(?P<object_id>.*[^/]+)/copyTo/b/".format(settings.API_ENDPOINT)
        + r"(?P<dest_bucket_name>[-.\w]+)/o/(?P<dest_object_id>.*[^/]+)$",
        {POST: objects.copy}
    ),
    (
        r"^{}/b/(?P<bucket_name>[-.\w]+)/o/(?P<object_id>.*[^/]+)$".format(settings.API_ENDPOINT),
        {GET: objects.get, DELETE: objects.delete}
    ),
    # Non-default API endpoints
    (
        r"^{}/b/(?P<bucket_name>[-.\w]+)/o$".format(settings.UPLOAD_API_ENDPOINT),
        {POST: objects.insert, PUT: objects.upload_partial}
    ),
    (
        r"^{}/b/(?P<bucket_name>[-.\w]+)/o/(?P<object_id>.*[^/]+)$".format(
            settings.DOWNLOAD_API_ENDPOINT
        ),
        {GET: objects.download},
    ),
    # Internal API, not supported by the real GCS
    (r"^/$", {GET: _health_check}),  # Health check endpoint
    (r"^/wipe$", {GET: _wipe_data}),  # Wipe all data
    # Public file serving, same as object.download
    (r"^/(?P<bucket_name>[-.\w]+)/(?P<object_id>.*[^/]+)$", {GET: objects.download}),
)
| def _read_data(request_handler): | ||
| if not request_handler.headers["Content-Length"]: | ||
| return None | ||
| raw_data = request_handler.rfile.read(int(request_handler.headers["Content-Length"])) | ||
| content_type = request_handler.headers["Content-Type"] | ||
| if not content_type: | ||
| content_type = "application/octet-stream" | ||
| if content_type.startswith("application/json"): | ||
| # RFC8259 mandates utf-8 | ||
| return json.loads(raw_data) | ||
| if content_type.startswith("multipart/"): | ||
| parser = BytesParser() | ||
| header = bytes("Content-Type:" + content_type + "\r\n", "utf-8") | ||
| msg = parser.parsebytes(header + raw_data) | ||
| payload = msg.get_payload() | ||
| # For multipart upload, google API expect the first item to be a json-encoded | ||
| # object, and the second (and only other) part, the file content | ||
| return { | ||
| "meta": json.loads(payload[0].get_payload()), | ||
| "content": payload[1].get_payload(decode=True), | ||
| "content-type": payload[1].get_content_type(), | ||
| } | ||
| return raw_data | ||
class Request(object):
    """Lightweight view over a BaseHTTPRequestHandler for one incoming request."""

    def __init__(self, request_handler, method):
        super().__init__()
        self._path = request_handler.path
        self._request_handler = request_handler
        self._server_address = request_handler.server.server_address
        self._base_url = "http://{}:{}".format(self._server_address[0], self._server_address[1])
        self._full_url = self._base_url + self._path
        self._parsed_url = urlparse(self._full_url)
        self._query = parse_qs(self._parsed_url.query)
        # Fix: attribute was misspelled ``_methtod``; renamed (private, safe).
        self._method = method
        # Fix: initialise ``_match`` so ``params`` returns None instead of
        # raising AttributeError when set_match() was never called.
        self._match = None
        self._data = None
        self._parsed_params = None

    @property
    def path(self):
        """URL path component, without the query string."""
        return self._parsed_url.path

    @property
    def base_url(self):
        """scheme://host:port of this emulator server."""
        return self._base_url

    @property
    def full_url(self):
        """Absolute URL of the request, including the query string."""
        return self._full_url

    @property
    def method(self):
        """HTTP verb of this request."""
        return self._method

    @property
    def query(self):
        """Parsed query string: dict mapping names to lists of values."""
        return self._query

    @property
    def params(self):
        """URL-decoded named groups of the matched route regex; None when unrouted."""
        if not self._match:
            return None
        if self._parsed_params is None:
            self._parsed_params = {
                key: unquote(value)
                for key, value in self._match.groupdict().items()
            }
        return self._parsed_params

    @property
    def data(self):
        """Lazily-read, decoded request body (see _read_data); None without a body."""
        if not self._data:
            self._data = _read_data(self._request_handler)
        return self._data

    def get_header(self, key, default=None):
        """Return a request header value, or *default* when absent."""
        return self._request_handler.headers.get(key, default)

    def set_match(self, match):
        """Attach the route regex match so ``params`` can expose its groups."""
        self._match = match
class Response(object):
    """Accumulates status, headers and body; writes them to the handler on close()."""

    def __init__(self, handler):
        super().__init__()
        self._handler = handler
        self.status = HTTPStatus.OK
        self._headers = {}
        self._content = ""

    def write(self, content):
        """Append a text fragment to the body (discouraged; see warning)."""
        logger.warning("[RESPONSE] Content handled as string, should be handled as stream")
        self._content += content

    def write_file(self, content, content_type="application/octet-stream"):
        """Set raw bytes as the body; ``content_type=None`` leaves the header unset."""
        if content_type is not None:
            self["Content-type"] = content_type
        self._content = content

    def json(self, obj):
        """Serialize *obj* to JSON and use it as the body."""
        self["Content-type"] = "application/json"
        self._content = json.dumps(obj)

    def __setitem__(self, key, value):
        self._headers[key] = value

    def __getitem__(self, key):
        return self._headers[key]

    def close(self):
        """Send the status line, headers, Content-Length and body to the handler."""
        self._handler.send_response(self.status.value, self.status.phrase)
        for (k, v) in self._headers.items():
            self._handler.send_header(k, v)
        content = self._content
        if isinstance(self._content, str):
            content = self._content.encode("utf-8")
        # Fix: header name was misspelled "Content-Lenght", so clients never
        # received a valid Content-Length header.
        self._handler.send_header("Content-Length", str(len(content)))
        self._handler.end_headers()
        self._handler.wfile.write(content)
class Router(object):
    """Matches the request path against HANDLERS and invokes the verb's handler."""

    def __init__(self, request_handler):
        super().__init__()
        self._request_handler = request_handler

    def handle(self, method):
        """Route one request and close the response.

        Fix: when a route matched but had no handler for the HTTP method, the
        original invoked ``None(...)`` and crashed with a TypeError; it now
        answers 501 NOT_IMPLEMENTED like an unmatched path.
        """
        request = Request(self._request_handler, method)
        response = Response(self._request_handler)
        for regex, handlers in HANDLERS:
            match = re.compile(regex).fullmatch(request.path)
            if match:
                request.set_match(match)
                handler = handlers.get(method)
                if handler is None:
                    logger.error("Method not implemented: {} - {}".format(request.method, request.path))
                    response.status = HTTPStatus.NOT_IMPLEMENTED
                else:
                    try:
                        handler(request, response, self._request_handler.storage)
                    except Exception as e:
                        logger.error("An error has occurred while running the handler for {} {}".format(
                            request.method,
                            request.full_url,
                        ))
                        logger.error(e)
                        raise e
                break
        else:
            # No route matched the path at all.
            logger.error("Method not implemented: {} - {}".format(request.method, request.path))
            response.status = HTTPStatus.NOT_IMPLEMENTED
        response.close()
class RequestHandler(server.BaseHTTPRequestHandler):
    """Forwards every HTTP verb to the Router, carrying the shared storage backend."""

    def __init__(self, storage, *args, **kwargs):
        # storage must be set before super().__init__, which already handles the request.
        self.storage = storage
        super().__init__(*args, **kwargs)

    def _dispatch(self, method):
        # One Router per request keeps the handler stateless across requests.
        Router(self).handle(method)

    def do_GET(self):
        self._dispatch(GET)

    def do_POST(self):
        self._dispatch(POST)

    def do_DELETE(self):
        self._dispatch(DELETE)

    def do_PUT(self):
        self._dispatch(PUT)

    def log_message(self, format, *args):
        """Redirect the default stderr access log into the module logger."""
        logger.info(format % args)
class APIThread(threading.Thread):
    """Runs the HTTP API server in a background thread so it can be started/stopped."""

    def __init__(self, host, port, storage, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._host = host
        self._port = port
        # Set once the HTTP server exists and is about to serve (see run()).
        self.is_running = threading.Event()
        self._httpd = None
        self._storage = storage

    def run(self):
        try:
            self._httpd = server.HTTPServer((self._host, self._port), partial(RequestHandler, self._storage))
            self.is_running.set()
            self._httpd.serve_forever()
        except Exception:
            # Fix: the original bare ``except: pass`` silently swallowed every
            # startup error (e.g. port already in use); log it instead so
            # failures are visible.
            logger.exception("[API] Server thread terminated with an error")

    def join(self, timeout=None):
        # NOTE(review): this override never calls super().join(), so callers do
        # not actually wait for the thread to terminate — confirm intended.
        self.is_running.clear()
        if self._httpd:
            logger.info("[API] Stopping API server")
            self._httpd.shutdown()
            self._httpd.server_close()
class Server(object):
    """Wires a Storage backend to an APIThread and manages the server lifecycle."""

    def __init__(self, host, port, in_memory=False, default_bucket=None):
        self._storage = Storage(use_memory_fs=in_memory)
        if default_bucket:
            logging.debug('[STORAGE] Ensure default bucket "{}"'.format(default_bucket))
            buckets.create_bucket(default_bucket, self._storage)
        self._api = APIThread(host, port, self._storage)

    def start(self):
        """Launch the API thread and block until its HTTP server is up."""
        self._api.start()
        self._api.is_running.wait()

    def stop(self):
        """Shut the API thread down."""
        self._api.join(timeout=1)

    def wipe(self, keep_buckets=False):
        """Delete all stored data, optionally preserving the bucket entries."""
        self._storage.wipe(keep_buckets=keep_buckets)

    def run(self):
        """Start the server and idle until interrupted, then always stop it."""
        try:
            self.start()
            logger.info("[STORAGE] Server up an running")
            while True:
                try:
                    time.sleep(0.1)
                except KeyboardInterrupt:
                    logger.info("[STORAGE] Received keyboard interrupt")
                    break
        finally:
            self.stop()
def create_server(host, port, in_memory, default_bucket=None):
    """Factory: log the target address and return a ready-to-run Server."""
    logger.info("[STORAGE] Starting server at {}:{}".format(host, port))
    instance = Server(host, port, in_memory=in_memory, default_bucket=default_bucket)
    return instance
from os.path import abspath
# URL prefixes mirroring the real Google Cloud Storage JSON API layout.
API_ENDPOINT = "/storage/v1"
UPLOAD_API_ENDPOINT = "/upload/storage/v1"
BATCH_API_ENDPOINT = "/batch/storage/v1"
DOWNLOAD_API_ENDPOINT = "/download/storage/v1"
# pyfilesystem assumes OS fs within CWD as base
STORAGE_BASE = abspath("./")
# Directory (under STORAGE_BASE) holding bucket data and the .meta index file.
STORAGE_DIR = ".cloudstorage"
| import datetime | ||
| import json | ||
| import logging | ||
| import os | ||
| import fs | ||
| from fs.errors import FileExpected, ResourceNotFound | ||
| from .exceptions import Conflict, NotFound | ||
| from .settings import STORAGE_BASE, STORAGE_DIR | ||
| logger = logging.getLogger(__name__) | ||
class Storage(object):
    """File-system backed store for the emulator's buckets, objects and uploads.

    Metadata (bucket resources, object resources, pending resumable uploads)
    is held in memory and mirrored to a ``.meta`` JSON file; object content is
    written below STORAGE_DIR through pyfilesystem.
    """

    def __init__(self, use_memory_fs=False, data_dir=STORAGE_BASE):
        if not os.path.isabs(data_dir):
            raise ValueError("data_dir must be an absolute path")
        self._use_memory_fs = use_memory_fs
        self._data_dir = data_dir
        self._pwd = fs.open_fs(data_dir)
        try:
            self._fs = self._pwd.makedir(STORAGE_DIR)
        except fs.errors.DirectoryExists:
            self._fs = self._pwd.opendir(STORAGE_DIR)
        self._read_config_from_file()

    def _write_config_to_file(self):
        """Persist the in-memory metadata to the .meta JSON file."""
        data = {
            "buckets": self.buckets,
            "objects": self.objects,
            "resumable": self.resumable,
        }
        with self._fs.open(".meta", mode="w") as meta:
            json.dump(data, meta, indent=2)

    def _read_config_from_file(self):
        """Load metadata from .meta, falling back to empty state when absent."""
        try:
            with self._fs.open(".meta", mode="r") as meta:
                data = json.load(meta)
            # Fix: default to {} so a .meta file missing a key cannot leave
            # these attributes set to None.
            self.buckets = data.get("buckets", {})
            self.objects = data.get("objects", {})
            self.resumable = data.get("resumable", {})
        except ResourceNotFound:
            self.buckets = {}
            self.objects = {}
            self.resumable = {}

    def _get_or_create_dir(self, bucket_name, file_name):
        """Return the directory that should contain *file_name*, creating it as needed."""
        try:
            bucket_dir = self._fs.makedir(bucket_name)
        except fs.errors.DirectoryExists:
            bucket_dir = self._fs.opendir(bucket_name)
        dir_name = fs.path.dirname(file_name)
        return bucket_dir.makedirs(dir_name, recreate=True)

    def get_storage_base(self):
        """Returns the pyfilesystem-compatible fs path to the storage

        This is the OSFS if using disk storage, or "mem://" otherwise.
        See https://docs.pyfilesystem.org/en/latest/guide.html#opening-filesystems for more info

        Returns:
            string -- The relevant filesystem
        """
        if self._use_memory_fs:
            return "mem://"
        else:
            return self._data_dir

    def get_bucket(self, bucket_name):
        """Get the bucket resource object given the bucket name

        Arguments:
            bucket_name {str} -- Name of the bucket

        Returns:
            dict -- GCS-like Bucket resource, or None when unknown
        """
        return self.buckets.get(bucket_name)

    def get_file_list(self, bucket_name, prefix=None, delimiter=None):
        """Lists all the blobs in the bucket that begin with the prefix.

        This can be used to list all blobs in a "folder", e.g. "public/".
        The delimiter argument can be used to restrict the results to only the
        "files" in the given "folder". Without the delimiter, the entire tree under
        the prefix is returned. For example, given these blobs:
            a/1.txt
            a/b/2.txt
        If you just specify prefix = 'a', you'll get back:
            a/1.txt
            a/b/2.txt
        However, if you specify prefix='a' and delimiter='/', you'll get back:
            a/1.txt
        Additionally, the same request will return blobs.prefixes populated with:
            a/b/
        Source: https://cloud.google.com/storage/docs/listing-objects#storage-list-objects-python

        Raises:
            NotFound: If the bucket doesn't exist
        """
        if bucket_name not in self.buckets:
            raise NotFound
        bucket_objects = self.objects.get(bucket_name, {})
        if prefix:
            # TODO: Still need to implement the last part of the doc string above to
            # TODO: populate blobs.prefixes when using a delimiter.
            return list(file_object for file_name, file_object in bucket_objects.items()
                        if file_name.startswith(prefix)
                        and (not delimiter or delimiter not in file_name[len(prefix + delimiter):]))
        else:
            return list(bucket_objects.values())

    def create_bucket(self, bucket_name, bucket_obj):
        """Create a bucket object representation and save it to the current fs

        Arguments:
            bucket_name {str} -- Name of the GCS bucket
            bucket_obj {dict} -- GCS-like Bucket resource

        Returns:
            dict -- the stored bucket resource
        """
        self.buckets[bucket_name] = bucket_obj
        self._write_config_to_file()
        return bucket_obj

    def create_file(self, bucket_name, file_name, content, file_obj):
        """Create a text file given a string content

        Arguments:
            bucket_name {str} -- Name of the bucket to save to
            file_name {str} -- File name used to store data
            content {bytes} -- Content of the file to write
            file_obj {dict} -- GCS-like Object resource
        """
        file_dir = self._get_or_create_dir(bucket_name, file_name)
        base_name = fs.path.basename(file_name)
        with file_dir.open(base_name, mode="wb") as file:
            file.write(content)
        bucket_objects = self.objects.get(bucket_name, {})
        bucket_objects[file_name] = file_obj
        self.objects[bucket_name] = bucket_objects
        self._write_config_to_file()

    def create_resumable_upload(self, bucket_name, file_name, file_obj):
        """Initiate the necessary data to support partial upload.

        This doesn't fully support partial upload, but expect the secondary PUT
        call to send all the data in one go.

        Basically, we try to comply to the bare minimum to the API described in
        https://cloud.google.com/storage/docs/performing-resumable-uploads ignoring
        any potential network failures

        Arguments:
            bucket_name {string} -- Name of the bucket to save to
            file_name {string} -- File name used to store data
            file_obj {dict} -- GCS Object resource

        Returns:
            str -- id of the resumable upload session (`upload_id`)
        """
        file_id = "{}:{}:{}".format(bucket_name, file_name, datetime.datetime.now())
        self.resumable[file_id] = file_obj
        self._write_config_to_file()
        return file_id

    def create_file_for_resumable_upload(self, file_id, content):
        """Create a binary file following a partial upload request

        This also updates the meta with the final file-size

        Arguments:
            file_id {str} -- the `upload_id` of the partial upload session
            content {bytes} -- raw content to add to the file

        Returns:
            dict -- GCS-like Object resource
        """
        file_obj = self.resumable[file_id]
        bucket_name = file_obj["bucket"]
        file_name = file_obj["name"]
        file_dir = self._get_or_create_dir(bucket_name, file_name)
        base_name = fs.path.basename(file_name)
        with file_dir.open(base_name, mode="wb") as file:
            file.write(content)
        file_obj["size"] = str(len(content))
        bucket_objects = self.objects.get(bucket_name, {})
        bucket_objects[file_name] = file_obj
        self.objects[bucket_name] = bucket_objects
        # The session is complete; drop it from the pending uploads.
        del self.resumable[file_id]
        self._write_config_to_file()
        return file_obj

    def get_file_obj(self, bucket_name, file_name):
        """Gets the meta information for a file within a bucket

        Arguments:
            bucket_name {str} -- Name of the bucket
            file_name {str} -- File name

        Raises:
            NotFound: Raised when the object doesn't exist

        Returns:
            dict -- GCS-like Object resource
        """
        try:
            return self.objects[bucket_name][file_name]
        except KeyError:
            raise NotFound

    def get_file(self, bucket_name, file_name):
        """Get the raw data of a file within a bucket

        Arguments:
            bucket_name {str} -- Name of the bucket
            file_name {str} -- File name

        Raises:
            NotFound: Raised when the object doesn't exist

        Returns:
            bytes -- Raw content of the file
        """
        try:
            bucket_dir = self._fs.opendir(bucket_name)
            return bucket_dir.open(file_name, mode="rb").read()
        except (FileExpected, ResourceNotFound) as e:
            logger.error("Resource not found:")
            logger.error(e)
            raise NotFound

    def delete_bucket(self, bucket_name):
        """Delete a bucket's meta and file

        Arguments:
            bucket_name {str} -- GCS bucket name

        Raises:
            NotFound: If the bucket doesn't exist
            Conflict: If the bucket is not empty or there are pending uploads
        """
        bucket_meta = self.buckets.get(bucket_name)
        if bucket_meta is None:
            raise NotFound("Bucket with name '{}' does not exist".format(bucket_name))
        bucket_objects = self.objects.get(bucket_name, {})
        if len(bucket_objects.keys()) != 0:
            raise Conflict("Bucket '{}' is not empty".format(bucket_name))
        resumable_ids = [
            file_id
            for (file_id, file_obj) in self.resumable.items()
            if file_obj.get('bucket') == bucket_name
        ]
        if len(resumable_ids) != 0:
            raise Conflict("Bucket '{}' has pending upload sessions".format(bucket_name))
        del self.buckets[bucket_name]
        self._delete_dir(bucket_name)
        self._write_config_to_file()

    def delete_file(self, bucket_name, file_name):
        """Delete an object's metadata and its on-disk content

        Arguments:
            bucket_name {str} -- Name of the bucket
            file_name {str} -- File name

        Raises:
            NotFound: Raised when the object doesn't exist
        """
        try:
            del self.objects[bucket_name][file_name]
        except KeyError:
            # Fix: the original message had the object and bucket names swapped.
            raise NotFound("Object with name '{}' does not exist in bucket '{}'".format(file_name, bucket_name))
        self._delete_file(bucket_name, file_name)
        self._write_config_to_file()

    def _delete_file(self, bucket_name, file_name):
        """Best-effort removal of the on-disk content of an object."""
        try:
            with self._fs.opendir(bucket_name) as bucket_dir:
                bucket_dir.remove(file_name)
        except ResourceNotFound:
            logger.info("No file to remove '{}/{}'".format(bucket_name, file_name))

    def _delete_dir(self, path, force=True):
        """Best-effort removal of a directory; force removes its whole tree."""
        try:
            remover = self._fs.removetree if force else self._fs.removedir
            remover(path)
        except ResourceNotFound:
            logger.info("No folder to remove '{}'".format(path))

    def wipe(self, keep_buckets=False):
        """Clear all metadata and content; optionally re-register existing buckets."""
        existing_buckets = self.buckets
        self.buckets = {}
        self.objects = {}
        self.resumable = {}
        try:
            # NOTE(review): if .meta is already missing, the directory cleanup
            # below is skipped as well — confirm this is intended.
            self._fs.remove('.meta')
            for path in self._fs.listdir('.'):
                self._fs.removetree(path)
        except ResourceNotFound as e:
            logger.warning(e)
        if keep_buckets:
            for k, v in existing_buckets.items():
                self.create_bucket(k, v)
| #!/usr/bin/env python | ||
| """ | ||
| GOOGLE TASKS EMULATOR | ||
| Code was taken from: | ||
| https://gitlab.com/potato-oss/google-cloud/gcloud-tasks-emulator | ||
| If you're interested in commercial support, training, or consultancy | ||
| then go ahead and contact the creators at opensource@potatolondon.com | ||
| """ | ||
| import argparse | ||
| import logging | ||
| import sys | ||
| import yaml | ||
| from .server import create_server, DEFAULT_TARGET_PORT, DEFAULT_TARGET_HOST | ||
# Random, apparently not often used
# Port the emulator listens on when --port is not supplied.
DEFAULT_PORT = 9022
def run_server(host, port, target_host, target_port, default_queue_names, max_retries, crons):
    """Build the emulator server and block on its run loop, returning its exit status."""
    return create_server(
        host, port, target_host, target_port, default_queue_names, max_retries, crons
    ).run()
def read_queue_yaml(path):
    """Read queue names from a queue.yaml file.

    Arguments:
        path {str} -- path to the queue.yaml file

    Returns:
        (bool, list) -- success flag and the queue names that were read
    """
    queues = []
    with open(path, "r") as f:
        try:
            data = yaml.safe_load(f)
            queues = [queue['name'] for queue in data['queue']]
        except yaml.YAMLError:
            logging.exception("Error reading %s", path)
            return False, queues
        except (KeyError, TypeError):
            # The YAML parsed but did not have the expected
            # {'queue': [{'name': ...}, ...]} shape (e.g. empty document).
            logging.exception("Error reading %s", path)
            return False, queues
    return True, queues
def read_cron_yaml(path):
    """Read cron entries from a cron.yaml file.

    Arguments:
        path {str} -- path to the cron.yaml file

    Returns:
        (bool, list) -- success flag and the cron entries that were read
    """
    crons = []
    with open(path, "r") as f:
        try:
            data = yaml.safe_load(f)
            crons = list(data['cron'])
        except yaml.YAMLError:
            logging.exception("Error reading %s", path)
            return False, crons
        except (KeyError, TypeError):
            # The YAML parsed but did not have the expected
            # {'cron': [...]} shape (e.g. empty document).
            logging.exception("Error reading %s", path)
            return False, crons
    return True, crons
def prepare_args_parser():
    """Build the command-line parser for the emulator.

    Returns:
        argparse.ArgumentParser -- parser exposing a single 'start' subcommand
    """
    parser = argparse.ArgumentParser(description='Google Cloud Task Emulator')
    subparsers = parser.add_subparsers(title='subcommands', dest="subcommand")
    start = subparsers.add_parser('start', help='start the emulator')
    start.add_argument(
        "-p", "--port",
        type=int, help='the port to run the server on', default=DEFAULT_PORT
    )
    start.add_argument(
        "-t", "--target-port",
        type=int, help='the port to which the task runner will POST requests to',
        default=DEFAULT_TARGET_PORT,
    )
    start.add_argument(
        "-H", "--target-host",
        # BUGFIX: "too" -> "to" in the help text.
        type=str, help="the hostname to submit tasks to (default is 'localhost')",
        default=DEFAULT_TARGET_HOST,
    )
    start.add_argument("-q", "--quiet", action="store_true", default=False)
    start.add_argument(
        "-d", "--default-queue", type=str, action="append",
        help="If specified will create a queue with the passed name. "
        "Name should be in the format of projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID"
    )
    start.add_argument(
        "-r", "--max-retries",
        type=int, help='maximum number of retries when a task is failed (default is infinity)',
        default=-1,
    )
    start.add_argument(
        "-Q", "--queue-yaml",
        type=str, help="path to a queue.yaml to initialize the default queues",
        default=None
    )
    start.add_argument(
        "-C", "--cron-yaml",
        # BUGFIX: help text was copy-pasted from --queue-yaml and wrongly
        # claimed this option initializes queues.
        type=str, help="path to a cron.yaml to initialize the cron jobs",
        default=None
    )
    start.add_argument(
        "-P", "--queue-yaml-project",
        type=str, help="project ID to use for queues created from queue-yaml",
        default="[PROJECT]"
    )
    start.add_argument(
        "-L", "--queue-yaml-location",
        type=str, help="location ID to use for queues created from queue-yaml",
        default="[LOCATION]"
    )
    return parser
def main():
    """Entry point: parse arguments, configure logging, and run the emulator.

    Exits non-zero when no subcommand is given or when a supplied
    queue.yaml / cron.yaml cannot be read; otherwise exits with the
    server's return status.
    """
    print("Starting Cloud Tasks Emulator")
    parser = prepare_args_parser()
    args = parser.parse_args()
    if args.subcommand != "start":
        parser.print_usage()
        sys.exit(1)
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())
    root.setLevel(logging.ERROR if args.quiet else logging.INFO)
    default_queues = set(args.default_queue or [])
    crons = []
    # FIXME: We simply read queue names from queue.yaml. Instead we should
    # read all queue parameters and handle them correctly in the emulator
    if args.queue_yaml:
        success, queues = read_queue_yaml(args.queue_yaml)
        if not success:
            sys.exit(1)
        # Expand bare queue names to fully-qualified queue paths.
        default_queues |= {
            "projects/%s/locations/%s/queues/%s" % (
                args.queue_yaml_project,
                args.queue_yaml_location,
                name,
            )
            for name in queues
        }
    if args.cron_yaml:
        success, crons = read_cron_yaml(args.cron_yaml)
        if not success:
            # BUGFIX: the success flag was previously ignored; abort on a bad
            # cron.yaml for consistency with the queue.yaml handling above.
            sys.exit(1)
    sys.exit(run_server(
        "localhost", args.port, args.target_host, args.target_port,
        default_queues, args.max_retries, crons,
    ))


if __name__ == '__main__':
    main()
| # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! | ||
| import grpc | ||
| from ..proto import ( | ||
| cloudtasks_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2, | ||
| ) | ||
| from ..proto import ( | ||
| queue_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2, | ||
| ) | ||
| from ..proto import ( | ||
| task_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2, | ||
| ) | ||
| from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 | ||
| from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 | ||
| from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 | ||
# NOTE: generated gRPC client stub -- kept byte-identical to the protoc output;
# regenerate from the .proto rather than editing by hand.
class CloudTasksStub(object):
    """Cloud Tasks allows developers to manage the execution of background
    work in their applications.
    """

    def __init__(self, channel):
        """Constructor.
        Args:
        channel: A grpc.Channel.
        """
        # One unary-unary callable per CloudTasks RPC; request/response
        # (de)serializers come from the generated pb2 modules.
        self.ListQueues = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/ListQueues",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString,
        )
        self.GetQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/GetQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString,
        )
        self.CreateQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/CreateQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString,
        )
        self.UpdateQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/UpdateQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString,
        )
        self.DeleteQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/DeleteQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.PurgeQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/PurgeQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString,
        )
        self.PauseQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/PauseQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString,
        )
        self.ResumeQueue = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/ResumeQueue",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.FromString,
        )
        self.GetIamPolicy = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy",
            request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
            response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
        )
        self.SetIamPolicy = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy",
            request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
            response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
        )
        self.TestIamPermissions = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions",
            request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
            response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
        )
        self.ListTasks = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/ListTasks",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString,
        )
        self.GetTask = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/GetTask",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString,
        )
        self.CreateTask = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/CreateTask",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString,
        )
        self.DeleteTask = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/DeleteTask",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.RunTask = channel.unary_unary(
            "/google.cloud.tasks.v2.CloudTasks/RunTask",
            request_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.FromString,
        )
# NOTE: generated gRPC service skeleton -- kept byte-identical to the protoc
# output. Every handler below returns UNIMPLEMENTED until a concrete servicer
# (the emulator's implementation) overrides it.
class CloudTasksServicer(object):
    """Cloud Tasks allows developers to manage the execution of background
    work in their applications.
    """

    def ListQueues(self, request, context):
        """Lists queues.
        Queues are returned in lexicographical order.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def GetQueue(self, request, context):
        """Gets a queue.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def CreateQueue(self, request, context):
        """Creates a queue.
        Queues created with this method allow tasks to live for a maximum of 31
        days. After a task is 31 days old, the task will be deleted regardless of whether
        it was dispatched or not.
        WARNING: Using this method may have unintended side effects if you are
        using an App Engine `queue.yaml` or `queue.xml` file to manage your queues.
        Read
        [Overview of Queue Management and
        queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using
        this method.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def UpdateQueue(self, request, context):
        """Updates a queue.
        This method creates the queue if it does not exist and updates
        the queue if it does exist.
        Queues created with this method allow tasks to live for a maximum of 31
        days. After a task is 31 days old, the task will be deleted regardless of whether
        it was dispatched or not.
        WARNING: Using this method may have unintended side effects if you are
        using an App Engine `queue.yaml` or `queue.xml` file to manage your queues.
        Read
        [Overview of Queue Management and
        queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using
        this method.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def DeleteQueue(self, request, context):
        """Deletes a queue.
        This command will delete the queue even if it has tasks in it.
        Note: If you delete a queue, a queue with the same name can't be created
        for 7 days.
        WARNING: Using this method may have unintended side effects if you are
        using an App Engine `queue.yaml` or `queue.xml` file to manage your queues.
        Read
        [Overview of Queue Management and
        queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using
        this method.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def PurgeQueue(self, request, context):
        """Purges a queue by deleting all of its tasks.
        All tasks created before this method is called are permanently deleted.
        Purge operations can take up to one minute to take effect. Tasks
        might be dispatched before the purge takes effect. A purge is irreversible.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def PauseQueue(self, request, context):
        """Pauses the queue.
        If a queue is paused then the system will stop dispatching tasks
        until the queue is resumed via
        [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. Tasks can still be added
        when the queue is paused. A queue is paused if its
        [state][google.cloud.tasks.v2.Queue.state] is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def ResumeQueue(self, request, context):
        """Resume a queue.
        This method resumes a queue after it has been
        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or
        [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The state of a queue is stored
        in the queue's [state][google.cloud.tasks.v2.Queue.state]; after calling this method it
        will be set to [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING].
        WARNING: Resuming many high-QPS queues at the same time can
        lead to target overloading. If you are resuming high-QPS
        queues, follow the 500/50/5 pattern described in
        [Managing Cloud Tasks Scaling
        Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling).
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def GetIamPolicy(self, request, context):
        """Gets the access control policy for a [Queue][google.cloud.tasks.v2.Queue].
        Returns an empty policy if the resource exists and does not have a policy
        set.
        Authorization requires the following
        [Google IAM](https://cloud.google.com/iam) permission on the specified
        resource parent:
        * `cloudtasks.queues.getIamPolicy`
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def SetIamPolicy(self, request, context):
        """Sets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing
        policy.
        Note: The Cloud Console does not check queue-level IAM permissions yet.
        Project-level permissions are required to use the Cloud Console.
        Authorization requires the following
        [Google IAM](https://cloud.google.com/iam) permission on the specified
        resource parent:
        * `cloudtasks.queues.setIamPolicy`
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def TestIamPermissions(self, request, context):
        """Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2.Queue].
        If the resource does not exist, this will return an empty set of
        permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error.
        Note: This operation is designed to be used for building permission-aware
        UIs and command-line tools, not for authorization checking. This operation
        may "fail open" without warning.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def ListTasks(self, request, context):
        """Lists the tasks in a queue.
        By default, only the [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved
        due to performance considerations;
        [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] controls the
        subset of information which is returned.
        The tasks may be returned in any order. The ordering may change at any
        time.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def GetTask(self, request, context):
        """Gets a task.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def CreateTask(self, request, context):
        """Creates a task and adds it to a queue.
        Tasks cannot be updated after creation; there is no UpdateTask command.
        * The maximum task size is 100KB.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def DeleteTask(self, request, context):
        """Deletes a task.
        A task can be deleted if it is scheduled or dispatched. A task
        cannot be deleted if it has executed successfully or permanently
        failed.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def RunTask(self, request, context):
        """Forces a task to run now.
        When this method is called, Cloud Tasks will dispatch the task, even if
        the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2.RateLimits] or
        is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
        This command is meant to be used for manual debugging. For
        example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can be used to retry a failed
        task after a fix has been made or to manually force a task to be
        dispatched now.
        The dispatched task is returned. That is, the task that is returned
        contains the [status][Task.status] after the task is dispatched but
        before the task is received by its target.
        If Cloud Tasks receives a successful response from the task's
        target, then the task will be deleted; otherwise the task's
        [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will be reset to the time that
        [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called plus the retry delay specified
        in the queue's [RetryConfig][google.cloud.tasks.v2.RetryConfig].
        [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns
        [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a
        task that has already succeeded or permanently failed.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")
# NOTE: generated registration helper -- kept byte-identical to the protoc
# output; wires a CloudTasksServicer's handlers into a grpc.Server.
def add_CloudTasksServicer_to_server(servicer, server):
    # One unary-unary handler per RPC, mirroring the CloudTasksStub entries.
    rpc_method_handlers = {
        "ListQueues": grpc.unary_unary_rpc_method_handler(
            servicer.ListQueues,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.SerializeToString,
        ),
        "GetQueue": grpc.unary_unary_rpc_method_handler(
            servicer.GetQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString,
        ),
        "CreateQueue": grpc.unary_unary_rpc_method_handler(
            servicer.CreateQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString,
        ),
        "UpdateQueue": grpc.unary_unary_rpc_method_handler(
            servicer.UpdateQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString,
        ),
        "DeleteQueue": grpc.unary_unary_rpc_method_handler(
            servicer.DeleteQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.FromString,
            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        ),
        "PurgeQueue": grpc.unary_unary_rpc_method_handler(
            servicer.PurgeQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString,
        ),
        "PauseQueue": grpc.unary_unary_rpc_method_handler(
            servicer.PauseQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString,
        ),
        "ResumeQueue": grpc.unary_unary_rpc_method_handler(
            servicer.ResumeQueue,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.Queue.SerializeToString,
        ),
        "GetIamPolicy": grpc.unary_unary_rpc_method_handler(
            servicer.GetIamPolicy,
            request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString,
            response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
        ),
        "SetIamPolicy": grpc.unary_unary_rpc_method_handler(
            servicer.SetIamPolicy,
            request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString,
            response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
        ),
        "TestIamPermissions": grpc.unary_unary_rpc_method_handler(
            servicer.TestIamPermissions,
            request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString,
            response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString,
        ),
        "ListTasks": grpc.unary_unary_rpc_method_handler(
            servicer.ListTasks,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.SerializeToString,
        ),
        "GetTask": grpc.unary_unary_rpc_method_handler(
            servicer.GetTask,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.SerializeToString,
        ),
        "CreateTask": grpc.unary_unary_rpc_method_handler(
            servicer.CreateTask,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.SerializeToString,
        ),
        "DeleteTask": grpc.unary_unary_rpc_method_handler(
            servicer.DeleteTask,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.FromString,
            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        ),
        "RunTask": grpc.unary_unary_rpc_method_handler(
            servicer.RunTask,
            request_deserializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.FromString,
            response_serializer=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.Task.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        "google.cloud.tasks.v2.CloudTasks", rpc_method_handlers
    )
    server.add_generic_rpc_handlers((generic_handler,))
| # -*- coding: utf-8 -*- | ||
| # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| # source: google/cloud/tasks_v2/proto/cloudtasks.proto | ||
| import sys | ||
# Py2/py3 shim: on py2 serialized proto literals are already bytes; on py3
# re-encode the generated latin-1 text back to bytes.
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
| from google.protobuf import descriptor as _descriptor | ||
| from google.protobuf import message as _message | ||
| from google.protobuf import reflection as _reflection | ||
| from google.protobuf import symbol_database as _symbol_database | ||
| # @@protoc_insertion_point(imports) | ||
# Default symbol database where the generated message classes are registered.
_sym_db = _symbol_database.Default()
| from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 | ||
| from google.api import client_pb2 as google_dot_api_dot_client__pb2 | ||
| from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 | ||
| from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 | ||
| from ..proto import ( | ||
| queue_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2, | ||
| ) | ||
| from ..proto import ( | ||
| task_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2, | ||
| ) | ||
| from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 | ||
| from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 | ||
| from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 | ||
| from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 | ||
| DESCRIPTOR = _descriptor.FileDescriptor( | ||
| name="google/cloud/tasks_v2/proto/cloudtasks.proto", | ||
| package="google.cloud.tasks.v2", | ||
| syntax="proto3", | ||
| serialized_options=_b( | ||
| "\n\031com.google.cloud.tasks.v2B\017CloudTasksProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\242\002\005TASKS" | ||
| ), | ||
| serialized_pb=_b( | ||
| '\n,google/cloud/tasks_v2/proto/cloudtasks.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/tasks_v2/proto/queue.proto\x1a&google/cloud/tasks_v2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"[\n\x12ListQueuesResponse\x12,\n\x06queues\x18\x01 \x03(\x0b\x32\x1c.google.cloud.tasks.v2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x7f\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaa\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\x12\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"X\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.google.cloud.tasks.v2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x7f\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"\xb4\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12.\n\x04task\x18\x02 \x01(\x0b\x32\x1b.google.cloud.tasks.v2.TaskB\x03\xe0\x41\x02\x12\x37\n\rresponse_view\x18\x03 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x7f\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View2\xdd\x14\n\nCloudTasks\x12\x9e\x01\n\nListQueues\x12(.google.cloud.tasks.v2.ListQueuesRequest\x1a).google.cloud.tasks.v2.ListQueuesResponse";\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x8b\x01\n\x08GetQueue\x12&.google.cloud.tasks.v2.GetQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"9\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa0\x01\n\x0b\x43reateQueue\x12).google.cloud.tasks.v2.CreateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"H\x82\xd3\xe4\x93\x02\x33"*/v2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xab\x01\n\x0bUpdateQueue\x12).google.cloud.tasks.v2.UpdateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"S\x82\xd3\xe4\x93\x02\x39\x32\x30/v2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x8b\x01\n\x0b\x44\x65leteQueue\x12).google.cloud.tasks.v2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty"9\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\x98\x01\n\nPurgeQueue\x12(.google.cloud.tasks.v2.PurgeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\x98\x01\n\nPauseQueue\x12(.google.cloud.tasks.v2.PauseQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\x0bResumeQueue\x12).google.cloud.tasks.v2.ResumeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"C\x82\xd3\xe4\x93\x02\x36"1/v2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa3\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15
.google.iam.v1.Policy"X\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xce\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"c\x82\xd3\xe4\x93\x02\x46"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xa3\x01\n\tListTasks\x12\'.google.cloud.tasks.v2.ListTasksRequest\x1a(.google.cloud.tasks.v2.ListTasksResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x90\x01\n\x07GetTask\x12%.google.cloud.tasks.v2.GetTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa0\x01\n\nCreateTask\x12(.google.cloud.tasks.v2.CreateTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"K\x82\xd3\xe4\x93\x02\x37"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x91\x01\n\nDeleteTask\x12(.google.cloud.tasks.v2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02\x34*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\x97\x01\n\x07RunTask\x12%.google.cloud.tasks.v2.RunTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"H\x82\xd3\xe4\x93\x02;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x19\x63om.google.cloud.tasks.v2B\x0f\x43loudTasksProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\xa2\x02\x05TASKSb\x06proto3' | ||
| ), | ||
| dependencies=[ | ||
| google_dot_api_dot_annotations__pb2.DESCRIPTOR, | ||
| google_dot_api_dot_client__pb2.DESCRIPTOR, | ||
| google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, | ||
| google_dot_api_dot_resource__pb2.DESCRIPTOR, | ||
| google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2.DESCRIPTOR, | ||
| google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2.DESCRIPTOR, | ||
| google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR, | ||
| google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, | ||
| google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, | ||
| google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, | ||
| ], | ||
| ) | ||
# Descriptor for the ListQueuesRequest message (fields: parent, filter,
# page_size, page_token). Machine-generated by protoc — do not hand-edit:
# serialized_start/serialized_end index into the serialized_pb blob above.
_LISTQUEUESREQUEST = _descriptor.Descriptor(
    name="ListQueuesRequest",
    full_name="google.cloud.tasks.v2.ListQueuesRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # type=9 / cpp_type=9 is a string field; the serialized_options bytes
        # carry the REQUIRED field_behavior and Queue resource_reference.
        _descriptor.FieldDescriptor(
            name="parent",
            full_name="google.cloud.tasks.v2.ListQueuesRequest.parent",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="filter",
            full_name="google.cloud.tasks.v2.ListQueuesRequest.filter",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # type=5 / cpp_type=1 is an int32 field.
        _descriptor.FieldDescriptor(
            name="page_size",
            full_name="google.cloud.tasks.v2.ListQueuesRequest.page_size",
            index=2,
            number=3,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="page_token",
            full_name="google.cloud.tasks.v2.ListQueuesRequest.page_token",
            index=3,
            number=4,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=391,
    serialized_end=522,
)
# Descriptor for the ListQueuesResponse message (repeated Queue `queues` plus
# `next_page_token`). Machine-generated by protoc — do not hand-edit.
_LISTQUEUESRESPONSE = _descriptor.Descriptor(
    name="ListQueuesResponse",
    full_name="google.cloud.tasks.v2.ListQueuesResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # type=11 / cpp_type=10 is a message field; label=3 marks it repeated.
        # Its message_type is patched to _QUEUE after all descriptors exist.
        _descriptor.FieldDescriptor(
            name="queues",
            full_name="google.cloud.tasks.v2.ListQueuesResponse.queues",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="next_page_token",
            full_name="google.cloud.tasks.v2.ListQueuesResponse.next_page_token",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=524,
    serialized_end=615,
)
# Descriptor for the GetQueueRequest message (single required `name` field
# referencing a Queue resource). Machine-generated by protoc — do not hand-edit.
_GETQUEUEREQUEST = _descriptor.Descriptor(
    name="GetQueueRequest",
    full_name="google.cloud.tasks.v2.GetQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.GetQueueRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=617,
    serialized_end=689,
)
# Descriptor for the CreateQueueRequest message (`parent` location plus the
# Queue payload to create). Machine-generated by protoc — do not hand-edit.
_CREATEQUEUEREQUEST = _descriptor.Descriptor(
    name="CreateQueueRequest",
    full_name="google.cloud.tasks.v2.CreateQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="parent",
            full_name="google.cloud.tasks.v2.CreateQueueRequest.parent",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        ),
        # Message field; message_type is patched to _QUEUE after all
        # descriptors are constructed.
        _descriptor.FieldDescriptor(
            name="queue",
            full_name="google.cloud.tasks.v2.CreateQueueRequest.queue",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b("\340A\002"),
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=691,
    serialized_end=818,
)
# Descriptor for the UpdateQueueRequest message (required Queue payload plus an
# optional FieldMask). Machine-generated by protoc — do not hand-edit.
_UPDATEQUEUEREQUEST = _descriptor.Descriptor(
    name="UpdateQueueRequest",
    full_name="google.cloud.tasks.v2.UpdateQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="queue",
            full_name="google.cloud.tasks.v2.UpdateQueueRequest.queue",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b("\340A\002"),
            file=DESCRIPTOR,
        ),
        # message_type is patched to _FIELDMASK after all descriptors exist.
        _descriptor.FieldDescriptor(
            name="update_mask",
            full_name="google.cloud.tasks.v2.UpdateQueueRequest.update_mask",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=820,
    serialized_end=939,
)
# Descriptor for the DeleteQueueRequest message (single required `name` field
# referencing a Queue resource). Machine-generated by protoc — do not hand-edit.
_DELETEQUEUEREQUEST = _descriptor.Descriptor(
    name="DeleteQueueRequest",
    full_name="google.cloud.tasks.v2.DeleteQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.DeleteQueueRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=941,
    serialized_end=1016,
)
# Descriptor for the PurgeQueueRequest message (single required `name` field
# referencing a Queue resource). Machine-generated by protoc — do not hand-edit.
_PURGEQUEUEREQUEST = _descriptor.Descriptor(
    name="PurgeQueueRequest",
    full_name="google.cloud.tasks.v2.PurgeQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.PurgeQueueRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1018,
    serialized_end=1092,
)
# Descriptor for the PauseQueueRequest message (single required `name` field
# referencing a Queue resource). Machine-generated by protoc — do not hand-edit.
_PAUSEQUEUEREQUEST = _descriptor.Descriptor(
    name="PauseQueueRequest",
    full_name="google.cloud.tasks.v2.PauseQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.PauseQueueRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1094,
    serialized_end=1168,
)
# Descriptor for the ResumeQueueRequest message (single required `name` field
# referencing a Queue resource). Machine-generated by protoc — do not hand-edit.
_RESUMEQUEUEREQUEST = _descriptor.Descriptor(
    name="ResumeQueueRequest",
    full_name="google.cloud.tasks.v2.ResumeQueueRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.ResumeQueueRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A!\n\037cloudtasks.googleapis.com/Queue"
            ),
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1170,
    serialized_end=1245,
)
# Descriptor for the ListTasksRequest message (fields: parent, response_view,
# page_size, page_token). Machine-generated by protoc — do not hand-edit.
_LISTTASKSREQUEST = _descriptor.Descriptor(
    name="ListTasksRequest",
    full_name="google.cloud.tasks.v2.ListTasksRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="parent",
            full_name="google.cloud.tasks.v2.ListTasksRequest.parent",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A \022\036cloudtasks.googleapis.com/Task"
            ),
            file=DESCRIPTOR,
        ),
        # type=14 / cpp_type=8 is an enum field; enum_type is patched to
        # _TASK_VIEW after all descriptors are constructed.
        _descriptor.FieldDescriptor(
            name="response_view",
            full_name="google.cloud.tasks.v2.ListTasksRequest.response_view",
            index=1,
            number=2,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="page_size",
            full_name="google.cloud.tasks.v2.ListTasksRequest.page_size",
            index=2,
            number=3,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="page_token",
            full_name="google.cloud.tasks.v2.ListTasksRequest.page_token",
            index=3,
            number=4,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1248,
    serialized_end=1418,
)
# Descriptor for the ListTasksResponse message (repeated Task `tasks` plus
# `next_page_token`). Machine-generated by protoc — do not hand-edit.
_LISTTASKSRESPONSE = _descriptor.Descriptor(
    name="ListTasksResponse",
    full_name="google.cloud.tasks.v2.ListTasksResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Repeated message field; message_type is patched to _TASK after all
        # descriptors are constructed.
        _descriptor.FieldDescriptor(
            name="tasks",
            full_name="google.cloud.tasks.v2.ListTasksResponse.tasks",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="next_page_token",
            full_name="google.cloud.tasks.v2.ListTasksResponse.next_page_token",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1420,
    serialized_end=1508,
)
# Descriptor for the GetTaskRequest message (required Task `name` plus an
# optional `response_view` enum). Machine-generated by protoc — do not hand-edit.
_GETTASKREQUEST = _descriptor.Descriptor(
    name="GetTaskRequest",
    full_name="google.cloud.tasks.v2.GetTaskRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.GetTaskRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A \n\036cloudtasks.googleapis.com/Task"
            ),
            file=DESCRIPTOR,
        ),
        # Enum field; enum_type is patched to _TASK_VIEW after all
        # descriptors are constructed.
        _descriptor.FieldDescriptor(
            name="response_view",
            full_name="google.cloud.tasks.v2.GetTaskRequest.response_view",
            index=1,
            number=2,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1510,
    serialized_end=1637,
)
# Descriptor for the CreateTaskRequest message (`parent` queue, the Task
# payload, and an optional `response_view` enum). Machine-generated by protoc —
# do not hand-edit.
_CREATETASKREQUEST = _descriptor.Descriptor(
    name="CreateTaskRequest",
    full_name="google.cloud.tasks.v2.CreateTaskRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="parent",
            full_name="google.cloud.tasks.v2.CreateTaskRequest.parent",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A \022\036cloudtasks.googleapis.com/Task"
            ),
            file=DESCRIPTOR,
        ),
        # Message field; message_type is patched to _TASK after all
        # descriptors are constructed.
        _descriptor.FieldDescriptor(
            name="task",
            full_name="google.cloud.tasks.v2.CreateTaskRequest.task",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b("\340A\002"),
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="response_view",
            full_name="google.cloud.tasks.v2.CreateTaskRequest.response_view",
            index=2,
            number=3,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1640,
    serialized_end=1820,
)
# Descriptor for the DeleteTaskRequest message (single required `name` field
# referencing a Task resource). Machine-generated by protoc — do not hand-edit.
_DELETETASKREQUEST = _descriptor.Descriptor(
    name="DeleteTaskRequest",
    full_name="google.cloud.tasks.v2.DeleteTaskRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.DeleteTaskRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A \n\036cloudtasks.googleapis.com/Task"
            ),
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1822,
    serialized_end=1895,
)
# Descriptor for the RunTaskRequest message (required Task `name` plus an
# optional `response_view` enum). Machine-generated by protoc — do not hand-edit.
_RUNTASKREQUEST = _descriptor.Descriptor(
    name="RunTaskRequest",
    full_name="google.cloud.tasks.v2.RunTaskRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.RunTaskRequest.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b(
                "\340A\002\372A \n\036cloudtasks.googleapis.com/Task"
            ),
            file=DESCRIPTOR,
        ),
        # Enum field; enum_type is patched to _TASK_VIEW after all
        # descriptors are constructed.
        _descriptor.FieldDescriptor(
            name="response_view",
            full_name="google.cloud.tasks.v2.RunTaskRequest.response_view",
            index=1,
            number=2,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1897,
    serialized_end=2024,
)
# --- Cross-link the descriptor graph --------------------------------------
# The FieldDescriptors above were built with message_type/enum_type=None
# because their target descriptors may not exist yet at construction time.
# Now that everything is defined, wire message and enum fields to the
# descriptors from the imported queue/task/field_mask modules.
_LISTQUEUESRESPONSE.fields_by_name[
    "queues"
].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE
_CREATEQUEUEREQUEST.fields_by_name[
    "queue"
].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE
_UPDATEQUEUEREQUEST.fields_by_name[
    "queue"
].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE
_UPDATEQUEUEREQUEST.fields_by_name[
    "update_mask"
].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_LISTTASKSREQUEST.fields_by_name[
    "response_view"
].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW
_LISTTASKSRESPONSE.fields_by_name[
    "tasks"
].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK
_GETTASKREQUEST.fields_by_name[
    "response_view"
].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW
_CREATETASKREQUEST.fields_by_name[
    "task"
].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK
_CREATETASKREQUEST.fields_by_name[
    "response_view"
].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW
_RUNTASKREQUEST.fields_by_name[
    "response_view"
].enum_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK_VIEW
# Register each message descriptor on the file descriptor by proto name.
DESCRIPTOR.message_types_by_name["ListQueuesRequest"] = _LISTQUEUESREQUEST
DESCRIPTOR.message_types_by_name["ListQueuesResponse"] = _LISTQUEUESRESPONSE
DESCRIPTOR.message_types_by_name["GetQueueRequest"] = _GETQUEUEREQUEST
DESCRIPTOR.message_types_by_name["CreateQueueRequest"] = _CREATEQUEUEREQUEST
DESCRIPTOR.message_types_by_name["UpdateQueueRequest"] = _UPDATEQUEUEREQUEST
DESCRIPTOR.message_types_by_name["DeleteQueueRequest"] = _DELETEQUEUEREQUEST
DESCRIPTOR.message_types_by_name["PurgeQueueRequest"] = _PURGEQUEUEREQUEST
DESCRIPTOR.message_types_by_name["PauseQueueRequest"] = _PAUSEQUEUEREQUEST
DESCRIPTOR.message_types_by_name["ResumeQueueRequest"] = _RESUMEQUEUEREQUEST
DESCRIPTOR.message_types_by_name["ListTasksRequest"] = _LISTTASKSREQUEST
DESCRIPTOR.message_types_by_name["ListTasksResponse"] = _LISTTASKSRESPONSE
DESCRIPTOR.message_types_by_name["GetTaskRequest"] = _GETTASKREQUEST
DESCRIPTOR.message_types_by_name["CreateTaskRequest"] = _CREATETASKREQUEST
DESCRIPTOR.message_types_by_name["DeleteTaskRequest"] = _DELETETASKREQUEST
DESCRIPTOR.message_types_by_name["RunTaskRequest"] = _RUNTASKREQUEST
# Make the file descriptor discoverable through the default symbol database.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete, user-facing message class synthesized at import time from
# _LISTQUEUESREQUEST via the reflection metaclass. The generated __doc__
# text below comes from the .proto comments — do not hand-edit.
ListQueuesRequest = _reflection.GeneratedProtocolMessageType(
    "ListQueuesRequest",
    (_message.Message,),
    dict(
        DESCRIPTOR=_LISTQUEUESREQUEST,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Request message for
  [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues].
  Attributes:
      parent:
          Required. The location name. For example:
          ``projects/PROJECT_ID/locations/LOCATION_ID``
      filter:
          \ ``filter`` can be used to specify a subset of queues. Any
          [Queue][google.cloud.tasks.v2.Queue] field can be used as a
          filter and several operators as supported. For example: ``<=,
          <, >=, >, !=, =, :``. The filter syntax is the same as
          described in `Stackdriver's Advanced Logs Filters <https://clo
          ud.google.com/logging/docs/view/advanced_filters>`_.  Sample
          filter "state: PAUSED".  Note that using filters might cause
          fewer queues than the requested page\_size to be returned.
      page_size:
          Requested page size.  The maximum page size is 9800. If
          unspecified, the page size will be the maximum. Fewer queues
          than requested might be returned, even if more queues exist;
          use the [next\_page\_token][google.cloud.tasks.v2.ListQueuesRe
          sponse.next\_page\_token] in the response to determine if more
          queues exist.
      page_token:
          A token identifying the page of results to return.  To request
          the first page results, page\_token must be empty. To request
          the next page of results, page\_token must be the value of [ne
          xt\_page\_token][google.cloud.tasks.v2.ListQueuesResponse.next
          \_page\_token] returned from the previous call to
          [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]
          method. It is an error to switch the value of the
          [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] while
          iterating through pages.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListQueuesRequest)
    ),
)
_sym_db.RegisterMessage(ListQueuesRequest)
# Concrete, user-facing message class synthesized at import time from
# _LISTQUEUESRESPONSE via the reflection metaclass. The generated __doc__
# text below comes from the .proto comments — do not hand-edit.
ListQueuesResponse = _reflection.GeneratedProtocolMessageType(
    "ListQueuesResponse",
    (_message.Message,),
    dict(
        DESCRIPTOR=_LISTQUEUESRESPONSE,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Response message for
  [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues].
  Attributes:
      queues:
          The list of queues.
      next_page_token:
          A token to retrieve next page of results.  To return the next
          page of results, call
          [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with
          this value as the [page\_token][google.cloud.tasks.v2.ListQueu
          esRequest.page\_token].  If the next\_page\_token is empty,
          there are no more results.  The page token is valid for only 2
          hours.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListQueuesResponse)
    ),
)
_sym_db.RegisterMessage(ListQueuesResponse)
| GetQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "GetQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_GETQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. | ||
| Attributes: | ||
| name: | ||
| Required. The resource name of the queue. For example: | ||
| ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.GetQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(GetQueueRequest) | ||
| CreateQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "CreateQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_CREATEQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. | ||
| Attributes: | ||
| parent: | ||
| Required. The location name in which the queue will be | ||
| created. For example: | ||
| ``projects/PROJECT_ID/locations/LOCATION_ID`` The list of | ||
| allowed locations can be obtained by calling Cloud Tasks' | ||
| implementation of [ListLocations][google.cloud.location.Locati | ||
| ons.ListLocations]. | ||
| queue: | ||
| Required. The queue to create. [Queue's | ||
| name][google.cloud.tasks.v2.Queue.name] cannot be the same as | ||
| an existing queue. | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.CreateQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(CreateQueueRequest) | ||
| UpdateQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "UpdateQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_UPDATEQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. | ||
| Attributes: | ||
| queue: | ||
| Required. The queue to create or update. The queue's | ||
| [name][google.cloud.tasks.v2.Queue.name] must be specified. | ||
| Output only fields cannot be modified using UpdateQueue. Any | ||
| value specified for an output only field will be ignored. The | ||
| queue's [name][google.cloud.tasks.v2.Queue.name] cannot be | ||
| changed. | ||
| update_mask: | ||
| A mask used to specify which fields of the queue are being | ||
| updated. If empty, then all fields will be updated. | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.UpdateQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(UpdateQueueRequest) | ||
| DeleteQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "DeleteQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_DELETEQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. | ||
| Attributes: | ||
| name: | ||
| Required. The queue name. For example: | ||
| ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.DeleteQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(DeleteQueueRequest) | ||
| PurgeQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "PurgeQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_PURGEQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. | ||
| Attributes: | ||
| name: | ||
| Required. The queue name. For example: | ||
| ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.PurgeQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(PurgeQueueRequest) | ||
| PauseQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "PauseQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_PAUSEQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. | ||
| Attributes: | ||
| name: | ||
| Required. The queue name. For example: | ||
| ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.PauseQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(PauseQueueRequest) | ||
| ResumeQueueRequest = _reflection.GeneratedProtocolMessageType( | ||
| "ResumeQueueRequest", | ||
| (_message.Message,), | ||
| dict( | ||
| DESCRIPTOR=_RESUMEQUEUEREQUEST, | ||
| __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2", | ||
| __doc__="""Request message for | ||
| [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. | ||
| Attributes: | ||
| name: | ||
| Required. The queue name. For example: | ||
| ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` | ||
| """, | ||
| # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ResumeQueueRequest) | ||
| ), | ||
| ) | ||
| _sym_db.RegisterMessage(ResumeQueueRequest) | ||
# ---------------------------------------------------------------------------
# Task-level request/response message classes (protoc output).
#
# Same pattern as the queue messages above: each class is synthesized from
# its pre-built descriptor and registered in the default symbol database.
# The __doc__ strings are runtime data copied from cloudtasks.proto —
# do not edit them by hand.
# ---------------------------------------------------------------------------
ListTasksRequest = _reflection.GeneratedProtocolMessageType(
    "ListTasksRequest",
    (_message.Message,),
    dict(
        DESCRIPTOR=_LISTTASKSREQUEST,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Request message for listing tasks using
  [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks].
  Attributes:
      parent:
          Required. The queue name. For example:
          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
      response_view:
          The response\_view specifies which subset of the
          [Task][google.cloud.tasks.v2.Task] will be returned. By
          default response\_view is
          [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all
          information is retrieved by default because some data, such as
          payloads, might be desirable to return only when needed
          because of its large size or because of the sensitivity of
          data that it contains. Authorization for
          [FULL][google.cloud.tasks.v2.Task.View.FULL] requires
          ``cloudtasks.tasks.fullView`` `Google IAM
          <https://cloud.google.com/iam/>`_ permission on the
          [Task][google.cloud.tasks.v2.Task] resource.
      page_size:
          Maximum page size. Fewer tasks than requested might be
          returned, even if more tasks exist; use [next\_page\_token][go
          ogle.cloud.tasks.v2.ListTasksResponse.next\_page\_token] in
          the response to determine if more tasks exist. The maximum
          page size is 1000. If unspecified, the page size will be the
          maximum.
      page_token:
          A token identifying the page of results to return. To request
          the first page results, page\_token must be empty. To request
          the next page of results, page\_token must be the value of [ne
          xt\_page\_token][google.cloud.tasks.v2.ListTasksResponse.next\
          _page\_token] returned from the previous call to
          [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]
          method. The page token is valid for only 2 hours.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListTasksRequest)
    ),
)
_sym_db.RegisterMessage(ListTasksRequest)

ListTasksResponse = _reflection.GeneratedProtocolMessageType(
    "ListTasksResponse",
    (_message.Message,),
    dict(
        DESCRIPTOR=_LISTTASKSRESPONSE,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Response message for listing tasks using
  [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks].
  Attributes:
      tasks:
          The list of tasks.
      next_page_token:
          A token to retrieve next page of results. To return the next
          page of results, call
          [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with
          this value as the [page\_token][google.cloud.tasks.v2.ListTask
          sRequest.page\_token]. If the next\_page\_token is empty,
          there are no more results.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.ListTasksResponse)
    ),
)
_sym_db.RegisterMessage(ListTasksResponse)

GetTaskRequest = _reflection.GeneratedProtocolMessageType(
    "GetTaskRequest",
    (_message.Message,),
    dict(
        DESCRIPTOR=_GETTASKREQUEST,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Request message for getting a task using
  [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask].
  Attributes:
      name:
          Required. The task name. For example: ``projects/PROJECT_ID/lo
          cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
      response_view:
          The response\_view specifies which subset of the
          [Task][google.cloud.tasks.v2.Task] will be returned. By
          default response\_view is
          [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all
          information is retrieved by default because some data, such as
          payloads, might be desirable to return only when needed
          because of its large size or because of the sensitivity of
          data that it contains. Authorization for
          [FULL][google.cloud.tasks.v2.Task.View.FULL] requires
          ``cloudtasks.tasks.fullView`` `Google IAM
          <https://cloud.google.com/iam/>`_ permission on the
          [Task][google.cloud.tasks.v2.Task] resource.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.GetTaskRequest)
    ),
)
_sym_db.RegisterMessage(GetTaskRequest)

CreateTaskRequest = _reflection.GeneratedProtocolMessageType(
    "CreateTaskRequest",
    (_message.Message,),
    dict(
        DESCRIPTOR=_CREATETASKREQUEST,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Request message for
  [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask].
  Attributes:
      parent:
          Required. The queue name. For example:
          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
          The queue must already exist.
      task:
          Required. The task to add. Task names have the following
          format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUE
          UE_ID/tasks/TASK_ID``. The user can optionally specify a task
          [name][google.cloud.tasks.v2.Task.name]. If a name is not
          specified then the system will generate a random unique task
          id, which will be set in the task returned in the
          [response][google.cloud.tasks.v2.Task.name]. If
          [schedule\_time][google.cloud.tasks.v2.Task.schedule\_time] is
          not set or is in the past then Cloud Tasks will set it to the
          current time. Task De-duplication: Explicitly specifying a
          task ID enables task de-duplication. If a task's ID is
          identical to that of an existing task or a task that was
          deleted or executed recently then the call will fail with
          [ALREADY\_EXISTS][google.rpc.Code.ALREADY\_EXISTS]. If the
          task's queue was created using Cloud Tasks, then another task
          with the same name can't be created for ~1hour after the
          original task was deleted or executed. If the task's queue was
          created using queue.yaml or queue.xml, then another task with
          the same name can't be created for ~9days after the original
          task was deleted or executed. Because there is an extra
          lookup cost to identify duplicate task names, these
          [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]
          calls have significantly increased latency. Using hashed
          strings for the task id or for the prefix of the task id is
          recommended. Choosing task ids that are sequential or have
          sequential prefixes, for example using a timestamp, causes an
          increase in latency and error rates in all task commands. The
          infrastructure relies on an approximately uniform distribution
          of task ids to store and serve tasks efficiently.
      response_view:
          The response\_view specifies which subset of the
          [Task][google.cloud.tasks.v2.Task] will be returned. By
          default response\_view is
          [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all
          information is retrieved by default because some data, such as
          payloads, might be desirable to return only when needed
          because of its large size or because of the sensitivity of
          data that it contains. Authorization for
          [FULL][google.cloud.tasks.v2.Task.View.FULL] requires
          ``cloudtasks.tasks.fullView`` `Google IAM
          <https://cloud.google.com/iam/>`_ permission on the
          [Task][google.cloud.tasks.v2.Task] resource.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.CreateTaskRequest)
    ),
)
_sym_db.RegisterMessage(CreateTaskRequest)

DeleteTaskRequest = _reflection.GeneratedProtocolMessageType(
    "DeleteTaskRequest",
    (_message.Message,),
    dict(
        DESCRIPTOR=_DELETETASKREQUEST,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Request message for deleting a task using
  [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask].
  Attributes:
      name:
          Required. The task name. For example: ``projects/PROJECT_ID/lo
          cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.DeleteTaskRequest)
    ),
)
_sym_db.RegisterMessage(DeleteTaskRequest)

RunTaskRequest = _reflection.GeneratedProtocolMessageType(
    "RunTaskRequest",
    (_message.Message,),
    dict(
        DESCRIPTOR=_RUNTASKREQUEST,
        __module__="google.cloud.tasks_v2.proto.cloudtasks_pb2",
        __doc__="""Request message for forcing a task to run now using
  [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask].
  Attributes:
      name:
          Required. The task name. For example: ``projects/PROJECT_ID/lo
          cations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
      response_view:
          The response\_view specifies which subset of the
          [Task][google.cloud.tasks.v2.Task] will be returned. By
          default response\_view is
          [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all
          information is retrieved by default because some data, such as
          payloads, might be desirable to return only when needed
          because of its large size or because of the sensitivity of
          data that it contains. Authorization for
          [FULL][google.cloud.tasks.v2.Task.View.FULL] requires
          ``cloudtasks.tasks.fullView`` `Google IAM
          <https://cloud.google.com/iam/>`_ permission on the
          [Task][google.cloud.tasks.v2.Task] resource.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.RunTaskRequest)
    ),
)
_sym_db.RegisterMessage(RunTaskRequest)
# Protoc-emitted boilerplate: clear the eagerly-parsed ``_options`` on the
# file descriptor and on each annotated field so the options are re-derived
# lazily from the ``serialized_options`` bytes set above.  Do not edit.
DESCRIPTOR._options = None
_LISTQUEUESREQUEST.fields_by_name["parent"]._options = None
_GETQUEUEREQUEST.fields_by_name["name"]._options = None
_CREATEQUEUEREQUEST.fields_by_name["parent"]._options = None
_CREATEQUEUEREQUEST.fields_by_name["queue"]._options = None
_UPDATEQUEUEREQUEST.fields_by_name["queue"]._options = None
_DELETEQUEUEREQUEST.fields_by_name["name"]._options = None
_PURGEQUEUEREQUEST.fields_by_name["name"]._options = None
_PAUSEQUEUEREQUEST.fields_by_name["name"]._options = None
_RESUMEQUEUEREQUEST.fields_by_name["name"]._options = None
_LISTTASKSREQUEST.fields_by_name["parent"]._options = None
_GETTASKREQUEST.fields_by_name["name"]._options = None
_CREATETASKREQUEST.fields_by_name["parent"]._options = None
_CREATETASKREQUEST.fields_by_name["task"]._options = None
_DELETETASKREQUEST.fields_by_name["name"]._options = None
_RUNTASKREQUEST.fields_by_name["name"]._options = None
# Service descriptor for google.cloud.tasks.v2.CloudTasks (protoc output).
# Each MethodDescriptor pairs a request/response type with serialized option
# bytes; the escaped byte strings embed the service's google.api annotations
# (the HTTP URL templates and parameter signatures are visible in the bytes).
# The serialized_start/serialized_end offsets index into the file's
# serialized_pb blob — never edit these values by hand.
_CLOUDTASKS = _descriptor.ServiceDescriptor(
    name="CloudTasks",
    full_name="google.cloud.tasks.v2.CloudTasks",
    file=DESCRIPTOR,
    index=0,
    serialized_options=_b(
        "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform"
    ),
    serialized_start=2027,
    serialized_end=4680,
    methods=[
        # Queue CRUD and lifecycle RPCs.
        _descriptor.MethodDescriptor(
            name="ListQueues",
            full_name="google.cloud.tasks.v2.CloudTasks.ListQueues",
            index=0,
            containing_service=None,
            input_type=_LISTQUEUESREQUEST,
            output_type=_LISTQUEUESRESPONSE,
            serialized_options=_b(
                "\202\323\344\223\002,\022*/v2/{parent=projects/*/locations/*}/queues\332A\006parent"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="GetQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.GetQueue",
            index=1,
            containing_service=None,
            input_type=_GETQUEUEREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE,
            serialized_options=_b(
                "\202\323\344\223\002,\022*/v2/{name=projects/*/locations/*/queues/*}\332A\004name"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="CreateQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.CreateQueue",
            index=2,
            containing_service=None,
            input_type=_CREATEQUEUEREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE,
            serialized_options=_b(
                '\202\323\344\223\0023"*/v2/{parent=projects/*/locations/*}/queues:\005queue\332A\014parent,queue'
            ),
        ),
        _descriptor.MethodDescriptor(
            name="UpdateQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.UpdateQueue",
            index=3,
            containing_service=None,
            input_type=_UPDATEQUEUEREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE,
            serialized_options=_b(
                "\202\323\344\223\002920/v2/{queue.name=projects/*/locations/*/queues/*}:\005queue\332A\021queue,update_mask"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="DeleteQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.DeleteQueue",
            index=4,
            containing_service=None,
            input_type=_DELETEQUEUEREQUEST,
            output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
            serialized_options=_b(
                "\202\323\344\223\002,**/v2/{name=projects/*/locations/*/queues/*}\332A\004name"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="PurgeQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.PurgeQueue",
            index=5,
            containing_service=None,
            input_type=_PURGEQUEUEREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE,
            serialized_options=_b(
                '\202\323\344\223\0025"0/v2/{name=projects/*/locations/*/queues/*}:purge:\001*\332A\004name'
            ),
        ),
        _descriptor.MethodDescriptor(
            name="PauseQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.PauseQueue",
            index=6,
            containing_service=None,
            input_type=_PAUSEQUEUEREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE,
            serialized_options=_b(
                '\202\323\344\223\0025"0/v2/{name=projects/*/locations/*/queues/*}:pause:\001*\332A\004name'
            ),
        ),
        _descriptor.MethodDescriptor(
            name="ResumeQueue",
            full_name="google.cloud.tasks.v2.CloudTasks.ResumeQueue",
            index=7,
            containing_service=None,
            input_type=_RESUMEQUEUEREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_queue__pb2._QUEUE,
            serialized_options=_b(
                '\202\323\344\223\0026"1/v2/{name=projects/*/locations/*/queues/*}:resume:\001*\332A\004name'
            ),
        ),
        # IAM RPCs — request/response types come from google.iam.v1.
        _descriptor.MethodDescriptor(
            name="GetIamPolicy",
            full_name="google.cloud.tasks.v2.CloudTasks.GetIamPolicy",
            index=8,
            containing_service=None,
            input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST,
            output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY,
            serialized_options=_b(
                '\202\323\344\223\002@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\001*\332A\010resource'
            ),
        ),
        _descriptor.MethodDescriptor(
            name="SetIamPolicy",
            full_name="google.cloud.tasks.v2.CloudTasks.SetIamPolicy",
            index=9,
            containing_service=None,
            input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST,
            output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY,
            serialized_options=_b(
                '\202\323\344\223\002@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\001*\332A\017resource,policy'
            ),
        ),
        _descriptor.MethodDescriptor(
            name="TestIamPermissions",
            full_name="google.cloud.tasks.v2.CloudTasks.TestIamPermissions",
            index=10,
            containing_service=None,
            input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST,
            output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE,
            serialized_options=_b(
                '\202\323\344\223\002F"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\001*\332A\024resource,permissions'
            ),
        ),
        # Task RPCs.
        _descriptor.MethodDescriptor(
            name="ListTasks",
            full_name="google.cloud.tasks.v2.CloudTasks.ListTasks",
            index=11,
            containing_service=None,
            input_type=_LISTTASKSREQUEST,
            output_type=_LISTTASKSRESPONSE,
            serialized_options=_b(
                "\202\323\344\223\0024\0222/v2/{parent=projects/*/locations/*/queues/*}/tasks\332A\006parent"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="GetTask",
            full_name="google.cloud.tasks.v2.CloudTasks.GetTask",
            index=12,
            containing_service=None,
            input_type=_GETTASKREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK,
            serialized_options=_b(
                "\202\323\344\223\0024\0222/v2/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="CreateTask",
            full_name="google.cloud.tasks.v2.CloudTasks.CreateTask",
            index=13,
            containing_service=None,
            input_type=_CREATETASKREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK,
            serialized_options=_b(
                '\202\323\344\223\0027"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\001*\332A\013parent,task'
            ),
        ),
        _descriptor.MethodDescriptor(
            name="DeleteTask",
            full_name="google.cloud.tasks.v2.CloudTasks.DeleteTask",
            index=14,
            containing_service=None,
            input_type=_DELETETASKREQUEST,
            output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
            serialized_options=_b(
                "\202\323\344\223\0024*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\332A\004name"
            ),
        ),
        _descriptor.MethodDescriptor(
            name="RunTask",
            full_name="google.cloud.tasks.v2.CloudTasks.RunTask",
            index=15,
            containing_service=None,
            input_type=_RUNTASKREQUEST,
            output_type=google_dot_cloud_dot_tasks__v2_dot_proto_dot_task__pb2._TASK,
            serialized_options=_b(
                '\202\323\344\223\002;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\001*\332A\004name'
            ),
        ),
    ],
)
# Register the service and make it addressable from the file descriptor.
_sym_db.RegisterServiceDescriptor(_CLOUDTASKS)
DESCRIPTOR.services_by_name["CloudTasks"] = _CLOUDTASKS
| # @@protoc_insertion_point(module_scope) |
| # -*- coding: utf-8 -*- | ||
| # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| # source: google/cloud/tasks_v2/proto/queue.proto | ||
import sys

# Python 2/3 shim used for the descriptor byte blobs below: on Python 3 the
# latin-1 source literals must be encoded to bytes, on Python 2 they already
# are bytes.  Emitted verbatim by this protoc version.
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)

# Default symbol database; the generated messages of this module are
# registered here so they can be looked up by full proto name.
_sym_db = _symbol_database.Default()

# Generated modules for the .proto files queue.proto imports.
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from ..proto import (
    target_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2,
)
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
# File descriptor for google/cloud/tasks_v2/proto/queue.proto.
# ``serialized_pb`` is the byte-serialized FileDescriptorProto emitted by
# protoc; all message/enum descriptors below index into it by offset, so the
# blob must never be edited by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="google/cloud/tasks_v2/proto/queue.proto",
    package="google.cloud.tasks.v2",
    syntax="proto3",
    serialized_options=_b(
        "\n\031com.google.cloud.tasks.v2B\nQueueProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks"
    ),
    serialized_pb=_b(
        '\n\'google/cloud/tasks_v2/proto/queue.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xb2\x04\n\x05Queue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12L\n\x1b\x61pp_engine_routing_override\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x36\n\x0brate_limits\x18\x03 \x01(\x0b\x32!.google.cloud.tasks.v2.RateLimits\x12\x38\n\x0cretry_config\x18\x04 \x01(\x0b\x32".google.cloud.tasks.v2.RetryConfig\x12\x31\n\x05state\x18\x05 \x01(\x0e\x32".google.cloud.tasks.v2.Queue.State\x12.\n\npurge_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12S\n\x1astackdriver_logging_config\x18\t \x01(\x0b\x32/.google.cloud.tasks.v2.StackdriverLoggingConfig"E\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03:\\\xea\x41Y\n\x1f\x63loudtasks.googleapis.com/Queue\x12\x36projects/{project}/locations/{location}/queues/{queue}"j\n\nRateLimits\x12!\n\x19max_dispatches_per_second\x18\x01 \x01(\x01\x12\x16\n\x0emax_burst_size\x18\x02 \x01(\x05\x12!\n\x19max_concurrent_dispatches\x18\x03 \x01(\x05"\xd1\x01\n\x0bRetryConfig\x12\x14\n\x0cmax_attempts\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmin_backoff\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmax_backoff\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05"2\n\x18StackdriverLoggingConfig\x12\x16\n\x0esampling_ratio\x18\x01 \x01(\x01\x42\x65\n\x19\x63om.google.cloud.tasks.v2B\nQueueProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3'
    ),
    dependencies=[
        google_dot_api_dot_resource__pb2.DESCRIPTOR,
        google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2.DESCRIPTOR,
        google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,
        google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
    ],
)
# Enum descriptor for google.cloud.tasks.v2.Queue.State
# (STATE_UNSPECIFIED / RUNNING / PAUSED / DISABLED).  The serialized_start /
# serialized_end offsets index into DESCRIPTOR.serialized_pb — do not edit.
_QUEUE_STATE = _descriptor.EnumDescriptor(
    name="State",
    full_name="google.cloud.tasks.v2.Queue.State",
    filename=None,
    file=DESCRIPTOR,
    values=[
        _descriptor.EnumValueDescriptor(
            name="STATE_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
        ),
        _descriptor.EnumValueDescriptor(
            name="RUNNING", index=1, number=1, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="PAUSED", index=2, number=2, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="DISABLED", index=3, number=3, serialized_options=None, type=None
        ),
    ],
    containing_type=None,
    serialized_options=None,
    serialized_start=630,
    serialized_end=699,
)
_sym_db.RegisterEnumDescriptor(_QUEUE_STATE)
# Message descriptor for google.cloud.tasks.v2.Queue (protoc output).
# The numeric codes follow google.protobuf.descriptor_pb2 conventions:
# type=9 TYPE_STRING, type=11 TYPE_MESSAGE, type=14 TYPE_ENUM;
# cpp_type=9 CPPTYPE_STRING, 10 CPPTYPE_MESSAGE, 8 CPPTYPE_ENUM;
# label=1 LABEL_OPTIONAL.  ``number`` is the proto field tag (note field 9,
# stackdriver_logging_config, is at message index 6 — tags 7/8 are unused
# in this revision).  Offsets index into serialized_pb — do not edit.
_QUEUE = _descriptor.Descriptor(
    name="Queue",
    full_name="google.cloud.tasks.v2.Queue",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.Queue.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="app_engine_routing_override",
            full_name="google.cloud.tasks.v2.Queue.app_engine_routing_override",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="rate_limits",
            full_name="google.cloud.tasks.v2.Queue.rate_limits",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="retry_config",
            full_name="google.cloud.tasks.v2.Queue.retry_config",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="state",
            full_name="google.cloud.tasks.v2.Queue.state",
            index=4,
            number=5,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="purge_time",
            full_name="google.cloud.tasks.v2.Queue.purge_time",
            index=5,
            number=6,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        _descriptor.FieldDescriptor(
            name="stackdriver_logging_config",
            full_name="google.cloud.tasks.v2.Queue.stackdriver_logging_config",
            index=6,
            number=9,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[_QUEUE_STATE],
    # Encoded google.api.resource annotation (the resource name pattern is
    # readable in the bytes below).
    serialized_options=_b(
        "\352AY\n\037cloudtasks.googleapis.com/Queue\0226projects/{project}/locations/{location}/queues/{queue}"
    ),
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=231,
    serialized_end=793,
)
# Message descriptor for google.cloud.tasks.v2.RateLimits.
# NOTE(review): protoc-generated — do not hand-edit.  The
# serialized_start/serialized_end offsets index into
# DESCRIPTOR.serialized_pb and must stay in sync with it.
# Field type codes follow descriptor.proto: type=1 is TYPE_DOUBLE,
# type=5 is TYPE_INT32; label=1 is LABEL_OPTIONAL.
_RATELIMITS = _descriptor.Descriptor(
    name="RateLimits",
    full_name="google.cloud.tasks.v2.RateLimits",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # double max_dispatches_per_second = 1;
        _descriptor.FieldDescriptor(
            name="max_dispatches_per_second",
            full_name="google.cloud.tasks.v2.RateLimits.max_dispatches_per_second",
            index=0,
            number=1,
            type=1,
            cpp_type=5,
            label=1,
            has_default_value=False,
            default_value=float(0),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # int32 max_burst_size = 2;
        _descriptor.FieldDescriptor(
            name="max_burst_size",
            full_name="google.cloud.tasks.v2.RateLimits.max_burst_size",
            index=1,
            number=2,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # int32 max_concurrent_dispatches = 3;
        _descriptor.FieldDescriptor(
            name="max_concurrent_dispatches",
            full_name="google.cloud.tasks.v2.RateLimits.max_concurrent_dispatches",
            index=2,
            number=3,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=795,
    serialized_end=901,
)
# Message descriptor for google.cloud.tasks.v2.RetryConfig.
# NOTE(review): protoc-generated — do not hand-edit.  type=11 fields are
# TYPE_MESSAGE; their .message_type is patched to the Duration
# descriptor after all descriptors are constructed (see the wiring
# statements following _STACKDRIVERLOGGINGCONFIG).
_RETRYCONFIG = _descriptor.Descriptor(
    name="RetryConfig",
    full_name="google.cloud.tasks.v2.RetryConfig",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # int32 max_attempts = 1;
        _descriptor.FieldDescriptor(
            name="max_attempts",
            full_name="google.cloud.tasks.v2.RetryConfig.max_attempts",
            index=0,
            number=1,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # google.protobuf.Duration max_retry_duration = 2;
        _descriptor.FieldDescriptor(
            name="max_retry_duration",
            full_name="google.cloud.tasks.v2.RetryConfig.max_retry_duration",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # google.protobuf.Duration min_backoff = 3;
        _descriptor.FieldDescriptor(
            name="min_backoff",
            full_name="google.cloud.tasks.v2.RetryConfig.min_backoff",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # google.protobuf.Duration max_backoff = 4;
        _descriptor.FieldDescriptor(
            name="max_backoff",
            full_name="google.cloud.tasks.v2.RetryConfig.max_backoff",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # int32 max_doublings = 5;
        _descriptor.FieldDescriptor(
            name="max_doublings",
            full_name="google.cloud.tasks.v2.RetryConfig.max_doublings",
            index=4,
            number=5,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=904,
    serialized_end=1113,
)
# Message descriptor for google.cloud.tasks.v2.StackdriverLoggingConfig
# (a single double field).  NOTE(review): protoc-generated — do not
# hand-edit.
_STACKDRIVERLOGGINGCONFIG = _descriptor.Descriptor(
    name="StackdriverLoggingConfig",
    full_name="google.cloud.tasks.v2.StackdriverLoggingConfig",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # double sampling_ratio = 1;
        _descriptor.FieldDescriptor(
            name="sampling_ratio",
            full_name="google.cloud.tasks.v2.StackdriverLoggingConfig.sampling_ratio",
            index=0,
            number=1,
            type=1,
            cpp_type=5,
            label=1,
            has_default_value=False,
            default_value=float(0),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        )
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=1115,
    serialized_end=1165,
)
# Post-construction wiring (standard protoc epilogue): resolve each
# message/enum-typed field to its Descriptor object — these references
# could not be expressed inline above because the target descriptors are
# defined in other modules or later in this file — then register every
# top-level message type on the file descriptor.
_QUEUE.fields_by_name[
    "app_engine_routing_override"
].message_type = (
    google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._APPENGINEROUTING
)
_QUEUE.fields_by_name["rate_limits"].message_type = _RATELIMITS
_QUEUE.fields_by_name["retry_config"].message_type = _RETRYCONFIG
_QUEUE.fields_by_name["state"].enum_type = _QUEUE_STATE
_QUEUE.fields_by_name[
    "purge_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_QUEUE.fields_by_name[
    "stackdriver_logging_config"
].message_type = _STACKDRIVERLOGGINGCONFIG
# Queue.State is a nested enum; attach it to its containing message.
_QUEUE_STATE.containing_type = _QUEUE
_RETRYCONFIG.fields_by_name[
    "max_retry_duration"
].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_RETRYCONFIG.fields_by_name[
    "min_backoff"
].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_RETRYCONFIG.fields_by_name[
    "max_backoff"
].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
DESCRIPTOR.message_types_by_name["Queue"] = _QUEUE
DESCRIPTOR.message_types_by_name["RateLimits"] = _RATELIMITS
DESCRIPTOR.message_types_by_name["RetryConfig"] = _RETRYCONFIG
DESCRIPTOR.message_types_by_name["StackdriverLoggingConfig"] = _STACKDRIVERLOGGINGCONFIG
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete Queue message class, synthesized at import time from _QUEUE
# by the protobuf reflection machinery.  The __doc__ text below is
# carried over verbatim from the .proto comments by the generator.
Queue = _reflection.GeneratedProtocolMessageType(
    "Queue",
    (_message.Message,),
    dict(
        DESCRIPTOR=_QUEUE,
        __module__="google.cloud.tasks_v2.proto.queue_pb2",
        __doc__="""A queue is a container of related tasks. Queues are
  configured to manage how those tasks are dispatched. Configurable
  properties include rate limits, retry options, queue types, and others.
  Attributes:
      name:
          Caller-specified and required in
          [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue],
          after which it becomes output only.  The queue name.  The
          queue name must have the following format:
          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
          -  ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers
             ([0-9]), hyphens (-), colons (:), or periods (.).  For more
             information, see `Identifying projects
             <https://cloud.google.com/resource-manager/docs/creating-
             managing-projects#identifying_projects>`_ -  ``LOCATION_ID``
             is the canonical ID for the queue's location.  The list of
             available locations can be obtained by calling [ListLocatio
             ns][google.cloud.location.Locations.ListLocations].  For
             more information, see
             https://cloud.google.com/about/locations/. -  ``QUEUE_ID`` can
             contain letters ([A-Za-z]), numbers ([0-9]), or hyphens
             (-). The maximum length is 100 characters.
      app_engine_routing_override:
          Overrides for [task-level app\_engine\_routing][google.cloud.t
          asks.v2.AppEngineHttpRequest.app\_engine\_routing].  These
          settings apply only to [App Engine
          tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this
          queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are not
          affected.  If set, ``app_engine_routing_override`` is used for
          all [App Engine
          tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the
          queue, no matter what the setting is for the [task-level app\_
          engine\_routing][google.cloud.tasks.v2.AppEngineHttpRequest.ap
          p\_engine\_routing].
      rate_limits:
          Rate limits for task dispatches.
          [rate\_limits][google.cloud.tasks.v2.Queue.rate\_limits] and
          [retry\_config][google.cloud.tasks.v2.Queue.retry\_config] are
          related because they both control task attempts. However they
          control task attempts in different ways:  -
          [rate\_limits][google.cloud.tasks.v2.Queue.rate\_limits]
          controls the total rate of    dispatches from a queue (i.e.
          all traffic dispatched from the    queue, regardless of
          whether the dispatch is from a first    attempt or a retry). -
          [retry\_config][google.cloud.tasks.v2.Queue.retry\_config]
          controls what happens to    particular a task after its first
          attempt fails. That is,
          [retry\_config][google.cloud.tasks.v2.Queue.retry\_config]
          controls task retries (the    second attempt, third attempt,
          etc).  The queue's actual dispatch rate is the result of:  -
          Number of tasks in the queue -  User-specified throttling:
          [rate\_limits][google.cloud.tasks.v2.Queue.rate\_limits],
          [retry\_config][google.cloud.tasks.v2.Queue.retry\_config],
          and the    [queue's state][google.cloud.tasks.v2.Queue.state].
          -  System throttling due to ``429`` (Too Many Requests) or
          ``503`` (Service    Unavailable) responses from the worker,
          high error rates, or to    smooth sudden large traffic spikes.
      retry_config:
          Settings that determine the retry behavior.  -  For tasks
          created using Cloud Tasks: the queue-level retry settings
          apply to all tasks in the queue that were created using Cloud
          Tasks.    Retry settings cannot be set on individual tasks. -
          For tasks created using the App Engine SDK: the queue-level
          retry settings apply to all tasks in the queue which do not
          have retry settings explicitly set on the task and were
          created by the App Engine SDK. See  `App Engine
          documentation <https://cloud.google.com/appengine/docs/standar
          d/python/taskqueue/push/retrying-tasks>`_.
      state:
          Output only. The state of the queue.  ``state`` can only be
          changed by called
          [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue],
          [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue],
          or uploading `queue.yaml/xml <https://cloud.google.com/appengi
          ne/docs/python/config/queueref>`_.
          [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]
          cannot be used to change ``state``.
      purge_time:
          Output only. The last time this queue was purged.  All tasks
          that were [created][google.cloud.tasks.v2.Task.create\_time]
          before this time were purged.  A queue can be purged using
          [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], the
          `App Engine Task Queue SDK, or the Cloud Console <https://clou
          d.google.com/appengine/docs/standard/python/taskqueue/push/del
          eting-tasks-and-queues#purging_all_tasks_from_a_queue>`_.
          Purge time will be truncated to the nearest microsecond. Purge
          time will be unset if the queue has never been purged.
      stackdriver_logging_config:
          Configuration options for writing logs to `Stackdriver Logging
          <https://cloud.google.com/logging/docs/>`_. If this field is
          unset, then no logs are written.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.Queue)
    ),
)
_sym_db.RegisterMessage(Queue)
# Concrete RateLimits message class, synthesized from _RATELIMITS.
# The __doc__ text is carried over verbatim from the .proto comments.
RateLimits = _reflection.GeneratedProtocolMessageType(
    "RateLimits",
    (_message.Message,),
    dict(
        DESCRIPTOR=_RATELIMITS,
        __module__="google.cloud.tasks_v2.proto.queue_pb2",
        __doc__="""Rate limits.
  This message determines the maximum rate that tasks can be dispatched by
  a queue, regardless of whether the dispatch is a first task attempt or a
  retry.
  Note: The debugging command,
  [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task
  even if the queue has reached its
  [RateLimits][google.cloud.tasks.v2.RateLimits].
  Attributes:
      max_dispatches_per_second:
          The maximum rate at which tasks are dispatched from this
          queue.  If unspecified when the queue is created, Cloud Tasks
          will pick the default.  -  The maximum allowed value is 500.
          This field has the same meaning as `rate in queue.yaml/xml <ht
          tps://cloud.google.com/appengine/docs/standard/python/config/q
          ueueref#rate>`_.
      max_burst_size:
          Output only. The max burst size.  Max burst size limits how
          fast tasks in queue are processed when many tasks are in the
          queue and the rate is high. This field allows the queue to
          have a high rate so processing starts shortly after a task is
          enqueued, but still limits resource usage when many tasks are
          enqueued in a short period of time.  The `token bucket
          <https://wikipedia.org/wiki/Token_Bucket>`_ algorithm is used
          to control the rate of task dispatches. Each queue has a token
          bucket that holds tokens, up to the maximum specified by
          ``max_burst_size``. Each time a task is dispatched, a token is
          removed from the bucket. Tasks will be dispatched until the
          queue's bucket runs out of tokens. The bucket will be
          continuously refilled with new tokens based on [max\_dispatche
          s\_per\_second][google.cloud.tasks.v2.RateLimits.max\_dispatch
          es\_per\_second].  Cloud Tasks will pick the value of
          ``max_burst_size`` based on the value of [max\_dispatches\_per
          \_second][google.cloud.tasks.v2.RateLimits.max\_dispatches\_pe
          r\_second].  For queues that were created or updated using
          ``queue.yaml/xml``, ``max_burst_size`` is equal to
          `bucket\_size <https://cloud.google.com/appengine/docs/standar
          d/python/config/queueref#bucket_size>`_. Since
          ``max_burst_size`` is output only, if
          [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] is
          called on a queue created by ``queue.yaml/xml``,
          ``max_burst_size`` will be reset based on the value of [max\_d
          ispatches\_per\_second][google.cloud.tasks.v2.RateLimits.max\_
          dispatches\_per\_second], regardless of whether [max\_dispatch
          es\_per\_second][google.cloud.tasks.v2.RateLimits.max\_dispatc
          hes\_per\_second] is updated.
      max_concurrent_dispatches:
          The maximum number of concurrent tasks that Cloud Tasks allows
          to be dispatched for this queue. After this threshold has been
          reached, Cloud Tasks stops dispatching tasks until the number
          of concurrent requests decreases.  If unspecified when the
          queue is created, Cloud Tasks will pick the default.  The
          maximum allowed value is 5,000.  This field has the same
          meaning as `max\_concurrent\_requests in queue.yaml/xml <https
          ://cloud.google.com/appengine/docs/standard/python/config/queu
          eref#max_concurrent_requests>`_.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.RateLimits)
    ),
)
_sym_db.RegisterMessage(RateLimits)
# Concrete RetryConfig message class, synthesized from _RETRYCONFIG.
# The __doc__ text is carried over verbatim from the .proto comments
# (including upstream wording such as "retries retries") and must not
# be edited here — it would be clobbered on regeneration.
RetryConfig = _reflection.GeneratedProtocolMessageType(
    "RetryConfig",
    (_message.Message,),
    dict(
        DESCRIPTOR=_RETRYCONFIG,
        __module__="google.cloud.tasks_v2.proto.queue_pb2",
        __doc__="""Retry config.
  These settings determine when a failed task attempt is retried.
  Attributes:
      max_attempts:
          Number of attempts per task.  Cloud Tasks will attempt the
          task ``max_attempts`` times (that is, if the first attempt
          fails, then there will be ``max_attempts - 1`` retries). Must
          be >= -1.  If unspecified when the queue is created, Cloud
          Tasks will pick the default.  -1 indicates unlimited attempts.
          This field has the same meaning as `task\_retry\_limit in
          queue.yaml/xml <https://cloud.google.com/appengine/docs/standa
          rd/python/config/queueref#retry_parameters>`_.
      max_retry_duration:
          If positive, ``max_retry_duration`` specifies the time limit
          for retrying a failed task, measured from when the task was
          first attempted. Once ``max_retry_duration`` time has passed
          *and* the task has been attempted [max\_attempts][google.cloud
          .tasks.v2.RetryConfig.max\_attempts] times, no further
          attempts will be made and the task will be deleted.  If zero,
          then the task age is unlimited.  If unspecified when the queue
          is created, Cloud Tasks will pick the default.
          ``max_retry_duration`` will be truncated to the nearest
          second.  This field has the same meaning as `task\_age\_limit
          in queue.yaml/xml <https://cloud.google.com/appengine/docs/sta
          ndard/python/config/queueref#retry_parameters>`_.
      min_backoff:
          A task will be
          [scheduled][google.cloud.tasks.v2.Task.schedule\_time] for
          retry between
          [min\_backoff][google.cloud.tasks.v2.RetryConfig.min\_backoff]
          and
          [max\_backoff][google.cloud.tasks.v2.RetryConfig.max\_backoff]
          duration after it fails, if the queue's
          [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies
          that the task should be retried.  If unspecified when the
          queue is created, Cloud Tasks will pick the default.
          ``min_backoff`` will be truncated to the nearest second.  This
          field has the same meaning as `min\_backoff\_seconds in
          queue.yaml/xml <https://cloud.google.com/appengine/docs/standa
          rd/python/config/queueref#retry_parameters>`_.
      max_backoff:
          A task will be
          [scheduled][google.cloud.tasks.v2.Task.schedule\_time] for
          retry between
          [min\_backoff][google.cloud.tasks.v2.RetryConfig.min\_backoff]
          and
          [max\_backoff][google.cloud.tasks.v2.RetryConfig.max\_backoff]
          duration after it fails, if the queue's
          [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies
          that the task should be retried.  If unspecified when the
          queue is created, Cloud Tasks will pick the default.
          ``max_backoff`` will be truncated to the nearest second.  This
          field has the same meaning as `max\_backoff\_seconds in
          queue.yaml/xml <https://cloud.google.com/appengine/docs/standa
          rd/python/config/queueref#retry_parameters>`_.
      max_doublings:
          The time between retries will double ``max_doublings`` times.
          A task's retry interval starts at [min\_backoff][google.cloud.
          tasks.v2.RetryConfig.min\_backoff], then doubles
          ``max_doublings`` times, then increases linearly, and finally
          retries retries at intervals of
          [max\_backoff][google.cloud.tasks.v2.RetryConfig.max\_backoff]
          up to [max\_attempts][google.cloud.tasks.v2.RetryConfig.max\_a
          ttempts] times.  For example, if
          [min\_backoff][google.cloud.tasks.v2.RetryConfig.min\_backoff]
          is 10s,
          [max\_backoff][google.cloud.tasks.v2.RetryConfig.max\_backoff]
          is 300s, and ``max_doublings`` is 3, then the a task will
          first be retried in 10s. The retry interval will double three
          times, and then increase linearly by 2^3 \* 10s.  Finally, the
          task will retry at intervals of
          [max\_backoff][google.cloud.tasks.v2.RetryConfig.max\_backoff]
          until the task has been attempted [max\_attempts][google.cloud
          .tasks.v2.RetryConfig.max\_attempts] times. Thus, the requests
          will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, ....
          If unspecified when the queue is created, Cloud Tasks will
          pick the default.  This field has the same meaning as
          `max\_doublings in queue.yaml/xml <https://cloud.google.com/ap
          pengine/docs/standard/python/config/queueref#retry_parameters>`__.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.RetryConfig)
    ),
)
_sym_db.RegisterMessage(RetryConfig)
# Concrete StackdriverLoggingConfig message class, synthesized from
# _STACKDRIVERLOGGINGCONFIG.  __doc__ carried over from the .proto file.
StackdriverLoggingConfig = _reflection.GeneratedProtocolMessageType(
    "StackdriverLoggingConfig",
    (_message.Message,),
    dict(
        DESCRIPTOR=_STACKDRIVERLOGGINGCONFIG,
        __module__="google.cloud.tasks_v2.proto.queue_pb2",
        __doc__="""Configuration options for writing logs to `Stackdriver
  Logging <https://cloud.google.com/logging/docs/>`_.
  Attributes:
      sampling_ratio:
          Specifies the fraction of operations to write to `Stackdriver
          Logging <https://cloud.google.com/logging/docs/>`_. This
          field may contain any value between 0.0 and 1.0, inclusive.
          0.0 is the default and means that no operations are logged.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.StackdriverLoggingConfig)
    ),
)
_sym_db.RegisterMessage(StackdriverLoggingConfig)
# Standard protoc epilogue: clear the cached options objects so that
# options are materialized from serialized_options on demand.
DESCRIPTOR._options = None
_QUEUE._options = None
# @@protoc_insertion_point(module_scope)
| # -*- coding: utf-8 -*- | ||
| # Generated by the protocol buffer compiler. DO NOT EDIT! | ||
| # source: google/cloud/tasks_v2/proto/target.proto | ||
| import sys | ||
| _b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) | ||
| from google.protobuf.internal import enum_type_wrapper | ||
| from google.protobuf import descriptor as _descriptor | ||
| from google.protobuf import message as _message | ||
| from google.protobuf import reflection as _reflection | ||
| from google.protobuf import symbol_database as _symbol_database | ||
| # @@protoc_insertion_point(imports) | ||
| _sym_db = _symbol_database.Default() | ||
| from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 | ||
| from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 | ||
# File descriptor for google/cloud/tasks_v2/proto/target.proto.  The
# serialized_pb blob is the compiled proto file itself; every
# serialized_start/serialized_end offset in the descriptors below
# indexes into this blob and must not be changed by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
    name="google/cloud/tasks_v2/proto/target.proto",
    package="google.cloud.tasks.v2",
    syntax="proto3",
    serialized_options=_b(
        "\n\031com.google.cloud.tasks.v2B\013TargetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks"
    ),
    serialized_pb=_b(
        '\n(google/cloud/tasks_v2/proto/target.proto\x12\x15google.cloud.tasks.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe1\x02\n\x0bHttpRequest\x12\x10\n\x03url\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\x0bhttp_method\x18\x02 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.google.cloud.tasks.v2.HttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x38\n\x0boauth_token\x18\x05 \x01(\x0b\x32!.google.cloud.tasks.v2.OAuthTokenH\x00\x12\x36\n\noidc_token\x18\x06 \x01(\x0b\x32 .google.cloud.tasks.v2.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xb2\x02\n\x14\x41ppEngineHttpRequest\x12\x36\n\x0bhttp_method\x18\x01 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12\x43\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12I\n\x07headers\x18\x04 \x03(\x0b\x32\x38.google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\x66\n\x19\x63om.google.cloud.tasks.v2B\x0bTargetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3'
    ),
    dependencies=[
        google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,
        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
    ],
)
# Enum descriptor for the top-level google.cloud.tasks.v2.HttpMethod
# enum (the HTTP verb used when dispatching a task).  NOTE(review):
# protoc-generated — do not hand-edit.
_HTTPMETHOD = _descriptor.EnumDescriptor(
    name="HttpMethod",
    full_name="google.cloud.tasks.v2.HttpMethod",
    filename=None,
    file=DESCRIPTOR,
    values=[
        _descriptor.EnumValueDescriptor(
            name="HTTP_METHOD_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
        ),
        _descriptor.EnumValueDescriptor(
            name="POST", index=1, number=1, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="GET", index=2, number=2, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="HEAD", index=3, number=3, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="PUT", index=4, number=4, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="DELETE", index=5, number=5, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="PATCH", index=6, number=6, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="OPTIONS", index=7, number=7, serialized_options=None, type=None
        ),
    ],
    containing_type=None,
    serialized_options=None,
    serialized_start=1003,
    serialized_end=1118,
)
# Register the enum and expose both a wrapper object (HttpMethod) and
# flat module-level aliases for each value, matching the generated-code
# convention for top-level proto enums.
_sym_db.RegisterEnumDescriptor(_HTTPMETHOD)
HttpMethod = enum_type_wrapper.EnumTypeWrapper(_HTTPMETHOD)
HTTP_METHOD_UNSPECIFIED = 0
POST = 1
GET = 2
HEAD = 3
PUT = 4
DELETE = 5
PATCH = 6
OPTIONS = 7
# Synthetic map-entry descriptor backing the map<string, string>
# "headers" field of HttpRequest.  serialized_options "8\001" marks it
# as a map entry (map_entry=true).  NOTE(review): protoc-generated —
# do not hand-edit.
_HTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor(
    name="HeadersEntry",
    full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # string key = 1;
        _descriptor.FieldDescriptor(
            name="key",
            full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.key",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # string value = 2;
        _descriptor.FieldDescriptor(
            name="value",
            full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.value",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=_b("8\001"),
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=414,
    serialized_end=460,
)
# Message descriptor for google.cloud.tasks.v2.HttpRequest.  Declares
# the "authorization_header" oneof (oauth_token / oidc_token); the
# oneof's fields list is populated by the post-construction wiring
# later in this module.  NOTE(review): protoc-generated — do not
# hand-edit.
_HTTPREQUEST = _descriptor.Descriptor(
    name="HttpRequest",
    full_name="google.cloud.tasks.v2.HttpRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # string url = 1; serialized_options "\340A\002" is the
        # (google.api.field_behavior) = REQUIRED annotation.
        _descriptor.FieldDescriptor(
            name="url",
            full_name="google.cloud.tasks.v2.HttpRequest.url",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=_b("\340A\002"),
            file=DESCRIPTOR,
        ),
        # HttpMethod http_method = 2; (enum, type=14)
        _descriptor.FieldDescriptor(
            name="http_method",
            full_name="google.cloud.tasks.v2.HttpRequest.http_method",
            index=1,
            number=2,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # map<string, string> headers = 3; (repeated HeadersEntry, label=3)
        _descriptor.FieldDescriptor(
            name="headers",
            full_name="google.cloud.tasks.v2.HttpRequest.headers",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # bytes body = 4; (type=12)
        _descriptor.FieldDescriptor(
            name="body",
            full_name="google.cloud.tasks.v2.HttpRequest.body",
            index=3,
            number=4,
            type=12,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b(""),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # OAuthToken oauth_token = 5; (member of the oneof)
        _descriptor.FieldDescriptor(
            name="oauth_token",
            full_name="google.cloud.tasks.v2.HttpRequest.oauth_token",
            index=4,
            number=5,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # OidcToken oidc_token = 6; (member of the oneof)
        _descriptor.FieldDescriptor(
            name="oidc_token",
            full_name="google.cloud.tasks.v2.HttpRequest.oidc_token",
            index=5,
            number=6,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[_HTTPREQUEST_HEADERSENTRY],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[
        _descriptor.OneofDescriptor(
            name="authorization_header",
            full_name="google.cloud.tasks.v2.HttpRequest.authorization_header",
            index=0,
            containing_type=None,
            fields=[],
        )
    ],
    serialized_start=131,
    serialized_end=484,
)
# Generated protobuf descriptor for the synthetic map-entry message backing
# AppEngineHttpRequest.headers (map<string, string>).
# serialized_options=_b("8\001") sets the map_entry=true message option.
#
# FIX: serialized_start/serialized_end previously duplicated the offsets of
# _HTTPREQUEST_HEADERSENTRY (414-460), which lie inside HttpRequest's source
# span (131-484).  A nested type's source range must lie within its
# containing message's range; AppEngineHttpRequest spans 487-793 and this
# 46-byte map entry is the last element serialized in it, so the correct
# offsets are 747-793.
_APPENGINEHTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor(
    name="HeadersEntry",
    full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: key (string) — map key.
        _descriptor.FieldDescriptor(
            name="key",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry.key",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: value (string) — map value.
        _descriptor.FieldDescriptor(
            name="value",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry.value",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=_b("8\001"),
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=747,
    serialized_end=793,
)
# Generated protobuf descriptor for google.cloud.tasks.v2.AppEngineHttpRequest.
# NOTE: machine-generated (protoc) code — do not edit by hand.
_APPENGINEHTTPREQUEST = _descriptor.Descriptor(
    name="AppEngineHttpRequest",
    full_name="google.cloud.tasks.v2.AppEngineHttpRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: http_method (enum HttpMethod; linked to _HTTPMETHOD later).
        _descriptor.FieldDescriptor(
            name="http_method",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.http_method",
            index=0,
            number=1,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: app_engine_routing (message AppEngineRouting; linked later).
        _descriptor.FieldDescriptor(
            name="app_engine_routing",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 3: relative_uri (string).
        _descriptor.FieldDescriptor(
            name="relative_uri",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 4: headers (map<string, string> via repeated HeadersEntry).
        _descriptor.FieldDescriptor(
            name="headers",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.headers",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=3,
            has_default_value=False,
            default_value=[],
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 5: body (bytes).
        _descriptor.FieldDescriptor(
            name="body",
            full_name="google.cloud.tasks.v2.AppEngineHttpRequest.body",
            index=4,
            number=5,
            type=12,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b(""),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[_APPENGINEHTTPREQUEST_HEADERSENTRY],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=487,
    serialized_end=793,
)
# Generated protobuf descriptor for google.cloud.tasks.v2.AppEngineRouting:
# four plain string fields (service, version, instance, host).
# NOTE: machine-generated (protoc) code — do not edit by hand.
_APPENGINEROUTING = _descriptor.Descriptor(
    name="AppEngineRouting",
    full_name="google.cloud.tasks.v2.AppEngineRouting",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: service (string).
        _descriptor.FieldDescriptor(
            name="service",
            full_name="google.cloud.tasks.v2.AppEngineRouting.service",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: version (string).
        _descriptor.FieldDescriptor(
            name="version",
            full_name="google.cloud.tasks.v2.AppEngineRouting.version",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 3: instance (string).
        _descriptor.FieldDescriptor(
            name="instance",
            full_name="google.cloud.tasks.v2.AppEngineRouting.instance",
            index=2,
            number=3,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 4: host (string; output-only per the API docs).
        _descriptor.FieldDescriptor(
            name="host",
            full_name="google.cloud.tasks.v2.AppEngineRouting.host",
            index=3,
            number=4,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=795,
    serialized_end=879,
)
# Generated protobuf descriptor for google.cloud.tasks.v2.OAuthToken:
# two string fields (service_account_email, scope).
# NOTE: machine-generated (protoc) code — do not edit by hand.
_OAUTHTOKEN = _descriptor.Descriptor(
    name="OAuthToken",
    full_name="google.cloud.tasks.v2.OAuthToken",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: service_account_email (string).
        _descriptor.FieldDescriptor(
            name="service_account_email",
            full_name="google.cloud.tasks.v2.OAuthToken.service_account_email",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: scope (string).
        _descriptor.FieldDescriptor(
            name="scope",
            full_name="google.cloud.tasks.v2.OAuthToken.scope",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=881,
    serialized_end=939,
)
# Generated protobuf descriptor for google.cloud.tasks.v2.OidcToken:
# two string fields (service_account_email, audience).
# NOTE: machine-generated (protoc) code — do not edit by hand.
_OIDCTOKEN = _descriptor.Descriptor(
    name="OidcToken",
    full_name="google.cloud.tasks.v2.OidcToken",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: service_account_email (string).
        _descriptor.FieldDescriptor(
            name="service_account_email",
            full_name="google.cloud.tasks.v2.OidcToken.service_account_email",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: audience (string).
        _descriptor.FieldDescriptor(
            name="audience",
            full_name="google.cloud.tasks.v2.OidcToken.audience",
            index=1,
            number=2,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=941,
    serialized_end=1001,
)
# --- Descriptor linking -------------------------------------------------
# The Descriptor() constructors above cannot reference each other at build
# time, so cross-references are patched in here: containing types of nested
# map entries, enum/message types of fields, and oneof membership.  Finally
# all top-level types are exposed on the module DESCRIPTOR and the file
# descriptor is registered with the default symbol database.
_HTTPREQUEST_HEADERSENTRY.containing_type = _HTTPREQUEST
_HTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD
_HTTPREQUEST.fields_by_name["headers"].message_type = _HTTPREQUEST_HEADERSENTRY
_HTTPREQUEST.fields_by_name["oauth_token"].message_type = _OAUTHTOKEN
_HTTPREQUEST.fields_by_name["oidc_token"].message_type = _OIDCTOKEN
# oauth_token and oidc_token both belong to the authorization_header oneof:
# each is appended to the oneof's field list and gets its containing_oneof
# back-reference set.
_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append(
    _HTTPREQUEST.fields_by_name["oauth_token"]
)
_HTTPREQUEST.fields_by_name[
    "oauth_token"
].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"]
_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append(
    _HTTPREQUEST.fields_by_name["oidc_token"]
)
_HTTPREQUEST.fields_by_name[
    "oidc_token"
].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"]
_APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST
_APPENGINEHTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD
_APPENGINEHTTPREQUEST.fields_by_name[
    "app_engine_routing"
].message_type = _APPENGINEROUTING
_APPENGINEHTTPREQUEST.fields_by_name[
    "headers"
].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY
DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST
DESCRIPTOR.message_types_by_name["AppEngineHttpRequest"] = _APPENGINEHTTPREQUEST
DESCRIPTOR.message_types_by_name["AppEngineRouting"] = _APPENGINEROUTING
DESCRIPTOR.message_types_by_name["OAuthToken"] = _OAUTHTOKEN
DESCRIPTOR.message_types_by_name["OidcToken"] = _OIDCTOKEN
DESCRIPTOR.enum_types_by_name["HttpMethod"] = _HTTPMETHOD
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete HttpRequest message class, built from _HTTPREQUEST by the
# protobuf reflection machinery.  The nested HeadersEntry class backs the
# `headers` map field.  The __doc__ literal is generated from the proto
# source comments and is part of the public API surface — left verbatim.
HttpRequest = _reflection.GeneratedProtocolMessageType(
    "HttpRequest",
    (_message.Message,),
    dict(
        HeadersEntry=_reflection.GeneratedProtocolMessageType(
            "HeadersEntry",
            (_message.Message,),
            dict(
                DESCRIPTOR=_HTTPREQUEST_HEADERSENTRY,
                __module__="google.cloud.tasks_v2.proto.target_pb2"
                # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest.HeadersEntry)
            ),
        ),
        DESCRIPTOR=_HTTPREQUEST,
        __module__="google.cloud.tasks_v2.proto.target_pb2",
        __doc__="""HTTP request.
The task will be pushed to the worker as an HTTP request. If the worker
or the redirected worker acknowledges the task by returning a successful
HTTP response code ([``200`` - ``299``]), the task will be removed from
the queue. If any other HTTP response code is returned or no response is
received, the task will be retried according to the following:
- User-specified throttling: [retry
configuration][google.cloud.tasks.v2.Queue.retry\_config], [rate
limits][google.cloud.tasks.v2.Queue.rate\_limits], and the [queue's
state][google.cloud.tasks.v2.Queue.state].
- System throttling: To prevent the worker from overloading, Cloud
Tasks may temporarily reduce the queue's effective rate.
User-specified settings will not be changed.
System throttling happens because:
- Cloud Tasks backs off on all errors. Normally the backoff specified
in [rate limits][google.cloud.tasks.v2.Queue.rate\_limits] will be
used. But if the worker returns ``429`` (Too Many Requests), ``503``
(Service Unavailable), or the rate of errors is high, Cloud Tasks
will use a higher backoff rate. The retry specified in the
``Retry-After`` HTTP response header is considered.
- To prevent traffic spikes and to smooth sudden increases in traffic,
dispatches ramp up slowly when the queue is newly created or idle and
if large numbers of tasks suddenly become available to dispatch (due
to spikes in create task rates, the queue being unpaused, or many
tasks that are scheduled at the same time).
Attributes:
url:
Required. The full url path that the request will be sent to.
This string must begin with either "http://" or "https://".
Some examples are: ``http://acme.com`` and
``https://acme.com/sales:8080``. Cloud Tasks will encode some
characters for safety and compatibility. The maximum allowed
URL length is 2083 characters after encoding. The
``Location`` header response from a redirect response [``300``
- ``399``] may be followed. The redirect is not counted as a
separate attempt.
http_method:
The HTTP method to use for the request. The default is POST.
headers:
HTTP request headers. This map contains the header field
names and values. Headers can be set when the [task is
created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask].
These headers represent a subset of the headers that will
accompany the task's HTTP request. Some HTTP request headers
will be ignored or replaced. A partial list of headers that
will be ignored or replaced is: - Host: This will be
computed by Cloud Tasks and derived from
[HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. -
Content-Length: This will be computed by Cloud Tasks. - User-
Agent: This will be set to ``"Google-Cloud-Tasks"``. -
X-Google-\*: Google use only. - X-AppEngine-\*: Google use
only. ``Content-Type`` won't be set by Cloud Tasks. You can
explicitly set ``Content-Type`` to a media type when the [task
is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask].
For example, ``Content-Type`` can be set to
``"application/octet-stream"`` or ``"application/json"``.
Headers which can have multiple values (according to RFC2616)
can be specified using comma-separated values. The size of
the headers must be less than 80KB.
body:
HTTP request body. A request body is allowed only if the
[HTTP method][google.cloud.tasks.v2.HttpRequest.http\_method]
is POST, PUT, or PATCH. It is an error to set body on a task
with an incompatible
[HttpMethod][google.cloud.tasks.v2.HttpMethod].
authorization_header:
The mode for generating an ``Authorization`` header for HTTP
requests. If specified, all ``Authorization`` headers in the
[HttpRequest.headers][google.cloud.tasks.v2.HttpRequest.header
s] field will be overridden.
oauth_token:
If specified, an `OAuth token
<https://developers.google.com/identity/protocols/OAuth2>`_
will be generated and attached as an ``Authorization`` header
in the HTTP request. This type of authorization should
generally only be used when calling Google APIs hosted on
\*.googleapis.com.
oidc_token:
If specified, an `OIDC <https://developers.google.com/identity
/protocols/OpenIDConnect>`_ token will be generated and
attached as an ``Authorization`` header in the HTTP request.
This type of authorization can be used for many scenarios,
including calling Cloud Run, or endpoints where you intend to
validate the token yourself.
""",
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest)
    ),
)
# Register the outer class and its nested map-entry class with the default
# symbol database so they can be looked up by full name.
_sym_db.RegisterMessage(HttpRequest)
_sym_db.RegisterMessage(HttpRequest.HeadersEntry)
# Concrete AppEngineHttpRequest message class, built from
# _APPENGINEHTTPREQUEST via the reflection machinery; HeadersEntry backs the
# `headers` map field.  The generated __doc__ literal is left verbatim.
AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType(
    "AppEngineHttpRequest",
    (_message.Message,),
    dict(
        HeadersEntry=_reflection.GeneratedProtocolMessageType(
            "HeadersEntry",
            (_message.Message,),
            dict(
                DESCRIPTOR=_APPENGINEHTTPREQUEST_HEADERSENTRY,
                __module__="google.cloud.tasks_v2.proto.target_pb2"
                # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry)
            ),
        ),
        DESCRIPTOR=_APPENGINEHTTPREQUEST,
        __module__="google.cloud.tasks_v2.proto.target_pb2",
        __doc__="""App Engine HTTP request.
The message defines the HTTP request that is sent to an App Engine app
when the task is dispatched.
Using [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest]
requires
```appengine.applications.get`` <https://cloud.google.com/appengine/docs/admin-api/access-control>`_
Google IAM permission for the project and the following scope:
``https://www.googleapis.com/auth/cloud-platform``
The task will be delivered to the App Engine app which belongs to the
same project as the queue. For more information, see `How Requests are
Routed <https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed>`_
and how routing is affected by `dispatch
files <https://cloud.google.com/appengine/docs/python/config/dispatchref>`_.
Traffic is encrypted during transport and never leaves Google
datacenters. Because this traffic is carried over a communication
mechanism internal to Google, you cannot explicitly set the protocol
(for example, HTTP or HTTPS). The request to the handler, however, will
appear to have used the HTTP protocol.
The [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used to
construct the URL that the task is delivered to can be set at the
queue-level or task-level:
- If [app\_engine\_routing\_override is set on the
queue][Queue.app\_engine\_routing\_override], this value is used for
all tasks in the queue, no matter what the setting is for the
[task-level
app\_engine\_routing][AppEngineHttpRequest.app\_engine\_routing].
The ``url`` that the task will be sent to is:
- ``url =`` [host][google.cloud.tasks.v2.AppEngineRouting.host] ``+``
[relative\_uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative\_uri]
Tasks can be dispatched to secure app handlers, unsecure app handlers,
and URIs restricted with
```login: admin`` <https://cloud.google.com/appengine/docs/standard/python/config/appref>`_.
Because tasks are not run as any user, they cannot be dispatched to URIs
restricted with
```login: required`` <https://cloud.google.com/appengine/docs/standard/python/config/appref>`_
Task dispatches also do not follow redirects.
The task attempt has succeeded if the app's request handler returns an
HTTP response code in the range [``200`` - ``299``]. The task attempt
has failed if the app's handler returns a non-2xx response code or Cloud
Tasks does not receive response before the
[deadline][google.cloud.tasks.v2.Task.dispatch\_deadline]. Failed tasks
will be retried according to the [retry
configuration][google.cloud.tasks.v2.Queue.retry\_config]. ``503``
(Service Unavailable) is considered an App Engine system error instead
of an application error and will cause Cloud Tasks' traffic congestion
control to temporarily throttle the queue's dispatches. Unlike other
types of task targets, a ``429`` (Too Many Requests) response from an
app handler does not cause traffic congestion control to throttle the
queue.
Attributes:
http_method:
The HTTP method to use for the request. The default is POST.
The app's request handler for the task's target URL must be
able to handle HTTP requests with this http\_method, otherwise
the task attempt will fail with error code 405 (Method Not
Allowed). See `Writing a push task request handler <https://cl
oud.google.com/appengine/docs/java/taskqueue/push/creating-
handlers#writing_a_push_task_request_handler>`_ and the
documentation for the request handlers in the language your
app is written in e.g. `Python Request Handler <https://cloud.
google.com/appengine/docs/python/tools/webapp/requesthandlercl
ass>`_.
app_engine_routing:
Task-level setting for App Engine routing. - If
[app\_engine\_routing\_override is set on the
queue][Queue.app\_engine\_routing\_override], this value is
used for all tasks in the queue, no matter what the setting
is for the [task-level app\_engine\_routing][AppEngineHt
tpRequest.app\_engine\_routing].
relative_uri:
The relative URI. The relative URI must begin with "/" and
must be a valid HTTP relative URI. It can contain a path and
query string arguments. If the relative URI is empty, then the
root path "/" will be used. No spaces are allowed, and the
maximum length allowed is 2083 characters.
headers:
HTTP request headers. This map contains the header field
names and values. Headers can be set when the [task is
created][google.cloud.tasks.v2.CloudTasks.CreateTask].
Repeated headers are not supported but a header value can
contain commas. Cloud Tasks sets some headers to default
values: - ``User-Agent``: By default, this header is
``"AppEngine-Google; (+http://code.google.com/appengine)"``.
This header can be modified, but Cloud Tasks will append
``"AppEngine-Google; (+http://code.google.com/appengine)"`` to
the modified ``User-Agent``. If the task has a
[body][google.cloud.tasks.v2.AppEngineHttpRequest.body], Cloud
Tasks sets the following headers: - ``Content-Type``: By
default, the ``Content-Type`` header is set to
``"application/octet-stream"``. The default can be overridden
by explicitly setting ``Content-Type`` to a particular
media type when the [task is
created][google.cloud.tasks.v2.CloudTasks.CreateTask]. For
example, ``Content-Type`` can be set to
``"application/json"``. - ``Content-Length``: This is
computed by Cloud Tasks. This value is output only. It
cannot be changed. The headers below cannot be set or
overridden: - ``Host`` - ``X-Google-*`` -
``X-AppEngine-*`` In addition, Cloud Tasks sets some headers
when the task is dispatched, such as headers containing
information about the task; see `request headers
<https://cloud.google.com/tasks/docs/creating-appengine-
handlers#reading_request_headers>`_. These headers are set
only when the task is dispatched, so they are not visible when
the task is returned in a Cloud Tasks response. Although
there is no specific limit for the maximum number of headers
or the size, there is a limit on the maximum size of the
[Task][google.cloud.tasks.v2.Task]. For more information, see
the [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]
documentation.
body:
HTTP request body. A request body is allowed only if the HTTP
method is POST or PUT. It is an error to set a body on a task
with an incompatible
[HttpMethod][google.cloud.tasks.v2.HttpMethod].
""",
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.AppEngineHttpRequest)
    ),
)
# Register the outer class and its nested map-entry class with the default
# symbol database.
_sym_db.RegisterMessage(AppEngineHttpRequest)
_sym_db.RegisterMessage(AppEngineHttpRequest.HeadersEntry)
# Concrete AppEngineRouting message class, built from _APPENGINEROUTING via
# the reflection machinery.  The generated __doc__ literal is left verbatim.
AppEngineRouting = _reflection.GeneratedProtocolMessageType(
    "AppEngineRouting",
    (_message.Message,),
    dict(
        DESCRIPTOR=_APPENGINEROUTING,
        __module__="google.cloud.tasks_v2.proto.target_pb2",
        __doc__="""App Engine Routing.
Defines routing characteristics specific to App Engine - service,
version, and instance.
For more information about services, versions, and instances see `An
Overview of App
Engine <https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine>`_,
`Microservices Architecture on Google App
Engine <https://cloud.google.com/appengine/docs/python/microservices-on-app-engine>`_,
`App Engine Standard request
routing <https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed>`_,
and `App Engine Flex request
routing <https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed>`_.
Using [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting]
requires
```appengine.applications.get`` <https://cloud.google.com/appengine/docs/admin-api/access-control>`_
Google IAM permission for the project and the following scope:
``https://www.googleapis.com/auth/cloud-platform``
Attributes:
service:
App service. By default, the task is sent to the service
which is the default service when the task is attempted. For
some queues or tasks which were created using the App Engine
Task Queue API,
[host][google.cloud.tasks.v2.AppEngineRouting.host] is not
parsable into
[service][google.cloud.tasks.v2.AppEngineRouting.service],
[version][google.cloud.tasks.v2.AppEngineRouting.version], and
[instance][google.cloud.tasks.v2.AppEngineRouting.instance].
For example, some tasks which were created using the App
Engine SDK use a custom domain name; custom domains are not
parsed by Cloud Tasks. If
[host][google.cloud.tasks.v2.AppEngineRouting.host] is not
parsable, then
[service][google.cloud.tasks.v2.AppEngineRouting.service],
[version][google.cloud.tasks.v2.AppEngineRouting.version], and
[instance][google.cloud.tasks.v2.AppEngineRouting.instance]
are the empty string.
version:
App version. By default, the task is sent to the version
which is the default version when the task is attempted. For
some queues or tasks which were created using the App Engine
Task Queue API,
[host][google.cloud.tasks.v2.AppEngineRouting.host] is not
parsable into
[service][google.cloud.tasks.v2.AppEngineRouting.service],
[version][google.cloud.tasks.v2.AppEngineRouting.version], and
[instance][google.cloud.tasks.v2.AppEngineRouting.instance].
For example, some tasks which were created using the App
Engine SDK use a custom domain name; custom domains are not
parsed by Cloud Tasks. If
[host][google.cloud.tasks.v2.AppEngineRouting.host] is not
parsable, then
[service][google.cloud.tasks.v2.AppEngineRouting.service],
[version][google.cloud.tasks.v2.AppEngineRouting.version], and
[instance][google.cloud.tasks.v2.AppEngineRouting.instance]
are the empty string.
instance:
App instance. By default, the task is sent to an instance
which is available when the task is attempted. Requests can
only be sent to a specific instance if `manual scaling is used
in App Engine Standard
<https://cloud.google.com/appengine/docs/python/an-overview-
of-app-engine?hl=en_US#scaling_types_and_instance_classes>`_.
App Engine Flex does not support instances. For more
information, see `App Engine Standard request routing
<https://cloud.google.com/appengine/docs/standard/python/how-
requests-are-routed>`_ and `App Engine Flex request routing
<https://cloud.google.com/appengine/docs/flexible/python/how-
requests-are-routed>`_.
host:
Output only. The host that the task is sent to. The host is
constructed from the domain name of the app associated with
the queue's project ID (for example .appspot.com), and the
[service][google.cloud.tasks.v2.AppEngineRouting.service],
[version][google.cloud.tasks.v2.AppEngineRouting.version], and
[instance][google.cloud.tasks.v2.AppEngineRouting.instance].
Tasks which were created using the App Engine SDK might have a
custom domain name. For more information, see `How Requests
are Routed
<https://cloud.google.com/appengine/docs/standard/python/how-
requests-are-routed>`_.
""",
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.AppEngineRouting)
    ),
)
# Register the class with the default symbol database.
_sym_db.RegisterMessage(AppEngineRouting)
# Concrete OAuthToken message class, built from _OAUTHTOKEN via the
# reflection machinery.  The generated __doc__ literal is left verbatim.
OAuthToken = _reflection.GeneratedProtocolMessageType(
    "OAuthToken",
    (_message.Message,),
    dict(
        DESCRIPTOR=_OAUTHTOKEN,
        __module__="google.cloud.tasks_v2.proto.target_pb2",
        __doc__="""Contains information needed for generating an `OAuth
token <https://developers.google.com/identity/protocols/OAuth2>`_. This
type of authorization should generally only be used when calling Google
APIs hosted on \*.googleapis.com.
Attributes:
service_account_email:
\ `Service account email
<https://cloud.google.com/iam/docs/service-accounts>`_ to be
used for generating OAuth token. The service account must be
within the same project as the queue. The caller must have
iam.serviceAccounts.actAs permission for the service account.
scope:
OAuth scope to be used for generating OAuth access token. If
not specified, "https://www.googleapis.com/auth/cloud-
platform" will be used.
""",
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OAuthToken)
    ),
)
# Register the class with the default symbol database.
_sym_db.RegisterMessage(OAuthToken)
# protoc-generated message class for google.cloud.tasks.v2.OidcToken.
# Do not edit by hand; regenerate from target.proto instead.
OidcToken = _reflection.GeneratedProtocolMessageType(
    "OidcToken",
    (_message.Message,),
    dict(
        DESCRIPTOR=_OIDCTOKEN,
        __module__="google.cloud.tasks_v2.proto.target_pb2",
        __doc__="""Contains information needed for generating an `OpenID
Connect
token <https://developers.google.com/identity/protocols/OpenIDConnect>`_.
This type of authorization can be used for many scenarios, including
calling Cloud Run, or endpoints where you intend to validate the token
yourself.
  Attributes:
      service_account_email:
          \ `Service account email
          <https://cloud.google.com/iam/docs/service-accounts>`_ to be
          used for generating OIDC token. The service account must be
          within the same project as the queue. The caller must have
          iam.serviceAccounts.actAs permission for the service account.
      audience:
          Audience to be used when generating OIDC token. If not
          specified, the URI specified in target will be used.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OidcToken)
    ),
)
_sym_db.RegisterMessage(OidcToken)
# Standard protoc boilerplate: clear the cached serialized options on the
# generated descriptors now that they have been registered.
DESCRIPTOR._options = None
_HTTPREQUEST_HEADERSENTRY._options = None
_HTTPREQUEST.fields_by_name["url"]._options = None
_APPENGINEHTTPREQUEST_HEADERSENTRY._options = None
# @@protoc_insertion_point(module_scope)
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/tasks_v2/proto/task.proto
import sys

# Py2/Py3 shim: on Python 3 the serialized descriptor literals below must be
# encoded to bytes via latin-1; on Python 2 they are already byte strings.
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)

# Process-wide registry used to register this file's descriptors and messages.
_sym_db = _symbol_database.Default()

from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from ..proto import (
    target_pb2 as google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2,
)
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
# File-level descriptor for task.proto; ``serialized_pb`` embeds the compiled
# FileDescriptorProto bytes (protoc-generated — do not edit by hand).
DESCRIPTOR = _descriptor.FileDescriptor(
    name="google/cloud/tasks_v2/proto/task.proto",
    package="google.cloud.tasks.v2",
    syntax="proto3",
    serialized_options=_b(
        "\n\031com.google.cloud.tasks.v2B\tTaskProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks"
    ),
    serialized_pb=_b(
        '\n&google/cloud/tasks_v2/proto/task.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xb4\x05\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x17\x61pp_engine_http_request\x18\x02 \x01(\x0b\x32+.google.cloud.tasks.v2.AppEngineHttpRequestH\x00\x12:\n\x0chttp_request\x18\x03 \x01(\x0b\x32".google.cloud.tasks.v2.HttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x07 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x08 \x01(\x05\x12\x35\n\rfirst_attempt\x18\t \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12\x34\n\x0clast_attempt\x18\n \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12.\n\x04view\x18\x0b \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cmessage_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBd\n\x19\x63om.google.cloud.tasks.v2B\tTaskProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3'
    ),
    dependencies=[
        google_dot_api_dot_resource__pb2.DESCRIPTOR,
        google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2.DESCRIPTOR,
        google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,
        google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
        google_dot_rpc_dot_status__pb2.DESCRIPTOR,
        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
    ],
)
# Descriptor for the nested enum google.cloud.tasks.v2.Task.View
# (protoc-generated — do not edit by hand).
_TASK_VIEW = _descriptor.EnumDescriptor(
    name="View",
    full_name="google.cloud.tasks.v2.Task.View",
    filename=None,
    file=DESCRIPTOR,
    values=[
        _descriptor.EnumValueDescriptor(
            name="VIEW_UNSPECIFIED",
            index=0,
            number=0,
            serialized_options=None,
            type=None,
        ),
        _descriptor.EnumValueDescriptor(
            name="BASIC", index=1, number=1, serialized_options=None, type=None
        ),
        _descriptor.EnumValueDescriptor(
            name="FULL", index=2, number=2, serialized_options=None, type=None
        ),
    ],
    containing_type=None,
    serialized_options=None,
    # Byte offsets of this enum within DESCRIPTOR's serialized_pb.
    serialized_start=776,
    serialized_end=825,
)
_sym_db.RegisterEnumDescriptor(_TASK_VIEW)
# Descriptor for the google.cloud.tasks.v2.Task message: eleven fields, the
# nested View enum, and the ``message_type`` oneof holding the two mutually
# exclusive request payloads (protoc-generated — do not edit by hand).
_TASK = _descriptor.Descriptor(
    name="Task",
    full_name="google.cloud.tasks.v2.Task",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: name (string)
        _descriptor.FieldDescriptor(
            name="name",
            full_name="google.cloud.tasks.v2.Task.name",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=_b("").decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: app_engine_http_request (message, part of the oneof)
        _descriptor.FieldDescriptor(
            name="app_engine_http_request",
            full_name="google.cloud.tasks.v2.Task.app_engine_http_request",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 3: http_request (message, part of the oneof)
        _descriptor.FieldDescriptor(
            name="http_request",
            full_name="google.cloud.tasks.v2.Task.http_request",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 4: schedule_time (google.protobuf.Timestamp)
        _descriptor.FieldDescriptor(
            name="schedule_time",
            full_name="google.cloud.tasks.v2.Task.schedule_time",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 5: create_time (google.protobuf.Timestamp)
        _descriptor.FieldDescriptor(
            name="create_time",
            full_name="google.cloud.tasks.v2.Task.create_time",
            index=4,
            number=5,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 6: dispatch_deadline (google.protobuf.Duration)
        _descriptor.FieldDescriptor(
            name="dispatch_deadline",
            full_name="google.cloud.tasks.v2.Task.dispatch_deadline",
            index=5,
            number=6,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 7: dispatch_count (int32)
        _descriptor.FieldDescriptor(
            name="dispatch_count",
            full_name="google.cloud.tasks.v2.Task.dispatch_count",
            index=6,
            number=7,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 8: response_count (int32)
        _descriptor.FieldDescriptor(
            name="response_count",
            full_name="google.cloud.tasks.v2.Task.response_count",
            index=7,
            number=8,
            type=5,
            cpp_type=1,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 9: first_attempt (Attempt)
        _descriptor.FieldDescriptor(
            name="first_attempt",
            full_name="google.cloud.tasks.v2.Task.first_attempt",
            index=8,
            number=9,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 10: last_attempt (Attempt)
        _descriptor.FieldDescriptor(
            name="last_attempt",
            full_name="google.cloud.tasks.v2.Task.last_attempt",
            index=9,
            number=10,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 11: view (Task.View enum)
        _descriptor.FieldDescriptor(
            name="view",
            full_name="google.cloud.tasks.v2.Task.view",
            index=10,
            number=11,
            type=14,
            cpp_type=8,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[_TASK_VIEW],
    # Serialized google.api.resource_reference pattern for Task resources.
    serialized_options=_b(
        "\352Ae\n\036cloudtasks.googleapis.com/Task\022Cprojects/{project}/locations/{location}/queues/{queue}/tasks/{task}"
    ),
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[
        _descriptor.OneofDescriptor(
            name="message_type",
            full_name="google.cloud.tasks.v2.Task.message_type",
            index=0,
            containing_type=None,
            fields=[],
        )
    ],
    serialized_start=255,
    serialized_end=947,
)
# Descriptor for the google.cloud.tasks.v2.Attempt message: three Timestamp
# fields plus a google.rpc.Status (protoc-generated — do not edit by hand).
_ATTEMPT = _descriptor.Descriptor(
    name="Attempt",
    full_name="google.cloud.tasks.v2.Attempt",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    fields=[
        # Field 1: schedule_time (google.protobuf.Timestamp)
        _descriptor.FieldDescriptor(
            name="schedule_time",
            full_name="google.cloud.tasks.v2.Attempt.schedule_time",
            index=0,
            number=1,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 2: dispatch_time (google.protobuf.Timestamp)
        _descriptor.FieldDescriptor(
            name="dispatch_time",
            full_name="google.cloud.tasks.v2.Attempt.dispatch_time",
            index=1,
            number=2,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 3: response_time (google.protobuf.Timestamp)
        _descriptor.FieldDescriptor(
            name="response_time",
            full_name="google.cloud.tasks.v2.Attempt.response_time",
            index=2,
            number=3,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
        # Field 4: response_status (google.rpc.Status)
        _descriptor.FieldDescriptor(
            name="response_status",
            full_name="google.cloud.tasks.v2.Attempt.response_status",
            index=3,
            number=4,
            type=11,
            cpp_type=10,
            label=1,
            has_default_value=False,
            default_value=None,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=950,
    serialized_end=1157,
)
# Link message/enum fields to their concrete descriptors, wire up the
# ``message_type`` oneof, and register everything with the symbol database
# (protoc-generated boilerplate — do not edit by hand).
_TASK.fields_by_name[
    "app_engine_http_request"
].message_type = (
    google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST
)
_TASK.fields_by_name[
    "http_request"
].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._HTTPREQUEST
_TASK.fields_by_name[
    "schedule_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_TASK.fields_by_name[
    "create_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_TASK.fields_by_name[
    "dispatch_deadline"
].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_TASK.fields_by_name["first_attempt"].message_type = _ATTEMPT
_TASK.fields_by_name["last_attempt"].message_type = _ATTEMPT
_TASK.fields_by_name["view"].enum_type = _TASK_VIEW
_TASK_VIEW.containing_type = _TASK
# Both request payload fields belong to the mutually-exclusive oneof.
_TASK.oneofs_by_name["message_type"].fields.append(
    _TASK.fields_by_name["app_engine_http_request"]
)
_TASK.fields_by_name["app_engine_http_request"].containing_oneof = _TASK.oneofs_by_name[
    "message_type"
]
_TASK.oneofs_by_name["message_type"].fields.append(_TASK.fields_by_name["http_request"])
_TASK.fields_by_name["http_request"].containing_oneof = _TASK.oneofs_by_name[
    "message_type"
]
_ATTEMPT.fields_by_name[
    "schedule_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ATTEMPT.fields_by_name[
    "dispatch_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ATTEMPT.fields_by_name[
    "response_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ATTEMPT.fields_by_name[
    "response_status"
].message_type = google_dot_rpc_dot_status__pb2._STATUS
DESCRIPTOR.message_types_by_name["Task"] = _TASK
DESCRIPTOR.message_types_by_name["Attempt"] = _ATTEMPT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# protoc-generated message class for google.cloud.tasks.v2.Task.
# Do not edit by hand; regenerate from task.proto instead.
Task = _reflection.GeneratedProtocolMessageType(
    "Task",
    (_message.Message,),
    dict(
        DESCRIPTOR=_TASK,
        __module__="google.cloud.tasks_v2.proto.task_pb2",
        __doc__="""A unit of scheduled work.
  Attributes:
      name:
          Optionally caller-specified in
          [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask].
          The task name.  The task name must have the following format:
          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/ta
          sks/TASK_ID``  -  ``PROJECT_ID`` can contain letters
          ([A-Za-z]), numbers ([0-9]),    hyphens (-), colons (:), or
          periods (.).    For more information, see    `Identifying
          projects <https://cloud.google.com/resource-
          manager/docs/creating-managing-
          projects#identifying_projects>`_ -  ``LOCATION_ID`` is the
          canonical ID for the task's location.    The list of available
          locations can be obtained by calling    [ListLocations][google
          .cloud.location.Locations.ListLocations].    For more
          information, see https://cloud.google.com/about/locations/. -
          ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers ([0-9]),
          or   hyphens (-). The maximum length is 100 characters. -
          ``TASK_ID`` can contain only letters ([A-Za-z]), numbers
          ([0-9]),    hyphens (-), or underscores (\_). The maximum
          length is 500 characters.
      message_type:
          Required. The message to send to the worker.
      app_engine_http_request:
          HTTP request that is sent to the App Engine app handler.  An
          App Engine task is a task that has [AppEngineHttpRequest][goog
          le.cloud.tasks.v2.AppEngineHttpRequest] set.
      http_request:
          HTTP request that is sent to the worker.  An HTTP task is a
          task that has [HttpRequest][google.cloud.tasks.v2.HttpRequest]
          set.
      schedule_time:
          The time when the task is scheduled to be attempted or
          retried.  ``schedule_time`` will be truncated to the nearest
          microsecond.
      create_time:
          Output only. The time that the task was created.
          ``create_time`` will be truncated to the nearest second.
      dispatch_deadline:
          The deadline for requests sent to the worker. If the worker
          does not respond by this deadline then the request is
          cancelled and the attempt is marked as a ``DEADLINE_EXCEEDED``
          failure. Cloud Tasks will retry the task according to the
          [RetryConfig][google.cloud.tasks.v2.RetryConfig].  Note that
          when the request is cancelled, Cloud Tasks will stop listing
          for the response, but whether the worker stops processing
          depends on the worker. For example, if the worker is stuck, it
          may not react to cancelled requests.  The default and maximum
          values depend on the type of request:  -  For [HTTP
          tasks][google.cloud.tasks.v2.HttpRequest], the default is
          10 minutes. The deadline must be in the interval [15 seconds,
          30 minutes].  -  For [App Engine
          tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0
          indicates that the request has the default deadline. The
          default deadline depends on the `scaling type
          <https://cloud.google.com/appengine/docs/standard/go/how-
          instances-are-managed#instance_scaling>`_ of the service:
          10 minutes for standard apps with automatic scaling, 24
          hours for standard apps with manual and basic scaling, and 60
          minutes for flex apps. If the request deadline is set, it must
          be in the interval [15 seconds, 24 hours 15 seconds].
          Regardless of the task's ``dispatch_deadline``, the app
          handler will not run for longer than than the service's
          timeout. We recommend setting the ``dispatch_deadline`` to
          at most a few seconds more than the app handler's timeout.
          For more information see `Timeouts
          <https://cloud.google.com/tasks/docs/creating-appengine-
          handlers#timeouts>`_.  ``dispatch_deadline`` will be
          truncated to the nearest millisecond. The deadline is an
          approximate deadline.
      dispatch_count:
          Output only. The number of attempts dispatched.  This count
          includes attempts which have been dispatched but haven't
          received a response.
      response_count:
          Output only. The number of attempts which have received a
          response.
      first_attempt:
          Output only. The status of the task's first attempt.  Only
          [dispatch\_time][google.cloud.tasks.v2.Attempt.dispatch\_time]
          will be set. The other
          [Attempt][google.cloud.tasks.v2.Attempt] information is not
          retained by Cloud Tasks.
      last_attempt:
          Output only. The status of the task's last attempt.
      view:
          Output only. The view specifies which subset of the
          [Task][google.cloud.tasks.v2.Task] has been returned.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.Task)
    ),
)
_sym_db.RegisterMessage(Task)
# protoc-generated message class for google.cloud.tasks.v2.Attempt.
# Do not edit by hand; regenerate from task.proto instead.
Attempt = _reflection.GeneratedProtocolMessageType(
    "Attempt",
    (_message.Message,),
    dict(
        DESCRIPTOR=_ATTEMPT,
        __module__="google.cloud.tasks_v2.proto.task_pb2",
        __doc__="""The status of a task attempt.
  Attributes:
      schedule_time:
          Output only. The time that this attempt was scheduled.
          ``schedule_time`` will be truncated to the nearest
          microsecond.
      dispatch_time:
          Output only. The time that this attempt was dispatched.
          ``dispatch_time`` will be truncated to the nearest
          microsecond.
      response_time:
          Output only. The time that this attempt response was received.
          ``response_time`` will be truncated to the nearest
          microsecond.
      response_status:
          Output only. The response from the worker for this attempt.
          If ``response_time`` is unset, then the task has not been
          attempted or is currently running and the ``response_status``
          field is meaningless.
  """,
        # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.Attempt)
    ),
)
_sym_db.RegisterMessage(Attempt)
# Standard protoc boilerplate: clear cached serialized options.
DESCRIPTOR._options = None
_TASK._options = None
# @@protoc_insertion_point(module_scope)
| import logging, urllib | ||
| import threading | ||
| import time, sched | ||
| from concurrent import futures | ||
| from datetime import datetime | ||
| from typing import Dict | ||
| from urllib import error, request | ||
| from urllib.parse import parse_qs | ||
| import grpc | ||
| from google.api_core.exceptions import NotFound, FailedPrecondition, GoogleAPICallError | ||
| from .proto import (cloudtasks_pb2, cloudtasks_pb2_grpc, | ||
| queue_pb2, task_pb2, target_pb2) | ||
| from google.protobuf import empty_pb2 | ||
| from google.protobuf.timestamp_pb2 import Timestamp | ||
| from google.rpc.status_pb2 import Status | ||
# Time to sleep between iterations of the threads
_LOOP_SLEEP_TIME = 0.1
# Default host/port that App Engine style tasks are dispatched to when the
# task name carries no ``?port=`` override.
DEFAULT_TARGET_PORT = 80
DEFAULT_TARGET_HOST = "localhost"
# Short aliases for the generated protobuf message classes used throughout
# this module.
Queue = queue_pb2.Queue
Task = task_pb2.Task
Attempt = task_pb2.Attempt
# Module-level logger shared by the emulator components.
logger = logging.getLogger("gcloud-tasks-emulator")
def _make_task_request(queue_name: str, task: Task, host: str, port: int):
    """Dispatch a single task as an HTTP request to its target.

    Builds either an App Engine style request (``relative_uri`` resolved
    against ``host:port``) or a plain HTTP request (absolute URL taken from
    the task), attaching the emulated ``X-AppEngine-*`` / ``X-CloudTasks-*``
    metadata headers the live service would add.

    Args:
        queue_name: Fully-qualified name of the queue the task belongs to.
        task: Task proto; must have either ``app_engine_http_request``
            with a ``relative_uri`` or ``http_request`` with a ``url``.
        host: Target host for App Engine style tasks.
        port: Target port for App Engine style tasks.

    Returns:
        The response object from ``urllib.request.urlopen``.

    Raises:
        ValueError: If the task specifies neither request type.
        urllib.error.HTTPError / urllib.error.URLError: From ``urlopen``.
    """
    logger.info("[TASKS] Submitting task %s", task.name)
    headers = {}
    data = None
    if task.app_engine_http_request.relative_uri:
        method = target_pb2.HttpMethod.Name(task.app_engine_http_request.http_method)
        data = task.app_engine_http_request.body
        url = "http://%s:%d%s" % (
            host,
            port,
            task.app_engine_http_request.relative_uri
        )
        headers.update(getattr(task.app_engine_http_request, 'headers', {}))
        headers.update({
            'X-AppEngine-QueueName': queue_name,
            'X-AppEngine-TaskName': task.name.rsplit("/", 1)[-1],
            'X-AppEngine-TaskRetryCount': task.dispatch_count - 1,
            'X-AppEngine-TaskExecutionCount': task.dispatch_count - 1,
            'X-AppEngine-TaskETA': task.schedule_time.ToSeconds()
        })
    elif task.http_request.url:
        method = target_pb2.HttpMethod.Name(task.http_request.http_method)
        data = task.http_request.body
        url = task.http_request.url
        headers.update(getattr(task.http_request, "headers", {}))
        headers.update({
            'X-CloudTasks-QueueName': queue_name,
            'X-CloudTasks-TaskName': task.name.rsplit("/", 1)[-1],
            'X-CloudTasks-TaskRetryCount': task.dispatch_count - 1,
            'X-CloudTasks-TaskExecutionCount': task.dispatch_count - 1,
            'X-CloudTasks-TaskETA': task.schedule_time.ToSeconds()
        })
    else:
        # FIX: was a bare ``Exception``; ValueError is more precise and
        # remains backward-compatible with existing ``except Exception``
        # handlers (e.g. in QueueState.submit_task).
        raise ValueError("Either app_engine_http_request or http_request is required")
    req = request.Request(url, method=method, data=data)
    for k, v in headers.items():
        # FIX: retry counts and ETA above are ints; coerce explicitly to str
        # rather than relying on http.client's header-value handling.
        req.add_header(k, v if isinstance(v, (str, bytes)) else str(v))
    logger.info("[TASKS] Requesting %s %s", req.method, req.full_url)
    return request.urlopen(req)
class QueueState(object):
    """
    Keeps the state of queues and tasks in memory
    so they can be processed.

    Queues are stored by their fully-qualified name; each queue owns an
    ordered list of pending Task protos that is consumed FIFO.
    """

    def __init__(self, target_host: str, target_port: int, max_retries: int):
        # Queue name -> Queue proto.
        self._queues: Dict[str, "Queue"] = {}
        # Queue name -> ordered list of pending Task protos.
        # (FIX: was annotated ``Dict[str, Task]`` although values are lists.)
        self._queue_tasks: Dict[str, list] = {}
        self._target_host: str = target_host
        self._target_port: int = target_port
        # A negative value means "retry forever".
        self._max_retries: int = max_retries

    def create_queue(self, parent, queue: "Queue"):
        """Create ``queue`` (idempotent) and mark it RUNNING.

        ``parent`` is accepted for API parity but unused by the emulator.
        Returns the stored Queue proto.
        """
        assert queue.name
        if queue.name not in self._queues:
            self._queues[queue.name] = queue
            self._queues[queue.name].state = (
                queue_pb2._QUEUE_STATE.values_by_name["RUNNING"].number
            )
            self._queue_tasks[queue.name] = []
            logger.info("[TASKS] Created queue %s", queue.name)
        return self._queues[queue.name]

    def update_queue(self, queue: "Queue"):
        """Update (or implicitly create) a queue; returns the stored proto."""
        if queue.name not in self._queues:
            self.create_queue(None, queue)
        else:
            # FIXME: Updating queue properties has
            # no effect currently on the emulator
            pass
        return self._queues[queue.name]

    def create_task(self, queue: str, task: "Task"):
        """Append ``task`` to the queue named ``queue``, filling defaults.

        Generates a timestamp-based task name when none is given and
        defaults the HTTP method to POST, mirroring the live service.

        Raises:
            FailedPrecondition: If the queue does not exist.
        """
        task.name = task.name or "%s/tasks/%s" % (
            queue, int(datetime.now().timestamp() * 1000000)
        )
        if task.app_engine_http_request.relative_uri:
            # Set a default http_method
            task.app_engine_http_request.http_method = (
                task.app_engine_http_request.http_method
                or target_pb2.HttpMethod.Value("POST")
            )
        elif task.http_request.url:
            # Set a default http_method
            task.http_request.http_method = (
                task.http_request.http_method
                or target_pb2.HttpMethod.Value("POST")
            )
        if queue not in self._queue_tasks:
            raise FailedPrecondition(f"Queue {queue} does not exist.")
        self._queue_tasks[queue].append(task)
        logger.info("[TASKS] Created task %s", task.name)
        return task

    def purge_queue(self, queue_name: str):
        """Drop every pending task from the queue; returns the Queue proto."""
        if queue_name in self._queues:
            # Wipe the tasks out
            logger.info("[TASKS] Purging queue %s", queue_name)
            self._queue_tasks[queue_name] = []
            return self._queues[queue_name]
        logger.error(
            "[TASKS] Tried to purge an invalid queue: %s",
            queue_name
        )
        raise ValueError("Invalid queue: %s" % queue_name)

    def _set_queue_state(self, queue_name: str, state_name: str, verb: str):
        """Shared helper for pause/resume: set the queue state enum value."""
        if queue_name in self._queues:
            logger.info("[TASKS] %s queue %s", verb, queue_name)
            self._queues[queue_name].state = (
                queue_pb2._QUEUE_STATE.values_by_name[state_name].number
            )
            return self._queues[queue_name]
        logger.error(
            "[TASKS] Tried to %s an invalid queue: %s",
            verb.lower(), queue_name
        )
        raise ValueError("Invalid queue: %s" % queue_name)

    def pause_queue(self, queue_name: str):
        """Mark the queue PAUSED; raises ValueError if it does not exist."""
        return self._set_queue_state(queue_name, "PAUSED", "Pausing")

    def resume_queue(self, queue_name: str):
        """Mark the queue RUNNING; raises ValueError if it does not exist."""
        return self._set_queue_state(queue_name, "RUNNING", "Resuming")

    def list_tasks(self, queue_name: str):
        """Return the pending tasks of a queue (KeyError if unknown)."""
        return self._queue_tasks[queue_name]

    def queue_names(self):
        """Return the names of all known queues."""
        return list(self._queues)

    def queues(self):
        """Return all known Queue protos."""
        return list(self._queues.values())

    def queue(self, name: str) -> "Queue":
        """Return the Queue proto for ``name`` (KeyError if unknown)."""
        return self._queues[name]

    def delete_queue(self, name: str):
        """Delete a queue and its pending tasks; no-op if unknown."""
        if name in self._queues:
            logger.info("[TASKS] Deleting queue %s", name)
            del self._queues[name]
        if name in self._queue_tasks:
            del self._queue_tasks[name]

    def submit_task(self, task_name: str, force_run: bool = False) -> "Task":
        """
        Actually executes a task. If force_run is True then the scheduled
        time will be ignored. This is used mainly for the RunTask API call.

        Returns the (possibly re-queued) Task proto. Raises ValueError for
        an invalid queue/task name and NotFound if the task is unknown.
        """
        try:
            queue_name = task_name.rsplit("/", 2)[0]
            if queue_name not in self._queue_tasks:
                raise ValueError("Not a valid queue")
        except IndexError:
            # FIX: raise with a message instead of a bare ValueError().
            raise ValueError("Invalid task name: %s" % task_name)
        # This is a special-case that does not exist
        # on the live server and it exists so that
        # local development servers can direct a task
        # to run on a particular port
        if "?" in task_name:
            # FIX: the previous ``rsplit("?", 1)[-1]`` returned the whole
            # name when no "?" was present and fed it to parse_qs.
            task_name, _, qs = task_name.rpartition("?")
            params = parse_qs(qs)
            port = int(params.get("port", [self._target_port])[0])
        else:
            port = self._target_port
        index = None
        # Locate the task in the queue
        for i, task in enumerate(self._queue_tasks[queue_name]):
            if task.name == task_name:
                index = i
                break
        else:
            logger.debug(
                "[TASKS] Tasks were: %s",
                [x.name for x in self._queue_tasks[queue_name]]
            )
            raise NotFound("Task not found: %s" % task_name)

        def now():
            # Fresh wall-clock protobuf Timestamp.
            current_time = Timestamp()
            current_time.GetCurrentTime()
            return current_time

        schedule_time = now()
        dispatch_time = None
        response_time = None
        response_status = 200
        task: "Task" = self._queue_tasks[queue_name].pop(index)  # Remove the task
        if (
            (not force_run) and
            task.HasField("schedule_time") and
            task.schedule_time.ToDatetime() >= schedule_time.ToDatetime()
        ):
            # Not due yet: keep it pending and try again later.
            logger.info(
                "[TASKS] Task %s is scheduled for future execution. Moving it to the end of the queue.", task_name
            )
            self._queue_tasks[queue_name].append(task)
            return task
        task.dispatch_count += 1
        try:
            dispatch_time = now()
            response = _make_task_request(queue_name, task, self._target_host, port)
        except error.HTTPError as e:
            # An HTTP error status still means a response was received.
            response_time = now()
            response_status = e.code
            logger.error("[TASKS] Error submitting task %s, reason: %s", task_name, e.reason)
        except (ConnectionError, error.URLError) as e:
            response_status = 500
            logger.error(
                "[TASKS] Error submitting task %s, reason: %s:\n%s: %s",
                task_name, getattr(e, 'reason', ''), type(e).__name__, e
            )
            logger.error(
                "[TASKS] Host was %s:%s", self._target_host, port
            )
        except Exception:
            response_status = 500
            # FIX: the previous call passed two args for one %s placeholder,
            # which made the logging call itself fail; logger.exception also
            # records the traceback.
            logger.exception(
                "[TASKS] Unexpected exception while submitting task %s", task_name
            )
        else:
            # FIX: record when the response arrived (was never set before).
            response_time = now()
            response_status = response.status
        attempt = Attempt(
            schedule_time=schedule_time,
            dispatch_time=dispatch_time,
            response_time=response_time,
            response_status=Status(code=response_status)
        )
        kwargs = {
            # FIX: protobuf messages are always truthy, so the previous
            # ``task.first_attempt or attempt`` never selected ``attempt``;
            # use explicit field presence instead.
            "first_attempt": (
                task.first_attempt if task.HasField("first_attempt") else attempt
            ),
            "last_attempt": attempt
        }
        task.MergeFrom(Task(**kwargs))
        if 400 <= response_status < 600:
            # Failed attempt: re-queue unless the retry budget is exhausted
            # (a negative _max_retries retries forever).
            if self._max_retries < 0 or task.dispatch_count <= self._max_retries:
                logger.info("[TASKS] Moving failed task %s to the back of the queue.", task_name)
                self._queue_tasks[queue_name].append(task)
            else:
                logger.info("[TASKS] Giving up failed task %s", task_name)
        return task
class Greeter(cloudtasks_pb2_grpc.CloudTasksServicer):
    """gRPC servicer that delegates every Cloud Tasks RPC to a QueueState.

    Any GoogleAPICallError raised by the state layer is translated into a
    gRPC abort carrying the corresponding status code and message.
    """

    def __init__(self, state: QueueState):
        super().__init__()
        self._state: QueueState = state

    def _guarded(self, context, operation):
        # Run a zero-argument operation; map state-layer errors onto the
        # gRPC context (context.abort raises, terminating the handler).
        try:
            return operation()
        except GoogleAPICallError as err:
            context.abort(err.grpc_status_code, err.message)

    def CreateQueue(self, request, context):
        return self._guarded(
            context, lambda: self._state.create_queue(request.parent, request.queue)
        )

    def UpdateQueue(self, request, context):
        return self._guarded(context, lambda: self._state.update_queue(request.queue))

    def ListQueues(self, request, context):
        def _list():
            matches = [q for q in self._state.queues() if q.name.startswith(request.parent)]
            return cloudtasks_pb2.ListQueuesResponse(queues=matches)
        return self._guarded(context, _list)

    def GetQueue(self, request, context):
        return self._guarded(context, lambda: self._state.queue(request.name))

    def PauseQueue(self, request, context):
        return self._guarded(context, lambda: self._state.pause_queue(request.name))

    def ResumeQueue(self, request, context):
        return self._guarded(context, lambda: self._state.resume_queue(request.name))

    def PurgeQueue(self, request, context):
        return self._guarded(context, lambda: self._state.purge_queue(request.name))

    def ListTasks(self, request, context):
        return self._guarded(
            context,
            lambda: cloudtasks_pb2.ListTasksResponse(
                tasks=self._state.list_tasks(request.parent)
            ),
        )

    def DeleteQueue(self, request, context):
        def _delete():
            self._state.delete_queue(request.name)
            return empty_pb2.Empty()
        return self._guarded(context, _delete)

    def CreateTask(self, request, context):
        return self._guarded(
            context, lambda: self._state.create_task(request.parent, request.task)
        )

    def RunTask(self, request, context):
        return self._guarded(
            context, lambda: self._state.submit_task(request.name, force_run=True)
        )
class APIThread(threading.Thread):
    """Hosts the gRPC Cloud Tasks API server on a background thread.

    The thread stays alive (sleeping in a loop) until join() clears the
    running flag and stops the gRPC server.
    """

    def __init__(self, state: QueueState, host: str, port: int, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._state = state
        self._port = port
        self._host = host
        # Flag controlling the keep-alive loop in run().
        self._is_running = threading.Event()
        self._httpd = None

    def run(self):
        # BUG FIX: _is_running was never set, so the keep-alive loop below
        # was skipped entirely and this thread exited right after startup
        # (Processor.run sets its flag the same way).
        self._is_running.set()
        self._httpd = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
        cloudtasks_pb2_grpc.add_CloudTasksServicer_to_server(
            Greeter(self._state), self._httpd
        )
        interface = '%s:%s' % (self._host, self._port)
        self._httpd.add_insecure_port(interface)
        logger.info("[TASKS] Starting API server at %s", interface)
        self._httpd.start()
        while self._is_running.is_set():
            time.sleep(_LOOP_SLEEP_TIME)

    def join(self, timeout=None):
        """Signal shutdown, stop the gRPC server, and wait for the thread."""
        self._is_running.clear()
        if self._httpd:
            self._httpd.stop(grace=0)
        logger.info("[TASKS] Stopping API server")
        # BUG FIX: without super().join() callers' join(timeout=...) never
        # actually waited for this thread to finish.
        super().join(timeout)
class Processor(threading.Thread):
    """Polls the queue state and runs one worker thread per known queue.

    Each worker drains due tasks from its queue while the queue exists and
    is in the RUNNING state.
    """

    def __init__(self, state: QueueState):
        super().__init__()
        self._state: QueueState = state
        self._is_running = threading.Event()
        # Names of queues we have ever started a worker for (kept for
        # backward compatibility with any external readers).
        self._known_queues = set()
        # queue name -> worker thread
        self._queue_threads = {}

    def run(self):
        self._is_running.set()
        logger.info("[TASKS] Starting task processor")
        while self._is_running.is_set():
            for queue_name in self._state.queue_names():
                self.process_queue(queue_name)
            time.sleep(_LOOP_SLEEP_TIME)

    def _process_queue(self, queue: str):
        # Worker loop for a single queue; exits when the queue disappears
        # or the processor is shut down.
        while self._is_running.is_set():
            # Queue was deleted, stop processing
            if queue not in self._state._queues:
                break
            if queue not in self._state._queue_tasks:
                break
            if self._state.queue(queue).state == queue_pb2._QUEUE_STATE.values_by_name["RUNNING"].number:
                # Snapshot the task list so concurrent mutation is safe.
                tasks = self._state._queue_tasks[queue][:]
                while tasks:
                    task = tasks.pop(0)
                    logger.info("[TASKS] Processing next task %s", task.name)
                    self._state.submit_task(task.name)
            time.sleep(_LOOP_SLEEP_TIME)

    def process_queue(self, queue_name: str):
        """Ensure a live worker thread exists for *queue_name*.

        BUG FIX: previously a queue that was deleted and later re-created
        with the same name never got a new worker, because the name stayed
        in _known_queues forever. Now a worker is (re)started whenever no
        live thread exists for the queue.
        """
        existing = self._queue_threads.get(queue_name)
        if existing is not None and existing.is_alive():
            return
        self._known_queues.add(queue_name)
        thread = threading.Thread(
            target=self._process_queue, args=[queue_name]
        )
        self._queue_threads[queue_name] = thread
        thread.start()

    def join(self, timeout=None):
        """Signal shutdown and wait briefly for workers, then the thread."""
        self._is_running.clear()
        for thread in self._queue_threads.values():
            if thread.is_alive():
                thread.join(timeout=0)
        super().join(timeout)
class CronScheduler(threading.Thread):
    """Periodically calls a cron handler URL over HTTP.

    NOTE(review): only crons[0] is scheduled; additional cron entries are
    ignored — confirm whether multi-cron support is intended.
    """

    def __init__(self, crons, host, port):
        super().__init__()
        self._crons = crons
        self._host = f"http://{host}:{port}"
        self._scheduler = sched.scheduler(time.time, time.sleep)
        self._event = None

    def get_intervall_in_seconds(self, schedule_string):
        """Parse a schedule like ``"every 10 minutes"`` into seconds.

        BUG FIX: the *schedule_string* argument was previously ignored and
        the first cron's schedule was always parsed instead.
        """
        mult_map = {
            "seconds": 1,
            "minutes": 60,
            "mins": 60,
            "hours": 60 * 60,
            "days": 60 * 60 * 24
        }
        schedule_parts = schedule_string.split(" ")
        multiplier = mult_map[schedule_parts[2]]
        return int(schedule_parts[1]) * multiplier

    def run(self):
        logger.info("[TASKS] Starting cron processor")
        intervall = self.get_intervall_in_seconds(self._crons[0]["schedule"])
        url = self._host + self._crons[0]["url"]

        def call_cron_hook(sc):
            # Fire the hook, then re-schedule itself to repeat.
            # (Replaced leftover debug print() calls with logging.)
            logger.info("[TASKS] Calling cron hook %s", url)
            urllib.request.urlopen(url)
            sc.enter(intervall, 1, call_cron_hook, (sc,))

        self._event = self._scheduler.enter(intervall, 1, call_cron_hook, (self._scheduler,))
        self._scheduler.run()

    def join(self, timeout=None):
        """Cancel all pending cron events and wait for the thread."""
        for entry in self._scheduler.queue:
            self._scheduler.cancel(entry)
        super().join(timeout)
class Server(object):
    """Wires together the queue state, API thread, task processor and the
    optional cron scheduler, and pre-creates any default queues."""

    def __init__(self, host, port, target_host, target_port, default_queue_names, max_retries, crons):
        self._state = QueueState(target_host, target_port, max_retries)
        self._api = APIThread(self._state, host, port)
        self._processor = Processor(self._state)
        # NOTE(review): the cron scheduler targets `host` (the API host)
        # combined with `target_port` — confirm this pairing is intended.
        self._cron_processor = CronScheduler(crons, host, target_port)
        self._crons = crons
        for queue_name in default_queue_names:
            # NOTE(review): rsplit("/", 3)[0] drops the location segment from
            # a full queue name — verify this matches create_queue's parent.
            parent = queue_name.rsplit("/", 3)[0]
            self._state.create_queue(parent, Queue(name=queue_name))

    def start(self):
        """Start all background services (cron only when crons configured)."""
        self._api.start()  # Start the API thread
        self._processor.start()
        if self._crons:
            self._cron_processor.start()

    def stop(self):
        """Stop all background services, waiting up to 1s for each."""
        self._processor.join(timeout=1)
        self._api.join(timeout=1)
        if self._crons:
            self._cron_processor.join(timeout=1)

    def run(self):
        """Start everything, then idle until Ctrl-C; always stop on exit."""
        try:
            self.start()
            logger.info("[TASKS] All services started")
            try:
                while True:
                    time.sleep(_LOOP_SLEEP_TIME)
            except KeyboardInterrupt:
                pass
        finally:
            self.stop()
def create_server(
    host, port,
    target_host=DEFAULT_TARGET_HOST, target_port=DEFAULT_TARGET_PORT,
    default_queue_names=None, max_retries=-1, crons=None
):
    """Build a Server; a falsy default_queue_names means no default queues."""
    queue_names = default_queue_names or []
    return Server(
        host, port, target_host, target_port, queue_names, max_retries, crons
    )
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
23340
-91.26%19
-34.48%415
-93.21%