diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cb490c791..50e960cd7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,6 @@ repos: hooks: - id: trailing-whitespace - id: end-of-file-fixer - - id: check-yaml - id: check-added-large-files - repo: https://github.com/astral-sh/ruff-pre-commit diff --git a/README.md b/README.md index 0a7835c48..83d5876c1 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ The python SDK for running [Octue](https://octue.com) data services, digital twins, and applications - get faster data groundwork so you have more time for the science! -Read the docs [here.](https://octue-python-sdk.readthedocs.io/en/latest/) +Read the docs [here.](https://twined.octue.com/) Uses our [twined](https://twined.readthedocs.io/en/latest/) library for data validation. @@ -41,7 +41,6 @@ Usage: octue [OPTIONS] COMMAND [ARGS]... Read more in the docs: https://octue-python-sdk.readthedocs.io/en/latest/ Options: - --logger-uri TEXT Stream logs to a websocket at the given URI. --log-level [debug|info|warning|error] Log level used for the analysis. [default: info] diff --git a/docs/authentication.md b/docs/authentication.md index 32eb069d1..2a32c3371 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -1,17 +1,15 @@ -# Authentication +You need authentication while using Twined to: -You need authentication while using `octue` to: - -- Access data from Google Cloud Storage -- Use, run, or deploy Twined services +- Use or run services +- Access input and output data from analyses run on services Authentication is provided by a GCP service account. ## Creating a service account By setting up your Twined service network with the -[Twined Terraform modules](/deploying_services), a set of maintainer service accounts have already been -created with the required permissions. 
+[Twined Terraform modules](core_concepts/deploying_services.md), a set of maintainer service accounts have already been +created with the required permissions. These will have names starting with `maintainer-`. ## Using a service account @@ -19,19 +17,19 @@ created with the required permissions. 1. Access your service accounts [here](https://console.cloud.google.com/iam-admin/serviceaccounts), - making sure the correct project is selected + making sure the correct project is selected, or ask your Twined service network administrator 2. Click on the relevant service account, go to the "Keys" tab, and create (download) a JSON key for it - it will be called `-XXXXX.json`. !!! danger - It's best not to store this in your project to prevent accidentally + It's best not to store this in your repository to prevent accidentally committing it or building it into a docker image layer. Instead, keep it somewhere else on your local system with any other service account keys you already have. - If you must keep within your project, it's good practice to name the + If you must keep within your repository, it's good practice to name the file `gcp-credentials.json` and make sure that `gcp-cred*` is in your `.gitignore` and `.dockerignore` files. @@ -46,8 +44,7 @@ created with the required permissions. 
### On GCP infrastructure / deployed services -- Credentials are automatically provided when running code or services - on GCP infrastructure, including the Kubernetes cluster -- `octue` uses these when when running on these platforms, so there's - no need to upload a service account key or include one in service - docker images +- Credentials are automatically provided when running code or services on GCP infrastructure, including the Kubernetes + cluster +- Twined uses these when running on these platforms, so there's no need to upload a service account key or include one + in service docker images diff --git a/docs/available_filters.md b/docs/available_filters.md deleted file mode 100644 index df59c77fb..000000000 --- a/docs/available_filters.md +++ /dev/null @@ -1,110 +0,0 @@ -# Available filters - -Lots of filters are available when using the `Dataset.files.filter` -method. We've broken them down by the type of attribute the datafiles -are being filtered by: - -- Numbers (e.g. `int`, `float`): - - - `is` - - `is_not` - - `equals` - - `not_equals` - - `lt` - - `lte` - - `gt` - - `gte` - - `in_range` - - `not_in_range` - -- Iterables (e.g. 
`list`, `set`, `tuple`, `dictionary`): - - - `is` - - `is_not` - - `equals` - - `not_equals` - - `contains` - - `not_contains` - - `icontains` - - `not_icontains` - -- `bool` - - - `is` - - `is_not` - -- `str` - - - `is` - - `is_not` - - `equals` - - `not_equals` - - `iequals` - - `not_iequals` - - `lt` (less than) - - `lte` (less than or equal) - - `gt` (greater than) - - `gte` (greater than or equal) - - `contains` - - `not_contains` - - `icontains` (case-insensitive contains) - - `not_icontains` - - `starts_with` - - `not_starts_with` - - `ends_with` - - `not_ends_with` - - `in_range` - - `not_in_range` - -- `NoneType` - - - `is` - - `is_not` - -- `LabelSet` - - - `is` - - `is_not` - - `equals` - - `not_equals` - - `contains` - - `not_contains` - - `any_label_contains` - - `not_any_label_contains` - - `any_label_starts_with` - - `not_any_label_starts_with` - - `any_label_ends_with` - - `not_any_label_ends_with` - -- `datetime.datetime` - - - `is` - - `is_not` - - `equals` - - `not_equals` - - `lt` (less than) - - `lte` (less than or equal) - - `gt` (greater than) - - `gte` (greater than or equal) - - `in_range` - - `not_in_range` - - `year_equals` - - `year_in` - - `month_equals` - - `month_in` - - `day_equals` - - `day_in` - - `weekday_equals` - - `weekday_in` - - `iso_weekday_equals` - - `iso_weekday_in` - - `time_equals` - - `time_in` - - `hour_equals` - - `hour_in` - - `minute_equals` - - `minute_in` - - `second_equals` - - `second_in` - - `in_date_range` - - `in_time_range` diff --git a/docs/asking_questions.md b/docs/core_concepts/asking_questions.md similarity index 96% rename from docs/asking_questions.md rename to docs/core_concepts/asking_questions.md index e1d78224e..8df657155 100644 --- a/docs/asking_questions.md +++ b/docs/core_concepts/asking_questions.md @@ -21,7 +21,7 @@ to a child for processing/analysis. Questions can be: or custom logic in your own webserver. Questions are always asked to a _revision_ of a service. 
You can ask a -service a question if you have its [SRUID](/services/#service-names), project ID, and the necessary permissions. +service a question if you have its [SRUID](services.md/#service-names), project ID, and the necessary permissions. ## Asking a question @@ -49,7 +49,7 @@ answer["output_manifest"]["my_dataset"].files If you're using an environment other than the `main` environment, then before asking any questions to your Twined services, set the `TWINED_SERVICES_TOPIC_NAME` environment variable to the name of the Twined services Pub/Sub topic - (this is set when [deploying a service network](/deploying_services/#deploying-services-advanced-developers-guide). + (this is set when [deploying a service network](deploying_services.md)). It will be in the form `.octue.twined.services` !!! note @@ -64,7 +64,7 @@ You can also set the following options when you call `Child.ask`: - `children` - If the child has children of its own (i.e. grandchildren of the parent), this optional argument can be used to override the child's "default" children. This allows you to specify particular versions of grandchildren to - use (see [this subsection below](#overriding-a-childs-children). + use (see [this subsection below](#overriding-a-childs-children)). - `subscribe_to_logs` - if true, the child will forward its logs to you - `allow_local_files` - if true, local files/datasets are allowed in any input manifest you supply - `handle_monitor_message` - if provided a function, it will be called on any monitor messages from the child @@ -242,10 +242,10 @@ at once instead of one after another. ## Asking a question within a service If you have -[created your own Twined service](/creating_services) and want to ask children questions, you can do this more +[created your own Twined service](creating_services.md) and want to ask children questions, you can do this more easily than above. 
Children are accessible from the `analysis` object by the keys you give them in the -[service configuration](/creating_services/#octueyaml) file. For example, you can ask an `elevation` service a +[service configuration](creating_services.md/#octueyaml) file. For example, you can ask an `elevation` service a question like this: ```python @@ -314,7 +314,7 @@ you want it to use (dynamic children) to the children will instead go to the dynamic children. Note that: - You must provide the children in the same format as they're provided - in the [service configuration](/creating_services/#octueyaml) + in the [service configuration](../core_concepts/creating_services.md/#octueyaml) - If you override one static child, you must override others, too - The dynamic children must have the same keys as the static children (so the child knows which service to ask which questions) diff --git a/docs/creating_apps.md b/docs/core_concepts/creating_apps.md similarity index 99% rename from docs/creating_apps.md rename to docs/core_concepts/creating_apps.md index c9cb0ff26..66ce9ade9 100644 --- a/docs/creating_apps.md +++ b/docs/core_concepts/creating_apps.md @@ -1,4 +1,4 @@ -# app.py file {#creating_apps} +# The `app.py` file The `app.py` file is, as you might expect, the entrypoint to your app. It can contain any valid python including imports and use of any number diff --git a/docs/creating_services.md b/docs/core_concepts/creating_services.md similarity index 85% rename from docs/creating_services.md rename to docs/core_concepts/creating_services.md index d9eef87ba..5725bc010 100644 --- a/docs/creating_services.md +++ b/docs/core_concepts/creating_services.md @@ -12,19 +12,16 @@ run locally on any machine or be deployed to the cloud. Currently: ## Anatomy of a Twined service -A Twined service is defined by the following files (located in the -repository root by default). +A Twined service is defined by the following files (located in the repository root by default). 
### app.py -This is the entrypoint into your code - read more [here](/creating_apps). +This is the entrypoint into your code - read more [here](creating_apps.md). ### twine.json -This file defines the schema for the service's configuration, input, -and output data. Read more -[here](https://twined.readthedocs.io/en/latest/) and see an example -[here](https://twined.readthedocs.io/en/latest/quick_start_create_your_first_twine.html). +This file defines the schema for the service's configuration, input, and output data. Read more [here](twines/anatomy.md) and +see an example [here](twines/twine_file_quickstart.md). ### Dependencies file @@ -45,26 +42,26 @@ are supported. ``` yaml services: - - namespace: my-organisation + - namespace: my-organisation name: my-app ``` It may also need the following key-value pairs: - + - `app_source_path: ` - if your `app.py` file is not in the repository root - + All paths should be relative to the repository root. Other valid entries can be found in the `ServiceConfiguration` constructor. - + !!! warning - + Currently, only one service can be defined per repository, but it must still appear as a list item of the "services" key. At some point, it will be possible to define multiple services in one repository. - + If a service's app needs any configuration, asks questions to any other Twined services, or produces output datafiles/datasets, you will need to provide some or all of the following values for that service: - + - `configuration_values` - `configuration_manifest` - `children` @@ -95,36 +92,35 @@ are supported. - [An OpenFAST service](https://github.com/octue/openfast-service/blob/main/Dockerfile) If you do provide one, you must provide its path relative to your - repository to the [build-twined-services] GitHub Actions [workflow](https://github.com/octue/workflows/blob/main/.github/workflows/build-twined-service.yml). 
+ repository to the `build-twined-services` GitHub Actions [workflow](https://github.com/octue/workflows/blob/main/.github/workflows/build-twined-service.yml). As always, if you need help with this, feel free to drop us a message or raise an issue! - ### Where to specify the namespace, name, and revision tag -See [here](/services/#service-names) for service naming requirements. +See [here](services.md/#service-names) for service naming requirements. **Namespace** - Required: yes - Set in: - - `octue.yaml` - - `OCTUE_SERVICE_NAMESPACE` environment variable (takes priority) + - `octue.yaml` + - `OCTUE_SERVICE_NAMESPACE` environment variable (takes priority) **Name** - Required: yes - Set in: - - `octue.yaml` - - `OCTUE_SERVICE_NAME` environment variable (takes priority) + - `octue.yaml` + - `OCTUE_SERVICE_NAME` environment variable (takes priority) **Revision tag** - Required: no - Default: a random "coolname" (e.g. `hungry-hippo`) - Set in: - - `OCTUE_SERVICE_REVISION_TAG` environment variable - - If using `octue twined service start` command, the `--revision-tag` option (takes priority) + - `OCTUE_SERVICE_REVISION_TAG` environment variable + - If using `octue twined service start` command, the `--revision-tag` option (takes priority) ## Template apps @@ -153,4 +149,4 @@ Automated deployment with Octue means: other services are sent to it, meaning there are minimal costs to having it deployed but not in use. -If you'd like help deploying services, contact us. To do it yourself, see [here](/deploying_services). +If you'd like help deploying services, [contact us](../support.md). To do it yourself, see [here](deploying_services.md). 
diff --git a/docs/core_concepts/data_containers/available_filters.md b/docs/core_concepts/data_containers/available_filters.md new file mode 100644 index 000000000..b659f7072 --- /dev/null +++ b/docs/core_concepts/data_containers/available_filters.md @@ -0,0 +1,110 @@ +# Available filters + +Lots of filters are available when using the `Dataset.files.filter` +method. We've broken them down by the type of attribute the datafiles +are being filtered by: + +- Numbers (e.g. `int`, `float`): + + - `is` + - `is_not` + - `equals` + - `not_equals` + - `lt` + - `lte` + - `gt` + - `gte` + - `in_range` + - `not_in_range` + +- Iterables (e.g. `list`, `set`, `tuple`, `dictionary`): + + - `is` + - `is_not` + - `equals` + - `not_equals` + - `contains` + - `not_contains` + - `icontains` + - `not_icontains` + +- `bool` + + - `is` + - `is_not` + +- `str` + + - `is` + - `is_not` + - `equals` + - `not_equals` + - `iequals` + - `not_iequals` + - `lt` (less than) + - `lte` (less than or equal) + - `gt` (greater than) + - `gte` (greater than or equal) + - `contains` + - `not_contains` + - `icontains` (case-insensitive contains) + - `not_icontains` + - `starts_with` + - `not_starts_with` + - `ends_with` + - `not_ends_with` + - `in_range` + - `not_in_range` + +- `NoneType` + + - `is` + - `is_not` + +- `LabelSet` + + - `is` + - `is_not` + - `equals` + - `not_equals` + - `contains` + - `not_contains` + - `any_label_contains` + - `not_any_label_contains` + - `any_label_starts_with` + - `not_any_label_starts_with` + - `any_label_ends_with` + - `not_any_label_ends_with` + +- `datetime.datetime` + + - `is` + - `is_not` + - `equals` + - `not_equals` + - `lt` (less than) + - `lte` (less than or equal) + - `gt` (greater than) + - `gte` (greater than or equal) + - `in_range` + - `not_in_range` + - `year_equals` + - `year_in` + - `month_equals` + - `month_in` + - `day_equals` + - `day_in` + - `weekday_equals` + - `weekday_in` + - `iso_weekday_equals` + - `iso_weekday_in` + - `time_equals` + - 
`time_in` + - `hour_equals` + - `hour_in` + - `minute_equals` + - `minute_in` + - `second_equals` + - `second_in` + - `in_date_range` + - `in_time_range` diff --git a/docs/data_containers/datafile.md b/docs/core_concepts/data_containers/datafiles.md similarity index 93% rename from docs/data_containers/datafile.md rename to docs/core_concepts/data_containers/datafiles.md index 31093f691..86832f37f 100644 --- a/docs/data_containers/datafile.md +++ b/docs/core_concepts/data_containers/datafiles.md @@ -1,19 +1,4 @@ -# Datafile - -!!! info "Definitions" - - **Datafile** - - A single local or cloud file, its metadata, and helper methods. - - **Locality** - - A datafile has one of these localities: - - - **Cloud-based:** it exists only in the cloud - - **Local:** it exists only on your local filesystem - - **Cloud-based and local:** it's cloud-based but has been - downloaded for low-latency reading/writing +# Datafiles !!! tip @@ -66,7 +51,7 @@ Datafile content isn't downloaded until you: - Call its `download` method - Use its `local_path` property -Read more about downloading files [here](/downloading_datafiles). +Read more about downloading files [here](downloading_datafiles.md). ### CLI command friendly @@ -199,7 +184,7 @@ exiting the context (the `with` block), it closes the datafile locally and, if the datafile also exists in the cloud, updates the cloud object with any data or metadata changes. -![image](../images/datafile_use_cases.png) +![image](../../images/datafile_use_cases.png) ### Example A diff --git a/docs/data_containers/dataset.md b/docs/core_concepts/data_containers/datasets.md similarity index 92% rename from docs/data_containers/dataset.md rename to docs/core_concepts/data_containers/datasets.md index 02442c875..5f6e6bd4e 100644 --- a/docs/data_containers/dataset.md +++ b/docs/core_concepts/data_containers/datasets.md @@ -1,16 +1,4 @@ -# Dataset - -!!! 
info "Definitions" - - **Dataset** - - A set of related datafiles that exist in the same location, dataset metadata, and helper methods. - - **Locality** - - A dataset has one of these localities: - - **Cloud-based:** it exists only in the cloud - - **Local:** it exists only on your local filesystem +# Datasets !!! tip @@ -214,7 +202,7 @@ dataset.files.filter(extension="csv", labels__contains="a") >>> })> ``` -For the full list of available filters, [click here](/available_filters). +For the full list of available filters, [click here](available_filters.md). ### Order datasets diff --git a/docs/downloading_datafiles.md b/docs/core_concepts/data_containers/downloading_datafiles.md similarity index 91% rename from docs/downloading_datafiles.md rename to docs/core_concepts/data_containers/downloading_datafiles.md index 7938653f6..6842c271a 100644 --- a/docs/downloading_datafiles.md +++ b/docs/core_concepts/data_containers/downloading_datafiles.md @@ -1,7 +1,7 @@ # More information on downloading datafiles - To avoid unnecessary data transfer and costs, cloud datafiles are not - downloaded locally [until necessary](/data_containers/datafile/#automatic-lazy-downloading) + downloaded locally [until necessary](datafiles.md/#automatic-lazy-downloading) - When downloaded, they are downloaded by default to a temporary local file that will exist at least as long as the python session is running - Calling `Datafile.download` or using `Datafile.local_path` again will diff --git a/docs/core_concepts/data_containers/index.md b/docs/core_concepts/data_containers/index.md new file mode 100644 index 000000000..f86236991 --- /dev/null +++ b/docs/core_concepts/data_containers/index.md @@ -0,0 +1,9 @@ +# Datafiles, datasets, and manifests + +One of the main features of Twined is making using, creating, and +sharing scientific datasets easy. There are three main data classes that do this: + +- **Datafile** - [a single local or cloud file](datafiles.md) and its metadata. 
+- **Dataset** - [a set of related datafiles](datasets.md) that exist in the same location, plus metadata. +- **Manifest** - [a set of related datasets](manifests.md) that exist anywhere, plus metadata. Typically produced by or for + one analysis. diff --git a/docs/data_containers/manifest.md b/docs/core_concepts/data_containers/manifests.md similarity index 88% rename from docs/data_containers/manifest.md rename to docs/core_concepts/data_containers/manifests.md index e04310072..9c1492809 100644 --- a/docs/data_containers/manifest.md +++ b/docs/core_concepts/data_containers/manifests.md @@ -1,15 +1,8 @@ -# Manifest - -!!! info "Definition" - - **Manifest** - - A set of related cloud and/or local [datasets](/dataset), metadata, and helper methods. Typically produced by or - needed for processing by a Twined service. +# Manifests !!! tip - Use a manifest to send [datasets](/dataset) to a Twined service as a question (for processing) - the + Use a manifest to send [datasets](datasets.md) to a Twined service as a question (for processing) - the service will send an output manifest back with its answer if the answer includes output datasets. ## Key features @@ -46,7 +39,7 @@ child = Child( answer, question_uuid = child.ask(input_manifest=manifest) ``` -See [here](/asking_questions) for more information. +See [here](../asking_questions.md) for more information. 
### Receive datasets from a service diff --git a/docs/deploying_services.md b/docs/core_concepts/deploying_services.md similarity index 94% rename from docs/deploying_services.md rename to docs/core_concepts/deploying_services.md index 91a658412..8a1c90adf 100644 --- a/docs/deploying_services.md +++ b/docs/core_concepts/deploying_services.md @@ -1,4 +1,4 @@ -# Deploying services (developer's guide) {#deploying_services_advanced} +# Deploying services (developer's guide) This is a guide for developers that want to deploy Twined services themselves - it is not needed if Octue manages your services for you or @@ -42,7 +42,7 @@ done automatically: including automated pre-deployment testing and creation of a GitHub release -You can now [ask your service some questions](/asking_questions)! It will be available in the service network as +You can now [ask your service some questions](asking_questions.md)! It will be available in the service network as `/:` (e.g. `octue/example-service-kueue:0.1.1`). ## Deploying the infrastructure diff --git a/docs/inter_service_compatibility.md b/docs/core_concepts/inter_service_compatibility.md similarity index 75% rename from docs/inter_service_compatibility.md rename to docs/core_concepts/inter_service_compatibility.md index 4079e0ad5..e288afbd5 100644 --- a/docs/inter_service_compatibility.md +++ b/docs/core_concepts/inter_service_compatibility.md @@ -14,64 +14,65 @@ question. 
- `0` = incompatible - `1` = compatible -| | 0.68.0 | 0.67.0 | 0.66.1 | 0.66.0 | 0.65.0 | 0.64.0 | 0.63.0 | 0.62.1 | 0.62.0 | 0.61.2 | 0.61.1 | 0.61.0 | 0.60.2 | 0.60.1 | 0.60.0 | 0.59.1 | 0.59.0 | 0.58.0 | 0.57.2 | 0.57.1 | 0.57.0 | 0.56.0 | 0.55.0 | 0.54.0 | 0.53.0 | 0.52.2 | 0.52.1 | 0.52.0 | 0.51.0 | 0.50.1 | 0.50.0 | 0.49.2 | 0.49.1 | 0.49.0 | 0.48.0 | 0.47.2 | 0.47.1 | 0.47.0 | 0.46.3 | 0.46.2 | 0.46.1 | 0.46.0 | 0.45.0 | 0.44.0 | 0.43.7 | 0.43.6 | 0.43.5 | 0.43.4 | 0.43.3 | 0.43.2 | 0.43.1 | 0.43.0 | 0.42.1 | 0.42.0 | 0.41.1 | 0.41.0 | 0.40.2 | 0.40.1 | 0.40.0 | -| :----- | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -| 0.68.0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.67.0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.66.1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.66.0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 
0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.65.0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.64.0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.63.0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.62.1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.62.0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.61.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.61.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.61.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 
0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.60.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.60.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.60.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.59.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.59.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.58.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.57.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.57.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 
0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.57.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.56.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.55.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.54.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.53.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.52.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.52.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.52.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 
0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.51.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -| 0.50.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.50.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.49.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.49.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.49.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.48.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.47.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.47.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.47.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.46.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.46.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.46.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.46.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.45.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.44.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.43.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
1 | 1 | 1 | 1 | -| 0.42.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.42.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.41.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.41.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.40.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.40.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -| 0.40.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| | 0.69.0 | 0.68.0 | 0.67.0 | 0.66.1 | 0.66.0 | 0.65.0 | 0.64.0 | 0.63.0 | 0.62.1 | 0.62.0 | 0.61.2 | 0.61.1 | 0.61.0 | 0.60.2 | 0.60.1 | 0.60.0 | 0.59.1 | 0.59.0 | 0.58.0 | 0.57.2 | 0.57.1 | 0.57.0 | 0.56.0 | 0.55.0 | 0.54.0 | 0.53.0 | 0.52.2 | 
0.52.1 | 0.52.0 | 0.51.0 | 0.50.1 | 0.50.0 | 0.49.2 | 0.49.1 | 0.49.0 | 0.48.0 | 0.47.2 | 0.47.1 | 0.47.0 | 0.46.3 | 0.46.2 | 0.46.1 | 0.46.0 | 0.45.0 | 0.44.0 | 0.43.7 | 0.43.6 | 0.43.5 | 0.43.4 | 0.43.3 | 0.43.2 | 0.43.1 | 0.43.0 | 0.42.1 | 0.42.0 | 0.41.1 | 0.41.0 | 0.40.2 | 0.40.1 | 0.40.0 | +| :----- | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | -----: | +| 0.69.0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.68.0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.67.0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.66.1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.66.0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.65.0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.64.0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.63.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.62.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.62.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.61.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.61.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.61.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.60.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.60.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.60.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.59.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.59.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.58.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.57.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.57.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.57.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.56.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.55.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.54.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.53.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.52.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.52.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.52.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.51.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +| 0.50.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.50.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.49.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.49.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.49.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.48.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.47.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.47.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.47.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.46.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.46.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.46.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.46.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.45.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.44.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.43.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.42.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.42.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.41.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.41.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.40.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.40.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | +| 0.40.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | diff --git a/docs/logging.md 
b/docs/core_concepts/logging.md similarity index 97% rename from docs/logging.md rename to docs/core_concepts/logging.md index db5ce7f88..194e18cda 100644 --- a/docs/logging.md +++ b/docs/core_concepts/logging.md @@ -1,5 +1,3 @@ -# Logging - By default, `octue` streams your logs to `stderr` in a nice, readable format so your log messages are immediately visible when you start developing without any extra configuration. If you prefer to use your @@ -32,7 +30,7 @@ Another advantage to using the Octue log handler is that each Twined service is coloured according to its position in the tree, making it much easier to read log messages from multiple levels of children. -![image](images/coloured_logs.png) +![image](../images/coloured_logs.png) In this example: diff --git a/docs/running_services_locally.md b/docs/core_concepts/running_services_locally.md similarity index 92% rename from docs/running_services_locally.md rename to docs/core_concepts/running_services_locally.md index e547b9ec3..a29f53e94 100644 --- a/docs/running_services_locally.md +++ b/docs/core_concepts/running_services_locally.md @@ -1,5 +1,3 @@ -# Running services locally - Services can be operated locally (e.g. for testing or ad-hoc data processing). You can: @@ -15,7 +13,7 @@ processing). You can: ### Via the CLI 1. Ensure you've created a valid - [`octue.yaml`](/creating_services/#octueyaml) + [`octue.yaml`](../core_concepts/creating_services.md/#octueyaml) file for your service 2. Run: @@ -52,7 +50,7 @@ See the `Runner` API documentation for more advanced usage including providing c ### Via the CLI 1. Ensure you've created a valid - [`octue.yaml`](/creating_services/#octueyaml) + [`octue.yaml`](../core_concepts/creating_services.md/#octueyaml) file for your service 2. 
Run: diff --git a/docs/services.md b/docs/core_concepts/services.md similarity index 69% rename from docs/services.md rename to docs/core_concepts/services.md index 02eb209d9..edb519798 100644 --- a/docs/services.md +++ b/docs/core_concepts/services.md @@ -1,50 +1,8 @@ # Twined services -There's a growing range of live [services](/) in the +There's a growing range of live services in the Twined ecosystem that you can query, mostly related to wind energy and -other renewables. Here's a quick glossary of terms before we tell you -more: - -!!! info "Definitions" - - **Twined service** - - See [here](/). - - **Child** - - A Twined service that can be asked a question. This name reflects - the tree structure of services (specifically, [a - DAG](https://en.wikipedia.org/wiki/Directed_acyclic_graph)) formed - by the service asking the question (the parent), the child it asks - the question to, any children that the child asks questions to as - part of forming its answer, and so on. - - **Parent** - - A Twined service that asks a question to another Twined service (a - child). - - **Asking a question** - - Sending data (input values and/or an input manifest) to a child for - processing/analysis. - - **Receiving an answer** - - Receiving data (output values and/or an output manifest) from a - child you asked a question to. - - **Twined ecosystem** - - The set of services running Twined as their backend. These - services guarantee: - - - Defined input/output JSON schemas and validation - - An easy and consistent interface for asking them questions and - receiving their answers - - Logs, exceptions, and monitor messages forwarded to you - - High availability (if deployed in the cloud) +other renewables. ## Service names @@ -113,7 +71,7 @@ doesn't have to be). it. This enables asking questions to, for example, the service `octue/my-service` and automatically having them routed to its default (usually latest) revision. 
- [See here for more info](/asking_questions/#asking-a-question) + [See here for more info](asking_questions.md/#asking-a-question) ## Service communication standard diff --git a/docs/testing_services.md b/docs/core_concepts/testing_services.md similarity index 98% rename from docs/testing_services.md rename to docs/core_concepts/testing_services.md index 32fe14408..7a6db7e99 100644 --- a/docs/testing_services.md +++ b/docs/core_concepts/testing_services.md @@ -1,5 +1,3 @@ -# Testing services {#testing_services} - We recommend writing automated tests for your service so anyone who wants to use it can have confidence in its quality and reliability at a glance. [Here's an example @@ -287,4 +285,4 @@ result ``` You can also create test fixtures from -[downloaded service crash diagnostics](/troubleshooting_services/#creating-test-fixtures-from-diagnostics) +[downloaded service crash diagnostics](troubleshooting_services.md/#creating-test-fixtures-from-diagnostics). diff --git a/docs/core_concepts/troubleshooting_infrastructure.md b/docs/core_concepts/troubleshooting_infrastructure.md new file mode 100644 index 000000000..93b4d1386 --- /dev/null +++ b/docs/core_concepts/troubleshooting_infrastructure.md @@ -0,0 +1,80 @@ +Twined services run as "pods" on a Kubernetes cluster. If there's a problem with the pod's container image, it can +fail to start on the cluster and fail silently. This is most likely a deployment or infrastructure problem, not +a problem caused by the code running in the service. However, if a +[custom Dockerfile](https://github.com/octue/workflows?tab=readme-ov-file#deploying-a-kuberneteskueue-octue-twined-service-revision) +is specified for the service by the app creator, this is a likely cause of the problem. + +## Monitoring Twined services + +`kubectl` is a standard Kubernetes CLI tool for interacting with clusters. We can use it to observe Twined +services currently running questions as well as recently successful or failed questions. 
+
+### Observing questions
+
+!!! warning
+
+    This tool requires permission to access and interact with the Kubernetes cluster running the Twined service network.
+    It's mostly useful for the service network administrator but others can use it, too, if they're given the relevant
+    permissions. Be careful who is given these permissions - `kubectl` is a powerful tool.
+
+Follow the
+[installation and authentication](https://cloud.google.com/kubernetes-engine/docs/how-to/cluster-access-for-kubectl)
+instructions (installing `kubectl` and using the `gcloud container clusters get-credentials` command to authenticate
+with the cluster) and then run:
+
+```shell
+kubectl get pods
+```
+
+You'll see something like this:
+
+```
+NAME                                                  READY   STATUS              RESTARTS   AGE
+question-372a0c94-b95e-4a0e-8a9f-019d0bf3046b-wm7gp   0/1     Error               0          23m
+question-5c9a6e86-5431-44fb-bd3a-936fcaf217c1-6cqzb   0/1     ContainerCreating   0          1s
+question-5c9a6e86-5431-44fb-bd3a-936fcaf217d1-3cqzc   1/1     Running             0          1s
+question-23dfb292-f23e-4524-9676-9deff9d4f1bd-nhb26   0/1     Completed           0          13s
+```
+
+Each pod is named like `question-<uuid>-wm7gp`, representing a question asked to a service in your service
+network with the question UUID `<uuid>`. The group of characters after the UUID is non-deterministic
+and not relevant.
+
+### Question statuses
+
+There are several possible statuses for a question.
The most relevant are: + +- `Pending` - the question has yet to be accepted by the cluster +- `ContainerCreating` - the Twined service is starting up and hasn't run the question yet +- `Error` - the question failed or the service's pod failed to start +- `Running` - the question is running in the Twined service +- `Completed` - the question completed successfully and the service returned its results + +## Inspecting a failed question + +If the question has an `Error` status, you can inspect it to see its logs: + +```shell +kubectl describe pod question-372a0c94-b95e-4a0e-8a9f-019d0bf3046b-wm7gp +``` + +The `Events` section at the bottom is often useful in finding what the issue is: + +``` +... + +Events: + Type Reason Age From Message + ---- ------ ---- ---- ------- + Normal Scheduled 47m gke.io/optimize-utilization-scheduler Successfully assigned default/question-372a0c94-b95e-4a0e-8a9f-019d0bf3046b-wm7gp to gk3-main-octue-twined-cl-nap-1a9cv5dt-f15cf29a-sdzc + Normal Pulled 47m kubelet Container image "europe-west9-docker.pkg.dev/octue-twined-services/octue-twined-services/octue/example-service-kueue:0.1.0" already present on machine + Normal Created 47m kubelet Created container: question-372a0c94-b95e-4a0e-8a9f-019d0bf3046b + Normal Started 47m kubelet Started container question-372a0c94-b95e-4a0e-8a9f-019d0bf3046b +``` + +If it's not helpful or looks successful (as above), follow up with the question's logs to see if something went wrong in +the app code: + +```shell +kubectl logs question-372a0c94-b95e-4a0e-8a9f-019d0bf3046b-wm7gp +``` diff --git a/docs/troubleshooting_services.md b/docs/core_concepts/troubleshooting_services.md similarity index 95% rename from docs/troubleshooting_services.md rename to docs/core_concepts/troubleshooting_services.md index f9de86687..802263139 100644 --- a/docs/troubleshooting_services.md +++ b/docs/core_concepts/troubleshooting_services.md @@ -1,5 +1,3 @@ -# Troubleshooting services - ## Diagnostics Services save the 
following data to the cloud if they crash while @@ -19,7 +17,7 @@ default): For this feature to be enabled, the child must have the `diagnostics_cloud_path` field in its service configuration - ([`octue.yaml`](/creating_services/#octueyaml) file) set to a Google Cloud Storage path. + ([`octue.yaml`](creating_services.md/#octueyaml) file) set to a Google Cloud Storage path. ## Accessing diagnostics @@ -55,7 +53,7 @@ Options: -h, --help Show this message and exit. ``` -## Creating test fixtures from diagnostics {#test_fixtures_from_diagnostics} +## Creating test fixtures from diagnostics You can create test fixtures directly from diagnostics, allowing you to recreate the exact conditions that caused your service to fail. diff --git a/docs/core_concepts/twines/anatomy.md b/docs/core_concepts/twines/anatomy.md new file mode 100644 index 000000000..ecf06c25f --- /dev/null +++ b/docs/core_concepts/twines/anatomy.md @@ -0,0 +1,40 @@ +# Anatomy of the twine file + +The main point of a twine file (`twine.json`) is to enable engineers and scientists to easily (and rigorously) define a +Twined service. + +Adding a twine means you can: + +- Communicate to you, a colleague, another service or machine what data is required by the service +- Deploy services automatically with a provider like [Octue](https://www.octue.com) + +Here, we describe the parts of a twine and what they mean. + +!!! tip + + To just get started building a twine, check out the [quickstart](twine_file_quickstart.md). + +## Strands + +A twine has several sections, called strands. Each defines a different kind of data required (or produced) by the service. + +| Strand | Describes the service's requirements for... 
| +| ------------------------------------------------------------------- | ------------------------------------------------------------------------------------ | +| [Configuration values](values.md#configuration-values-strand) | Data, in JSON form, used for configuration of the service | +| [Configuration manifest](manifest.md#configuration-manifest-strand) | Files/datasets required by the service at configuration/startup | +| [Input values](values.md#input-values-strand) | Data, in JSON form, passed to the service in order to trigger an analysis | +| [Input manifest](manifest.md#input-manifest-strand) | Files/datasets passed with input values to trigger an analysis | +| [Output values](values.md#output-values-strand) | Data, in JSON form, that will be produced by the service (in response to inputs) | +| [Output manifest](manifest.md#output-manifest-strand) | Files/datasets that will be produced by the service (in response to inputs) | +| [Credentials](credentials.md) | Credentials that are required by the service in order to access third party services | +| Children | Other twins, access to which are required for this service to function | +| [Monitors](monitors.md) | Visual and progress outputs from an analysis | + +## Twine file schema + +Because the twine itself is a JSON file with a strict structure, there's a schema to make sure it's correctly +written (a "schema of a schema", or metaschema). We don't need to think about it too much here, but it's +[here for reference](https://github.com/octue/octue-sdk-python/blob/main/octue/twined/schema/twine_schema.json). + +The first thing Twined always does is check that the `twine.json` file itself is valid, and give you a descriptive error +if it isn't. 
diff --git a/docs/twines/anatomy_credentials.md b/docs/core_concepts/twines/credentials.md similarity index 52% rename from docs/twines/anatomy_credentials.md rename to docs/core_concepts/twines/credentials.md index 8ed890dc8..60f854d75 100644 --- a/docs/twines/anatomy_credentials.md +++ b/docs/core_concepts/twines/credentials.md @@ -1,11 +1,10 @@ -# Credentials Strand {#credentials_strand} +# Credentials strand In order to: - GET/POST data from/to an API, - query a database, or -- connect to a socket (for receiving Values or emitting Values, Monitors - or Logs), +- connect to a socket (for receiving or emitting values or emitting, monitors or logs), A digital twin must have _access_ to it. API keys, database URIs, etc must be supplied to the digital twin but treated with best practice with @@ -13,13 +12,13 @@ respect to security considerations. The purpose of the `credentials` strand is to dictate what credentials the twin requires in order to function. -## Defining the Credentials Strand {#defining_the_credentials_strand} +## Defining the credentials strand This is the simplest of the strands, containing a list of credentials -(whose `NAMES_SHOULD_BE_SHOUTY_SNAKE_CASE`) with a reminder of the +(whose `NAMES_SHOULD_BE_SCREAMING_SNAKE_CASE`) with a reminder of the purpose. -```javascript +```json { "credentials": [ { @@ -38,43 +37,32 @@ purpose. } ``` -## Supplying Credentials {#supplying_credentials} +## Supplying credentials -:::: attention -::: title -Attention -::: +!!! warning -_Credentials should never be hard-coded into application code_ + Credentials should never be hard-coded into application code. -Do you trust the twin code? If you insert credentials to your own -database into a digital twin provided by a third party, you better be -very sure that twin isn\'t going to scrape all that data out then send -it elsewhere! + Do you trust the twin code? 
If you insert credentials to your own + database into a digital twin provided by a third party, you better be + very sure that twin isn't going to scrape all that data out then send + it elsewhere! -Alternatively, if you\'re building a twin requiring such credentials, -it\'s your responsibility to give the end users confidence that you\'re -not abusing their access. + Alternatively, if you're building a twin requiring such credentials, + it's your responsibility to give the end users confidence that you're + not abusing their access. -There\'ll be a lot more discussion on these issues, but it\'s outside -the scope of **twined** - all we do here is make sure a twin has the -credentials it requires. -:::: + There'll be a lot more discussion on these issues, but it's outside + the scope of **twined** - all we do here is make sure a twin has the + credentials it requires. Credentials should be securely managed by whatever system is managing the twin, then made accessible to the twin in the form of environment -variables: - -```javascript -SERVICE_API_KEY = - someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor; -``` - -Credentials may also reside in a `.env` file in the current directory, +variables. Credentials may also reside in a `.env` file in the current directory, either in the format above (with a new line for each variable) or, for convenience, as bash exports like: -```javascript +```shell export SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor ``` diff --git a/docs/core_concepts/twines/examples.md b/docs/core_concepts/twines/examples.md new file mode 100644 index 000000000..e86b174b3 --- /dev/null +++ b/docs/core_concepts/twines/examples.md @@ -0,0 +1,185 @@ +# Examples + +Here, we look at example use cases for the library, and show how to use +it in python. 
+ +These examples are copied straight from the unit test cases, so you +can always check there to see how everything hooks up. + +## Equipment installation cost + +### Scenario + +You need to provide your team with an estimate for installation cost of +an equipment foundation. + +It's a straightforward calculation for you, but the Logistics Team +keeps changing the installation position, to try and optimise the +overall project logistics. + +Each time the locations change, the GIS team gives you an updated +embedment depth, which is what you use (along with steel cost and +foundation type), to calculate cost and report it back. + +This twine allows you to create a wrapper around your scripts +that communicates to the GIS team what you need as an input, and communicates +to the logistics team what they can expect as an output. + +When deployed as a digital twin, the calculation gets automatically +updated, leaving you free to get on with all the other work! + +### Twine + +We specify the `steel_cost` and `foundation_type` as `configuration` +values, which you can set on startup of the twin. + +Once the twin is running, it requires the `embedment_depth` as an +`input_value` from the GIS team. A member of the GIS team can use your +twin to get `foundation_cost` directly. + +```json +{ + "title": "Foundation Cost Model", + "description": "This twine helps compute the cost of an installed foundation.", + "children": [], + "configuration_values_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Foundation cost twin configuration", + "description": "Set config parameters and constants at startup of the twin.", + "type": "object", + "properties": { + "steel_cost": { + "description": "The cost of steel in GBP/m^3. 
To get a better predictive model, you could add an economic twin that forecasts the cost of steel using the project timetable.", + "type": "number", + "minimum": 0, + "default": 3000 + }, + "foundation_type": { + "description": "The type of foundation being used.", + "type": "string", + "pattern": "^(monopile|twisted-jacket)$", + "default": "monopile" + } + } + }, + "input_values_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Input Values schema for the foundation cost twin", + "description": "These values are supplied to the twin asynchronously over a web socket. So as these values change, the twin can reply with an update.", + "type": "object", + "properties": { + "embedment_depth": { + "description": "Embedment depth in metres", + "type": "number", + "minimum": 10, + "maximum": 500 + } + } + }, + "output_manifest": { + "datasets": [] + }, + "output_values_schema": { + "title": "Output Values schema for the foundation cost twin", + "description": "The response supplied to a change in input values will always conform to this schema.", + "type": "object", + "properties": { + "foundation_cost": { + "description": "The foundation cost.", + "type": "integer", + "minimum": 2 + } + } + } +} +``` + +## Site weather conditions + +### Scenario + +You need to be able to get characteristic weather conditions at a +specific location, for a range of reasons including assessing extreme +design loads. The values you need are computed in a script, which calls +a Weather API (provided by a third party), but also needs a dataset of +"Wind Resource" files. 
+ +### Twine + +```json +{ + "title": "Weather Service Digital Twin", + "description": "Provides a model for design extreme weather conditions given a location", + "notes": "Easily extendable with children to add forecast and historical data of different types.", + "credentials": [ + { + "name": "WEATHER_API_SECRET_KEY", + "purpose": "Token for accessing a 3rd party weather API service" + } + ], + "input_manifest": { + "datasets": [ + { + "key": "wind_resource_data", + "purpose": "A dataset containing Wind Resource Grid files" + } + ] + }, + "input_values_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Input Values for the weather service twin", + "description": "This is a simple example for getting metocean conditions at a single location", + "type": "object", + "properties": { + "location": { + "description": "Location", + "type": "object", + "properties": { + "latitude": { + "type": "number", + "minimum": -90, + "maximum": 90 + }, + "longitude": { + "type": "number", + "minimum": -180, + "maximum": 180 + }, + "srid": { + "description": "The Spatial Reference System ID for the coordinate. 
Default is 4326 (WGS84)", + "type": "integer", + "default": 4326 + } + } + } + } + }, + "output_manifest": { + "datasets": [ + { + "key": "production_data", + "purpose": "A dataset containing production data", + "tags": { "cleaned": true }, + "labels": ["production", "wind"] + } + ] + }, + "output_values_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Output Values for the metocean service twin", + "description": "The output values strand of an example twine", + "type": "object", + "properties": { + "water_depth": { + "description": "Design water depth for use in concept calculations", + "type": "number" + }, + "extreme_wind_speed": { + "description": "Extreme wind speed value for use in concept calculations", + "type": "number" + } + } + } +} +``` diff --git a/docs/core_concepts/twines/manifest.md b/docs/core_concepts/twines/manifest.md new file mode 100644 index 000000000..add32a140 --- /dev/null +++ b/docs/core_concepts/twines/manifest.md @@ -0,0 +1,443 @@ +# Manifest strands + +Frequently, twins operate on files containing some kind of data. These +files need to be made accessible to the code running in the twin, in +order that their contents can be read and processed. Conversely, a twin +might produce an output dataset which must be understood by users. + +The `configuration_manifest`, `input_manifest` and `output_manifest` +strands describe what kind of datasets (and associated files) are +required / produced. + +!!! note + + Files are always contained in datasets, even if there's only one file. + It's so that we can keep nitty-gritty file metadata separate from the + more meaningful, higher level metadata like what a dataset is for. + +## Configuration manifest strand + +This describes datasets/files that are required at startup of the twin / +service. They typically contain a resource that the twin might use +across many analyses. + +For example, a twin might predict failure for a particular component, +given an image. 
It will require a trained ML model (saved in a +`*.pickle` or `*.json`). While many thousands of predictions might be +done over the period that the twin is deployed, all predictions are done +using this version of the model - so the model file is supplied at +startup. + +### Example + +Here's a twine containing this strand: + +```json +{ + // Manifest strands contain lists, with one entry for each required dataset + "configuration_manifest": { + "datasets": [ + { + // Once the inputs are validated, your analysis program can use this key to access the dataset + "key": "trained_model", + // General notes, which are helpful as a reminder to users of the service + "purpose": "The trained classifier" + } + ] + } +} +``` + +Here's a manifest that's valid for the twine: + +```json +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": { "system": "abc123" }, + "labels": ["classifier", "damage"], + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": {}, + "labels": [], + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} +``` + +## Input manifest strand + +These files are made available for the twin to run a particular analysis +with. Each analysis will likely have different input datasets. + +For example, a twin might be passed a dataset of LiDAR `*.scn` files and +be expected to compute atmospheric flow properties as a timeseries +(which might be returned in the [output values](values.md#output-values-strand) for +onward processing and storage). 
+ +### Example + +Here we specify that two datasets (and all or some of the files +associated with them) are required, for a service that cross-checks +meteorological mast data and power output data for a wind farm. + +Here's a twine containing this strand: + +```json +{ + // Manifest strands contain lists, with one entry for each required dataset + "input_manifest": { + "datasets": [ + { + // Once the inputs are validated, your analysis program can use this key to access the dataset + "key": "met_mast_data", + // General notes, which are helpful as a reminder to users of the service + "purpose": "A dataset containing meteorological mast data" + }, + { + "key": "scada_data", + "purpose": "A dataset containing scada data" + } + ] + } +} +``` + +Here's a manifest valid for the twine: + +```json +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "meteorological mast dataset", + "tags": { "location": 108346 }, + "labels": ["met", "mast", "wind"], + "files": [ + { + "path": "input/datasets/7ead7669/mast_1.csv", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": {}, + "labels": [], + "posix_timestamp": 1551393630, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "mast_1.csv", + "size_bytes": 59684813, + "sha-512/256": "somesha" + }, + { + "path": "input/datasets/7ead7669/mast_2.csv", + "cluster": 0, + "sequence": 1, + "extension": "csv", + "tags": {}, + "labels": [], + "posix_timestamp": 1551394230, + "id": "bbff07bc-7c19-4ed5-be6d-a6546eae8e45", + "last_modified": "2019-02-28T22:50:40.633001Z", + "name": "mast_2.csv", + "size_bytes": 59684813, + "sha-512/256": "someothersha" + } + ] + }, + { + "id": "5cf9e445-c288-4567-9072-edc31003b022", + "name": "scada data exports", + "tags": { "location": 108346, "system": "ab32" }, + "labels": ["wind", "turbine", "scada"], + "files": [ + { + "path": 
"input/datasets/7ead7669/export_1.csv", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": {}, + "labels": [], + "posix_timestamp": 1551393600, + "id": "78fa511f-3e28-4bc2-aa28-7b6a2e8e6ef9", + "last_modified": "2019-02-28T22:40:00.000000Z", + "name": "export_1.csv", + "size_bytes": 88684813, + "sha-512/256": "somesha" + }, + { + "path": "input/datasets/7ead7669/export_2.csv", + "cluster": 0, + "sequence": 1, + "extension": "csv", + "tags": {}, + "labels": [], + "posix_timestamp": 1551394200, + "id": "204d7316-7ae6-45e3-8f90-443225b21226", + "last_modified": "2019-02-28T22:50:00.000000Z", + "name": "export_2.csv", + "size_bytes": 88684813, + "sha-512/256": "someothersha" + } + ] + } + ] +} +``` + +## Output manifest strand + +Files are created by the twin during an analysis, tagged and stored as +datasets for some onward purpose. This strand is not used for sourcing +data; it enables users or other services to understand appropriate +search terms to retrieve datasets produced. 
+ +### Example + +Here's a twine containing this strand: + +```json +{ + "output_manifest": { + "datasets": [ + { + // Twined will prepare a manifest with this key, which you can add to during the analysis or once its complete + "key": "met_scada_checks", + // General notes, which are helpful as a reminder to users of the service + "purpose": "A dataset containing figures showing correlations between mast and scada data" + } + ] + } +} +``` + +Here's a manifest valid for the twine: + +```json +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "4564deca-5654-42e8-aadf-70690b393a30", + "name": "visual cross check data", + "organisation": "megacorp", + "tags": { "location": 108346 }, + "labels": ["figure", "met", "mast", "scada", "check"], + "files": [ + { + "path": "datasets/7ead7669/cross_check.fig", + "cluster": 0, + "sequence": 0, + "extension": "fig", + "tags": {}, + "labels": [], + "posix_timestamp": 1551394800, + "id": "38f77fe2-c8c0-49d1-a08c-0928d53a742f", + "last_modified": "2019-02-28T23:00:00.000000Z", + "name": "cross_check.fig", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} +``` + +## File tag templates + +Datafiles can be tagged with key-value pairs of relevant metadata that +can be used in analyses. Certain datasets might need one set of metadata +on each file, while others might need a different set. The required (or +optional) file tags can be specified in the twine in the +`file_tags_template` property of each dataset of any `manifest` strand. +Each file in the corresponding manifest strand is then validated against +its dataset's file tag template to ensure the required tags are +present. + +Here's a manifest strand with a file tag template. It's for an input manifest, but the format is the +same for configuration and output manifests. 
+ +```json +{ + "input_manifest": { + "datasets": [ + { + "key": "met_mast_data", + "purpose": "A dataset containing meteorological mast data", + "file_tags_template": { + "type": "object", + "properties": { + "manufacturer": { "type": "string" }, + "height": { "type": "number" }, + "is_recycled": { "type": "boolean" } + }, + "required": ["manufacturer", "height", "is_recycled"] + } + } + ] + } +} +``` + +Here's a manifest valid for the twine: + +```json +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "met_mast_data", + "tags": {}, + "labels": ["met", "mast", "wind"], + "files": [ + { + "path": "input/datasets/7ead7669/file_1.csv", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "labels": ["mykeyword1", "mykeyword2"], + "tags": { + "manufacturer": "vestas", + "height": 500, + "is_recycled": true + }, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "file_1.csv" + }, + { + "path": "input/datasets/7ead7669/file_1.csv", + "cluster": 0, + "sequence": 1, + "extension": "csv", + "labels": [], + "tags": { + "manufacturer": "vestas", + "height": 500, + "is_recycled": true + }, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "file_1.csv" + } + ] + } + ] +} +``` + +A remote reference can also be given for a file tag template. If the tag +template somewhere public, this is useful for sharing the template +between one or more teams working on the same type of data. + +The example below is for an input manifest, but the format is the same +for configuration and output manifests. It also shows two different tag +templates being specified for two different types of dataset required by +the manifest. 
+ +Here's a twine using a remote tag template: + +```json +{ + "input_manifest": { + "datasets": [ + { + "key": "met_mast_data", + "purpose": "A dataset containing meteorological mast data", + "file_tags_template": { + "$ref": "https://refs.schema.octue.com/octue/my-file-type-tag-template/0.0.0.json" + } + }, + { + "key": "some_other_kind_of_dataset", + "purpose": "A dataset containing something else", + "file_tags_template": { + "$ref": "https://refs.schema.octue.com/octue/another-file-type-tag-template/0.0.0.json" + } + } + ] + } +} +``` + +Here's a manifest valid for the twine: + +```json +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "met_mast_data", + "tags": {}, + "labels": ["met", "mast", "wind"], + "files": [ + { + "path": "input/datasets/7ead7669/file_1.csv", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "labels": ["mykeyword1", "mykeyword2"], + "tags": { + "manufacturer": "vestas", + "height": 500, + "is_recycled": true + }, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "file_1.csv" + }, + { + "path": "input/datasets/7ead7669/file_1.csv", + "cluster": 0, + "sequence": 1, + "extension": "csv", + "labels": [], + "tags": { + "manufacturer": "vestas", + "height": 500, + "is_recycled": true + }, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "file_1.csv" + } + ] + }, + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e29", + "name": "some_other_kind_of_dataset", + "tags": {}, + "labels": ["my-label"], + "files": [ + { + "path": "input/datasets/7eadpp9/interesting_file.dat", + "cluster": 0, + "sequence": 0, + "extension": "dat", + "labels": [], + "tags": { + "length": 864, + "orientation_angle": 85 + }, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae9071", + "name": "interesting_file.dat" + } + ] + } + ] +} +``` diff --git a/docs/core_concepts/twines/monitors.md b/docs/core_concepts/twines/monitors.md new file mode 100644 index 000000000..a8c5887e2 --- /dev/null 
+++ b/docs/core_concepts/twines/monitors.md @@ -0,0 +1,24 @@ +# Monitor message strand + +The `monitor_message_schema` strand is _values-based_, meaning the data +that matches the strand is in JSON form. It is a _JSON schema_ which +describes a monitor message. + +Monitor messages can include values for health and progress monitoring of the twin, for example +percentage progress, iteration number, and status - perhaps even +residuals graphs for a converging calculation. Broadly speaking, this +should be user-facing information. This kind of monitoring data can be in a suitable form for display on a dashboard. + +```json +{ + "monitor_message_schema": { + "type": "object", + "properties": { + "my_property": { + "type": "number" + } + }, + "required": ["my_property"] + } +} +``` diff --git a/docs/core_concepts/twines/twine_file_quickstart.md b/docs/core_concepts/twines/twine_file_quickstart.md new file mode 100644 index 000000000..a25f84bb6 --- /dev/null +++ b/docs/core_concepts/twines/twine_file_quickstart.md @@ -0,0 +1,100 @@ +# Twine file quickstart + +Let's say we want a service that accepts two values, uses them to make a calculation, then gives the result. Anyone +connecting to the service will need to know what values it requires, and what it responds with. + +First, create a blank text file called `twine.json`. We'll start by giving the service a title and description. Paste in the +following: + +```json +{ + "title": "My first Twined service... of an atomising discombobulator", + "description": "A simple example... estimates the `foz` value of an atomising discombobulator." +} +``` + +Now, let's define an input values strand to specify what values are required by the service. For this we use a JSON +schema. 
Add the `input_values` field so the twine looks like this: + +```json +{ + "title": "My first Twined service", + "description": "A simple example to build on...", + "input_values_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Input values schema for my first Twined service", + "description": "These values are supplied to the service by another program (often over a websocket, depending on your integration provider). So as these values change, the service can reply with an update.", + "type": "object", + "properties": { + "foo": { + "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s", + "type": "number", + "minimum": 10, + "maximum": 500 + }, + "baz": { + "description": "The baz value... period of the discombobulator's recombulation unit, in s", + "type": "number", + "minimum": 0, + "maximum": 1000 + } + } + } +} +``` + +Finally, let's add an output values strand showing what kind of data is returned by the service: + +```json +{ + ... + "output_values_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Output values schema for my first Twined service", + "description": "The service will output data that matches this schema", + "type": "object", + "properties": { + "foz": { + "description": "Estimate of the foz value... efficiency of the discombobulator in %", + "type": "number", + "minimum": 10, + "maximum": 500 + } + } + } +} +``` + +## Load the twine + +Twined provides a `Twine` class to load a twine from a file or a JSON string. The loading process checks the twine +itself is valid. It's as simple as: + +```python +from octue.twined import Twine + +my_twine = Twine(source='twine.json') +``` + +## Validate some inputs + +Say we have some JSON that we want to parse and validate to make sure it matches what's required for input values. 
+ +```python +my_input_values = my_twine.validate_input_values(source='{"foo": 30, "baz": 500}') +``` + +You can read the values from a file too. Paste the following into a file named `input_values.json`: + +```json +{ + "foo": 30, + "baz": 500 +} +``` + +Then parse and validate directly from the file: + +```py +my_input_values = my_twine.validate_input_values(source="input_values.json") +``` diff --git a/docs/twines/anatomy_values.md b/docs/core_concepts/twines/values.md similarity index 58% rename from docs/twines/anatomy_values.md rename to docs/core_concepts/twines/values.md index 2a2c14b9b..500511ea0 100644 --- a/docs/twines/anatomy_values.md +++ b/docs/core_concepts/twines/values.md @@ -1,4 +1,4 @@ -# Values-based Strands {#values_based_strands} +# Values strands The `configuration_values_schema`, `input_values_schema` and `output_values_schema` strands are _values-based_, meaning the data that @@ -6,123 +6,73 @@ matches these strands is in JSON form. Each of these strands is a _json schema_ which describes that data. -:::::::: tabs -::: group-tab -Configuration Values Strand +## Configuration values strand This strand is a `configuration_values_schema`, that is used to check validity of any `configuration_values` data supplied to the twin at startup. -The Configuration Values Strand is generally used to define control +The configuration values strand is generally used to define control parameters relating to what the twin should do, or how it should operate. For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid flow solver should be used? What -is the acceptable error level on an classifier algorithm? -::: +is the acceptable error level on a classifier algorithm? -::::: group-tab -Input Values Strand - -This strand is an `input_values_schema`, that is used to check validity -of `input_values` data supplied to the twin at the beginning of an -analysis task. 
- -The Input Values Strand is generally used to define actual data which -will be processed by the twin. Sometimes, it may be used to define -control parameters specific to an analysis. - -For example, if a twin cleans and detects anomalies in a 10-minute -timeseries of 1Hz data, the `input_values` might contain an array of -data and a list of corresponding timestamps. It may also contain a -control parameter specifying which algorithm is used to do the -detection. - -:::: note -::: title -Note -::: - -Depending on the way the twin is deployed (see -`deployment`{.interpreted-text role="ref"}), the `input_values` might -come in from a web request, over a websocket or called directly from the -command line or another library. - -However they come, if the new `input_values` validate against the -`input_values_schema` strand, then analysis can proceed. -:::: -::::: - -::: group-tab -Output Values Strand - -This strand is an `output_values_schema`, that is used to check results -(`output_values`) computed during an analysis. This ensures that the -application wrapped up within the _twine_ is operating correctly, and -enables other twins/services or the end users to see what outputs they -will get. - -For example,if a twin cleans and detects anomalies in a 10-minute -timeseries of 1Hz data, the `output_values` might contain an array of -data interpolated onto regular timestamps, with missing values filled in -and a list of warnings where anomalies were found. -::: -:::::::: - -Let\'s look at basic examples for twines containing each of these -strands: - -:::::: tabs -::: group-tab -Configuration Values Strand +### Example This _twine_ contains an example `configuration_values_schema` with one control parameter. 
-[Many more detailed and specialised examples are available in the GitHub -repository](https://github.com/octue/twined/tree/main/examples) - -```javascript +```json { "configuration_values_schema": { - "title": "The example configuration form", - "description": "The Configuration Values Strand of an example twine", - "type": "object", - "properties": { - "n_iterations": { - "description": "An example of an integer configuration variable, called 'n_iterations'.", - "type": "integer", - "minimum": 1, - "maximum": 10, - "default": 5 - } + "title": "The example configuration form", + "description": "The Configuration Values Strand of an example twine", + "type": "object", + "properties": { + "n_iterations": { + "description": "An example of an integer configuration variable, called 'n_iterations'.", + "type": "integer", + "minimum": 1, + "maximum": 10, + "default": 5 } + } } } ``` Matching `configuration_values` data could look like this: -```javascript +```json { - "n_iterations": 8, + "n_iterations": 8 } ``` -::: +## Input values strand + +This strand is an `input_values_schema`, that is used to check validity +of `input_values` data supplied to the twin at the beginning of an +analysis task. + +The input values strand is generally used to define actual data which +will be processed by the twin. Sometimes, it may be used to define +control parameters specific to an analysis. -::: group-tab -Input Values Strand +For example, if a twin cleans and detects anomalies in a 10-minute +timeseries of 1Hz data, the `input_values` might contain an array of +data and a list of corresponding timestamps. It may also contain a +control parameter specifying which algorithm is used to do the +detection. -This _twine_ contains an example `input_values_schema` with one input -value, which marked as required. +### Example -Many more detailed and specialised examples are available in -`examples`{.interpreted-text role="ref"}. 
+This _twine_ contains an example `input_values_schema` with one input value, which marked as required. -```javascript +```json { "input_values_schema": { "title": "Input Values", @@ -141,17 +91,29 @@ Many more detailed and specialised examples are available in Matching `input_values` data could look like this: -```javascript +```json { - "height": 13, + "height": 13 } ``` -::: +## Output values strand + +This strand is an `output_values_schema`, that is used to check results +(`output_values`) computed during an analysis. This ensures that the +application wrapped up within the _twine_ is operating correctly, and +enables other twins/services or the end users to see what outputs they +will get. + +For example,if a twin cleans and detects anomalies in a 10-minute +timeseries of 1Hz data, the `output_values` might contain an array of +data interpolated onto regular timestamps, with missing values filled in +and a list of warnings where anomalies were found. + +### Example + +To be added. -::: group-tab -Output Values Strand +!!! tip -Stuff -::: -:::::: + More examples are available in the [GitHub repository](https://github.com/octue/octue-sdk-python/tree/main/octue/twined/examples). diff --git a/docs/updating_services.md b/docs/core_concepts/updating_services.md similarity index 94% rename from docs/updating_services.md rename to docs/core_concepts/updating_services.md index f6ca83a63..770d9a069 100644 --- a/docs/updating_services.md +++ b/docs/core_concepts/updating_services.md @@ -1,5 +1,3 @@ -# Updating a Twined service - This page describes how to update an existing, deployed Twined service - in other words, how to deploy a new Twined service revision. @@ -71,7 +69,7 @@ We assume that: 7. Run the tests locally using `pytest` and fix anything that makes them fail - ![image](images/updating_services/pytest.png) + ![image](../images/updating_services/pytest.png) 8. Update the [semantic version](https://semver.org/) of your app. 
This communicates to anyone updating from a previous version of the @@ -91,13 +89,13 @@ We assume that: forget to push a commit). Ask your colleagues to review the code if required. - ![image](images/updating_services/diff.png) + ![image](../images/updating_services/diff.png) 10. When you're ready to release the new version of your service, check that the GitHub checks have passed. These ensure code quality, that the tests pass, and that the new version number is correct. -> ![image](images/updating_services/checks.png) + ![image](../images/updating_services/checks.png) 11. Merge the pull request into `main`. This will run the deployment workflow (usually called `cd` - continuous deployment), making the @@ -106,4 +104,4 @@ We assume that: take a few minutes). You can check the progress in the "Actions" tab of the GitHub repository -> ![image](images/updating_services/deployment.png) + ![image](../images/updating_services/deployment.png) diff --git a/docs/data_containers/index.md b/docs/data_containers/index.md deleted file mode 100644 index f257a2ae6..000000000 --- a/docs/data_containers/index.md +++ /dev/null @@ -1,10 +0,0 @@ -# Datafiles, datasets, and manifests - -One of the main features of Twined is making using, creating, and -sharing scientific datasets easy. There are three main data classes in -the that do this. - -- **Datafile** - [a single local or cloud file](/data_containers/datafile) and its metadata. -- **Dataset** - [a set of related datafiles](/data_containers/dataset) that exist in the same location, plus metadata. -- **Manifest** - [a set of related datasets](/data_containers/manifest) that exist anywhere, plus metadata. Typically produced by or for - one analysis. 
diff --git a/docs/getting_started/creating_services.md b/docs/getting_started/creating_services.md new file mode 100644 index 000000000..805bf3091 --- /dev/null +++ b/docs/getting_started/creating_services.md @@ -0,0 +1,330 @@ +# Getting started - creating services + +Turn your analysis code into production-ready services - no infrastructure or DevOps skills needed - and share with +your team and beyond. + +This guide walks you through creating an example Twined service deployed in the cloud. By the end, you'll have a real +service and be able to ask it questions from your computer and receive answers from it over the internet. + +!!! tip + + You can see our deployed example service [here on GitHub](https://github.com/octue/example-service-kueue). + +## Prerequisites + +Before you begin, ensure you: + + + +- Are familiar with Python and the command line +- Have the following tools installed: + - Python >= 3.10 + - The `octue` python library / CLI (see [installation instructions](../installation.md)) +- Have access to an existing Twined service network - see [authentication instructions](../authentication.md) + and [managing infrastructure](../getting_started/managing_infrastructure.md) + + + +## Create and clone a GitHub repository + +!!! warning + + The repository must be created in the same GitHub account used for your Twined service network. See + [this issue](https://github.com/octue/octue-sdk-python/issues/740) for more details. + +Create a git repository for the service in the GitHub account linked to the Twined service network. Clone this +repository to your computer and checkout a new branch called `add-new-service`. Replace `` with the handle of +the GitHub account. 
+ +```shell +git clone https://github.com//example-service.git +cd example-service +git checkout -b add-new-service +``` + +## Install the python dependencies + +Create a `pyproject.toml` file to define the service as a python package and list its dependencies: + +```toml +[tool.poetry] +name = "example-service" +version = "0.1.0" +description = "An example Twined data service." +authors = ["Your name "] +packages = [{include = "example_service"}] + +[tool.poetry.dependencies] +python = "^3.11" +octue = "0.69.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" +``` + +!!! tip + + We use Poetry in this example but you can use Pip or another package manager with either a `setup.py` or + `pyproject.toml` file. + +Now install the dependencies: + +```shell +poetry install +``` + +Note that this will create a `poetry.lock` file with the locked dependency versions in. Keep this file. + +## Write the service python code + +We'll make the example service calculate the first `n` numbers in the Fibonacci sequence. + +Create a directory called `example_service` with an empty `__init__.py` file and an `app.py` file inside. In `app.py`, +add the following code: + +```python +import json +import logging +import time + +from octue.resources import Datafile, Dataset +from octue.twined.resources.example import calculate_fibonacci_sequence + +logger = logging.getLogger(__name__) + + +def run(analysis): + logger.info("Started example analysis.") + + # Get your input values... + n = analysis.input_values["n"] + + # Do your calculations here... + sequence = calculate_fibonacci_sequence(n) + time.sleep(2) + + # Return results by assigning output values... + analysis.output_values = {"fibonacci": sequence} + + # If output values are too large, or custom/binary file outputs + # are required, you can save them as Datafiles and add them to + # the output manifest... 
+ with Datafile("fibonacci.json", mode="w") as (datafile, f): + json.dump(analysis.output_values, f) + + analysis.output_manifest.datasets["example_dataset"] = Dataset(files={datafile}) + logger.info("Finished example analysis.") +``` + +## Add the Twine file + +The Twine file is a JSON file containing [JSON schemas](https://json-schema.org/) stating what kind of data is expected +as inputs and outputs of the service. It shows users what can be sent to the service and what to expect to receive. +Inputs and outputs that violate the schemas won't be processed and will cause an error. + +Create a file at the top level of the repository called `twine.json`: + +```json +{ + "input_values_schema": { + "type": "object", + "required": ["n"], + "properties": { + "n": { + "type": "integer", + "minimum": 0 + } + } + }, + "output_values_schema": { + "type": "object", + "required": ["fibonacci"], + "properties": { + "fibonacci": { + "type": "array", + "items": { + "type": "integer" + } + } + } + }, + "output_manifest": { + "datasets": { + "example_dataset": {} + } + } +} +``` + +!!! tip + + Learn more about `twine.json` files [here](../core_concepts/creating_services.md/#twinejson). + +## Add the `octue.yaml` file + +The service configuration file (called `octue.yaml`) names the service and sets details like where to store output data. +Create an `octue.yaml` file at the top level of the repository, replacing: + +- `` with the GitHub account handle +- `` with the ID of the Google Cloud Platform (GCP) project the Twined service network is deployed in +- `` with the name of the GCP region the service network is deployed in (e.g. `europe-west1`) +- `` with the name of the environment the service network is deployed in (`main` by default) + +!!! tip + + Ask the manager of your Twined service network to help you with these values! 
+ +```yaml +services: + - namespace: + name: example-service + app_source_path: example_service + event_store_table_id: octue_twined.service-events + diagnostics_cloud_path: gs://-octue-twined/example-service/diagnostics + output_location: gs://-octue-twined/example-service/outputs + service_registries: + - name: 's services + endpoint: https://-.cloudfunctions.net/-octue-twined-service-registry +``` + +!!! tip + + Learn more about `octue.yaml` files [here](../core_concepts/creating_services.md/#octueyaml). + +## Enable GitHub Actions in the repository + +Go back to your repository on GitHub and open [its Actions settings](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#managing-github-actions-permissions-for-your-repository) +(Settings -> Actions -> General). Set the "Actions permissions" option to "Allow all actions and reusable workflows". + +## Add the GitHub Actions deploy workflow + +A GitHub Actions reusable workflow is used to automatically deploy the service when its code is merged into `main`. +Create a file called `.github/workflows/deploy.yml` and add the following, replacing `` and +`` as before: + +```yaml +name: deploy + +# Only trigger when a pull request into main branch is merged. +on: + push: + branches: + - main + +jobs: + deploy: + uses: octue/workflows/.github/workflows/build-twined-service.yml@0.11.0 + permissions: + id-token: write + contents: read + with: + gcp_project_name: + gcp_project_number: + gcp_region: + service_namespace: + service_name: example-service +``` + +!!! tip + + See [here](https://github.com/octue/workflows?tab=readme-ov-file#deploying-a-kuberneteskueue-octue-twined-service-revision) + for more information, including how to use custom dockerfiles for your service. + +## Check your files + +Once you've finished the steps above, your repository should have a file structure like this: + +``` +. 
+├── .github +│ └── workflows +│ └── deploy.yml +├── example_service +│ ├── __init__.py +│ └── app.py +├── octue.yaml +├── poetry.lock +├── pyproject.toml +└── twine.json +``` + +## Merge the code into `main` + +To deploy the service, we need to merge the code we've added into the `main` branch. Make sure any sensitive and +irrelevant files are listed in a `.gitignore` file and run: + +```shell +git add . +git commit -m "Add example Twined service" +git push +``` + +For best practice, open a pull request for your branch into `main`, review it, and merge it. For a simpler route: + +```shell +git checkout main +git merge add-new-service +git push +``` + +Navigate to your repository's "Actions" tab on GitHub and you should see the deploy workflow progressing after a few +seconds. An in-progress indicator (currently a small orange circle) will be shown against the most recent commit. Once +the workflow has completed (which should take only a couple of minutes for this simple example service), a green tick +should show next to the most recent commit. + +
+
+ ![GitHub repository "Actions" tab](../images/actions_tab.png) +
The "Actions" tab of the GitHub repository. A successful workflow run is shown at the top with a green + tick while a failed one is shown below it with a red cross. Note that the workflow in the image is called + "release.yaml" but yours is called "deploy.yaml". +
+
+ +## Ask the service its first question + +You can now communicate with the service over the internet to ask +it a question! Follow the [authentication instructions](../authentication.md) in the terminal where you ran `poetry install`, replace +`` as before, and run: + +```shell +octue twined question ask /example-service:0.1.0 --input-values='{"n": 10}' +``` + +After a couple of minutes (while the Kubernetes cluster is spinning up a container to run the service), you should see +log messages start to appear. Finally, the first 10 values in the Fibonacci sequence are returned as output values: + +``` +[2025-11-05 12:45:29,861 | INFO | octue.twined.resources.example] Starting Fibonacci sequence calculation. +[2025-11-05 12:45:29,861 | INFO | octue.twined.resources.example] Finished Fibonacci sequence calculation. +{"kind": "result", "output_values": {"fibonacci": [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]}, "output_manifest": null} +``` + +!!! tip + + See the [using services getting started guide](using_services.md) to see how to ask questions in python instead. + +## Next steps + +!!! success + + Congratulations on creating your first service! For additional information, check out the following resources: + + - [Read about the core concepts of Twined](../core_concepts/creating_services.md) + - [Create infrastructure for a Twined service network](managing_infrastructure.md) + +## Troubleshooting + +!!! failure + + If you get this error, double check you've followed the [authentication instructions](../authentication.md) fully. + + ``` + octue.exceptions.CloudLocationNotSpecified: `project_id` must be specified for a service to connect to the correct service - received None. + ``` + +!!! failure + + If your question gets stuck with a log message saying `Waiting for question to be accepted...`, take a look at the + [troubleshooting infrastructure guide](../core_concepts/troubleshooting_infrastructure.md). 
diff --git a/docs/getting_started/managing_infrastructure.md b/docs/getting_started/managing_infrastructure.md
new file mode 100644
index 000000000..3891472b6
--- /dev/null
+++ b/docs/getting_started/managing_infrastructure.md
@@ -0,0 +1,273 @@
+# Getting started - managing infrastructure
+
+Allow your team to run services in your own private cloud, with just two pre-configured Terraform modules. Control
+costs, limit access, and integrate with your other systems.
+
+Twined data services can be run locally, but deploying them in the cloud is preferable for reliability and
+performance. Twined makes it easy to do this with two [Terraform](https://terraform.io) modules (ready-made
+infrastructure as code (IaC)) that deploy a managed Kubernetes cluster to run services on. The combined infrastructure
+is called a **Twined service network**.
+
+This guide walks you through setting up a Twined service network. By the end, you will have created a Kubernetes
+cluster, cloud storage buckets, and all other infrastructure needed to run one or more Twined services in your Google
+Cloud project.
+
+## Prerequisites
+
+Before you begin, ensure you:
+
+
+
+- Are familiar with Terraform/IaC and the command line
+- Have the following set up:
+  - [Terraform CLI](https://developer.hashicorp.com/terraform/install) >= 1.8.0 installed
+  - A Google Cloud account and project with billing set up
+  - Optionally, a [Terraform Cloud](https://app.terraform.io/public/signup/account?product_intent=terraform) account
+
+
+
+## Enable the Cloud Resource Manager API
+
+The Cloud Resource Manager API must be enabled manually for Terraform to work. Enable it by going
+[here](https://console.developers.google.com/apis/api/cloudresourcemanager.googleapis.com) and clicking "Enable API".
+Make sure you've enabled it for the correct Google Cloud project.
+
+## Create and clone a GitHub repository
+
+Create a GitHub repository for the infrastructure. 
Clone this repository to your computer and checkout a new branch +called `add-twined-infrastructure`. Replace `` with your GitHub account handle. + +```shell +git clone https://github.com//twined-infrastructure.git +cd twined-infrastructure +git checkout -b add-twined-infrastructure +``` + +## Create the configuration directories + +Create two directories at the top level of the repository: + +- `terraform_core` +- `terraform_cluster` + +## Authenticate with Google Cloud + +1. Access your service accounts [here](https://console.cloud.google.com/iam-admin/serviceaccounts), making sure the + correct project is selected +2. Create a service account for Terraform and assign it the editor and owner basic IAM permissions +3. Click on the service account, go to the "Keys" tab, and create (download) a JSON key for it. +4. Move the key file into `terraform_core`, renaming it `gcp-credentials.json` +5. Copy the key file into `terraform_cluster` +6. Create `.gitignore` and `.dockerignore` files in the top level of the repository and add `gcp-cred*` to them + +!!! danger + + Be careful storing the key file in your repository - you don't want to accidentally commit it or build it into a + docker image layer. Make sure to follow step 6 and double check that both copies of the key file are ignored by Git + by checking the output of `git status`. 
+ +## Add the core infrastructure Terraform configuration + +Inside the `terraform_core` directory, create a `main.tf` file and add the following to it: + +```terraform +terraform { + required_version = ">= 1.8.0" + + required_providers { + google = { + source = "hashicorp/google" + version = "~>6.12" + } + } +} + + +provider "google" { + project = var.google_cloud_project_id + region = var.google_cloud_region + credentials = "gcp-credentials.json" +} + + +module "octue_twined_core" { + source = "git::github.com/octue/terraform-octue-twined-core.git?ref=0.1.2" + google_cloud_project_id = var.google_cloud_project_id + google_cloud_region = var.google_cloud_region + github_account = var.github_account +} +``` + +Next, create a `variables.tf` file and add the following, replacing the values in `<>` brackets: + +```terraform +# Check in the GCP cloud console for your project ID +# (this is not necessarily the same as its name) +variable "google_cloud_project_id" { + type = string + default = "" +} + +# Choose any region you like from +# https://cloud.google.com/about/locations. We like to +# choose a low carbon region like "europe-west9". +# See https://cloud.google.com/sustainability/region-carbon +variable "google_cloud_region" { + type = string + default = "" +} + +# The account handle where your service repositories and your +# infrastructure repository is stored (ours is "octue") +variable "github_account" { + type = string + default = "" +} +``` + +## Create the core infrastructure + +In the command line, Change directory into `terraform_core` and generate the Terraform plan: + +```shell +cd terraform_core +terraform init +terraform plan +``` + +If you're happy with the plan, run: + +```shell +terraform apply +``` + +and approve the run. 
+ +## Add the cluster Terraform configuration + +Inside the `terraform_cluster` directory, create a `main.tf` file and add the following to it: + +```terraform +terraform { + required_version = ">= 1.8.0" + + required_providers { + google = { + source = "hashicorp/google" + version = "~>6.12" + } + kubernetes = { + source = "hashicorp/kubernetes" + version = "~>2.35" + } + kubectl = { + source = "gavinbunney/kubectl" + version = "~>1.19" + } + } + +} + + +provider "google" { + project = var.google_cloud_project_id + region = var.google_cloud_region + credentials = "gcp-credentials.json" +} + + +data "google_client_config" "default" {} + + +provider "kubernetes" { + host = "https://${module.octue_twined_cluster.kubernetes_cluster.endpoint}" + token = data.google_client_config.default.access_token + cluster_ca_certificate = base64decode(module.octue_twined_cluster.kubernetes_cluster.master_auth[0].cluster_ca_certificate) +} + + +provider "kubectl" { + load_config_file = false + host = "https://${module.octue_twined_cluster.kubernetes_cluster.endpoint}" + token = data.google_client_config.default.access_token + cluster_ca_certificate = base64decode(module.octue_twined_cluster.kubernetes_cluster.master_auth[0].cluster_ca_certificate) +} + + +locals { + workspace_split = split("-", terraform.workspace) + environment = element(local.workspace_split, length(local.workspace_split) - 1) +} + + +module "octue_twined_cluster" { + source = "git::github.com/octue/terraform-octue-twined-cluster.git?ref=0.3.0" + google_cloud_project_id = var.google_cloud_project_id + google_cloud_region = var.google_cloud_region + environment = local.environment + cluster_queue = var.cluster_queue +} +``` + +Next, create a `variables.tf` file and add the following, replacing the parts in `<>` brackets with the same values as +before: + +```terraform +variable "google_cloud_project_id" { + type = string + default = "" +} + + +variable "google_cloud_region" { + type = string + default = "" +} + + 
+variable "cluster_queue" { + type = object( + { + name = string + max_cpus = number + max_memory = string + max_ephemeral_storage = string + } + ) + default = { + name = "cluster-queue" + max_cpus = 100 + max_memory = "256Gi" + max_ephemeral_storage = "10Gi" + } +} +``` + +## Create the cluster + +In the command line, change directory into `terraform_cluster` and generate the Terraform plan: + +```shell +cd ../terraform_cluster +terraform init +terraform plan +``` + +If you're happy with the plan, run: + +```shell +terraform apply +``` + +and approve the run. + +## Next steps + +!!! success + + Congratulations on setting up a Twined service network! Next up: + + - [Create the first Twined service on your new service network](creating_services.md) + - Learn more about the Terraform modules used in this guide: + - [Core module](https://github.com/octue/terraform-octue-twined-core) + - [Cluster module](https://github.com/octue/terraform-octue-twined-cluster) diff --git a/docs/getting_started/using_services.md b/docs/getting_started/using_services.md new file mode 100644 index 000000000..77ba632a2 --- /dev/null +++ b/docs/getting_started/using_services.md @@ -0,0 +1,129 @@ +# Getting started - using services + +Run analyses instantly on ready-made services - no cloud setup, DevOps, or coding required. Run once or thousands of +times, and add them to automation pipelines. + +This guide walks you through using an example Twined service locally. The process for using a real one (deployed locally +or in the cloud) is almost identical. + +By the end, you will be able to use the Twined CLI to run an analysis on a data service, sending it input data and +receiving output data. 
+
+## Prerequisites
+
+Before you begin, ensure you:
+
+
+
+- Are familiar with Python and/or the command line
+- Have the following tools installed:
+  - Python >= 3.10
+  - The `octue` python library / CLI (see [installation instructions](../installation.md))
+
+
+
+## Authentication
+
+No authentication is needed to run the example data service. To authenticate for real data services, see
+[authentication instructions](../authentication.md).
+
+## Run your first analysis
+
+!!! info
+
+    In Twined, sending input data to a service is called "asking a question". The service will run an analysis on the
+    question and send back any output data - this is called "receiving an answer".
+
+### Ask a question
+
+The following command asks a question to the local example data service, which calculates the first `n` values of the
+[Fibonacci sequence](https://en.wikipedia.org/wiki/Fibonacci_sequence).
+
+=== "CLI"
+
+    ```shell
+    octue twined question ask example/service:latest --input-values='{"n": 10}'
+    ```
+
+    !!! tip
+
+        To ask a question to a real data service, just specify its ID:
+
+        ```shell
+        octue twined question ask some-org/a-service:1.2.0 --input-values='{"n": 10}'
+        ```
+
+=== "Python"
+
+    ```python
+    from octue.twined.resources import Child
+
+    child = Child(
+        id="example/service:latest",
+        backend={
+            "name": "GCPPubSubBackend",
+            "project_id": "example",
+        },
+    )
+
+    answer, question_uuid = child.ask(input_values={"n": 10})
+    ```
+
+    !!! info
+
+        A child is a Twined service you ask a question to, in the sense of child and parent nodes in a tree. This only
+        becomes important when services use other Twined services as part of their analysis, forming a tree of services.
+
+    !!! tip
+
+        To ask a question to a real data service, specify its ID and project ID e.g. `some-org/real-service:1.2.0`
+        instead of `example/service:latest`.
+
+### Receive an answer
+
+=== "CLI"
+
+    The output is automatically written to the command line. 
It contains log messages followed by the answer as + [JSON](https://en.wikipedia.org/wiki/JSON): + + ```text + [2025-10-28 15:36:52,377 | INFO | octue.twined.resources.example] Starting Fibonacci sequence calculation. + [2025-10-28 15:36:52,377 | INFO | octue.twined.resources.example] Finished Fibonacci sequence calculation. + {"kind": "result", "output_values": {"fibonacci": [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]}, "output_manifest": null} + ``` + + !!! tip + + You can pipe the output JSON into other CLI tools or redirect it to a file: + + ```shell + # Format the result using the `jq` command line tool + octue twined question ask example/service:latest --input-values='{"n": 10}' | jq + + # Store the result in a file + octue twined question ask example/service:latest --input-values='{"n": 10}' > result.json + ``` + +=== "Python" + + ```python + answer + + >>> { + "kind": "result", + "output_values": {"fibonacci": [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]}, + "output_manifest": None, + } + ``` + +## Next steps + +!!! success + + Congratulations on running your first analysis! 
For additional information, check out the following resources: + + - [Create your own data service](creating_services.md) + - [Set up infrastructure to host your data service(s) in the cloud](managing_infrastructure.md) + - Run a data service locally + - See the library and CLI reference + - [Get support](../support.md) diff --git a/docs/glossary.md b/docs/glossary.md new file mode 100644 index 000000000..f6c0ed668 --- /dev/null +++ b/docs/glossary.md @@ -0,0 +1,18 @@ +| Term | Definition | +| ------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Asking a question | Asking a question is sending data (input values and/or an input manifest) to a service for processing/analysis. | +| Child | A child is a service that can be asked a question. This name reflects the tree structure of services (specifically, a directed acyclic graph, or DAG) formed by the service asking the question (the parent), the child it asks the question to, any children that the child asks questions to as part of forming its answer, and so on. | +| Datafile | A datafile is a single local or cloud file and its metadata. | +| Dataset | A dataset is a set of related datafiles that exist in the same location and some metadata. | +| Locality | Locality is the type of location of a datafile or dataset. 
Possible types are: cloud-based (it exists only in the cloud); local (it exists only on your local filesystem); and hybrid (it's cloud-based but has been downloaded for low-latency reading/writing) | +| Manifest | A manifest is a set of related cloud and/or local datasets and some metadata. Typically produced by or needed for processing by a Twined service. | +| Parent | A parent is a service that asks a question to another service (a child). | +| Receiving an answer | Receiving data (output values and/or an output manifest) from a service you asked a question to. | +| Service | A service (also known as a Twined service) is a data service or digital twin built with the Twined framework that can be asked questions (input data), process them, and return answers (output data). Twined services can communicate with each other with minimal extra setup. | +| Service ID | A service ID can be an SRUID or just the service namespace and name. It can be used to ask a question to a service without specifying a specific revision of it. This enables asking questions to, for example, the service octue/my-service and automatically having them routed to its default (usually latest) revision. An SRUID is a special case of a service ID. | +| Service name | A name to uniquely identify the service within its namespace. This usually corresponds to the name of the GitHub repository for the service. Names must be lower kebab case (i.e. they may contain the letters [a-z], numbers [0-9] and hyphens [-]). They may not begin or end with hyphens. | +| Service namespace | A service namespace is the group to which the service belongs e.g. your name or your organisation's name. If in doubt, use the GitHub handle of the user or organisation publishing the services. Namespaces must be lower kebab case (i.e. they may contain the letters [a-z], numbers [0-9], and hyphens [-]). They may not begin or end with hyphens. 
| +| Service revision | A service revision is a specific instance of a service that can be individually addressed. The revision could correspond to a version of the service, a dynamic development branch for it, or a deliberate duplication or variation of it. | +| Service revision tag | A service revision tag is a short string of text that uniquely identifies a particular revision of a service. The revision tag could be a commit hash (e.g. a3eb45), a semantic version (e.g. 0.12.4), a branch name (e.g. development), a particular environment the service is deployed in (e.g. production), or a combination of these (e.g. 0.12.4-production). Tags may contain lowercase and uppercase letters, numbers, underscores, periods, and hyphens, but can't start with a period or a dash. They can contain a maximum of 128 characters. These requirements are the same as the [Docker tag format](https://docs.docker.com/engine/reference/commandline/tag/). | +| Service revision unique identifier (SRUID) | An SRUID is the combination of a service revision's namespace, name, and revision tag that uniquely identifies it. For example, `octue/my-service:1.3.0` where the namespace is `octue`, the name is `my-service`, and the revision tag is `1.3.0`. | +| Twined ecosystem | The Twined ecosystem is the set of services running Twined as their backend. 
These services guarantee: defined input/output JSON schemas and validation; an easy and consistent interface for asking them questions and receiving their answers; logs, exceptions, and monitor messages forwarded to you; and high availability (if deployed in the cloud) | diff --git a/docs/images/actions_tab.png b/docs/images/actions_tab.png new file mode 100644 index 000000000..b1fa7465d Binary files /dev/null and b/docs/images/actions_tab.png differ diff --git a/docs/index.md b/docs/index.md index e10ad91e5..bb3fbf0d9 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,59 +1,38 @@ -# Introduction +Twined helps scientists and engineers focus on their analyses instead of wrestling with infrastructure. Whether you need +to **run published analyses out of the box**, **share your own models and code as reusable services**, or **deploy +everything securely in your own environment**, Twined streamlines the process so you can spend more time on the science. -The python SDK for [Octue](https://octue.com) Twined scientific data services and digital twins - get faster data -groundwork so you have more time for the science! +## Where to start -!!! info "Definition" +### 1 - Using services - **Twined service** +Access ready-made scientific analyses instantly, with no coding, cloud setup, or DevOps required. Run them once or +thousands of times, and connect them directly into your research workflows and pipelines. Try the [getting started +guide](getting_started/using_services.md). - A data service or digital twin built with the Twined framework that can be asked questions, process them, and - return answers. Twined services can communicate with each other with minimal extra setup. +### 2 - Creating services -## Key features +Package your analysis code or models as production-ready services, without needing infrastructure expertise. Share them +with colleagues, collaborators, or the wider community. Try the [getting started guide](getting_started/creating_services.md). 
-**Unified cloud/local file, dataset, and manifest operations** +### 3 - Managing infrastructure -- Create and build datasets easily -- Organise them with timestamps, labels, and tags -- Filter and combine them using this metadata -- Store them locally or in the cloud (or both for low-latency reading/writing with cloud-guaranteed data availability) -- Use internet/cloud-based datasets as if they were local e.g. - - `https://example.com/important_dataset.dat` - - `gs://example-bucket/important_dataset.dat` -- Create manifests (a set of datasets needed for a particular analysis) to modularise your dataset input/output +Deploy Twined services in your private cloud with just two pre-configured Terraform modules. Control costs, manage +access, and integrate with your existing scientific data and digital tools. Try the [getting started guide](getting_started/managing_infrastructure.md). -**Ask existing services questions from anywhere** +## Stats -- Send them data to process from anywhere -- Automatically have their logs, monitor messages, and any errors forwarded to you and displayed as if they were local -- Receive their output data as JSON -- Receive a manifest of any output datasets they produce for you to download or access as you wish +- 250,000+ analyses run +- 100s of services built +- 6 years in development +- 12 countries - users across the world -**Create, run, and deploy your apps as services** +## Twined in the wild -- No need to change your app - just wrap it -- Use the `octue` CLI to run your service locally or deploy it to Google Kubernetes Engine (GKE) -- Create JSON-schema interfaces to explicitly define the form of configuration, input, and output data -- Ask other services questions as part of your app (i.e. 
build trees of services) -- Automatically display readable, colourised logs, or use your own log handler -- Avoid time-consuming and confusing devops, cloud configuration, and backend maintenance +- WindPioneers - [WindQuest](https://wind-pioneers.com/what-we-do/windquest-smarter-tools/): Twined powers the simulation infrastructure behind the WindQuest application, supporting design of hundreds of wind farms worldwide with over 250,000 Energy Yield Assessment (EYA) scenarios run to date. +- OST Zurich - [Aerosense](https://rtdt.ai/) (now part of RTDT Labs): The Twined framework was used to underpin digital twin and data infrastructure, enabling a novel turbine blade sensing technology to reach commercial readiness, now spun out and deployed internationally. -**High standards, quick responses, and good intentions** +## Twined by example -- Open-source and transparent on GitHub - anyone can see the code and raise an issue -- Automated testing, standards, releases, and deployment -- High test coverage -- Works on MacOS, Linux, and Windows -- Developed not-for-profit for the renewable energy industry - -## Need help, found a bug, or want to request a new feature? - -We use [GitHub Issues](https://github.com/octue/octue-sdk-python/issues) to manage: - -- Bug reports -- Feature requests -- Support requests - -Bug reports, feature requests and support requests, may also be made directly to your Octue support contact, or via the -[support pages](https://www.octue.com/contact). +- [Elevation service](https://github.com/octue/windeurope72hours-elevations-api) +- [Example service](https://github.com/octue/example-service-kueue) diff --git a/docs/installation.md b/docs/installation.md index 0fef71cb0..20c3a9297 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,9 +1,11 @@ # Installation +The Twined framework is provided through the [Octue SDK](https://github.com/octue/octue-sdk-python). 
+ ## Pip ```shell -pip install octue==x.y.z +pip install octue ``` ## Poetry @@ -11,17 +13,31 @@ pip install octue==x.y.z Read more about Poetry [here](https://python-poetry.org). ```shell -poetry add octue=x.y.z +poetry add octue ``` -## Add to your dependencies +## Check installation -To use a specific version of Twined in your python application, -simply add: +If the installation worked correctly, the `octue` CLI will be available: ```shell -octue==x.y.z +octue --help ``` -to your `requirements.txt` or `setup.py` file, where `x.y.z` is your -preferred version of the SDK (we recommend the latest stable version). +```text +Usage: octue [OPTIONS] COMMAND [ARGS]... + + The CLI for Octue SDKs and APIs, most notably Twined. + + Read more in the docs: https://twined.octue.com + +Options: + --log-level [debug|info|warning|error] + Log level used for the analysis. [default: + info] + --version Show the version and exit. + -h, --help Show this message and exit. + +Commands: + twined The Twined CLI. +``` diff --git a/docs/license.md b/docs/license.md index 8fdf9f723..f2f47bcad 100644 --- a/docs/license.md +++ b/docs/license.md @@ -1,10 +1,10 @@ # License -## The Boring Bit +## The boring bit -See [the octue-sdk-python license](https://github.com/octue/octue-sdk-python/blob/main/LICENSE). +See the Twined license [here](https://github.com/octue/octue-sdk-python/blob/main/LICENSE). -## Third Party Libraries +## Third party libraries -**octue-sdk-python** includes or is linked against code from third party -libraries - see [our attributions page](https://github.com/octue/octue-sdk-python/blob/main/ATTRIBUTIONS.md). +**Twined** includes or is linked against code from third party libraries - see +[our attributions page](https://github.com/octue/octue-sdk-python/blob/main/ATTRIBUTIONS.md). 
diff --git a/docs/support.md b/docs/support.md new file mode 100644 index 000000000..6eae4df4a --- /dev/null +++ b/docs/support.md @@ -0,0 +1,9 @@ +Need help, found a bug, or want to request a new feature? We use +[GitHub Issues](https://github.com/octue/octue-sdk-python/issues) to manage: + +- Feature requests +- Bug reports +- Support requests + +Need something else? Contact us [here](https://octue.com/contact). You can also directly message your Octue support +contact. diff --git a/docs/twines/about/about_digital_twins.md b/docs/twines/about/about_digital_twins.md deleted file mode 100644 index ade802551..000000000 --- a/docs/twines/about/about_digital_twins.md +++ /dev/null @@ -1,53 +0,0 @@ -# Digital Twins {#digital_twins} - -A digital twin is a virtual representation of a real life being - a -physical asset like a wind turbine or car - or even a human. - -There are three reasons why you might want to create a digital twin: - -: - Monitoring - Prediction - Optimisation - -On its own, a digital twin can be quite useful. For example, a twin -might embody an AI-based analysis to predict power output of a turbine. - -
- -
A digital twin consists of some kind of analysis or -processing task, which could be run many times per second, or daily, -down to occasionally or sometimes only once (the same as a "normal" -analysis).
-
- -Coupling digital twins is generally even more useful. You might wish to -couple your turbine twin with a representation of the local power grid, -and a representation of a factory building to determine power demand\... -enabling you to optimise your factory plant for lowest energy cost -whilst intelligently selling surplus power to the grid. - -
- -
A hierarchy of digital twins. Each blue circle represents a -twin, coupled to its neighbours. Yellow nodes are where schema are used -to connect twins.
-
- -## Gemini Principles {#gemini_principles} - -The Gemini Principles have been derived by the [Centre for Digital Built -Britain -(CDBB)](https://www.cdbb.cam.ac.uk/system/files/documents/TheGeminiPrinciples.pdf). -We strongly recommend you give them a read if embarking on a digital -twins project. - -The aim of **twined** is to enable the following principles. In -particular: - -1. Openness (open-source project to create schema for twins that can be - run anywhere, anywhen) -2. Federation (encouraging a standardised way of connecting twins - together) -3. Security (making sure schemas and data can be read safely) -4. Public Good (see our nano-rant about climate change in - `reason_for_being`{.interpreted-text role="ref"}) diff --git a/docs/twines/about/about_introducing_json_schema.md b/docs/twines/about/about_introducing_json_schema.md deleted file mode 100644 index b75ab93e8..000000000 --- a/docs/twines/about/about_introducing_json_schema.md +++ /dev/null @@ -1,126 +0,0 @@ -# Introducing JSON Schema {#introducing_json_schema} - -`JSON` is a data interchange format that has rapidly taken over as the -defacto web-based data communication standard in recent years. - -`JSONSchema` is a way of specifying what a `JSON` document should -contain. The Schema are, themselves, written in `JSON`! - -Whilst schema can become extremely complicated in some scenarios, they -are best designed to be quite succinct. See below for the schema (and -matching `JSON`) for an integer and a string variable. 
- -**JSON:** - -```json -{ - "id": 1, - "name": "Tom" -} -``` - -**Schema:** - -```json -{ - "type": "object", - "title": "An id number and a name", - "properties": { - "id": { - "type": "integer", - "title": "An integer number", - "default": 0 - }, - "name": { - "type": "string", - "title": "A string name", - "default": "" - } - } -} -``` - -## Useful resources {#useful_resources} - -Link Resource - ---- - - Useful web tool for inferring schema from existing json - A powerful online editor for json, allowing manipulation of large documents better than most text editors - The JSON standard spec - The (draft standard) JSONSchema spec - A front end library for generating webforms directly from a schema - -## Human readability {#human_readbility} - -Back in our `requirements`{.interpreted-text role="ref"} section, we -noted it was important for humans to read and understand schema. - -The actual documents themselves are pretty easy to read by technical -users. But, for non technical users, readability can be enhanced even -further by the ability to turn `JSONSchema` into web forms -automatically. For our example above, we can autogenerate a web form -straight from the schema: - -
- -
Web form generated from the example schema -above.
-
- -Thus, we can take a schema (or a part of a schema) and use it to -generate a control form for a digital twin in a web interface without -writing a separate form component - great for ease and maintainability. - -## Why not XML? {#why_not_xml} - -In a truly excellent [three-part -blog](https://www.toptal.com/web/json-vs-xml-part-3), writer Seva Savris -takes us through the ups and downs of `JSON` versus `XML`; well worth a -read if wishing to understand the respective technologies better. - -In short, both `JSON` and `XML` are generalised data interchange -specifications and can both can do what we want here. We choose `JSON` -because: - -1. Textual representation is much more concise and easy to understand - (very important where non-developers like engineers and scientists - must be expected to interpret schema) -2. [Attack - vectors](https://www.opswat.com/blog/depth-look-xml-document-attack-vectors). - Because entities in `XML` are not necessarily primitives (unlike in - `JSON`), an `XML` document parser in its default state may leave a - system open to XXE injection attacks and DTD validation attacks, and - therefore requires hardening. `JSON` documents are similarly - afflicated (just like any kind of serialized data) but default - parsers, operating on the premise of only deserializing to primitive - types, are safe by default - it is only when nondefault parsering or - deserialization techniques (such as `JSONP`) are used that the - application becomes vulnerable. By utilising a default `JSON` parser - we can therefore significantly shrink the attack surface of the - system. See [this blog - post](https://blog.securityevaluators.com/xml-vs-json-security-risks-22e5320cf529) - for further discussion. -3. `XML` is powerful\... perhaps too powerful. The standard can be - adapted greatly, resulting in high encapsulation and a high - resilience to future unknowns. Both beneficial. 
However, this - requires developers of twins to maintain interfaces of very high - complexity, adaptable to a much wider variety of input. To enable - developers to progress, we suggest handling changes and future - unknowns through well-considered versioning, whilst keeping their - API simple. -4. `XML` allows baked-in validation of data and attributes. Whilst - advantageous in some situations, this is not a benefit here. We wish - validation to be one-sided: validation of data accepted/generated by - a digital twin should be occur within (at) the boundaries of that - twin. -5. Required validation capabilities, built into `XML` are achievable - with `JSONSchema` (otherwise missing from the pure `JSON` standard) -6. `JSON` is a more compact expression than XML, significantly reducing - memory and bandwidth requirements. Whilst not a major issue for most - modern PCS, sensors on the edge may have limited memory, and both - memory and bandwidth at scale are extremely expensive. Thus for - extremely large networks of interconnected systems there could be - significant speed and cost savings. diff --git a/docs/twines/about/about_other_considerations.md b/docs/twines/about/about_other_considerations.md deleted file mode 100644 index 03d4ddecb..000000000 --- a/docs/twines/about/about_other_considerations.md +++ /dev/null @@ -1,112 +0,0 @@ -# Other Considerations {#other_considerations} - -A variety of thoughts that arose whilst architecting **twined**. - -## Bash-style stdio {#bash_style_stdio} - -Some thought was given to using a very old-school-unix approach to -piping data between twins, via stdout. 
- -Whilst attractive (as being a wildly fast way of piping data between -twins on the same machine) it was felt this was insufficiently general, -eg: - -> - where twins don\'t exist on the same machine or container, making it -> cumbersome to engineer common iostreams -> - where slight differences between different shells might lead to -> incompatibilities or changes in behaviour - -And also unfriendly, eg: - -> - engineers or scientists unfamiliar with subtleties of bash shell -> scripting encounter difficulty piping data around -> - difficult to build friendly web based tools to introspect the data -> and configuration -> - bound to be headaches on windows platforms, even though windows now -> supports bash -> - easy to corrupt using third party libraries (e.g. which print to -> stdout) - -## Units {#Units} - -Being used (mostly) for engineering and scientific analysis, it was -tempting to add in a specified sub-schema for units. For example, -mandating that where values can be given in units, they be specified in -a certain way, like: - -```javascript -{ - "wind_speed": { - "value": 10.2, - "units": "mph" - } -} -``` - -or (more succinct): - -```javascript -{ - "wind_speed": 10.2, - "wind_speed_units": "mph" -} -``` - -It\'s still extremely tempting to provide this facility; or at least -provide some way of specifying in the schema what units a value should -be provided in. Thinking about it but don\'t have time right now. If -anybody wants to start crafting a PR with an extension or update to -**twined** that facilitates this; please raise an issue to start -progressing it. - -## Variable Style {#variable_style} - -A premptive stamp on the whinging\... - -Note that in the `JSON` descriptions above, all variables are named in -`snake_case` rather than `camelCase`. 
This decision, more likely than -even Brexit to divide opinions, is based on: - -- The languages we anticipate being most popular for building twins seem to trend toward snake case (eg - - : [python](https://www.python.org/dev/peps/pep-0008/), - [c++](https://google.github.io/styleguide/cppguide.html)) although - to be fair we might\'ve woefully misjudged which languages start - emerging. - -- The reservation of snake case for the schema spec has the subtle advantage that in future, we might be able to use - - : camelCase within the spec to denote class types in some useful - way, just like in python. Not sure yet; just mulling. - -- The `requirements`{.interpreted-text role="ref"} mention human-readability as a must; - - : [this - paper](https://ieeexplore.ieee.org/document/5521745?tp=&arnumber=5521745&url=http:%2F%2Fieeexplore.ieee.org%2Fxpls%2Fabs_all.jsp%3Farnumber%3D5521745) - suggests a 20% slower comprehension of camel case than snake, - although to be fair that\'s probably arguable. - -- We\'re starting in Python so are taking a lead from PEP8, which is bar none the most successful style guide on the - - : planet, because it got everybody on the same page really early on. - -If existing code that you\'re dropping in uses camelCase, please don\'t -file that as an issue\... converting property names automatically after -schema validation generation is trivial, there are tons of libraries -(like [humps](https://humps.readthedocs.io/en/latest/)) to do it. - -We\'d also consider a pull request for a built-in utility converting -[to](https://pypi.org/project/camelcase/) and -[from](https://pypi.org/project/snakecase/) that does this following -validation and prior to returning results. Suggest your proposed -approach on the [issues board](https://github.com/octue/twined). - -## Language Choice {#language_choice} - -**twined** is presently released in python only. 
It won\'t be too hard -to replicate functionality in other languages, and we\'re considering -other languages at present, so might be easily persuadable ;) - -If you require implementation of **twined** in a different language, and -are willing to consider sponsorship of development and maintenance of -that library, please [file an issue](https://github.com/octue/twined). diff --git a/docs/twines/about/about_requirements.md b/docs/twines/about/about_requirements.md deleted file mode 100644 index 9727e323d..000000000 --- a/docs/twines/about/about_requirements.md +++ /dev/null @@ -1,37 +0,0 @@ -# Requirements of the framework {#requirements} - -A _twine_ must describe a digital twin, and have multiple roles. It -must: - -1. Define what data is required by a digital twin, in order to run -2. Define what data will be returned by the twin following a successful - run -3. Define the formats of these data, in such a way that incoming data - can be validated -4. Define what other (1st or 3rd party) twins / services are required - by this one in order for it to run. - -If this weren\'t enough, the description: - -1. Must be trustable (i.e. a _twine_ from an untrusted, corrupt or - malicious third party should be safe to at least read) -2. Must be machine-readable _and machine-understandable_[^1] -3. Must be human-readable _and human-understandable_[^2] -4. Must be discoverable (that is, searchable/indexable) otherwise - people won\'t know it\'s there in orer to use it. - -Fortunately for digital twin developers, several of these requirements -have already been seen for data interchange formats developed for the -web. **twined** uses `JSON` and `JSONSchema` to help interchange data. - -If you\'re not already familiar with `JSONSchema` (or wish to know why -**twined** uses `JSON` over the seemingly more appropriate `XML` -standard), see `introducing_json_schema`{.interpreted-text role="ref"}. 
- -[^1]: - _Understandable_ essentially means that, once read, the machine or - human knows what it actually means and what to do with it. - -[^2]: - _Understandable_ essentially means that, once read, the machine or - human knows what it actually means and what to do with it. diff --git a/docs/twines/about/index.md b/docs/twines/about/index.md deleted file mode 100644 index fce02f56c..000000000 --- a/docs/twines/about/index.md +++ /dev/null @@ -1,16 +0,0 @@ -# About Twines {#about} - -**Twined** is a framework for describing a digital twin or data service. - -We call these descriptions \"twines\". To just get started building a -_twine_, check out the `quick_start`{.interpreted-text role="ref"}. To -get into the detail of what\'s in a _twine_, see -`anatomy`{.interpreted-text role="ref"}. - -Here, we look at requirements for the framework, our motivations and -background, and some of the decisions made while developing **twined**. - -::: {.toctree maxdepth="1"} -about_digital_twins about_requirements about_introducing_json_schema -about_other_considerations -::: diff --git a/docs/twines/anatomy.md b/docs/twines/anatomy.md deleted file mode 100644 index 96952b1c1..000000000 --- a/docs/twines/anatomy.md +++ /dev/null @@ -1,109 +0,0 @@ -# Anatomy Of The Twine File {#anatomy} - -The main point of **twined** is to enable engineers and scientists to -easily (and rigorously) define a digital twin or data service. - -This is done by adding a `twine.json` file to the repository containing -your code. Adding a _twine_ means you can: - -- communicate (to you or a colleague) what data is required by this - service -- communicate (to another service / machine) what data is required -- deploy services automatically with a provider like - [Octue](https://www.octue.com). - -To just get started building a _twine_, check out the -`quick_start`{.interpreted-text role="ref"}. To learn more about twines -in general, see `about`{.interpreted-text role="ref"}. 
Here, we describe -the parts of a _twine_ (\"strands\") and what they mean. - -## Strands - -A _twine_ has several sections, called _strands_. Each defines a -different kind of data required (or produced) by the twin. - ---- - -Strand Describes the twin\'s requirements for\... - ---- - -`Configuration Values `{.interpreted-text Data, in JSON form, used for configuration of the -role="ref"} twin/service. - -`Configuration Manifest `{.interpreted-text Files/datasets required by the twin at -role="ref"} configuration/startup - -`Input Values `{.interpreted-text Data, in JSON form, passed to the twin in order -role="ref"} to trigger an analysis - -`Input Manifest `{.interpreted-text role="ref"} Files/datasets passed with Input Values to -trigger an analysis - -`Output Values `{.interpreted-text Data, in JSON form, that will be produced by the -role="ref"} twin (in response to inputs) - -`Output Manifest `{.interpreted-text Files/datasets that will be produced by the twin -role="ref"} (in response to inputs) - -`Credentials `{.interpreted-text role="ref"} Credentials that are required by the twin in -order to access third party services - -`Children `{.interpreted-text role="ref"} Other twins, access to which are required for -this twin to function - -`Monitors `{.interpreted-text role="ref"} Visual and progress outputs from an analysis - ---- - -::: {.toctree maxdepth="1" hidden=""} -anatomy_values anatomy_manifest anatomy_credentials anatomy_monitors -anatomy_children -::: - -## Twine File Schema {#twine_file_schema} - -Because the `twine.json` file itself is in `JSON` format with a strict -structure, **twined** uses a schema to make that twine files are -correctly written (a \"schema-schema\", if you will, since a twine -already contains schema). Try not to think about it. But if you must, -the _twine_ schema is -[here](https://github.com/octue/twined/blob/master/twined/schema/twine_schema.json). 
- -The first thing **twined** always does is check that the `twine.json` -file itself is valid, and give you a descriptive error if it isn\'t. - -## Other External I/O {#other_external_io} - -A twin might: - -- GET/POST data from/to an external API, -- query/update a database, -- upload files to an object store, -- trigger events in another network, or -- perform pretty much any interaction you can think of with other - applications over the web. - -However, such data exchange may not be controllable by **twined** (which -is intended to operate at the boundaries of the twin) unless the -resulting data is returned from the twin (and must therefore be -compliant with the schema). - -So, there\'s nothing for **twined** to do here, and no need for a strand -in the _twine_ file. However, interacting with third party APIs or -databases might require some credentials. See -`credentials_strand`{.interpreted-text role="ref"} for help with that. - -:::: note -::: title -Note -::: - -This is actually a very common scenario. For example, the purpose of the -twin might be to fetch data (like a weather forecast) from some external -API then return it in the `output_values` for use in a network of -digital twins. But its the twin developer\'s job to do the fetchin\' and -make sure the resulting data is compliant with the -`output_values_schema` (see `values_based_strands`{.interpreted-text -role="ref"}). -:::: diff --git a/docs/twines/anatomy_children.md b/docs/twines/anatomy_children.md deleted file mode 100644 index 29e5fbb45..000000000 --- a/docs/twines/anatomy_children.md +++ /dev/null @@ -1,9 +0,0 @@ -# Children Strand {#children_strand} - -:::: attention -::: title -Attention -::: - -Coming Soon! 
-:::: diff --git a/docs/twines/anatomy_manifest.md b/docs/twines/anatomy_manifest.md deleted file mode 100644 index dfb3082cd..000000000 --- a/docs/twines/anatomy_manifest.md +++ /dev/null @@ -1,420 +0,0 @@ -# Manifest-based Strands {#manifest_strands} - -Frequently, twins operate on files containing some kind of data. These -files need to be made accessible to the code running in the twin, in -order that their contents can be read and processed. Conversely, a twin -might produce an output dataset which must be understood by users. - -The `configuration_manifest`, `input_manifest` and `output_manifest` -strands describe what kind of datasets (and associated files) are -required / produced. - -:::: note -::: title -Note -::: - -Files are always contained in datasets, even if there\'s only one file. -It\'s so that we can keep nitty-gritty file metadata separate from the -more meaningful, higher level metadata like what a dataset is for. -:::: - -:::::: tabs -::: group-tab -Configuration Manifest Strand - -This describes datasets/files that are required at startup of the twin / -service. They typically contain a resource that the twin might use -across many analyses. - -For example, a twin might predict failure for a particular component, -given an image. It will require a trained ML model (saved in a -`*.pickle` or `*.json`). While many thousands of predictions might be -done over the period that the twin is deployed, all predictions are done -using this version of the model - so the model file is supplied at -startup. -::: - -::: group-tab -Input Manifest Strand - -These files are made available for the twin to run a particular analysis -with. Each analysis will likely have different input datasets. - -For example, a twin might be passed a dataset of LiDAR `*.scn` files and -be expected to compute atmospheric flow properties as a timeseries -(which might be returned in the -`output values `{.interpreted-text role="ref"} for -onward processing and storage). 
-::: - -::: group-tab -Output Manifest Strand - -Files are created by the twin during an analysis, tagged and stored as -datasets for some onward purpose. This strand is not used for sourcing -data; it enables users or other services to understand appropriate -search terms to retrieve datasets produced. -::: -:::::: - -## Describing Manifests {#describing_manifests} - -Manifest-based strands are a **description of what files are needed**. -The purpose of the manifest strands is to provide a helper to a wider -system providing datafiles to digital twins. - -::::::::::::::::::::: tabs -:::::::: group-tab -Configuration Manifest Strand - -::::::: accordion -:::: accordion-row -Show twine containing this strand - -::: {.literalinclude language="javascript"} -../../octue/twined/examples/damage_classifier_service/twine.json -::: -:::: - -:::: accordion-row -Show a matching file manifest - -::: {.literalinclude language="javascript"} -../../octue/twined/examples/damage_classifier_service/data/configuration_manifest.json -::: -:::: -::::::: -:::::::: - -:::::::: group-tab -Input Manifest Strand - -Here we specify that two datasets (and all or some of the files -associated with them) are required, for a service that cross-checks -meteorological mast data and power output data for a wind farm. 
- -::::::: accordion -:::: accordion-row -Show twine containing this strand - -::: {.literalinclude language="javascript"} -../../octue/twined/examples/met_mast_scada_service/strands/input_manifest.json -::: -:::: - -:::: accordion-row -Show a matching file manifest - -::: {.literalinclude language="javascript"} -../../octue/twined/examples/met_mast_scada_service/data/input_manifest.json -::: -:::: -::::::: -:::::::: - -:::::::: group-tab -Output Manifest Strand - -::::::: accordion -:::: accordion-row -Show twine containing this strand - -::: {.literalinclude language="javascript"} -../../octue/twined/examples/met_mast_scada_service/strands/output_manifest.json -::: -:::: - -:::: accordion-row -Show a matching file manifest - -::: {.literalinclude language="javascript"} -../../octue/twined/examples/met_mast_scada_service/data/output_manifest.json -::: -:::: -::::::: -:::::::: -::::::::::::::::::::: - -## File tag templates {#file_tag_templates} - -Datafiles can be tagged with key-value pairs of relevant metadata that -can be used in analyses. Certain datasets might need one set of metadata -on each file, while others might need a different set. The required (or -optional) file tags can be specified in the twine in the -`file_tags_template` property of each dataset of any `manifest` strand. -Each file in the corresponding manifest strand is then validated against -its dataset\'s file tag template to ensure the required tags are -present. - -::::::::::: tabs -:::::: group-tab -Manifest strand with file tag template - -The example below is for an input manifest, but the format is the same -for configuration and output manifests. 
- -::::: accordion -::: accordion-row -Show twine containing a manifest strand with a file tag template - -```javascript -{ - "input_manifest": { - "datasets": [ - { - "key": "met_mast_data", - "purpose": "A dataset containing meteorological mast data", - "file_tags_template": { - "type": "object", - "properties": { - "manufacturer": {"type": "string"}, - "height": {"type": "number"}, - "is_recycled": {"type": "boolean"} - }, - "required": ["manufacturer", "height", "is_recycled"] - } - } - ] - } -} -``` - -::: - -::: accordion-row -Show a matching file manifest - -```javascript -{ - "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", - "datasets": [ - { - "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", - "name": "met_mast_data", - "tags": {}, - "labels": ["met", "mast", "wind"], - "files": [ - { - "path": "input/datasets/7ead7669/file_1.csv", - "cluster": 0, - "sequence": 0, - "extension": "csv", - "labels": ["mykeyword1", "mykeyword2"], - "tags": { - "manufacturer": "vestas", - "height": 500, - "is_recycled": true - }, - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "file_1.csv" - }, - { - "path": "input/datasets/7ead7669/file_1.csv", - "cluster": 0, - "sequence": 1, - "extension": "csv", - "labels": [], - "tags": { - "manufacturer": "vestas", - "height": 500, - "is_recycled": true - }, - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "file_1.csv" - } - ] - } - ] -} -``` - -::: -::::: -:::::: - -:::::: group-tab -Manifest strand with a remote file tag template - -A remote reference can also be given for a file tag template. If the tag -template somewhere public, this is useful for sharing the template -between one or more teams working on the same type of data. - -The example below is for an input manifest, but the format is the same -for configuration and output manifests. It also shows two different tag -templates being specified for two different types of dataset required by -the manifest. 
- -::::: accordion -::: accordion-row -Show twine using a remote tag template - -```javascript -{ - "input_manifest": { - "datasets": [ - { - "key": "met_mast_data", - "purpose": "A dataset containing meteorological mast data", - "file_tags_template": { - "$ref": "https://refs.schema.octue.com/octue/my-file-type-tag-template/0.0.0.json" - } - }, - { - "key": "some_other_kind_of_dataset", - "purpose": "A dataset containing something else", - "file_tags_template": { - "$ref": "https://refs.schema.octue.com/octue/another-file-type-tag-template/0.0.0.json" - } - } - ] - } -} -``` - -::: - -::: accordion-row -Show a matching file manifest - -```javascript -{ - "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", - "datasets": [ - { - "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", - "name": "met_mast_data", - "tags": {}, - "labels": ["met", "mast", "wind"], - "files": [ - { - "path": "input/datasets/7ead7669/file_1.csv", - "cluster": 0, - "sequence": 0, - "extension": "csv", - "labels": ["mykeyword1", "mykeyword2"], - "tags": { - "manufacturer": "vestas", - "height": 500, - "is_recycled": true - }, - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "file_1.csv" - }, - { - "path": "input/datasets/7ead7669/file_1.csv", - "cluster": 0, - "sequence": 1, - "extension": "csv", - "labels": [], - "tags": { - "manufacturer": "vestas", - "height": 500, - "is_recycled": true - }, - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "file_1.csv" - } - ] - }, - { - "id": "7ead7669-8162-4f64-8cd5-4abe92509e29", - "name": "some_other_kind_of_dataset", - "tags": {}, - "labels": ["my-label"], - "files": [ - { - "path": "input/datasets/7eadpp9/interesting_file.dat", - "cluster": 0, - "sequence": 0, - "extension": "dat", - "labels": [], - "tags": { - "length": 864, - "orientation_angle": 85 - }, - "id": "abff07bc-7c19-4ed5-be6d-a6546eae9071", - "name": "interesting_file.csv" - }, - } - ] -} -``` - -::: -::::: -:::::: -::::::::::: - -TODO - clean up or remove this section - -## How 
Filtering Works {#how_filtering_works} - -It\'s the job of **twined** to make sure of two things: - -1. make sure the _twine_ file itself is valid, - - > **File data (input, output)** - > - > Files are not streamed directly to the digital twin (this would - > require extreme bandwidth in whatever system is orchestrating all - > the twins). Instead, files should be made available on the local - > storage system; i.e. a volume mounted to whatever container or VM - > the digital twin runs in. - > - > Groups of files are described by a `manifest`, where a manifest is - > (in essence) a catalogue of files in a dataset. - > - > A digital twin might receive multiple manifests, if it uses - > multiple datasets. For example, it could use a 3D point cloud - > LiDAR dataset, and a meteorological dataset. - > - > ```javascript - > { - > "manifests": [ - > { - > "type": "dataset", - > "id": "3c15c2ba-6a32-87e0-11e9-3baa66a632fe", // UUID of the manifest - > "files": [ - > { - > "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", // UUID of that file - > "sha1": "askjnkdfoisdnfkjnkjsnd" // for quality control to check correctness of file contents - > "name": "Lidar - 4 to 10 Dec.csv", - > "path": "local/file/path/to/folder/containing/it/", - > "type": "csv", - > "metadata": { - > }, - > "size_bytes": 59684813, - > "tags": {"special_number": 1}, - > "labels": ["lidar", "helpful", "information", "like"], // Searchable, parsable and filterable - > }, - > { - > "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - > "name": "Lidar - 11 to 18 Dec.csv", - > "path": "local/file/path/to/folder/containing/it/", - > "type": "csv", - > "metadata": { - > }, - > "size_bytes": 59684813, - > "tags": {"special_number": 2}, - > "labels": ["lidar", "helpful", "information", "like"] // Searchable, parsable and filterable - > }, - > { - > "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - > "name": "Lidar report.pdf", - > "path": "local/file/path/to/folder/containing/it/", - > "type": "pdf", - > "metadata": { - > 
}, - > "size_bytes": 484813, - > "tags": {}, - > "labels": ["report"] // Searchable, parsable and filterable - > } - > ] - > }, - > { - > // ... another dataset manifest ... - > } - > ] - > } - > ``` diff --git a/docs/twines/anatomy_monitors.md b/docs/twines/anatomy_monitors.md deleted file mode 100644 index ee8f0a7e1..000000000 --- a/docs/twines/anatomy_monitors.md +++ /dev/null @@ -1,57 +0,0 @@ -# Monitor Message Strand {#monitors_strand} - -The `monitor_message_schema` strand is _values-based_ meaning the data -that matches the strand is in JSON form. It is a _json schema_ which -describes a monitor message. - -:::: tabs -::: group-tab -Monitors Strand - -There are two kinds of monitoring data required from a digital twin. - -**Monitor data (output)** - -Values for health and progress monitoring of the twin, for example -percentage progress, iteration number and status - perhaps even -residuals graphs for a converging calculation. Broadly speaking, this -should be user-facing information. - -_This kind of monitoring data can be in a suitable form for display on a -dashboard_ - -**Log data (output)** - -Logged statements, typically in iostream form, produced by the twin -(e.g. via python\'s `logging` module) must be capturable as an output -for debugging and monitoring purposes. Broadly speaking, this should be -developer-facing information. 
-::: -:::: - -Let\'s look at basic examples for twines containing each of these -strands: - -:::: tabs -::: group-tab -Monitors Strand - -**Monitor data (output)** - -```javascript -{ - "monitor_message_schema": { - "type": "object", - "properties": { - "my_property": { - "type": "number" - } - }, - "required": ["my_property"] - } -} -``` - -**Log data (output)** -::: -:::: diff --git a/docs/twines/examples.md b/docs/twines/examples.md deleted file mode 100644 index 974c05706..000000000 --- a/docs/twines/examples.md +++ /dev/null @@ -1,200 +0,0 @@ -# Examples - -Here, we look at example use cases for the library, and show how to use -it in python. - -It\'s also well worth looking at the unit test cases copied straight -from the unit test cases, so you can always check there to see how -everything hooks up. - -## \[Simple\] Equipment installation cost {#example_equipment_installation_cost} - -::::: tabs -::: group-tab -Scenario - -You need to provide your team with an estimate for installation cost of -an equipment foundation. - -It\'s a straightforward calculation for you, but the Logistics Team -keeps changing the installation position, to try and optimise the -overall project logistics. - -Each time the locations change, the GIS team gives you an updated -embedment depth, which is what you use (along with steel cost and -foundation type), to calculate cost and report it back. - -This twine allows you to define to create a wrapper around your scripts -that communicates to the GIS team what you need as an input, communicate -to the logistics team what they can expect as an output. - -When deployed as a digital twin, the calculation gets automatically -updated, leaving you free to get on with all the other work! -::: - -::: group-tab -Twine - -We specify the `steel_cost` and `foundation_type` as `configuration` -values, which you can set on startup of the twin. - -Once the twin is running, it requires the `embedment_depth` as an -`input_value` from the GIS team. 
A member of the GIS team can use your -twin to get `foundation_cost` directly. - -```javascript -{ - "title": "Foundation Cost Model", - "description": "This twine helps compute the cost of an installed foundation.", - "children": [ - ], - "configuration_values_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "Foundation cost twin configuration", - "description": "Set config parameters and constants at startup of the twin.", - "type": "object", - "properties": { - "steel_cost": { - "description": "The cost of steel in GBP/m^3. To get a better predictive model, you could add an economic twin that forecasts the cost of steel using the project timetable.", - "type": "number", - "minimum": 0, - "default": 3000 - }, - "foundation_type": { - "description": "The type of foundation being used.", - "type": "string", - "pattern": "^(monopile|twisted-jacket)$", - "default": "monopile" - } - } - }, - "input_values_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "Input Values schema for the foundation cost twin", - "description": "These values are supplied to the twin asynchronously over a web socket. 
So as these values change, the twin can reply with an update.", - "type": "object", - "properties": { - "embedment_depth": { - "description": "Embedment depth in metres", - "type": "number", - "minimum": 10, - "maximum": 500 - } - } - }, - "output_manifest": { - "datasets": [] - }, - "output_values_schema": { - "title": "Output Values schema for the foundation cost twin", - "description": "The response supplied to a change in input values will always conform to this schema.", - "type": "object", - "properties": { - "foundation_cost": { - "description": "The foundation cost.", - "type": "integer", - "minimum": 2 - } - } - } -} -``` - -::: -::::: - -## \[Simple\] Site weather conditions {#example_site_weather_conditions} - -::::: tabs -::: group-tab -Scenario - -You need to be able to get characteristic weather conditions at a -specific location, for a range of reasons including assessing extreme -design loads. The values you need are computed in a script, which calls -a Weather API (provided by a third party), but also needs a dataset of -\"Wind Resource\" files. 
-::: - -::: group-tab -Twine - -```javascript -{ - "title": "Weather Service Digital Twin", - "description": "Provides a model for design extreme weather conditions given a location", - "notes": "Easily extendable with children to add forecast and historical data of different types.", - "credentials": [ - { - "name": "WEATHER_API_SECRET_KEY", - "purpose": "Token for accessing a 3rd party weather API service" - } - ], - "input_manifest": { - "datasets": [ - { - "key": "wind_resource_data", - "purpose": "A dataset containing Wind Resource Grid files" - } - ] - }, - "input_values_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "Input Values for the weather service twin", - "description": "This is a simple example for getting metocean conditions at a single location", - "type": "object", - "properties": { - "location": { - "description": "Location", - "type": "object", - "properties": { - "latitude": { - "type": "number", - "minimum": -90, - "maximum": 90 - }, - "longitude": { - "type": "number", - "minimum": -180, - "maximum": 180 - }, - "srid": { - "description": "The Spatial Reference System ID for the coordinate. 
Default is 4326 (WGS84)", - "type": "integer", - "default": 4326 - } - } - } - } - }, - "output_manifest": { - "datasets": [ - { - "key": "production_data", - "purpose": "A dataset containing production data", - "tags": {"cleaned": true}, - "labels": ["production", "wind"] - } - ] - }, - "output_values_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "Output Values for the metocean service twin", - "description": "The output values strand of an example twine", - "type": "object", - "properties": { - "water_depth": { - "description": "Design water depth for use in concept calculations", - "type": "number" - }, - "extreme_wind_speed": { - "description": "Extreme wind speed value for use in concept calculations", - "type": "number" - } - } - } -} -``` - -::: -::::: diff --git a/docs/twines/index.md b/docs/twines/index.md deleted file mode 100644 index 6722bf27e..000000000 --- a/docs/twines/index.md +++ /dev/null @@ -1,109 +0,0 @@ -:::: attention -::: title -Attention -::: - -This library is in very early stages. Like the idea of it? Please [star -us on GitHub](https://github.com/octue/twined) and contribute via the -[issues board](https://github.com/octue/twined/issues) and -[roadmap](https://github.com/octue/twined/projects/1). -:::: - -# Twined - -**twined** is a library to help create and connect -`digital_twins`{.interpreted-text role="ref"} and data services. - -> - -A digital twin is a virtual representation of a real life being - a -physical asset like a wind turbine or car - or even a human. Like real -things, digital twins need to interact, so can be connected together, -but need a common communication framework to do so. - -**twined** helps you to define a single file, a \"twine\", that defines -a digital twin / data service. It specifies specifying its data -interfaces, connections to other twins, and other requirements. - -Any person, or any computer, can read a twine and understand -_what-goes-in_ and _what-comes-out_. 
That makes it easy to collaborate -with other teams, since everybody is crystal clear about what\'s needed. - -
- -
Digital twins / data services connected in a hierarchy. Each -blue circle represents a twin, coupled to its neighbours. Yellow nodes -are where schema are used to connect twins.
-
- -## Aims - -**twined** provides a toolkit to help create and validate \"twines\" - -descriptions of a digital twin, what data it requires, what it does and -how it works. - -The goals of this **twined** library are as follows: - -: - Provide a clear framework for what a _twine_ can and/or must -contain - Provide functions to validate incoming data against a known -_twine_ - Provide functions to check that a _twine_ itself is valid - Provide (or direct you to) tools to create _twines_ describing -what you require - -In `anatomy`{.interpreted-text role="ref"}, we describe the different -parts of a twine (examining how digital twins connect and interact\... -building them together in hierarchies and networks). But you may prefer -to dive straight in with the `quick_start`{.interpreted-text role="ref"} -guide. - -The scope of **twined** is not large. Many other libraries will deal -with hosting and deploying digital twins, still more will deal with the -actual analyses done within them. **twined** purely deals with parsing -and checking the information exchanged. - -## Raison d\'etre {#reason_for_being} - -Octue believes that a lynchpin of solving climate change is the ability -for all engineering, manufacturing, supply chain and infrastructure -plant to be connected together, enabling strong optimisation and -efficient use of these systems. - -To enable engineers and scientists to build, connect and run digital -twins in large networks (or even in small teams!) it is necessary for -everyone to be on the same page - the -`gemini_principles`{.interpreted-text role="ref"} are a great way to -start with that, which is why we\'ve released this part of our -technology stack as open source, to support those principles and help -develop a wider ecosystem. 
- -The main goal is to **help engineers and scientists focus on doing -engineering and science** - instead of apis, data cleaning/management, -and all this cloud-pipeline-devops-test-ci-ml BS that takes up 90% of a -scientist\'s time, when they should be spending their valuable time -researching migratory patterns of birds, or cell structures, or wind -turbine performance, or whatever excites them. - -## Uses - -At [Octue](https://www.octue.com), **twined** is used as a core part of -our application creation process: - -> - As a format to communicate requirements to our partners in research -> projects -> - As a tool to validate incoming data to digital twins -> - As a framework to help establish schema when designing digital twins -> - As a source of information on digital twins in our network, to help -> map and connect twins together - -We\'d like to hear about your use case. Please get in touch! - -We use the [GitHub Issue Tracker](https://github.com/octue/twined) to -manage bug reports and feature requests. Please note, this is not a -\"general help\" forum; we recommend Stack Overflow for such questions. -For really gnarly issues or for help designing digital twin schema, -Octue is able to provide application support services for those building -digital twins using **twined**. - -::: {.toctree maxdepth="2"} -self quick_start anatomy about deployment license version_history -::: diff --git a/docs/twines/lifecycle.md b/docs/twines/lifecycle.md deleted file mode 100644 index 133e7f096..000000000 --- a/docs/twines/lifecycle.md +++ /dev/null @@ -1,34 +0,0 @@ -> Data matching the `configuration_values_schema` is supplied to the -> digital twin / data service at startup. -> -> It\'s generally used to define control parameters relating to what the -> service should do, or how it should operate. For example, should it -> produce output images as low resolution PNGs or as SVGs? How many -> iterations of a fluid flow solver should be used? 
What is the -> acceptable error level on an classifier algorithm? -> -> Input Values -> -> Once configuration data supplied to a service has been validated, it -> can accept inputs and run analyses using them. -> -> Depending on the way it\'s deployed (see -> `deployment`{.interpreted-text role="ref"}), the `input_values` might -> come in from a web request, over a websocket or called directly from -> the command line or another library. -> -> However it comes, new `input_values`, which are in `JSON` format, are -> checked against the `input_values_schema` strand of the twine. If they -> match, then analysis can proceed. -> -> Output Values -> -> Once a service has Data matching the `output_values_schema` is -> supplied to the service while it\'s running. Depending on the way -> it\'s deployed, the values might come in from a web request, over a -> websocket or called directly from another library -> -> Input For example current rotor speed, or forecast wind direction. -> -> Values might be passed at instantiation of a twin (typical -> application-like process) or via a socket. diff --git a/docs/twines/quick_start.md b/docs/twines/quick_start.md deleted file mode 100644 index c8bee73f0..000000000 --- a/docs/twines/quick_start.md +++ /dev/null @@ -1,5 +0,0 @@ -# Quick Start {#quick_start} - -::: {.toctree maxdepth="2"} -quick_start_installation quick_start_create_your_first_twine -::: diff --git a/docs/twines/quick_start_create_your_first_twine.md b/docs/twines/quick_start_create_your_first_twine.md deleted file mode 100644 index 0d0f997a0..000000000 --- a/docs/twines/quick_start_create_your_first_twine.md +++ /dev/null @@ -1,112 +0,0 @@ -# Create your first twine {#create_your_first_twine} - -Let\'s say we want a digital twin that accepts two values, uses them to -make a calculation, then gives the result. Anyone connecting to the twin -will need to know what values it requires, and what it responds with. 
- -First, create a blank text file, call it [twine.json]{.title-ref}. -We\'ll give the twin a title and description. Paste in the following: - -```javascript -{ - "title": "My first digital twin... of an atomising discombobulator", - "description": "A simple example... estimates the `foz` value of an atomising discombobulator." -} -``` - -Now, let\'s define an input values strand, to specify what values are -required by the twin. For this we use a json schema (you can read more -about them in `introducing_json_schema`{.interpreted-text role="ref"}). -Add the `input_values` field, so your twine looks like this: - -```javascript -{ - "title": "My first digital twin", - "description": "A simple example to build on..." - "input_values_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "Input Values schema for my first digital twin", - "description": "These values are supplied to the twin by another program (often over a websocket, depending on your integration provider). So as these values change, the twin can reply with an update.", - "type": "object", - "properties": { - "foo": { - "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s", - "type": "number", - "minimum": 10, - "maximum": 500 - }, - "baz": { - "description": "The baz value... period of the discombobulator's recombulation unit, in s", - "type": "number", - "minimum": 0, - "maximum": 1000 - } - } - } -} -``` - -Finally, let\'s define an output values strand, to define what kind of -data is returned by the twin: - -```javascript -"output_values_schema": { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "title": "Output Values schema for my first digital twin", - "description": "The twin will output data that matches this schema", - "type": "object", - "properties": { - "foz": { - "description": "Estimate of the foz value... 
efficiency of the discombobulator in %", - "type": "number", - "minimum": 10, - "maximum": 500 - } - } -} -``` - -# Load the twine {#load_the_twine} - -**twined** provides a [Twine()]{.title-ref} class to load a twine (from -a file or a json string). The loading process checks the twine itself is -valid. It\'s as simple as: - -```py -from octue.twined import Twine - -my_twine = Twine(source='twine.json') -``` - -# Validate some inputs {#validate_some_inputs} - -Say we have some json that we want to parse and validate, to make sure -it matches what\'s required for input values. - -```py -my_input_values = my_twine.validate_input_values(json='{"foo": 30, "baz": 500}') -``` - -You can read the values from a file too. Paste the following into a file -named `input_values.json`: - -```javascript -{ - "foo": 30, - "baz": 500 -} -``` - -Then parse and validate directly from the file: - -```py -my_input_values = my_twine.validate_input_values(source="input_values.json") -``` - -:::: attention -::: title -Attention -::: - -LIBRARY IS UNDER CONSTRUCTION! WATCH THIS SPACE FOR MORE! 
-:::: diff --git a/docs/version_history.md b/docs/version_history.md index 8068e4209..31bcacf2a 100644 --- a/docs/version_history.md +++ b/docs/version_history.md @@ -1,4 +1,4 @@ -# Version History {#chapter-version-history} +# Version history See our [releases on GitHub.](https://github.com/octue/octue-sdk-python/releases) diff --git a/mkdocs.yml b/mkdocs.yml index 8d9eff6b6..6142dc4d9 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,37 +2,47 @@ site_name: Octue Twined site_url: https://twined.octue.com nav: - - index.md + - Overview: index.md - installation.md - - Datafiles, datasets, and manifests: - - data_containers/index.md - - data_containers/datafile.md - - data_containers/dataset.md - - data_containers/manifest.md - - services.md - - asking_questions.md - - creating_services.md - - Twines: - - twines/index.md - - Quickstart: twines/quick_start_create_your_first_twine.md - - twines/anatomy.md - - About Twines: - - twines/about/index.md - - twines/about/about_digital_twins.md - - twines/about/about_requirements.md - - twines/about/about_introducing_json_schema.md - - twines/about/about_other_considerations.md - - updating_services.md - - running_services_locally.md - - deploying_services.md - - testing_services.md - - troubleshooting_services.md - - logging.md - authentication.md - - inter_service_compatibility.md - # - api.md + - Getting started: + - Using services: getting_started/using_services.md + - Creating services: getting_started/creating_services.md + - Managing infrastructure: getting_started/managing_infrastructure.md + - Core concepts: + - Datafiles, datasets, and manifests: + - core_concepts/data_containers/index.md + - core_concepts/data_containers/datafiles.md + - core_concepts/data_containers/datasets.md + - core_concepts/data_containers/manifests.md + - More: + - Downloading datafiles: core_concepts/data_containers/downloading_datafiles.md + - core_concepts/data_containers/available_filters.md + - core_concepts/services.md + - 
core_concepts/asking_questions.md + - core_concepts/creating_services.md + - core_concepts/creating_apps.md + - core_concepts/updating_services.md + - core_concepts/running_services_locally.md + - Deploying services: core_concepts/deploying_services.md + - core_concepts/testing_services.md + - core_concepts/troubleshooting_services.md + - core_concepts/troubleshooting_infrastructure.md + - core_concepts/logging.md + - core_concepts/inter_service_compatibility.md + - Twines: + - core_concepts/twines/anatomy.md + - core_concepts/twines/values.md + - core_concepts/twines/manifest.md + - core_concepts/twines/credentials.md + - core_concepts/twines/monitors.md + - core_concepts/twines/twine_file_quickstart.md + - core_concepts/twines/examples.md + + - glossary.md - license.md - version_history.md + - support.md theme: name: material @@ -51,6 +61,8 @@ theme: - search.suggest - search.highlight - search.share + - content.tabs.link + - content.code.copy palette: # Palette toggle for light mode @@ -74,6 +86,11 @@ markdown_extensions: - attr_list - md_in_html - pymdownx.blocks.caption + - pymdownx.tabbed: + alternate_style: true + slugify: !!python/object/apply:pymdownx.slugs.slugify + kwds: + case: lower plugins: - privacy diff --git a/octue/cli.py b/octue/cli.py index 87b3e4ea0..3bdac7321 100644 --- a/octue/cli.py +++ b/octue/cli.py @@ -11,7 +11,7 @@ from octue.cloud import storage from octue.cloud.storage import GoogleCloudStorageClient from octue.definitions import LOCAL_SDK_VERSION -from octue.log_handlers import apply_log_handler, get_remote_handler +from octue.log_handlers import apply_log_handler from octue.resources import Manifest from octue.twined.cloud.events.answer_question import answer_question from octue.twined.cloud.events.question import make_question_event @@ -24,6 +24,7 @@ from octue.twined.definitions import MANIFEST_FILENAME, VALUES_FILENAME from octue.twined.exceptions import ServiceAlreadyExists from octue.twined.resources import Child, 
service_backends +from octue.twined.resources.example import calculate_fibonacci_sequence from octue.twined.runner import Runner from octue.utils.decoders import OctueJSONDecoder from octue.utils.encoders import OctueJSONEncoder @@ -34,7 +35,6 @@ @click.group(context_settings={"help_option_names": ["-h", "--help"]}) -@click.option("--logger-uri", default=None, show_default=True, help="Stream logs to a websocket at the given URI.") @click.option( "--log-level", default="info", @@ -43,20 +43,14 @@ help="Log level used for the analysis.", ) @click.version_option(version=LOCAL_SDK_VERSION) -def octue_cli(logger_uri, log_level): +def octue_cli(log_level): """The CLI for Octue SDKs and APIs, most notably Twined. - Read more in the docs: https://octue-python-sdk.readthedocs.io/en/latest/ + Read more in the docs: https://twined.octue.com """ - global_cli_context["logger_uri"] = logger_uri - global_cli_context["log_handler"] = None global_cli_context["log_level"] = log_level.upper() - apply_log_handler(log_level=log_level.upper()) - if global_cli_context["logger_uri"]: - global_cli_context["log_handler"] = get_remote_handler(logger_uri=global_cli_context["logger_uri"]) - @octue_cli.group() def twined(): @@ -69,12 +63,7 @@ def question(): """Ask a new question to an Octue Twined data service or interact with a previous question.""" -@question.group() -def ask(): - """Ask a new question to an Octue Twined data service.""" - - -@ask.command() +@question.command() @click.argument("sruid", type=str) @click.option( "-i", @@ -113,13 +102,28 @@ def ask(): "`OCTUE_SERVICE_CONFIGURATION_PATH` environment variable is used if present, otherwise the local path `octue.yaml` " "is used.", ) -def remote(sruid, input_values, input_manifest, project_id, asynchronous, service_config): +def ask(sruid, input_values, input_manifest, project_id, asynchronous, service_config): """Ask a question to a remote Octue Twined service. 
SRUID should be a valid service revision unique identifier for an existing Octue Twined service e.g. - octue question ask remote your-org/example-service:1.2.0 + octue twined question ask your-org/example-service:1.2.0 """ + if input_values: + input_values = json.loads(input_values, cls=OctueJSONDecoder) + + if sruid.startswith("example/"): + sequence = calculate_fibonacci_sequence(n=input_values.get("n")) + + answer = { + "kind": "result", + "output_values": {"fibonacci": sequence}, + "output_manifest": None, + } + + click.echo(json.dumps(answer, cls=OctueJSONEncoder)) + return + service_configuration = ServiceConfiguration.from_file(service_config, allow_not_found=True) if service_configuration: @@ -127,9 +131,6 @@ def remote(sruid, input_values, input_manifest, project_id, asynchronous, servic else: service_registries = None - if input_values: - input_values = json.loads(input_values, cls=OctueJSONDecoder) - if input_manifest: input_manifest = Manifest.deserialise(input_manifest, from_string=True) @@ -160,7 +161,7 @@ def remote(sruid, input_values, input_manifest, project_id, asynchronous, servic click.echo(json.dumps(answer, cls=OctueJSONEncoder)) -@ask.command() +@question.command() @click.option( "-i", "--input-values", @@ -192,10 +193,10 @@ def remote(sruid, input_values, input_manifest, project_id, asynchronous, servic "`OCTUE_SERVICE_CONFIGURATION_PATH` environment variable is used if present, otherwise the local path `octue.yaml` " "is used.", ) -def local(input_values, input_manifest, attributes, service_config): +def ask_local(input_values, input_manifest, attributes, service_config): """Ask a question to a local Octue Twined service. - This command is similar to running `octue twined start` and asking the resulting local service revision a question + This command is similar to running `octue twined service start` and asking the resulting local service revision a question via Pub/Sub. 
Instead of starting a local Pub/Sub service revision, however, no Pub/Sub subscription or subscriber is created; the question is instead passed directly to local the service revision without Pub/Sub being involved. Everything after this runs the same, though, with the service revision emitting any events via Pub/Sub as usual. @@ -637,7 +638,6 @@ def start(service_config, revision_tag, timeout, no_rm): run_function = functools.partial( runner.run, analysis_log_level=global_cli_context["log_level"], - analysis_log_handler=global_cli_context["log_handler"], ) backend_configuration_values = (service_configuration.configuration_values or {}).get("backend") diff --git a/octue/definitions.py b/octue/definitions.py index c036e1f43..88a08f36d 100644 --- a/octue/definitions.py +++ b/octue/definitions.py @@ -1,4 +1,9 @@ import importlib.metadata +import os GOOGLE_COMPUTE_PROVIDERS = {"GOOGLE_CLOUD_FUNCTION"} LOCAL_SDK_VERSION = importlib.metadata.version("octue") + +_root_dir = os.path.dirname(os.path.abspath(__file__)) +TEMPLATES_PATH = os.path.join(_root_dir, "twined", "templates") +DATA_PATH = os.path.join(os.path.dirname(_root_dir), "tests", "data") diff --git a/octue/log_handlers.py b/octue/log_handlers.py index 85658947a..b30a1a08f 100644 --- a/octue/log_handlers.py +++ b/octue/log_handlers.py @@ -1,7 +1,6 @@ import logging import logging.handlers import os -from urllib.parse import urlparse from octue.definitions import GOOGLE_COMPUTE_PROVIDERS @@ -150,43 +149,6 @@ def apply_log_handler( return handler -def get_remote_handler( - logger_uri, - formatter=None, - include_line_number=False, - include_process_name=False, - include_thread_name=False, -): - """Get a log handler for streaming logs to a remote URI accessed via HTTP or HTTPS. The default octue log formatter - is used if no formatter is provided. 
- - :param str logger_uri: the URI to stream the logs to - :param logging.Formatter|None formatter: if provided, this formatter is used and the other formatting options are ignored - :param bool include_line_number: if `True`, include the line number in the log context - :param bool include_process_name: if `True`, include the process name in the log context - :param bool include_thread_name: if `True`, include the thread name in the log context - :return logging.Handler: - """ - parsed_uri = urlparse(logger_uri) - - if parsed_uri.scheme not in {"ws", "wss"}: - raise ValueError( - f"Only WS and WSS protocols currently supported for remote logger URI. Received {logger_uri!r}." - ) - - handler = logging.handlers.SocketHandler(host=parsed_uri.hostname, port=parsed_uri.port) - - formatter = formatter or create_octue_formatter( - get_log_record_attributes_for_environment(), - include_line_number=include_line_number, - include_process_name=include_process_name, - include_thread_name=include_thread_name, - ) - - handler.setFormatter(formatter) - return handler - - def get_log_record_attributes_for_environment(): """Get the correct log record attributes for the environment. 
If the environment is in Google Cloud, get log record attributes not including the timestamp in the log context to avoid the date appearing twice in the Google Cloud diff --git a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python310 b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python310 index 4e92e9443..21049fb13 100644 --- a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python310 +++ b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python310 @@ -36,4 +36,4 @@ RUN if [ -f "pyproject.toml" ]; then poetry install --only main; \ ENV USE_OCTUE_LOG_HANDLER=1 ENV COMPUTE_PROVIDER=GOOGLE_KUEUE -CMD ["octue", "twined", "question", "ask", "local"] +CMD ["octue", "twined", "question", "ask-local"] diff --git a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python311 b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python311 index 8cd98448b..1e6c5a64f 100644 --- a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python311 +++ b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python311 @@ -36,4 +36,4 @@ RUN if [ -f "pyproject.toml" ]; then poetry install --only main; \ ENV USE_OCTUE_LOG_HANDLER=1 ENV COMPUTE_PROVIDER=GOOGLE_KUEUE -CMD ["octue", "twined", "question", "ask", "local"] +CMD ["octue", "twined", "question", "ask-local"] diff --git a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python312 b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python312 index 750ea49e5..2430425c5 100644 --- a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python312 +++ b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python312 @@ -36,4 +36,4 @@ RUN if [ -f "pyproject.toml" ]; then poetry install --only main; \ ENV USE_OCTUE_LOG_HANDLER=1 ENV COMPUTE_PROVIDER=GOOGLE_KUEUE -CMD ["octue", "twined", "question", "ask", "local"] +CMD ["octue", "twined", "question", "ask-local"] diff --git a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python313 b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python313 
index 03be44435..8cef4183c 100644 --- a/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python313 +++ b/octue/twined/cloud/deployment/dockerfiles/Dockerfile-python313 @@ -36,4 +36,4 @@ RUN if [ -f "pyproject.toml" ]; then poetry install --only main; \ ENV USE_OCTUE_LOG_HANDLER=1 ENV COMPUTE_PROVIDER=GOOGLE_KUEUE -CMD ["octue", "twined", "question", "ask", "local"] +CMD ["octue", "twined", "question", "ask-local"] diff --git a/octue/twined/configuration.py b/octue/twined/configuration.py index d13e883fa..5c0b7f1ae 100644 --- a/octue/twined/configuration.py +++ b/octue/twined/configuration.py @@ -3,6 +3,8 @@ import yaml +from octue.twined.definitions import DISALLOWED_SERVICE_NAMESPACES + logger = logging.getLogger(__name__) @@ -49,7 +51,12 @@ def __init__( **kwargs, ): self.name = name - self.namespace = namespace + + if namespace in DISALLOWED_SERVICE_NAMESPACES: + raise ValueError(f"{namespace!r} is not an allowed Twined service namespace.") + else: + self.namespace = namespace + self.diagnostics_cloud_path = diagnostics_cloud_path self.service_registries = service_registries self.event_store_table_id = event_store_table_id diff --git a/octue/twined/definitions.py b/octue/twined/definitions.py index 95cbb49dc..3190537f5 100644 --- a/octue/twined/definitions.py +++ b/octue/twined/definitions.py @@ -3,3 +3,4 @@ OUTPUT_STRANDS = ("output_values", "output_manifest") RUN_STRANDS = ("input_values", "input_manifest", "credentials", "children") DEFAULT_MAXIMUM_HEARTBEAT_INTERVAL = 360 +DISALLOWED_SERVICE_NAMESPACES = {"example"} diff --git a/octue/twined/metadata/version_compatibilities.json b/octue/twined/metadata/version_compatibilities.json index 34ed07ec5..cf251fcd2 100644 --- a/octue/twined/metadata/version_compatibilities.json +++ b/octue/twined/metadata/version_compatibilities.json @@ -58,7 +58,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.40.1": { "0.40.1": true, @@ -119,7 +120,8 @@ 
"0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.40.2": { "0.41.0": true, @@ -180,7 +182,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.41.0": { "0.41.0": true, @@ -241,7 +244,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.41.1": { "0.41.1": true, @@ -302,7 +306,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.42.0": { "0.42.0": true, @@ -363,7 +368,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.42.1": { "0.43.2": true, @@ -424,7 +430,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.0": { "0.43.2": true, @@ -485,7 +492,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.1": { "0.43.2": true, @@ -546,7 +554,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.2": { "0.43.2": true, @@ -607,7 +616,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.3": { "0.43.3": true, @@ -668,7 +678,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.4": { "0.43.4": true, @@ -729,7 +740,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.5": { "0.43.5": true, @@ -790,7 +802,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.43.6": { "0.43.6": true, @@ -851,7 +864,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, 
"0.43.7": { "0.43.7": true, @@ -912,7 +926,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.44.0": { "0.44.0": true, @@ -973,7 +988,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.45.0": { "0.45.0": true, @@ -1034,7 +1050,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.46.0": { "0.46.0": true, @@ -1095,7 +1112,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.46.1": { "0.46.1": true, @@ -1156,7 +1174,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.46.2": { "0.46.2": true, @@ -1217,7 +1236,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.46.3": { "0.46.3": true, @@ -1278,7 +1298,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.47.0": { "0.47.0": true, @@ -1339,7 +1360,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.47.1": { "0.47.1": true, @@ -1400,7 +1422,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.47.2": { "0.47.2": true, @@ -1461,7 +1484,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.48.0": { "0.48.0": true, @@ -1522,7 +1546,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.49.0": { "0.49.1": true, @@ -1583,7 +1608,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.49.1": { "0.49.1": true, @@ -1644,7 +1670,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": 
false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.49.2": { "0.49.2": true, @@ -1705,7 +1732,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.50.0": { "0.50.0": true, @@ -1766,7 +1794,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.50.1": { "0.51.0": false, @@ -1827,7 +1856,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.51.0": { "0.51.0": true, @@ -1888,7 +1918,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.52.0": { "0.51.0": true, @@ -1949,7 +1980,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.52.1": { "0.51.0": true, @@ -2010,7 +2042,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.52.2": { "0.51.0": true, @@ -2071,7 +2104,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.53.0": { "0.51.0": false, @@ -2132,7 +2166,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.54.0": { "0.51.0": false, @@ -2193,7 +2228,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.55.0": { "0.51.0": false, @@ -2254,7 +2290,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.56.0": { "0.51.0": false, @@ -2315,7 +2352,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.57.0": { "0.51.0": false, @@ -2376,7 +2414,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.57.1": { 
"0.51.0": false, @@ -2437,7 +2476,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.57.2": { "0.51.0": false, @@ -2498,7 +2538,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.58.0": { "0.51.0": false, @@ -2559,7 +2600,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.59.0": { "0.51.0": false, @@ -2620,7 +2662,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.59.1": { "0.51.0": false, @@ -2681,7 +2724,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.60.0": { "0.51.0": false, @@ -2742,7 +2786,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.60.1": { "0.51.0": false, @@ -2803,7 +2848,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.60.2": { "0.51.0": false, @@ -2864,7 +2910,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.61.0": { "0.51.0": false, @@ -2925,7 +2972,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.61.1": { "0.51.0": false, @@ -2986,7 +3034,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.61.2": { "0.51.0": false, @@ -3047,7 +3096,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.62.0": { "0.51.0": false, @@ -3108,7 +3158,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.62.1": { "0.51.0": false, @@ -3169,7 +3220,8 @@ "0.66.0": false, "0.66.1": false, 
"0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.63.0": { "0.51.0": false, @@ -3230,7 +3282,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.64.0": { "0.51.0": false, @@ -3291,7 +3344,8 @@ "0.66.0": true, "0.66.1": true, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.65.0": { "0.51.0": false, @@ -3352,7 +3406,8 @@ "0.66.0": true, "0.66.1": true, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.66.0": { "0.51.0": false, @@ -3413,7 +3468,8 @@ "0.66.0": true, "0.66.1": true, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.66.1": { "0.51.0": false, @@ -3474,7 +3530,8 @@ "0.66.0": true, "0.66.1": true, "0.67.0": false, - "0.68.0": false + "0.68.0": false, + "0.69.0": false }, "0.67.0": { "0.51.0": false, @@ -3535,7 +3592,8 @@ "0.66.0": false, "0.66.1": false, "0.67.0": true, - "0.68.0": true + "0.68.0": true, + "0.69.0": false }, "0.68.0": { "0.51.0": false, @@ -3596,6 +3654,69 @@ "0.66.0": false, "0.66.1": false, "0.67.0": true, - "0.68.0": true + "0.68.0": true, + "0.69.0": false + }, + "0.69.0": { + "0.51.0": false, + "0.50.1": false, + "0.50.0": false, + "0.49.2": false, + "0.49.1": false, + "0.49.0": false, + "0.48.0": false, + "0.47.2": false, + "0.47.1": false, + "0.47.0": false, + "0.46.3": false, + "0.46.2": false, + "0.46.1": false, + "0.46.0": false, + "0.45.0": false, + "0.44.0": false, + "0.43.7": false, + "0.43.6": false, + "0.43.5": false, + "0.43.4": false, + "0.43.3": false, + "0.43.2": false, + "0.43.1": false, + "0.43.0": false, + "0.42.1": false, + "0.42.0": false, + "0.41.1": false, + "0.41.0": false, + "0.40.2": false, + "0.40.1": false, + "0.40.0": false, + "0.52.0": false, + "0.52.1": false, + "0.52.2": false, + "0.53.0": false, + "0.54.0": false, + "0.55.0": false, + "0.56.0": false, + "0.57.0": false, + "0.57.1": false, + "0.57.2": false, + "0.58.0": false, + 
"0.59.0": false, + "0.59.1": false, + "0.60.0": false, + "0.60.1": false, + "0.60.2": false, + "0.61.0": false, + "0.61.1": false, + "0.61.2": false, + "0.62.0": false, + "0.62.1": false, + "0.63.0": false, + "0.64.0": false, + "0.65.0": false, + "0.66.0": false, + "0.66.1": false, + "0.67.0": false, + "0.68.0": false, + "0.69.0": true } } diff --git a/octue/twined/resources/child.py b/octue/twined/resources/child.py index ea311c163..db65fddb6 100644 --- a/octue/twined/resources/child.py +++ b/octue/twined/resources/child.py @@ -2,10 +2,12 @@ import copy import logging import os +import uuid from octue.twined.cloud.pub_sub.service import Service from octue.twined.definitions import DEFAULT_MAXIMUM_HEARTBEAT_INTERVAL from octue.twined.resources import service_backends +from octue.twined.resources.example import calculate_fibonacci_sequence logger = logging.getLogger(__name__) @@ -113,6 +115,17 @@ def ask( :raise Exception: if the question raises an error and `raise_errors=True` :return dict|octue.twined.cloud.pub_sub.subscription.Subscription|Exception|None, str: for a synchronous question, a dictionary containing the keys "output_values" and "output_manifest" from the result (or just an exception if the question fails), and the question UUID; for a question with a push endpoint, the push subscription and the question UUID; for an asynchronous question, `None` and the question UUID """ + if self.id.startswith("example/"): + sequence = calculate_fibonacci_sequence(n=input_values.get("n")) + + answer = { + "kind": "result", + "output_values": {"fibonacci": sequence}, + "output_manifest": None, + } + + return answer, str(uuid.uuid4()) + prevent_retries_when = prevent_retries_when or [] inputs = { @@ -155,7 +168,7 @@ def ask( except Exception as e: logger.error( - "Question %r failed. Run 'octue question diagnostics gs:///%s " + "Question %r failed. 
Run 'octue twined question diagnostics gs:///%s " "--download-datasets' to get the crash diagnostics.", question_uuid, question_uuid, diff --git a/octue/twined/resources/example.py b/octue/twined/resources/example.py new file mode 100644 index 000000000..8bf0c4592 --- /dev/null +++ b/octue/twined/resources/example.py @@ -0,0 +1,40 @@ +import functools +import logging + +logger = logging.getLogger(__name__) + + +def calculate_fibonacci_sequence(n): + """Run an example analysis that calculates the first `n` values of the Fibonacci sequence. + + :param int n: the number of values in the Fibonacci sequence to calculate (must be >= 0) + :return list(int): the sequence + """ + error_message = f"`n` must be an integer >= 0. Received {n!r}." + + if not isinstance(n, int): + raise TypeError(error_message) + + if n < 0: + raise ValueError(error_message) + + logger.info("Starting Fibonacci sequence calculation.") + sequence = [_calculate_fibonacci_value(i) for i in range(n)] + logger.info("Finished Fibonacci sequence calculation.") + return sequence + + +@functools.lru_cache() +def _calculate_fibonacci_value(n): + """Calculate the nth value of the Fibonacci sequence. 
+ + :param int n: the position in the sequence to calculate the value of (must be >= 0) + :return int: the value of the sequence at the nth position + """ + if n == 0: + return 0 + + if n == 1: + return 1 + + return _calculate_fibonacci_value(n - 1) + _calculate_fibonacci_value(n - 2) diff --git a/octue/twined/templates/template.py b/octue/twined/templates/template.py new file mode 100644 index 000000000..7e2645a71 --- /dev/null +++ b/octue/twined/templates/template.py @@ -0,0 +1,37 @@ +import os +import shutil +import sys +import uuid + +from octue.definitions import DATA_PATH, TEMPLATES_PATH + + +class Template: + def __init__(self): + self.start_path = os.getcwd() + self.template_twine = None + self.template_path = None + self.app_test_path = None + self.teardown_templates = [] + + def set_template(self, template): + """Set up the working directory and data paths to run one of the provided templates.""" + self.template_path = os.path.join(TEMPLATES_PATH, template) + self.template_twine = os.path.join(TEMPLATES_PATH, template, "twine.json") + + # Duplicate the template's data/ directory to a test-specific replica + self.app_test_path = os.path.join(DATA_PATH, str(uuid.uuid4())) + shutil.copytree(self.template_path, self.app_test_path) + + # Add this template to the list to remove in teardown + self.teardown_templates.append(self.app_test_path) + sys.path.insert(0, self.app_test_path) + + # Run from within the app folder context + os.chdir(self.app_test_path) + + def cleanup(self): + os.chdir(self.start_path) + for path in self.teardown_templates: + sys.path.remove(path) + shutil.rmtree(path) diff --git a/pyproject.toml b/pyproject.toml index 63fb88f31..7d824ee7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "octue" -version = "0.68.0" +version = "0.69.0" description = "A package providing template applications for data services, and a python SDK to the Octue API." 
readme = "README.md" authors = ["Marcus Lugg ", "Thomas Clark "] diff --git a/tests/base.py b/tests/base.py index 63ccf2b95..139ba6a36 100644 --- a/tests/base.py +++ b/tests/base.py @@ -5,6 +5,7 @@ from octue.cloud import storage from octue.cloud.storage import GoogleCloudStorageClient +from octue.definitions import DATA_PATH from octue.resources import Datafile, Dataset, Manifest from tests import TEST_BUCKET_NAME from tests.twined.base import TestResultModifier @@ -19,23 +20,9 @@ class BaseTestCase(unittest.TestCase): setattr(unittest.TestResult, "startTestRun", test_result_modifier.startTestRun) setattr(unittest.TestResult, "stopTestRun", test_result_modifier.stopTestRun) - def setUp(self): - """Set up the test case by: - - Adding the paths to the test data and app templates directories to the test case - - Making `unittest` ignore excess ResourceWarnings so tests' console outputs are clearer. This has to be done - even if these warnings are ignored elsewhere as unittest forces warnings to be displayed by default. 
- - :return None: - """ - root_dir = os.path.dirname(os.path.abspath(__file__)) - self.data_path = os.path.join(root_dir, "data") - self.templates_path = os.path.join(os.path.dirname(root_dir), "octue", "twined", "templates") - - super().setUp() - def create_valid_dataset(self, **kwargs): """Create a valid dataset with two valid datafiles (they're the same file in this case).""" - path = os.path.join(self.data_path, "basic_files", "configuration", "test-dataset") + path = os.path.join(DATA_PATH, "basic_files", "configuration", "test-dataset") return Dataset( path=path, diff --git a/tests/test_cli.py b/tests/test_cli.py index 1ae688d6b..2ac615703 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -48,7 +48,7 @@ class TestQuestionAskRemoteCommand(BaseTestCase): QUESTION_UUID = "81f35b28-068b-4314-9eeb-e55e60d0fe8a" def test_with_input_values(self): - """Test that the `octue twined question ask remote` CLI command works with just input values.""" + """Test that the `octue twined question ask` CLI command works with just input values.""" with mock.patch("octue.cli.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION): with mock.patch("octue.cli.Child.ask", return_value=(RESULT, self.QUESTION_UUID)) as mock_ask: result = CliRunner().invoke( @@ -57,7 +57,6 @@ def test_with_input_values(self): "twined", "question", "ask", - "remote", self.SRUID, '--input-values={"height": 3}', ], @@ -67,7 +66,7 @@ def test_with_input_values(self): self.assertIn(json.dumps(RESULT), result.output) def test_with_input_manifest(self): - """Test that the `octue twined question ask remote` CLI command works with just an input manifest.""" + """Test that the `octue twined question ask` CLI command works with just an input manifest.""" input_manifest = self.create_valid_manifest() with mock.patch("octue.cli.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION): @@ -78,7 +77,6 @@ def test_with_input_manifest(self): "twined", "question", "ask", - "remote", self.SRUID, 
f"--input-manifest={input_manifest.serialise()}", ], @@ -88,7 +86,7 @@ def test_with_input_manifest(self): self.assertIn(json.dumps(RESULT), result.output) def test_with_input_values_and_manifest(self): - """Test that the `octue twined question ask remote` CLI command works with input values and input manifest.""" + """Test that the `octue twined question ask` CLI command works with input values and input manifest.""" input_values = {"height": 3} input_manifest = self.create_valid_manifest() @@ -100,7 +98,6 @@ def test_with_input_values_and_manifest(self): "twined", "question", "ask", - "remote", self.SRUID, f"--input-values={json.dumps(input_values)}", f"--input-manifest={input_manifest.serialise()}", @@ -112,7 +109,7 @@ def test_with_input_values_and_manifest(self): self.assertIn(json.dumps(RESULT), result.output) def test_with_output_manifest(self): - """Test that the `octue twined question ask remote` CLI command returns output manifests in a useful form.""" + """Test that the `octue twined question ask` CLI command returns output manifests in a useful form.""" result = {"output_values": {"some": "data"}, "output_manifest": self.create_valid_manifest()} with mock.patch("octue.cli.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION): @@ -123,7 +120,6 @@ def test_with_output_manifest(self): "twined", "question", "ask", - "remote", self.SRUID, f"--input-values={json.dumps({'height': 3})}", ], @@ -134,7 +130,7 @@ def test_with_output_manifest(self): self.assertEqual(len(output["output_manifest"]["datasets"]), 2) def test_asynchronous(self): - """Test that the `octue twined question ask remote` CLI command works with the `--asynchronous` option and returns the + """Test that the `octue twined question ask` CLI command works with the `--asynchronous` option and returns the question UUID. 
""" with mock.patch("octue.cli.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION): @@ -145,7 +141,6 @@ def test_asynchronous(self): "twined", "question", "ask", - "remote", self.SRUID, '--input-values={"height": 3}', "--asynchronous", @@ -164,7 +159,6 @@ def test_with_no_service_configuration(self): "twined", "question", "ask", - "remote", self.SRUID, '--input-values={"height": 3}', ], @@ -176,7 +170,7 @@ def test_with_no_service_configuration(self): class TestQuestionAskLocalCommand(BaseTestCase): def test_with_input_values(self): - """Test that the `octue twined question ask local` CLI command works with just input values and sends an originator + """Test that the `octue twined question ask-local` CLI command works with just input values and sends an originator question. """ with mock.patch("octue.cli.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION): @@ -186,8 +180,7 @@ def test_with_input_values(self): [ "twined", "question", - "ask", - "local", + "ask-local", '--input-values={"height": 3}', ], ) @@ -214,7 +207,7 @@ def test_with_input_values(self): self.assertIn(json.dumps(RESULT), result.output) def test_with_input_manifest(self): - """Test that the `octue twined question ask local` CLI command works with just an input manifest and sends an + """Test that the `octue twined question ask-local` CLI command works with just an input manifest and sends an originator question. 
""" input_manifest = self.create_valid_manifest() @@ -225,8 +218,7 @@ def test_with_input_manifest(self): [ "twined", "question", - "ask", - "local", + "ask-local", f"--input-manifest={input_manifest.serialise()}", ], ) @@ -253,7 +245,7 @@ def test_with_input_manifest(self): self.assertIn(json.dumps(RESULT), result.output) def test_with_input_values_and_manifest(self): - """Test that the `octue twined question ask local` CLI command works with input values and input manifest and sends an + """Test that the `octue twined question ask-local` CLI command works with input values and input manifest and sends an originator question. """ input_values = {"height": 3} @@ -266,8 +258,7 @@ def test_with_input_values_and_manifest(self): [ "twined", "question", - "ask", - "local", + "ask-local", f"--input-values={json.dumps(input_values)}", f"--input-manifest={input_manifest.serialise()}", ], @@ -296,7 +287,7 @@ def test_with_input_values_and_manifest(self): self.assertIn(json.dumps(RESULT), result.output) def test_with_output_manifest(self): - """Test that the `octue twined question ask local` CLI command returns output manifests in a useful form.""" + """Test that the `octue twined question ask-local` CLI command returns output manifests in a useful form.""" result = {"output_values": {"some": "data"}, "output_manifest": self.create_valid_manifest()} with mock.patch("octue.cli.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION): @@ -306,8 +297,7 @@ def test_with_output_manifest(self): [ "twined", "question", - "ask", - "local", + "ask-local", f"--input-values={json.dumps({'height': 3})}", ], ) @@ -317,7 +307,7 @@ def test_with_output_manifest(self): self.assertEqual(len(output["output_manifest"]["datasets"]), 2) def test_with_attributes(self): - """Test that the `octue twined question ask remote` CLI command can be passed question attributes which are passed + """Test that the `octue twined question ask-local` CLI command can be passed question attributes which 
are passed along to the answering `Service` instance. """ original_attributes = { @@ -347,8 +337,7 @@ def test_with_attributes(self): [ "twined", "question", - "ask", - "local", + "ask-local", '--input-values={"height": 3}', f"--attributes={json.dumps(original_attributes)}", ], diff --git a/tests/test_log_handlers.py b/tests/test_log_handlers.py index 300f7081d..95674a964 100644 --- a/tests/test_log_handlers.py +++ b/tests/test_log_handlers.py @@ -1,14 +1,9 @@ import importlib -import logging import os import sys from unittest import mock -from octue.log_handlers import ( - LOG_RECORD_ATTRIBUTES_WITH_TIMESTAMP, - LOG_RECORD_ATTRIBUTES_WITHOUT_TIMESTAMP, - get_remote_handler, -) +from octue.log_handlers import LOG_RECORD_ATTRIBUTES_WITH_TIMESTAMP, LOG_RECORD_ATTRIBUTES_WITHOUT_TIMESTAMP from tests.base import BaseTestCase @@ -83,32 +78,3 @@ def test_extra_log_record_attributes_are_included_if_relevant_environment_variab include_process_name=True, include_thread_name=True, ) - - -class TestGetRemoteHandler(BaseTestCase): - def test_get_remote_handler_parses_ws_properly(self): - """Assert that the remote log handler parses URIs properly.""" - handler = get_remote_handler(logger_uri="ws://0.0.0.1:3000") - assert handler.host == "0.0.0.1" - assert handler.port == 3000 - - def test_wss_is_supported(self): - """Test that HTTPS is supported by the remote log handler.""" - handler = get_remote_handler(logger_uri="wss://0.0.0.1:3000/log") - assert handler.host == "0.0.0.1" - assert handler.port == 3000 - - def test_non_ws_or_wss_protocol_raises_error(self): - """Ensure an error is raised if a protocol other than HTTP or HTTPS is used for the logger URI.""" - with self.assertRaises(ValueError): - get_remote_handler(logger_uri="https://0.0.0.1:3000/log") - - def test_remote_logger_emits_messages(self): - """Test that the remote log handler emits messages.""" - logger = logging.getLogger("test-logger") - logger.addHandler(get_remote_handler(logger_uri="wss://0.0.0.0:80")) - 
logger.setLevel("DEBUG") - - with mock.patch("logging.handlers.SocketHandler.emit") as mock_emit: - logger.debug("Hello") - mock_emit.assert_called() diff --git a/tests/twined/templates/test_template_apps.py b/tests/twined/templates/test_template_apps.py index cce2d531e..e53bf010e 100644 --- a/tests/twined/templates/test_template_apps.py +++ b/tests/twined/templates/test_template_apps.py @@ -1,5 +1,4 @@ import os -import shutil import subprocess import sys import time @@ -15,6 +14,7 @@ from octue.twined.cloud.emulators import ChildEmulator from octue.twined.cloud.service_id import create_sruid from octue.twined.runner import Runner +from octue.twined.templates.template import Template from octue.utils.processes import ProcessesContextManager from tests import MOCK_SERVICE_REVISION_TAG, TEST_BUCKET_NAME from tests.base import BaseTestCase @@ -25,11 +25,11 @@ class TemplateAppsTestCase(BaseTestCase): def test_fractal_template_with_default_configuration(self): """Ensure the `fractal` app can be configured with its default configuration and run.""" - self.set_template("template-fractal") + self.template.set_template("template-fractal") runner = Runner( - app_src=self.template_path, - twine=self.template_twine, + app_src=self.template.template_path, + twine=self.template.template_twine, configuration_values=os.path.join("data", "configuration", "configuration_values.json"), ) @@ -37,12 +37,12 @@ def test_fractal_template_with_default_configuration(self): def test_using_manifests_template(self): """Ensure the `using-manifests` template app works correctly.""" - self.set_template("template-using-manifests") + self.template.set_template("template-using-manifests") output_location = f"gs://{TEST_BUCKET_NAME}" runner = Runner( - app_src=self.template_path, - twine=self.template_twine, + app_src=self.template.template_path, + twine=self.template.template_twine, configuration_values=os.path.join("data", "configuration", "values.json"), output_location=output_location, ) @@ 
-70,9 +70,9 @@ def test_child_services_template(self): parent sends coordinates to both children, receiving the elevation and wind speed from them at these locations. """ cli_path = os.path.join(REPOSITORY_ROOT, "octue", "cli.py") - self.set_template("template-child-services") + self.template.set_template("template-child-services") - elevation_service_path = os.path.join(self.template_path, "elevation_service") + elevation_service_path = os.path.join(self.template.template_path, "elevation_service") elevation_service_revision_tag = str(uuid.uuid4()) elevation_process = subprocess.Popen( @@ -88,7 +88,7 @@ def test_child_services_template(self): env={**os.environ, "OCTUE_SERVICE_REVISION_TAG": elevation_service_revision_tag}, ) - wind_speed_service_path = os.path.join(self.template_path, "wind_speed_service") + wind_speed_service_path = os.path.join(self.template.template_path, "wind_speed_service") wind_speed_service_revision_tag = str(uuid.uuid4()) wind_speed_process = subprocess.Popen( @@ -104,7 +104,7 @@ def test_child_services_template(self): env={**os.environ, "OCTUE_SERVICE_REVISION_TAG": wind_speed_service_revision_tag}, ) - parent_service_path = os.path.join(self.template_path, "parent_service") + parent_service_path = os.path.join(self.template.template_path, "parent_service") namespace = "template-child-services" with open(os.path.join(parent_service_path, "octue.yaml")) as f: @@ -140,8 +140,8 @@ def test_child_services_template(self): def test_child_services_template_using_emulated_children(self): """Test the child services template app using emulated children.""" - self.set_template("template-child-services") - parent_service_path = os.path.join(self.template_path, "parent_service") + self.template.set_template("template-child-services") + parent_service_path = os.path.join(self.template.template_path, "parent_service") with open(os.path.join(parent_service_path, "octue.yaml")) as f: children = yaml.safe_load(f)["services"][0]["children"] @@ -198,36 
+198,9 @@ def setUp(self): :return None: """ super().setUp() - self.start_path = os.getcwd() - - # Initialise so these variables are assigned on the instance - self.template_twine = None - self.template_path = None - self.app_test_path = None - self.teardown_templates = [] - - def set_template(template): - """Set up the working directory and data paths to run one of the provided templates.""" - self.template_path = os.path.join(self.templates_path, template) - self.template_twine = os.path.join(self.templates_path, template, "twine.json") - - # Duplicate the template's data/ directory to a test-specific replica - self.app_test_path = os.path.join(self.data_path, str(uuid.uuid4())) - shutil.copytree(self.template_path, self.app_test_path) - - # Add this template to the list to remove in teardown - self.teardown_templates.append(self.app_test_path) - sys.path.insert(0, self.app_test_path) - - # Run from within the app folder context - os.chdir(self.app_test_path) - - self.set_template = set_template + self.template = Template() def tearDown(self): """Remove the temporary template app directories.""" super().tearDown() - os.chdir(self.start_path) - for path in self.teardown_templates: - sys.path.remove(path) - shutil.rmtree(path) + self.template.cleanup() diff --git a/tests/twined/test_configuration.py b/tests/twined/test_configuration.py new file mode 100644 index 000000000..f4bfd653a --- /dev/null +++ b/tests/twined/test_configuration.py @@ -0,0 +1,11 @@ +from octue.twined.configuration import ServiceConfiguration +from tests.base import BaseTestCase + + +class TestServiceConfiguration(BaseTestCase): + def test_error_raised_if_namespace_disallowed(self): + """Test that an error is raised if a disallowed namespace is given.""" + with self.assertRaises(ValueError) as error_context: + ServiceConfiguration(namespace="example", name="service") + + self.assertEqual(error_context.exception.args[0], "'example' is not an allowed Twined service namespace.")