diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 95404e233..d3096ef43 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -77,7 +77,7 @@ steps:
       useVault: true
       image: family/enterprise-search-ubuntu-2204-connectors-py

-  - label: ":sweating: Checking for changes in connectors"
+  - label: ":sweating: Checking for changes in connectors_service"
    key: "relevant_ftests"
    plugins:
      monorepo-diff#v1.1.0:
@@ -85,7 +85,7 @@ steps:
          wait: false
          watch:
            - path:
-                - "connectors/sources/mysql.py"
+                - "connectors_service/sources/mysql.py"
                - "tests/sources/fixtures/mysql/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -104,7 +104,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/network_drive.py"
+                - "connectors_service/sources/network_drive.py"
                - "tests/sources/fixtures/network_drive/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -123,7 +123,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/s3.py"
+                - "connectors_service/sources/s3.py"
                - "tests/sources/fixtures/s3/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -142,7 +142,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/google_cloud_storage.py"
+                - "connectors_service/sources/google_cloud_storage.py"
                - "tests/sources/fixtures/google_cloud_storage/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -161,7 +161,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/azure_blob_storage.py"
+                - "connectors_service/sources/azure_blob_storage.py"
                - "tests/sources/fixtures/azure_blob_storage/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -180,7 +180,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/postgresql.py"
+                - "connectors_service/sources/postgresql.py"
                - "tests/sources/fixtures/postgresql/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -199,7 +199,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/directory.py"
+                - "connectors_service/sources/directory.py"
                - "tests/sources/fixtures/dir/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -218,7 +218,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/oracle.py"
+                - "connectors_service/sources/oracle.py"
                - "tests/sources/fixtures/oracle/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -237,7 +237,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/sharepoint_server.py"
+                - "connectors_service/sources/sharepoint_server.py"
                - "tests/sources/fixtures/sharepoint_server/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -256,7 +256,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/sharepoint_online.py"
+                - "connectors_service/sources/sharepoint_online.py"
                - "tests/sources/fixtures/sharepoint_online/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -275,7 +275,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/mssql.py"
+                - "connectors_service/sources/mssql.py"
                - "tests/sources/fixtures/mssql/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -294,8 +294,8 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/jira.py"
-                - "connectors/sources/atlassian.py"
+                - "connectors_service/sources/jira.py"
+                - "connectors_service/sources/atlassian.py"
                - "tests/sources/fixtures/jira/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -314,8 +314,8 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/confluence.py"
-                - "connectors/sources/atlassian.py"
+                - "connectors_service/sources/confluence.py"
+                - "connectors_service/sources/atlassian.py"
                - "tests/sources/fixtures/confluence/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -334,7 +334,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/servicenow.py"
+                - "connectors_service/sources/servicenow.py"
                - "tests/sources/fixtures/servicenow/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -353,7 +353,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/mongo.py"
+                - "connectors_service/sources/mongo.py"
                - "tests/sources/fixtures/mongodb/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -372,7 +372,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/github.py"
+                - "connectors_service/sources/github.py"
                - "tests/sources/fixtures/github/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -391,7 +391,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/google_drive.py"
+                - "connectors_service/sources/google_drive.py"
                - "tests/sources/fixtures/google_drive/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -410,7 +410,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/dropbox.py"
+                - "connectors_service/sources/dropbox.py"
                - "tests/sources/fixtures/dropbox/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -429,7 +429,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/onedrive.py"
+                - "connectors_service/sources/onedrive.py"
                - "tests/sources/fixtures/onedrive/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -448,7 +448,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/salesforce.py"
+                - "connectors_service/sources/salesforce.py"
                - "tests/sources/fixtures/salesforce/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -467,7 +467,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/zoom.py"
+                - "connectors_service/sources/zoom.py"
                - "tests/sources/fixtures/zoom/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -486,7 +486,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/box.py"
+                - "connectors_service/sources/box.py"
                - "tests/sources/fixtures/box/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -505,7 +505,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/microsoft_teams.py"
+                - "connectors_service/sources/microsoft_teams.py"
                - "tests/sources/fixtures/microsoft_teams/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -524,7 +524,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/notion.py"
+                - "connectors_service/sources/notion.py"
                - "tests/sources/fixtures/notion/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -543,7 +543,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/redis.py"
+                - "connectors_service/sources/redis.py"
                - "tests/sources/fixtures/redis/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -562,7 +562,7 @@ steps:
                  - "perf8-report-*/**/*"

            - path:
-                - "connectors/sources/graphql.py"
+                - "connectors_service/sources/graphql.py"
                - "tests/sources/fixtures/graphql/**"
                - "tests/sources/fixtures/fixture.py"
                - "${DOCKERFILE_FTEST_PATH}"
@@ -665,8 +665,8 @@ steps:
    env:
      ARCHITECTURE: "amd64"
      DOCKERFILE_PATH: "Dockerfile"
-      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-oss-dockerfile"
-      DOCKER_ARTIFACT_KEY: "elastic-connectors-oss-dockerfile"
+      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-service-oss-dockerfile"
+      DOCKER_ARTIFACT_KEY: "elastic-connectors-service-oss-dockerfile"
    command: ".buildkite/publish/build-docker.sh"
    key: "build_oss_dockerfile_image_amd64"
    artifact_paths: ".artifacts/*.tar.gz"
@@ -678,8 +678,8 @@ steps:
    env:
      ARCHITECTURE: "amd64"
      DOCKERFILE_PATH: "Dockerfile"
-      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-oss-dockerfile"
-      DOCKER_ARTIFACT_KEY: "elastic-connectors-oss-dockerfile"
+      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-service-oss-dockerfile"
+      DOCKER_ARTIFACT_KEY: "elastic-connectors-service-oss-dockerfile"
    depends_on: "build_oss_dockerfile_image_amd64"
    key: "test_oss_dockerfile_image_amd64"
    commands:
@@ -714,8 +714,8 @@ steps:
    env:
      ARCHITECTURE: "arm64"
      DOCKERFILE_PATH: "Dockerfile"
-      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-oss-dockerfile"
-      DOCKER_ARTIFACT_KEY: "elastic-connectors-oss-dockerfile"
+      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-service-oss-dockerfile"
+      DOCKER_ARTIFACT_KEY: "elastic-connectors-service-oss-dockerfile"
    command: ".buildkite/publish/build-docker.sh"
    key: "build_oss_dockerfile_image_arm64"
    artifact_paths: ".artifacts/*.tar.gz"
@@ -729,8 +729,8 @@ steps:
    env:
      ARCHITECTURE: "arm64"
      DOCKERFILE_PATH: "Dockerfile"
-      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-oss-dockerfile"
-      DOCKER_ARTIFACT_KEY: "elastic-connectors-oss-dockerfile"
+      DOCKER_IMAGE_NAME: "docker.elastic.co/ci-agent-images/elastic-connectors-service-oss-dockerfile"
+      DOCKER_ARTIFACT_KEY: "elastic-connectors-service-oss-dockerfile"
    depends_on: "build_oss_dockerfile_image_arm64"
    key: "test_oss_dockerfile_image_arm64"
    commands:
diff --git a/.buildkite/publish/dra/init_dra_publishing.sh b/.buildkite/publish/dra/init_dra_publishing.sh
index 2bdf5715d..276977661 100755
--- a/.buildkite/publish/dra/init_dra_publishing.sh
+++ b/.buildkite/publish/dra/init_dra_publishing.sh
@@ -138,7 +138,7 @@ if [[ "${PUBLISH_SNAPSHOT:-}" == "true" ]]; then
  generateDependencyReport $DEPENDENCIES_REPORTS_DIR/$dependencyReportName

  echo "-------- Publishing SNAPSHOT DRA Artifacts"
-  cp $RELEASE_DIR/dist/elasticsearch_connectors-${VERSION}.zip $DRA_ARTIFACTS_DIR/connectors-${VERSION}-SNAPSHOT.zip
+  cp $RELEASE_DIR/dist/elasticsearch_connectors_service-${VERSION}.zip $DRA_ARTIFACTS_DIR/connectors-service-${VERSION}-SNAPSHOT.zip
  cp $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-docker-image-linux-amd64.tar.gz $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-SNAPSHOT-docker-image-linux-amd64.tar.gz
  cp $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-docker-image-linux-arm64.tar.gz $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-SNAPSHOT-docker-image-linux-arm64.tar.gz
  setDraVaultCredentials
diff --git a/.buildkite/publish/publish-common.sh b/.buildkite/publish/publish-common.sh
index cda767571..b22e065bc 100644
--- a/.buildkite/publish/publish-common.sh
+++ b/.buildkite/publish/publish-common.sh
@@ -36,7 +36,7 @@ elif [[ -n "${VERSION_QUALIFIER:-}" ]]; then
fi

# Create a build.yaml file for reference after build process
-cat <<EOL > connectors/build.yaml
+cat <<EOL > connectors_service/build.yaml
version: "$version"
qualifier: "$version_qualifier"
revision: "$revision" @@ -44,7 +44,7 @@ repository: "$repository" EOL echo "Created connectors/build.yaml file:" -cat connectors/build.yaml +cat connectors_service/build.yaml if [[ "${MANUAL_RELEASE:-}" == "true" ]]; then # This block is for out-of-band releases, triggered by the release-pipeline @@ -64,9 +64,9 @@ else export DOCKER_TAG_VERSION=${VERSION} fi -export BASE_TAG_NAME=${DOCKER_IMAGE_NAME:-docker.elastic.co/integrations/elastic-connectors} +export BASE_TAG_NAME=${DOCKER_IMAGE_NAME:-docker.elastic.co/integrations/elastic-connectors-service} export DOCKERFILE_PATH=${DOCKERFILE_PATH:-Dockerfile} -export PROJECT_NAME=${PROJECT_NAME:-elastic-connectors} +export PROJECT_NAME=${PROJECT_NAME:-elastic-connectors-service} export DOCKER_ARTIFACT_KEY=${DOCKER_ARTIFACT_KEY:-${PROJECT_NAME}-docker} export VAULT_ADDR=${VAULT_ADDR:-https://vault-ci-prod.elastic.dev} export VAULT_USER="docker-swiftypeadmin" diff --git a/.buildkite/pypi-publish-pipeline.yml b/.buildkite/pypi-publish-pipeline.yml new file mode 100644 index 000000000..ad17464c2 --- /dev/null +++ b/.buildkite/pypi-publish-pipeline.yml @@ -0,0 +1,104 @@ +## .buildkite/pypi-publish-pipeline.yml +# Manually-triggered pipeline to build and publish Python packages to PyPI +notify: + - if: 'build.branch =~ /^((main)|([0-9]+\.[0-9]+))$/ && (build.state == "failed" || pipeline.started_passing)' + slack: + channels: + - "#search-et-alerts" + message: "${BUILDKITE_MESSAGE}" + +# add paths for all the packages we want to build here +# then make sure they're added to the matrix.setup.package_path lists in the steps below +env: + SERVICE_PACKAGE_PATH: "app/connectors_service" + +steps: + - group: ":building_construction: Build `connectors_service` Python Package" + key: "build_package" + steps: + - label: ":python: Build Python {{matrix.python_version}} {{matrix.package_path}} package" + key: build_python_package + agents: + provider: "gcp" + machineType: "n1-standard-8" + useVault: true + image: family/enterprise-search-ubuntu-2204-connectors-py + matrix: + setup: + python_version: + - "3.10" + - "3.11" + package_path: + - "${SERVICE_PACKAGE_PATH}" + commands: + - "cd {{matrix.package_path}}" + - "python{{matrix.python_version}} -m pip install --upgrade build twine" + - "python{{matrix.python_version}} -m build" + - "ls -lah dist/" + - "python{{matrix.python_version}} -m twine check dist/*" + artifact_paths: + - "{{matrix.package_path}}/dist/*" + + - wait + + - group: ":test_tube: Publish to Test PyPI" + key: "publish_to_test_pypi" + depends_on: + - build_package + steps: + - label: ":package: Publish to Test PyPI" + key: publish_test_pypi + agents: + provider: "gcp" + machineType: "n1-standard-8" + useVault: true + image: family/enterprise-search-ubuntu-2204-connectors-py + env: + TWINE_USERNAME: "__token__" + matrix: + setup: + python_version: + - "3.10" + - "3.11" + package_path: + - "${SERVICE_PACKAGE_PATH}" + commands: + # splitting the assignment in 2 lines to avoid leaking the key in the buildkite logs + - "TWINE_PASSWORD=$(vault read -field publishing-api-key secret/ent-search-team/test-pypi-ent-search-dev)" + - "export TWINE_PASSWORD" + - "mkdir -p {{matrix.package_path}}/dist" + - "buildkite-agent artifact download '{{matrix.package_path}}/dist/*' ." 
+ - "cd {{matrix.package_path}}" + - "python{{matrix.python_version}} -m pip install --upgrade twine" + - "python{{matrix.python_version}} -m twine upload --repository testpypi dist/*" + + - group: ":truck: Publish to Production PyPI" + key: "publish_to_pypi" + depends_on: + - publish_to_test_pypi + steps: + - label: ":package: Publish to Production PyPI" + key: publish_pypi + agents: + provider: "gcp" + machineType: "n1-standard-8" + useVault: true + image: family/enterprise-search-ubuntu-2204-connectors-py + env: + TWINE_USERNAME: "__token__" + matrix: + setup: + python_version: + - "3.10" + - "3.11" + package_path: + - "${SERVICE_PACKAGE_PATH}" + commands: + # splitting the assignment in 2 lines to avoid leaking the key in the buildkite logs + - "TWINE_PASSWORD=$(vault read -field publishing-api-key secret/ent-search-team/pypi-ent-search-dev)" + - "export TWINE_PASSWORD" + - "mkdir -p {{matrix.package_path}}/dist" + - "buildkite-agent artifact download '{{matrix.package_path}}/dist/*' ." + - "cd {{matrix.package_path}}" + - "python{{matrix.python_version}} -m pip install --upgrade twine" + - "python{{matrix.python_version}} -m twine upload --repository pypi dist/*" diff --git a/.buildkite/release-pipeline.yml b/.buildkite/release-pipeline.yml index 591360de2..4b60c9361 100644 --- a/.buildkite/release-pipeline.yml +++ b/.buildkite/release-pipeline.yml @@ -15,12 +15,12 @@ steps: key: "release_setup" steps: # ---- - # Set the build timestamp (for the verion suffix) + # Set the build timestamp (for the version suffix) # --- - label: "Set build metadata" commands: - buildkite-agent meta-data set timestamp "$(date -u +'%Y%m%d%H%M')" - - buildkite-agent meta-data set orig_version "$(cat connectors/VERSION)" + - buildkite-agent meta-data set orig_version "$(cat app/connectors_service/connectors_service/VERSION)" key: set_timestamp - wait - label: ":github: update version and tag" diff --git a/.buildkite/run_notice_check.sh b/.buildkite/run_notice_check.sh index cf0bbf0da..4b2925a67 100755 --- a/.buildkite/run_notice_check.sh +++ b/.buildkite/run_notice_check.sh @@ -21,8 +21,9 @@ if is_pr && ! 
is_fork; then echo 'New changes to NOTICE.txt:' git --no-pager diff - git add NOTICE.txt - git commit -m"Update NOTICE.txt" + git status --porcelain | grep app/connectors_service/NOTICE.txt && git add app/connectors_service/NOTICE.txt + git status --porcelain | grep lib/connectors_sdk/NOTICE.txt && git add lib/connectors_sdk/NOTICE.txt + git commit -m "Update NOTICE.txt" git push exit 1 diff --git a/.github/workflows/add-labels-main.yml b/.github/workflows/add-labels-main.yml index 290bff105..98c0ca26e 100644 --- a/.github/workflows/add-labels-main.yml +++ b/.github/workflows/add-labels-main.yml @@ -15,7 +15,7 @@ jobs: - id: version uses: juliangruber/read-file-action@ebfa650188272343fef925480eb4d18c5d49b925 with: - path: ./connectors/VERSION + path: ./app/connectors_service/connectors_service/VERSION - uses: actions-ecosystem/action-add-labels@v1 with: labels: | diff --git a/Makefile b/Makefile index ea2bd1ca8..855c2fd5d 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ app_dir := app/connectors_service connectors_sdk_dir := libs/connectors_sdk -VERSION=$(shell cat app/connectors_service/connectors/VERSION) +VERSION=$(shell cat app/connectors_service/connectors_service/VERSION) DOCKER_IMAGE_NAME?=docker.elastic.co/integrations/elastic-connectors DOCKERFILE_PATH?=Dockerfile diff --git a/app/connectors_service/.ruff.toml b/app/connectors_service/.ruff.toml deleted file mode 100644 index 279732ddd..000000000 --- a/app/connectors_service/.ruff.toml +++ /dev/null @@ -1,31 +0,0 @@ -target-version = "py310" - -[lint] -select = ["A", "ASYNC", "I", "E", "F", "B", "C4", "T10", "T20", "EM", "ISC", "S", "CPY001"] -ignore = ["E501", "ISC001"] -preview = true - -# Allow autofix for all enabled rules (when `--fix`) is provided. -fixable = ["A", "B", "C", "C4", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT", "T10", "T20"] -unfixable = [] - -exclude = [ - ".git", - "__pycache__", - "lib", - "bin", - "include" -] -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" - -[lint.per-file-ignores] -"connectors/*" = ["S608"] -"scripts/verify.py" = [ "EM" ] -"tests/*" = ["B017", "S101", "S", "ASYNC110"] -"tests/conftest.py" = [ "EM" ] - -[lint.isort] -known-first-party=["connectors", "tests"] - -[lint.flake8-copyright] -notice-rgx = "#\n# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one\n# or more contributor license agreements. 
Licensed under the Elastic License 2.0;\n# you may not use this file except in compliance with the Elastic License 2.0.\n#" diff --git a/app/connectors_service/MANIFEST.in b/app/connectors_service/MANIFEST.in index f7b6d772b..a3b33b2b6 100644 --- a/app/connectors_service/MANIFEST.in +++ b/app/connectors_service/MANIFEST.in @@ -1,3 +1,3 @@ include Makefile README.md config.yml LICENSE -recursive-include connectors/ *.yml -include connectors/VERSION +recursive-include connectors_service/ *.yml +include connectors_service/VERSION diff --git a/app/connectors_service/Makefile b/app/connectors_service/Makefile index 3090dc485..91b1aec11 100644 --- a/app/connectors_service/Makefile +++ b/app/connectors_service/Makefile @@ -56,25 +56,25 @@ clean: rm -rf bin lib .venv include elasticsearch_connectors.egg-info .coverage site-packages pyvenv.cfg include.site.python*.greenlet dist lint: .venv/bin/python .venv/bin/ruff .venv/bin/elastic-ingest - .venv/bin/ruff check connectors - .venv/bin/ruff format connectors --check + .venv/bin/ruff check connectors_service + .venv/bin/ruff format connectors_service --check .venv/bin/ruff check tests .venv/bin/ruff format tests --check .venv/bin/ruff check scripts .venv/bin/ruff format scripts --check - .venv/bin/pyright connectors + .venv/bin/pyright connectors_service .venv/bin/pyright tests autoformat: .venv/bin/python .venv/bin/ruff .venv/bin/elastic-ingest - .venv/bin/ruff check connectors --fix - .venv/bin/ruff format connectors + .venv/bin/ruff check connectors_service --fix + .venv/bin/ruff format connectors_service .venv/bin/ruff check tests --fix .venv/bin/ruff format tests .venv/bin/ruff check scripts --fix .venv/bin/ruff format scripts test: .venv/bin/python .venv/bin/elastic-ingest .venv/bin/pytest - .venv/bin/pytest --cov-report term-missing --cov-fail-under 90 --cov-report html --cov=connectors --fail-slow=$(SLOW_TEST_THRESHOLD) -sv tests + .venv/bin/pytest --cov-report term-missing --cov-fail-under 90 --cov-report html --cov=connectors_service --fail-slow=$(SLOW_TEST_THRESHOLD) -sv tests ftest: .venv/bin/pytest diff --git a/app/connectors_service/NOTICE.txt b/app/connectors_service/NOTICE.txt index 09012784f..4f217c8a7 100644 --- a/app/connectors_service/NOTICE.txt +++ b/app/connectors_service/NOTICE.txt @@ -2802,6 +2802,11 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +connectors_service +9.2.0 +Apache Software License +UNKNOWN + cron-schedule-triggers 0.0.11 MIT License @@ -3545,11 +3550,6 @@ Apache Software License -elasticsearch-connectors -9.2.0 -Apache Software License -UNKNOWN - elasticsearch-connectors-sdk 9.2.0 Apache Software License @@ -4052,7 +4052,7 @@ Apache Software License google-auth -2.41.0 +2.41.1 Apache Software License Apache License Version 2.0, January 2004 diff --git a/app/connectors_service/connectors/services/__init__.py b/app/connectors_service/connectors/services/__init__.py deleted file mode 100644 index 7a32ebc28..000000000 --- a/app/connectors_service/connectors/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License 2.0; -# you may not use this file except in compliance with the Elastic License 2.0. 
-#
-
-from connectors.services.access_control_sync_job_execution import (
-    AccessControlSyncJobExecutionService,  # NOQA
-)
-from connectors.services.base import get_services  # NOQA
-from connectors.services.content_sync_job_execution import (
-    ContentSyncJobExecutionService,  # NOQA
-)
-from connectors.services.job_cleanup import JobCleanUpService  # NOQA
-from connectors.services.job_scheduling import JobSchedulingService  # NOQA
diff --git a/app/connectors_service/connectors/VERSION b/app/connectors_service/connectors_service/VERSION
similarity index 100%
rename from app/connectors_service/connectors/VERSION
rename to app/connectors_service/connectors_service/VERSION
diff --git a/app/connectors_service/connectors/__init__.py b/app/connectors_service/connectors_service/__init__.py
similarity index 100%
rename from app/connectors_service/connectors/__init__.py
rename to app/connectors_service/connectors_service/__init__.py
diff --git a/app/connectors_service/connectors/access_control.py b/app/connectors_service/connectors_service/access_control.py
similarity index 100%
rename from app/connectors_service/connectors/access_control.py
rename to app/connectors_service/connectors_service/access_control.py
diff --git a/app/connectors_service/connectors/agent/__init__.py b/app/connectors_service/connectors_service/agent/__init__.py
similarity index 100%
rename from app/connectors_service/connectors/agent/__init__.py
rename to app/connectors_service/connectors_service/agent/__init__.py
diff --git a/app/connectors_service/connectors/agent/cli.py b/app/connectors_service/connectors_service/agent/cli.py
similarity index 90%
rename from app/connectors_service/connectors/agent/cli.py
rename to app/connectors_service/connectors_service/agent/cli.py
index 38ed1e77d..c98c64c2f 100644
--- a/app/connectors_service/connectors/agent/cli.py
+++ b/app/connectors_service/connectors_service/agent/cli.py
@@ -11,8 +11,8 @@
    sleeps_for_retryable,
)

-from connectors.agent.component import ConnectorsAgentComponent
-from connectors.agent.logger import get_logger
+from connectors_service.agent.component import ConnectorsAgentComponent
+from connectors_service.agent.logger import get_logger

logger = get_logger("cli")

diff --git a/app/connectors_service/connectors/agent/component.py b/app/connectors_service/connectors_service/agent/component.py
similarity index 83%
rename from app/connectors_service/connectors/agent/component.py
rename to app/connectors_service/connectors_service/agent/component.py
index 8c65fb0d2..81e8eedb6 100644
--- a/app/connectors_service/connectors/agent/component.py
+++ b/app/connectors_service/connectors_service/agent/component.py
@@ -10,11 +10,14 @@
from elastic_agent_client.service.actions import ActionsService
from elastic_agent_client.service.checkin import CheckinV2Service

-from connectors.agent.config import ConnectorsAgentConfigurationWrapper
-from connectors.agent.logger import get_logger
-from connectors.agent.protocol import ConnectorActionHandler, ConnectorCheckinHandler
-from connectors.agent.service_manager import ConnectorServiceManager
-from connectors.services.base import MultiService
+from connectors_service.agent.config import ConnectorsAgentConfigurationWrapper
+from connectors_service.agent.logger import get_logger
+from connectors_service.agent.protocol import (
+    ConnectorActionHandler,
+    ConnectorCheckinHandler,
+)
+from connectors_service.agent.service_manager import ConnectorServiceManager
+from connectors_service.services.base import MultiService

logger = get_logger("component")
@@ -22,7 +25,7 @@


class ConnectorsAgentComponent:
-    """Entry point into running connectors service in Agent.
+    """Entry point into running connectors_service service in Agent.

    This class provides a simple abstraction over Agent components
    and Connectors Service manager.
@@ -54,7 +57,7 @@ async def run(self):
        Additionally services for handling Check-in and Actions will be started
        to implement the protocol correctly.
        """
-        logger.info("Starting connectors agent component")
+        logger.info("Starting connectors_service agent component")
        client = new_v2_from_reader(self.buffer, self.ver, self.opts)
        action_handler = ConnectorActionHandler()
        self.connector_service_manager = ConnectorServiceManager(self.config_wrapper)
@@ -77,5 +80,5 @@ def stop(self, sig):
        Attempts to gracefully shutdown the services that are running
        under the component.
        """
-        logger.info("Shutting down connectors agent component")
+        logger.info("Shutting down connectors_service agent component")
        self.multi_service.shutdown(sig)
diff --git a/app/connectors_service/connectors/agent/config.py b/app/connectors_service/connectors_service/agent/config.py
similarity index 98%
rename from app/connectors_service/connectors/agent/config.py
rename to app/connectors_service/connectors_service/agent/config.py
index 16e81c680..c75e68e3c 100644
--- a/app/connectors_service/connectors/agent/config.py
+++ b/app/connectors_service/connectors_service/agent/config.py
@@ -7,8 +7,8 @@

from connectors_sdk.utils import nested_get_from_dict

-from connectors.agent.logger import get_logger
-from connectors.config import add_defaults
+from connectors_service.agent.logger import get_logger
+from connectors_service.config import add_defaults

logger = get_logger("config")

diff --git a/app/connectors_service/connectors/agent/connector_record_manager.py b/app/connectors_service/connectors_service/agent/connector_record_manager.py
similarity index 96%
rename from app/connectors_service/connectors/agent/connector_record_manager.py
rename to app/connectors_service/connectors_service/agent/connector_record_manager.py
index fe231dddd..270ab42b3 100644
--- a/app/connectors_service/connectors/agent/connector_record_manager.py
+++ b/app/connectors_service/connectors_service/agent/connector_record_manager.py
@@ -4,9 +4,9 @@
# you may not use this file except in compliance with the Elastic License 2.0.
#

-from connectors.agent.logger import get_logger
-from connectors.protocol import ConnectorIndex
-from connectors.utils import generate_random_id
+from connectors_service.agent.logger import get_logger
+from connectors_service.protocol import ConnectorIndex
+from connectors_service.utils import generate_random_id

logger = get_logger("agent_connector_record_manager")
diff --git a/app/connectors_service/connectors/agent/logger.py b/app/connectors_service/connectors_service/agent/logger.py
similarity index 100%
rename from app/connectors_service/connectors/agent/logger.py
rename to app/connectors_service/connectors_service/agent/logger.py
diff --git a/app/connectors_service/connectors/agent/mappings/google_drive.json b/app/connectors_service/connectors_service/agent/mappings/google_drive.json
similarity index 100%
rename from app/connectors_service/connectors/agent/mappings/google_drive.json
rename to app/connectors_service/connectors_service/agent/mappings/google_drive.json
diff --git a/app/connectors_service/connectors/agent/mappings/salesforce.json b/app/connectors_service/connectors_service/agent/mappings/salesforce.json
similarity index 100%
rename from app/connectors_service/connectors/agent/mappings/salesforce.json
rename to app/connectors_service/connectors_service/agent/mappings/salesforce.json
diff --git a/app/connectors_service/connectors/agent/pipelines/googledrive_pipeline.json b/app/connectors_service/connectors_service/agent/pipelines/googledrive_pipeline.json
similarity index 100%
rename from app/connectors_service/connectors/agent/pipelines/googledrive_pipeline.json
rename to app/connectors_service/connectors_service/agent/pipelines/googledrive_pipeline.json
diff --git a/app/connectors_service/connectors/agent/pipelines/salesforce_pipeline.json b/app/connectors_service/connectors_service/agent/pipelines/salesforce_pipeline.json
similarity index 100%
rename from app/connectors_service/connectors/agent/pipelines/salesforce_pipeline.json
rename to app/connectors_service/connectors_service/agent/pipelines/salesforce_pipeline.json
diff --git a/app/connectors_service/connectors/agent/protocol.py b/app/connectors_service/connectors_service/agent/protocol.py
similarity index 97%
rename from app/connectors_service/connectors/agent/protocol.py
rename to app/connectors_service/connectors_service/agent/protocol.py
index b136691cc..0f27524b9 100644
--- a/app/connectors_service/connectors/agent/protocol.py
+++ b/app/connectors_service/connectors_service/agent/protocol.py
@@ -8,13 +8,13 @@
from elastic_agent_client.handler.action import BaseActionHandler
from elastic_agent_client.handler.checkin import BaseCheckinHandler

-from connectors.agent.connector_record_manager import ConnectorRecordManager
-from connectors.agent.logger import get_logger
+from connectors_service.agent.connector_record_manager import ConnectorRecordManager
+from connectors_service.agent.logger import get_logger

logger = get_logger("protocol")

-CONNECTORS_INPUT_TYPE = "connectors-py"
+CONNECTORS_INPUT_TYPE = "connectors-service-py"
ELASTICSEARCH_OUTPUT_TYPE = "elasticsearch"


diff --git a/app/connectors_service/connectors/agent/service_manager.py b/app/connectors_service/connectors_service/agent/service_manager.py
similarity index 89%
rename from app/connectors_service/connectors/agent/service_manager.py
rename to app/connectors_service/connectors_service/agent/service_manager.py
index 21553ae64..72fe8ec5a 100644
--- a/app/connectors_service/connectors/agent/service_manager.py
+++ b/app/connectors_service/connectors_service/agent/service_manager.py
@@ -7,13 +7,13 @@

import connectors_sdk.logger

-import connectors.agent.logger
-from connectors.agent.logger import get_logger
-from connectors.services.base import (
+import connectors_service.agent.logger
+from connectors_service.agent.logger import get_logger
+from connectors_service.services.base import (
    ServiceAlreadyRunningError,
    get_services,
)
-from connectors.utils import CancellableSleeps
+from connectors_service.utils import CancellableSleeps

logger = get_logger("service_manager")
@@ -68,10 +68,10 @@ async def run(self):
            )
            log_level = config.get("service", {}).get(
                "log_level", logging.INFO
-            )  # Log Level for connectors is managed like this
+            )  # Log Level for connectors_service is managed like this
            connectors_sdk.logger.set_logger(log_level, filebeat=True)
-            # Log Level for agent connectors component itself
-            connectors.agent.logger.update_logger_level(log_level)
+            # Log Level for agent connectors_service component itself
+            connectors_service.agent.logger.update_logger_level(log_level)

            await self._multi_service.run()
        except Exception as e:
diff --git a/app/connectors_service/connectors/build_info.py b/app/connectors_service/connectors_service/build_info.py
similarity index 94%
rename from app/connectors_service/connectors/build_info.py
rename to app/connectors_service/connectors_service/build_info.py
index c2667b251..655f8356b 100644
--- a/app/connectors_service/connectors/build_info.py
+++ b/app/connectors_service/connectors_service/build_info.py
@@ -7,7 +7,7 @@

import yaml

-from connectors import __version__
+from connectors_service import __version__

# This references a file that's built in .buildkite/publish/publish-common.sh
# See https://github.com/elastic/connectors/pull/3154 for more info
diff --git a/app/connectors_service/connectors/cli/.gitkeep b/app/connectors_service/connectors_service/cli/.gitkeep
similarity index 100%
rename from app/connectors_service/connectors/cli/.gitkeep
rename to app/connectors_service/connectors_service/cli/.gitkeep
diff --git a/app/connectors_service/connectors/cli/README.md b/app/connectors_service/connectors_service/cli/README.md
similarity index 100%
rename from app/connectors_service/connectors/cli/README.md
rename to app/connectors_service/connectors_service/cli/README.md
diff --git a/app/connectors_service/connectors/cli/__init__.py b/app/connectors_service/connectors_service/cli/__init__.py
similarity index 100%
rename from app/connectors_service/connectors/cli/__init__.py
rename to app/connectors_service/connectors_service/cli/__init__.py
diff --git a/app/connectors_service/connectors/cli/auth.py b/app/connectors_service/connectors_service/cli/auth.py
similarity index 96%
rename from app/connectors_service/connectors/cli/auth.py
rename to app/connectors_service/connectors_service/cli/auth.py
index c24176419..cf68831af 100644
--- a/app/connectors_service/connectors/cli/auth.py
+++ b/app/connectors_service/connectors_service/cli/auth.py
@@ -9,7 +9,7 @@
import yaml
from elasticsearch import ApiError

-from connectors.es.cli_client import CLIClient
+from connectors_service.es.cli_client import CLIClient

CONFIG_FILE_PATH = ".cli/config.yml"

diff --git a/app/connectors_service/connectors/cli/connector.py b/app/connectors_service/connectors_service/cli/connector.py
similarity index 97%
rename from app/connectors_service/connectors/cli/connector.py
rename to app/connectors_service/connectors_service/cli/connector.py
index 1a446695d..5d3915c61 100644
--- a/app/connectors_service/connectors/cli/connector.py
+++ b/app/connectors_service/connectors_service/cli/connector.py
@@ -8,15 +8,15 @@

from connectors_sdk.utils import iso_utc

-from connectors.es import DEFAULT_LANGUAGE
-from connectors.es.cli_client import CLIClient
-from connectors.protocol import (
+from connectors_service.es import DEFAULT_LANGUAGE
+from connectors_service.es.cli_client import CLIClient
+from connectors_service.protocol import (
    CONCRETE_CONNECTORS_INDEX,
    CONCRETE_JOBS_INDEX,
    CONNECTORS_ACCESS_CONTROL_INDEX_PREFIX,
    ConnectorIndex,
)
-from connectors.utils import get_source_klass
+from connectors_service.utils import get_source_klass

EVERYDAY_AT_MIDNIGHT = "0 0 0 * * ?"
diff --git a/app/connectors_service/connectors/cli/index.py b/app/connectors_service/connectors_service/cli/index.py
similarity index 96%
rename from app/connectors_service/connectors/cli/index.py
rename to app/connectors_service/connectors_service/cli/index.py
index 4f0d91af3..605dd0420 100644
--- a/app/connectors_service/connectors/cli/index.py
+++ b/app/connectors_service/connectors_service/cli/index.py
@@ -7,8 +7,8 @@

from elasticsearch import ApiError

-from connectors.es.cli_client import CLIClient
-from connectors.protocol import (
+from connectors_service.es.cli_client import CLIClient
+from connectors_service.protocol import (
    CONCRETE_CONNECTORS_INDEX,
    CONCRETE_JOBS_INDEX,
    ConnectorIndex,
diff --git a/app/connectors_service/connectors/cli/job.py b/app/connectors_service/connectors_service/cli/job.py
similarity index 97%
rename from app/connectors_service/connectors/cli/job.py
rename to app/connectors_service/connectors_service/cli/job.py
index 86e80fb1b..ec8e604e4 100644
--- a/app/connectors_service/connectors/cli/job.py
+++ b/app/connectors_service/connectors_service/cli/job.py
@@ -7,8 +7,8 @@

from elasticsearch import ApiError

-from connectors.es.cli_client import CLIClient
-from connectors.protocol import (
+from connectors_service.es.cli_client import CLIClient
+from connectors_service.protocol import (
    CONCRETE_CONNECTORS_INDEX,
    CONCRETE_JOBS_INDEX,
    ConnectorIndex,
diff --git a/app/connectors_service/connectors/config.py b/app/connectors_service/connectors_service/config.py
similarity index 71%
rename from app/connectors_service/connectors/config.py
rename to app/connectors_service/connectors_service/config.py
index 72ac41e89..c42604dd0 100644
--- a/app/connectors_service/connectors/config.py
+++ b/app/connectors_service/connectors_service/config.py
@@ -111,36 +111,36 @@ def _default_config():
            "log_level": "INFO",
        },
        "sources": {
-            "azure_blob_storage": "connectors.sources.azure_blob_storage:AzureBlobStorageDataSource",
-            "box": "connectors.sources.box:BoxDataSource",
-            "confluence": "connectors.sources.confluence:ConfluenceDataSource",
-            "dir": "connectors.sources.directory:DirectoryDataSource",
-            "dropbox": "connectors.sources.dropbox:DropboxDataSource",
-            "github": "connectors.sources.github:GitHubDataSource",
-            "gmail": "connectors.sources.gmail:GMailDataSource",
-            "google_cloud_storage": "connectors.sources.google_cloud_storage:GoogleCloudStorageDataSource",
-            "google_drive": "connectors.sources.google_drive:GoogleDriveDataSource",
-            "graphql": "connectors.sources.graphql:GraphQLDataSource",
-            "jira": "connectors.sources.jira:JiraDataSource",
-            "microsoft_teams": "connectors.sources.microsoft_teams:MicrosoftTeamsDataSource",
-            "mongodb": "connectors.sources.mongo:MongoDataSource",
-            "mssql": "connectors.sources.mssql:MSSQLDataSource",
-            "mysql": "connectors.sources.mysql:MySqlDataSource",
-            "network_drive": "connectors.sources.network_drive:NASDataSource",
-            "notion": "connectors.sources.notion:NotionDataSource",
-            "onedrive": "connectors.sources.onedrive:OneDriveDataSource",
-            "oracle": "connectors.sources.oracle:OracleDataSource",
-            "outlook": "connectors.sources.outlook:OutlookDataSource",
-            "postgresql": "connectors.sources.postgresql:PostgreSQLDataSource",
-            "redis": "connectors.sources.redis:RedisDataSource",
-            "s3": "connectors.sources.s3:S3DataSource",
-            "salesforce": "connectors.sources.salesforce:SalesforceDataSource",
-            "sandfly": "connectors.sources.sandfly:SandflyDataSource",
-            "servicenow": "connectors.sources.servicenow:ServiceNowDataSource",
-            "sharepoint_online": "connectors.sources.sharepoint_online:SharepointOnlineDataSource",
-            "sharepoint_server": "connectors.sources.sharepoint_server:SharepointServerDataSource",
-            "slack": "connectors.sources.slack:SlackDataSource",
-            "zoom": "connectors.sources.zoom:ZoomDataSource",
+            "azure_blob_storage": "connectors_service.sources.azure_blob_storage:AzureBlobStorageDataSource",
+            "box": "connectors_service.sources.box:BoxDataSource",
+            "confluence": "connectors_service.sources.confluence:ConfluenceDataSource",
+            "dir": "connectors_service.sources.directory:DirectoryDataSource",
+            "dropbox": "connectors_service.sources.dropbox:DropboxDataSource",
+            "github": "connectors_service.sources.github:GitHubDataSource",
+            "gmail": "connectors_service.sources.gmail:GMailDataSource",
+            "google_cloud_storage": "connectors_service.sources.google_cloud_storage:GoogleCloudStorageDataSource",
+            "google_drive": "connectors_service.sources.google_drive:GoogleDriveDataSource",
+            "graphql": "connectors_service.sources.graphql:GraphQLDataSource",
+            "jira": "connectors_service.sources.jira:JiraDataSource",
+            "microsoft_teams": "connectors_service.sources.microsoft_teams:MicrosoftTeamsDataSource",
+            "mongodb": "connectors_service.sources.mongo:MongoDataSource",
+            "mssql": "connectors_service.sources.mssql:MSSQLDataSource",
+            "mysql": "connectors_service.sources.mysql:MySqlDataSource",
+            "network_drive": "connectors_service.sources.network_drive:NASDataSource",
+            "notion": "connectors_service.sources.notion:NotionDataSource",
+            "onedrive": "connectors_service.sources.onedrive:OneDriveDataSource",
+            "oracle": "connectors_service.sources.oracle:OracleDataSource",
+            "outlook": "connectors_service.sources.outlook:OutlookDataSource",
+            "postgresql": "connectors_service.sources.postgresql:PostgreSQLDataSource",
+            "redis": "connectors_service.sources.redis:RedisDataSource",
+            "s3": "connectors_service.sources.s3:S3DataSource",
+            "salesforce": "connectors_service.sources.salesforce:SalesforceDataSource",
+            "sandfly": "connectors_service.sources.sandfly:SandflyDataSource",
+            "servicenow": "connectors_service.sources.servicenow:ServiceNowDataSource",
+            "sharepoint_online": "connectors_service.sources.sharepoint_online:SharepointOnlineDataSource",
+            "sharepoint_server": "connectors_service.sources.sharepoint_server:SharepointServerDataSource",
+            "slack": "connectors_service.sources.slack:SlackDataSource",
+            "zoom": "connectors_service.sources.zoom:ZoomDataSource",
        },
    }
diff --git a/app/connectors_service/connectors/connectors_cli.py b/app/connectors_service/connectors_service/connectors_cli.py
similarity index 98%
rename from app/connectors_service/connectors/connectors_cli.py
rename to app/connectors_service/connectors_service/connectors_cli.py
index ffccc9aba..e1bea6550 100644
--- a/app/connectors_service/connectors/connectors_cli.py
+++ b/app/connectors_service/connectors_service/connectors_cli.py
@@ -21,13 +21,13 @@
from simple_term_menu import TerminalMenu
from tabulate import tabulate

-from connectors import __version__  # NOQA
-from connectors.cli.auth import CONFIG_FILE_PATH, Auth
-from connectors.cli.connector import Connector
-from connectors.cli.index import Index
-from connectors.cli.job import Job
-from connectors.config import _default_config
-from connectors.es import DEFAULT_LANGUAGE
+from connectors_service import __version__  # NOQA
+from connectors_service.cli.auth import CONFIG_FILE_PATH, Auth
+from connectors_service.cli.connector import Connector
+from connectors_service.cli.index import Index
+from connectors_service.cli.job import Job
+from connectors_service.config import _default_config
+from connectors_service.es import DEFAULT_LANGUAGE

__all__ = ["main"]
diff --git a/app/connectors_service/connectors/es/__init__.py b/app/connectors_service/connectors_service/es/__init__.py
similarity index 67%
rename from app/connectors_service/connectors/es/__init__.py
rename to app/connectors_service/connectors_service/es/__init__.py
index 6fe4cf1e1..5f8e7e205 100644
--- a/app/connectors_service/connectors/es/__init__.py
+++ b/app/connectors_service/connectors_service/es/__init__.py
@@ -7,9 +7,12 @@

from elasticsearch.exceptions import GeneralAvailabilityWarning

-from connectors.es.client import ESClient  # NOQA
-from connectors.es.document import ESDocument, InvalidDocumentSourceError  # NOQA
-from connectors.es.index import ESIndex  # NOQA
+from connectors_service.es.client import ESClient  # NOQA
+from connectors_service.es.document import (  # NOQA
+    ESDocument,
+    InvalidDocumentSourceError,
+)
+from connectors_service.es.index import ESIndex  # NOQA

warnings.filterwarnings("ignore", category=GeneralAvailabilityWarning)
diff --git a/app/connectors_service/connectors/es/cli_client.py b/app/connectors_service/connectors_service/es/cli_client.py
similarity index 71%
rename from app/connectors_service/connectors/es/cli_client.py
rename to app/connectors_service/connectors_service/es/cli_client.py
index 9aac13258..79aeee3ea 100644
--- a/app/connectors_service/connectors/es/cli_client.py
+++ b/app/connectors_service/connectors_service/es/cli_client.py
@@ -3,8 +3,8 @@
# or more contributor license agreements. Licensed under the Elastic License 2.0;
# you may not use this file except in compliance with the Elastic License 2.0.
#
-from connectors.es.client import USER_AGENT_BASE
-from connectors.es.management_client import ESManagementClient
+from connectors_service.es.client import USER_AGENT_BASE
+from connectors_service.es.management_client import ESManagementClient


class CLIClient(ESManagementClient):
diff --git a/app/connectors_service/connectors/es/client.py b/app/connectors_service/connectors_service/es/client.py
similarity index 96%
rename from app/connectors_service/connectors/es/client.py
rename to app/connectors_service/connectors_service/es/client.py
index 9fff6fef9..8dc0ec998 100644
--- a/app/connectors_service/connectors/es/client.py
+++ b/app/connectors_service/connectors_service/es/client.py
@@ -16,12 +16,12 @@
    ConnectionError as ElasticConnectionError,
)

-from connectors import __version__
-from connectors.config import (
+from connectors_service import __version__
+from connectors_service.config import (
    DEFAULT_ELASTICSEARCH_MAX_RETRIES,
    DEFAULT_ELASTICSEARCH_RETRY_INTERVAL,
)
-from connectors.utils import (
+from connectors_service.utils import (
    CancellableSleeps,
    RetryStrategy,
    func_human_readable_name,
@@ -39,7 +39,7 @@ class License(Enum):
    UNSET = None


-USER_AGENT_BASE = f"elastic-connectors-{__version__}"
+USER_AGENT_BASE = f"elastic-connectors-service-{__version__}"


class ESClient:
@@ -109,7 +109,9 @@ def __init__(self, config):

        options["headers"] = config.get("headers", {})
        options["headers"]["user-agent"] = self.__class__.user_agent
-        options["headers"]["X-elastic-product-origin"] = "connectors"
+        options["headers"]["X-elastic-product-origin"] = (
+            "connectors"  # TODO: update this and dashboard depending on this value
+        )

        self.client = AsyncElasticsearch(**options)
        self._keep_waiting = True
diff --git a/app/connectors_service/connectors/es/document.py b/app/connectors_service/connectors_service/es/document.py
similarity index 100%
rename from app/connectors_service/connectors/es/document.py
rename to app/connectors_service/connectors_service/es/document.py
diff --git a/app/connectors_service/connectors/es/index.py b/app/connectors_service/connectors_service/es/index.py
similarity index 98%
rename from app/connectors_service/connectors/es/index.py
rename to app/connectors_service/connectors_service/es/index.py
index c4eaf9901..b4d9858fe 100644
--- a/app/connectors_service/connectors/es/index.py
+++ b/app/connectors_service/connectors_service/es/index.py
@@ -8,7 +8,7 @@
from connectors_sdk.logger import logger
from elasticsearch import ApiError

-from connectors.es import ESClient
+from connectors_service.es import ESClient

DEFAULT_PAGE_SIZE = 100

@@ -20,7 +20,7 @@ class DocumentNotFoundError(Exception):
class TemporaryConnectorApiWrapper(ESClient):
    """Temporary class to wrap calls to Connectors API.

-    When connectors API becomes part of official client
+    When connectors_service API becomes part of official client
    this class will be removed.
""" diff --git a/app/connectors_service/connectors/es/language_data.yml b/app/connectors_service/connectors_service/es/language_data.yml similarity index 100% rename from app/connectors_service/connectors/es/language_data.yml rename to app/connectors_service/connectors_service/es/language_data.yml diff --git a/app/connectors_service/connectors/es/license.py b/app/connectors_service/connectors_service/es/license.py similarity index 91% rename from app/connectors_service/connectors/es/license.py rename to app/connectors_service/connectors_service/es/license.py index 12eb823c7..7594e9d4d 100644 --- a/app/connectors_service/connectors/es/license.py +++ b/app/connectors_service/connectors_service/es/license.py @@ -3,7 +3,7 @@ # or more contributor license agreements. Licensed under the Elastic License 2.0; # you may not use this file except in compliance with the Elastic License 2.0. # -from connectors.protocol import JobType +from connectors_service.protocol import JobType def requires_platinum_license(sync_job, connector, source_klass): diff --git a/app/connectors_service/connectors/es/management_client.py b/app/connectors_service/connectors_service/es/management_client.py similarity index 98% rename from app/connectors_service/connectors/es/management_client.py rename to app/connectors_service/connectors_service/es/management_client.py index 0aa2917f0..6e71fd023 100644 --- a/app/connectors_service/connectors/es/management_client.py +++ b/app/connectors_service/connectors_service/es/management_client.py @@ -13,8 +13,8 @@ ) from elasticsearch.helpers import async_scan -from connectors.es import TIMESTAMP_FIELD -from connectors.es.client import ESClient +from connectors_service.es import TIMESTAMP_FIELD +from connectors_service.es.client import ESClient class ESManagementClient(ESClient): diff --git a/app/connectors_service/connectors/es/sink.py b/app/connectors_service/connectors_service/es/sink.py similarity index 98% rename from app/connectors_service/connectors/es/sink.py rename to app/connectors_service/connectors_service/es/sink.py index 9d6d08b3b..6995648f3 100644 --- a/app/connectors_service/connectors/es/sink.py +++ b/app/connectors_service/connectors_service/es/sink.py @@ -31,19 +31,19 @@ iso_utc, ) -from connectors.config import ( +from connectors_service.config import ( DEFAULT_ELASTICSEARCH_MAX_RETRIES, DEFAULT_ELASTICSEARCH_RETRY_INTERVAL, ) -from connectors.es import TIMESTAMP_FIELD -from connectors.es.management_client import ESManagementClient -from connectors.protocol import JobType -from connectors.protocol.connectors import ( +from connectors_service.es import TIMESTAMP_FIELD +from connectors_service.es.management_client import ESManagementClient +from connectors_service.protocol import JobType +from connectors_service.protocol.connectors import ( DELETED_DOCUMENT_COUNT, INDEXED_DOCUMENT_COUNT, INDEXED_DOCUMENT_VOLUME, ) -from connectors.utils import ( +from connectors_service.utils import ( DEFAULT_CHUNK_MEM_SIZE, DEFAULT_CHUNK_SIZE, DEFAULT_CONCURRENT_DOWNLOADS, @@ -129,7 +129,7 @@ class Sink: Arguments: - - `client` -- an instance of `connectors.es.ESManagementClient` + - `client` -- an instance of `connectors_service.es.ESManagementClient` - `queue` -- an instance of `asyncio.Queue` to pull docs from - `chunk_size` -- a maximum number of operations to send per request - `pipeline` -- ingest pipeline settings to pass to the bulk API @@ -430,7 +430,7 @@ class Extractor: This class runs a coroutine that puts docs in `queue`, given a document generator. 

    Arguments:
-    - client: an instance of `connectors.es.ESManagementClient`
+    - client: an instance of `connectors_service.es.ESManagementClient`
    - queue: an `asyncio.Queue` to put docs in
    - index: the target Elasticsearch index
    - filter_: an instance of `Filter` to apply on the fetched document -- default: `None`
diff --git a/app/connectors_service/connectors/kibana.py b/app/connectors_service/connectors_service/kibana.py
similarity index 95%
rename from app/connectors_service/connectors/kibana.py
rename to app/connectors_service/connectors_service/kibana.py
index 55c9232da..914f702bd 100644
--- a/app/connectors_service/connectors/kibana.py
+++ b/app/connectors_service/connectors_service/kibana.py
@@ -12,11 +12,11 @@

from connectors_sdk.logger import set_extra_logger

-from connectors.config import load_config
-from connectors.es import DEFAULT_LANGUAGE
-from connectors.es.management_client import ESManagementClient
-from connectors.protocol import ConnectorIndex
-from connectors.utils import get_source_klass, validate_index_name
+from connectors_service.config import load_config
+from connectors_service.es import DEFAULT_LANGUAGE
+from connectors_service.es.management_client import ESManagementClient
+from connectors_service.protocol import ConnectorIndex
+from connectors_service.utils import get_source_klass, validate_index_name

CONNECTORS_INDEX = ".elastic-connectors-v1"
JOBS_INDEX = ".elastic-connectors-sync-jobs-v1"
diff --git a/app/connectors_service/connectors/preflight_check.py b/app/connectors_service/connectors_service/preflight_check.py
similarity index 97%
rename from app/connectors_service/connectors/preflight_check.py
rename to app/connectors_service/connectors_service/preflight_check.py
index 0ad7c3dc5..8e0db091d 100644
--- a/app/connectors_service/connectors/preflight_check.py
+++ b/app/connectors_service/connectors_service/preflight_check.py
@@ -9,9 +9,9 @@
import aiohttp
from connectors_sdk.logger import logger

-from connectors.es.management_client import ESManagementClient
-from connectors.protocol import CONCRETE_CONNECTORS_INDEX, CONCRETE_JOBS_INDEX
-from connectors.utils import CancellableSleeps
+from connectors_service.es.management_client import ESManagementClient
+from connectors_service.protocol import CONCRETE_CONNECTORS_INDEX, CONCRETE_JOBS_INDEX
+from connectors_service.utils import CancellableSleeps


class PreflightCheck:
@@ -211,7 +211,7 @@ def _validate_configuration(self):
            and deprecated_service_type
        ):
            logger.warning(
-                "The configuration 'connector_id' and 'serivce_type' has been deprecated and will be removed in later release. Please configure the connector in 'connectors'."
+                "The configuration 'connector_id' and 'service_type' has been deprecated and will be removed in later release. Please configure the connector in 'connectors'."
            )
            configured_connectors.append(
                {
diff --git a/app/connectors_service/connectors/protocol/__init__.py b/app/connectors_service/connectors_service/protocol/__init__.py
similarity index 100%
rename from app/connectors_service/connectors/protocol/__init__.py
rename to app/connectors_service/connectors_service/protocol/__init__.py
diff --git a/app/connectors_service/connectors/protocol/connectors.py b/app/connectors_service/connectors_service/protocol/connectors.py
similarity index 99%
rename from app/connectors_service/connectors/protocol/connectors.py
rename to app/connectors_service/connectors_service/protocol/connectors.py
index e40a29704..5471b5494 100644
--- a/app/connectors_service/connectors/protocol/connectors.py
+++ b/app/connectors_service/connectors_service/protocol/connectors.py
@@ -36,9 +36,9 @@
    NotFoundError as ElasticNotFoundError,
)

-from connectors.es import ESDocument, ESIndex
-from connectors.es.client import with_concurrency_control
-from connectors.utils import (
+from connectors_service.es import ESDocument, ESIndex
+from connectors_service.es.client import with_concurrency_control
+from connectors_service.utils import (
    ACCESS_CONTROL_INDEX_PREFIX,
    deep_merge_dicts,
    filter_nested_dict_by_keys,
diff --git a/app/connectors_service/connectors/service_cli.py b/app/connectors_service/connectors_service/service_cli.py
similarity index 94%
rename from app/connectors_service/connectors/service_cli.py
rename to app/connectors_service/connectors_service/service_cli.py
index 201ee5f7e..87eccd59d 100755
--- a/app/connectors_service/connectors/service_cli.py
+++ b/app/connectors_service/connectors_service/service_cli.py
@@ -23,16 +23,16 @@
from connectors_sdk.content_extraction import ContentExtraction
from connectors_sdk.logger import logger, set_logger

-from connectors import __version__
-from connectors.build_info import __build_info__
-from connectors.config import load_config
-from connectors.preflight_check import PreflightCheck
-from connectors.services import get_services
-from connectors.utils import get_source_klass, get_source_klasses
+from connectors_service import __version__
+from connectors_service.build_info import __build_info__
+from connectors_service.config import load_config
+from connectors_service.preflight_check import PreflightCheck
+from connectors_service.services import get_services
+from connectors_service.utils import get_source_klass, get_source_klasses

__all__ = ["main"]

-from connectors.utils import sleeps_for_retryable
+from connectors_service.utils import sleeps_for_retryable


async def _start_service(actions, config, loop):
diff --git a/app/connectors_service/connectors_service/services/__init__.py b/app/connectors_service/connectors_service/services/__init__.py
new file mode 100644
index 000000000..a205615c1
--- /dev/null
+++ b/app/connectors_service/connectors_service/services/__init__.py
@@ -0,0 +1,15 @@
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License 2.0;
+# you may not use this file except in compliance with the Elastic License 2.0.
+# + +from connectors_service.services.access_control_sync_job_execution import ( + AccessControlSyncJobExecutionService, # NOQA +) +from connectors_service.services.base import get_services # NOQA +from connectors_service.services.content_sync_job_execution import ( + ContentSyncJobExecutionService, # NOQA +) +from connectors_service.services.job_cleanup import JobCleanUpService # NOQA +from connectors_service.services.job_scheduling import JobSchedulingService # NOQA diff --git a/app/connectors_service/connectors/services/access_control_sync_job_execution.py b/app/connectors_service/connectors_service/services/access_control_sync_job_execution.py similarity index 91% rename from app/connectors_service/connectors/services/access_control_sync_job_execution.py rename to app/connectors_service/connectors_service/services/access_control_sync_job_execution.py index ee789366a..da07f11ae 100644 --- a/app/connectors_service/connectors/services/access_control_sync_job_execution.py +++ b/app/connectors_service/connectors_service/services/access_control_sync_job_execution.py @@ -5,8 +5,8 @@ # from functools import cached_property -from connectors.protocol import JobStatus, JobType -from connectors.services.job_execution import JobExecutionService +from connectors_service.protocol import JobStatus, JobType +from connectors_service.services.job_execution import JobExecutionService class AccessControlSyncJobExecutionService(JobExecutionService): diff --git a/app/connectors_service/connectors/services/base.py b/app/connectors_service/connectors_service/services/base.py similarity index 99% rename from app/connectors_service/connectors/services/base.py rename to app/connectors_service/connectors_service/services/base.py index a1f66438e..b43063181 100644 --- a/app/connectors_service/connectors/services/base.py +++ b/app/connectors_service/connectors_service/services/base.py @@ -17,7 +17,7 @@ from connectors_sdk.logger import DocumentLogger, logger -from connectors.utils import CancellableSleeps +from connectors_service.utils import CancellableSleeps __all__ = [ "MultiService", diff --git a/app/connectors_service/connectors/services/content_sync_job_execution.py b/app/connectors_service/connectors_service/services/content_sync_job_execution.py similarity index 89% rename from app/connectors_service/connectors/services/content_sync_job_execution.py rename to app/connectors_service/connectors_service/services/content_sync_job_execution.py index e83e5c521..48bdaa514 100644 --- a/app/connectors_service/connectors/services/content_sync_job_execution.py +++ b/app/connectors_service/connectors_service/services/content_sync_job_execution.py @@ -6,8 +6,8 @@ from functools import cached_property -from connectors.protocol import JobStatus, JobType -from connectors.services.job_execution import JobExecutionService +from connectors_service.protocol import JobStatus, JobType +from connectors_service.services.job_execution import JobExecutionService class ContentSyncJobExecutionService(JobExecutionService): diff --git a/app/connectors_service/connectors/services/job_cleanup.py b/app/connectors_service/connectors_service/services/job_cleanup.py similarity index 95% rename from app/connectors_service/connectors/services/job_cleanup.py rename to app/connectors_service/connectors_service/services/job_cleanup.py index 52c3d8eea..be3866f0f 100644 --- a/app/connectors_service/connectors/services/job_cleanup.py +++ b/app/connectors_service/connectors_service/services/job_cleanup.py @@ -7,10 +7,10 @@ A task periodically clean up 
orphaned and idle jobs. """ -from connectors.es.index import DocumentNotFoundError -from connectors.es.management_client import ESManagementClient -from connectors.protocol import ConnectorIndex, SyncJobIndex -from connectors.services.base import BaseService +from connectors_service.es.index import DocumentNotFoundError +from connectors_service.es.management_client import ESManagementClient +from connectors_service.protocol import ConnectorIndex, SyncJobIndex +from connectors_service.services.base import BaseService IDLE_JOB_ERROR = "The job has not seen any update for some time." diff --git a/app/connectors_service/connectors/services/job_execution.py b/app/connectors_service/connectors_service/services/job_execution.py similarity index 93% rename from app/connectors_service/connectors/services/job_execution.py rename to app/connectors_service/connectors_service/services/job_execution.py index fccb71789..453a1eceb 100644 --- a/app/connectors_service/connectors/services/job_execution.py +++ b/app/connectors_service/connectors_service/services/job_execution.py @@ -5,17 +5,17 @@ # from functools import cached_property -from connectors.es.client import License -from connectors.es.index import DocumentNotFoundError -from connectors.es.license import requires_platinum_license -from connectors.protocol import ( +from connectors_service.es.client import License +from connectors_service.es.index import DocumentNotFoundError +from connectors_service.es.license import requires_platinum_license +from connectors_service.protocol import ( ConnectorIndex, DataSourceError, SyncJobIndex, ) -from connectors.services.base import BaseService -from connectors.sync_job_runner import SyncJobRunner -from connectors.utils import ConcurrentTasks, get_source_klass +from connectors_service.services.base import BaseService +from connectors_service.sync_job_runner import SyncJobRunner +from connectors_service.utils import ConcurrentTasks, get_source_klass class JobExecutionService(BaseService): diff --git a/app/connectors_service/connectors/services/job_scheduling.py b/app/connectors_service/connectors_service/services/job_scheduling.py similarity index 97% rename from app/connectors_service/connectors/services/job_scheduling.py rename to app/connectors_service/connectors_service/services/job_scheduling.py index be2dcf538..d27368983 100644 --- a/app/connectors_service/connectors/services/job_scheduling.py +++ b/app/connectors_service/connectors_service/services/job_scheduling.py @@ -14,9 +14,9 @@ import functools from datetime import datetime, timezone -from connectors.es.client import License, with_concurrency_control -from connectors.es.index import DocumentNotFoundError -from connectors.protocol import ( +from connectors_service.es.client import License, with_concurrency_control +from connectors_service.es.index import DocumentNotFoundError +from connectors_service.protocol import ( ConnectorIndex, DataSourceError, JobTriggerMethod, @@ -26,8 +26,8 @@ Status, SyncJobIndex, ) -from connectors.services.base import BaseService -from connectors.utils import ConcurrentTasks, get_source_klass +from connectors_service.services.base import BaseService +from connectors_service.utils import ConcurrentTasks, get_source_klass class JobSchedulingService(BaseService): diff --git a/app/connectors_service/connectors/sources/__init__.py b/app/connectors_service/connectors_service/sources/__init__.py similarity index 100% rename from app/connectors_service/connectors/sources/__init__.py rename to 
app/connectors_service/connectors_service/sources/__init__.py diff --git a/app/connectors_service/connectors/sources/atlassian.py b/app/connectors_service/connectors_service/sources/atlassian.py similarity index 95% rename from app/connectors_service/connectors/sources/atlassian.py rename to app/connectors_service/connectors_service/sources/atlassian.py index b1cfd338b..0a2e0f449 100644 --- a/app/connectors_service/connectors/sources/atlassian.py +++ b/app/connectors_service/connectors_service/sources/atlassian.py @@ -12,8 +12,8 @@ from connectors_sdk.utils import iso_utc from fastjsonschema import JsonSchemaValueException -from connectors.access_control import es_access_control_query, prefix_identity -from connectors.utils import RetryStrategy, retryable +from connectors_service.access_control import es_access_control_query, prefix_identity +from connectors_service.utils import RetryStrategy, retryable RETRIES = 3 RETRY_INTERVAL = 2 @@ -109,7 +109,7 @@ def access_control_query(self, access_control): return es_access_control_query(access_control) async def fetch_all_users(self, url): - from connectors.sources.jira import JIRA_CLOUD + from connectors_service.sources.jira import JIRA_CLOUD start_at = 0 while True: @@ -129,7 +129,7 @@ async def fetch_all_users(self, url): start_at += CLOUD_USER_BATCH async def fetch_all_users_for_confluence(self, url): - from connectors.sources.confluence import CONFLUENCE_CLOUD + from connectors_service.sources.confluence import CONFLUENCE_CLOUD start_at = 0 while True: @@ -218,8 +218,8 @@ async def user_access_control_doc(self, user): return user_document | self.access_control_query(access_control=access_control) def is_active_atlassian_user(self, user_info): - from connectors.sources.confluence import CONFLUENCE_CLOUD - from connectors.sources.jira import JIRA_CLOUD + from connectors_service.sources.confluence import CONFLUENCE_CLOUD + from connectors_service.sources.jira import JIRA_CLOUD user_url = user_info.get("self") user_name = user_info.get("displayName", "user") diff --git a/app/connectors_service/connectors/sources/azure_blob_storage.py b/app/connectors_service/connectors_service/sources/azure_blob_storage.py similarity index 100% rename from app/connectors_service/connectors/sources/azure_blob_storage.py rename to app/connectors_service/connectors_service/sources/azure_blob_storage.py diff --git a/app/connectors_service/connectors/sources/box.py b/app/connectors_service/connectors_service/sources/box.py similarity index 99% rename from app/connectors_service/connectors/sources/box.py rename to app/connectors_service/connectors_service/sources/box.py index 8bc135cde..a92f67e15 100644 --- a/app/connectors_service/connectors/sources/box.py +++ b/app/connectors_service/connectors_service/sources/box.py @@ -25,7 +25,7 @@ convert_to_b64, ) -from connectors.utils import ( +from connectors_service.utils import ( CacheWithTimeout, CancellableSleeps, ConcurrentTasks, diff --git a/app/connectors_service/connectors/sources/confluence.py b/app/connectors_service/connectors_service/sources/confluence.py similarity index 99% rename from app/connectors_service/connectors/sources/confluence.py rename to app/connectors_service/connectors_service/sources/confluence.py index d4847023d..5e721190d 100644 --- a/app/connectors_service/connectors/sources/confluence.py +++ b/app/connectors_service/connectors_service/sources/confluence.py @@ -20,8 +20,8 @@ nested_get_from_dict, ) -from connectors.access_control import ACCESS_CONTROL -from connectors.sources.atlassian 
import ( +from connectors_service.access_control import ACCESS_CONTROL +from connectors_service.sources.atlassian import ( AtlassianAccessControl, AtlassianAdvancedRulesValidator, prefix_account_email, @@ -31,7 +31,7 @@ prefix_group_id, prefix_user, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, ConcurrentTasks, MemQueue, diff --git a/app/connectors_service/connectors/sources/directory.py b/app/connectors_service/connectors_service/sources/directory.py similarity index 98% rename from app/connectors_service/connectors/sources/directory.py rename to app/connectors_service/connectors_service/sources/directory.py index c5fcc82dd..abf5a5ea4 100644 --- a/app/connectors_service/connectors/sources/directory.py +++ b/app/connectors_service/connectors_service/sources/directory.py @@ -21,7 +21,7 @@ hash_id, ) -from connectors.utils import get_base64_value +from connectors_service.utils import get_base64_value DEFAULT_DIR = os.environ.get("SYSTEM_DIR", os.path.dirname(__file__)) diff --git a/app/connectors_service/connectors/sources/dropbox.py b/app/connectors_service/connectors_service/sources/dropbox.py similarity index 99% rename from app/connectors_service/connectors/sources/dropbox.py rename to app/connectors_service/connectors_service/sources/dropbox.py index 7523e5691..a1954a75e 100644 --- a/app/connectors_service/connectors/sources/dropbox.py +++ b/app/connectors_service/connectors_service/sources/dropbox.py @@ -25,12 +25,12 @@ iso_utc, ) -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, RetryStrategy, evaluate_timedelta, diff --git a/app/connectors_service/connectors/sources/generic_database.py b/app/connectors_service/connectors_service/sources/generic_database.py similarity index 98% rename from app/connectors_service/connectors/sources/generic_database.py rename to app/connectors_service/connectors_service/sources/generic_database.py index 5339faccb..fb7e3f16b 100644 --- a/app/connectors_service/connectors/sources/generic_database.py +++ b/app/connectors_service/connectors_service/sources/generic_database.py @@ -9,7 +9,7 @@ from asyncpg.exceptions._base import InternalClientError from sqlalchemy.exc import ProgrammingError -from connectors.utils import RetryStrategy, retryable +from connectors_service.utils import RetryStrategy, retryable WILDCARD = "*" diff --git a/app/connectors_service/connectors/sources/github.py b/app/connectors_service/connectors_service/sources/github.py similarity index 99% rename from app/connectors_service/connectors/sources/github.py rename to app/connectors_service/connectors_service/sources/github.py index 4e360b268..86dafbe5c 100644 --- a/app/connectors_service/connectors/sources/github.py +++ b/app/connectors_service/connectors_service/sources/github.py @@ -30,12 +30,12 @@ from gidgethub.aiohttp import GitHubAPI from gidgethub.apps import get_installation_access_token, get_jwt -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, RetryStrategy, decode_base64_value, diff --git a/app/connectors_service/connectors/sources/gmail.py b/app/connectors_service/connectors_service/sources/gmail.py similarity index 98% rename from 
app/connectors_service/connectors/sources/gmail.py rename to app/connectors_service/connectors_service/sources/gmail.py index 18e93d370..5afad0ab3 100644 --- a/app/connectors_service/connectors/sources/gmail.py +++ b/app/connectors_service/connectors_service/sources/gmail.py @@ -17,8 +17,8 @@ ) from fastjsonschema import JsonSchemaValueException -from connectors.access_control import ACCESS_CONTROL, es_access_control_query -from connectors.sources.google import ( +from connectors_service.access_control import ACCESS_CONTROL, es_access_control_query +from connectors_service.sources.google import ( GMailClient, GoogleDirectoryClient, MessageFields, @@ -26,7 +26,7 @@ load_service_account_json, validate_service_account_json, ) -from connectors.utils import ( +from connectors_service.utils import ( EMAIL_REGEX_PATTERN, base64url_to_base64, validate_email_address, diff --git a/app/connectors_service/connectors/sources/google.py b/app/connectors_service/connectors_service/sources/google.py similarity index 99% rename from app/connectors_service/connectors/sources/google.py rename to app/connectors_service/connectors_service/sources/google.py index 01051ab87..5d6c87237 100644 --- a/app/connectors_service/connectors/sources/google.py +++ b/app/connectors_service/connectors_service/sources/google.py @@ -13,7 +13,7 @@ from connectors_sdk.logger import logger from connectors_sdk.source import ConfigurableFieldValueError -from connectors.utils import RetryStrategy, retryable +from connectors_service.utils import RetryStrategy, retryable # Google Service Account JSON includes "universe_domain" key. That argument is not # supported in aiogoogle library in version 5.3.0. The "universe_domain" key is allowed in diff --git a/app/connectors_service/connectors/sources/google_cloud_storage.py b/app/connectors_service/connectors_service/sources/google_cloud_storage.py similarity index 99% rename from app/connectors_service/connectors/sources/google_cloud_storage.py rename to app/connectors_service/connectors_service/sources/google_cloud_storage.py index 3ff4f2ce1..527d50e57 100644 --- a/app/connectors_service/connectors/sources/google_cloud_storage.py +++ b/app/connectors_service/connectors_service/sources/google_cloud_storage.py @@ -14,11 +14,11 @@ from connectors_sdk.logger import logger from connectors_sdk.source import BaseDataSource -from connectors.sources.google import ( +from connectors_service.sources.google import ( load_service_account_json, validate_service_account_json, ) -from connectors.utils import RetryStrategy, get_pem_format, retryable +from connectors_service.utils import RetryStrategy, get_pem_format, retryable CLOUD_STORAGE_READ_ONLY_SCOPE = "https://www.googleapis.com/auth/devstorage.read_only" CLOUD_STORAGE_BASE_URL = "https://console.cloud.google.com/storage/browser/_details/" diff --git a/app/connectors_service/connectors/sources/google_drive.py b/app/connectors_service/connectors_service/sources/google_drive.py similarity index 99% rename from app/connectors_service/connectors/sources/google_drive.py rename to app/connectors_service/connectors_service/sources/google_drive.py index 3e0b3fd8f..c95910b0f 100644 --- a/app/connectors_service/connectors/sources/google_drive.py +++ b/app/connectors_service/connectors_service/sources/google_drive.py @@ -16,20 +16,20 @@ iso_zulu, ) -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.es.sink import OP_DELETE, OP_INDEX -from 
connectors.sources.google import ( +from connectors_service.es.sink import OP_DELETE, OP_INDEX +from connectors_service.sources.google import ( GoogleServiceAccountClient, UserFields, load_service_account_json, remove_universe_domain, validate_service_account_json, ) -from connectors.utils import ( +from connectors_service.utils import ( EMAIL_REGEX_PATTERN, validate_email_address, ) @@ -383,7 +383,7 @@ def get_default_configuration(cls): "label": "Google Drive service account JSON", "sensitive": True, "order": 1, - "tooltip": "This connectors authenticates as a service account to synchronize content from Google Drive.", + "tooltip": "This connector authenticates as a service account to synchronize content from Google Drive.", "type": "str", }, "use_domain_wide_delegation_for_sync": { diff --git a/app/connectors_service/connectors/sources/graphql.py b/app/connectors_service/connectors_service/sources/graphql.py similarity index 99% rename from app/connectors_service/connectors/sources/graphql.py rename to app/connectors_service/connectors_service/sources/graphql.py index 059b928d0..6b4c6440f 100644 --- a/app/connectors_service/connectors/sources/graphql.py +++ b/app/connectors_service/connectors_service/sources/graphql.py @@ -21,7 +21,7 @@ from graphql.language.ast import VariableNode from graphql.language.visitor import Visitor -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, RetryStrategy, retryable, diff --git a/app/connectors_service/connectors/sources/jira.py b/app/connectors_service/connectors_service/sources/jira.py similarity index 99% rename from app/connectors_service/connectors/sources/jira.py rename to app/connectors_service/connectors_service/sources/jira.py index ed66ca65a..8f925eb49 100644 --- a/app/connectors_service/connectors/sources/jira.py +++ b/app/connectors_service/connectors_service/sources/jira.py @@ -20,15 +20,15 @@ iso_utc, ) -from connectors.access_control import ACCESS_CONTROL -from connectors.sources.atlassian import ( +from connectors_service.access_control import ACCESS_CONTROL +from connectors_service.sources.atlassian import ( AtlassianAccessControl, AtlassianAdvancedRulesValidator, prefix_account_id, prefix_account_name, prefix_group_id, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, ConcurrentTasks, MemQueue, diff --git a/app/connectors_service/connectors/sources/microsoft_teams.py b/app/connectors_service/connectors_service/sources/microsoft_teams.py similarity index 99% rename from app/connectors_service/connectors/sources/microsoft_teams.py rename to app/connectors_service/connectors_service/sources/microsoft_teams.py index 1b4f7d020..d45f07b1c 100644 --- a/app/connectors_service/connectors/sources/microsoft_teams.py +++ b/app/connectors_service/connectors_service/sources/microsoft_teams.py @@ -27,7 +27,7 @@ ) from msal import ConfidentialClientApplication -from connectors.utils import ( +from connectors_service.utils import ( CacheWithTimeout, CancellableSleeps, ConcurrentTasks, diff --git a/app/connectors_service/connectors/sources/mongo.py b/app/connectors_service/connectors_service/sources/mongo.py similarity index 99% rename from app/connectors_service/connectors/sources/mongo.py rename to app/connectors_service/connectors_service/sources/mongo.py index 4995fb930..416402705 100644 --- a/app/connectors_service/connectors/sources/mongo.py +++ b/app/connectors_service/connectors_service/sources/mongo.py @@ -22,7 +22,7 @@ from motor.motor_asyncio import 
AsyncIOMotorClient from pymongo.errors import OperationFailure -from connectors.utils import get_pem_format +from connectors_service.utils import get_pem_format class MongoAdvancedRulesValidator(AdvancedRulesValidator): diff --git a/app/connectors_service/connectors/sources/mssql.py b/app/connectors_service/connectors_service/sources/mssql.py similarity index 99% rename from app/connectors_service/connectors/sources/mssql.py rename to app/connectors_service/connectors_service/sources/mssql.py index 4c7dda568..384a73199 100644 --- a/app/connectors_service/connectors/sources/mssql.py +++ b/app/connectors_service/connectors_service/sources/mssql.py @@ -25,7 +25,7 @@ from sqlalchemy.engine import URL from sqlalchemy.exc import ProgrammingError -from connectors.sources.generic_database import ( +from connectors_service.sources.generic_database import ( DEFAULT_FETCH_SIZE, DEFAULT_RETRY_COUNT, DEFAULT_WAIT_MULTIPLIER, @@ -36,7 +36,7 @@ is_wildcard, map_column_names, ) -from connectors.utils import ( +from connectors_service.utils import ( RetryStrategy, get_pem_format, retryable, diff --git a/app/connectors_service/connectors/sources/mysql.py b/app/connectors_service/connectors_service/sources/mysql.py similarity index 99% rename from app/connectors_service/connectors/sources/mysql.py rename to app/connectors_service/connectors_service/sources/mysql.py index 823bd9af3..f162f79f0 100644 --- a/app/connectors_service/connectors/sources/mysql.py +++ b/app/connectors_service/connectors_service/sources/mysql.py @@ -19,11 +19,11 @@ ) from fastjsonschema import JsonSchemaValueException -from connectors.sources.generic_database import ( +from connectors_service.sources.generic_database import ( configured_tables, is_wildcard, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, RetryStrategy, retryable, diff --git a/app/connectors_service/connectors/sources/network_drive.py b/app/connectors_service/connectors_service/sources/network_drive.py similarity index 99% rename from app/connectors_service/connectors/sources/network_drive.py rename to app/connectors_service/connectors_service/sources/network_drive.py index bbb5ddf60..f6648b549 100644 --- a/app/connectors_service/connectors/sources/network_drive.py +++ b/app/connectors_service/connectors_service/sources/network_drive.py @@ -43,12 +43,12 @@ ) from wcmatch import glob -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( RetryStrategy, retryable, ) diff --git a/app/connectors_service/connectors/sources/notion.py b/app/connectors_service/connectors_service/sources/notion.py similarity index 99% rename from app/connectors_service/connectors/sources/notion.py rename to app/connectors_service/connectors_service/sources/notion.py index 2d5492599..21288f58e 100644 --- a/app/connectors_service/connectors/sources/notion.py +++ b/app/connectors_service/connectors_service/sources/notion.py @@ -25,7 +25,7 @@ from connectors_sdk.source import BaseDataSource, ConfigurableFieldValueError from notion_client import APIResponseError, AsyncClient -from connectors.utils import CancellableSleeps, RetryStrategy, retryable +from connectors_service.utils import CancellableSleeps, RetryStrategy, retryable RETRIES = 3 RETRY_INTERVAL = 2 diff --git a/app/connectors_service/connectors/sources/onedrive.py b/app/connectors_service/connectors_service/sources/onedrive.py 
similarity index 99% rename from app/connectors_service/connectors/sources/onedrive.py rename to app/connectors_service/connectors_service/sources/onedrive.py index 1a115d8b6..9b01659fc 100644 --- a/app/connectors_service/connectors/sources/onedrive.py +++ b/app/connectors_service/connectors_service/sources/onedrive.py @@ -30,12 +30,12 @@ ) from wcmatch import glob -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CacheWithTimeout, CancellableSleeps, RetryStrategy, diff --git a/app/connectors_service/connectors/sources/oracle.py b/app/connectors_service/connectors_service/sources/oracle.py similarity index 99% rename from app/connectors_service/connectors/sources/oracle.py rename to app/connectors_service/connectors_service/sources/oracle.py index 74e8f6e4e..fa350520d 100644 --- a/app/connectors_service/connectors/sources/oracle.py +++ b/app/connectors_service/connectors_service/sources/oracle.py @@ -18,7 +18,7 @@ from sqlalchemy import create_engine, text from sqlalchemy.exc import ProgrammingError -from connectors.sources.generic_database import ( +from connectors_service.sources.generic_database import ( DEFAULT_FETCH_SIZE, DEFAULT_RETRY_COUNT, Queries, diff --git a/app/connectors_service/connectors/sources/outlook.py b/app/connectors_service/connectors_service/sources/outlook.py similarity index 99% rename from app/connectors_service/connectors/sources/outlook.py rename to app/connectors_service/connectors_service/sources/outlook.py index 22c929ac9..8dc90309a 100644 --- a/app/connectors_service/connectors/sources/outlook.py +++ b/app/connectors_service/connectors_service/sources/outlook.py @@ -35,12 +35,12 @@ from exchangelib.protocol import BaseProtocol, NoVerifyHTTPAdapter from ldap3 import SAFE_SYNC, Connection, Server -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, RetryStrategy, get_pem_format, diff --git a/app/connectors_service/connectors/sources/postgresql.py b/app/connectors_service/connectors_service/sources/postgresql.py similarity index 99% rename from app/connectors_service/connectors/sources/postgresql.py rename to app/connectors_service/connectors_service/sources/postgresql.py index 6c731606e..c203e630f 100644 --- a/app/connectors_service/connectors/sources/postgresql.py +++ b/app/connectors_service/connectors_service/sources/postgresql.py @@ -24,7 +24,7 @@ from sqlalchemy.exc import ProgrammingError from sqlalchemy.ext.asyncio import create_async_engine -from connectors.sources.generic_database import ( +from connectors_service.sources.generic_database import ( DEFAULT_FETCH_SIZE, DEFAULT_RETRY_COUNT, DEFAULT_WAIT_MULTIPLIER, @@ -35,7 +35,7 @@ is_wildcard, map_column_names, ) -from connectors.utils import ( +from connectors_service.utils import ( RetryStrategy, get_pem_format, retryable, diff --git a/app/connectors_service/connectors/sources/redis.py b/app/connectors_service/connectors_service/sources/redis.py similarity index 99% rename from app/connectors_service/connectors/sources/redis.py rename to app/connectors_service/connectors_service/sources/redis.py index 5bd094ab1..57bbdce7b 100644 --- a/app/connectors_service/connectors/sources/redis.py +++ 
b/app/connectors_service/connectors_service/sources/redis.py @@ -22,7 +22,7 @@ iso_utc, ) -from connectors.utils import get_pem_format +from connectors_service.utils import get_pem_format PAGE_SIZE = 1000 diff --git a/app/connectors_service/connectors/sources/s3.py b/app/connectors_service/connectors_service/sources/s3.py similarity index 100% rename from app/connectors_service/connectors/sources/s3.py rename to app/connectors_service/connectors_service/sources/s3.py diff --git a/app/connectors_service/connectors/sources/salesforce.py b/app/connectors_service/connectors_service/sources/salesforce.py similarity index 99% rename from app/connectors_service/connectors/sources/salesforce.py rename to app/connectors_service/connectors_service/sources/salesforce.py index fe5b0e135..7bba9f0d7 100644 --- a/app/connectors_service/connectors/sources/salesforce.py +++ b/app/connectors_service/connectors_service/sources/salesforce.py @@ -27,12 +27,12 @@ iso_utc, ) -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, RetryStrategy, retryable, diff --git a/app/connectors_service/connectors/sources/sandfly.py b/app/connectors_service/connectors_service/sources/sandfly.py similarity index 99% rename from app/connectors_service/connectors/sources/sandfly.py rename to app/connectors_service/connectors_service/sources/sandfly.py index 464819cc7..fef43951a 100644 --- a/app/connectors_service/connectors/sources/sandfly.py +++ b/app/connectors_service/connectors_service/sources/sandfly.py @@ -25,8 +25,8 @@ iso_utc, ) -from connectors.es.sink import OP_INDEX -from connectors.utils import ( +from connectors_service.es.sink import OP_INDEX +from connectors_service.utils import ( CacheWithTimeout, CancellableSleeps, RetryStrategy, diff --git a/app/connectors_service/connectors/sources/servicenow.py b/app/connectors_service/connectors_service/sources/servicenow.py similarity index 99% rename from app/connectors_service/connectors/sources/servicenow.py rename to app/connectors_service/connectors_service/sources/servicenow.py index a69150da3..80989cb13 100644 --- a/app/connectors_service/connectors/sources/servicenow.py +++ b/app/connectors_service/connectors_service/sources/servicenow.py @@ -27,12 +27,12 @@ iso_utc, ) -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, ConcurrentTasks, MemQueue, diff --git a/app/connectors_service/connectors/sources/sharepoint_online.py b/app/connectors_service/connectors_service/sources/sharepoint_online.py similarity index 99% rename from app/connectors_service/connectors/sources/sharepoint_online.py rename to app/connectors_service/connectors_service/sources/sharepoint_online.py index 163504340..6ebb5fcc9 100644 --- a/app/connectors_service/connectors/sources/sharepoint_online.py +++ b/app/connectors_service/connectors_service/sources/sharepoint_online.py @@ -35,13 +35,13 @@ ) from fastjsonschema import JsonSchemaValueException -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.es.sink import OP_DELETE, OP_INDEX -from connectors.utils import ( +from connectors_service.es.sink import OP_DELETE, 
OP_INDEX +from connectors_service.utils import ( CacheWithTimeout, CancellableSleeps, html_to_text, diff --git a/app/connectors_service/connectors/sources/sharepoint_server.py b/app/connectors_service/connectors_service/sources/sharepoint_server.py similarity index 99% rename from app/connectors_service/connectors/sources/sharepoint_server.py rename to app/connectors_service/connectors_service/sources/sharepoint_server.py index f9e0daa7b..f311bf54c 100644 --- a/app/connectors_service/connectors/sources/sharepoint_server.py +++ b/app/connectors_service/connectors_service/sources/sharepoint_server.py @@ -26,12 +26,12 @@ ) from httpx_ntlm import HttpNtlmAuth -from connectors.access_control import ( +from connectors_service.access_control import ( ACCESS_CONTROL, es_access_control_query, prefix_identity, ) -from connectors.utils import ( +from connectors_service.utils import ( CancellableSleeps, ssl_context, ) diff --git a/app/connectors_service/connectors/sources/slack.py b/app/connectors_service/connectors_service/sources/slack.py similarity index 99% rename from app/connectors_service/connectors/sources/slack.py rename to app/connectors_service/connectors_service/sources/slack.py index f1fc703e6..a708a6874 100644 --- a/app/connectors_service/connectors/sources/slack.py +++ b/app/connectors_service/connectors_service/sources/slack.py @@ -14,7 +14,7 @@ from connectors_sdk.logger import logger from connectors_sdk.source import BaseDataSource -from connectors.utils import CancellableSleeps, dict_slice, retryable +from connectors_service.utils import CancellableSleeps, dict_slice, retryable BASE_URL = "https://slack.com/api" diff --git a/app/connectors_service/connectors/sources/zoom.py b/app/connectors_service/connectors_service/sources/zoom.py similarity index 99% rename from app/connectors_service/connectors/sources/zoom.py rename to app/connectors_service/connectors_service/sources/zoom.py index 1dc6b4d47..005e900b2 100644 --- a/app/connectors_service/connectors/sources/zoom.py +++ b/app/connectors_service/connectors_service/sources/zoom.py @@ -18,7 +18,7 @@ iso_utc, ) -from connectors.utils import ( +from connectors_service.utils import ( CacheWithTimeout, CancellableSleeps, RetryStrategy, diff --git a/app/connectors_service/connectors/sync_job_runner.py b/app/connectors_service/connectors_service/sync_job_runner.py similarity index 98% rename from app/connectors_service/connectors/sync_job_runner.py rename to app/connectors_service/connectors_service/sync_job_runner.py index 29fe12e3c..8dc27ab3f 100644 --- a/app/connectors_service/connectors/sync_job_runner.py +++ b/app/connectors_service/connectors_service/sync_job_runner.py @@ -14,10 +14,10 @@ AuthorizationException as ElasticAuthorizationException, ) -from connectors.es.client import License, with_concurrency_control -from connectors.es.index import DocumentNotFoundError -from connectors.es.license import requires_platinum_license -from connectors.es.sink import ( +from connectors_service.es.client import License, with_concurrency_control +from connectors_service.es.index import DocumentNotFoundError +from connectors_service.es.license import requires_platinum_license +from connectors_service.es.sink import ( CREATES_QUEUED, DELETES_QUEUED, OP_INDEX, @@ -25,13 +25,13 @@ SyncOrchestrator, UnsupportedJobType, ) -from connectors.protocol import JobStatus, JobType -from connectors.protocol.connectors import ( +from connectors_service.protocol import JobStatus, JobType +from connectors_service.protocol.connectors import ( 
DELETED_DOCUMENT_COUNT, INDEXED_DOCUMENT_COUNT, INDEXED_DOCUMENT_VOLUME, ) -from connectors.utils import truncate_id +from connectors_service.utils import truncate_id UTF_8 = "utf-8" diff --git a/app/connectors_service/connectors/utils.py b/app/connectors_service/connectors_service/utils.py similarity index 100% rename from app/connectors_service/connectors/utils.py rename to app/connectors_service/connectors_service/utils.py diff --git a/app/connectors_service/pyproject.toml b/app/connectors_service/pyproject.toml index ea19e99dd..3c761c0bb 100644 --- a/app/connectors_service/pyproject.toml +++ b/app/connectors_service/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools"] build-backend = "setuptools.build_meta" [project] -name = "elasticsearch-connectors" +name = "connectors_service" description = "Elasticsearch Connectors" dynamic = ["version"] readme = "README.md" @@ -108,14 +108,81 @@ ftest= [ ] [project.scripts] -elastic-ingest = "connectors.service_cli:main" -fake-kibana = "connectors.kibana:main" -connectors = "connectors.connectors_cli:main" -elastic-agent-connectors = "connectors.agent.cli:main" +elastic-ingest = "connectors_service.service_cli:main" +fake-kibana = "connectors_service.kibana:main" +connectors = "connectors_service.connectors_cli:main" +elastic-agent-connectors = "connectors_service.agent.cli:main" test-connectors = "scripts.testing.cli:main" [tool.setuptools] -packages = ["connectors", "scripts"] +packages = ["connectors_service"] [tool.setuptools.dynamic] -version = {file = "connectors/VERSION"} +version = {file = "connectors_service/VERSION"} + +[tool.pytest.ini_options] +asyncio_mode = "auto" +addopts = [ + "-v" +] +filterwarnings = [ + "error", + # botocore has this warning that is reported by them to be irrelevant + "ignore:.*urllib3.contrib.pyopenssl.*:DeprecationWarning:botocore.*", + # latest main of aioresponses does not have this problem, but current package uses deprecated pkg_resources API + "ignore:.*pkg_resources.*:DeprecationWarning", + # SQLAlchemy uses deprecated APIs internally + "ignore:.*dbapi().*:DeprecationWarning", + # aiogoogle inherits on top of AioHttpSession, which is not recommended by aiohttp + "ignore:Inheritance class AiohttpSession from ClientSession is discouraged:DeprecationWarning", + # aiogoogle inherits on top of RetryableAioHttpSession, which is not recommended by aiohttp + "ignore:Inheritance class RetryableAiohttpSession from ClientSession is discouraged:DeprecationWarning", + # pytest may generate its own warnings in some situations, such as improper usage or deprecated features. + "ignore::pytest.PytestUnraisableExceptionWarning", + # this error comes from 8.16 Elasticsearch client: we use some non-GA features in our CI code + "ignore::elasticsearch.exceptions.GeneralAvailabilityWarning", +] + +[tool.pyright] +pythonVersion = "3.10" +include = ["connectors_service"] +reportMissingImports = false +reportMissingModuleSource = false +reportOptionalMemberAccess = false +exclude = ["**/tests", "**/__pycache__"] +executionEnvironments = [ + { root = "./", venv = ".venv" } +] + +[tool.ruff] +target-version = "py310" + +[tool.ruff.lint] +select = ["A", "ASYNC", "I", "E", "F", "B", "C4", "T10", "T20", "EM", "ISC", "S", "CPY001"] +ignore = ["E501", "ISC001"] +preview = true + +# Allow autofix for all enabled rules (when `--fix` is provided).
+fixable = ["A", "B", "C", "C4", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT", "T10", "T20"] +unfixable = [] + +exclude = [ + ".git", + "__pycache__", + "lib", + "bin", + "include" +] +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.lint.per-file-ignores] +"connectors_service/*" = ["S608"] +"scripts/verify.py" = ["EM"] +"tests/*" = ["B017", "S101", "S", "ASYNC110"] +"tests/conftest.py" = ["EM"] + +[tool.ruff.lint.isort] +known-first-party = ["connectors_service", "tests"] + +[tool.ruff.lint.flake8-copyright] +notice-rgx = "#\n# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one\n# or more contributor license agreements. Licensed under the Elastic License 2.0;\n# you may not use this file except in compliance with the Elastic License 2.0.\n#" diff --git a/app/connectors_service/pyrightconfig.json b/app/connectors_service/pyrightconfig.json deleted file mode 100644 index d2c099858..000000000 --- a/app/connectors_service/pyrightconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "pythonVersion": "3.10", - "include": [ - "connectors" - ], - "reportMissingImports": false, - "reportMissingModuleSource": false, - "reportOptionalMemberAccess": false, - "exclude": [ - "**/tests", - "**/__pycache__" - ], - "executionEnvironments": [ - { - "root": "./", - "venv": "./" - } - ] -} diff --git a/app/connectors_service/pytest.ini b/app/connectors_service/pytest.ini deleted file mode 100644 index 757c4a49a..000000000 --- a/app/connectors_service/pytest.ini +++ /dev/null @@ -1,20 +0,0 @@ -[pytest] -asyncio_mode = auto -addopts = - -v -filterwarnings = - error -; botocore has this warning that is reported by them to be irrelevant - ignore:.*urllib3.contrib.pyopenssl.*:DeprecationWarning:botocore.* -; latest main of aioresponses does not have this problem, but current package uses deprecated pkg_resources API - ignore:.*pkg_resources.*:DeprecationWarning -; SQLAlchemy uses deprecated APIs internally - ignore:.*dbapi().*:DeprecationWarning -; aiogoogle inherits on top of AioHttpSession, which is not recommended by aiohttp - ignore:Inheritance class AiohttpSession from ClientSession is discouraged:DeprecationWarning -; aiogoogle inherits on top of RetryableAioHttpSession, which is not recommended by aiohttp - ignore:Inheritance class RetryableAiohttpSession from ClientSession is discouraged:DeprecationWarning -; pytest may generate its own warnings in some situations, such as improper usage or deprecated features. 
- ignore::pytest.PytestUnraisableExceptionWarning -; this error comes from 8.16 Elasticsearch client: we use some non-GA features in our CI code - ignore::elasticsearch.exceptions.GeneralAvailabilityWarning diff --git a/app/connectors_service/scripts/testing/cli.py b/app/connectors_service/scripts/testing/cli.py index e410a0ad6..10752ca72 100644 --- a/app/connectors_service/scripts/testing/cli.py +++ b/app/connectors_service/scripts/testing/cli.py @@ -30,14 +30,14 @@ ES_DEFAULT_HOST = "http://localhost:9200" ES_DEFAULT_USERNAME = "elastic" ES_DEFAULT_PASSWORD = "changeme" # noqa: S105 -SOURCE_MACHINE_IMAGE = "elastic-connectors-testing-base-image" +SOURCE_MACHINE_IMAGE = "elastic-connectors-service-testing-base-image" IMAGE_FAMILY = "ubuntu-2204-lts" # VMs metadata DIVISION = "engineering" ORG = "search" TEAM = "ingestion" -PROJECT = "connectors-testing" +PROJECT = "connectors-service-testing" @click.group() diff --git a/app/connectors_service/scripts/verify.py b/app/connectors_service/scripts/verify.py index d44002a60..222fbd606 100644 --- a/app/connectors_service/scripts/verify.py +++ b/app/connectors_service/scripts/verify.py @@ -10,7 +10,7 @@ from elasticsearch import AsyncElasticsearch -from connectors.config import load_config +from connectors_service.config import load_config DEFAULT_CONFIG = os.path.join(os.path.dirname(__file__), "..", "config.yml") diff --git a/app/connectors_service/tests/agent/test_agent_config.py b/app/connectors_service/tests/agent/test_agent_config.py index dc79bca25..ece4010ac 100644 --- a/app/connectors_service/tests/agent/test_agent_config.py +++ b/app/connectors_service/tests/agent/test_agent_config.py @@ -5,7 +5,7 @@ # from unittest.mock import MagicMock, Mock -from connectors.agent.config import ConnectorsAgentConfigurationWrapper +from connectors_service.agent.config import ConnectorsAgentConfigurationWrapper CONNECTOR_ID = "test-connector" SERVICE_TYPE = "test-service-type" diff --git a/app/connectors_service/tests/agent/test_cli.py b/app/connectors_service/tests/agent/test_cli.py index ec1a6e41d..afb0ded38 100644 --- a/app/connectors_service/tests/agent/test_cli.py +++ b/app/connectors_service/tests/agent/test_cli.py @@ -8,16 +8,19 @@ import signal from unittest.mock import AsyncMock, patch -from connectors.agent.cli import main +from connectors_service.agent.cli import main -@patch("connectors.agent.cli.ConnectorsAgentComponent", return_value=AsyncMock()) +@patch( + "connectors_service.agent.cli.ConnectorsAgentComponent", return_value=AsyncMock() +) def test_main_responds_to_sigterm(patch_component): async def kill(): await asyncio.sleep(0.2) os.kill(os.getpid(), signal.SIGTERM) loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) loop.create_task(kill()) # No asserts here. 
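One functional change hides in the test_cli.py hunk above: the added `asyncio.set_event_loop(loop)`. `asyncio.new_event_loop()` only creates a loop; it does not register it as the current one, so code under test that resolves the loop via `asyncio.get_event_loop()` can pick up a different loop or, on newer Python versions, emit a DeprecationWarning that the `filterwarnings = ["error", ...]` block in the new pyproject.toml escalates into a test failure. A minimal sketch of the pattern, using only the standard library (the helper name is illustrative, not from the repo):

    import asyncio

    def run_on_fresh_loop(coro):
        # new_event_loop() creates a loop but does not make it current;
        # set_event_loop() registers it so get_event_loop() finds the same loop.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            return loop.run_until_complete(coro)
        finally:
            asyncio.set_event_loop(None)
            loop.close()

    async def main():
        await asyncio.sleep(0.1)
        return "done"

    print(run_on_fresh_loop(main()))  # prints: done

In the test above, the same two lines presumably let the `kill()` task and the component under test share one loop instead of racing on two.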
diff --git a/app/connectors_service/tests/agent/test_component.py b/app/connectors_service/tests/agent/test_component.py index 9bdfe9e17..176767277 100644 --- a/app/connectors_service/tests/agent/test_component.py +++ b/app/connectors_service/tests/agent/test_component.py @@ -8,7 +8,7 @@ import pytest -from connectors.agent.component import ConnectorsAgentComponent +from connectors_service.agent.component import ConnectorsAgentComponent class StubMultiService: @@ -28,8 +28,12 @@ def shutdown(self, sig): @pytest.mark.asyncio -@patch("connectors.agent.component.MultiService", return_value=StubMultiService()) -@patch("connectors.agent.component.new_v2_from_reader", return_value=MagicMock()) +@patch( + "connectors_service.agent.component.MultiService", return_value=StubMultiService() +) +@patch( + "connectors_service.agent.component.new_v2_from_reader", return_value=MagicMock() +) async def test_try_update_without_auth_data( stub_multi_service, patch_new_v2_from_reader ): diff --git a/app/connectors_service/tests/agent/test_connector_record_manager.py b/app/connectors_service/tests/agent/test_connector_record_manager.py index 9657f3943..608f15329 100644 --- a/app/connectors_service/tests/agent/test_connector_record_manager.py +++ b/app/connectors_service/tests/agent/test_connector_record_manager.py @@ -7,10 +7,10 @@ import pytest -from connectors.agent.connector_record_manager import ( +from connectors_service.agent.connector_record_manager import ( ConnectorRecordManager, ) -from connectors.protocol import ConnectorIndex +from connectors_service.protocol import ConnectorIndex @pytest.fixture @@ -40,7 +40,7 @@ async def test_ensure_connector_records_exist_creates_connectors_if_not_exist( random_connector_name_id = "1234" with patch( - "connectors.agent.connector_record_manager.generate_random_id", + "connectors_service.agent.connector_record_manager.generate_random_id", return_value=random_connector_name_id, ): connector_record_manager.connector_index.connector_exists = AsyncMock( diff --git a/app/connectors_service/tests/agent/test_protocol.py b/app/connectors_service/tests/agent/test_protocol.py index ac78eb833..3a45ea595 100644 --- a/app/connectors_service/tests/agent/test_protocol.py +++ b/app/connectors_service/tests/agent/test_protocol.py @@ -10,8 +10,11 @@ from elastic_agent_client.generated import elastic_agent_client_pb2 as proto from google.protobuf.struct_pb2 import Struct -from connectors.agent.config import ConnectorsAgentConfigurationWrapper -from connectors.agent.protocol import ConnectorActionHandler, ConnectorCheckinHandler +from connectors_service.agent.config import ConnectorsAgentConfigurationWrapper +from connectors_service.agent.protocol import ( + ConnectorActionHandler, + ConnectorCheckinHandler, +) @pytest.fixture(autouse=True) @@ -29,7 +32,7 @@ def _string_config_field_mock(value): "connector_name": _string_config_field_mock("test-connector"), "id": _string_config_field_mock("test-id"), } - unit_mock.config.type = "connectors-py" + unit_mock.config.type = "connectors-service-py" return unit_mock @@ -187,7 +190,7 @@ async def test_apply_from_client_when_units_with_multiple_outputs_and_updating_c await checkin_handler.apply_from_client() - # Only ES output from the policy should be used by connectors component + # Only ES output from the policy should be used by connectors service component assert config_wrapper_mock.try_update.called_once() _, called_kwargs = config_wrapper_mock.try_update.call_args called_output_unit = called_kwargs.get("output_unit") @@ -232,7 +235,7 
@@ async def test_apply_from_client_when_units_with_multiple_mixed_outputs_and_upda await checkin_handler.apply_from_client() - # First ES output from the policy should be used by connectors component + # First ES output from the policy should be used by connectors service component assert config_wrapper_mock.try_update.called_once() _, called_kwargs = config_wrapper_mock.try_update.call_args called_output_unit = called_kwargs.get("output_unit") diff --git a/app/connectors_service/tests/agent/test_service_manager.py b/app/connectors_service/tests/agent/test_service_manager.py index 6e4a528ba..103105a98 100644 --- a/app/connectors_service/tests/agent/test_service_manager.py +++ b/app/connectors_service/tests/agent/test_service_manager.py @@ -8,8 +8,8 @@ import pytest -from connectors.agent.service_manager import ConnectorServiceManager -from connectors.services.base import ServiceAlreadyRunningError +from connectors_service.agent.service_manager import ConnectorServiceManager +from connectors_service.services.base import ServiceAlreadyRunningError @pytest.fixture(autouse=True) @@ -42,7 +42,10 @@ def shutdown(self, sig): @pytest.mark.asyncio -@patch("connectors.agent.service_manager.get_services", return_value=StubMultiService()) +@patch( + "connectors_service.agent.service_manager.get_services", + return_value=StubMultiService(), +) async def test_run_and_stop_work_as_intended(patch_get_services, config_mock): service_manager = ConnectorServiceManager(config_mock) @@ -57,7 +60,10 @@ async def stop_service_after_timeout(): @pytest.mark.asyncio -@patch("connectors.agent.service_manager.get_services", return_value=StubMultiService()) +@patch( + "connectors_service.agent.service_manager.get_services", + return_value=StubMultiService(), +) async def test_restart_starts_another_multiservice(patch_get_services, config_mock): service_manager = ConnectorServiceManager(config_mock) @@ -74,7 +80,10 @@ async def stop_service_after_timeout(): @pytest.mark.asyncio -@patch("connectors.agent.service_manager.get_services", return_value=StubMultiService()) +@patch( + "connectors_service.agent.service_manager.get_services", + return_value=StubMultiService(), +) async def test_cannot_run_same_service_manager_twice(patch_get_services, config_mock): service_manager = ConnectorServiceManager(config_mock) diff --git a/app/connectors_service/tests/conftest.py b/app/connectors_service/tests/conftest.py index bad032e12..ddd6668ca 100644 --- a/app/connectors_service/tests/conftest.py +++ b/app/connectors_service/tests/conftest.py @@ -118,7 +118,7 @@ def mock_responses(): @pytest_asyncio.fixture async def patch_cancellable_sleeps(): with patch( - "connectors.utils.CancellableSleeps.sleep", return_value=AsyncMock() + "connectors_service.utils.CancellableSleeps.sleep", return_value=AsyncMock() ) as new_mock: yield new_mock diff --git a/app/connectors_service/tests/es/test_cli_client.py b/app/connectors_service/tests/es/test_cli_client.py index 65393165b..49d1c7bd2 100644 --- a/app/connectors_service/tests/es/test_cli_client.py +++ b/app/connectors_service/tests/es/test_cli_client.py @@ -3,8 +3,8 @@ # or more contributor license agreements. Licensed under the Elastic License 2.0; # you may not use this file except in compliance with the Elastic License 2.0. 
# -from connectors import __version__ -from connectors.es.cli_client import CLIClient +from connectors_service import __version__ +from connectors_service.es.cli_client import CLIClient def test_overrides_user_agent_header(): @@ -17,5 +17,5 @@ def test_overrides_user_agent_header(): assert ( cli_client.client._headers["user-agent"] - == f"elastic-connectors-{__version__}/cli" + == f"elastic-connectors-service-{__version__}/cli" ) diff --git a/app/connectors_service/tests/es/test_client.py b/app/connectors_service/tests/es/test_client.py index ea9b48e2e..8d0d4735b 100644 --- a/app/connectors_service/tests/es/test_client.py +++ b/app/connectors_service/tests/es/test_client.py @@ -11,8 +11,8 @@ import elasticsearch import pytest -from connectors import __version__ -from connectors.es.client import ( +from connectors_service import __version__ +from connectors_service.es.client import ( ESClient, License, RetryInterruptedError, @@ -253,7 +253,7 @@ def test_sets_user_agent(self): assert ( es_client.client._headers["user-agent"] - == f"elastic-connectors-{__version__}/service" + == f"elastic-connectors-service-{__version__}/service" ) diff --git a/app/connectors_service/tests/es/test_document.py b/app/connectors_service/tests/es/test_document.py index 65b7f94a6..b35fefb62 100644 --- a/app/connectors_service/tests/es/test_document.py +++ b/app/connectors_service/tests/es/test_document.py @@ -7,7 +7,7 @@ import pytest -from connectors.es import ESDocument, InvalidDocumentSourceError +from connectors_service.es import ESDocument, InvalidDocumentSourceError @pytest.mark.parametrize( diff --git a/app/connectors_service/tests/es/test_index.py b/app/connectors_service/tests/es/test_index.py index 66653903c..ce183ed58 100644 --- a/app/connectors_service/tests/es/test_index.py +++ b/app/connectors_service/tests/es/test_index.py @@ -8,7 +8,7 @@ import pytest from elasticsearch import ApiError, ConflictError -from connectors.es.index import DocumentNotFoundError, ESApi, ESIndex +from connectors_service.es.index import DocumentNotFoundError, ESApi, ESIndex headers = {"X-Elastic-Product": "Elasticsearch"} config = { diff --git a/app/connectors_service/tests/es/test_license.py b/app/connectors_service/tests/es/test_license.py index bb7d0f97f..c2b21a82a 100644 --- a/app/connectors_service/tests/es/test_license.py +++ b/app/connectors_service/tests/es/test_license.py @@ -7,8 +7,8 @@ import pytest -from connectors.es.license import requires_platinum_license -from connectors.protocol import JobType +from connectors_service.es.license import requires_platinum_license +from connectors_service.protocol import JobType def mock_source_klass(is_premium): diff --git a/app/connectors_service/tests/es/test_management_client.py b/app/connectors_service/tests/es/test_management_client.py index 18ac77858..e5fa2287b 100644 --- a/app/connectors_service/tests/es/test_management_client.py +++ b/app/connectors_service/tests/es/test_management_client.py @@ -13,7 +13,7 @@ NotFoundError as ElasticNotFoundError, ) -from connectors.es.management_client import ESManagementClient +from connectors_service.es.management_client import ESManagementClient from tests.commons import AsyncIterator @@ -156,7 +156,7 @@ async def test_yield_existing_documents_metadata_when_index_does_not_exist( ] with mock.patch( - "connectors.es.management_client.async_scan", + "connectors_service.es.management_client.async_scan", return_value=AsyncIterator(records), ): ids = [] @@ -180,7 +180,7 @@ async def 
test_yield_existing_documents_metadata_when_index_exists( ] with mock.patch( - "connectors.es.management_client.async_scan", + "connectors_service.es.management_client.async_scan", return_value=AsyncIterator(records), ): ids = [] diff --git a/app/connectors_service/tests/ftest.sh b/app/connectors_service/tests/ftest.sh index 171b34103..f87abc4f5 100755 --- a/app/connectors_service/tests/ftest.sh +++ b/app/connectors_service/tests/ftest.sh @@ -14,7 +14,7 @@ VIRTUAL_ENV="$ROOT_DIR/.venv" PLATFORM='unknown' MAX_RSS="200M" MAX_DURATION=600 -CONNECTORS_VERSION=$(cat "$ROOT_DIR/connectors/VERSION") +CONNECTORS_VERSION=$(cat "$ROOT_DIR/connectors_service/VERSION") ARTIFACT_BASE_URL="https://artifacts-snapshot.elastic.co" # Retry configuration CURL_MAX_RETRIES=3 diff --git a/app/connectors_service/tests/protocol/test_connectors.py b/app/connectors_service/tests/protocol/test_connectors.py index b3714c208..a32fceccf 100644 --- a/app/connectors_service/tests/protocol/test_connectors.py +++ b/app/connectors_service/tests/protocol/test_connectors.py @@ -21,8 +21,8 @@ from connectors_sdk.utils import Features from elasticsearch import ApiError, ConflictError, NotFoundError -from connectors.config import load_config -from connectors.protocol import ( +from connectors_service.config import load_config +from connectors_service.protocol import ( IDLE_JOBS_THRESHOLD, JOB_NOT_FOUND_ERROR, Connector, @@ -40,8 +40,8 @@ SyncJob, SyncJobIndex, ) -from connectors.protocol.connectors import ProtocolError -from connectors.utils import ACCESS_CONTROL_INDEX_PREFIX, iso_utc +from connectors_service.protocol.connectors import ProtocolError +from connectors_service.utils import ACCESS_CONTROL_INDEX_PREFIX, iso_utc from tests.commons import AsyncIterator HERE = os.path.dirname(__file__) @@ -650,7 +650,7 @@ async def test_sync_done(job, expected_doc_source_update): (True, mock_next_run, JobType.INCREMENTAL), ], ) -@patch("connectors.protocol.connectors.next_run") +@patch("connectors_service.protocol.connectors.next_run") async def test_connector_next_sync( next_run, scheduling_enabled, expected_next_sync, job_type ): @@ -2036,7 +2036,7 @@ def test_incremental_sync_enabled(features_json, incremental_sync_enabled): JobTriggerMethod.SCHEDULED, ], ) -@patch("connectors.protocol.SyncJobIndex.index") +@patch("connectors_service.protocol.SyncJobIndex.index") async def test_create_job(index_method, trigger_method, set_env): connector = Mock() connector.id = "id" @@ -2106,9 +2106,9 @@ async def test_create_job_with_connector_api(trigger_method, job_type, set_env): (JobType.ACCESS_CONTROL, f"{ACCESS_CONTROL_INDEX_PREFIX}{INDEX_NAME}"), ], ) -@patch("connectors.protocol.SyncJobIndex.index") +@patch("connectors_service.protocol.SyncJobIndex.index") @patch( - "connectors.utils.ACCESS_CONTROL_INDEX_PREFIX", + "connectors_service.utils.ACCESS_CONTROL_INDEX_PREFIX", ACCESS_CONTROL_INDEX_PREFIX, ) async def test_create_jobs_with_correct_target_index( @@ -2163,7 +2163,7 @@ async def test_create_jobs_with_correct_target_index( ), ], ) -@patch("connectors.protocol.SyncJobIndex.get_all_docs") +@patch("connectors_service.protocol.SyncJobIndex.get_all_docs") async def test_pending_jobs( get_all_docs, job_types, job_type_query, remote_call, set_env ): @@ -2207,7 +2207,7 @@ async def test_pending_jobs( @pytest.mark.asyncio -@patch("connectors.protocol.SyncJobIndex.get_all_docs") +@patch("connectors_service.protocol.SyncJobIndex.get_all_docs") async def test_orphaned_idle_jobs(get_all_docs, set_env): job = Mock() get_all_docs.return_value = 
diff --git a/app/connectors_service/tests/protocol/test_connectors.py b/app/connectors_service/tests/protocol/test_connectors.py
index b3714c208..a32fceccf 100644
--- a/app/connectors_service/tests/protocol/test_connectors.py
+++ b/app/connectors_service/tests/protocol/test_connectors.py
@@ -21,8 +21,8 @@
 from connectors_sdk.utils import Features
 from elasticsearch import ApiError, ConflictError, NotFoundError

-from connectors.config import load_config
-from connectors.protocol import (
+from connectors_service.config import load_config
+from connectors_service.protocol import (
     IDLE_JOBS_THRESHOLD,
     JOB_NOT_FOUND_ERROR,
     Connector,
@@ -40,8 +40,8 @@
     SyncJob,
     SyncJobIndex,
 )
-from connectors.protocol.connectors import ProtocolError
-from connectors.utils import ACCESS_CONTROL_INDEX_PREFIX, iso_utc
+from connectors_service.protocol.connectors import ProtocolError
+from connectors_service.utils import ACCESS_CONTROL_INDEX_PREFIX, iso_utc
 from tests.commons import AsyncIterator

 HERE = os.path.dirname(__file__)
@@ -650,7 +650,7 @@ async def test_sync_done(job, expected_doc_source_update):
         (True, mock_next_run, JobType.INCREMENTAL),
     ],
 )
-@patch("connectors.protocol.connectors.next_run")
+@patch("connectors_service.protocol.connectors.next_run")
 async def test_connector_next_sync(
     next_run, scheduling_enabled, expected_next_sync, job_type
 ):
@@ -2036,7 +2036,7 @@ def test_incremental_sync_enabled(features_json, incremental_sync_enabled):
         JobTriggerMethod.SCHEDULED,
     ],
 )
-@patch("connectors.protocol.SyncJobIndex.index")
+@patch("connectors_service.protocol.SyncJobIndex.index")
 async def test_create_job(index_method, trigger_method, set_env):
     connector = Mock()
     connector.id = "id"
@@ -2106,9 +2106,9 @@ async def test_create_job_with_connector_api(trigger_method, job_type, set_env):
         (JobType.ACCESS_CONTROL, f"{ACCESS_CONTROL_INDEX_PREFIX}{INDEX_NAME}"),
     ],
 )
-@patch("connectors.protocol.SyncJobIndex.index")
+@patch("connectors_service.protocol.SyncJobIndex.index")
 @patch(
-    "connectors.utils.ACCESS_CONTROL_INDEX_PREFIX",
+    "connectors_service.utils.ACCESS_CONTROL_INDEX_PREFIX",
     ACCESS_CONTROL_INDEX_PREFIX,
 )
 async def test_create_jobs_with_correct_target_index(
@@ -2163,7 +2163,7 @@ async def test_create_jobs_with_correct_target_index(
         ),
     ],
 )
-@patch("connectors.protocol.SyncJobIndex.get_all_docs")
+@patch("connectors_service.protocol.SyncJobIndex.get_all_docs")
 async def test_pending_jobs(
     get_all_docs, job_types, job_type_query, remote_call, set_env
 ):
@@ -2207,7 +2207,7 @@ async def test_pending_jobs(


 @pytest.mark.asyncio
-@patch("connectors.protocol.SyncJobIndex.get_all_docs")
+@patch("connectors_service.protocol.SyncJobIndex.get_all_docs")
 async def test_orphaned_idle_jobs(get_all_docs, set_env):
     job = Mock()
     get_all_docs.return_value = AsyncIterator([job])
@@ -2241,7 +2241,7 @@ async def test_orphaned_idle_jobs(get_all_docs, set_env):


 @pytest.mark.asyncio
-@patch("connectors.protocol.SyncJobIndex.get_all_docs")
+@patch("connectors_service.protocol.SyncJobIndex.get_all_docs")
 async def test_idle_jobs(get_all_docs, set_env):
     job = Mock()
     get_all_docs.return_value = AsyncIterator([job])
diff --git a/app/connectors_service/tests/services/test_base.py b/app/connectors_service/tests/services/test_base.py
index 0abb49e4b..c148ca64d 100644
--- a/app/connectors_service/tests/services/test_base.py
+++ b/app/connectors_service/tests/services/test_base.py
@@ -12,8 +12,8 @@

 import pytest

-from connectors.config import load_config
-from connectors.services.base import BaseService, MultiService, get_services
+from connectors_service.config import load_config
+from connectors_service.services.base import BaseService, MultiService, get_services

 HERE = os.path.dirname(__file__)
 FIXTURES_DIR = os.path.abspath(os.path.join(HERE, "..", "fixtures"))
diff --git a/app/connectors_service/tests/services/test_job_cleanup.py b/app/connectors_service/tests/services/test_job_cleanup.py
index 33048c369..b4edac93a 100644
--- a/app/connectors_service/tests/services/test_job_cleanup.py
+++ b/app/connectors_service/tests/services/test_job_cleanup.py
@@ -8,7 +8,7 @@

 import pytest

-from connectors.services.job_cleanup import IDLE_JOB_ERROR, JobCleanUpService
+from connectors_service.services.job_cleanup import IDLE_JOB_ERROR, JobCleanUpService
 from tests.commons import AsyncIterator
 from tests.services.test_base import create_and_run_service
@@ -47,11 +47,11 @@ def mock_sync_job(


 @pytest.mark.asyncio
-@patch("connectors.protocol.SyncJobIndex.idle_jobs")
-@patch("connectors.protocol.SyncJobIndex.orphaned_idle_jobs")
-@patch("connectors.protocol.ConnectorIndex.fetch_by_id")
-@patch("connectors.protocol.ConnectorIndex.supported_connectors")
-@patch("connectors.protocol.ConnectorIndex.all_connectors")
+@patch("connectors_service.protocol.SyncJobIndex.idle_jobs")
+@patch("connectors_service.protocol.SyncJobIndex.orphaned_idle_jobs")
+@patch("connectors_service.protocol.ConnectorIndex.fetch_by_id")
+@patch("connectors_service.protocol.ConnectorIndex.supported_connectors")
+@patch("connectors_service.protocol.ConnectorIndex.all_connectors")
 async def test_cleanup_jobs(
     all_connectors,
     supported_connectors,
diff --git a/app/connectors_service/tests/services/test_job_execution.py b/app/connectors_service/tests/services/test_job_execution.py
index e78633aad..3c39bcbe0 100644
--- a/app/connectors_service/tests/services/test_job_execution.py
+++ b/app/connectors_service/tests/services/test_job_execution.py
@@ -8,13 +8,13 @@

 import pytest

-from connectors.es.client import License
-from connectors.es.index import DocumentNotFoundError
-from connectors.protocol import JobStatus, JobType
-from connectors.services.access_control_sync_job_execution import (
+from connectors_service.es.client import License
+from connectors_service.es.index import DocumentNotFoundError
+from connectors_service.protocol import JobStatus, JobType
+from connectors_service.services.access_control_sync_job_execution import (
     AccessControlSyncJobExecutionService,
 )
-from connectors.services.content_sync_job_execution import (
+from connectors_service.services.content_sync_job_execution import (
     ContentSyncJobExecutionService,
 )
 from tests.commons import AsyncIterator
@@ -24,7 +24,7 @@
 @pytest.fixture(autouse=True)
 def connector_index_mock():
     with patch(
-        "connectors.services.job_execution.ConnectorIndex"
+        "connectors_service.services.job_execution.ConnectorIndex"
     ) as connector_index_klass_mock:
         connector_index_mock = Mock()
         connector_index_mock.stop_waiting = Mock()
@@ -40,7 +40,7 @@ def connector_index_mock():
 @pytest.fixture(autouse=True)
 def sync_job_index_mock():
     with patch(
-        "connectors.services.job_execution.SyncJobIndex"
+        "connectors_service.services.job_execution.SyncJobIndex"
     ) as sync_job_index_klass_mock:
         sync_job_index_mock = Mock()
         sync_job_index_mock.stop_waiting = Mock()
@@ -62,7 +62,7 @@ def concurrent_task_mock():
 @pytest.fixture(autouse=True)
 def concurrent_tasks_mocks():
     with patch(
-        "connectors.services.job_execution.ConcurrentTasks"
+        "connectors_service.services.job_execution.ConcurrentTasks"
     ) as concurrent_tasks_klass_mock:
         concurrent_content_syncs_tasks_mock = concurrent_task_mock()
         concurrent_tasks_klass_mock.return_value = concurrent_content_syncs_tasks_mock
@@ -73,7 +73,7 @@ def concurrent_tasks_mocks():
 @pytest.fixture(autouse=True)
 def sync_job_runner_mock():
     with patch(
-        "connectors.services.job_execution.SyncJobRunner"
+        "connectors_service.services.job_execution.SyncJobRunner"
     ) as sync_job_runner_klass_mock:
         sync_job_runner_mock = Mock()
         sync_job_runner_mock.execute = AsyncMock()
diff --git a/app/connectors_service/tests/services/test_job_scheduling.py b/app/connectors_service/tests/services/test_job_scheduling.py
index 7aacf1f91..1c04527a7 100644
--- a/app/connectors_service/tests/services/test_job_scheduling.py
+++ b/app/connectors_service/tests/services/test_job_scheduling.py
@@ -11,9 +11,9 @@
 from connectors_sdk.source import ConfigurableFieldValueError, DataSourceConfiguration
 from elasticsearch import ConflictError

-from connectors.es.client import License
-from connectors.es.index import DocumentNotFoundError
-from connectors.protocol import (
+from connectors_service.es.client import License
+from connectors_service.es.index import DocumentNotFoundError
+from connectors_service.protocol import (
     DataSourceError,
     JobTriggerMethod,
     JobType,
@@ -21,7 +21,7 @@
     ServiceTypeNotSupportedError,
     Status,
 )
-from connectors.services.job_scheduling import JobSchedulingService
+from connectors_service.services.job_scheduling import JobSchedulingService
 from tests.commons import AsyncIterator
 from tests.services.test_base import create_and_run_service
@@ -31,7 +31,7 @@
 @pytest.fixture(autouse=True)
 def connector_index_mock():
     with patch(
-        "connectors.services.job_scheduling.ConnectorIndex"
+        "connectors_service.services.job_scheduling.ConnectorIndex"
     ) as connector_index_klass_mock:
         connector_index_mock = Mock()
         connector_index_mock.stop_waiting = Mock()
@@ -47,7 +47,7 @@ def connector_index_mock():
 @pytest.fixture(autouse=True)
 def sync_job_index_mock():
     with patch(
-        "connectors.services.job_scheduling.SyncJobIndex"
+        "connectors_service.services.job_scheduling.SyncJobIndex"
     ) as sync_job_index_klass_mock:
         sync_job_index_mock = Mock()
         sync_job_index_mock.create = AsyncMock(return_value="1")
@@ -348,7 +348,7 @@ async def test_run_when_sync_fails_then_continues_service_execution(


 @pytest.mark.asyncio
-@patch("connectors.services.job_scheduling.get_source_klass")
+@patch("connectors_service.services.job_scheduling.get_source_klass")
 async def test_run_when_connector_fields_are_invalid(
     get_source_klass_mock, connector_index_mock, set_env
 ):
@@ -380,7 +380,7 @@ def _source_klass(config):


 @pytest.mark.asyncio
-@patch("connectors.services.job_scheduling.get_source_klass")
+@patch("connectors_service.services.job_scheduling.get_source_klass")
 async def test_run_when_connector_failed_validation_then_succeeded(
     get_source_klass_mock, connector_index_mock, set_env
 ):
@@ -419,7 +419,7 @@ def _error_once():


 @pytest.mark.asyncio
-@patch("connectors.services.job_scheduling.get_source_klass")
+@patch("connectors_service.services.job_scheduling.get_source_klass")
 async def test_run_when_connector_ping_fails(
     get_source_klass_mock, connector_index_mock, set_env
 ):
@@ -451,7 +451,7 @@ def _source_klass(config):


 @pytest.mark.asyncio
-@patch("connectors.services.job_scheduling.get_source_klass")
+@patch("connectors_service.services.job_scheduling.get_source_klass")
 async def test_run_when_connector_validate_config_fails(
     get_source_klass_mock, connector_index_mock, set_env
 ):
@@ -504,7 +504,7 @@ async def test_initial_loop_run_heartbeat_only_once(


 @pytest.mark.asyncio
-@patch("connectors.services.job_scheduling.get_source_klass")
+@patch("connectors_service.services.job_scheduling.get_source_klass")
 async def test_run_when_validation_is_very_slow(
     get_source_klass_mock, connector_index_mock, set_env
 ):
diff --git a/app/connectors_service/tests/sources/fixtures/dir/fixture.py b/app/connectors_service/tests/sources/fixtures/dir/fixture.py
index 791e7f1f9..421d18f2b 100644
--- a/app/connectors_service/tests/sources/fixtures/dir/fixture.py
+++ b/app/connectors_service/tests/sources/fixtures/dir/fixture.py
@@ -14,7 +14,7 @@
 DATA_SIZE = os.environ.get("DATA_SIZE", "small").lower()

 if DATA_SIZE == "small":
-    REPO = "connectors-python"
+    REPO = "connectors-service-python"
 elif DATA_SIZE == "medium":
     REPO = "elasticsearch"
 else:
diff --git a/app/connectors_service/tests/sources/fixtures/fixture.py b/app/connectors_service/tests/sources/fixtures/fixture.py
index 8332f6958..fb28dc29c 100644
--- a/app/connectors_service/tests/sources/fixtures/fixture.py
+++ b/app/connectors_service/tests/sources/fixtures/fixture.py
@@ -21,8 +21,8 @@
 from elastic_transport import ConnectionTimeout
 from elasticsearch import ApiError

-from connectors.es.management_client import ESManagementClient
-from connectors.utils import (
+from connectors_service.es.management_client import ESManagementClient
+from connectors_service.utils import (
     RetryStrategy,
     time_to_sleep_between_retries,
 )
diff --git a/app/connectors_service/tests/sources/test_atlassian.py b/app/connectors_service/tests/sources/test_atlassian.py
index 3344f222c..987a5c6d6 100644
--- a/app/connectors_service/tests/sources/test_atlassian.py
+++ b/app/connectors_service/tests/sources/test_atlassian.py
@@ -13,11 +13,11 @@
     SyncRuleValidationResult,
 )

-from connectors.sources.atlassian import (
+from connectors_service.sources.atlassian import (
     AtlassianAccessControl,
     AtlassianAdvancedRulesValidator,
 )
-from connectors.sources.jira import JiraClient, JiraDataSource
+from connectors_service.sources.jira import JiraClient, JiraDataSource
 from tests.sources.support import create_source
diff --git a/app/connectors_service/tests/sources/test_azure_blob_storage.py b/app/connectors_service/tests/sources/test_azure_blob_storage.py
index 6a3e79b05..dcdaa316d 100644
--- a/app/connectors_service/tests/sources/test_azure_blob_storage.py
+++ b/app/connectors_service/tests/sources/test_azure_blob_storage.py
@@ -14,7 +14,7 @@
 from azure.storage.blob.aio import BlobServiceClient, ContainerClient
 from connectors_sdk.source import ConfigurableFieldValueError

-from connectors.sources.azure_blob_storage import AzureBlobStorageDataSource
+from connectors_service.sources.azure_blob_storage import AzureBlobStorageDataSource
 from tests.commons import AsyncIterator
 from tests.sources.support import create_source
diff --git a/app/connectors_service/tests/sources/test_box.py b/app/connectors_service/tests/sources/test_box.py
index be5f06d84..008bdd85d 100644
--- a/app/connectors_service/tests/sources/test_box.py
+++ b/app/connectors_service/tests/sources/test_box.py
@@ -13,7 +13,7 @@
 from aiohttp.client_exceptions import ClientResponseError
 from connectors_sdk.source import ConfigurableFieldValueError

-from connectors.sources.box import FINISHED, BoxDataSource, NotFound, TokenError
+from connectors_service.sources.box import FINISHED, BoxDataSource, NotFound, TokenError
 from tests.commons import AsyncIterator
 from tests.sources.support import create_source
@@ -216,7 +216,7 @@ async def test_set_access_token_raise_token_error_on_exception():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries")
+@patch("connectors_service.utils.time_to_sleep_between_retries")
 @pytest.mark.parametrize(
     "status_code, exception",
     [
@@ -262,7 +262,7 @@ async def test_ping_with_successful_connection():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries")
+@patch("connectors_service.utils.time_to_sleep_between_retries")
 async def test_ping_raises_on_unsuccessful_connection(
     mock_time_to_sleep_between_retries,
 ):
@@ -370,7 +370,7 @@ async def test_fetch():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries")
+@patch("connectors_service.utils.time_to_sleep_between_retries")
 async def test_fetch_returns_none_on_client_exception(
     mock_time_to_sleep_between_retries,
 ):
diff --git a/app/connectors_service/tests/sources/test_confluence.py b/app/connectors_service/tests/sources/test_confluence.py
index c70d9df69..465fd4b12 100644
--- a/app/connectors_service/tests/sources/test_confluence.py
+++ b/app/connectors_service/tests/sources/test_confluence.py
@@ -19,8 +19,8 @@
 from connectors_sdk.source import ConfigurableFieldValueError
 from freezegun import freeze_time

-from connectors.access_control import DLS_QUERY
-from connectors.sources.confluence import (
+from connectors_service.access_control import DLS_QUERY
+from connectors_service.sources.confluence import (
     CONFLUENCE_CLOUD,
     CONFLUENCE_DATA_CENTER,
     CONFLUENCE_SERVER,
@@ -33,7 +33,7 @@
     NotFound,
     Unauthorized,
 )
-from connectors.utils import ssl_context
+from connectors_service.utils import ssl_context
 from tests.commons import AsyncIterator
 from tests.sources.support import create_source
@@ -721,7 +721,7 @@ async def test_remote_validation_when_space_keys_are_unavailable_then_raise_exce
     async_response.json.return_value = RESPONSE_SPACE_KEYS

     with mock.patch(
-        "connectors.sources.confluence.ConfluenceClient.api_call",
+        "connectors_service.sources.confluence.ConfluenceClient.api_call",
         return_value=async_response,
     ):
         with pytest.raises(
@@ -812,7 +812,7 @@ async def test_validate_configuration_for_ssl_enabled():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_429_status():
     initial_response = ClientResponseError(None, None)
     initial_response.status = 429
@@ -835,7 +835,7 @@ async def test_get_with_429_status():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_429_status_without_retry_after_header():
     payload = {"value": "Test rate limit"}
     async_mock_response = AsyncMock()
@@ -854,7 +854,7 @@ async def test_get_with_429_status_without_retry_after_header():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_400_status():
     error = ClientResponseError(None, None)
     error.status = 400
@@ -872,7 +872,7 @@ async def test_get_with_400_status():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_401_status():
     error = ClientResponseError(None, None)
     error.status = 401
@@ -890,7 +890,7 @@ async def test_get_with_401_status():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_403_status():
     error = ClientResponseError(None, None)
     error.status = 403
@@ -925,7 +925,7 @@ async def test_get_with_404_status():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_500_status():
     error = ClientResponseError(None, None)
     error.status = 500
@@ -1457,7 +1457,7 @@ async def test_fetch_confluence_server_users():
     async_response = AsyncMock()
     async_response.json.return_value = {"start": 0, "users": []}
     with mock.patch(
-        "connectors.sources.confluence.ConfluenceClient.api_call",
+        "connectors_service.sources.confluence.ConfluenceClient.api_call",
         return_value=async_response,
     ):
         async for user in source.confluence_client.fetch_confluence_server_users():
@@ -1646,7 +1646,7 @@ async def test_fetch_server_space_permission():
     async_response = AsyncMock()
     async_response.json.return_value = payload
     with mock.patch(
-        "connectors.sources.confluence.ConfluenceClient.api_call",
+        "connectors_service.sources.confluence.ConfluenceClient.api_call",
         return_value=async_response,
     ):
         expected_response = await source.fetch_server_space_permission(
diff --git a/app/connectors_service/tests/sources/test_directory.py b/app/connectors_service/tests/sources/test_directory.py
index e65f7d2b4..82fdc73aa 100644
--- a/app/connectors_service/tests/sources/test_directory.py
+++ b/app/connectors_service/tests/sources/test_directory.py
@@ -5,7 +5,7 @@
 #
 import pytest

-from connectors.sources.directory import DEFAULT_DIR, DirectoryDataSource
+from connectors_service.sources.directory import DEFAULT_DIR, DirectoryDataSource
 from tests.sources.support import assert_basics, create_source
diff --git a/app/connectors_service/tests/sources/test_dropbox.py b/app/connectors_service/tests/sources/test_dropbox.py
index 5bf5248bf..f8e7647cb 100644
--- a/app/connectors_service/tests/sources/test_dropbox.py
+++ b/app/connectors_service/tests/sources/test_dropbox.py
@@ -22,7 +22,7 @@
 from connectors_sdk.source import ConfigurableFieldValueError
 from freezegun import freeze_time

-from connectors.sources.dropbox import (
+from connectors_service.sources.dropbox import (
     AUTHENTICATED_ADMIN_URL,
     DropBoxAdvancedRulesValidator,
     DropboxClient,
@@ -637,7 +637,7 @@ async def test_validate_configuration_with_valid_path():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_validate_configuration_with_invalid_path_then_raise_exception():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -674,7 +674,7 @@ async def test_set_access_token():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_set_access_token_with_incorrect_app_key_then_raise_exception():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -694,7 +694,7 @@ async def test_set_access_token_with_incorrect_app_key_then_raise_exception():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_set_access_token_with_incorrect_refresh_token_then_raise_exception():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -748,7 +748,7 @@ async def test_ping():


 @pytest.mark.asyncio
-@patch("connectors.sources.dropbox.RETRY_INTERVAL", 0)
+@patch("connectors_service.sources.dropbox.RETRY_INTERVAL", 0)
 async def test_ping_when_server_timeout_error_raises():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -761,7 +761,7 @@ async def test_ping_when_server_timeout_error_raises():


 @pytest.mark.asyncio
-@patch("connectors.sources.dropbox.RETRY_INTERVAL", 0)
+@patch("connectors_service.sources.dropbox.RETRY_INTERVAL", 0)
 async def test_ping_when_client_response_error_occurs():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -783,7 +783,7 @@ async def test_ping_when_client_response_error_occurs():


 @pytest.mark.asyncio
-@patch("connectors.sources.dropbox.RETRY_INTERVAL", 0)
+@patch("connectors_service.sources.dropbox.RETRY_INTERVAL", 0)
 async def test_ping_when_client_response_error_occur_with_unexpected_url():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -805,7 +805,7 @@ async def test_ping_when_client_response_error_occur_with_unexpected_url():


 @pytest.mark.asyncio
-@patch("connectors.sources.dropbox.RETRY_INTERVAL", 0)
+@patch("connectors_service.sources.dropbox.RETRY_INTERVAL", 0)
 async def test_api_call_negative():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -909,12 +909,12 @@ async def test_set_access_token_when_token_expires_at_is_str():

 @pytest.fixture
 def patch_default_wait_multiplier():
-    with mock.patch("connectors.sources.dropbox.RETRY_INTERVAL", 0):
+    with mock.patch("connectors_service.sources.dropbox.RETRY_INTERVAL", 0):
         yield


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_api_call_when_token_is_expired():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
@@ -980,7 +980,7 @@ async def test_api_call_when_status_429_exception():


 @pytest.mark.asyncio
-@patch("connectors.sources.dropbox.DEFAULT_RETRY_AFTER", 0)
+@patch("connectors_service.sources.dropbox.DEFAULT_RETRY_AFTER", 0)
 async def test_api_call_when_status_429_exception_without_retry_after_header():
     async with create_source(DropboxDataSource) as source:
         setup_dropbox(source)
diff --git a/app/connectors_service/tests/sources/test_generic_database.py b/app/connectors_service/tests/sources/test_generic_database.py
index 6103f9bcf..f2ec3708b 100644
--- a/app/connectors_service/tests/sources/test_generic_database.py
+++ b/app/connectors_service/tests/sources/test_generic_database.py
@@ -9,13 +9,13 @@

 import pytest

-from connectors.sources.generic_database import (
+from connectors_service.sources.generic_database import (
     configured_tables,
     fetch,
     is_wildcard,
     map_column_names,
 )
-from connectors.sources.mssql import MSSQLQueries
+from connectors_service.sources.mssql import MSSQLQueries

 SCHEMA = "dbo"
 TABLE = "emp_table"
diff --git a/app/connectors_service/tests/sources/test_github.py b/app/connectors_service/tests/sources/test_github.py
index ba7b71496..32d54ca91 100644
--- a/app/connectors_service/tests/sources/test_github.py
+++ b/app/connectors_service/tests/sources/test_github.py
@@ -17,8 +17,8 @@
 from connectors_sdk.utils import Features
 from gidgethub.abc import BadGraphQLRequest, GraphQLAuthorizationFailure, QueryError

-from connectors.access_control import DLS_QUERY
-from connectors.sources.github import (
+from connectors_service.access_control import DLS_QUERY
+from connectors_service.sources.github import (
     GITHUB_APP,
     PERSONAL_ACCESS_TOKEN,
     REPOSITORY_OBJECT,
@@ -888,7 +888,7 @@ async def test_get_user_repos():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_ping_with_unsuccessful_connection():
     async with create_github_source() as source:
         with patch.object(
@@ -930,7 +930,7 @@ async def test_validate_config_with_extra_scopes_token(patch_logger):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_validate_config_with_inaccessible_repositories_then_raise():
     async with create_github_source(
         repos="repo1m owner1/repo1, repo2, owner2/repo2"
@@ -944,7 +944,7 @@ async def test_validate_config_with_inaccessible_repositories_then_raise():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_invalid_repos_with_max_retries():
     async with create_github_source() as source:
         with pytest.raises(Exception):
@@ -953,7 +953,7 @@ async def test_get_invalid_repos_with_max_retries():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_response_with_rate_limit_exceeded():
     async with create_github_source() as source:
         with patch.object(
@@ -990,7 +990,7 @@ async def test_get_retry_after():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 @pytest.mark.parametrize(
     "exceptions, raises",
     [
@@ -1018,7 +1018,7 @@ async def test_graphql_with_BadGraphQLRequest(exceptions, raises):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 @pytest.mark.parametrize(
     "exceptions, raises, is_raised",
     [
@@ -1068,7 +1068,7 @@ async def test_graphql_with_QueryError(exceptions, raises, is_raised):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_graphql_with_unauthorized():
     async with create_github_source() as source:
         source.github_client._get_client.graphql = Mock(
@@ -1394,7 +1394,7 @@ async def test_fetch_repos_github_app(repo_type, repos, expected_repos):
     )
     jwt_response = {"token": "changeme"}
     with patch(
-        "connectors.sources.github.get_installation_access_token",
+        "connectors_service.sources.github.get_installation_access_token",
         return_value=jwt_response,
     ):
         actual_repos = [repo async for repo in source._fetch_repos()]
@@ -2021,7 +2021,7 @@ async def test_get_access_control_github_app():
     actual_response = []
     jwt_response = {"token": "changeme"}
     with patch(
-        "connectors.sources.github.get_installation_access_token",
+        "connectors_service.sources.github.get_installation_access_token",
         return_value=jwt_response,
     ):
         async for access_control in source.get_access_control():
@@ -2082,7 +2082,7 @@ async def test_get_personal_access_token_scopes(scopes, expected_scopes):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 @pytest.mark.parametrize(
     "exception, raises",
     [
@@ -2127,7 +2127,9 @@ async def test_github_client_get_installations():
     source.github_client._get_client._make_request = AsyncMock(
         return_value=(mock_response, None)
     )
-    with patch("connectors.sources.github.get_jwt", return_value="changeme"):
+    with patch(
+        "connectors_service.sources.github.get_jwt", return_value="changeme"
+    ):
         expected_installations = [
             installation
             async for installation in source.github_client.get_installations()
@@ -2148,7 +2150,9 @@ async def test_github_app_paginated_get():
     source.github_client._get_client._make_request = AsyncMock(
         side_effect=[([item_1, item_2], "fake_url_2"), ([item_3], None)]
     )
-    with patch("connectors.sources.github.get_jwt", return_value="changeme"):
+    with patch(
+        "connectors_service.sources.github.get_jwt", return_value="changeme"
+    ):
         expected_results = [
             item
             async for item in source.github_client._github_app_paginated_get(
@@ -2166,7 +2170,7 @@ async def test_update_installation_id():
     jwt_response = {"token": "changeme"}
     installation_id = 123
     with patch(
-        "connectors.sources.github.get_installation_access_token",
+        "connectors_service.sources.github.get_installation_access_token",
         return_value=jwt_response,
     ) as get_installation_access_token:
         assert source.github_client._installation_id is None
@@ -2262,7 +2266,7 @@ async def test_get_owners(auth_method, repo_type, expected_owners):
     )
     jwt_response = {"token": "changeme"}
     with patch(
-        "connectors.sources.github.get_installation_access_token",
+        "connectors_service.sources.github.get_installation_access_token",
         return_value=jwt_response,
     ):
         actual_owners = [owner async for owner in source._get_owners()]
@@ -2270,7 +2274,7 @@ async def test_get_owners(auth_method, repo_type, expected_owners):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_update_installation_access_token_when_error_occurs():
     async with create_github_source() as source:
         source.github_client.get_installation_access_token = AsyncMock(
@@ -2281,7 +2285,7 @@ async def test_update_installation_access_token_when_error_occurs():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 @pytest.mark.parametrize(
     "exceptions, raises",
     [
diff --git a/app/connectors_service/tests/sources/test_gmail.py b/app/connectors_service/tests/sources/test_gmail.py
index 97c955127..c5c9106fe 100644
--- a/app/connectors_service/tests/sources/test_gmail.py
+++ b/app/connectors_service/tests/sources/test_gmail.py
@@ -15,13 +15,13 @@
 from connectors_sdk.utils import Features, iso_utc
 from freezegun import freeze_time

-from connectors.sources.gmail import (
+from connectors_service.sources.gmail import (
     ACCESS_CONTROL,
     GMailAdvancedRulesValidator,
     GMailDataSource,
     _message_doc,
 )
-from connectors.sources.google import MessageFields, UserFields
+from connectors_service.sources.google import MessageFields, UserFields
 from tests.commons import AsyncIterator
 from tests.sources.support import create_source
@@ -157,7 +157,7 @@ class TestGMailDataSource:
     @pytest_asyncio.fixture
     async def patch_gmail_client(self):
         with patch(
-            "connectors.sources.gmail.GMailClient", return_value=AsyncMock()
+            "connectors_service.sources.gmail.GMailClient", return_value=AsyncMock()
         ) as mock:
             client = mock.return_value
             yield client
@@ -165,7 +165,8 @@ async def patch_gmail_client(self):
     @pytest_asyncio.fixture
     async def patch_google_directory_client(self):
         with patch(
-            "connectors.sources.gmail.GoogleDirectoryClient", return_value=AsyncMock()
+            "connectors_service.sources.gmail.GoogleDirectoryClient",
+            return_value=AsyncMock(),
         ) as mock:
             client = mock.return_value
             yield client
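Note: the gmail fixtures above follow a common shape: enter patch(...), yield the mocked object, and let the fixture teardown exit the patch after the test. A generic, runnable version of that shape, patching asyncio.sleep instead of a connector client (assumes pytest with pytest-asyncio, as these tests do):

    import asyncio

    import pytest
    from unittest.mock import AsyncMock, patch

    @pytest.fixture
    def no_sleep():
        # Hold the patch open for the whole test, hand the mock to the test
        # body, and undo the patch in teardown when the generator resumes.
        with patch("asyncio.sleep", new=AsyncMock()) as sleep_mock:
            yield sleep_mock

    @pytest.mark.asyncio
    async def test_sleep_is_mocked(no_sleep):
        await asyncio.sleep(60)  # returns immediately; the AsyncMock absorbed it
        no_sleep.assert_awaited_once_with(60)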
diff --git a/app/connectors_service/tests/sources/test_google.py b/app/connectors_service/tests/sources/test_google.py
index c986784a0..28b275be4 100644
--- a/app/connectors_service/tests/sources/test_google.py
+++ b/app/connectors_service/tests/sources/test_google.py
@@ -9,7 +9,7 @@
 import pytest_asyncio
 from connectors_sdk.source import ConfigurableFieldValueError

-from connectors.sources.google import (
+from connectors_service.sources.google import (
     GMailClient,
     GoogleDirectoryClient,
     GoogleServiceAccountClient,
@@ -104,14 +104,14 @@ class TestGoogleServiceAccountClient:
     @pytest_asyncio.fixture(autouse=True)
     async def patch_service_account_creds(self):
         with patch(
-            "connectors.sources.google.ServiceAccountCreds", return_value=Mock()
+            "connectors_service.sources.google.ServiceAccountCreds", return_value=Mock()
         ) as class_mock:
             yield class_mock

     @pytest_asyncio.fixture
     async def patch_aiogoogle(self):
         with patch(
-            "connectors.sources.google.Aiogoogle", return_value=MagicMock()
+            "connectors_service.sources.google.Aiogoogle", return_value=MagicMock()
         ) as mock:
             aiogoogle_client = AsyncMock()
             mock.return_value.__aenter__.return_value = aiogoogle_client
@@ -199,7 +199,7 @@ class TestGoogleDirectoryClient:
     @pytest_asyncio.fixture
     async def patch_google_service_account_client(self):
         with patch(
-            "connectors.sources.google.GoogleServiceAccountClient",
+            "connectors_service.sources.google.GoogleServiceAccountClient",
             return_value=AsyncMock(),
         ) as mock:
             client = mock.return_value
@@ -262,7 +262,7 @@ class TestGMailClient:
     @pytest_asyncio.fixture
     async def patch_google_service_account_client(self):
         with patch(
-            "connectors.sources.google.GoogleServiceAccountClient",
+            "connectors_service.sources.google.GoogleServiceAccountClient",
             return_value=AsyncMock(),
         ) as mock:
             client = mock.return_value
diff --git a/app/connectors_service/tests/sources/test_google_cloud_storage.py b/app/connectors_service/tests/sources/test_google_cloud_storage.py
index b8ec4a061..19df3d643 100644
--- a/app/connectors_service/tests/sources/test_google_cloud_storage.py
+++ b/app/connectors_service/tests/sources/test_google_cloud_storage.py
@@ -16,7 +16,7 @@
 from aiogoogle.models import Request, Response
 from connectors_sdk.source import ConfigurableFieldValueError, DataSourceConfiguration

-from connectors.sources.google_cloud_storage import GoogleCloudStorageDataSource
+from connectors_service.sources.google_cloud_storage import GoogleCloudStorageDataSource
 from tests.sources.support import create_source

 SERVICE_ACCOUNT_CREDENTIALS = '{"project_id": "dummy123"}'
@@ -82,7 +82,7 @@ async def test_ping_for_successful_connection(catch_stdout):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_ping_for_failed_connection(catch_stdout):
     """Tests the ping functionality when connection can not be established to Google Cloud Storage."""

@@ -245,7 +245,7 @@ async def test_fetch_blobs():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_fetch_blobs_negative():
     """Tests the method responsible to yield blobs(negative) from Google Cloud Storage bucket."""

@@ -667,7 +667,7 @@ async def test_get_content_when_file_size_is_large(catch_stdout):


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_api_call_for_attribute_error(catch_stdout):
     """Tests the api_call method when resource attribute is not present in the getattr."""
diff --git a/app/connectors_service/tests/sources/test_google_drive.py b/app/connectors_service/tests/sources/test_google_drive.py
index 4f93d2bfb..e9694ce65 100644
--- a/app/connectors_service/tests/sources/test_google_drive.py
+++ b/app/connectors_service/tests/sources/test_google_drive.py
@@ -21,9 +21,9 @@
     DataSourceConfiguration,
 )

-from connectors.access_control import DLS_QUERY
-from connectors.sources.google import GoogleServiceAccountClient
-from connectors.sources.google_drive import (
+from connectors_service.access_control import DLS_QUERY
+from connectors_service.sources.google import GoogleServiceAccountClient
+from connectors_service.sources.google_drive import (
     RETRIES,
     GoogleDriveDataSource,
     SyncCursorEmpty,
@@ -134,7 +134,9 @@ async def test_ping_for_successful_connection():
         await source.ping()


-@patch("connectors.utils.time_to_sleep_between_retries", mock.Mock(return_value=0))
+@patch(
+    "connectors_service.utils.time_to_sleep_between_retries", mock.Mock(return_value=0)
+)
 @pytest.mark.asyncio
 async def test_ping_for_failed_connection():
     """Tests the ping functionality when connection can not be established to Google Drive."""
@@ -1195,7 +1197,7 @@ async def test_get_content_when_type_not_supported():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", 0)
+@patch("connectors_service.utils.time_to_sleep_between_retries", 0)
 async def test_api_call_for_attribute_error():
     """Tests the api_call method when resource attribute is not present in the getattr."""
@@ -1207,7 +1209,7 @@ async def test_api_call_for_attribute_error():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", 0)
+@patch("connectors_service.utils.time_to_sleep_between_retries", 0)
 async def test_api_call_http_error():
     """Test handling retries for HTTPError exception in api_call() method."""
     async with create_gdrive_source() as source:
@@ -1221,7 +1223,7 @@ async def test_api_call_http_error():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", 0)
+@patch("connectors_service.utils.time_to_sleep_between_retries", 0)
 async def test_api_call_other_exception():
     """Test handling retries for generic Exception in api_call() method."""
     async with create_gdrive_source() as source:
@@ -1233,7 +1235,7 @@ async def test_api_call_other_exception():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries")
+@patch("connectors_service.utils.time_to_sleep_between_retries")
 async def test_api_call_ping_retries(
     mock_time_to_sleep_between_retries, mock_responses
 ):
@@ -1253,7 +1255,7 @@ async def test_api_call_ping_retries(


 @pytest.mark.asyncio
-@mock.patch("connectors.utils.time_to_sleep_between_retries")
+@mock.patch("connectors_service.utils.time_to_sleep_between_retries")
 async def test_api_call_list_drives_retries(
     mock_time_to_sleep_between_retries, mock_responses
 ):
diff --git a/app/connectors_service/tests/sources/test_graphql.py b/app/connectors_service/tests/sources/test_graphql.py
index e5e8b5dda..59b690b85 100644
--- a/app/connectors_service/tests/sources/test_graphql.py
+++ b/app/connectors_service/tests/sources/test_graphql.py
@@ -15,7 +15,7 @@
 from freezegun import freeze_time
 from graphql import parse

-from connectors.sources.graphql import GraphQLDataSource, UnauthorizedException
+from connectors_service.sources.graphql import GraphQLDataSource, UnauthorizedException
 from tests.commons import AsyncIterator
 from tests.sources.support import create_source
@@ -108,7 +108,7 @@ async def test_get():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_get_with_errors():
     async with create_graphql_source() as source:
         source.graphql_client.session.get = Mock(
@@ -136,7 +136,7 @@ async def test_post():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_post_with_errors():
     async with create_graphql_source() as source:
         source.graphql_client.session.post = Mock(
@@ -152,7 +152,7 @@ async def test_post_with_errors():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_make_request_with_unauthorized():
     async with create_graphql_source() as source:
         source.graphql_client.session.post = Mock(
@@ -169,7 +169,7 @@ async def test_make_request_with_unauthorized():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_make_request_with_429_exception():
     async with create_graphql_source() as source:
         source.graphql_client.session.post = Mock(
@@ -231,7 +231,7 @@ async def test_ping():


 @pytest.mark.asyncio
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0))
+@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0))
 async def test_ping_negative():
     async with create_graphql_source() as source:
         source.graphql_client.post = AsyncMock(side_effect=Exception())
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_with_500_status(): error = ClientResponseError(None, None) error.status = 500 diff --git a/app/connectors_service/tests/sources/test_microsoft_teams.py b/app/connectors_service/tests/sources/test_microsoft_teams.py index 0a9b98165..915c62b32 100644 --- a/app/connectors_service/tests/sources/test_microsoft_teams.py +++ b/app/connectors_service/tests/sources/test_microsoft_teams.py @@ -13,7 +13,7 @@ from connectors_sdk.logger import logger from connectors_sdk.source import ConfigurableFieldValueError, DataSourceConfiguration -from connectors.sources.microsoft_teams import ( +from connectors_service.sources.microsoft_teams import ( GraphAPIToken, InternalServerError, MicrosoftTeamsClient, @@ -1083,7 +1083,7 @@ async def json(self): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_call_api_with_429( microsoft_client, mock_responses, @@ -1097,7 +1097,7 @@ async def test_call_api_with_429( payload = {"value": "Test rate limit"} retried_response.__aenter__ = AsyncMock(return_value=JSONAsyncMock(payload)) - with patch("connectors.sources.microsoft_teams.RETRY_SECONDS", 0.3): + with patch("connectors_service.sources.microsoft_teams.RETRY_SECONDS", 0.3): with patch.object( GraphAPIToken, "get_with_username_password", return_value="abc" ): @@ -1111,7 +1111,7 @@ async def test_call_api_with_429( @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_call_api_with_429_with_retry_after( microsoft_client, mock_responses, @@ -1126,7 +1126,7 @@ async def test_call_api_with_429_with_retry_after( payload = {"value": "Test rate limit"} retried_response.__aenter__ = AsyncMock(return_value=JSONAsyncMock(payload)) - with patch("connectors.sources.microsoft_teams.RETRY_SECONDS", 0.3): + with patch("connectors_service.sources.microsoft_teams.RETRY_SECONDS", 0.3): with patch.object( GraphAPIToken, "get_with_username_password", return_value="abc" ): diff --git a/app/connectors_service/tests/sources/test_mongo.py b/app/connectors_service/tests/sources/test_mongo.py index bcf0747ca..62c47ccc9 100644 --- a/app/connectors_service/tests/sources/test_mongo.py +++ b/app/connectors_service/tests/sources/test_mongo.py @@ -17,7 +17,10 @@ from connectors_sdk.source import ConfigurableFieldValueError from pymongo.errors import OperationFailure -from connectors.sources.mongo import MongoAdvancedRulesValidator, MongoDataSource +from connectors_service.sources.mongo import ( + MongoAdvancedRulesValidator, + MongoDataSource, +) from tests.commons import AsyncIterator from tests.sources.support import create_source diff --git a/app/connectors_service/tests/sources/test_mssql.py b/app/connectors_service/tests/sources/test_mssql.py index 5e5543b61..d38ff6e6c 100644 --- a/app/connectors_service/tests/sources/test_mssql.py +++ b/app/connectors_service/tests/sources/test_mssql.py @@ -13,7 +13,7 @@ from sqlalchemy.engine import Engine from sqlalchemy.exc import ProgrammingError -from connectors.sources.mssql import ( +from connectors_service.sources.mssql import ( MSSQLAdvancedRulesValidator, MSSQLDataSource, MSSQLQueries, @@ -51,7 +51,7 @@ async def 
test_ping(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_ping_negative(): with pytest.raises(Exception): async with create_source(MSSQLDataSource) as source: @@ -60,7 +60,7 @@ async def test_ping_negative(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_fetch_documents_from_table_negative(): async with create_source(MSSQLDataSource) as source: with patch.object( @@ -73,7 +73,7 @@ async def test_fetch_documents_from_table_negative(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_fetch_documents_from_query_negative(): async with create_source(MSSQLDataSource) as source: with patch.object( diff --git a/app/connectors_service/tests/sources/test_mysql.py b/app/connectors_service/tests/sources/test_mysql.py index 4028cace5..2e520c39a 100644 --- a/app/connectors_service/tests/sources/test_mysql.py +++ b/app/connectors_service/tests/sources/test_mysql.py @@ -14,14 +14,14 @@ from connectors_sdk.source import ConfigurableFieldValueError from freezegun import freeze_time -from connectors.sources.mysql import ( +from connectors_service.sources.mysql import ( MySQLAdvancedRulesValidator, MySQLClient, MySqlDataSource, generate_id, row2doc, ) -from connectors.utils import iso_utc +from connectors_service.utils import iso_utc from tests.commons import AsyncIterator from tests.sources.support import create_source @@ -125,13 +125,15 @@ def patch_ping(): @pytest.fixture def patch_row2doc(): - with patch("connectors.sources.mysql.row2doc", return_value=MagicMock()) as row2doc: + with patch( + "connectors_service.sources.mysql.row2doc", return_value=MagicMock() + ) as row2doc: yield row2doc @pytest.fixture def patch_default_wait_multiplier(): - with patch("connectors.sources.mysql.RETRY_INTERVAL", 0): + with patch("connectors_service.sources.mysql.RETRY_INTERVAL", 0): yield diff --git a/app/connectors_service/tests/sources/test_network_drive.py b/app/connectors_service/tests/sources/test_network_drive.py index 81ea907b2..932d67b50 100644 --- a/app/connectors_service/tests/sources/test_network_drive.py +++ b/app/connectors_service/tests/sources/test_network_drive.py @@ -22,8 +22,8 @@ Unsuccessful, ) -from connectors.access_control import ACCESS_CONTROL -from connectors.sources.network_drive import ( +from connectors_service.access_control import ACCESS_CONTROL +from connectors_service.sources.network_drive import ( ClientPermissionException, InvalidLogonHoursException, NASDataSource, @@ -251,7 +251,9 @@ async def test_traverse_diretory_with_invalid_path(dir_mock): @mock.patch("smbclient.scandir") -@mock.patch("connectors.utils.time_to_sleep_between_retries", mock.Mock(return_value=0)) +@mock.patch( + "connectors_service.utils.time_to_sleep_between_retries", mock.Mock(return_value=0) +) @pytest.mark.asyncio async def test_traverse_diretory_retried_on_smb_timeout(dir_mock): """Tests the scandir method of smbclient is retried on SMBConnectionClosed error @@ -1141,7 +1143,9 @@ async def test_validate_drive_path(): @mock.patch("smbclient.scandir") -@mock.patch("connectors.utils.time_to_sleep_between_retries", mock.Mock(return_value=0)) +@mock.patch( 
+ "connectors_service.utils.time_to_sleep_between_retries", mock.Mock(return_value=0) +) @pytest.mark.asyncio async def test_traverse_diretory_smb_timeout_for_sync_rule(dir_mock): with mock.patch.object(SMBSession, "create_connection"): diff --git a/app/connectors_service/tests/sources/test_notion.py b/app/connectors_service/tests/sources/test_notion.py index 6d0add61e..1dff7844e 100644 --- a/app/connectors_service/tests/sources/test_notion.py +++ b/app/connectors_service/tests/sources/test_notion.py @@ -12,7 +12,7 @@ from httpx import Response from notion_client import APIResponseError -from connectors.sources.notion import ( +from connectors_service.sources.notion import ( NotFound, NotionAdvancedRulesValidator, NotionClient, @@ -206,7 +206,7 @@ @pytest.mark.asyncio -@patch("connectors.sources.notion.NotionClient", autospec=True) +@patch("connectors_service.sources.notion.NotionClient", autospec=True) async def test_ping(mock_notion_client): mock_notion_client.return_value.fetch_owner.return_value = None async with create_source( @@ -218,7 +218,7 @@ async def test_ping(mock_notion_client): @pytest.mark.asyncio -@patch("connectors.sources.notion.NotionClient", autospec=True) +@patch("connectors_service.sources.notion.NotionClient", autospec=True) async def test_ping_negative(mock_notion_client): mock_notion_client.return_value.fetch_owner.side_effect = APIResponseError( message="Invalid API key", @@ -265,7 +265,7 @@ async def test_close_with_client(): ), ], ) -@patch("connectors.sources.notion.NotionClient", autospec=True) +@patch("connectors_service.sources.notion.NotionClient", autospec=True) async def test_get_entities( mock_notion_client, entity_type, entity_titles, mock_search_results ): @@ -287,7 +287,7 @@ async def test_get_entities( ("page", ["Missing Page"], "pages"), ], ) -@patch("connectors.sources.notion.NotionClient") +@patch("connectors_service.sources.notion.NotionClient") async def test_get_entities_entity_not_found( mock_notion_client, entity_type, entity_titles, configuration_key ): @@ -498,7 +498,7 @@ async def test_query_database(): assert database["title"][0]["plain_text"] == "This is a test database." 
-@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) @pytest.mark.asyncio async def test_get_via_session_client_response_error(): async with create_source( @@ -519,7 +519,7 @@ async def test_get_via_session_client_response_error(): ) -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) @pytest.mark.asyncio async def test_get_via_session_with_429_status(): retried_response = AsyncMock() @@ -805,7 +805,7 @@ async def mock_function(**kwargs): mock_client_instance.some_function.assert_called_once() -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) @pytest.mark.asyncio async def test_fetch_results_rate_limit_exceeded(): async def mock_function_with_429(**kwargs): @@ -824,7 +824,7 @@ async def mock_function_with_429(**kwargs): async with create_source( NotionDataSource, notion_secret_key="secret_key" ) as source: - with patch("connectors.sources.notion.DEFAULT_RETRY_SECONDS", 0.3): + with patch("connectors_service.sources.notion.DEFAULT_RETRY_SECONDS", 0.3): mock_function_with_429.call_count = 0 result = await source.notion_client.fetch_results(mock_function_with_429) @@ -835,7 +835,7 @@ async def mock_function_with_429(**kwargs): assert mock_function_with_429.call_count == 3 # initial call + 2 retries -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) @pytest.mark.asyncio async def test_fetch_results_other_errors_not_retried(): async def mock_function_with_other_error(**kwargs): @@ -881,7 +881,7 @@ async def test_fetch_child_blocks_for_external_object_instance_page(caplog): block_id = "block_id" caplog.set_level("WARNING") with patch( - "connectors.sources.notion.NotionClient.async_iterate_paginated_api", + "connectors_service.sources.notion.NotionClient.async_iterate_paginated_api", side_effect=APIResponseError( code="validation_error", message="external_object_instance_page is not supported via the API", @@ -946,7 +946,7 @@ async def test_fetch_child_blocks_with_not_found_object(caplog): block_id = "block_id" caplog.set_level("WARNING") with patch( - "connectors.sources.notion.NotionClient.async_iterate_paginated_api", + "connectors_service.sources.notion.NotionClient.async_iterate_paginated_api", side_effect=APIResponseError( code="object_not_found", message="Object Not Found", diff --git a/app/connectors_service/tests/sources/test_onedrive.py b/app/connectors_service/tests/sources/test_onedrive.py index f3d24a225..8a15022fe 100644 --- a/app/connectors_service/tests/sources/test_onedrive.py +++ b/app/connectors_service/tests/sources/test_onedrive.py @@ -14,7 +14,7 @@ from connectors_sdk.filtering.validation import Filter, SyncRuleValidationResult from connectors_sdk.source import ConfigurableFieldValueError, DataSourceConfiguration -from connectors.sources.onedrive import ( +from connectors_service.sources.onedrive import ( AccessToken, InternalServerError, NotFound, @@ -659,7 +659,7 @@ async def test_get_token_raises_correct_exception_when_any_other_status(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", 
Mock(return_value=0)) async def test_get_with_429_status(): initial_response = ClientResponseError(None, None) initial_response.status = 429 @@ -685,7 +685,7 @@ async def test_get_with_429_status(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_with_429_status_without_retry_after_header(): initial_response = ClientResponseError(None, None) initial_response.status = 429 @@ -695,7 +695,7 @@ async def test_get_with_429_status_without_retry_after_header(): payload = {"value": "Test rate limit"} retried_response.__aenter__ = AsyncMock(return_value=JSONAsyncMock(payload)) - with patch("connectors.sources.onedrive.DEFAULT_RETRY_SECONDS", 0.3): + with patch("connectors_service.sources.onedrive.DEFAULT_RETRY_SECONDS", 0.3): async with create_onedrive_source() as source: with patch.object(AccessToken, "get", return_value="abc"): with patch( @@ -729,7 +729,7 @@ async def test_get_with_404_status(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_with_500_status(): error = ClientResponseError(None, None) error.status = 500 @@ -748,7 +748,7 @@ async def test_get_with_500_status(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_post_with_429_status(): initial_response = ClientPayloadError(None, None) initial_response.status = 429 @@ -774,7 +774,7 @@ async def test_post_with_429_status(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_post_with_429_status_without_retry_after_header(): initial_response = ClientPayloadError(None, None) initial_response.status = 429 @@ -784,7 +784,7 @@ async def test_post_with_429_status_without_retry_after_header(): payload = {"value": "Test rate limit"} retried_response.__aenter__ = AsyncMock(return_value=JSONAsyncMock(payload)) - with patch("connectors.sources.onedrive.DEFAULT_RETRY_SECONDS", 0.3): + with patch("connectors_service.sources.onedrive.DEFAULT_RETRY_SECONDS", 0.3): async with create_onedrive_source() as source: with patch.object(AccessToken, "get", return_value="abc"): with patch( diff --git a/app/connectors_service/tests/sources/test_oracle.py b/app/connectors_service/tests/sources/test_oracle.py index bc411583d..ad867b552 100644 --- a/app/connectors_service/tests/sources/test_oracle.py +++ b/app/connectors_service/tests/sources/test_oracle.py @@ -11,7 +11,11 @@ import pytest from sqlalchemy.engine import Engine -from connectors.sources.oracle import OracleClient, OracleDataSource, OracleQueries +from connectors_service.sources.oracle import ( + OracleClient, + OracleDataSource, + OracleQueries, +) from tests.sources.support import create_source from tests.sources.test_generic_database import ConnectionSync @@ -45,7 +49,7 @@ def oracle_client(**extras): client.close() -@patch("connectors.sources.oracle.create_engine") +@patch("connectors_service.sources.oracle.create_engine") @pytest.mark.parametrize( "connection_source, DSN", [ @@ -65,7 +69,7 @@ def test_engine_in_thin_mode(mock_fun, connection_source, DSN): 
mock_fun.assert_called_with(DSN) -@patch("connectors.sources.oracle.create_engine") +@patch("connectors_service.sources.oracle.create_engine") @pytest.mark.parametrize( "connection_source, DSN", [ diff --git a/app/connectors_service/tests/sources/test_outlook.py b/app/connectors_service/tests/sources/test_outlook.py index 4e37fb2d6..ccb914196 100644 --- a/app/connectors_service/tests/sources/test_outlook.py +++ b/app/connectors_service/tests/sources/test_outlook.py @@ -13,7 +13,7 @@ from aiohttp import StreamReader from connectors_sdk.source import ConfigurableFieldValueError -from connectors.sources.outlook import ( +from connectors_service.sources.outlook import ( OUTLOOK_CLOUD, OUTLOOK_SERVER, Forbidden, @@ -512,7 +512,7 @@ async def test_validate_config_with_valid_dependency_fields_does_not_raise_error @pytest.mark.asyncio -@patch("connectors.sources.outlook.Connection") +@patch("connectors_service.sources.outlook.Connection") async def test_ping_for_server(mock_connection): mock_connection_instance = mock_connection.return_value mock_connection_instance.search.return_value = ( @@ -528,7 +528,7 @@ async def test_ping_for_server(mock_connection): @pytest.mark.asyncio -@patch("connectors.sources.outlook.Connection") +@patch("connectors_service.sources.outlook.Connection") async def test_ping_for_server_for_failed_connection(mock_connection): mock_connection_instance = mock_connection.return_value mock_connection_instance.search.return_value = ( @@ -570,7 +570,7 @@ async def test_ping_for_cloud(): (MockException(status=404), NotFound), ], ) -@mock.patch("connectors.utils.time_to_sleep_between_retries") +@mock.patch("connectors_service.utils.time_to_sleep_between_retries") async def test_ping_for_cloud_for_failed_connection( mock_time_to_sleep_between_retries, raised_exception, side_effect_exception ): @@ -609,7 +609,7 @@ async def test_get_users_for_cloud(): @pytest.mark.asyncio -@patch("connectors.sources.outlook.Connection") +@patch("connectors_service.sources.outlook.Connection") async def test_fetch_admin_users_negative(mock_connection): async with create_outlook_source() as source: mock_connection_instance = mock_connection.return_value @@ -628,7 +628,7 @@ async def test_fetch_admin_users_negative(mock_connection): @pytest.mark.asyncio -@patch("connectors.sources.outlook.Connection") +@patch("connectors_service.sources.outlook.Connection") async def test_fetch_admin_users(mock_connection): async with create_outlook_source() as source: users = [] @@ -697,7 +697,7 @@ async def test_get_content_with_extraction_service(): (False, {"type": "user", "attributes": {"mail": "account"}}), ], ) -@patch("connectors.sources.outlook.Account", return_value="account") +@patch("connectors_service.sources.outlook.Account", return_value="account") async def test_get_user_accounts_for_cloud(account, is_cloud, user_response): async with create_outlook_source() as source: source.client.is_cloud = is_cloud diff --git a/app/connectors_service/tests/sources/test_postgresql.py b/app/connectors_service/tests/sources/test_postgresql.py index 585266bd4..ddcb49704 100644 --- a/app/connectors_service/tests/sources/test_postgresql.py +++ b/app/connectors_service/tests/sources/test_postgresql.py @@ -16,7 +16,7 @@ from sqlalchemy.ext.asyncio import create_async_engine from sqlalchemy.ext.asyncio.engine import AsyncEngine -from connectors.sources.postgresql import ( +from connectors_service.sources.postgresql import ( PostgreSQLAdvancedRulesValidator, PostgreSQLClient, PostgreSQLDataSource, @@ -193,7 +193,7 @@ async 
def test_ping(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_ping_negative(): with pytest.raises(Exception): async with create_source(PostgreSQLDataSource, port=5432) as source: diff --git a/app/connectors_service/tests/sources/test_redis.py b/app/connectors_service/tests/sources/test_redis.py index 349bf97be..391d551a4 100644 --- a/app/connectors_service/tests/sources/test_redis.py +++ b/app/connectors_service/tests/sources/test_redis.py @@ -14,7 +14,7 @@ from connectors_sdk.source import ConfigurableFieldValueError from freezegun import freeze_time -from connectors.sources.redis import ( +from connectors_service.sources.redis import ( RedisAdvancedRulesValidator, RedisDataSource, ) diff --git a/app/connectors_service/tests/sources/test_s3.py b/app/connectors_service/tests/sources/test_s3.py index 9eb01f897..e879957ab 100644 --- a/app/connectors_service/tests/sources/test_s3.py +++ b/app/connectors_service/tests/sources/test_s3.py @@ -15,7 +15,7 @@ from connectors_sdk.filtering.validation import Filter, SyncRuleValidationResult from connectors_sdk.source import ConfigurableFieldValueError -from connectors.sources.s3 import S3AdvancedRulesValidator, S3DataSource +from connectors_service.sources.s3 import S3AdvancedRulesValidator, S3DataSource from tests.sources.support import create_source ADVANCED_SNIPPET = "advanced_snippet" diff --git a/app/connectors_service/tests/sources/test_salesforce.py b/app/connectors_service/tests/sources/test_salesforce.py index d60986c25..c993ca19c 100644 --- a/app/connectors_service/tests/sources/test_salesforce.py +++ b/app/connectors_service/tests/sources/test_salesforce.py @@ -16,8 +16,8 @@ from connectors_sdk.filtering.validation import Filter from connectors_sdk.source import ConfigurableFieldValueError, DataSourceConfiguration -from connectors.access_control import DLS_QUERY -from connectors.sources.salesforce import ( +from connectors_service.access_control import DLS_QUERY +from connectors_service.sources.salesforce import ( API_VERSION, RELEVANT_SOBJECT_FIELDS, ConnectorRequestError, @@ -910,7 +910,7 @@ async def test_generate_token_with_unexpected_error_retries( ], ) @mock.patch( - "connectors.sources.salesforce.RELEVANT_SOBJECTS", + "connectors_service.sources.salesforce.RELEVANT_SOBJECTS", ["FooField", "BarField", "ArghField"], ) async def test_get_queryable_sobjects(mock_responses, sobject, expected_result): @@ -939,9 +939,9 @@ async def test_get_queryable_sobjects(mock_responses, sobject, expected_result): @pytest.mark.asyncio -@mock.patch("connectors.sources.salesforce.RELEVANT_SOBJECTS", ["Account"]) +@mock.patch("connectors_service.sources.salesforce.RELEVANT_SOBJECTS", ["Account"]) @mock.patch( - "connectors.sources.salesforce.RELEVANT_SOBJECT_FIELDS", + "connectors_service.sources.salesforce.RELEVANT_SOBJECT_FIELDS", ["FooField", "BarField", "ArghField"], ) async def test_get_queryable_fields(mock_responses): @@ -2184,7 +2184,7 @@ async def test_queryable_sobject_fields_performance_optimization(mock_responses) - Performance improvement: ~85% reduction in API calls """ async with create_salesforce_source(mock_queryables=False) as source: - from connectors.sources.salesforce import RELEVANT_SOBJECTS + from connectors_service.sources.salesforce import RELEVANT_SOBJECTS # Mock responses for all RELEVANT_SOBJECTS mock_fields = [{"name": "Id"}, {"name": "Name"}, {"name": "Description"}] 
diff --git a/app/connectors_service/tests/sources/test_sandfly.py b/app/connectors_service/tests/sources/test_sandfly.py index 24d62fda1..a5ad2fce4 100644 --- a/app/connectors_service/tests/sources/test_sandfly.py +++ b/app/connectors_service/tests/sources/test_sandfly.py @@ -14,7 +14,7 @@ CURSOR_SYNC_TIMESTAMP, ) -from connectors.sources.sandfly import ( +from connectors_service.sources.sandfly import ( CURSOR_SEQUENCE_ID_KEY, FetchTokenError, ResourceNotFound, @@ -196,7 +196,7 @@ async def test_client_ping_success(sandfly_client, mock_responses): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_client_ping_failure(sandfly_client, mock_responses): request_error = ClientResponseError(None, None) request_error.status = 403 @@ -211,7 +211,7 @@ async def test_client_ping_failure(sandfly_client, mock_responses): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_client_login_failures(sandfly_client, mock_responses): request_error = FetchTokenError(None, None) request_error.status = 403 @@ -243,7 +243,7 @@ async def test_client_login_failures(sandfly_client, mock_responses): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_client_resource_not_found(sandfly_client, mock_responses): mock_responses.post( URL_SANDFLY_LOGIN, @@ -448,7 +448,7 @@ async def test_data_source_ping_success(sandfly_data_source, mock_responses): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_data_source_ping_failure(sandfly_data_source, mock_responses): request_error = ClientResponseError(None, None) request_error.status = 403 diff --git a/app/connectors_service/tests/sources/test_servicenow.py b/app/connectors_service/tests/sources/test_servicenow.py index efa792325..11207101b 100644 --- a/app/connectors_service/tests/sources/test_servicenow.py +++ b/app/connectors_service/tests/sources/test_servicenow.py @@ -14,8 +14,8 @@ from connectors_sdk.filtering.validation import Filter, SyncRuleValidationResult from connectors_sdk.source import ConfigurableFieldValueError -from connectors.access_control import DLS_QUERY -from connectors.sources.servicenow import ( +from connectors_service.access_control import DLS_QUERY +from connectors_service.sources.servicenow import ( InvalidResponse, ServiceNowAdvancedRulesValidator, ServiceNowClient, @@ -164,7 +164,7 @@ async def test_get_data(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_data_with_retry(): async with create_service_now_source() as source: source.servicenow_client._api_call = mock.AsyncMock( @@ -191,7 +191,7 @@ async def test_get_table_length(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_table_length_with_retry(): async with 
create_service_now_source() as source: source.servicenow_client._api_call = mock.AsyncMock( @@ -203,7 +203,7 @@ async def test_get_table_length_with_retry(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_data_with_empty_response(): async with create_service_now_source() as source: source.servicenow_client._api_call = mock.AsyncMock( @@ -219,7 +219,7 @@ async def test_get_data_with_empty_response(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_get_data_with_text_response(): async with create_service_now_source() as source: source.servicenow_client._api_call = mock.AsyncMock( @@ -312,7 +312,7 @@ async def test_get_docs_with_skipping_table_data(): ) response_list = [] with mock.patch( - "connectors.sources.servicenow.DEFAULT_SERVICE_NAMES", + "connectors_service.sources.servicenow.DEFAULT_SERVICE_NAMES", {"incident": ["sn_incident_read"]}, ): with mock.patch.object( @@ -374,7 +374,7 @@ async def test_get_docs_with_skipping_attachment_data(dls_enabled, expected_resp response_list = [] with mock.patch( - "connectors.sources.servicenow.DEFAULT_SERVICE_NAMES", + "connectors_service.sources.servicenow.DEFAULT_SERVICE_NAMES", {"incident": ["sn_incident_read"]}, ): with mock.patch.object( @@ -894,7 +894,7 @@ async def test_get_docs_with_advanced_rules_pagination(filtering): }, ] - with patch("connectors.sources.servicenow.TABLE_FETCH_SIZE", 2): + with patch("connectors_service.sources.servicenow.TABLE_FETCH_SIZE", 2): async with create_service_now_source() as source: source.servicenow_client._api_call = mock.AsyncMock( return_value=MockResponse( diff --git a/app/connectors_service/tests/sources/test_sharepoint_online.py b/app/connectors_service/tests/sources/test_sharepoint_online.py index bad3482cf..01d92ca31 100644 --- a/app/connectors_service/tests/sources/test_sharepoint_online.py +++ b/app/connectors_service/tests/sources/test_sharepoint_online.py @@ -21,7 +21,7 @@ from connectors_sdk.utils import Features, iso_utc from freezegun import freeze_time -from connectors.sources.sharepoint_online import ( +from connectors_service.sources.sharepoint_online import ( ACCESS_CONTROL, DEFAULT_BACKOFF_MULTIPLIER, DEFAULT_RETRY_SECONDS, @@ -536,7 +536,7 @@ async def test_fetch_token(self, token, mock_responses): certificate_credential_mock.close = AsyncMock() with patch( - "connectors.sources.sharepoint_online.CertificateCredential", + "connectors_service.sources.sharepoint_online.CertificateCredential", return_value=certificate_credential_mock, ): actual_token, actual_expires_at = await token._fetch_token() @@ -565,7 +565,7 @@ def effect(*args, **kwargs): certificate_credential_mock.get_token = AsyncMock(side_effect=effect()) with patch( - "connectors.sources.sharepoint_online.CertificateCredential", + "connectors_service.sources.sharepoint_online.CertificateCredential", return_value=certificate_credential_mock, ): actual_token, actual_expires_at = await token._fetch_token() @@ -2285,7 +2285,7 @@ async def patch_sharepoint_client(self): client = AsyncMock() with patch( - "connectors.sources.sharepoint_online.SharepointOnlineClient", + "connectors_service.sources.sharepoint_online.SharepointOnlineClient", return_value=AsyncMock(), ) as new_mock: client = new_mock.return_value @@ -2392,7 
+2392,7 @@ async def test_get_docs_without_access_control(self, patch_sharepoint_client): @pytest.mark.asyncio @patch( - "connectors.sources.sharepoint_online.ACCESS_CONTROL", + "connectors_service.sources.sharepoint_online.ACCESS_CONTROL", ALLOW_ACCESS_CONTROL_PATCHED, ) @freeze_time(iso_utc()) @@ -2881,7 +2881,7 @@ async def test_drive_items_batch_with_permissions_for_delta_delete_operation( @pytest.mark.asyncio @patch( - "connectors.sources.sharepoint_online.ACCESS_CONTROL", + "connectors_service.sources.sharepoint_online.ACCESS_CONTROL", ALLOW_ACCESS_CONTROL_PATCHED, ) async def test_drive_items_permissions_when_fetch_drive_item_permissions_enabled( @@ -2931,7 +2931,7 @@ async def test_drive_items_permissions_when_fetch_drive_item_permissions_enabled @pytest.mark.asyncio @patch( - "connectors.sources.sharepoint_online.ACCESS_CONTROL", + "connectors_service.sources.sharepoint_online.ACCESS_CONTROL", ALLOW_ACCESS_CONTROL_PATCHED, ) async def test_site_page_permissions_when_fetch_drive_item_permissions_enabled( @@ -3405,7 +3405,7 @@ async def test_site_access_control(self, patch_sharepoint_client): ], ) @patch( - "connectors.sources.sharepoint_online.ACCESS_CONTROL", + "connectors_service.sources.sharepoint_online.ACCESS_CONTROL", ALLOW_ACCESS_CONTROL_PATCHED, ) async def test_decorate_with_access_control( @@ -3472,7 +3472,7 @@ async def test_dls_disabled_with_features_missing(self): @pytest.mark.asyncio @patch( - "connectors.sources.sharepoint_online.TIMESTAMP_FORMAT", + "connectors_service.sources.sharepoint_online.TIMESTAMP_FORMAT", TIMESTAMP_FORMAT_PATCHED, ) @pytest.mark.asyncio diff --git a/app/connectors_service/tests/sources/test_sharepoint_server.py b/app/connectors_service/tests/sources/test_sharepoint_server.py index 08e532598..04e867a7c 100644 --- a/app/connectors_service/tests/sources/test_sharepoint_server.py +++ b/app/connectors_service/tests/sources/test_sharepoint_server.py @@ -16,7 +16,7 @@ from connectors_sdk.source import ConfigurableFieldValueError from httpx import ByteStream -from connectors.sources.sharepoint_server import SharepointServerDataSource +from connectors_service.sources.sharepoint_server import SharepointServerDataSource from tests.commons import AsyncIterator from tests.sources.support import create_source @@ -1021,7 +1021,7 @@ async def test_api_call_successfully(): @pytest.fixture def patch_default_wait_multiplier(): - with mock.patch("connectors.sources.sharepoint_server.RETRY_INTERVAL", 0): + with mock.patch("connectors_service.sources.sharepoint_server.RETRY_INTERVAL", 0): yield diff --git a/app/connectors_service/tests/sources/test_slack.py b/app/connectors_service/tests/sources/test_slack.py index 49ff15cbe..6a799046a 100644 --- a/app/connectors_service/tests/sources/test_slack.py +++ b/app/connectors_service/tests/sources/test_slack.py @@ -12,7 +12,7 @@ from aiohttp.client_exceptions import ClientError from connectors_sdk.logger import logger -from connectors.sources.slack import SlackClient, SlackDataSource +from connectors_service.sources.slack import SlackClient, SlackDataSource from tests.commons import AsyncIterator from tests.sources.support import create_source @@ -135,7 +135,7 @@ async def test_slack_client_list_users(slack_client, mock_responses): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_handle_throttled_error(slack_client, mock_responses): channel = {"id": 1, "name": "test"} 
error_response_data = {"ok": False, "error": "rate_limited"} @@ -172,7 +172,7 @@ async def test_ping(slack_client, mock_responses): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_bad_ping(slack_client, mock_responses): response_data = {"ok": False, "error": "not_authed"} mock_responses.get( diff --git a/app/connectors_service/tests/sources/test_zoom.py b/app/connectors_service/tests/sources/test_zoom.py index 58cfd409a..853ecc500 100644 --- a/app/connectors_service/tests/sources/test_zoom.py +++ b/app/connectors_service/tests/sources/test_zoom.py @@ -15,7 +15,7 @@ from connectors_sdk.source import ConfigurableFieldValueError from freezegun import freeze_time -from connectors.sources.zoom import TokenError, ZoomDataSource +from connectors_service.sources.zoom import TokenError, ZoomDataSource from tests.sources.support import create_source # Access token document @@ -532,7 +532,7 @@ async def test_fetch_for_successful_call(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_fetch_for_unsuccessful_call(): async with create_zoom_source() as source: with mock.patch( @@ -550,7 +550,7 @@ async def test_fetch_for_unsuccessful_call(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_fetch_for_unauthorized_error(): async with create_zoom_source() as source: with mock.patch( @@ -574,7 +574,7 @@ async def test_fetch_for_unauthorized_error(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_fetch_for_notfound_error(): async with create_zoom_source() as source: with mock.patch( @@ -598,7 +598,7 @@ async def test_fetch_for_notfound_error(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_fetch_for_other_client_error(): async with create_zoom_source() as source: with mock.patch( @@ -639,7 +639,7 @@ async def test_content_for_successful_call(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_content_for_unsuccessful_call(): async with create_zoom_source() as source: with mock.patch( @@ -733,7 +733,7 @@ async def test_ping_for_successful_connection(): @pytest.mark.asyncio -@patch("connectors.utils.time_to_sleep_between_retries", Mock(return_value=0)) +@patch("connectors_service.utils.time_to_sleep_between_retries", Mock(return_value=0)) async def test_ping_for_unsuccessful_connection(): async with create_zoom_source() as source: with mock.patch( diff --git a/app/connectors_service/tests/test_access_control.py b/app/connectors_service/tests/test_access_control.py index 6c1f34d7e..bce77312b 100644 --- a/app/connectors_service/tests/test_access_control.py +++ b/app/connectors_service/tests/test_access_control.py @@ -6,7 +6,7 @@ import pytest -from connectors.access_control import ( +from 
connectors_service.access_control import ( es_access_control_query, prefix_identity, ) diff --git a/app/connectors_service/tests/test_config.py b/app/connectors_service/tests/test_config.py index 3a3b29b9a..b468795ab 100644 --- a/app/connectors_service/tests/test_config.py +++ b/app/connectors_service/tests/test_config.py @@ -9,7 +9,7 @@ import pytest -from connectors.config import _nest_configs, load_config +from connectors_service.config import _nest_configs, load_config HERE = os.path.dirname(__file__) FIXTURES_DIR = os.path.abspath(os.path.join(HERE, "fixtures")) diff --git a/app/connectors_service/tests/test_connectors_cli.py b/app/connectors_service/tests/test_connectors_cli.py index 7f7de65bc..779105841 100644 --- a/app/connectors_service/tests/test_connectors_cli.py +++ b/app/connectors_service/tests/test_connectors_cli.py @@ -12,32 +12,32 @@ from click.testing import CliRunner from elasticsearch import ApiError -from connectors import __version__ # NOQA -from connectors.cli.auth import CONFIG_FILE_PATH -from connectors.connectors_cli import cli, login -from connectors.protocol.connectors import Connector as ConnectorObject -from connectors.protocol.connectors import JobStatus -from connectors.protocol.connectors import SyncJob as SyncJobObject +from connectors_service import __version__ # NOQA +from connectors_service.cli.auth import CONFIG_FILE_PATH +from connectors_service.connectors_cli import cli, login +from connectors_service.protocol.connectors import Connector as ConnectorObject +from connectors_service.protocol.connectors import JobStatus +from connectors_service.protocol.connectors import SyncJob as SyncJobObject from tests.commons import AsyncIterator @pytest.fixture(autouse=True) def mock_cli_config(): - with patch("connectors.connectors_cli.load_config") as mock: + with patch("connectors_service.connectors_cli.load_config") as mock: mock.return_value = {"elasticsearch": {"host": "http://localhost:9211/"}} yield mock @pytest.fixture(autouse=True) def mock_connector_es_client(): - with patch("connectors.cli.connector.CLIClient") as mock: + with patch("connectors_service.cli.connector.CLIClient") as mock: mock.return_value = AsyncMock() yield mock @pytest.fixture(autouse=True) def mock_job_es_client(): - with patch("connectors.cli.job.CLIClient") as mock: + with patch("connectors_service.cli.job.CLIClient") as mock: mock.return_value = AsyncMock() yield mock @@ -59,7 +59,10 @@ def test_help_page(commands): assert "Commands:" in result.output -@patch("connectors.cli.auth.Auth._Auth__ping_es_client", AsyncMock(return_value=False)) +@patch( + "connectors_service.cli.auth.Auth._Auth__ping_es_client", + AsyncMock(return_value=False), +) def test_login_unsuccessful(tmp_path): runner = CliRunner() with runner.isolated_filesystem(temp_dir=tmp_path) as temp_dir: @@ -71,7 +74,10 @@ def test_login_unsuccessful(tmp_path): assert not os.path.isfile(os.path.join(temp_dir, CONFIG_FILE_PATH)) -@patch("connectors.cli.auth.Auth._Auth__ping_es_client", AsyncMock(return_value=True)) +@patch( + "connectors_service.cli.auth.Auth._Auth__ping_es_client", + AsyncMock(return_value=True), +) def test_login_successful(tmp_path): runner = CliRunner() with runner.isolated_filesystem(temp_dir=tmp_path) as temp_dir: @@ -83,7 +89,10 @@ def test_login_successful(tmp_path): assert os.path.isfile(os.path.join(temp_dir, CONFIG_FILE_PATH)) -@patch("connectors.cli.auth.Auth._Auth__ping_es_client", AsyncMock(return_value=True)) +@patch( + "connectors_service.cli.auth.Auth._Auth__ping_es_client", + 
AsyncMock(return_value=True), +) def test_login_successful_with_apikey_method(tmp_path): runner = CliRunner() api_key = "testapikey" @@ -128,7 +137,10 @@ def test_connector_help_page(): assert "Commands:" in result.output -@patch("connectors.cli.connector.Connector.list_connectors", AsyncMock(return_value=[])) +@patch( + "connectors_service.cli.connector.Connector.list_connectors", + AsyncMock(return_value=[]), +) def test_connector_list_no_connectors(): runner = CliRunner() result = runner.invoke(cli, ["connector", "list"]) @@ -152,7 +164,8 @@ def test_connector_list_one_connector(): connectors = [ConnectorObject(connector_index, doc)] with patch( - "connectors.protocol.ConnectorIndex.all_connectors", AsyncIterator(connectors) + "connectors_service.protocol.ConnectorIndex.all_connectors", + AsyncIterator(connectors), ): result = runner.invoke(cli, ["connector", "list"]) @@ -166,7 +179,7 @@ def test_connector_list_one_connector(): @patch("click.confirm") @patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[False, False]), ) def test_connector_create(patch_click_confirm): @@ -187,7 +200,7 @@ def test_connector_create(patch_click_confirm): ) with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": "new_connector_id"}), ) as patched_create: result = runner.invoke( @@ -213,7 +226,7 @@ def test_connector_create(patch_click_confirm): ) @patch("click.confirm") @patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[False, False]), ) def test_connector_create_with_native_flags( @@ -236,7 +249,7 @@ def test_connector_create_with_native_flags( ) with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": "new_connector_id"}), ) as patched_create: args = ["connector", "create", "--service-type", "mongodb"] @@ -254,11 +267,11 @@ def test_connector_create_with_native_flags( @patch("click.confirm") @patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[True, False]), ) @patch( - "connectors.cli.connector.Connector._Connector__create_api_key", + "connectors_service.cli.connector.Connector._Connector__create_api_key", AsyncMock(return_value={"id": "new_api_key_id", "encoded": "encoded_api_key"}), ) def test_connector_create_from_index(patch_click_confirm): @@ -279,7 +292,7 @@ def test_connector_create_from_index(patch_click_confirm): ) with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": "new_connector_id"}), ) as patched_create: result = runner.invoke( @@ -329,11 +342,11 @@ def test_connector_create_fails_when_index_or_connector_exists( ) with patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[index_exists, connector_exists]), ): with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": "new_connector_id"}), ) as patched_create: args = ["connector", "create", "--service-type", 
"mongodb"] @@ -349,11 +362,11 @@ def test_connector_create_fails_when_index_or_connector_exists( @patch( - "connectors.cli.connector.Connector._Connector__create_api_key", + "connectors_service.cli.connector.Connector._Connector__create_api_key", AsyncMock(return_value={"id": "new_api_key_id", "encoded": "encoded_api_key"}), ) @patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[False, False]), ) def test_connector_create_from_file(): @@ -369,7 +382,7 @@ def test_connector_create_from_file(): ) with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": "new_connector_id"}), ) as patched_create: with runner.isolated_filesystem(): @@ -407,11 +420,11 @@ def test_connector_create_from_file(): @patch( - "connectors.cli.connector.Connector._Connector__create_api_key", + "connectors_service.cli.connector.Connector._Connector__create_api_key", AsyncMock(return_value={"id": "new_api_key_id", "encoded": "encoded_api_key"}), ) @patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[False, False]), ) def test_connector_create_and_update_the_service_config(): @@ -434,7 +447,7 @@ def test_connector_create_and_update_the_service_config(): ) with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": connector_id}), ) as patched_create: with runner.isolated_filesystem(): @@ -469,7 +482,7 @@ def test_connector_create_and_update_the_service_config(): @patch("click.confirm") @patch( - "connectors.cli.index.Index.index_or_connector_exists", + "connectors_service.cli.index.Index.index_or_connector_exists", MagicMock(return_value=[True, False]), ) def test_connector_create_native_connector(patched_confirm): @@ -490,15 +503,15 @@ def test_connector_create_native_connector(patched_confirm): ) with patch( - "connectors.cli.connector.Connector._Connector__create_api_key", + "connectors_service.cli.connector.Connector._Connector__create_api_key", AsyncMock(return_value={"id": "api-key-123", "encoded": "foo"}), ) as patched_create_api_key: with patch( - "connectors.cli.connector.Connector._Connector__store_api_key", + "connectors_service.cli.connector.Connector._Connector__store_api_key", AsyncMock(return_value="secret-123"), ) as patched_store_api_key: with patch( - "connectors.protocol.connectors.ConnectorIndex.index", + "connectors_service.protocol.connectors.ConnectorIndex.index", AsyncMock(return_value={"_id": "new_connector_id"}), ) as patched_create: result = runner.invoke( @@ -537,7 +550,7 @@ def test_index_help_page(): assert "Commands:" in result.output -@patch("connectors.cli.index.Index.list_indices", MagicMock(return_value=[])) +@patch("connectors_service.cli.index.Index.list_indices", MagicMock(return_value=[])) def test_index_list_no_indexes(): runner = CliRunner() result = runner.invoke(cli, ["index", "list"]) @@ -550,7 +563,7 @@ def test_index_list_one_index(): indices = {"test_index": {"docs_count": 10}} with patch( - "connectors.es.cli_client.CLIClient.list_indices", + "connectors_service.es.cli_client.CLIClient.list_indices", AsyncMock(return_value=indices), ): result = runner.invoke(cli, ["index", "list"]) @@ -564,13 +577,13 @@ def test_index_list_one_index_in_serverless(): indices = 
{"test_index": {"docs_count": 10}} with patch( - "connectors.es.cli_client.CLIClient.list_indices" + "connectors_service.es.cli_client.CLIClient.list_indices" ) as mocked_list_indices: mocked_list_indices.side_effect = ApiError( "api_not_available_exception", meta="meta", body="error" ) with patch( - "connectors.es.cli_client.CLIClient.list_indices_serverless", + "connectors_service.es.cli_client.CLIClient.list_indices_serverless", AsyncMock(return_value=indices), ): result = runner.invoke(cli, ["index", "list"]) @@ -584,7 +597,7 @@ def test_index_clean(): runner = CliRunner() index_name = "test_index" with patch( - "connectors.es.cli_client.CLIClient.clean_index", + "connectors_service.es.cli_client.CLIClient.clean_index", AsyncMock(return_value=True), ) as mocked_method: result = runner.invoke(cli, ["index", "clean", index_name]) @@ -599,7 +612,7 @@ def test_index_clean_error(): runner = CliRunner() index_name = "test_index" with patch( - "connectors.es.cli_client.CLIClient.clean_index", + "connectors_service.es.cli_client.CLIClient.clean_index", side_effect=ApiError(500, meta="meta", body="error"), ): result = runner.invoke(cli, ["index", "clean", index_name]) @@ -613,7 +626,7 @@ def test_index_delete(): runner = CliRunner() index_name = "test_index" with patch( - "connectors.es.cli_client.CLIClient.delete_indices", + "connectors_service.es.cli_client.CLIClient.delete_indices", AsyncMock(return_value=None), ) as mocked_method: result = runner.invoke(cli, ["index", "delete", index_name]) @@ -628,7 +641,7 @@ def test_delete_index_error(): runner = CliRunner() index_name = "test_index" with patch( - "connectors.es.cli_client.CLIClient.delete_indices", + "connectors_service.es.cli_client.CLIClient.delete_indices", side_effect=ApiError(500, meta="meta", body="error"), ): result = runner.invoke(cli, ["index", "delete", index_name]) @@ -678,7 +691,9 @@ def test_job_cancel(): job = SyncJobObject(job_index, doc) - with patch("connectors.protocol.SyncJobIndex.get_all_docs", AsyncIterator([job])): + with patch( + "connectors_service.protocol.SyncJobIndex.get_all_docs", AsyncIterator([job]) + ): with patch.object(job, "_terminate") as mocked_method: result = runner.invoke(cli, ["job", "cancel", job_id]) @@ -692,7 +707,7 @@ def test_job_cancel_error(): runner = CliRunner() job_id = "test_job_id" with patch( - "connectors.protocol.SyncJobIndex.get_all_docs", + "connectors_service.protocol.SyncJobIndex.get_all_docs", side_effect=ApiError(500, meta="meta", body="error"), ): result = runner.invoke(cli, ["job", "cancel", job_id]) @@ -706,7 +721,8 @@ def test_job_list_no_jobs(): connector_id = "test_connector_id" with patch( - "connectors.cli.job.Job._Job__async_list_jobs", AsyncMock(return_value=[]) + "connectors_service.cli.job.Job._Job__async_list_jobs", + AsyncMock(return_value=[]), ): result = runner.invoke(cli, ["job", "list", connector_id]) @@ -748,7 +764,8 @@ def test_job_list_one_job(): job = SyncJobObject(job_index, doc) with patch( - "connectors.protocol.connectors.SyncJobIndex.get_all_docs", AsyncIterator([job]) + "connectors_service.protocol.connectors.SyncJobIndex.get_all_docs", + AsyncIterator([job]), ): result = runner.invoke(cli, ["job", "list", connector_id]) @@ -763,7 +780,7 @@ def test_job_list_one_job(): @patch( - "connectors.protocol.connectors.ConnectorIndex.fetch_by_id", + "connectors_service.protocol.connectors.ConnectorIndex.fetch_by_id", AsyncMock(return_value=MagicMock()), ) def test_job_start(): @@ -772,7 +789,7 @@ def test_job_start(): job_id = "test_job_id" with patch( - 
"connectors.protocol.connectors.SyncJobIndex.create", + "connectors_service.protocol.connectors.SyncJobIndex.create", AsyncMock(return_value=job_id), ) as patched_create: result = runner.invoke(cli, ["job", "start", "-i", connector_id, "-t", "full"]) @@ -815,7 +832,7 @@ def test_job_view(): job = SyncJobObject(job_index, doc) with patch( - "connectors.protocol.connectors.SyncJobIndex.fetch_by_id", + "connectors_service.protocol.connectors.SyncJobIndex.fetch_by_id", AsyncMock(return_value=job), ): result = runner.invoke(cli, ["job", "view", job_id]) diff --git a/app/connectors_service/tests/test_kibana.py b/app/connectors_service/tests/test_kibana.py index 236bcd675..b49599104 100644 --- a/app/connectors_service/tests/test_kibana.py +++ b/app/connectors_service/tests/test_kibana.py @@ -8,8 +8,8 @@ import pytest -from connectors.es.management_client import ESManagementClient -from connectors.kibana import main, upsert_index +from connectors_service.es.management_client import ESManagementClient +from connectors_service.kibana import main, upsert_index HERE = os.path.dirname(__file__) FIXTURES_DIR = os.path.abspath(os.path.join(HERE, "fixtures")) diff --git a/app/connectors_service/tests/test_preflight_check.py b/app/connectors_service/tests/test_preflight_check.py index 54c5c5ba1..bdf3e428e 100644 --- a/app/connectors_service/tests/test_preflight_check.py +++ b/app/connectors_service/tests/test_preflight_check.py @@ -8,8 +8,8 @@ import pytest -from connectors.preflight_check import PreflightCheck -from connectors.protocol import CONCRETE_CONNECTORS_INDEX, CONCRETE_JOBS_INDEX +from connectors_service.preflight_check import PreflightCheck +from connectors_service.protocol import CONCRETE_CONNECTORS_INDEX, CONCRETE_JOBS_INDEX connectors_version = "1.2.3.4" headers = {"X-Elastic-Product": "Elasticsearch"} @@ -128,7 +128,7 @@ async def test_pass(mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_pass_serverless(patched_logger, mock_responses): mock_es_info(mock_responses, serverless=True) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -142,7 +142,7 @@ async def test_pass_serverless(patched_logger, mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_pass_serverless_mismatched_versions(patched_logger, mock_responses): mock_es_info(mock_responses, es_version="2.0.0-SNAPSHOT", serverless=True) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -156,7 +156,7 @@ async def test_pass_serverless_mismatched_versions(patched_logger, mock_response @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") @pytest.mark.parametrize( "es_version, expected_log", [ @@ -183,7 +183,7 @@ async def test_fail_mismatched_version( @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") @pytest.mark.parametrize( "es_version, expected_log", [ @@ -210,7 +210,7 @@ async def test_warn_mismatched_version( @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") @pytest.mark.parametrize( "es_version, connectors_version, expected_log", [ @@ -287,7 +287,7 @@ async def test_index_exist_transient_error(mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") 
+@patch("connectors_service.preflight_check.logger") async def test_native_config_is_warned(patched_logger, mock_responses): mock_es_info(mock_responses) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -310,7 +310,7 @@ async def test_native_config_is_warned(patched_logger, mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_native_config_is_forced(patched_logger, mock_responses): mock_es_info(mock_responses) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -325,7 +325,7 @@ async def test_native_config_is_forced(patched_logger, mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_client_config(patched_logger, mock_responses): mock_es_info(mock_responses) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -340,7 +340,7 @@ async def test_client_config(patched_logger, mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_unmodified_default_config(patched_logger, mock_responses): mock_es_info(mock_responses) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -357,7 +357,7 @@ async def test_unmodified_default_config(patched_logger, mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_missing_mode_config(patched_logger, mock_responses): mock_es_info(mock_responses) mock_index_exists(mock_responses, CONCRETE_CONNECTORS_INDEX) @@ -371,7 +371,7 @@ async def test_missing_mode_config(patched_logger, mock_responses): @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_extraction_service_enabled_and_found_writes_info_log( patched_logger, mock_responses ): @@ -395,7 +395,7 @@ async def test_extraction_service_enabled_and_found_writes_info_log( @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_extraction_service_enabled_but_missing_logs_warning( patched_logger, mock_responses ): @@ -419,7 +419,7 @@ async def test_extraction_service_enabled_but_missing_logs_warning( @pytest.mark.asyncio -@patch("connectors.preflight_check.logger") +@patch("connectors_service.preflight_check.logger") async def test_extraction_service_enabled_but_missing_logs_critical( patched_logger, mock_responses ): diff --git a/app/connectors_service/tests/test_service_cli.py b/app/connectors_service/tests/test_service_cli.py index aeba9091c..38a111c2e 100644 --- a/app/connectors_service/tests/test_service_cli.py +++ b/app/connectors_service/tests/test_service_cli.py @@ -13,8 +13,8 @@ from click import ClickException, UsageError from click.testing import CliRunner -from connectors import __version__ -from connectors.service_cli import _start_service, get_event_loop, main +from connectors_service import __version__ +from connectors_service.service_cli import _start_service, get_event_loop, main SUCCESS_EXIT_CODE = 0 CLICK_EXCEPTION_EXIT_CODE = ClickException.exit_code @@ -58,8 +58,8 @@ def test_version_action(option): @pytest.mark.asyncio -@patch("connectors.service_cli.PreflightCheck") -@patch("connectors.service_cli.get_services") +@patch("connectors_service.service_cli.PreflightCheck") 
+@patch("connectors_service.service_cli.get_services") async def test_shutdown_signal_registered( patch_get_services, patch_preflight_check, set_env ): @@ -168,9 +168,10 @@ def test_config_cannot_be_used_with_other_actions(set_env): assert "Cannot use the `config` action with other actions" in result.output -@patch("connectors.service_cli.set_logger") +@patch("connectors_service.service_cli.set_logger") @patch( - "connectors.service_cli.load_config", side_effect=Exception("something went wrong") + "connectors_service.service_cli.load_config", + side_effect=Exception("something went wrong"), ) def test_main_with_invalid_configuration(load_config, set_logger): runner = CliRunner() @@ -209,8 +210,8 @@ def test_unknown_service_type(set_env): ) -@patch("connectors.service_cli._get_uvloop") -@patch("connectors.service_cli.asyncio") +@patch("connectors_service.service_cli._get_uvloop") +@patch("connectors_service.service_cli.asyncio") def test_uvloop_success(patched_asyncio, patched_uvloop): get_event_loop(True) assert patched_asyncio.set_event_loop_policy.called_once_with( @@ -218,9 +219,11 @@ def test_uvloop_success(patched_asyncio, patched_uvloop): ) -@patch("connectors.service_cli._get_uvloop", side_effect=Exception("import fails")) -@patch("connectors.service_cli.asyncio") -@patch("connectors.service_cli.logger") +@patch( + "connectors_service.service_cli._get_uvloop", side_effect=Exception("import fails") +) +@patch("connectors_service.service_cli.asyncio") +@patch("connectors_service.service_cli.logger") def test_uvloop_error(patched_logger, patched_asyncio, patched_uvloop): get_event_loop(True) patched_logger.warning.assert_any_call( diff --git a/app/connectors_service/tests/test_sink.py b/app/connectors_service/tests/test_sink.py index 2d9a3492e..4018a89fc 100644 --- a/app/connectors_service/tests/test_sink.py +++ b/app/connectors_service/tests/test_sink.py @@ -14,8 +14,8 @@ import pytest from elasticsearch import ApiError, BadRequestError -from connectors.es.management_client import ESManagementClient -from connectors.es.sink import ( +from connectors_service.es.management_client import ESManagementClient +from connectors_service.es.sink import ( BIN_DOCS_DOWNLOADED, BULK_OPERATIONS, BULK_RESPONSES, @@ -34,13 +34,13 @@ Sink, SyncOrchestrator, ) -from connectors.protocol import JobType, Pipeline -from connectors.protocol.connectors import ( +from connectors_service.protocol import JobType, Pipeline +from connectors_service.protocol.connectors import ( DELETED_DOCUMENT_COUNT, INDEXED_DOCUMENT_COUNT, INDEXED_DOCUMENT_VOLUME, ) -from connectors.utils import ErrorMonitor, TooManyErrors +from connectors_service.utils import ErrorMonitor, TooManyErrors from tests.commons import AsyncIterator INDEX = "some-index" @@ -91,7 +91,7 @@ def failed_action_log_message(doc_id, action, result, error=BULK_ACTION_ERROR): ) -@patch("connectors.es.sink.CANCELATION_TIMEOUT", -1) +@patch("connectors_service.es.sink.CANCELATION_TIMEOUT", -1) @pytest.mark.asyncio async def test_prepare_content_index_raise_error_when_index_creation_failed( mock_responses, @@ -127,7 +127,7 @@ async def test_prepare_content_index_raise_error_when_index_creation_failed( await es.close() -@patch("connectors.es.sink.CANCELATION_TIMEOUT", -1) +@patch("connectors_service.es.sink.CANCELATION_TIMEOUT", -1) @pytest.mark.asyncio async def test_prepare_content_index_create_index( mock_responses, @@ -179,7 +179,7 @@ async def test_prepare_content_index_create_index( create_index_mock.assert_called_with(index_name, language_code) 
-@patch("connectors.es.sink.CANCELATION_TIMEOUT", -1) +@patch("connectors_service.es.sink.CANCELATION_TIMEOUT", -1) @pytest.mark.asyncio async def test_prepare_content_index(mock_responses): language_code = "en" @@ -300,7 +300,7 @@ def set_responses(mock_responses, ts=None): ) -@patch("connectors.es.sink.CANCELATION_TIMEOUT", -1) +@patch("connectors_service.es.sink.CANCELATION_TIMEOUT", -1) @pytest.mark.asyncio async def test_async_bulk(mock_responses): config = {"host": "http://nowhere.com:9200", "user": "tarek", "password": "blah"} @@ -712,7 +712,7 @@ async def setup_extractor( ], ) @mock.patch( - "connectors.es.management_client.ESManagementClient.yield_existing_documents_metadata" + "connectors_service.es.management_client.ESManagementClient.yield_existing_documents_metadata" ) @pytest.mark.asyncio async def test_get_docs( @@ -734,7 +734,9 @@ async def test_get_docs( [(str(doc["_id"]), doc["_timestamp"]) for doc in existing_docs] ) - with mock.patch("connectors.utils.ConcurrentTasks", return_value=lazy_downloads): + with mock.patch( + "connectors_service.utils.ConcurrentTasks", return_value=lazy_downloads + ): queue = await queue_mock() basic_rule_engine = await basic_rule_engine_mock(doc_should_ingest) @@ -911,7 +913,9 @@ async def test_get_docs_incrementally( ): lazy_downloads = await lazy_downloads_mock() - with mock.patch("connectors.utils.ConcurrentTasks", return_value=lazy_downloads): + with mock.patch( + "connectors_service.utils.ConcurrentTasks", return_value=lazy_downloads + ): queue = await queue_mock() basic_rule_engine = await basic_rule_engine_mock(doc_should_ingest) @@ -1008,7 +1012,7 @@ async def test_get_docs_incrementally( ], ) @mock.patch( - "connectors.es.management_client.ESManagementClient.yield_existing_documents_metadata" + "connectors_service.es.management_client.ESManagementClient.yield_existing_documents_metadata" ) @pytest.mark.asyncio async def test_get_access_control_docs( @@ -1283,7 +1287,7 @@ async def test_batch_bulk_with_errors(patch_logger): patch_logger.assert_present(f"operation index failed for doc 1, {error}") -@patch("connectors.es.sink.CANCELATION_TIMEOUT", -1) +@patch("connectors_service.es.sink.CANCELATION_TIMEOUT", -1) @pytest.mark.parametrize( "extractor_task, extractor_task_done, sink_task, sink_task_done, expected_result", [ @@ -1341,9 +1345,9 @@ async def test_extractor_put_doc(): @pytest.mark.asyncio @mock.patch( - "connectors.es.management_client.ESManagementClient.yield_existing_documents_metadata" + "connectors_service.es.management_client.ESManagementClient.yield_existing_documents_metadata" ) -@mock.patch("connectors.utils.ConcurrentTasks.cancel") +@mock.patch("connectors_service.utils.ConcurrentTasks.cancel") async def test_extractor_get_docs_when_downloads_fail( yield_existing_documents_metadata, concurrent_tasks_cancel ): diff --git a/app/connectors_service/tests/test_sync_job_runner.py b/app/connectors_service/tests/test_sync_job_runner.py index f282294c9..940de0c3e 100644 --- a/app/connectors_service/tests/test_sync_job_runner.py +++ b/app/connectors_service/tests/test_sync_job_runner.py @@ -13,11 +13,11 @@ ConflictError, ) -from connectors.es.client import License -from connectors.es.index import DocumentNotFoundError -from connectors.protocol import JobStatus, JobType, Pipeline -from connectors.protocol.connectors import ProtocolError -from connectors.sync_job_runner import ( +from connectors_service.es.client import License +from connectors_service.es.index import DocumentNotFoundError +from connectors_service.protocol import 
JobStatus, JobType, Pipeline +from connectors_service.protocol.connectors import ProtocolError +from connectors_service.sync_job_runner import ( SyncJobRunner, SyncJobStartError, ) @@ -122,7 +122,7 @@ def create_runner( @pytest.fixture(autouse=True) def sync_orchestrator_mock(): with patch( - "connectors.sync_job_runner.SyncOrchestrator" + "connectors_service.sync_job_runner.SyncOrchestrator" ) as sync_orchestrator_klass_mock: sync_orchestrator_mock = Mock() sync_orchestrator_mock.prepare_content_index = AsyncMock() @@ -568,7 +568,7 @@ async def test_sync_job_runner_suspend(job_type, sync_cursor, sync_orchestrator_ sync_job_runner.sync_orchestrator.cancel.assert_called_once() -@patch("connectors.sync_job_runner.ES_ID_SIZE_LIMIT", 1) +@patch("connectors_service.sync_job_runner.ES_ID_SIZE_LIMIT", 1) @pytest.mark.asyncio async def test_prepare_docs_when_original_id_and_hashed_id_too_long_then_skip_doc(): _id_too_long = "ab" @@ -583,7 +583,7 @@ async def test_prepare_docs_when_original_id_and_hashed_id_too_long_then_skip_do assert len(docs) == 0 -@patch("connectors.sync_job_runner.ES_ID_SIZE_LIMIT", 10) +@patch("connectors_service.sync_job_runner.ES_ID_SIZE_LIMIT", 10) @pytest.mark.parametrize("_id", ["ab", 1, 1.5]) @pytest.mark.asyncio async def test_prepare_docs_when_original_id_below_limit_then_yield_doc_with_original_id( @@ -599,7 +599,7 @@ async def test_prepare_docs_when_original_id_below_limit_then_yield_doc_with_ori assert docs[0]["_id"] == _id -@patch("connectors.sync_job_runner.ES_ID_SIZE_LIMIT", 3) +@patch("connectors_service.sync_job_runner.ES_ID_SIZE_LIMIT", 3) @pytest.mark.asyncio async def test_prepare_docs_when_original_id_above_limit_and_hashed_id_below_limit_then_yield_doc_with_hashed_id(): _id_too_long = "abcd" @@ -625,8 +625,8 @@ async def test_prepare_docs_when_original_id_above_limit_and_hashed_id_below_lim ], ) @pytest.mark.asyncio -@patch("connectors.sync_job_runner.JOB_REPORTING_INTERVAL", 0) -@patch("connectors.sync_job_runner.JOB_CHECK_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_REPORTING_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_CHECK_INTERVAL", 0) async def test_sync_job_runner_reporting_metadata( job_type, sync_cursor, sync_orchestrator_mock ): @@ -663,8 +663,8 @@ async def test_sync_job_runner_reporting_metadata( "job_type", [JobType.FULL, JobType.INCREMENTAL, JobType.ACCESS_CONTROL] ) @pytest.mark.asyncio -@patch("connectors.sync_job_runner.JOB_REPORTING_INTERVAL", 0) -@patch("connectors.sync_job_runner.JOB_CHECK_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_REPORTING_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_CHECK_INTERVAL", 0) async def test_sync_job_runner_connector_not_found(job_type, sync_orchestrator_mock): ingestion_stats = { "indexed_document_count": 15, @@ -704,8 +704,8 @@ def _raise_document_not_found_error(): ], ) @pytest.mark.asyncio -@patch("connectors.sync_job_runner.JOB_REPORTING_INTERVAL", 0) -@patch("connectors.sync_job_runner.JOB_CHECK_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_REPORTING_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_CHECK_INTERVAL", 0) async def test_sync_job_runner_sync_job_not_found( job_type, sync_cursor, sync_orchestrator_mock ): @@ -739,8 +739,8 @@ async def test_sync_job_runner_sync_job_not_found( ], ) @pytest.mark.asyncio -@patch("connectors.sync_job_runner.JOB_REPORTING_INTERVAL", 0) -@patch("connectors.sync_job_runner.JOB_CHECK_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_REPORTING_INTERVAL", 0) 
+@patch("connectors_service.sync_job_runner.JOB_CHECK_INTERVAL", 0) async def test_sync_job_runner_canceled(job_type, sync_cursor, sync_orchestrator_mock): ingestion_stats = { "indexed_document_count": 15, @@ -780,8 +780,8 @@ def _update_job_status(): ], ) @pytest.mark.asyncio -@patch("connectors.sync_job_runner.JOB_REPORTING_INTERVAL", 0) -@patch("connectors.sync_job_runner.JOB_CHECK_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_REPORTING_INTERVAL", 0) +@patch("connectors_service.sync_job_runner.JOB_CHECK_INTERVAL", 0) async def test_sync_job_runner_not_running( job_type, sync_cursor, sync_orchestrator_mock ): @@ -877,7 +877,7 @@ def __init__(self, config): @patch( - "connectors.sync_job_runner.SyncJobRunner._skip_unchanged_documents_enabled", + "connectors_service.sync_job_runner.SyncJobRunner._skip_unchanged_documents_enabled", Mock(return_value=True), ) @pytest.mark.asyncio @@ -898,7 +898,7 @@ async def test_incremental_sync_with_skip_unchanged_documents_generator(): @patch( - "connectors.sync_job_runner.SyncJobRunner._skip_unchanged_documents_enabled", + "connectors_service.sync_job_runner.SyncJobRunner._skip_unchanged_documents_enabled", Mock(return_value=False), ) @pytest.mark.asyncio diff --git a/app/connectors_service/tests/test_utils.py b/app/connectors_service/tests/test_utils.py index a0b97e85d..9ad6a9024 100644 --- a/app/connectors_service/tests/test_utils.py +++ b/app/connectors_service/tests/test_utils.py @@ -17,7 +17,7 @@ from freezegun import freeze_time from pympler import asizeof -from connectors.utils import ( +from connectors_service.utils import ( ConcurrentTasks, ErrorMonitor, InvalidIndexNameError, @@ -179,7 +179,7 @@ def test_mem_queue_speed(): def mem_queue(): import asyncio - from connectors.utils import MemQueue + from connectors_service.utils import MemQueue queue = MemQueue( maxmemsize=1024 * 1024, refresh_interval=0.1, refresh_timeout=2 @@ -994,7 +994,7 @@ def _init_func(html, parser_type=None, features=None): parser_mock = Mock() return parser_mock - with patch("connectors.utils.BeautifulSoup") as beautiful_soup_patch: + with patch("connectors_service.utils.BeautifulSoup") as beautiful_soup_patch: beautiful_soup_patch.side_effect = _init_func html = "lala
" diff --git a/catalog-info.yaml b/catalog-info.yaml index f47fba985..98c6c7704 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -1,14 +1,14 @@ ################################################################################ -###################### catalog-info for connectors ###################### -# Declare a Backstage Component for connectors +###################### catalog-info for connectors_service ###################### +# Declare a Backstage Component for connectors_service # When doing changes validate them using https://backstage.elastic.dev/entity-validation --- # yaml-language-server: $schema=https://json.schemastore.org/catalog-info.json apiVersion: "backstage.io/v1alpha1" kind: "Component" metadata: - name: "elastic-connectors" - description: "Ingestion service hosting connectors that can be used to ingest data into Elasticsearch" + name: "elastic-connectors-service" + description: "Ingestion service hosting connectors_service that can be used to ingest data into Elasticsearch" annotations: backstage.io/source-location: "url:https://github.com/elastic/connectors/" github.com/project-slug: "elastic/connectors" @@ -30,10 +30,10 @@ spec: apiVersion: "backstage.io/v1alpha1" kind: "Resource" metadata: - name: "connectors" + name: "connectors_service" description: "Lints and tests Elastic connector framework and connector implementations" links: - - title: "connectors Main Pipeline" + - title: "connectors_service Main Pipeline" url: "https://buildkite.com/elastic/connectors" spec: type: "buildkite-pipeline" @@ -43,7 +43,7 @@ spec: apiVersion: "buildkite.elastic.dev/v1" kind: "Pipeline" metadata: - name: "connectors" + name: "connectors_service" description: "Your Connector Service to ingest data into Elasticsearch" spec: branch_configuration: "main" @@ -90,10 +90,10 @@ spec: apiVersion: "backstage.io/v1alpha1" kind: "Resource" metadata: - name: "connectors-nightly" + name: "connectors-service-nightly" description: "Nightly Connector Service Tests" links: - - title: "connectors Nightly Buildkite Jobs" + - title: "connectors-service Nightly Buildkite Jobs" url: "https://buildkite.com/elastic/connectors" spec: type: "buildkite-pipeline" @@ -103,7 +103,7 @@ spec: apiVersion: "buildkite.elastic.dev/v1" kind: "Pipeline" metadata: - name: "connectors-nightly" + name: "connectors-service-nightly" description: "Connectors Service Nightly Tests" links: - title: "Connector Service Nightly Pipeline" @@ -148,10 +148,10 @@ spec: apiVersion: "backstage.io/v1alpha1" kind: "Resource" metadata: - name: "connectors-nightly-aarch64" + name: "connectors-service-nightly-aarch64" description: "Nightly Connector Service Tests on aarch64" links: - - title: "connectors Nightly Buildkite Jobs" + - title: "connectors_service Nightly Buildkite Jobs" url: "https://buildkite.com/elastic/connectors" spec: type: "buildkite-pipeline" @@ -161,7 +161,7 @@ spec: apiVersion: "buildkite.elastic.dev/v1" kind: "Pipeline" metadata: - name: "connectors-nightly-aarch64" + name: "connectors-service-nightly-aarch64" description: "Connectors Service Nightly Tests on aarch64" links: - title: "Connector Service Nightly Pipeline" @@ -209,8 +209,8 @@ spec: apiVersion: "backstage.io/v1alpha1" kind: "Resource" metadata: - name: "connectors-docker-build-publish" - description: "Docker image build and publish for Elastic connectors" + name: "connectors-service-docker-build-publish" + description: "Docker image build and publish for Elastic connectors_service" links: - title: "Connectors Docker Build and Publish" url: 
"https://buildkite.com/elastic/connectors-docker-build-publish" @@ -222,7 +222,7 @@ spec: apiVersion: "buildkite.elastic.dev/v1" kind: "Pipeline" metadata: - name: "connectors-docker-build-publish" + name: "connectors-service-docker-build-publish" spec: repository: "elastic/connectors" pipeline_file: ".buildkite/release-pipeline.yml" diff --git a/docs/DEVELOPING.md b/docs/DEVELOPING.md index b849f85f4..646c5a241 100644 --- a/docs/DEVELOPING.md +++ b/docs/DEVELOPING.md @@ -68,8 +68,8 @@ A source class can be any Python class, and is declared into the `config.yml` fi ```yaml sources: - mongodb: connectors.sources.mongo:MongoDataSource - s3: connectors.sources.s3:S3DataSource + mongodb: connectors_service.sources.mongo:MongoDataSource + s3: connectors_service.sources.s3:S3DataSource ``` The source class is declared with its [Fully Qualified Name(FQN)](https://en.wikipedia.org/wiki/Fully_qualified_name) so the framework knows where the class is located, so it can import it and instantiate it. diff --git a/docs/DOCKER.md b/docs/DOCKER.md index c50f07b4d..8c22edca2 100644 --- a/docs/DOCKER.md +++ b/docs/DOCKER.md @@ -91,7 +91,7 @@ docker run \ --network "elastic" \ --tty \ --rm \ -docker.elastic.co/integrations/elastic-connectors:-SNAPSHOT \ +docker.elastic.co/integrations/elastic-connectors-service:-SNAPSHOT \ /app/bin/elastic-ingest \ -c /config/config.yml ``` diff --git a/libs/connectors_sdk/connectors_sdk/logger.py b/libs/connectors_sdk/connectors_sdk/logger.py index 6499b5e94..a273923fd 100644 --- a/libs/connectors_sdk/connectors_sdk/logger.py +++ b/libs/connectors_sdk/connectors_sdk/logger.py @@ -151,7 +151,7 @@ def _log(self, level, msg, args, exc_info=None, prefix=None, extra=None): extra = {} extra.update( { - "service.type": "connectors-python", + "service.type": "connectors-service-python", "service.version": __version__, } ) diff --git a/libs/connectors_sdk/tests/test_logger.py b/libs/connectors_sdk/tests/test_logger.py index 349b9d182..db2f307c6 100644 --- a/libs/connectors_sdk/tests/test_logger.py +++ b/libs/connectors_sdk/tests/test_logger.py @@ -47,7 +47,7 @@ def _w(msg): # make sure it's JSON and we have service.type data = json.loads(ecs_log) - assert data["service"]["type"] == "connectors-python" + assert data["service"]["type"] == "connectors-service-python" def test_tracer(): diff --git a/resources/agent/python-elastic-agent-client.spec.yml b/resources/agent/python-elastic-agent-client.spec.yml index 7f60fb975..86171eff3 100644 --- a/resources/agent/python-elastic-agent-client.spec.yml +++ b/resources/agent/python-elastic-agent-client.spec.yml @@ -1,6 +1,6 @@ version: 2 inputs: - - name: connectors-py + - name: connectors-service-py description: "Connectors Py component input" platforms: &platforms - linux/amd64 diff --git a/scripts/stack/README.md b/scripts/stack/README.md index 4fae20d5e..9ac199008 100644 --- a/scripts/stack/README.md +++ b/scripts/stack/README.md @@ -58,7 +58,7 @@ Once the stack is running, you can monitor the logs from the Connectors instance ``` or: ```bash -docker-compose -f ./scripts/stack/docker/docker-compose.yml logs -f elastic-connectors +docker-compose -f ./scripts/stack/docker/docker-compose.yml logs -f elastic-connectors-service ``` ## Prompted Configuration diff --git a/scripts/stack/copy-config.sh b/scripts/stack/copy-config.sh index ea2082fcc..26531ff2f 100755 --- a/scripts/stack/copy-config.sh +++ b/scripts/stack/copy-config.sh @@ -32,7 +32,7 @@ if [[ "$is_example_config" == true ]]; then if [[ "$MACHINE_OS" == "MacOS" || "$MACHINE_OS" == 
"FreeBSD" ]]; then sed_cmd="sed -i -e" fi - $sed_cmd '/connectors:/s/^#//g' "$script_config" + $sed_cmd '/connectors_service:/s/^#//g' "$script_config" $sed_cmd '/elasticsearch.host/s/^#//g' "$script_config" $sed_cmd '/elasticsearch.username/s/^#//g' "$script_config" $sed_cmd '/elasticsearch.password/s/^#//g' "$script_config" diff --git a/scripts/stack/docker/docker-compose.yml b/scripts/stack/docker/docker-compose.yml index 99d3eca1b..fcf7462d9 100644 --- a/scripts/stack/docker/docker-compose.yml +++ b/scripts/stack/docker/docker-compose.yml @@ -43,12 +43,12 @@ services: XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: c69548d9027afcf4d55146b1d425a9f4c69548d9027afcf4d55146b1d425a9f4 elastic-connectors: - image: docker.elastic.co/integrations/elastic-connectors:${CONNECTORS_VERSION} + image: docker.elastic.co/integrations/elastic-connectors-service:${CONNECTORS_VERSION} depends_on: - elasticsearch - kibana volumes: - - ${CURDIR}/connectors-config:/config + - ${CURDIR}/connectors-service-config:/config command: /app/bin/elastic-ingest -c /config/config.yml network_mode: "elastic" diff --git a/scripts/stack/run-stack.sh b/scripts/stack/run-stack.sh index 0876c136d..515bc53ab 100755 --- a/scripts/stack/run-stack.sh +++ b/scripts/stack/run-stack.sh @@ -31,7 +31,7 @@ fi if [ "${update_images:-}" = true ] then echo "Ensuring we have the latest images..." - docker-compose -f $compose_file pull elasticsearch kibana elastic-connectors + docker-compose -f $compose_file pull elasticsearch kibana elastic-connectors-service fi if [[ "${connectors_only}" != true ]]; then @@ -75,7 +75,7 @@ if [ "${no_connectors:-}" == false ]; then exit 2 fi - docker-compose -f $compose_file up --detach elastic-connectors + docker-compose -f $compose_file up --detach elastic-connectors-service else echo "... Connectors service is set to not start... skipping..." fi