Skip to content

Commit

Permalink
Merge branch 'main' into kostasb/switch-dockerfile-image
Browse files Browse the repository at this point in the history
  • Loading branch information
kostasb authored Jan 20, 2025
2 parents bfd0ef8 + 8376819 commit c243aa4
Show file tree
Hide file tree
Showing 18 changed files with 140 additions and 62 deletions.
3 changes: 1 addition & 2 deletions .backportrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,7 @@
{ "name": "main", "checked": true },
"8.x",
"8.17",
"8.16",
"8.15"
"8.16"
],
"fork": false,
"targetPRLabels": ["backport"],
Expand Down
17 changes: 15 additions & 2 deletions .buildkite/publish/dra/init_dra_publishing.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,10 @@ fi
if [[ "${BUILDKITE_BRANCH:-}" =~ ([0-9]\.[0-9x]*$) ]]; then
export PUBLISH_STAGING="true"
fi
if [ -n "${VERSION_QUALIFIER:-}" ]; then
# this is a special case where we will release a pre-release artifact, regardless of branch
export PUBLISH_STAGING="true"
fi

# Sanity check in the logs to list the downloaded artifacts
chmod -R a+rw "${RELEASE_DIR}/dist"
Expand Down Expand Up @@ -149,12 +153,21 @@ fi

# generate the dependency report and publish STAGING artifacts
if [[ "${PUBLISH_STAGING:-}" == "true" ]]; then
dependencyReportName="dependencies-${VERSION}.csv";
if [ -n "${VERSION_QUALIFIER:-}" ]; then
dependencyReportName="dependencies-${VERSION}-${VERSION_QUALIFIER}.csv";
zip_artifact_name="connectors-${VERSION}-${VERSION_QUALIFIER}.zip"
cp $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-docker-image-linux-amd64.tar.gz $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-$VERSION_QUALIFIER-docker-image-linux-amd64.tar.gz
cp $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-docker-image-linux-arm64.tar.gz $DRA_ARTIFACTS_DIR/$PROJECT_NAME-$VERSION-$VERSION_QUALIFIER-docker-image-linux-arm64.tar.gz
else
dependencyReportName="dependencies-${VERSION}.csv";
zip_artifact_name="connectors-${VERSION}.zip"
fi

echo "-------- Generating STAGING dependency report: ${dependencyReportName}"
generateDependencyReport $DEPENDENCIES_REPORTS_DIR/$dependencyReportName

echo "-------- Publishing STAGING DRA Artifacts"
cp $RELEASE_DIR/dist/elasticsearch_connectors-${VERSION}.zip $DRA_ARTIFACTS_DIR/connectors-${VERSION}.zip
cp $RELEASE_DIR/dist/elasticsearch_connectors-${VERSION}.zip $DRA_ARTIFACTS_DIR/${zip_artifact_name}
setDraVaultCredentials
export WORKFLOW="staging"

Expand Down
9 changes: 9 additions & 0 deletions .buildkite/publish/dra/publish-daily-release-artifact.sh
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,14 @@ if [[ "${WORKFLOW:-}" != "staging" && "${WORKFLOW:-}" != "snapshot" ]]; then
exit 2
fi

# snapshot workflows do not use qualifiers
if [[ "${WORKFLOW:-}" == "snapshot" ]]; then
echo "SNAPSHOT workflows ignore version qualifier"
version_qualifier=""
else
version_qualifier="${VERSION_QUALIFIER:-}"
fi

# Version. This is pulled from config/product_version.
if [[ "${VERSION:-}" == "" ]]; then
echo "ERROR: VERSION required!"
Expand Down Expand Up @@ -87,4 +95,5 @@ docker run --rm \
--commit "${REVISION}" \
--workflow "${WORKFLOW}" \
--version "${VERSION}" \
--qualifier "${version_qualifier:-}" \
--artifact-set main
7 changes: 5 additions & 2 deletions .buildkite/run_linter.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ init_python

if is_pr && ! is_fork; then
echo "We're on PR, running autoformat"

export GH_TOKEN="$VAULT_GITHUB_TOKEN"
source .buildkite/publish/git-setup.sh

if ! make autoformat ; then
echo "make autoformat ran with errors, exiting"
exit 1
Expand All @@ -18,10 +22,9 @@ if is_pr && ! is_fork; then
echo "Nothing to be fixed by autoformat"
exit 0
else
source .buildkite/publish/git-setup.sh

git --no-pager diff
echo "linting errors are fixed, pushing the diff"
export GH_TOKEN="$VAULT_GITHUB_TOKEN"

git add .
git commit -m"make autoformat"
Expand Down
36 changes: 18 additions & 18 deletions .buildkite/run_notice_check.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,28 +7,28 @@ source .buildkite/shared.sh

init_python

make notice
if is_pr && ! is_fork; then
echo 'Running on a PR that is not a fork, will commit changes'

if [ -z "$(git status --porcelain | grep NOTICE.txt)" ]; then
echo 'Nothing changed'
exit 0
else
echo 'New changes to NOTICE.txt:'
git --no-pager diff
if is_pr && ! is_fork; then
echo 'Running on a PR that is not a fork, will commit changes'
source .buildkite/publish/git-setup.sh
export GH_TOKEN="$VAULT_GITHUB_TOKEN"
export GH_TOKEN="$VAULT_GITHUB_TOKEN"
source .buildkite/publish/git-setup.sh
make notice

if [ -z "$(git status --porcelain | grep NOTICE.txt)" ]; then
echo 'Nothing changed'
exit 0
else
echo 'New changes to NOTICE.txt:'
git --no-pager diff

git add NOTICE.txt
git commit -m"Update NOTICE.txt"
git push
sleep 15
else
echo 'Running against a fork or a non-PR change, skipping pushing changes and just failing instead'
fi

exit 1
exit 1
fi
else
echo 'Skipping autofix'
make notice
exit 0
fi

exit 0
12 changes: 6 additions & 6 deletions Dockerfile.agent
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,16 @@ FROM docker.elastic.co/elastic-agent/elastic-agent:9.0.0-SNAPSHOT

USER root

# Install apt-get dependencies
RUN apt-get update && apt-get install -y \
software-properties-common \
# Install basic dependencies
RUN microdnf update && microdnf install -y \
vim \
wget \
git \
make \
&& add-apt-repository ppa:deadsnakes/ppa \
&& apt-get update && apt-get install -y python3.11 python3.11-venv \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
python3.11 \
python3.11-pip \
&& microdnf clean all


# Install Go-based yq separately
RUN wget https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 -O /usr/bin/yq && \
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile.ftest.wolfi
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM docker.elastic.co/wolfi/python:3.11-dev@sha256:c196966593fe640ccf8ac044d4b766a4e22a78809d03aaceff58cda3494141c5
FROM docker.elastic.co/wolfi/python:3.11-dev@sha256:337643dccf2afac53c59955b9244f51ea315541a1641fb72b35d7e9ae2bca67d
USER root
COPY . /connectors
WORKDIR /connectors
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile.wolfi
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM docker.elastic.co/wolfi/python:3.11-dev@sha256:c196966593fe640ccf8ac044d4b766a4e22a78809d03aaceff58cda3494141c5
FROM docker.elastic.co/wolfi/python:3.11-dev@sha256:337643dccf2afac53c59955b9244f51ea315541a1641fb72b35d7e9ae2bca67d
USER root
COPY . /app
WORKDIR /app
Expand Down
8 changes: 7 additions & 1 deletion connectors/agent/connector_record_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,9 @@ async def ensure_connector_records_exist(self, agent_config, connector_name=None
random_connector_name_id = generate_random_id(length=4)
connector_name = f"[Elastic-managed] {service_type} connector {random_connector_name_id}"

if not await self.connector_index.connector_exists(connector_id):
if not await self.connector_index.connector_exists(
connector_id, include_deleted=True
):
try:
await self.connector_index.connector_put(
connector_id=connector_id,
Expand All @@ -64,6 +66,10 @@ async def ensure_connector_records_exist(self, agent_config, connector_name=None
f"Failed to create connector record for {connector_id}: {e}"
)
raise e
else:
logger.debug(
f"Skipping connector creation. Connector record for {connector_id} already exists."
)

def _check_agent_config_ready(self, agent_config):
"""
Expand Down
13 changes: 13 additions & 0 deletions connectors/es/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,14 @@ class TemporaryConnectorApiWrapper(ESClient):
def __init__(self, elastic_config):
super().__init__(elastic_config)

async def connector_get(self, connector_id, include_deleted):
return await self.client.perform_request(
"GET",
f"/_connector/{connector_id}",
headers={"accept": "application/json"},
params={"include_deleted": include_deleted},
)

async def connector_check_in(self, connector_id):
return await self.client.perform_request(
"PUT",
Expand Down Expand Up @@ -98,6 +106,11 @@ async def connector_check_in(self, connector_id):
partial(self._api_wrapper.connector_check_in, connector_id)
)

async def connector_get(self, connector_id, include_deleted=False):
return await self._retrier.execute_with_retry(
partial(self._api_wrapper.connector_get, connector_id, include_deleted)
)

async def connector_put(
self, connector_id, service_type, connector_name, index_name, is_native
):
Expand Down
16 changes: 11 additions & 5 deletions connectors/protocol/connectors.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,15 @@
from datetime import datetime, timezone
from enum import Enum

from elasticsearch import ApiError
from elasticsearch import (
ApiError,
)
from elasticsearch import (
NotFoundError as ElasticNotFoundError,
)

from connectors.es import ESDocument, ESIndex
from connectors.es.client import with_concurrency_control
from connectors.es.index import DocumentNotFoundError
from connectors.filtering.validation import (
FilteringValidationState,
InvalidFilteringError,
Expand Down Expand Up @@ -179,11 +183,13 @@ async def connector_put(
is_native=is_native,
)

async def connector_exists(self, connector_id):
async def connector_exists(self, connector_id, include_deleted=False):
try:
doc = await self.fetch_by_id(connector_id)
doc = await self.api.connector_get(
connector_id=connector_id, include_deleted=include_deleted
)
return doc is not None
except DocumentNotFoundError:
except ElasticNotFoundError:
return False
except Exception as e:
logger.error(
Expand Down
12 changes: 11 additions & 1 deletion connectors/source.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,17 @@ def _convert(self, value, field_type_):
# list requires special type casting
if cast_type is list:
if isinstance(value, str):
return [item.strip() for item in value.split(",")] if value else []
items = []
if value:
for item in value.split(","):
item = item.strip()
if not item:
logger.debug(
"Empty string detected in the comma-separated list. It will be skipped."
)
else:
items.append(item)
return items
elif isinstance(value, int):
return [value]
elif isinstance(value, set):
Expand Down
22 changes: 11 additions & 11 deletions docs/CLI.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,23 +16,23 @@ Connectors CLI helps with Elastic Connectors managing connectors and running syn
## Installation
1. Clone the repository `git clone https://github.com/elastic/connectors.git`
2. Run `make clean install` to install dependencies and create executable files.
3. Connectors CLI is available via `./bin/connectors`
3. Connectors CLI is available via `.venv/bin/connectors`

## Configuration
**Note:** Make sure your Elasticsearch instance is up and running.

1. Run `./bin/connectors login` to authenticate the CLI with an Elasticsearch instance.
1. Run `.venv/bin/connectors login` to authenticate the CLI with an Elasticsearch instance.
2. Provide credentials
3. The command will create or ask to rewrite an existing configuration file in `./cli/config.yml`

By default, the CLI uses basic authentication method (username, password) however an API key can be used too.
Run `./bin/connectors login --method apikey` to authenticate the CLI via your API key.
Run `.venv/bin/connectors login --method apikey` to authenticate the CLI via your API key.

When you run any command you can specify a configuration file using `-c` argument.
Example:

```bash
./bin/connectors -c <config-file-path.yml> connector list
.venv/bin/connectors -c <config-file-path.yml> connector list
```

## Available commands
Expand All @@ -41,7 +41,7 @@ Connectors CLI provides a `--help`/`-h` argument that can be used with any comma

For example:
```bash
./bin/connectors --help
.venv/bin/connectors --help


Usage: connectors [OPTIONS] COMMAND [ARGS]...
Expand Down Expand Up @@ -76,7 +76,7 @@ To bypass interactive mode you can use the `--from-file` argument, pointing to a
Examples:

```console
./bin/connectors connector create \
.venv/bin/connectors connector create \
--index-name my-index \
--service-type sharepoint_online \
--index-language en \
Expand All @@ -95,7 +95,7 @@ Lists all the existing connectors
Examples:

```console
./bin/connectors connector list
.venv/bin/connectors connector list
```

This will display all existing connectors and the associated indices.
Expand All @@ -105,7 +105,7 @@ Lists all jobs and their stats.

Examples
```console
./bin/connectors job list -- <connector_id>
.venv/bin/connectors job list -- <connector_id>
```

This will display all sync jobs including information like job status, number of indexed documents and index data volume associated with `connector_id`.
Expand All @@ -116,7 +116,7 @@ Marks the job as `cancelling` to let Connector services know that the job has to
Examples:

```console
./bin/connectors job cancel -- <job_id>
.venv/bin/connectors job cancel -- <job_id>
```

#### `connectors job start`
Expand All @@ -125,7 +125,7 @@ Schedules a new sync job and lets Connector service pick it up.
Examples:

```console
./bin/connectors job start -- \
.venv/bin/connectors job start -- \
-i <connector_id> \
-t <job_type{full,incremental,access_control}> \
-o <format{text,json}>
Expand All @@ -139,7 +139,7 @@ Shows information about a sync job.
Examples:

```console
./bin/connectors job view -- <job_id> -o <format{text,json}
.venv/bin/connectors job view -- <job_id> -o <format{text,json}>
```

This will display information about the job including job id, connector id, indexed document counts and index data value.
2 changes: 1 addition & 1 deletion docs/DEVELOPING.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ $ make clean install
The `elastic-ingest` CLI will be installed on your system:

```shell
$ bin/elastic-ingest --help
$ .venv/bin/elastic-ingest --help
usage: elastic-ingest [-h] [--action {poll,list}] [-c CONFIG_FILE] [--log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL} | --debug] [--filebeat] [--version] [--uvloop]

options:
Expand Down
12 changes: 11 additions & 1 deletion docs/RELEASING.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@ Take care of the branching (minor releases only):

- Increment the VERSION on main to match the next minor release
- Create a new maintenance branch
- Make sure the `.backportrc.json` is updated. The previous minor is added to `targetBranchChoices` and the new minor is used in `branchLabelMapping`
- Make sure `.backportrc.json` is updated: the previous minor is added to `targetBranchChoices` and the new minor is used in `branchLabelMapping`
- Make sure `renovate.json` is updated: the previous minor is added to `labels` (for example, `v8.18`). [Create that label](https://github.com/elastic/connectors/labels) if it doesn't exist yet


## Unified release, (>= 8.16)
Expand All @@ -26,6 +27,15 @@ On the day of the release, `#mission-control` will notify the release manager th

The Unified Release build will take care of producing git tags and official artifacts from our most recent DRA artifacts.

### Pre-release artifacts

If `#mission-control` asks for a pre-release artifact to be built, trigger the build pipeline from the relevant branch
and add an environment variable `VERSION_QUALIFIER` set to the value of the pre-release qualifier.

For example, to release 9.0.0-BC1, you would set `VERSION_QUALIFIER=BC1` for this build.

Note that the qualified artifacts will only show up in DRA "staging" but not "snapshot" reports.

### In-Between releases

Sometimes, we need to release Connectors independently of the Elastic unified-release.
Expand Down
Loading

0 comments on commit c243aa4

Please sign in to comment.