Merge pull request opendatahub-io#1778 from christianvogt/no-scheme
Fix error creating pipeline server when data connection endpoint omits scheme
openshift-merge-robot authored Sep 15, 2023
2 parents c3b2b43 + 27d87fe commit b4d723e
Showing 3 changed files with 148 additions and 9 deletions.
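The fix boils down to the endpoint-parsing change in configurePipelinesServer/utils.ts (last file below). A minimal sketch of the new behavior, assuming a standalone helper named parseEndpoint that exists only for illustration; it uses the regex introduced by this PR:

const parseEndpoint = (endpoint: string) => {
  // The scheme group is now optional; fall back to https when it is omitted.
  const [, scheme, host] = endpoint.match(/^(?:(\w+):\/\/)?(.*)/) ?? [];
  return { scheme: scheme || 'https', host: host?.replace(/\/$/, '') || '' };
};

parseEndpoint('http://s3.amazonaws.com'); // { scheme: 'http', host: 's3.amazonaws.com' }
parseEndpoint('s3.amazonaws.com'); // { scheme: 'https', host: 's3.amazonaws.com' }

With the previous pattern /^(\w+):\/\/(.*)/, a scheme-less endpoint produced no match at all, leaving the host undefined, so the later .replace() call threw while creating the pipeline server.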
frontend/jest.config.js: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ module.exports = {
   testEnvironment: 'jest-environment-jsdom',

   // include projects from node_modules as required
-  transformIgnorePatterns: ['node_modules/(?!yaml)'],
+  transformIgnorePatterns: ['node_modules/(?!yaml|@openshift|lodash-es|uuid)'],

   // A list of paths to snapshot serializer modules Jest should use for snapshot testing
   snapshotSerializers: [],
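The extra entries in transformIgnorePatterns are presumably needed because packages reached by the new test (@openshift scoped packages, lodash-es, uuid) publish ES module builds that Jest must transform. A quick sketch of how the negative lookahead behaves; the sample paths are illustrative only, not from the repository:

// Paths that match an ignore pattern are NOT transformed; the lookahead exempts the listed packages.
const ignorePattern = /node_modules\/(?!yaml|@openshift|lodash-es|uuid)/;

ignorePattern.test('node_modules/react/index.js'); // true: left untransformed
ignorePattern.test('node_modules/lodash-es/lodash.js'); // false: transformed by Jest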
New test file covering createDSPipelineResourceSpec: 130 additions
@@ -0,0 +1,130 @@
import { AWS_KEYS } from '~/pages/projects/dataConnections/const';
import { PipelineServerConfigType } from '~/concepts/pipelines/content/configurePipelinesServer/types';
import { createDSPipelineResourceSpec } from '~/concepts/pipelines/content/configurePipelinesServer/utils';

describe('configure pipeline server utils', () => {
  describe('createDSPipelineResourceSpec', () => {
    const createPipelineServerConfig = () =>
      ({
        database: {
          useDefault: true,
          value: [],
        },
        objectStorage: {
          useExisting: true,
          existingName: '',
          existingValue: [],
        },
      } as PipelineServerConfigType);

    type SecretsResponse = Parameters<typeof createDSPipelineResourceSpec>[1];

    const createSecretsResponse = (
      databaseSecret?: SecretsResponse[0],
      objectStorageSecret?: SecretsResponse[1],
    ): SecretsResponse => [databaseSecret, objectStorageSecret ?? { secretName: '', awsData: [] }];

    it('should create resource spec', () => {
      const spec = createDSPipelineResourceSpec(
        createPipelineServerConfig(),
        createSecretsResponse(),
      );
      expect(spec).toEqual({
        database: undefined,
        objectStorage: {
          externalStorage: {
            bucket: '',
            host: '',
            s3CredentialsSecret: {
              accessKey: 'AWS_ACCESS_KEY_ID',
              secretKey: 'AWS_SECRET_ACCESS_KEY',
              secretName: '',
            },
            scheme: 'https',
          },
        },
      });
    });

    it('should parse S3 endpoint with scheme', () => {
      const secretsResponse = createSecretsResponse();
      secretsResponse[1].awsData = [
        { key: AWS_KEYS.S3_ENDPOINT, value: 'http://s3.amazonaws.com' },
      ];
      const spec = createDSPipelineResourceSpec(createPipelineServerConfig(), secretsResponse);
      expect(spec.objectStorage.externalStorage?.scheme).toBe('http');
      expect(spec.objectStorage.externalStorage?.host).toBe('s3.amazonaws.com');
    });

    it('should parse S3 endpoint without scheme', () => {
      const secretsResponse = createSecretsResponse();

      secretsResponse[1].awsData = [{ key: AWS_KEYS.S3_ENDPOINT, value: 's3.amazonaws.com' }];
      const spec = createDSPipelineResourceSpec(createPipelineServerConfig(), secretsResponse);
      expect(spec.objectStorage.externalStorage?.scheme).toBe('https');
      expect(spec.objectStorage.externalStorage?.host).toBe('s3.amazonaws.com');
    });

    it('should include bucket', () => {
      const secretsResponse = createSecretsResponse();
      secretsResponse[1].awsData = [{ key: AWS_KEYS.AWS_S3_BUCKET, value: 'my-bucket' }];
      const spec = createDSPipelineResourceSpec(createPipelineServerConfig(), secretsResponse);
      expect(spec.objectStorage.externalStorage?.bucket).toBe('my-bucket');
    });

    it('should create spec with database object', () => {
      const config = createPipelineServerConfig();
      config.database.value = [
        {
          key: 'Username',
          value: 'test-user',
        },
        {
          key: 'Port',
          value: '8080',
        },
        {
          key: 'Host',
          value: 'test.host.com',
        },
        {
          key: 'Database',
          value: 'db-name',
        },
      ];
      const spec = createDSPipelineResourceSpec(
        config,
        createSecretsResponse({
          key: 'password-key',
          name: 'password-name',
        }),
      );
      expect(spec).toEqual({
        objectStorage: {
          externalStorage: {
            bucket: '',
            host: '',
            s3CredentialsSecret: {
              accessKey: 'AWS_ACCESS_KEY_ID',
              secretKey: 'AWS_SECRET_ACCESS_KEY',
              secretName: '',
            },
            scheme: 'https',
          },
        },
        database: {
          externalDB: {
            host: 'test.host.com',
            passwordSecret: {
              key: 'password-key',
              name: 'password-name',
            },
            pipelineDBName: 'db-name',
            port: '8080',
            username: 'test-user',
          },
        },
      });
    });
  });
});
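The SecretsResponse tuple used above is taken from the second parameter of createDSPipelineResourceSpec; its definition is not part of this diff. A sketch of the shape the tests imply, offered only as an inferred approximation rather than the actual type:

// Inferred from how createSecretsResponse and the assertions use the tuple; not copied from the source.
type InferredSecretsResponse = [
  { key: string; name: string } | undefined, // external database password secret, when one was created
  { secretName: string; awsData: { key: string; value: string }[] }, // object storage data connection
];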
configurePipelinesServer/utils.ts: 17 additions & 8 deletions
@@ -116,22 +116,22 @@ const createSecrets = (config: PipelineServerConfigType, projectName: string) =>
       .catch(reject);
   });

-export const configureDSPipelineResourceSpec = (
+export const createDSPipelineResourceSpec = (
   config: PipelineServerConfigType,
-  projectName: string,
-): Promise<DSPipelineKind['spec']> =>
-  createSecrets(config, projectName).then(([databaseSecret, objectStorageSecret]) => {
+  [databaseSecret, objectStorageSecret]: SecretsResponse,
+): DSPipelineKind['spec'] => {
+  {
     const awsRecord = dataEntryToRecord(objectStorageSecret.awsData);
     const databaseRecord = dataEntryToRecord(config.database.value);

     const [, externalStorageScheme, externalStorageHost] =
-      awsRecord.AWS_S3_ENDPOINT?.match(/^(\w+):\/\/(.*)/) ?? [];
+      awsRecord.AWS_S3_ENDPOINT?.match(/^(?:(\w+):\/\/)?(.*)/) ?? [];

     return {
       objectStorage: {
         externalStorage: {
-          host: externalStorageHost.replace(/\/$/, ''),
-          scheme: externalStorageScheme,
+          host: externalStorageHost?.replace(/\/$/, '') || '',
+          scheme: externalStorageScheme || 'https',
           bucket: awsRecord.AWS_S3_BUCKET || '',
           s3CredentialsSecret: {
             accessKey: AWS_KEYS.ACCESS_KEY_ID,
@@ -155,4 +155,13 @@ export const configureDSPipelineResourceSpec = (
           }
         : undefined,
     };
-  });
+  }
+};
+
+export const configureDSPipelineResourceSpec = (
+  config: PipelineServerConfigType,
+  projectName: string,
+): Promise<DSPipelineKind['spec']> =>
+  createSecrets(config, projectName).then((secretsResponse) =>
+    createDSPipelineResourceSpec(config, secretsResponse),
+  );
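Splitting the pure spec builder out of the promise chain is what makes the new unit tests possible: createDSPipelineResourceSpec can be exercised synchronously, while configureDSPipelineResourceSpec keeps its original signature and still creates the secrets first. A brief usage sketch; the config value, secret name, and project name are placeholders, not values from the codebase:

import { AWS_KEYS } from '~/pages/projects/dataConnections/const';
import {
  configureDSPipelineResourceSpec,
  createDSPipelineResourceSpec,
} from '~/concepts/pipelines/content/configurePipelinesServer/utils';

// Stand-in for a PipelineServerConfigType value such as the one built in the tests above.
declare const config: Parameters<typeof createDSPipelineResourceSpec>[0];

// Synchronous, easily testable path:
const spec = createDSPipelineResourceSpec(config, [
  undefined, // no external database secret
  {
    secretName: 'aws-connection-example', // placeholder name
    awsData: [{ key: AWS_KEYS.S3_ENDPOINT, value: 's3.amazonaws.com' }],
  },
]);

// Original entry point, now a thin wrapper: create the secrets, then build the spec.
const specPromise = configureDSPipelineResourceSpec(config, 'example-project');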
