[9.0] [Automatic Import] Remove pipeline tests from the package (elastic#211223) (elastic#211385)

# Backport

This will backport the following commits from `main` to `9.0`:
- [[Automatic Import] Remove pipeline tests from the package
(elastic#211223)](elastic#211223)

<!--- Backport version: 9.4.3 -->

### Questions?
Please refer to the [Backport tool documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Bharat
Pasupula","email":"123897612+bhapas@users.noreply.github.com"},"sourceCommit":{"committedDate":"2025-02-17T09:28:57Z","message":"[Automatic
Import] Remove pipeline tests from the package (elastic#211223)\n\n##
Summary\r\n\r\nThis PR removes unused pipeline tests from the packaging
of integration.\r\n\r\nThe pipeline tests are not run today when the
integration is built.\r\nHence removing them for
now.","sha":"54b6e65a2047cdcc727032e1734170e7c8307aff","branchLabelMapping":{"^v9.1.0$":"main","^v8.19.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","backport:prev-minor","backport:prev-major","ci:build-cloud-image","ci:cloud-deploy","Team:Security-Scalability","Feature:AutomaticImport","v9.1.0"],"title":"[Automatic
Import] Remove pipeline tests from the
package","number":211223,"url":"https://github.com/elastic/kibana/pull/211223","mergeCommit":{"message":"[Automatic
Import] Remove pipeline tests from the package (elastic#211223)\n\n##
Summary\r\n\r\nThis PR removes unused pipeline tests from the packaging
of integration.\r\n\r\nThe pipeline tests are not run today when the
integration is built.\r\nHence removing them for
now.","sha":"54b6e65a2047cdcc727032e1734170e7c8307aff"}},"sourceBranch":"main","suggestedTargetBranches":[],"targetPullRequestStates":[{"branch":"main","label":"v9.1.0","branchLabelMappingKey":"^v9.1.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/211223","number":211223,"mergeCommit":{"message":"[Automatic
Import] Remove pipeline tests from the package (elastic#211223)\n\n##
Summary\r\n\r\nThis PR removes unused pipeline tests from the packaging
of integration.\r\n\r\nThe pipeline tests are not run today when the
integration is built.\r\nHence removing them for
now.","sha":"54b6e65a2047cdcc727032e1734170e7c8307aff"}}]}] BACKPORT-->

Co-authored-by: Bharat Pasupula <123897612+bhapas@users.noreply.github.com>
kibanamachine and bhapas authored Feb 17, 2025
1 parent b2618cf commit d638532
Showing 20 changed files with 103 additions and 191 deletions.
@@ -37,6 +37,7 @@ export enum GenerationErrorCode {
UNSUPPORTED_LOG_SAMPLES_FORMAT = 'unsupported-log-samples-format',
UNPARSEABLE_CSV_DATA = 'unparseable-csv-data',
CEF_ERROR = 'cef-not-supported',
BUILD_INTEGRATION_ERROR = 'build-integration-error',
}

// Size limits
@@ -271,6 +271,12 @@ export const GENERATION_ERROR_TRANSLATION: Record<
defaultMessage:
'CEF format detected. Please decode the CEF logs into JSON format using filebeat decode_cef processor.',
}),
[GenerationErrorCode.BUILD_INTEGRATION_ERROR]: i18n.translate(
'xpack.automaticImport.errors.buildIntegrationError',
{
defaultMessage: 'An error occurred while building the integration package. Please try again.',
}
),
[GenerationErrorCode.UNPARSEABLE_CSV_DATA]: (attributes) => {
if (
attributes.underlyingMessages !== undefined &&
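For context, the sketch below (the helper name, import paths, and attributes shape are assumptions, not part of this commit) shows how a caller could resolve a server-returned error code into the message defined in this record:

```ts
import { GenerationErrorCode } from '../../../common/constants'; // assumed path, mirroring the constants file above
import { GENERATION_ERROR_TRANSLATION } from './translations'; // assumed location of the record above

// Assumed attributes shape, based on the UNPARSEABLE_CSV_DATA entry shown above.
interface GenerationErrorAttributes {
  underlyingMessages?: string[];
}

export function getGenerationErrorMessage(
  errorCode: GenerationErrorCode,
  attributes: GenerationErrorAttributes = {}
): string {
  const entry = GENERATION_ERROR_TRANSLATION[errorCode];
  // Entries are either a ready-to-use translated string or a function of the error attributes.
  return typeof entry === 'function' ? entry(attributes) : entry;
}
```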
@@ -85,7 +85,7 @@ describe('EcsGraph', () => {
throw Error(`getEcsGraph threw an error: ${error}`);
}

expect(response.results).toStrictEqual(ecsMappingExpectedResults);
expect(response.results).toEqual(ecsMappingExpectedResults);

// Check if the functions were called
expect(handleEcsMapping).toHaveBeenCalled();
@@ -20,6 +20,7 @@ import { createDataStream } from './data_stream';
import { createFieldMapping } from './fields';
import { createPipeline } from './pipeline';
import { createReadme } from './readme_files';
import { BuildIntegrationError } from '../lib/errors/build_integration_error';

const initialVersion = '1.0.0';

@@ -37,46 +38,56 @@ export async function buildPackage(integration: Integration): Promise<Buffer> {
configureNunjucks();

if (!isValidName(integration.name)) {
throw new Error(
throw new BuildIntegrationError(
`Invalid integration name: ${integration.name}, Should only contain letters, numbers and underscores`
);
}

const workingDir = joinPath(getDataPath(), `automatic-import-${generateUniqueId()}`);
const packageDirectoryName = `${integration.name}-${initialVersion}`;
const packageDir = createDirectories(workingDir, integration, packageDirectoryName);

const dataStreamsDir = joinPath(packageDir, 'data_stream');
const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
const dataStreamName = dataStream.name;
if (!isValidDatastreamName(dataStreamName)) {
throw new Error(
`Invalid datastream name: ${dataStreamName}, Name must be at least 2 characters long and can only contain lowercase letters, numbers, and underscores`
);
}
const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);
try {
const packageDirectoryName = `${integration.name}-${initialVersion}`;
const packageDir = createDirectories(workingDir, integration, packageDirectoryName);

const dataStreamFields = createDataStream(integration.name, specificDataStreamDir, dataStream);
createAgentInput(specificDataStreamDir, dataStream.inputTypes, dataStream.celInput);
createPipeline(specificDataStreamDir, dataStream.pipeline);
const fields = createFieldMapping(
integration.name,
dataStreamName,
specificDataStreamDir,
dataStream.docs
);
const dataStreamsDir = joinPath(packageDir, 'data_stream');
const fieldsPerDatastream = integration.dataStreams.map((dataStream) => {
const dataStreamName = dataStream.name;
if (!isValidDatastreamName(dataStreamName)) {
throw new Error(
`Invalid datastream name: ${dataStreamName}, Name must be at least 2 characters long and can only contain lowercase letters, numbers, and underscores`
);
}
const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName);

return {
datastream: dataStreamName,
fields: mergeAndSortFields(fields, dataStreamFields),
};
});
const dataStreamFields = createDataStream(
integration.name,
specificDataStreamDir,
dataStream
);
createAgentInput(specificDataStreamDir, dataStream.inputTypes, dataStream.celInput);
createPipeline(specificDataStreamDir, dataStream.pipeline);
const fields = createFieldMapping(
integration.name,
dataStreamName,
specificDataStreamDir,
dataStream.docs
);

createReadme(packageDir, integration.name, integration.dataStreams, fieldsPerDatastream);
const zipBuffer = await createZipArchive(integration, workingDir, packageDirectoryName);
return {
datastream: dataStreamName,
fields: mergeAndSortFields(fields, dataStreamFields),
};
});

removeDirSync(workingDir);
return zipBuffer;
createReadme(packageDir, integration.name, integration.dataStreams, fieldsPerDatastream);
const zipBuffer = await createZipArchive(integration, workingDir, packageDirectoryName);

removeDirSync(workingDir);
return zipBuffer;
} catch (error) {
throw new BuildIntegrationError('Building the Integration failed');
} finally {
removeDirSync(workingDir);
}
}

export function isValidName(input: string): boolean {
@@ -89,17 +89,6 @@ describe('createDataStream', () => {
// dataStream files
expect(copySync).toHaveBeenCalledWith(expect.any(String), `${dataStreamPath}/fields`);

// test files
expect(ensureDirSync).toHaveBeenCalledWith(`${dataStreamPath}/_dev/test/pipeline`);
expect(copySync).toHaveBeenCalledWith(
expect.any(String),
`${dataStreamPath}/_dev/test/pipeline/test-common-config.yml`
);
expect(createSync).toHaveBeenCalledWith(
`${dataStreamPath}/_dev/test/pipeline/test-${packageName}-datastream-1.log`,
samples
);

// // Manifest files
expect(createSync).toHaveBeenCalledWith(`${dataStreamPath}/manifest.yml`, undefined);
expect(render).toHaveBeenCalledWith(`filestream_manifest.yml.njk`, expect.anything());
@@ -27,8 +27,6 @@ export function createDataStream(

ensureDirSync(specificDataStreamDir);
const fields = createDataStreamFolders(specificDataStreamDir, pipelineDir);
createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName);

const dataStreams: string[] = [];
for (const inputType of dataStream.inputTypes) {
let mappedValues = {
@@ -89,30 +87,6 @@ function loadFieldsFromFiles(sourcePath: string, files: string[]): Field[] {
});
}

function createPipelineTests(
specificDataStreamDir: string,
rawSamples: string[],
packageName: string,
dataStreamName: string
): void {
const pipelineTestTemplatesDir = joinPath(__dirname, '../templates/pipeline_tests');
const pipelineTestsDir = joinPath(specificDataStreamDir, '_dev/test/pipeline');
ensureDirSync(pipelineTestsDir);
const items = listDirSync(pipelineTestTemplatesDir);
for (const item of items) {
const s = joinPath(pipelineTestTemplatesDir, item);
const d = joinPath(pipelineTestsDir, item.replaceAll('_', '-'));
copySync(s, d);
}
const formattedPackageName = packageName.replace(/_/g, '-');
const formattedDataStreamName = dataStreamName.replace(/_/g, '-');
const testFileName = joinPath(
pipelineTestsDir,
`test-${formattedPackageName}-${formattedDataStreamName}.log`
);
createSync(testFileName, rawSamples.join('\n'));
}

function prepareCelValues(mappedValues: object, celInput: CelInput | undefined) {
if (celInput != null) {
// Ready the program for printing with correct indentation
@@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { KibanaResponseFactory } from '@kbn/core/server';
import { ErrorThatHandlesItsOwnResponse } from './types';
import { GenerationErrorCode } from '../../../common/constants';

export class BuildIntegrationError extends Error implements ErrorThatHandlesItsOwnResponse {
private readonly errorCode: GenerationErrorCode = GenerationErrorCode.BUILD_INTEGRATION_ERROR;

public sendResponse(res: KibanaResponseFactory) {
return res.badRequest({
body: { message: this.message, attributes: { errorCode: this.errorCode } },
});
}
}
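A minimal usage sketch (the wrapper function and relative import path are assumptions, not part of this commit) of how a route handler could let this error produce its own HTTP response:

```ts
import type { KibanaResponseFactory } from '@kbn/core/server';
import { BuildIntegrationError } from './build_integration_error'; // assumed relative path

// Runs a package build and lets BuildIntegrationError reply as a 400 carrying its
// errorCode attribute; unrelated errors keep propagating to the generic handler.
export async function respondWithPackage(
  buildZip: () => Promise<Buffer>,
  res: KibanaResponseFactory
) {
  try {
    const zipBuffer = await buildZip();
    return res.ok({ body: zipBuffer });
  } catch (err) {
    if (err instanceof BuildIntegrationError) {
      return err.sendResponse(res);
    }
    throw err;
  }
}
```

With buildPackage now wrapping its work in try/catch/finally, callers like this see a single BuildIntegrationError for any packaging failure, while the temporary working directory is always cleaned up.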

The remaining 12 changed files were deleted (contents not shown), and one further file's diff did not load.
