Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Generating import file for postman #770

Draft
wants to merge 14 commits into
base: main
Choose a base branch
from
1 change: 1 addition & 0 deletions .cspell
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ aoss
APIV
cbor
evals
formdata
lucene
millis
mxyz
Expand Down
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added `GET /_plugins/_ml/connectors/{connector_id}`, `_search`, `POST /_plugins/_ml/connectors/_search`, and `PUT /_plugins/_ml/connectors/{connector_id}` ([#764](https://github.com/opensearch-project/opensearch-api-specification/pull/764))
- Added the ability to skip an individual chapter test ([#765](https://github.com/opensearch-project/opensearch-api-specification/pull/765))
- Added uploading of test spec logs ([#767](https://github.com/opensearch-project/opensearch-api-specification/pull/767))
- Added generation of file for Postman ([#770](https://github.com/opensearch-project/opensearch-api-specification/pull/770))
- Added `aggs` property as an alias to `aggregations` in requestBody of `search` [#774](https://github.com/opensearch-project/opensearch-api-specification/issues/774)
- Added `POST /_plugins/_ml/memory`, `POST /_plugins/_ml/memory/_search`, `{memory_id}/_search`, `{memory_id}/messages`, `PUT /_plugins/_ml/memory/{memory_id}`, `message/{message_id}`, `GET /_plugins/_ml/memory`, `GET /_plugins/_ml/memory/{memory_id}`, `_search`, `message/{message_id}`, `{memory_id}/messages`, `{memory_id}/_search`, `message/{message_id}/traces`, and `DELETE /_plugins/_ml/memory/{memory_id}` ([#771](https://github.com/opensearch-project/opensearch-api-specification/pull/771))
- Added support for evaluating response payloads in prologues and epilogues ([#772](https://github.com/opensearch-project/opensearch-api-specification/pull/772))
Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"coverage:spec": "ts-node tools/src/coverage/coverage.ts",
"dump-cluster-spec": "ts-node tools/src/dump-cluster-spec/dump-cluster-spec.ts",
"generate-types": "ts-node tools/src/tester/_generate_story_types.ts",
"export:postman": "ts-node tools/src/exporter/export.ts",
"lint:spec": "ts-node tools/src/linter/lint.ts",
"lint": "eslint . --report-unused-disable-directives",
"lint--fix": "eslint . --fix --report-unused-disable-directives",
Expand Down
7 changes: 7 additions & 0 deletions tools/src/OpenSearchHttpClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,13 @@ export class OpenSearchHttpClient {
}
}

get_url (): string | undefined {
  // Base URL used for requests: the configured option when present and
  // non-empty, otherwise DEFAULT_URL. Optional chaining replaces the
  // original three-clause null/empty check.
  const url = this._opts?.url
  return url != null && url !== '' ? url : DEFAULT_URL
}

async request<T = any, R = AxiosResponse<T>, D = any>(config: AxiosRequestConfig<D>): Promise<R> {
return await this._axios.request(config)
}
Expand Down
128 changes: 128 additions & 0 deletions tools/src/exporter/ExportChapters.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

import fs from 'fs'
import { read_yaml, to_ndjson } from '../helpers'
import { basename, resolve } from 'path'
import _ from 'lodash'
import { StoryFile } from 'tester/types/eval.types'
import { Logger } from 'Logger'
import StoryParser from './StoryParser'
import { PostmanManager } from './PostmanManager'
import { APPLICATION_JSON } from './MimeTypes'
import { Parameter } from 'tester/types/story.types'

export default class ExportChapters {
  // Cache of parsed story files keyed by the root path they were loaded from.
  private readonly _story_files: Record<string, StoryFile[]> = {}
  private readonly _logger: Logger
  private readonly _postman_manager: PostmanManager

  constructor (logger: Logger, postman_manager: PostmanManager) {
    this._logger = logger
    this._postman_manager = postman_manager
  }

  /**
   * Walks every story file under story_path, adds each chapter as a request
   * to the Postman collection, then writes the collection to disk.
   *
   * @param story_path root folder containing the test stories
   */
  run (story_path: string): void {
    const story_files = this.story_files(story_path)

    for (const story_file of story_files) {
      for (const chapter of story_file.story.chapters) {
        const [headers, content_type] = this.#serialize_headers(chapter.request?.headers, chapter.request?.content_type)
        const params = chapter.parameters !== undefined ? this.#parse_url(chapter.path, chapter.parameters) : {}
        const request_data = chapter.request?.payload !== undefined
          ? this.#serialize_payload(chapter.request.payload, content_type)
          : {}
        // NOTE(review): 'url' is a literal placeholder, not a real base URL —
        // presumably this should come from OpenSearchHttpClient#get_url(); confirm.
        this._postman_manager.add_to_collection('url', chapter.method, chapter.path, headers, params, request_data, content_type, story_file.full_path)
      }
      this._logger.info(`Evaluating ${story_file.display_path} ...`)
    }
    this._postman_manager.save_collection()
  }

  // Parses (and caches) the story files found under story_path, sorted
  // shallowest-first then alphabetically.
  story_files (story_path: string): StoryFile[] {
    this._story_files[story_path] ??= this.#sort_story_files(this.#collect_story_files(resolve(story_path), '', ''))
    return this._story_files[story_path]
  }

  // Recursively gathers story files, skipping dotfiles, Docker files and
  // Python helpers. `prefix` accumulates the display path.
  #collect_story_files (folder: string, file: string, prefix: string): StoryFile[] {
    const path = file === '' ? folder : `${folder}/${file}`
    const next_prefix = prefix === '' ? file : `${prefix}/${file}`
    if (file.startsWith('.') || file === 'docker-compose.yml' || file === 'Dockerfile' || file.endsWith('.py')) {
      return []
    } else if (fs.statSync(path).isFile()) {
      const story = StoryParser.parse(read_yaml(path))
      return [{
        display_path: next_prefix === '' ? basename(path) : next_prefix,
        full_path: path,
        story
      }]
    } else {
      return _.compact(fs.readdirSync(path).flatMap(next_file => {
        return this.#collect_story_files(path, next_file, next_prefix)
      }))
    }
  }

  // Orders story files by directory depth first, then lexicographically.
  #sort_story_files (story_files: StoryFile[]): StoryFile[] {
    return story_files.sort(({ display_path: a }, { display_path: b }) => {
      const a_depth = a.split('/').length
      const b_depth = b.split('/').length
      if (a_depth !== b_depth) return a_depth - b_depth
      return a.localeCompare(b)
    })
  }

  // Splits a Content-Type header out of the header map (case-insensitively),
  // returning the remaining headers and the effective content type
  // (defaulting to application/json).
  #serialize_headers (headers?: Record<string, any>, content_type?: string): [Record<string, any> | undefined, string] {
    headers = _.cloneDeep(headers)
    content_type = content_type ?? APPLICATION_JSON
    if (!headers) return [headers, content_type]
    _.forEach(headers, (v, k) => {
      if (k.toLowerCase() === 'content-type') {
        content_type = v.toString()
        if (headers) delete headers[k]
      }
    })
    return [headers, content_type]
  }

  // Converts a payload into the wire format implied by content_type;
  // currently only NDJSON needs special handling.
  #serialize_payload (payload: any, content_type: string): any {
    if (payload === undefined) return undefined
    switch (content_type) {
      case 'application/x-ndjson': return to_ndjson(payload as any[])
      default: return payload
    }
  }

  // Returns a shallow copy of the given parameters.
  // NOTE(review): the original if/else had two identical branches, so this
  // was always a plain copy; presumably variable substitution was intended
  // here — confirm before extending.
  resolve_params (parameters: Record<string, Parameter>): Record<string, Parameter> {
    return { ...(parameters ?? {}) }
  }

  // Returns the subset of `parameters` that are query parameters, i.e. those
  // whose names do not appear as {placeholders} in the path template.
  #parse_url (path: string, parameters: Record<string, Parameter>): Record<string, Parameter> {
    const path_params = new Set<string>(
      [...path.matchAll(/{(\w+)}/g)].map((match) => match[1])
    )
    return Object.fromEntries(Object.entries(parameters).filter(([key]) => !path_params.has(key)))
  }
}
14 changes: 14 additions & 0 deletions tools/src/exporter/MimeTypes.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

// MIME type constants shared by the exporter when deciding how to
// serialize request payloads (see ExportChapters#serialize_headers).
export const APPLICATION_CBOR = 'application/cbor'
export const APPLICATION_JSON = 'application/json'
export const APPLICATION_SMILE = 'application/smile'
export const APPLICATION_YAML = 'application/yaml'
export const TEXT_PLAIN = 'text/plain'
84 changes: 84 additions & 0 deletions tools/src/exporter/PostmanManager.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

import fs from 'fs';

export class PostmanManager {
  // In-memory Postman v2.1 collection being assembled.
  private readonly collection: any;
  // File the collection is written to by save_collection().
  private readonly collection_path: string;

  constructor(collection_path: string = './postman_collection.json') {
    this.collection_path = collection_path;
    this.collection = {
      info: {
        name: "OpenSearch tests",
        schema: "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
      },
      item: [],
    };
  }

  /**
   * Adds a single request to the collection, nested into folders derived
   * from the story file's path below its 'tests' directory
   * (e.g. tests/default/cat/indices.yaml -> default / cat / indices.yaml).
   * A request with the same name already present in the target folder is
   * not added again.
   */
  add_to_collection(
    url: string | undefined,
    method: string,
    path: string,
    headers: Record<string, any> | undefined,
    params: Record<string, any>,
    body: any,
    content_type: string,
    full_path?: string
  ): void {
    const folders: string[] = [];
    if (full_path != null && full_path !== '') {
      const path_parts = full_path.split('/').filter(Boolean);
      const start_index = path_parts.indexOf('tests');
      if (start_index !== -1) {
        folders.push(...path_parts.slice(start_index + 1));
      }
    }

    // Walk (and lazily create) the nested folder structure.
    let current_folder = this.collection.item;
    folders.forEach(folder => {
      let existing_folder = current_folder.find((item: any) => item.name === folder);
      if (existing_folder == null) {
        existing_folder = { name: folder, item: [] };
        current_folder.push(existing_folder);
      }
      current_folder = existing_folder.item;
    });

    const item = {
      name: path,
      request: {
        method,
        header: Object.entries(headers ?? {}).map(([key, value]) => ({ key, value })),
        url: {
          raw: `${url}${path}`,
          host: url,
          path: path.split('/').filter(Boolean),
          query: Object.entries(params).map(([key, value]) => ({ key, value: String(value) })),
        },
        // NOTE(review): Postman expects a 'formdata' array for formdata mode;
        // serializing it as 'raw' JSON here may need revisiting — confirm.
        body: body != null ? { mode: content_type === 'application/json' ? 'raw' : 'formdata', raw: JSON.stringify(body) } : undefined,
      },
    };

    // Fixed: the original tested `exists != null`, which is always true for
    // a boolean returned by some(), so duplicates were always pushed.
    const exists = current_folder.some((existing_item: any) => existing_item.name === item.name);
    if (!exists) {
      current_folder.push(item);
    }
  }

  // Writes the assembled collection to collection_path as pretty-printed JSON.
  save_collection(): void {
    fs.writeFileSync(this.collection_path, JSON.stringify(this.collection, null, 2));
  }
}
41 changes: 41 additions & 0 deletions tools/src/exporter/StoryParser.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

import _ from "lodash";
import { ParsedChapter, ParsedStory } from "tester/types/parsed_story.types";
import { Chapter, Story } from "tester/types/story.types";

export default class StoryParser {
  /**
   * Converts a raw story into its parsed form: chapters declaring several
   * HTTP methods are expanded into one chapter per method.
   */
  static parse(story: Story): ParsedStory {
    return { ...story, chapters: this.#expand_chapters(story.chapters) }
  }

  // Normalizes a chapter's method declaration to a fresh array.
  static #chapter_methods(methods: string[] | string): string[] {
    return Array.isArray(methods) ? [...methods] : [methods]
  }

  // Produces one ParsedChapter per (chapter, method) pair. When a chapter
  // listed multiple methods and has a synopsis, the method name is appended
  // to the synopsis to keep the expanded entries distinguishable.
  static #expand_chapters(chapters?: Chapter[]): ParsedChapter[] {
    if (chapters === undefined) return []
    return chapters.flatMap((chapter) =>
      this.#chapter_methods(chapter.method).map((method) => {
        const tagged = chapter.synopsis && Array.isArray(chapter.method)
        const synopsis = tagged ? `${chapter.synopsis} [${method}]` : chapter.synopsis
        return { ...chapter, synopsis, method }
      })
    ) as ParsedChapter[]
  }
}
27 changes: 27 additions & 0 deletions tools/src/exporter/export.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

import { Logger, LogLevel } from '../Logger'
import { Command, Option } from '@commander-js/extra-typings'
import ExportChapters from './ExportChapters'
import { PostmanManager } from './PostmanManager'

// CLI entry point: exports the test stories under --tests-path as a Postman
// collection (written to ./postman_collection.json, PostmanManager's default).
const command = new Command()
  // Fixed: the description was copy-pasted from the test runner and did not
  // describe this tool.
  .description('Export test stories as a Postman collection.')
  .addOption(new Option('--tests, --tests-path <path>', 'path to the root folder of the tests').default('./tests/default'))
  .allowExcessArguments(false)
  .parse()

const opts = command.opts()
const logger = new Logger(LogLevel.warn)
const postman_manager = new PostmanManager()
const runner = new ExportChapters(logger, postman_manager)

runner.run(opts.testsPath)
47 changes: 47 additions & 0 deletions tools/tests/exporter/GeneratePostman.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

import fs from 'fs'
import tmp from 'tmp'
import { PostmanManager } from 'exporter/PostmanManager'
import ExportChapters from 'exporter/ExportChapters'
import { Logger, LogLevel } from 'Logger'

// Fixed: the suite was named 'OpenApiMerger' — a copy-paste from another
// test file; it actually exercises the Postman exporter.
describe('PostmanExporter', () => {
  let logger: Logger
  let postman_manager: PostmanManager
  let runner: ExportChapters

  describe('defaults', () => {

    describe('write_to()', () => {
      let temp: tmp.DirResult
      let filename: string

      beforeEach(() => {
        // Write the generated collection into a temp dir so runs don't
        // pollute the fixtures folder.
        temp = tmp.dirSync()
        filename = `${temp.name}/postman_collection.json`
        logger = new Logger(LogLevel.warn)
        postman_manager = new PostmanManager(filename);
        runner = new ExportChapters(logger, postman_manager)
      })

      afterEach(() => {
        fs.unlinkSync(filename)
        temp.removeCallback()
      })

      test('writes a spec', () => {
        // Compares the freshly generated collection against the checked-in
        // fixture byte-for-byte.
        runner.run('./tools/tests/exporter/fixtures')
        expect(fs.readFileSync('./tools/tests/exporter/fixtures/postman_collection.json', 'utf8'))
          .toEqual(fs.readFileSync(filename, 'utf8'))
      })
    })
  })
})
Loading
Loading