Remove coercion of Console GET requests to POST #39170

Closed
47 changes: 3 additions & 44 deletions src/legacy/core_plugins/console/index.js
@@ -21,32 +21,11 @@ import Boom from 'boom';
import { first } from 'rxjs/operators';
import { resolve, join, sep } from 'path';
import url from 'url';
import { has, isEmpty, head, pick } from 'lodash';
import { has, head } from 'lodash';

import { resolveApi } from './api_server/server';
import { addExtensionSpecFilePath } from './api_server/spec';
import { setHeaders } from './server/set_headers';

import {
ProxyConfigCollection,
getElasticsearchProxyConfig,
createProxyRoute
} from './server';

function filterHeaders(originalHeaders, headersToKeep) {
const normalizeHeader = function (header) {
if (!header) {
return '';
}
header = header.toString();
return header.trim().toLowerCase();
};

// Normalize list of headers we want to allow in upstream request
const headersToKeepNormalized = headersToKeep.map(normalizeHeader);

return pick(originalHeaders, headersToKeepNormalized);
}
import { ProxyConfigCollection, registerProxyRoute } from './server';

export default function (kibana) {
const modules = resolve(__dirname, 'public/webpackShims/');
@@ -111,7 +90,6 @@ export default function (kibana) {
throw new Error('sense.ssl.verify is no longer supported.');
}

const config = server.config();
const legacyEsConfig = await server.newPlatform.setup.core.elasticsearch.legacy.config$.pipe(first()).toPromise();
const proxyConfigCollection = new ProxyConfigCollection(options.proxyConfig);
const proxyPathFilters = options.proxyFilter.map(str => new RegExp(str));
@@ -122,26 +100,7 @@
),
};

server.route(createProxyRoute({
baseUrl: head(legacyEsConfig.hosts),
pathFilters: proxyPathFilters,
getConfigForReq(req, uri) {
const filteredHeaders = filterHeaders(req.headers, legacyEsConfig.requestHeadersWhitelist);
const headers = setHeaders(filteredHeaders, legacyEsConfig.customHeaders);

if (!isEmpty(config.get('console.proxyConfig'))) {
return {
...proxyConfigCollection.configForUri(uri),
headers,
};
}

return {
...getElasticsearchProxyConfig(legacyEsConfig),
headers,
};
}
}));
registerProxyRoute(server, legacyEsConfig, proxyConfigCollection, proxyPathFilters);

server.route({
path: '/api/console/api_server',
5 changes: 0 additions & 5 deletions src/legacy/core_plugins/console/public/src/es.js
@@ -35,11 +35,6 @@ export function getContentType(body) {
export function send(method, path, data) {
const wrappedDfd = $.Deferred(); // eslint-disable-line new-cap

const isGetRequest = /^get$/i.test(method);
if (data && isGetRequest) {
method = 'POST';
}

const options = {
url: '../api/console/proxy?' + formatQueryString({ path, method }),
data,
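With the coercion above removed, `send` no longer rewrites the method before building the proxy URL. A minimal sketch of the resulting call (the index name and body are made up, and it assumes `send` returns the wrapped deferred):

```js
// Hypothetical usage: the browser request to ../api/console/proxy is still a POST
// (the proxy route only accepts POST), but the `method` query parameter now stays
// 'GET' instead of being rewritten, so the server forwards a real GET with a body to ES.
import { send } from './es';

send('GET', '/my-index/_search', JSON.stringify({ query: { match_all: {} } }))
  .then((response) => {
    console.log('proxy responded with', response);
  });
```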
src/legacy/core_plugins/console/server/elasticsearch_proxy_config.js
@@ -19,15 +19,16 @@

import _ from 'lodash';
import { readFileSync } from 'fs';
import http from 'http';
import https from 'https';
import url from 'url';

const readFile = (file) => readFileSync(file, 'utf8');

const createAgent = (legacyConfig) => {
const createAgentOptions = (legacyConfig) => {
const target = url.parse(_.head(legacyConfig.hosts));
if (!/^https/.test(target.protocol)) return new http.Agent();

if (!/^https/.test(target.protocol)) {
return {};
}

const agentOptions = {};

@@ -65,12 +66,12 @@ const createAgent = (legacyConfig) => {
agentOptions.passphrase = legacyConfig.ssl.keyPassphrase;
}

return new https.Agent(agentOptions);
return agentOptions;
};

export const getElasticsearchProxyConfig = (legacyConfig) => {
return {
timeout: legacyConfig.requestTimeout.asMilliseconds(),
agent: createAgent(legacyConfig)
agent: createAgentOptions(legacyConfig)
};
};
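getElasticsearchProxyConfig now hands back plain agent options instead of a constructed https.Agent, leaving the choice of HTTP client to the caller. A sketch of a possible consumer, using a made-up legacy config shaped like the object this module already reads (hosts, requestTimeout, ssl):

```js
import https from 'https';
import moment from 'moment';
import { getElasticsearchProxyConfig } from './elasticsearch_proxy_config';

// Hypothetical legacy ES config; real callers receive this from the new platform setup.
const legacyConfig = {
  hosts: ['https://localhost:9200'],
  requestTimeout: moment.duration(30000),
  ssl: { verificationMode: 'full' },
};

const { timeout, agent: agentOptions } = getElasticsearchProxyConfig(legacyConfig);

// The old createAgent() built this Agent internally; callers can now either construct
// one themselves, or pass the plain options straight to an HTTP client that accepts
// them (e.g. the `request` library's `agentOptions`).
const httpsAgent = new https.Agent(agentOptions);
```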
2 changes: 1 addition & 1 deletion src/legacy/core_plugins/console/server/index.js
@@ -19,4 +19,4 @@

export { ProxyConfigCollection } from './proxy_config_collection';
export { getElasticsearchProxyConfig } from './elasticsearch_proxy_config';
export { createProxyRoute } from './proxy_route';
export { createProxyRoute, registerProxyRoute } from './proxy_route';
136 changes: 96 additions & 40 deletions src/legacy/core_plugins/console/server/proxy_route.js
@@ -19,8 +19,27 @@

import Joi from 'joi';
import Boom from 'boom';
import Wreck from '@hapi/wreck';
import { trimLeft, trimRight } from 'lodash';
import { trimLeft, trimRight, head, pick, isEmpty } from 'lodash';
import request from 'request';
import { parse } from 'querystring';

import { getElasticsearchProxyConfig } from './elasticsearch_proxy_config';
import { setHeaders } from './set_headers';

function filterHeaders(originalHeaders, headersToKeep) {
const normalizeHeader = function (header) {
if (!header) {
return '';
}
header = header.toString();
return header.trim().toLowerCase();
};

// Normalize list of headers we want to allow in upstream request
const headersToKeepNormalized = headersToKeep.map(normalizeHeader);

return pick(originalHeaders, headersToKeepNormalized);
}
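A quick illustration of the helper above, with hypothetical values: Node lowercases incoming header names, so `req.headers` arrives lowercased, and `filterHeaders` trims and lowercases the whitelist before picking.

```js
// Hypothetical input resembling req.headers (keys already lowercased by Node).
const incoming = {
  authorization: 'Basic abc123',
  cookie: 'sid=xyz',
  'x-not-whitelisted': 'dropped',
};

filterHeaders(incoming, [' Authorization ', 'Cookie']);
// => { authorization: 'Basic abc123', cookie: 'sid=xyz' }
```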

function resolveUri(base, path) {
let pathToUse = `${trimRight(base, '/')}/${trimLeft(path, '/')}`;
@@ -68,10 +87,6 @@ export const createProxyRoute = ({
method: 'POST',
config: {
tags: ['access:console'],
payload: {
output: 'stream',
parse: false,
},

validate: {
query: Joi.object()
@@ -104,43 +119,84 @@
const { payload, query } = req;
const { path, method } = query;
const uri = resolveUri(baseUrl, path);

// Because this can technically be provided by a settings-defined proxy config, we need to
// preserve these property names to maintain BWC.
const { timeout, rejectUnauthorized, agent, headers } = getConfigForReq(req, uri);
const makeRequest = async payloadToSend => {
const wreckOptions = {
payload: payloadToSend,
timeout,
rejectUnauthorized,
agent,
headers: {
...headers,
...getProxyHeaders(req),
},

return await new Promise((resolve) => {
const pathParts = path.split('?'); // TODO: Ensure this only has 2 parts
const requestHeaders = {
...headers,
...getProxyHeaders(req),
};

const esResponse = await Wreck.request(method, uri, wreckOptions);

if (method.toUpperCase() !== 'HEAD') {
return h
.response(esResponse)
.code(esResponse.statusCode)
.header('warning', esResponse.headers.warning);
}

return h
.response(`${esResponse.statusCode} - ${esResponse.statusMessage}`)
.code(esResponse.statusCode)
.type('text/plain')
.header('warning', esResponse.headers.warning);
};
// Wreck assumes that DELETE requests will not have a body, and thus it does not
// parse the payload to pass it along, so we have to do this manually here.
if (method.toUpperCase() === 'DELETE') {
const data = await Wreck.read(payload);
return await makeRequest(data);
} else {
return await makeRequest(payload);
}
// We use the request library because Hapi, Axios, and Superagent don't support GET requests
// with bodies, but ES APIs do. The same applies to DELETE requests with bodies. If we need to
// deprecate use of this library, we can also solve this issue with Node's http library.
// See #39170 for details.
request({
method,
uri,
timeout,
qs: parse(pathParts[1] || ''),
headers: requestHeaders,
body: payload ? JSON.stringify(payload) : undefined,
// TODO: forward `rejectUnauthorized` and the agent settings from getConfigForReq; they
// are destructured above but not yet passed to `request`.
}, (error, response, body) => {
// If the upstream request failed outright there is no response to destructure.
if (error) {
return resolve(Boom.boomify(error, { statusCode: 502 }));
}

const {
statusCode,
statusMessage,
headers: { warning },
} = response;

let result;

if (method.toUpperCase() !== 'HEAD') {
result = h
.response(body)
.code(statusCode)
.header('warning', warning);
} else {
result = h
.response(`${statusCode} - ${statusMessage}`)
.code(statusCode)
.type('text/plain')
.header('warning', warning);
}

resolve(result);
});
});
},
},
});

export function registerProxyRoute(server, legacyEsConfig, proxyConfigCollection, proxyPathFilters) {
const config = server.config();

server.route(createProxyRoute({
baseUrl: head(legacyEsConfig.hosts),
pathFilters: proxyPathFilters,
getConfigForReq: (req, uri) => {
const filteredHeaders = filterHeaders(req.headers, legacyEsConfig.requestHeadersWhitelist);
const headers = setHeaders(filteredHeaders, legacyEsConfig.customHeaders);

if (!isEmpty(config.get('console.proxyConfig'))) {
return {
...proxyConfigCollection.configForUri(uri),
headers,
};
}

return {
...getElasticsearchProxyConfig(legacyEsConfig),
headers,
};
}
}));
}
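The comment in the handler notes that Node's own http module could eventually replace the `request` dependency. A sketch of that alternative, assuming `uri`, `requestHeaders`, and `payload` mean the same as in the handler above; Node's http.request does not strip the body for GET or DELETE, so the payload is forwarded unchanged:

```js
import http from 'http';
import https from 'https';
import { parse as parseUrl } from 'url';

// Hypothetical helper: forward a request to Elasticsearch with Node's http/https modules,
// preserving the original method even when a body is present (GET and DELETE included).
function forwardWithNodeHttp(method, uri, requestHeaders, payload) {
  const { protocol, hostname, port, path } = parseUrl(uri);
  const transport = protocol === 'https:' ? https : http;

  return new Promise((resolve, reject) => {
    const esRequest = transport.request(
      { method, hostname, port, path, headers: requestHeaders },
      (esResponse) => {
        let body = '';
        esResponse.on('data', (chunk) => { body += chunk; });
        esResponse.on('end', () => {
          resolve({
            statusCode: esResponse.statusCode,
            statusMessage: esResponse.statusMessage,
            headers: esResponse.headers,
            body,
          });
        });
      }
    );

    esRequest.on('error', reject);

    if (payload) {
      esRequest.write(typeof payload === 'string' ? payload : JSON.stringify(payload));
    }
    esRequest.end();
  });
}
```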