[Backport 7.x] Move integration test to artifact API #1437

Closed · wants to merge 1 commit
4 changes: 4 additions & 0 deletions .ci/Dockerfile
@@ -4,6 +4,10 @@ FROM node:${NODE_JS_VERSION}
# Create app directory
WORKDIR /usr/src/app

RUN apt-get clean -y
RUN apt-get update -y
RUN apt-get install -y zip

# Install app dependencies
COPY package*.json ./
RUN npm install
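
The `zip` package is presumably needed because cross-zip (added to package.json below) shells out to the system `zip`/`unzip` binaries on Linux instead of bundling a JavaScript implementation. A minimal sketch of the call it ends up making inside this image, assuming cross-zip's documented callback API and hypothetical paths:

const crossZip = require('cross-zip')
// on Linux this spawns the system unzip binary installed above
crossZip.unzip('/usr/src/app/artifacts.zip', '/usr/src/app/elasticsearch', err => {
  if (err) throw err
})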
4 changes: 3 additions & 1 deletion package.json
@@ -50,6 +50,7 @@
"@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
"@types/node": "^14.14.28",
"convert-hrtime": "^3.0.0",
"cross-zip": "^4.0.0",
"dedent": "^0.7.0",
"deepmerge": "^4.2.2",
"dezalgo": "^1.0.3",
@@ -58,6 +59,7 @@
"js-yaml": "^4.0.0",
"license-checker": "^25.0.1",
"minimist": "^1.2.5",
"node-fetch": "^2.6.1",
"ora": "^5.3.0",
"pretty-hrtime": "^1.0.3",
"proxy": "^1.0.2",
@@ -102,4 +104,4 @@
"coverage": false,
"jobs-auto": true
}
}
}
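
A note on the two new dependencies (an assumption based on each package's release notes): node-fetch is pinned to ^2.x because v3 ships as ESM only and would break the `require('node-fetch')` call in the download script below, while cross-zip ^4.0.0 provides the callback-style `zip`/`unzip` helpers that the script promisifies.

// node-fetch ^2.x keeps CommonJS working; v3 would need `import` instead
const fetch = require('node-fetch')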
159 changes: 159 additions & 0 deletions scripts/download-artifacts.js
@@ -0,0 +1,159 @@
'use strict'

const { join } = require('path')
const minimist = require('minimist')
const stream = require('stream')
const { promisify } = require('util')
const { createWriteStream, promises } = require('fs')
const rimraf = require('rimraf')
const fetch = require('node-fetch')
const crossZip = require('cross-zip')
const ora = require('ora')

const { mkdir, writeFile } = promises
const pipeline = promisify(stream.pipeline)
const unzip = promisify(crossZip.unzip)
const rm = promisify(rimraf)

const esFolder = join(__dirname, '..', 'elasticsearch')
const zipFolder = join(esFolder, 'artifacts.zip')
const specFolder = join(esFolder, 'rest-api-spec', 'api')
const freeTestFolder = join(esFolder, 'rest-api-spec', 'test', 'free')
const xPackTestFolder = join(esFolder, 'rest-api-spec', 'test', 'platinum')
const artifactInfo = join(esFolder, 'info.json')

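// Download the rest-api-spec artifacts for the given version (and optional
// commit hash), skipping the download when the local copy recorded in
// info.json is already up to date.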
async function downloadArtifacts (opts) {
if (typeof opts.version !== 'string') {
throw new Error('Missing version')
}

const log = ora('Checking out spec and test').start()

log.text = 'Resolving versions'
let resolved
try {
resolved = await resolve(opts.version, opts.hash)
} catch (err) {
log.fail(err.message)
process.exit(1)
}

opts.id = opts.id || resolved.id
opts.hash = opts.hash || resolved.hash
opts.version = resolved.version

const info = loadInfo()

if (info && info.version === opts.version) {
if (info.hash === opts.hash && info.id === opts.id) {
log.succeed('The artifact copy present locally is already up to date')
return
}
}

log.text = 'Cleanup checkouts/elasticsearch'
await rm(esFolder)
await mkdir(esFolder, { recursive: true })

log.text = 'Downloading artifacts'
const response = await fetch(resolved.url)
if (!response.ok) {
log.fail(`unexpected response ${response.statusText}`)
process.exit(1)
}
await pipeline(response.body, createWriteStream(zipFolder))

log.text = 'Unzipping'
await unzip(zipFolder, esFolder)

log.text = 'Cleanup'
await rm(zipFolder)

log.text = 'Update info'
await writeFile(artifactInfo, JSON.stringify(opts), 'utf8')

log.succeed('Done')
}

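// Load the cached artifact metadata from info.json, or return null when no
// artifacts have been downloaded yet.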
function loadInfo () {
try {
return require(artifactInfo)
} catch (err) {
return null
}
}

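// Query the Elastic artifacts API for the given version and return the url,
// build id, commit hash, and version of the rest-resources zip, taken from
// either the build matching `hash` or the most recent build.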
async function resolve (version, hash) {
const response = await fetch(`https://artifacts-api.elastic.co/v1/versions/${version}`)
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}

const data = await response.json()
const esBuilds = data.version.builds
.filter(build => build.projects.elasticsearch != null)
.map(build => {
return {
projects: build.projects.elasticsearch,
buildId: build.build_id,
date: build.start_time,
version: build.version
}
})
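// newest builds first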
.sort((a, b) => {
const dA = new Date(a.date)
const dB = new Date(b.date)
if (dA > dB) return -1
if (dA < dB) return 1
return 0
})

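// a specific commit hash was requested: find the build that produced it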
if (hash != null) {
const build = esBuilds.find(build => build.projects.commit_hash === hash)
if (!build) {
throw new Error(`Can't find any build with hash '${hash}'`)
}
const zipKey = Object.keys(build.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip'))
return {
url: build.projects.packages[zipKey].url,
id: build.buildId,
hash: build.projects.commit_hash,
version: build.version
}
}

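// no hash requested: fall back to the most recent build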
const lastBuild = esBuilds[0]
const zipKey = Object.keys(lastBuild.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip'))
return {
url: lastBuild.projects.packages[zipKey].url,
id: lastBuild.buildId,
hash: lastBuild.projects.commit_hash,
version: lastBuild.version
}
}

async function main (options) {
delete options._
await downloadArtifacts(options)
}
if (require.main === module) {
process.on('unhandledRejection', function (err) {
console.error(err)
process.exit(1)
})

const options = minimist(process.argv.slice(2), {
string: ['id', 'version', 'hash']
})
main(options).catch(t => {
console.log(t)
process.exit(2)
})
}

module.exports = downloadArtifacts
module.exports.locations = {
specFolder,
freeTestFolder,
xPackTestFolder
}
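
For reference, a standalone usage sketch (the version and hash values here are hypothetical; `--hash` and `--id` are optional and resolved through the artifacts API when omitted):

node scripts/download-artifacts.js --version 7.13.0
node scripts/download-artifacts.js --version 7.13.0 --hash 686ba0e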
115 changes: 9 additions & 106 deletions test/integration/index.js
@@ -24,20 +24,18 @@ process.on('unhandledRejection', function (err) {
process.exit(1)
})

const { writeFileSync, readFileSync, accessSync, mkdirSync, readdirSync, statSync } = require('fs')
const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs')
const { join, sep } = require('path')
const yaml = require('js-yaml')
const Git = require('simple-git')
const ms = require('ms')
const { Client } = require('../../index')
const build = require('./test-runner')
const { sleep } = require('./helper')
const createJunitReporter = require('./reporter')
const downloadArtifacts = require('../../scripts/download-artifacts')

const esRepo = 'https://github.com/elastic/elasticsearch.git'
const esFolder = join(__dirname, '..', '..', 'elasticsearch')
const yamlFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test')
const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test')
const yamlFolder = downloadArtifacts.locations.freeTestFolder
const xPackYamlFolder = downloadArtifacts.locations.xPackTestFolder

const MAX_API_TIME = 1000 * 90
const MAX_FILE_TIME = 1000 * 30
@@ -83,6 +81,7 @@ const platinumBlackList = {
],
// The cleanup fails with an index not found error when retrieving the jobs
'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
'ml/preview_datafeed.yml': ['*'],
// Investigate why this is failing
'ml/inference_crud.yml': ['*'],
// investigate why this is failing
@@ -168,10 +167,10 @@ async function start ({ client, isXPack }) {
await waitCluster(client)

const { body } = await client.info()
const { number: version, build_hash: sha } = body.version
const { number: version, build_hash: hash } = body.version

log(`Checking out sha ${sha}...`)
await withSHA(sha)
log(`Downloading artifacts for hash ${hash}...`)
await downloadArtifacts({ hash, version })

log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`)
const junit = createJunitReporter()
@@ -216,7 +215,7 @@ async function start ({ client, isXPack }) {
const testRunner = build({
client,
version,
isXPack: file.includes('x-pack')
isXPack: file.includes('platinum')
})
const fileTime = now()
const data = readFileSync(file, 'utf8')
@@ -324,102 +323,6 @@ function parse (data) {
return doc
}

/**
* Sets the elasticsearch repository to the given sha.
* If the repository is not present in `esFolder` it will
* clone the repository and the checkout the sha.
* If the repository is already present but it cannot checkout to
* the given sha, it will perform a pull and then try again.
* @param {string} sha
* @param {function} callback
*/
function withSHA (sha) {
return new Promise((resolve, reject) => {
_withSHA(err => err ? reject(err) : resolve())
})

function _withSHA (callback) {
let fresh = false
let retry = 0

if (!pathExist(esFolder)) {
if (!createFolder(esFolder)) {
return callback(new Error('Failed folder creation'))
}
fresh = true
}

const git = Git(esFolder)

if (fresh) {
clone(checkout)
} else {
checkout()
}

function checkout () {
log(`Checking out sha '${sha}'`)
git.checkout(sha, err => {
if (err) {
if (retry++ > 0) {
return callback(err)
}
return pull(checkout)
}
callback()
})
}

function pull (cb) {
log('Pulling elasticsearch repository...')
git.pull(err => {
if (err) {
return callback(err)
}
cb()
})
}

function clone (cb) {
log('Cloning elasticsearch repository...')
git.clone(esRepo, esFolder, err => {
if (err) {
return callback(err)
}
cb()
})
}
}
}

/**
* Checks if the given path exists
* @param {string} path
* @returns {boolean} true if exists, false if not
*/
function pathExist (path) {
try {
accessSync(path)
return true
} catch (err) {
return false
}
}

/**
* Creates the given folder
* @param {string} name
* @returns {boolean} true on success, false on failure
*/
function createFolder (name) {
try {
mkdirSync(name)
return true
} catch (err) {
return false
}
}

function generateJunitXmlReport (junit, suite) {
writeFileSync(
join(__dirname, '..', '..', `${suite}-report-junit.xml`),
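
With this change a local run no longer needs a clone of the elasticsearch repository: the runner resolves the cluster version and commit hash via `client.info()` and fetches the matching spec and yaml test suites through `downloadArtifacts({ hash, version })` before executing them. A hypothetical invocation, assuming an Elasticsearch instance is already reachable at the address the runner expects:

node test/integration/index.js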