I am building my project with Docker inside Jenkins. Now I would like to add integration tests to the Jenkinsfile. The integration tests use a PostgreSQL database.
I have already written a docker-compose.yml configuration file that brings up both the PostgreSQL container and the build image. However, I am stuck on doing the same thing from within Jenkins. What is the right way to do this?
My project currently looks like this:
CMakeLists.txt
Docker/
    Dockerfile
docker-compose.yml
Jenkinsfile
src/
    main.cpp
CMakeLists.txt
cmake_minimum_required(VERSION 3.10)
# set the project name
project(Mango)
# add the executable
add_executable(Mango src/main.cpp)
# search for PostgreSQL
find_package(PostgreSQL REQUIRED)
# setup PostgreSQL
target_include_directories(Mango PRIVATE ${PostgreSQL_INCLUDE_DIRS})
target_link_libraries(Mango PRIVATE ${PostgreSQL_LIBRARIES})
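Note that CMakeLists.txt does not register any tests yet; the "integration test" is currently just the Mango binary itself, run by the Test stage in the Jenkinsfile below. In case it matters, here is a minimal sketch of how the binary could be hooked into CTest instead (an assumption, not part of my current setup):

# optional sketch: let `ctest` / `make test` drive the integration run
enable_testing()
add_test(NAME mango_integration COMMAND Mango)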
Docker/Dockerfile
FROM ubuntu:bionic
RUN apt-get update && apt-get install -y clang cmake postgresql-10 postgresql-client-10 postgresql-server-dev-10 bash
SHELL ["/bin/bash", "-c"]
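For reference, the mango:latest image that docker-compose.yml and the Jenkinsfile refer to is built from this Dockerfile beforehand, roughly like this (the exact invocation in my setup may differ):

docker build -t mango:latest -f Docker/Dockerfile .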
docker-compose.yml
version: "3.7"

services:
  postgres:
    image: postgres:10
    restart: always
    environment:
      - POSTGRES_DB=demo
      - POSTGRES_USER=demo
      - POSTGRES_PASSWORD=demo
    ports:
      - 5432
    volumes:
      - "/var/run/postgres/postgres.sock:/var/run/postgres/postgres.sock"
      - "database:/var/lib/postgresql/data"

  mango:
    image: mango:latest

volumes:
  database:
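When started locally, both services end up on the Compose default network, so the mango container can reach the database under the hostname postgres, which is exactly the host used in the connection string in main.cpp. For example, I can check that the database is reachable with something along these lines (an illustration, not my exact invocation):

docker-compose up -d
docker-compose exec postgres pg_isready -U demo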
src/main.cpp
/*
 * Demo of libpq.
 * Build: g++ libpq-demo.cc -o libpq-demo -lpq
 * Run: ./libpq-demo
 */
#include <arpa/inet.h>
#include <endian.h> // htobe16 / htobe32 / htobe64
#include <iostream>
#include <libpq-fe.h>
#include <sstream>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

using namespace std;

/*
 * Turn host byte sequence to network byte sequence.
 */
char *myhton(char *src, int size) {
  char *dest = (char *)malloc(sizeof(char) * size);
  switch (size) {
  case 1:
    *dest = *src;
    break;
  case 2:
    *(uint16_t *)dest = htobe16(*(uint16_t *)src);
    break;
  case 4:
    *(uint32_t *)dest = htobe32(*(uint32_t *)src);
    break;
  case 8:
    *(uint64_t *)dest = htobe64(*(uint64_t *)src);
    break;
  default:
    *dest = *src;
    break;
  }
  memcpy(src, dest, size);
  free(dest);
  return src;
}

void printPGresult(PGresult *res) {
  std::cout << PQntuples(res) << " tuples, " << PQnfields(res) << " fields"
            << std::endl;
  // print column names
  for (int i = 0; i < PQnfields(res); i++) {
    std::cout << PQfname(res, i) << "\t";
  }
  std::cout << std::endl;
  // print column values
  for (int i = 0; i < PQntuples(res); i++) {
    for (int j = 0; j < PQnfields(res); j++) {
      std::cout << PQgetvalue(res, i, j) << "\t";
    }
    std::cout << std::endl;
  }
}

int main() {
  const char conninfo[] =
      "postgresql://postgres@postgres?port=5432&dbname=demo&user=demo&password=demo";
  PGconn *conn = PQconnectdb(conninfo);
  /* Check to see that the backend connection was successfully made */
  if (PQstatus(conn) != CONNECTION_OK) {
    std::cout << "Connection to database failed: " << PQerrorMessage(conn)
              << std::endl;
    PQfinish(conn);
    return 1;
  } else {
    std::cout << "Connection to database succeeded." << std::endl;
  }

  PGresult *res = NULL;

  /* CREATE TABLE demo */
  res = PQexec(conn, "create table if not exists t(id int, name text);");
  if (PQresultStatus(res) != PGRES_COMMAND_OK) {
    std::cout << "Create table failed: " << PQresultErrorMessage(res)
              << std::endl;
    PQclear(res);
    return 1;
  }
  PQclear(res);

  /* INSERT demo */
  res = PQexec(conn,
               "insert into t values(1, 'hello'), (2, 'world'),(3, '....');");
  if (PQresultStatus(res) != PGRES_COMMAND_OK) {
    std::cout << "Insert into table failed: " << PQresultErrorMessage(res)
              << std::endl;
  }
  PQclear(res);

  /* UPDATE demo */
  res = PQexec(conn, "update t set id = 0 where id = 1;");
  if (PQresultStatus(res) != PGRES_COMMAND_OK) {
    std::cout << "Update failed: " << PQresultErrorMessage(res) << std::endl;
  } else {
    std::cout << "Update counts: " << PQcmdTuples(res) << std::endl;
  }
  PQclear(res);

  const char command[] = "insert into t values($1, $2);";
  char cid[] = "10";
  char name[20] = "helloworld2";
  int nParams = 2;
  const char *const paramValues[] = {cid, name};
  const int paramLengths[] = {sizeof(cid), sizeof(name)};
  const int paramFormats[] = {0, 0};
  int resultFormat = 0;

  /* PQexecParams demo */
  res = PQexecParams(conn, command, nParams, NULL, paramValues, paramLengths,
                     paramFormats, resultFormat);
  if (PQresultStatus(res) != PGRES_COMMAND_OK) {
    std::cout << "PQexecParams failed: " << PQresultErrorMessage(res)
              << std::endl;
  }
  PQclear(res);

  /* PREPARE INSERT demo */
  res = PQprepare(conn, "insertStmt", command, nParams, NULL);
  if (PQresultStatus(res) != PGRES_COMMAND_OK) {
    std::cout << "PQprepare failed: " << PQresultErrorMessage(res) << std::endl;
    PQclear(res);
  } else {
    PQclear(res);
    res = PQexecPrepared(conn, "insertStmt", nParams, paramValues, paramLengths,
                         paramFormats, resultFormat);
    if (PQresultStatus(res) != PGRES_COMMAND_OK) {
      std::cout << "PQexecPrepared failed: " << PQresultErrorMessage(res)
                << std::endl;
    }
    PQclear(res);
  }

  /* SELECT demo */
  res = PQexec(conn, "select * from t;");
  if (PQresultStatus(res) != PGRES_TUPLES_OK) {
    std::cout << "Select failed: " << PQresultErrorMessage(res) << std::endl;
  } else {
    std::cout << "Got " << PQntuples(res) << " tuples, each tuple has "
              << PQnfields(res) << " fields" << std::endl;
    // print column names
    for (int i = 0; i < PQnfields(res); i++) {
      std::cout << PQfname(res, i) << " ";
    }
    std::cout << std::endl;
    // print column values
    for (int i = 0; i < PQntuples(res); i++) {
      for (int j = 0; j < PQnfields(res); j++) {
        std::cout << PQgetvalue(res, i, j) << " ";
      }
      std::cout << std::endl;
    }
  }
  PQclear(res);

  res = PQexec(conn, "DROP TABLE t;");
  PQclear(res);

  PQfinish(conn);
  return 0;
}
Jenkinsfile
pipeline {
    agent any
    stages {
        stage('Fire Up docker-compose') {
            steps {
                step([$class: 'DockerComposeBuilder', dockerComposeFile: 'docker-compose.yml', option: [$class: 'StartAllServices'], useCustomDockerComposeFile: false])
            }
        }
        stage('Preparations') {
            agent {
                docker { image 'mango:latest' }
            }
            steps {
                sh script: 'rm -rf build/', returnStatus: true
            }
        }
        stage('Build') {
            agent {
                docker { image 'mango:latest' }
            }
            steps {
                sh script: 'mkdir build && cd build && cmake .. && make -j'
            }
        }
        stage('Test') {
            agent {
                docker { image 'mango:latest' }
            }
            steps {
                sh script: 'cd build && ./Mango'
            }
        }
    }
}
I have already confirmed that docker-compose up completes successfully on its own. Unfortunately, something about the Jenkins configuration is wrong: the build image cannot find the database server.
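My guess is that the containers started by the docker agent blocks are not attached to the network that docker-compose creates, so the hostname postgres does not resolve inside them. Here is a sketch of the direction I am considering for the Test stage (the network name is an assumption; Compose derives it from the project name, so it may be different in my setup):

stage('Test') {
    agent {
        docker {
            image 'mango:latest'
            // assumed Compose default network name: <project>_default
            args '--network docker_default'
        }
    }
    steps {
        sh script: 'cd build && ./Mango'
    }
}

Is something like this the intended way to combine docker-compose with per-stage docker agents, or is there a better pattern?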