ci: build GreptimeDB binary for later use (GreptimeTeam#3244)
* ci: build GreptimeDB binaries for later use

* debug CI

* try larger runner host

* Revert "try larger runner host"

This reverts commit 03c18c0.

* fix: resolve PR comments

* revert some unrelated action yamls

* fix CI

* use artifact upload v4 for faster upload and download speed
MichaelScofield authored Jan 31, 2024
1 parent 60e760b commit 50d16d6
Showing 6 changed files with 93 additions and 28 deletions.
7 changes: 4 additions & 3 deletions .github/actions/upload-artifacts/action.yml
@@ -6,7 +6,7 @@ inputs:
required: true
target-file:
description: The path of the target artifact
required: true
required: false
version:
description: Version of the artifact
required: true
@@ -18,6 +18,7 @@ runs:
using: composite
steps:
- name: Create artifacts directory
if: ${{ inputs.target-file != '' }}
working-directory: ${{ inputs.working-dir }}
shell: bash
run: |
@@ -51,13 +52,13 @@ runs:
# Note: The artifacts will be double zip compressed(related issue: https://github.com/actions/upload-artifact/issues/39).
# However, when we use 'actions/download-artifact@v3' to download the artifacts, it will be automatically unzipped.
- name: Upload artifacts
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.artifacts-dir }}
path: ${{ inputs.working-dir }}/${{ inputs.artifacts-dir }}.tar.gz

- name: Upload checksum
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.artifacts-dir }}.sha256sum
path: ${{ inputs.working-dir }}/${{ inputs.artifacts-dir }}.sha256sum
62 changes: 47 additions & 15 deletions .github/workflows/develop.yml
@@ -78,8 +78,8 @@ jobs:
- name: Run taplo
run: taplo format --check

sqlness:
name: Sqlness Test
build:
name: Build GreptimeDB binaries
if: github.event.pull_request.draft == false
runs-on: ${{ matrix.os }}
strategy:
@@ -89,15 +89,47 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: arduino/setup-protoc@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ env.RUST_TOOLCHAIN }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
- uses: Swatinem/rust-cache@v2
- name: Build greptime binaries
shell: bash
run: cargo build
- name: Pack greptime binaries
shell: bash
run: |
mkdir bins && \
mv ./target/debug/greptime bins && \
mv ./target/debug/sqlness-runner bins
- name: Print greptime binaries info
run: ls -lh bins
- name: Upload artifacts
uses: ./.github/actions/upload-artifacts
with:
artifacts-dir: bins
version: current

sqlness:
name: Sqlness Test
if: github.event.pull_request.draft == false
needs: build
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ ubuntu-20.04 ]
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- name: Download pre-built binaries
uses: actions/download-artifact@v4
with:
name: bins
path: .
- name: Unzip binaries
run: tar -xvf ./bins.tar.gz
- name: Run sqlness
run: cargo sqlness
run: ./bins/sqlness-runner -c ./tests/cases --bins-dir ./bins
- name: Upload sqlness logs
if: always()
uses: actions/upload-artifact@v3
@@ -109,26 +141,26 @@ jobs:
sqlness-kafka-wal:
name: Sqlness Test with Kafka Wal
if: github.event.pull_request.draft == false
needs: build
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ ubuntu-20.04 ]
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: arduino/setup-protoc@v1
- name: Download pre-built binaries
uses: actions/download-artifact@v4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ env.RUST_TOOLCHAIN }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
name: bins
path: .
- name: Unzip binaries
run: tar -xvf ./bins.tar.gz
- name: Setup kafka server
working-directory: tests-integration/fixtures/kafka
run: docker compose -f docker-compose-standalone.yml up -d --wait
- name: Run sqlness
run: cargo sqlness -w kafka -k 127.0.0.1:9092
run: ./bins/sqlness-runner -w kafka -k 127.0.0.1:9092 -c ./tests/cases --bins-dir ./bins
- name: Upload sqlness logs
if: always()
uses: actions/upload-artifact@v3
2 changes: 1 addition & 1 deletion docker/dev-builder/ubuntu/Dockerfile
@@ -40,7 +40,7 @@ RUN apt-get -y purge python3.8 && \
# wildcard here. However, that requires the git's config files and the submodules all owned by the very same user.
# It's troublesome to do this since the dev build runs in Docker, which is under user "root"; while outside the Docker,
# it can be a different user that have prepared the submodules.
RUN git config --global --add safe.directory ‘*’
RUN git config --global --add safe.directory *

# Install Python dependencies.
COPY $DOCKER_BUILD_ROOT/docker/python/requirements.txt /etc/greptime/requirements.txt
36 changes: 30 additions & 6 deletions tests/runner/src/env.rs
@@ -57,6 +57,11 @@ pub struct Env {
data_home: PathBuf,
server_addr: Option<String>,
wal: WalConfig,

/// The path to the directory that contains the pre-built GreptimeDB binary.
/// When running in CI, this is expected to be set.
/// If not set, this runner will build the GreptimeDB binary itself when needed, and set this field by then.
bins_dir: Arc<Mutex<Option<PathBuf>>>,
}

#[async_trait]
@@ -78,19 +83,25 @@ impl EnvController for Env {
}

impl Env {
pub fn new(data_home: PathBuf, server_addr: Option<String>, wal: WalConfig) -> Self {
pub fn new(
data_home: PathBuf,
server_addr: Option<String>,
wal: WalConfig,
bins_dir: Option<PathBuf>,
) -> Self {
Self {
data_home,
server_addr,
wal,
bins_dir: Arc::new(Mutex::new(bins_dir)),
}
}

async fn start_standalone(&self) -> GreptimeDB {
if let Some(server_addr) = self.server_addr.clone() {
self.connect_db(&server_addr)
} else {
Self::build_db().await;
self.build_db();
self.setup_wal();

let db_ctx = GreptimeDBContext::new(self.wal.clone());
@@ -116,7 +127,7 @@ impl Env {
if let Some(server_addr) = self.server_addr.clone() {
self.connect_db(&server_addr)
} else {
Self::build_db().await;
self.build_db();
self.setup_wal();

let db_ctx = GreptimeDBContext::new(self.wal.clone());
@@ -249,8 +260,12 @@ impl Env {
#[cfg(windows)]
let program = "greptime.exe";

let bins_dir = self.bins_dir.lock().unwrap().clone().expect(
"GreptimeDB binary is not available. Please pass in the path to the directory that contains the pre-built GreptimeDB binary. Or you may call `self.build_db()` beforehand.",
);

let mut process = Command::new(program)
.current_dir(util::get_binary_dir("debug"))
.current_dir(bins_dir)
.env("TZ", "UTC")
.args(args)
.stdout(log_file)
@@ -374,7 +389,11 @@ impl Env {
}

/// Build the DB with `cargo build --bin greptime`
async fn build_db() {
fn build_db(&self) {
if self.bins_dir.lock().unwrap().is_some() {
return;
}

println!("Going to build the DB...");
let output = Command::new("cargo")
.current_dir(util::get_workspace_root())
@@ -389,7 +408,12 @@ impl Env {
io::stderr().write_all(&output.stderr).unwrap();
panic!();
}
println!("Build finished, starting...");

let _ = self
.bins_dir
.lock()
.unwrap()
.insert(util::get_binary_dir("debug"));
}
}

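Editor's note: the env.rs hunks above replace the old async build_db() (which always ran `cargo build`) with a shared `bins_dir: Arc<Mutex<Option<PathBuf>>>`. When CI supplies a pre-built directory the runner uses it directly; otherwise it builds the debug binaries once and caches the resulting path. Below is a minimal, self-contained sketch of that lazy-resolution pattern; names such as BinsDir and resolve are illustrative only, not the actual GreptimeDB code.

use std::path::PathBuf;
use std::process::Command;
use std::sync::{Arc, Mutex};

/// Illustrative stand-in for the runner's shared binaries-directory handle.
struct BinsDir(Arc<Mutex<Option<PathBuf>>>);

impl BinsDir {
    /// Start with an optional pre-built directory (e.g. taken from `--bins-dir`).
    fn new(pre_built: Option<PathBuf>) -> Self {
        Self(Arc::new(Mutex::new(pre_built)))
    }

    /// Return the binaries directory, building the debug binaries once if unset.
    fn resolve(&self, workspace_root: &str) -> PathBuf {
        let mut guard = self.0.lock().unwrap();
        if let Some(dir) = guard.as_ref() {
            // A pre-built directory was supplied (or already resolved): reuse it.
            return dir.clone();
        }
        // No pre-built binaries: build on the fly, mirroring `cargo build --bin greptime`.
        let status = Command::new("cargo")
            .current_dir(workspace_root)
            .args(["build", "--bin", "greptime"])
            .status()
            .expect("failed to spawn cargo");
        assert!(status.success(), "cargo build failed");

        // Remember target/debug so later starts skip the build step.
        let dir = PathBuf::from(workspace_root).join("target").join("debug");
        *guard = Some(dir.clone());
        dir
    }
}

fn main() {
    // CI would pass Some("./bins".into()); a local run without --bins-dir passes None.
    let bins = BinsDir::new(None).resolve(".");
    println!("binaries live in {}", bins.display());
}

With this shape, the CI jobs can point the runner at the unpacked bins artifact and skip compilation entirely, while a local run without --bins-dir still falls back to building into target/debug.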
10 changes: 9 additions & 1 deletion tests/runner/src/main.rs
@@ -62,6 +62,11 @@ struct Args {
/// from starting a kafka cluster, and use the given endpoint as kafka backend.
#[clap(short, long)]
kafka_wal_broker_endpoints: Option<String>,

/// The path to the directory where GreptimeDB's binaries resides.
/// If not set, sqlness will build GreptimeDB on the fly.
#[clap(long)]
bins_dir: Option<PathBuf>,
}

#[tokio::main]
@@ -94,6 +99,9 @@ async fn main() {
},
};

let runner = Runner::new(config, Env::new(data_home, args.server_addr, wal));
let runner = Runner::new(
config,
Env::new(data_home, args.server_addr, wal, args.bins_dir),
);
runner.run().await.unwrap();
}
4 changes: 2 additions & 2 deletions tests/runner/src/util.rs
@@ -91,15 +91,15 @@ pub fn get_workspace_root() -> String {
runner_crate_path.into_os_string().into_string().unwrap()
}

pub fn get_binary_dir(mode: &str) -> String {
pub fn get_binary_dir(mode: &str) -> PathBuf {
// first go to the workspace root.
let mut workspace_root = PathBuf::from(get_workspace_root());

// change directory to target dir (workspace/target/<build mode>/)
workspace_root.push("target");
workspace_root.push(mode);

workspace_root.into_os_string().into_string().unwrap()
workspace_root
}

/// Spin-waiting a socket address is available, or timeout.
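Editor's note: the util.rs change has get_binary_dir return a PathBuf instead of a String, which composes directly with Command::current_dir and avoids a lossy OsString-to-String round trip. A small hypothetical sketch of that shape; the helper name and the `ls` invocation are illustrative only.

use std::path::PathBuf;
use std::process::Command;

/// Hypothetical helper mirroring get_binary_dir: workspace_root/target/<mode>/.
fn binary_dir(workspace_root: &str, mode: &str) -> PathBuf {
    let mut dir = PathBuf::from(workspace_root);
    dir.push("target");
    dir.push(mode);
    dir
}

fn main() {
    let bins = binary_dir(".", "debug");
    // A PathBuf plugs straight into Command APIs that take AsRef<Path>,
    // so callers no longer convert through String (which can fail on non-UTF-8 paths).
    let status = Command::new("ls")
        .arg("-lh")
        .current_dir(&bins)
        .status()
        .expect("failed to list the binary directory");
    println!("listed {} -> {}", bins.display(), status);
}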
