Compare commits
No commits in common. "e6775f19813af999ef2d3de8a02421c7253a277b" and "f72a025dfe0ca7b2e3a656767b2b3e95d56d6245" have entirely different histories.
e6775f1981
...
f72a025dfe
11 changed files with 192 additions and 604 deletions
163
Cargo.lock
generated
163
Cargo.lock
generated
|
@ -29,21 +29,6 @@ version = "1.5.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d67c60c5f10f11c6ee04de72b2dd98bb9d2548cbc314d22a609bfa8bd9e87e8f"
|
||||
|
||||
[[package]]
|
||||
name = "android-tzdata"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
|
||||
|
||||
[[package]]
|
||||
name = "android_system_properties"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.18"
|
||||
|
@ -160,12 +145,6 @@ dependencies = [
|
|||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.17.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.5.0"
|
||||
|
@ -178,35 +157,12 @@ version = "1.9.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229"
|
||||
dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
"js-sys",
|
||||
"num-traits",
|
||||
"wasm-bindgen",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.27"
|
||||
|
@ -277,12 +233,6 @@ version = "0.9.6"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation-sys"
|
||||
version = "0.8.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.2.16"
|
||||
|
@ -556,7 +506,6 @@ version = "0.1.1"
|
|||
dependencies = [
|
||||
"alphanumeric-sort",
|
||||
"better-commands",
|
||||
"chrono",
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"serde",
|
||||
|
@ -626,29 +575,6 @@ dependencies = [
|
|||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iana-time-zone"
|
||||
version = "0.1.61"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
|
||||
dependencies = [
|
||||
"android_system_properties",
|
||||
"core-foundation-sys",
|
||||
"iana-time-zone-haiku",
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
"windows-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iana-time-zone-haiku"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
|
||||
dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_collections"
|
||||
version = "1.5.0"
|
||||
|
@ -810,16 +736,6 @@ version = "1.0.14"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.77"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.5.0"
|
||||
|
@ -1165,12 +1081,6 @@ dependencies = [
|
|||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.18"
|
||||
|
@ -1258,12 +1168,6 @@ dependencies = [
|
|||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "signal-hook-registry"
|
||||
version = "1.4.2"
|
||||
|
@ -1829,64 +1733,6 @@ version = "0.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"rustversion",
|
||||
"wasm-bindgen-macro",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "whoami"
|
||||
version = "1.5.2"
|
||||
|
@ -1897,15 +1743,6 @@ dependencies = [
|
|||
"wasite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.48.0"
|
||||
|
|
|
@ -9,7 +9,6 @@ readme = "README.md"
|
|||
[dependencies]
|
||||
alphanumeric-sort = "1.5.3"
|
||||
better-commands = "1.0.2"
|
||||
chrono = "0.4.39"
|
||||
clap = { version = "4.5.23", features = ["derive"] }
|
||||
clap_complete = "4.5.40"
|
||||
serde = { version = "1.0.216", features = ["derive"] }
|
||||
|
|
|
@ -4,12 +4,17 @@ This is Gregory. Gregory controls repos. Gregory keeps track of updating repos,
|
|||
|
||||
## Documentation
|
||||
|
||||
Go look at [`docs/`](/docs/), and check out the [example config](/gregory.example.toml)
|
||||
Go look at [`docs/`](/docs/)
|
||||
|
||||
## TODO
|
||||
|
||||
- Add multithreading
|
||||
- Add hook system
|
||||
- Add SQL database (maybe using `sqlx`?)
|
||||
- Log stderr too
|
||||
- Add dependency system
|
||||
- Automatic detection with features (extensibility?) for each distro?
|
||||
- Add podman errors
|
||||
|
||||
## Other stuff
|
||||
|
||||
|
@ -17,4 +22,3 @@ Go look at [`docs/`](/docs/), and check out the [example config](/gregory.exampl
|
|||
- Why the name?
|
||||
- I was thinking to go with something dark and foreboding, since this is a program to control *everything* about a repo - it's the high command. But I couldn't think of anything and thought just naming it some lame random name instead would be way funnier. Hence, Gregory.
|
||||
- Gregory is a program, so it uses it/its pronouns. It also doesn't mind whether you capitalize its name or not, "gregory" or "Gregory" are fine, you can even shorten it if you want.
|
||||
- It's built for updating package repositories, but can be used to run pretty much anything. This isn't to say support won't be offered unless you're using it for a repo, but development will be focused on updating repos.
|
||||
|
|
17
dev-setup.sh
17
dev-setup.sh
|
@ -1,9 +1,4 @@
|
|||
#!/usr/bin/env bash
|
||||
set -ex
|
||||
|
||||
command_exists() { type "$1" &>/dev/null; }
|
||||
|
||||
./test.sh
|
||||
|
||||
rm -rf ./data/
|
||||
mkdir -p ./data/{fedora-repo,librewolf,other-workspace}
|
||||
|
@ -11,16 +6,8 @@ mkdir -p ./data/{fedora-repo,librewolf,other-workspace}
|
|||
mkdir -p ./dev/{pgadmin,gregory-pg}
|
||||
chmod -R 777 ./dev/pgadmin
|
||||
|
||||
if command_exists "docker-compose"; then
|
||||
docker-compose -f podman-compose.dev.yml down
|
||||
docker-compose -f podman-compose.dev.yml up -d
|
||||
elif command_exists "podman-compose"; then
|
||||
podman-compose -f podman-compose.dev.yml down
|
||||
podman-compose -f podman-compose.dev.yml up -d
|
||||
else
|
||||
echo "[ERROR] neither docker-compose nor podman-compose were found"
|
||||
exit 127
|
||||
fi
|
||||
podman-compose down
|
||||
podman-compose -f podman-compose.dev.yml up -d
|
||||
|
||||
echo "
|
||||
---
|
||||
|
|
|
@ -1,31 +0,0 @@
|
|||
# Database docs
|
||||
|
||||
Gregory's database is described as follows:
|
||||
|
||||
```sql
|
||||
CREATE TABLE IF NOT EXISTS job_logs (
|
||||
start_time timestamp,
|
||||
end_time timestamp,
|
||||
duration interval GENERATED ALWAYS AS (end_time - start_time) STORED,
|
||||
exit_code smallint,
|
||||
job_id text,
|
||||
revision text,
|
||||
uuid text,
|
||||
container_name text GENERATED ALWAYS AS (job_id || '-' || uuid) STORED,
|
||||
log_path text
|
||||
);
|
||||
```
|
||||
|
||||
i.e. it uses the table `job_logs`, containing the following fields:
|
||||
|
||||
| start_time | end_time | duration | exit_code | job_id | revision | uuid | container_name | log_path |
|
||||
| ---------- | -------- | -------- | --------- | ------ | -------- | ---- | -------------- | -------- |
|
||||
|
||||
---
|
||||
|
||||
`duration` and `container_name` don't have to be inserted, as the database generates them, so they're just inserted like this:
|
||||
|
||||
```rs
|
||||
INSERT INTO job_logs (start_time, end_time, exit_code, job_id, revision, uuid, log_path)
|
||||
VALUES ('1970-01-01 10:10:10 idkkkkk', '1970-01-01 10:11:10 idkkkkk', 1, 'packaging.librewolf.compilation', '5', 'blahblahblahblah', './data/logs/packages.librewolf.compilation/5/blahblahblahblah');
|
||||
```
|
|
@ -12,7 +12,7 @@ max-threads = 10
|
|||
revision = "2"
|
||||
threads = 6
|
||||
image = "docker.io/library/debian"
|
||||
commands = ["echo hi", "sleep 2.432", "echo helloooooooooo"]
|
||||
commands = ["echo hi", "echo helloooooooooo"]
|
||||
volumes = ["librewolf"]
|
||||
|
||||
[packages.librewolf.packaging.fedora]
|
||||
|
|
33
src/data.rs
33
src/data.rs
|
@ -32,22 +32,6 @@ pub(crate) struct Config {
|
|||
pub(crate) volumes: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub(crate) fn from_file(filename: String) -> Result<Config, Error> {
|
||||
match fs::read_to_string(filename) {
|
||||
Ok(raw_data) => match toml::from_str(raw_data.as_str()) {
|
||||
Ok(conf) => return Ok(conf),
|
||||
Err(e) => {
|
||||
return Err(Error::DeserError(e));
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Holds the data for a job
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub(crate) struct Job {
|
||||
|
@ -105,13 +89,26 @@ pub(crate) struct JobExitStatus {
|
|||
/// Where the log is
|
||||
///
|
||||
/// TEMPORARY
|
||||
/// TODO: Have main() handle logs and writing them to the database, not doing it in run_job()
|
||||
pub(crate) log_path: String,
|
||||
/// How long it took to run the job
|
||||
pub(crate) duration: time::Duration,
|
||||
/// The name of the container this job ran in
|
||||
pub(crate) container_name: String,
|
||||
/// Uuid
|
||||
pub(crate) job_uuid: String
|
||||
}
|
||||
|
||||
pub(crate) fn config_from_file(filename: String) -> Result<Config, Error> {
|
||||
match fs::read_to_string(filename) {
|
||||
Ok(raw_data) => match toml::from_str(raw_data.as_str()) {
|
||||
Ok(conf) => return Ok(conf),
|
||||
Err(e) => {
|
||||
return Err(Error::DeserError(e));
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ==========================
|
||||
|
|
|
@ -6,4 +6,6 @@ pub enum Error {
|
|||
IOError(#[from] std::io::Error),
|
||||
#[error("error while deserializing TOML: {0}")]
|
||||
DeserError(#[from] toml::de::Error),
|
||||
#[error("Error connecting to database: {0}")]
|
||||
DbConnectionError(String),
|
||||
}
|
||||
|
|
221
src/logging.rs
221
src/logging.rs
|
@ -1,51 +1,65 @@
|
|||
use uuid::Uuid;
|
||||
|
||||
use crate::errors::Error;
|
||||
use std::env;
|
||||
use std::fs::{create_dir_all, File, OpenOptions};
|
||||
use std::fs::{File, OpenOptions};
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::time::{Instant, SystemTime};
|
||||
|
||||
/// The logger for gregory itself - NOT for jobs
|
||||
pub(crate) struct Logger {
|
||||
log_file: File,
|
||||
}
|
||||
|
||||
impl Logger {
|
||||
pub(crate) fn new(path: String) -> Result<Logger, Error> {
|
||||
match OpenOptions::new().append(true).open(path) {
|
||||
Ok(f) => return Ok(Logger { log_file: f }),
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Log a warning
|
||||
///
|
||||
/// Fun gregory lore: I originally typo'd this as "Strign" and the linter didn't catch it for some reason
|
||||
pub(crate) fn warning(&mut self, text: String) -> Result<(), Error> {
|
||||
match writeln!(&mut self.log_file, "[WARNING] {}", text) {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Log an error
|
||||
pub(crate) fn error(&mut self, text: String) -> Result<(), Error> {
|
||||
match writeln!(&mut self.log_file, "[ERROR] {}", text) {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Logging for a [`Job`]
|
||||
// TODO: log to postgres instead; maybe i already made a comment todo-ing this idk
|
||||
pub(crate) struct JobLogger {
|
||||
log_file: File,
|
||||
path: String,
|
||||
}
|
||||
|
||||
impl JobLogger {
|
||||
pub(crate) fn new(
|
||||
data_dir: String,
|
||||
job_id: String,
|
||||
revision: String,
|
||||
run_id: Uuid,
|
||||
) -> JobLogger {
|
||||
// get path and create the dir.
|
||||
let log_path = format!("{data_dir}/logs/{job_id}/{revision}/{run_id}");
|
||||
let log_dir = Path::new(&log_path).parent().unwrap();
|
||||
create_dir_all(log_dir).unwrap();
|
||||
|
||||
return JobLogger {
|
||||
log_file: OpenOptions::new()
|
||||
.create_new(true)
|
||||
.append(true)
|
||||
.open(&log_path)
|
||||
.unwrap(),
|
||||
path: log_path,
|
||||
};
|
||||
pub(crate) fn new(path: String) -> Result<JobLogger, Error> {
|
||||
match OpenOptions::new().create_new(true).append(true).open(path) {
|
||||
Ok(f) => return Ok(JobLogger { log_file: f }),
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Log something printed to stdout
|
||||
///
|
||||
/// Fun gregory lore: I originally typo'd this as "Strign" and the linter didn't catch it for some reason
|
||||
pub(crate) fn stdout(&mut self, text: String, start_time: Instant) -> Result<(), Error> {
|
||||
match writeln!(
|
||||
&mut self.log_file,
|
||||
"[{:.3}] [stdout] {}",
|
||||
start_time.elapsed().as_millis() as f64 / 1000.0,
|
||||
text
|
||||
) {
|
||||
pub(crate) fn stdout(&mut self, text: String) -> Result<(), Error> {
|
||||
match writeln!(&mut self.log_file, "[stdout] {}", text) {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
|
@ -54,147 +68,12 @@ impl JobLogger {
|
|||
}
|
||||
|
||||
/// Log something printed to stderr
|
||||
pub(crate) fn stderr(&mut self, text: String, start_time: Instant) -> Result<(), Error> {
|
||||
match writeln!(
|
||||
&mut self.log_file,
|
||||
"[{}] [stderr] {}",
|
||||
start_time.elapsed().as_millis() / 1000,
|
||||
text
|
||||
) {
|
||||
pub(crate) fn stderr(&mut self, text: String) -> Result<(), Error> {
|
||||
match writeln!(&mut self.log_file, "[stderr] {}", text) {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(e) => {
|
||||
return Err(Error::IOError(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the path the job's output was logged to
|
||||
pub(crate) fn path(&self) -> String {
|
||||
return self.path.clone();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) mod sql {
|
||||
use chrono::{DateTime, Utc};
|
||||
use sqlx::{Connection, PgConnection};
|
||||
use std::{env, time::SystemTime};
|
||||
|
||||
/// Returns a new connection to postgres
|
||||
///
|
||||
/// *x*: How many times to retry the reconnect
|
||||
pub(crate) async fn start(x: u16) -> PgConnection {
|
||||
let mut conn = db_connect_with_retries(x).await;
|
||||
create_tables(&mut conn).await;
|
||||
return conn;
|
||||
}
|
||||
|
||||
/// Returns the database environment variables
|
||||
///
|
||||
/// Format: (address, username, password)
|
||||
pub(crate) fn db_vars() -> (String, String, String) {
|
||||
let db_address: String = match env::var("GREGORY_DB_ADDRESS") {
|
||||
Ok(address) => address,
|
||||
Err(_) => {
|
||||
panic!("Environment variable `GREGORY_DB_ADDRESS` not set")
|
||||
}
|
||||
};
|
||||
let db_user: String = match env::var("GREGORY_DB_USER") {
|
||||
Ok(user) => user,
|
||||
Err(_) => {
|
||||
panic!("Environment variable `GREGORY_DB_USER` not set")
|
||||
}
|
||||
};
|
||||
let db_pass: String = match env::var("GREGORY_DB_PASSWORD") {
|
||||
Ok(pass) => pass,
|
||||
Err(_) => {
|
||||
panic!("Environment variable `GREGORY_DB_PASSWORD` not set")
|
||||
}
|
||||
};
|
||||
|
||||
return (db_address, db_user, db_pass);
|
||||
}
|
||||
|
||||
/// Returns the connection to the database
|
||||
pub(crate) async fn db_connection() -> Result<PgConnection, sqlx::Error> {
|
||||
let (db_address, db_user, db_pass) = db_vars();
|
||||
let uri = format!("postgres://{db_user}:{db_pass}@{db_address}/gregory");
|
||||
return PgConnection::connect(uri.as_str()).await;
|
||||
}
|
||||
|
||||
pub(crate) async fn db_connect_with_retries(x: u16) -> PgConnection {
|
||||
let mut conn = db_connection().await;
|
||||
if conn.is_ok() {
|
||||
return conn.unwrap();
|
||||
}
|
||||
|
||||
for _ in 0..x {
|
||||
conn = db_connection().await;
|
||||
if conn.is_ok() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return conn.unwrap();
|
||||
}
|
||||
|
||||
// TODO: when adding logging to postgres directly, update this so it 1) adds the job at the start, 2) logs line-by-line, and 3) adds the end time and exit code at the end of the job
|
||||
pub(crate) async fn log_job(
|
||||
conn: &mut PgConnection,
|
||||
start_time: SystemTime,
|
||||
end_time: SystemTime,
|
||||
exit_code: Option<i32>,
|
||||
job_id: String,
|
||||
revision: String,
|
||||
uuid: String,
|
||||
log_path: String,
|
||||
) {
|
||||
let start_time: DateTime<Utc> = start_time.into();
|
||||
let start_time = start_time.format("%+").to_string();
|
||||
let end_time: DateTime<Utc> = end_time.into();
|
||||
let end_time = end_time.format("%+").to_string();
|
||||
let exit_code = match exit_code {
|
||||
Some(code) => code.to_string(),
|
||||
None => "NULL".to_string(),
|
||||
};
|
||||
let query = format!("INSERT INTO job_logs (start_time, end_time, exit_code, job_id, revision, uuid, log_path) VALUES ('{start_time}', '{end_time}', {exit_code}, '{job_id}', '{revision}', '{uuid}', '{log_path}')");
|
||||
sqlx::query(query.as_str())
|
||||
.execute(conn.as_mut())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Tries to connect to the database *x* times, panics after reaching that limit
|
||||
|
||||
/// Creates table(s) for gregory if they don't exist already
|
||||
pub(crate) async fn create_tables(conn: &mut PgConnection) {
|
||||
sqlx::query(
|
||||
"CREATE TABLE IF NOT EXISTS job_logs (
|
||||
start_time timestamp,
|
||||
end_time timestamp,
|
||||
duration interval GENERATED ALWAYS AS (end_time - start_time) STORED,
|
||||
exit_code smallint,
|
||||
job_id text,
|
||||
revision text,
|
||||
uuid text,
|
||||
container_name text GENERATED ALWAYS AS (job_id || '-' || uuid) STORED,
|
||||
log_path text
|
||||
);
|
||||
",
|
||||
)
|
||||
.execute(conn)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub(crate) fn test_db_vars() {
|
||||
assert_eq!(
|
||||
(
|
||||
"postgres".to_string(),
|
||||
"gregory".to_string(),
|
||||
"pass".to_string()
|
||||
),
|
||||
sql::db_vars()
|
||||
)
|
||||
}
|
||||
|
|
302
src/main.rs
302
src/main.rs
|
@ -3,8 +3,6 @@ use crate::data::*;
|
|||
use better_commands;
|
||||
use clap::{CommandFactory, Parser};
|
||||
use clap_complete::aot::{generate, Bash, Elvish, Fish, PowerShell, Zsh};
|
||||
use logging::sql;
|
||||
use sqlx::PgConnection;
|
||||
use std::collections::HashMap;
|
||||
use std::fs::create_dir_all;
|
||||
use std::fs::remove_dir_all;
|
||||
|
@ -16,8 +14,6 @@ use std::path::Path;
|
|||
use std::process::Command;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use std::time::Instant;
|
||||
use std::time::SystemTime;
|
||||
use uuid::Uuid;
|
||||
|
||||
mod cli;
|
||||
|
@ -26,8 +22,7 @@ mod errors;
|
|||
mod logging;
|
||||
mod tests;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
fn main() {
|
||||
let cli = Cli::parse();
|
||||
|
||||
match cli.command {
|
||||
|
@ -49,18 +44,37 @@ async fn main() {
|
|||
}
|
||||
},
|
||||
Commands::Run { config } => {
|
||||
run(config).await;
|
||||
run(config);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn run(config_path: String) {
|
||||
let config = Config::from_file(config_path).unwrap(); // this reads the file to a [`Config`] thing
|
||||
let state = State::from_config(config.clone()).await;
|
||||
fn run(config_path: String) {
|
||||
let config = config_from_file(config_path).unwrap(); // this reads the file to a [`Config`] thing
|
||||
|
||||
let mut jobs: HashMap<String, Job> = HashMap::new();
|
||||
|
||||
// arranges all the jobs by their job id (e.g. `packages.librewolf.compilation`)
|
||||
for (package_name, package) in config.clone().packages {
|
||||
match package.compilation {
|
||||
Some(tmp) => {
|
||||
jobs.insert(format!("packages.{}.compilation", package_name), tmp);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
for (job_name, job) in package.packaging {
|
||||
jobs.insert(
|
||||
format!("packages.{}.packaging.{}", package_name, job_name),
|
||||
job,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: improve efficiency of all this logic
|
||||
// TODO: Also clean it up and split it into different functions, especially the job sorter
|
||||
// TODO: figure all this out and stuff and update the comments above this - the dependency map is done though
|
||||
let dep_map = dependency_map(jobs.clone(), config.clone());
|
||||
|
||||
let mut ordered: Vec<String> = Vec::new(); // holds the job ids in order of how they should be run
|
||||
|
||||
|
@ -73,56 +87,39 @@ async fn run(config_path: String) {
|
|||
|
||||
let failed_packages: Vec<String> = Vec::new();
|
||||
|
||||
|
||||
let mut pg_connection = sql::start(5).await;
|
||||
|
||||
// runs the jobs (will need to be updated after sorting is added)
|
||||
for (job_id, job) in state.jobs {
|
||||
let start_time = SystemTime::now();
|
||||
let job_exit_status = run_job(&state.conf, job_id.clone(), job.clone());
|
||||
|
||||
// TODO: PUSH IT TO THE DB HERE
|
||||
sql::log_job(
|
||||
pg_connection.as_mut(),
|
||||
start_time,
|
||||
start_time + job_exit_status.duration,
|
||||
job_exit_status.exit_code,
|
||||
job_id,
|
||||
job.revision,
|
||||
job_exit_status.job_uuid,
|
||||
job_exit_status.log_path,
|
||||
).await;
|
||||
for (job_id, job) in jobs {
|
||||
let job_exit_status = run_job(config.clone(), job_id, job);
|
||||
println!("{:#?}", job_exit_status);
|
||||
}
|
||||
}
|
||||
|
||||
fn run_job(
|
||||
conf: &Config,
|
||||
job_id: String,
|
||||
job: Job,
|
||||
) -> JobExitStatus {
|
||||
fn run_job(conf: Config, job_id: String, job: Job) -> JobExitStatus {
|
||||
// limit threads to max_threads in the config
|
||||
let mut threads = job.threads;
|
||||
if job.threads > conf.max_threads {
|
||||
threads = conf.max_threads;
|
||||
}
|
||||
|
||||
let run_id = Uuid::now_v7();
|
||||
let container_name: String = format!("gregory-{}-{}-{}", job_id, job.revision, Uuid::now_v7());
|
||||
|
||||
let job_logger = Arc::new(Mutex::new(logging::JobLogger::new(
|
||||
conf.data_dir.clone(),
|
||||
job_id.clone(),
|
||||
job.revision.clone(),
|
||||
run_id,
|
||||
)));
|
||||
// do job log setup
|
||||
let log_path = &format!("{}/logs/{container_name}", conf.data_dir); // can't select fields in the format!() {} thing, have to do this
|
||||
let log_dir: &Path = Path::new(log_path).parent().unwrap();
|
||||
create_dir_all(log_dir).unwrap();
|
||||
|
||||
let job_logger = Arc::new(Mutex::new(
|
||||
logging::JobLogger::new(log_path.clone()).unwrap(),
|
||||
));
|
||||
|
||||
// write the script
|
||||
let script_path: String = format!("{}/tmp/{}.sh", conf.data_dir, run_id);
|
||||
let script_dir = Path::new(&script_path).parent().unwrap(); // create dir for the script
|
||||
let script_path = &format!("{}/tmp/{container_name}.sh", conf.data_dir);
|
||||
let script_dir: &Path = Path::new(script_path).parent().unwrap(); // create dir for the script
|
||||
create_dir_all(script_dir).unwrap();
|
||||
write(&script_path, job.commands.join("\n")).unwrap();
|
||||
write(script_path, job.commands.join("\n")).unwrap();
|
||||
|
||||
// set permissions - *unix specific*
|
||||
let mut perms = File::open(&script_path)
|
||||
let mut perms = File::open(script_path)
|
||||
.unwrap()
|
||||
.metadata()
|
||||
.unwrap()
|
||||
|
@ -132,8 +129,7 @@ fn run_job(
|
|||
// run the job
|
||||
let mut cmd_args: Vec<String> = vec![
|
||||
"run".to_string(),
|
||||
"--rm".to_string(),
|
||||
format!("--name={job_id}-{run_id}"),
|
||||
format!("--name={container_name}"),
|
||||
format!("--cpus={threads}"),
|
||||
format!("--privileged={}", job.privileged),
|
||||
format!("-v={script_path}:/gregory-entrypoint.sh"),
|
||||
|
@ -155,26 +151,18 @@ fn run_job(
|
|||
let cmd_output = better_commands::run_funcs(
|
||||
Command::new("podman").args(cmd_args),
|
||||
{
|
||||
let start_time = Instant::now();
|
||||
let logger_clone = Arc::clone(&job_logger);
|
||||
move |stdout_lines| {
|
||||
for line in stdout_lines {
|
||||
let _ = logger_clone
|
||||
.lock()
|
||||
.unwrap()
|
||||
.stdout(line.unwrap(), start_time);
|
||||
let _ = logger_clone.lock().unwrap().stdout(line.unwrap());
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
let start_time = Instant::now();
|
||||
let logger_clone = Arc::clone(&job_logger);
|
||||
move |stderr_lines| {
|
||||
for line in stderr_lines {
|
||||
let _ = logger_clone
|
||||
.lock()
|
||||
.unwrap()
|
||||
.stderr(line.unwrap(), start_time);
|
||||
let _ = logger_clone.lock().unwrap().stderr(line.unwrap());
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -183,15 +171,12 @@ fn run_job(
|
|||
// remove tmp dir/clean up
|
||||
remove_dir_all(script_dir).unwrap();
|
||||
|
||||
let log_path = job_logger.lock().unwrap().path();
|
||||
|
||||
return JobExitStatus {
|
||||
container_name: script_path,
|
||||
container_name: container_name,
|
||||
duration: cmd_output.clone().duration(),
|
||||
job,
|
||||
job: job,
|
||||
exit_code: cmd_output.status_code(),
|
||||
log_path,
|
||||
job_uuid: run_id.to_string(),
|
||||
log_path: log_path.clone(),
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -235,136 +220,27 @@ fn order_jobs(jobs: HashMap<String, Job>, conf: Config) {
|
|||
*/
|
||||
}
|
||||
|
||||
/// Returns all the dependencies for a package recursively, *not* including the package's own jobs (e.g. compilation)
|
||||
fn recursive_deps_for_package(package_name: String, conf: Config) -> Vec<String> {
|
||||
let mut deps: Vec<String> = Vec::new();
|
||||
|
||||
for dep_name in conf
|
||||
.packages
|
||||
.get(&package_name)
|
||||
.unwrap()
|
||||
.dependencies
|
||||
.clone()
|
||||
{
|
||||
// add recursive dependencies
|
||||
deps.append(&mut recursive_deps_for_package(
|
||||
dep_name.clone(),
|
||||
conf.clone(),
|
||||
));
|
||||
}
|
||||
// add its compilation to deps
|
||||
match conf
|
||||
.packages
|
||||
.get(&package_name)
|
||||
.unwrap()
|
||||
.compilation
|
||||
.clone()
|
||||
{
|
||||
Some(_) => {
|
||||
deps.push(format!("packages.{package_name}.compilation"));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
// add packaging jobs to deps
|
||||
for (packaging_job_name, _) in conf.packages.get(&package_name).unwrap().packaging.clone() {
|
||||
deps.push(format!(
|
||||
"packages.{package_name}.packaging.{packaging_job_name}"
|
||||
))
|
||||
}
|
||||
|
||||
return deps;
|
||||
}
|
||||
|
||||
/// Global runtime state: parsed config, the flattened job table, the reverse
/// dependency map, and the database connection.
struct State {
    /// The entire config, from the config file.
    conf: Config,
    /// A hashmap mapping all job ids to what jobs depend on them (recursively)
    ///
    /// Using the example config (`gregory.example.toml`):
    ///
    /// ```json
    /// {
    ///     "packages.some-librewolf-dependency.packaging.fedora": [
    ///         "packages.librewolf.compilation",
    ///         "packages.librewolf.packaging.fedora",
    ///     ],
    ///     "packages.some-librewolf-dependency.compilation": [
    ///         "packages.librewolf.compilation",
    ///         "packages.librewolf.packaging.fedora",
    ///         "packages.some-librewolf-dependency.packaging.fedora",
    ///     ],
    ///     "packages.librewolf.compilation": [
    ///         "packages.librewolf.packaging.fedora",
    ///     ],
    /// }
    /// ```
    dependency_map: HashMap<String, Vec<String>>,
    /// A hashmap mapping all job ids (e.g. `packages.<name>.compilation`,
    /// `packages.<name>.packaging.<job>`) to their jobs
    jobs: HashMap<String, Job>,
    /// The connection to the database
    ///
    /// Example (from sqlx README, modified)
    /// ```ignore
    /// sqlx::query("DELETE FROM table").execute(&mut state.conn).await?;
    /// ```
    sql: PgConnection,
}
|
||||
|
||||
impl State {
|
||||
pub(crate) async fn from_file(filename: String) -> State {
|
||||
let conf = Config::from_file(filename).unwrap();
|
||||
return State::from_config(conf).await;
|
||||
}
|
||||
|
||||
pub(crate) async fn from_config(conf: Config) -> State {
|
||||
let mut jobs = HashMap::new();
|
||||
|
||||
for (package_name, package) in conf.clone().packages {
|
||||
match package.compilation {
|
||||
Some(tmp) => {
|
||||
jobs.insert(format!("packages.{}.compilation", package_name), tmp);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
for (job_name, job) in package.packaging {
|
||||
jobs.insert(
|
||||
format!("packages.{}.packaging.{}", package_name, job_name),
|
||||
job,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return State {
|
||||
conf: conf.clone(),
|
||||
jobs: jobs.clone(),
|
||||
dependency_map: State::dependency_map(jobs, conf),
|
||||
sql: logging::sql::start(5).await,
|
||||
};
|
||||
}
|
||||
|
||||
/// Returns a hashmap mapping all job ids to what jobs depend on them (recursively)
|
||||
///
|
||||
/// Example output using the example toml:
|
||||
///
|
||||
/// ```json
|
||||
/// {
|
||||
/// "packages.some-librewolf-dependency.packaging.fedora": [
|
||||
/// "packages.librewolf.compilation",
|
||||
/// "packages.librewolf.packaging.fedora",
|
||||
/// ],
|
||||
/// "packages.some-librewolf-dependency.compilation": [
|
||||
/// "packages.librewolf.compilation",
|
||||
/// "packages.librewolf.packaging.fedora",
|
||||
/// "packages.some-librewolf-dependency.packaging.fedora",
|
||||
/// ],
|
||||
/// "packages.librewolf.compilation": [
|
||||
/// "packages.librewolf.packaging.fedora",
|
||||
/// ],
|
||||
/// }
|
||||
/// ```
|
||||
fn dependency_map(jobs: HashMap<String, Job>, conf: Config) -> HashMap<String, Vec<String>> {
|
||||
/// Returns a hashmap mapping all job ids to what jobs depend on them (recursively)
|
||||
///
|
||||
/// Example output using the example toml:
|
||||
///
|
||||
/// ```json
|
||||
/// {
|
||||
/// "packages.some-librewolf-dependency.packaging.fedora": [
|
||||
/// "packages.librewolf.compilation",
|
||||
/// "packages.librewolf.packaging.fedora",
|
||||
/// ],
|
||||
/// "packages.some-librewolf-dependency.compilation": [
|
||||
/// "packages.librewolf.compilation",
|
||||
/// "packages.librewolf.packaging.fedora",
|
||||
/// "packages.some-librewolf-dependency.packaging.fedora",
|
||||
/// ],
|
||||
/// "packages.librewolf.compilation": [
|
||||
/// "packages.librewolf.packaging.fedora",
|
||||
/// ],
|
||||
/// }
|
||||
/// ```
|
||||
fn dependency_map(jobs: HashMap<String, Job>, conf: Config) -> HashMap<String, Vec<String>> {
|
||||
let mut dep_map: HashMap<String, Vec<String>> = HashMap::new(); // holds job ids and every job they depend on (recursively) - not just specified dependencies, also packaging depending on compilation
|
||||
|
||||
for (job_id, _) in jobs.clone() {
|
||||
|
@ -409,5 +285,45 @@ impl State {
|
|||
}
|
||||
|
||||
return dep_map;
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns all the dependencies for a package recursively, *not* including the package's own jobs (e.g. compilation)
|
||||
fn recursive_deps_for_package(package_name: String, conf: Config) -> Vec<String> {
|
||||
let mut deps: Vec<String> = Vec::new();
|
||||
|
||||
for dep_name in conf
|
||||
.packages
|
||||
.get(&package_name)
|
||||
.unwrap()
|
||||
.dependencies
|
||||
.clone()
|
||||
{
|
||||
// add recursive dependencies
|
||||
deps.append(&mut recursive_deps_for_package(
|
||||
dep_name.clone(),
|
||||
conf.clone(),
|
||||
));
|
||||
}
|
||||
// add its compilation to deps
|
||||
match conf
|
||||
.packages
|
||||
.get(&package_name)
|
||||
.unwrap()
|
||||
.compilation
|
||||
.clone()
|
||||
{
|
||||
Some(_) => {
|
||||
deps.push(format!("packages.{package_name}.compilation"));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
// add packaging jobs to deps
|
||||
for (packaging_job_name, _) in conf.packages.get(&package_name).unwrap().packaging.clone() {
|
||||
deps.push(format!(
|
||||
"packages.{package_name}.packaging.{packaging_job_name}"
|
||||
))
|
||||
}
|
||||
|
||||
return deps;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
#!/usr/bin/env bash
# Test harness setup: echo commands and abort on first failure.
set -ex

# set env vars for testing
# NOTE(review): values match the test compose/postgres service — confirm against
# the container definition; script continues past this visible fragment.
export GREGORY_DB_ADDRESS=postgres
export GREGORY_DB_USER=gregory
|
Loading…
Add table
Add a link
Reference in a new issue