diff --git a/Cargo.lock b/Cargo.lock
index 3a38daf..e009b27 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -29,6 +29,21 @@ version = "1.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d67c60c5f10f11c6ee04de72b2dd98bb9d2548cbc314d22a609bfa8bd9e87e8f"
 
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "anstream"
 version = "0.6.18"
@@ -145,6 +160,12 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "bumpalo"
+version = "3.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
+
 [[package]]
 name = "byteorder"
 version = "1.5.0"
@@ -157,12 +178,35 @@ version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
 
+[[package]]
+name = "cc"
+version = "1.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229"
+dependencies = [
+ "shlex",
+]
+
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "chrono"
+version = "0.4.39"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
+dependencies = [
+ "android-tzdata",
+ "iana-time-zone",
+ "js-sys",
+ "num-traits",
+ "wasm-bindgen",
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "clap"
 version = "4.5.27"
@@ -233,6 +277,12 @@ version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
 
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
 [[package]]
 name = "cpufeatures"
 version = "0.2.16"
@@ -506,6 +556,7 @@ version = "0.1.1"
 dependencies = [
  "alphanumeric-sort",
  "better-commands",
+ "chrono",
  "clap",
  "clap_complete",
  "serde",
@@ -575,6 +626,29 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "iana-time-zone"
+version = "0.1.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "wasm-bindgen",
+ "windows-core",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+dependencies = [
+ "cc",
+]
+
 [[package]]
 name = "icu_collections"
 version = "1.5.0"
@@ -736,6 +810,16 @@ version = "1.0.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
 
+[[package]]
+name = "js-sys"
+version = "0.3.77"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.5.0"
@@ -1081,6 +1165,12 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "rustversion"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
+
 [[package]]
 name = "ryu"
 version = "1.0.18"
@@ -1168,6 +1258,12 @@ dependencies = [
  "digest",
 ]
 
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
 [[package]]
 name = "signal-hook-registry"
 version = "1.4.2"
@@ -1733,6 +1829,64 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
 
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
+dependencies = [
+ "bumpalo",
+ "log",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
+dependencies = [
+ "unicode-ident",
+]
+
 [[package]]
 name = "whoami"
 version = "1.5.2"
@@ -1743,6 +1897,15 @@ dependencies = [
  "wasite",
 ]
 
+[[package]]
+name = "windows-core"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
 [[package]]
 name = "windows-sys"
 version = "0.48.0"
diff --git a/Cargo.toml b/Cargo.toml
index 2baf5ac..a91d93a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,6 +9,7 @@ readme = "README.md"
 [dependencies]
 alphanumeric-sort = "1.5.3"
 better-commands = "1.0.2"
+chrono = "0.4.39"
 clap = { version = "4.5.23", features = ["derive"] }
 clap_complete = "4.5.40"
 serde = { version = "1.0.216", features = ["derive"] }
diff --git a/docs/database.md b/docs/database.md
new file mode 100644
index 0000000..e15a59f
--- /dev/null
+++ b/docs/database.md
@@ -0,0 +1,31 @@
+# Database docs
+
+Gregory's database is described as follows:
+
+```sql
+CREATE TABLE IF NOT EXISTS job_logs (
+    start_time   timestamp,
+    end_time    timestamp,
+    duration    interval GENERATED ALWAYS AS (end_time - start_time) STORED,
+    exit_code    smallint,
+    job_id     text,
+    revision    text,
+    uuid      text,
+    container_name  text GENERATED ALWAYS AS (job_id || '-' || uuid) STORED,
+    log_path        text
+);
+```
+
+i.e. it uses the table `job_logs`, containing the following fields:
+
+| Field          | Type      | Notes                                |
+| -------------- | --------- | ------------------------------------ |
+| start_time     | timestamp |                                      |
+| end_time       | timestamp |                                      |
+| duration       | interval  | generated as `end_time - start_time` |
+| exit_code      | smallint  |                                      |
+| job_id         | text      |                                      |
+| revision       | text      |                                      |
+| uuid           | text      |                                      |
+| container_name | text      | generated from `job_id` and `uuid`   |
+| log_path       | text      |                                      |
+
+---
+
+`duration` and `container_name` don't have to be inserted, since the database generates them, so an insert only supplies the remaining fields:
+
+```sql
+INSERT INTO job_logs (start_time, end_time, exit_code, job_id, revision, uuid, log_path)
+    VALUES ('1970-01-01 10:10:10', '1970-01-01 10:11:10', 1, 'packages.librewolf.compilation', '5', 'blahblahblahblah', './data/logs/packages.librewolf.compilation/5/blahblahblahblah');
+```
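+
+Since `duration` and `container_name` are generated columns, they can be queried like any other field. As a sketch (hypothetical query, not from the codebase):
+
+```sql
+SELECT job_id, container_name, duration
+FROM job_logs
+WHERE exit_code <> 0
+ORDER BY start_time DESC;
+```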
diff --git a/gregory.example.toml b/gregory.example.toml
index bf55cd4..4c28e68 100644
--- a/gregory.example.toml
+++ b/gregory.example.toml
@@ -12,7 +12,7 @@ max-threads = 10
     revision = "2"
     threads = 6
     image = "docker.io/library/debian"
-    commands = ["echo hi", "echo helloooooooooo"]
+    commands = ["echo hi", "sleep 2.432", "echo helloooooooooo"]
     volumes = ["librewolf"]
 
     [packages.librewolf.packaging.fedora]
diff --git a/src/data.rs b/src/data.rs
index b5cf0a1..fc52ac3 100644
--- a/src/data.rs
+++ b/src/data.rs
@@ -32,6 +32,22 @@ pub(crate) struct Config {
     pub(crate) volumes: HashMap<String, String>,
 }
 
+impl Config {
+    pub(crate) fn from_file(filename: String) -> Result<Config, Error> {
+        match fs::read_to_string(filename) {
+            Ok(raw_data) => match toml::from_str(raw_data.as_str()) {
+                Ok(conf) => return Ok(conf),
+                Err(e) => {
+                    return Err(Error::DeserError(e));
+                }
+            },
+            Err(e) => {
+                return Err(Error::IOError(e));
+            }
+        }
+    }
+}
+
 /// Holds the data for a job
 #[derive(Debug, Clone, Deserialize)]
 pub(crate) struct Job {
@@ -89,7 +105,6 @@ pub(crate) struct JobExitStatus {
     /// Where the log is
     ///
     /// TEMPORARY
-    /// TODO: Have main() handle logs and writing them to the database, not doing it in run_job()
     pub(crate) log_path: String,
     /// How long it took to run the job
     pub(crate) duration: time::Duration,
@@ -97,20 +112,6 @@ pub(crate) struct JobExitStatus {
     pub(crate) container_name: String,
 }
 
-pub(crate) fn config_from_file(filename: String) -> Result<Config, Error> {
-    match fs::read_to_string(filename) {
-        Ok(raw_data) => match toml::from_str(raw_data.as_str()) {
-            Ok(conf) => return Ok(conf),
-            Err(e) => {
-                return Err(Error::DeserError(e));
-            }
-        },
-        Err(e) => {
-            return Err(Error::IOError(e));
-        }
-    }
-}
-
 // ==========================
 // ===                    ===
 // ===    ↓ DEFAULTS ↓    ===
diff --git a/src/errors.rs b/src/errors.rs
index cd638e1..72ba332 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -6,6 +6,4 @@ pub enum Error {
     IOError(#[from] std::io::Error),
     #[error("error while deserializing TOML: {0}")]
     DeserError(#[from] toml::de::Error),
-    #[error("Error connecting to database: {0}")]
-    DbConnectionError(String),
 }
diff --git a/src/logging.rs b/src/logging.rs
index fbf57b7..72d5083 100644
--- a/src/logging.rs
+++ b/src/logging.rs
@@ -1,29 +1,51 @@
+use uuid::Uuid;
+
 use crate::errors::Error;
-use std::fs::{File, OpenOptions};
+use std::fs::{create_dir_all, File, OpenOptions};
 use std::io::Write;
 use std::path::Path;
 use std::time::Instant;
 
 /// Logging for a [`Job`]
+// TODO: log to postgres directly instead (see the TODO on sql::log_job)
 pub(crate) struct JobLogger {
     log_file: File,
+    path: String,
 }
 
 impl JobLogger {
-    pub(crate) fn new(path: String) -> Result<JobLogger, Error> {
-        match OpenOptions::new().create_new(true).append(true).open(path) {
-            Ok(f) => return Ok(JobLogger { log_file: f }),
-            Err(e) => {
-                return Err(Error::IOError(e));
-            }
-        }
+    pub(crate) fn new(
+        data_dir: String,
+        job_id: String,
+        revision: String,
+        run_id: Uuid,
+    ) -> JobLogger {
+        // get path and create the dir.
+        let log_path = format!("{data_dir}/logs/{job_id}/{revision}/{run_id}");
+        let log_dir = Path::new(&log_path).parent().unwrap();
+        create_dir_all(log_dir).unwrap();
+
+        return JobLogger {
+            log_file: OpenOptions::new()
+                .create_new(true)
+                .append(true)
+                .open(&log_path)
+                .unwrap(),
+            path: log_path,
+        };
     }
 
     /// Log something printed to stdout
     ///
     /// Fun gregory lore: I originally typo'd this as "Strign" and the linter didn't catch it for some reason
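+    ///
+    /// Each line is prefixed with the seconds elapsed since `start_time`, e.g. `[1.234] [stdout] hello`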
-    pub(crate) fn stdout(&mut self, text: String) -> Result<(), Error> {
-        match writeln!(&mut self.log_file, "[stdout] {}", text) {
+    pub(crate) fn stdout(&mut self, text: String, start_time: Instant) -> Result<(), Error> {
+        match writeln!(
+            &mut self.log_file,
+            "[{:.3}] [stdout] {}",
+            start_time.elapsed().as_millis() as f64 / 1000.0,
+            text
+        ) {
             Ok(_) => return Ok(()),
             Err(e) => {
                 return Err(Error::IOError(e));
@@ -32,12 +54,150 @@ impl JobLogger {
     }
 
     /// Log something printed to stderr
-    pub(crate) fn stderr(&mut self, text: String) -> Result<(), Error> {
-        match writeln!(&mut self.log_file, "[stderr] {}", text) {
+    pub(crate) fn stderr(&mut self, text: String, start_time: Instant) -> Result<(), Error> {
+        match writeln!(
+            &mut self.log_file,
+            "[{}] [stderr] {}",
+            start_time.elapsed().as_millis() / 1000,
+            text
+        ) {
             Ok(_) => return Ok(()),
             Err(e) => {
                 return Err(Error::IOError(e));
             }
         }
     }
+
+    /// Returns the path the job's output was logged to
+    pub(crate) fn path(&self) -> String {
+        return self.path.clone();
+    }
+}
+
+pub(crate) mod sql {
+    use sqlx::{Connection, PgConnection};
+    use std::env;
+
+    /// Returns a new connection to postgres
+    ///
+    /// *x*: how many times to retry the connection before panicking
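+    ///
+    /// Example (a sketch; assumes the `GREGORY_DB_*` environment variables are set):
+    /// ```ignore
+    /// let mut conn = sql::start(5).await;
+    /// sqlx::query("SELECT 1").execute(conn.as_mut()).await.unwrap();
+    /// ```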
+    pub(crate) async fn start(x: u16) -> Box<PgConnection> {
+        let mut conn = Box::new(db_connect_with_retries(x).await);
+        create_tables(&mut conn).await;
+        return conn;
+    }
+
+    /// Returns the database environment variables
+    ///
+    /// Format: (address, username, password)
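+    ///
+    /// ```ignore
+    /// let (db_address, db_user, db_pass) = sql::db_vars();
+    /// ```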
+    pub(crate) fn db_vars() -> (String, String, String) {
+        let db_address: String = match env::var("GREGORY_DB_ADDRESS") {
+            Ok(address) => address,
+            Err(_) => {
+                panic!("Environment variable `GREGORY_DB_ADDRESS` not set")
+            }
+        };
+        let db_user: String = match env::var("GREGORY_DB_USER") {
+            Ok(user) => user,
+            Err(_) => {
+                panic!("Environment variable `GREGORY_DB_USER` not set")
+            }
+        };
+        let db_pass: String = match env::var("GREGORY_DB_PASSWORD") {
+            Ok(pass) => pass,
+            Err(_) => {
+                panic!("Environment variable `GREGORY_DB_PASSWORD` not set")
+            }
+        };
+
+        return (db_address, db_user, db_pass);
+    }
+
+    /// Returns the connection to the database
+    pub(crate) async fn db_connection() -> Result<PgConnection, sqlx::Error> {
+        let (db_address, db_user, db_pass) = db_vars();
+        let uri = format!("postgres://{db_user}:{db_pass}@{db_address}/gregory");
+        return PgConnection::connect(uri.as_str()).await;
+    }
+
+    /// Connects to the database, retrying up to *x* more times if the first attempt fails, and panicking once the limit is reached
+    pub(crate) async fn db_connect_with_retries(x: u16) -> PgConnection {
+        let mut conn = db_connection().await;
+        if conn.is_ok() {
+            return conn.unwrap();
+        }
+
+        for _ in 0..x {
+            conn = db_connection().await;
+            if conn.is_ok() {
+                break;
+            }
+        }
+
+        return conn.unwrap();
+    }
+
+    // TODO: when adding logging to postgres directly, update this so it 1) adds the job at the start, 2) logs line-by-line, and 3) adds the end time and exit code at the end of the job
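+    /// Logs a finished job to the `job_logs` table
+    ///
+    /// Example (a sketch with hypothetical values):
+    /// ```ignore
+    /// sql::log_job(conn, started_at, Utc::now(), Some(0), job_id, revision, uuid.to_string(), log_path).await;
+    /// ```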
+    pub(crate) async fn log_job(
+        mut conn: Box<PgConnection>,
+        start_time: chrono::DateTime<chrono::Utc>,
+        end_time: chrono::DateTime<chrono::Utc>,
+        exit_code: Option<i32>,
+        job_id: String,
+        revision: String,
+        uuid: String,
+        log_path: String,
+    ) {
+        // the timestamps are taken as `chrono::DateTime<Utc>` (e.g. from
+        // `chrono::Utc::now()`) because an `Instant` is an opaque monotonic
+        // point in time and can't be converted to a wall-clock timestamp
+        let start_time = start_time.format("%+").to_string();
+        let end_time = end_time.format("%+").to_string();
+        // bind the values rather than format!()-ing them into the query, to
+        // avoid quoting bugs and SQL injection
+        sqlx::query(
+            "INSERT INTO job_logs (start_time, end_time, exit_code, job_id, revision, uuid, log_path)
+                VALUES ($1::timestamp, $2::timestamp, $3, $4, $5, $6, $7)",
+        )
+        .bind(start_time)
+        .bind(end_time)
+        .bind(exit_code.map(|code| code as i16)) // smallint column
+        .bind(job_id)
+        .bind(revision)
+        .bind(uuid)
+        .bind(log_path)
+        .execute(conn.as_mut())
+        .await
+        .unwrap();
+    }
+
+
+    /// Creates table(s) for gregory if they don't exist already
+    pub(crate) async fn create_tables(conn: &mut Box<PgConnection>) {
+        sqlx::query(
+            "CREATE TABLE IF NOT EXISTS job_logs (
+    start_time   timestamp,
+    end_time    timestamp,
+    duration    interval GENERATED ALWAYS AS (end_time - start_time) STORED,
+    exit_code    smallint,
+    job_id     text,
+    revision    text,
+    uuid      text,
+    container_name  text GENERATED ALWAYS AS (job_id || '-' || uuid) STORED,
+    log_path        text
+);
+",
+        )
+        .execute(conn.as_mut())
+        .await
+        .unwrap();
+    }
+}
+
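+// NOTE: this test expects the GREGORY_DB_* environment variables to be set (e.g. in CI)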
+#[test]
+pub(crate) fn test_db_vars() {
+    assert_eq!(
+        (
+            "postgres".to_string(),
+            "gregory".to_string(),
+            "pass".to_string()
+        ),
+        sql::db_vars()
+    )
 }
diff --git a/src/main.rs b/src/main.rs
index 9439064..f18d1f1 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,6 +3,8 @@ use crate::data::*;
 use better_commands;
 use clap::{CommandFactory, Parser};
 use clap_complete::aot::{generate, Bash, Elvish, Fish, PowerShell, Zsh};
+use logging::sql;
+use sqlx::PgConnection;
 use std::collections::HashMap;
 use std::fs::create_dir_all;
 use std::fs::remove_dir_all;
@@ -14,6 +16,7 @@ use std::path::Path;
 use std::process::Command;
 use std::sync::Arc;
 use std::sync::Mutex;
+use std::time::Instant;
 use uuid::Uuid;
 
 mod cli;
@@ -22,7 +25,8 @@ mod errors;
 mod logging;
 mod tests;
 
-fn main() {
+#[tokio::main]
+async fn main() {
     let cli = Cli::parse();
 
     match cli.command {
@@ -44,37 +48,18 @@ fn main() {
             }
         },
         Commands::Run { config } => {
-            run(config);
+            run(config).await;
         }
     }
 }
 
-fn run(config_path: String) {
-    let config = config_from_file(config_path).unwrap(); // this reads the file to a [`Config`] thing
-
-    let mut jobs: HashMap<String, Job> = HashMap::new();
-
-    // arranges all the jobs by their job id (e.g. `packages.librewolf.compilation`)
-    for (package_name, package) in config.clone().packages {
-        match package.compilation {
-            Some(tmp) => {
-                jobs.insert(format!("packages.{}.compilation", package_name), tmp);
-            }
-            None => {}
-        }
-
-        for (job_name, job) in package.packaging {
-            jobs.insert(
-                format!("packages.{}.packaging.{}", package_name, job_name),
-                job,
-            );
-        }
-    }
+async fn run(config_path: String) {
+    let state = State::from_file(config_path).await; // reads the config file into a [`State`]
 
     // TODO: improve efficiency of all this logic
     // TODO: Also clean it up and split it into different functions, especially the job sorter
     // TODO: figure all this out and stuff and update the comments above this - the dependency map is done though
-    let dep_map = dependency_map(jobs.clone(), config.clone());
 
     let mut ordered: Vec<String> = Vec::new(); // holds the job ids in order of how they should be run
 
@@ -88,38 +73,36 @@ fn run(config_path: String) {
     let failed_packages: Vec<String> = Vec::new();
 
     // runs the jobs (will need to be updated after sorting is added)
-    for (job_id, job) in jobs {
-        let job_exit_status = run_job(config.clone(), job_id, job);
+    for (job_id, job) in state.jobs {
+        let job_exit_status = run_job(&state.conf, job_id, job);
         println!("{:#?}", job_exit_status);
     }
 }
 
-fn run_job(conf: Config, job_id: String, job: Job) -> JobExitStatus {
+fn run_job(conf: &Config, job_id: String, job: Job) -> JobExitStatus {
     // limit threads to max_threads in the config
     let mut threads = job.threads;
     if job.threads > conf.max_threads {
         threads = conf.max_threads;
     }
 
-    let container_name: String = format!("gregory-{}-{}-{}", job_id, job.revision, Uuid::now_v7());
+    let run_id = Uuid::now_v7();
 
-    // do job log setup
-    let log_path = &format!("{}/logs/{container_name}", conf.data_dir); // can't select fields in the format!() {} thing, have to do this
-    let log_dir: &Path = Path::new(log_path).parent().unwrap();
-    create_dir_all(log_dir).unwrap();
-
-    let job_logger = Arc::new(Mutex::new(
-        logging::JobLogger::new(log_path.clone()).unwrap(),
-    ));
+    let job_logger = Arc::new(Mutex::new(logging::JobLogger::new(
+        conf.data_dir.clone(),
+        job_id.clone(),
+        job.revision.clone(),
+        run_id,
+    )));
 
     // write the script
-    let script_path = &format!("{}/tmp/{container_name}.sh", conf.data_dir);
-    let script_dir: &Path = Path::new(script_path).parent().unwrap(); // create dir for the script
+    let script_path: String = format!("{}/tmp/{}.sh", conf.data_dir, run_id);
+    let script_dir = Path::new(&script_path).parent().unwrap(); // create dir for the script
     create_dir_all(script_dir).unwrap();
-    write(script_path, job.commands.join("\n")).unwrap();
+    write(&script_path, job.commands.join("\n")).unwrap();
 
     // set permissions - *unix specific*
-    let mut perms = File::open(script_path)
+    let mut perms = File::open(&script_path)
         .unwrap()
         .metadata()
         .unwrap()
@@ -129,7 +112,8 @@ fn run_job(conf: Config, job_id: String, job: Job) -> JobExitStatus {
     // run the job
     let mut cmd_args: Vec<String> = vec![
         "run".to_string(),
-        format!("--name={container_name}"),
+        "--rm".to_string(),
+        format!("--name={job_id}-{run_id}"),
         format!("--cpus={threads}"),
         format!("--privileged={}", job.privileged),
         format!("-v={script_path}:/gregory-entrypoint.sh"),
@@ -151,18 +135,26 @@ fn run_job(conf: Config, job_id: String, job: Job) -> JobExitStatus {
+    // one start instant shared by both stream handlers so their timestamps line up
+    let start_time = Instant::now();
     let cmd_output = better_commands::run_funcs(
         Command::new("podman").args(cmd_args),
         {
             let logger_clone = Arc::clone(&job_logger);
             move |stdout_lines| {
                 for line in stdout_lines {
-                    let _ = logger_clone.lock().unwrap().stdout(line.unwrap());
+                    let _ = logger_clone
+                        .lock()
+                        .unwrap()
+                        .stdout(line.unwrap(), start_time);
                 }
             }
         },
         {
             let logger_clone = Arc::clone(&job_logger);
             move |stderr_lines| {
                 for line in stderr_lines {
-                    let _ = logger_clone.lock().unwrap().stderr(line.unwrap());
+                    let _ = logger_clone
+                        .lock()
+                        .unwrap()
+                        .stderr(line.unwrap(), start_time);
                 }
             }
         },
@@ -171,12 +163,16 @@ fn run_job(conf: Config, job_id: String, job: Job) -> JobExitStatus {
     // remove tmp dir/clean up
     remove_dir_all(script_dir).unwrap();
 
+    let log_path = job_logger.lock().unwrap().path();
+
+    // TODO: push the job result to the database here (via logging::sql::log_job)
+
     return JobExitStatus {
-        container_name: container_name,
+        container_name: format!("{job_id}-{run_id}"),
         duration: cmd_output.clone().duration(),
-        job: job,
+        job,
         exit_code: cmd_output.status_code(),
-        log_path: log_path.clone(),
+        log_path,
     };
 }
 
@@ -220,73 +216,6 @@ fn order_jobs(jobs: HashMap<String, Job>, conf: Config) {
     */
 }
 
-/// Returns a hashmap mapping all job ids to what jobs depend on them (recursively)
-///
-/// Example output using the example toml:
-///
-/// ```json
-/// {
-///     "packages.some-librewolf-dependency.packaging.fedora": [
-///         "packages.librewolf.compilation",
-///         "packages.librewolf.packaging.fedora",
-///     ],
-///     "packages.some-librewolf-dependency.compilation": [
-///         "packages.librewolf.compilation",
-///         "packages.librewolf.packaging.fedora",
-///         "packages.some-librewolf-dependency.packaging.fedora",
-///     ],
-///     "packages.librewolf.compilation": [
-///         "packages.librewolf.packaging.fedora",
-///     ],
-/// }
-/// ```
-fn dependency_map(jobs: HashMap<String, Job>, conf: Config) -> HashMap<String, Vec<String>> {
-    let mut dep_map: HashMap<String, Vec<String>> = HashMap::new(); // holds job ids and every job they depend on (recursively) - not just specified dependencies, also packaging depending on compilation
-
-    for (job_id, _) in jobs.clone() {
-        let (_, package_name, _) = jod_id_to_metadata(job_id.clone());
-
-        for dep_name in conf
-            .packages
-            .get(&package_name)
-            .unwrap()
-            .dependencies
-            .clone()
-        {
-            let all_deps = recursive_deps_for_package(dep_name.clone(), conf.clone());
-            for dep in all_deps {
-                if !dep_map.contains_key(&dep) {
-                    dep_map.insert(dep.clone(), Vec::new());
-                }
-                dep_map.get_mut(&dep).unwrap().push(job_id.clone());
-            }
-        }
-    }
-
-    // add compilation jobs when relevant
-    for (package_name, package) in conf.packages {
-        if package.compilation.is_some() {
-            if !dep_map.contains_key(&format!("packages.{package_name}.compilation")) {
-                dep_map.insert(format!("packages.{package_name}.compilation"), Vec::new());
-            }
-
-            for (job_name, _) in package.packaging {
-                dep_map
-                    .get_mut(&format!("packages.{package_name}.compilation"))
-                    .unwrap()
-                    .push(format!("packages.{package_name}.packaging.{job_name}"));
-            }
-        }
-    }
-
-    // deduplicate dependencies
-    for (_, deps) in dep_map.iter_mut() {
-        deps.dedup();
-    }
-
-    return dep_map;
-}
-
 /// Returns all the dependencies for a package recursively, *not* including the package's own jobs (e.g. compilation)
 fn recursive_deps_for_package(package_name: String, conf: Config) -> Vec<String> {
     let mut deps: Vec<String> = Vec::new();
@@ -327,3 +256,139 @@ fn recursive_deps_for_package(package_name: String, conf: Config) -> Vec<String>
 
     return deps;
 }
+
+struct State {
+    /// The entire config, from the config file.
+    conf: Config,
+    /// A hashmap mapping all job ids to what jobs depend on them (recursively)
+    ///
+    /// See [`State::dependency_map()`] for an example built from `gregory.example.toml`.
+    dependency_map: HashMap<String, Vec<String>>,
+    /// A hashmap mapping all job ids to their jobs
+    jobs: HashMap<String, Job>,
+    /// The connection to the database
+    ///
+    /// Example (from sqlx README, modified)
+    /// ```ignore
+    /// sqlx::query("DELETE FROM table").execute(&mut state.conn).await?;
+    /// ```
+    sql: Box<PgConnection>,
+}
+
+impl State {
+    pub(crate) async fn from_file(filename: String) -> State {
+        let conf = Config::from_file(filename).unwrap();
+        return State::from_config(conf).await;
+    }
+
+    pub(crate) async fn from_config(conf: Config) -> State {
+        let mut jobs = HashMap::new();
+
+        for (package_name, package) in conf.clone().packages {
+            match package.compilation {
+                Some(tmp) => {
+                    jobs.insert(format!("packages.{}.compilation", package_name), tmp);
+                }
+                None => {}
+            }
+
+            for (job_name, job) in package.packaging {
+                jobs.insert(
+                    format!("packages.{}.packaging.{}", package_name, job_name),
+                    job,
+                );
+            }
+        }
+
+        return State {
+            conf: conf.clone(),
+            jobs: jobs.clone(),
+            dependency_map: State::dependency_map(jobs, conf),
+            sql: sql::start(5).await,
+        };
+    }
+
+    /// Returns a hashmap mapping all job ids to what jobs depend on them (recursively)
+    ///
+    /// Example output using the example toml:
+    ///
+    /// ```json
+    /// {
+    ///     "packages.some-librewolf-dependency.packaging.fedora": [
+    ///         "packages.librewolf.compilation",
+    ///         "packages.librewolf.packaging.fedora",
+    ///     ],
+    ///     "packages.some-librewolf-dependency.compilation": [
+    ///         "packages.librewolf.compilation",
+    ///         "packages.librewolf.packaging.fedora",
+    ///         "packages.some-librewolf-dependency.packaging.fedora",
+    ///     ],
+    ///     "packages.librewolf.compilation": [
+    ///         "packages.librewolf.packaging.fedora",
+    ///     ],
+    /// }
+    /// ```
+    fn dependency_map(jobs: HashMap<String, Job>, conf: Config) -> HashMap<String, Vec<String>> {
+        let mut dep_map: HashMap<String, Vec<String>> = HashMap::new(); // holds job ids and every job they depend on (recursively) - not just specified dependencies, also packaging depending on compilation
+
+        for (job_id, _) in jobs.clone() {
+            let (_, package_name, _) = jod_id_to_metadata(job_id.clone());
+
+            for dep_name in conf
+                .packages
+                .get(&package_name)
+                .unwrap()
+                .dependencies
+                .clone()
+            {
+                let all_deps = recursive_deps_for_package(dep_name.clone(), conf.clone());
+                for dep in all_deps {
+                    if !dep_map.contains_key(&dep) {
+                        dep_map.insert(dep.clone(), Vec::new());
+                    }
+                    dep_map.get_mut(&dep).unwrap().push(job_id.clone());
+                }
+            }
+        }
+
+        // add compilation jobs when relevant
+        for (package_name, package) in conf.packages {
+            if package.compilation.is_some() {
+                if !dep_map.contains_key(&format!("packages.{package_name}.compilation")) {
+                    dep_map.insert(format!("packages.{package_name}.compilation"), Vec::new());
+                }
+
+                for (job_name, _) in package.packaging {
+                    dep_map
+                        .get_mut(&format!("packages.{package_name}.compilation"))
+                        .unwrap()
+                        .push(format!("packages.{package_name}.packaging.{job_name}"));
+                }
+            }
+        }
+
+        // deduplicate dependencies (Vec::dedup only drops *consecutive*
+        // duplicates, so sort first)
+        for (_, deps) in dep_map.iter_mut() {
+            deps.sort();
+            deps.dedup();
+        }
+
+        return dep_map;
+    }
+}