|
//! A naive JSON-file store that is due to be replaced with a real vector
//! database; this stopgap exists because integrating one proved troublesome.
| 3 | +use std::{ |
| 4 | + collections::HashMap, |
| 5 | + hash::{DefaultHasher, Hash, Hasher as _}, |
| 6 | + path::PathBuf, |
| 7 | +}; |
| 8 | + |
| 9 | +use anyhow::{Result, bail}; |
| 10 | +use bstr::BString; |
| 11 | +use gitbutler_command_context::CommandContext; |
| 12 | +use serde::{Deserialize, Serialize}; |
| 13 | +use tracing::instrument; |
| 14 | + |
/// A single diff hunk together with its embedding vector.
///
/// Identity is the (commit oid, header) pair — see [`Hunk::key`].
#[derive(Serialize, Deserialize, Clone)]
pub struct Hunk {
    /// The sha of the commit
    #[serde(with = "gitbutler_serde::object_id")]
    pub oid: gix::ObjectId,
    /// Header of the hunk within the commit's diff.
    pub header: String,
    /// Path of the file the hunk touches.
    pub path: BString,
    /// Earlier path of the file, when present — presumably set on renames; TODO confirm with producer.
    pub previous_path: Option<BString>,
    /// Embedding vector, compared by cosine similarity in `search_hunks`.
    pub vector: Vec<f32>,
}
| 26 | + |
| 27 | +impl Hunk { |
| 28 | + // We should only ever have one entry per commit & hunk hearder |
| 29 | + pub fn key(&self) -> u64 { |
| 30 | + let mut hasher = DefaultHasher::new(); |
| 31 | + (self.oid, &self.header).hash(&mut hasher); |
| 32 | + hasher.finish() |
| 33 | + } |
| 34 | +} |
| 35 | + |
#[derive(Serialize, Deserialize, Clone)]
/// Used to denote a commit has been processed.
pub struct Commit {
    /// The sha of the commit
    #[serde(with = "gitbutler_serde::object_id")]
    pub oid: gix::ObjectId,
}
| 43 | + |
/// On-disk schema of the whole JSON "database" file.
#[derive(Serialize, Deserialize)]
pub struct Db {
    /// All stored hunks (deduplicated by `Hunk::key` on upsert).
    pub hunks: Vec<Hunk>,
    /// Commits that have been processed (deduplicated by oid on upsert).
    pub commits: Vec<Commit>,
}
| 49 | + |
/// Handle to the JSON file backing the database.
///
/// Holds only the file path; every operation does a full read and/or write
/// of the file.
pub struct DbHandle {
    // Location of the JSON database file inside the project's gb dir.
    path: PathBuf,
}
| 53 | + |
| 54 | +// TODO: Replace with real vector database |
| 55 | +impl DbHandle { |
| 56 | + pub fn new(ctx: &CommandContext) -> Self { |
| 57 | + Self { |
| 58 | + path: ctx.project().gb_dir().join("inspection.json"), |
| 59 | + } |
| 60 | + } |
| 61 | + |
| 62 | + #[instrument(skip_all)] |
| 63 | + pub fn read(&self) -> Result<Db> { |
| 64 | + if std::fs::exists(&self.path)? { |
| 65 | + let content = std::fs::read_to_string(&self.path)?; |
| 66 | + let content: Db = serde_json::from_str(&content)?; |
| 67 | + Ok(content) |
| 68 | + } else { |
| 69 | + Ok(Db { |
| 70 | + hunks: vec![], |
| 71 | + commits: vec![], |
| 72 | + }) |
| 73 | + } |
| 74 | + } |
| 75 | + |
| 76 | + #[instrument(skip_all)] |
| 77 | + fn write(&self, db: &Db) -> Result<()> { |
| 78 | + let content = serde_json::to_string(db)?; |
| 79 | + std::fs::create_dir_all(self.path.parent().unwrap())?; |
| 80 | + std::fs::write(&self.path, content)?; |
| 81 | + Ok(()) |
| 82 | + } |
| 83 | + |
| 84 | + #[instrument(skip_all)] |
| 85 | + pub fn upsert_many_hunks(&self, entries: &[Hunk]) -> Result<Vec<Hunk>> { |
| 86 | + let mut db = self.read()?; |
| 87 | + let mut map = db |
| 88 | + .hunks |
| 89 | + .into_iter() |
| 90 | + .map(|e| (e.key(), e)) |
| 91 | + .collect::<HashMap<u64, Hunk>>(); |
| 92 | + |
| 93 | + for e in entries { |
| 94 | + map.insert(e.key(), e.clone()); |
| 95 | + } |
| 96 | + |
| 97 | + db.hunks = map.into_values().collect::<Vec<_>>(); |
| 98 | + |
| 99 | + self.upsert_many_commits( |
| 100 | + &entries |
| 101 | + .iter() |
| 102 | + .map(|h| Commit { oid: h.oid }) |
| 103 | + .collect::<Vec<Commit>>(), |
| 104 | + )?; |
| 105 | + |
| 106 | + self.write(&db)?; |
| 107 | + |
| 108 | + Ok(db.hunks) |
| 109 | + } |
| 110 | + |
| 111 | + #[instrument(skip_all)] |
| 112 | + pub fn upsert_many_commits(&self, entries: &[Commit]) -> Result<Vec<Commit>> { |
| 113 | + let mut db = self.read()?; |
| 114 | + let mut map = db |
| 115 | + .commits |
| 116 | + .into_iter() |
| 117 | + .map(|e| (e.oid, e)) |
| 118 | + .collect::<HashMap<gix::ObjectId, Commit>>(); |
| 119 | + |
| 120 | + for e in entries { |
| 121 | + map.insert(e.oid, e.clone()); |
| 122 | + } |
| 123 | + |
| 124 | + db.commits = map.into_values().collect::<Vec<_>>(); |
| 125 | + |
| 126 | + self.write(&db)?; |
| 127 | + |
| 128 | + Ok(db.commits) |
| 129 | + } |
| 130 | + |
| 131 | + // TODO: Replace with real vector db search rather than a manual implementation. |
| 132 | + #[instrument(skip_all)] |
| 133 | + pub fn search_hunks(&self, term: Vec<f32>, cutoff: Option<usize>) -> Result<Vec<(Hunk, f32)>> { |
| 134 | + let db = self.read()?; |
| 135 | + |
| 136 | + let mut with_distance = db |
| 137 | + .hunks |
| 138 | + .into_iter() |
| 139 | + .map(|i| { |
| 140 | + let distance = cosine_distance(&i.vector, &term)?; |
| 141 | + Ok((i, distance)) |
| 142 | + }) |
| 143 | + .collect::<Result<Vec<(Hunk, f32)>>>()?; |
| 144 | + |
| 145 | + // Sort decending |
| 146 | + with_distance.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap()); |
| 147 | + |
| 148 | + if let Some(cutoff) = cutoff { |
| 149 | + Ok(with_distance.into_iter().take(cutoff).collect()) |
| 150 | + } else { |
| 151 | + Ok(with_distance) |
| 152 | + } |
| 153 | + } |
| 154 | +} |
| 155 | + |
| 156 | +fn cosine_distance(a: &[f32], b: &[f32]) -> Result<f32> { |
| 157 | + if a.len() != b.len() { |
| 158 | + bail!("Vectors MUST be the same length!") |
| 159 | + }; |
| 160 | + |
| 161 | + let dot: f32 = a.iter().zip(b).map(|(a, b)| a * b).sum(); |
| 162 | + let am: f32 = a.iter().fold(0.0, |acc, a| acc + (a * a)).powf(0.5); |
| 163 | + let bm: f32 = b.iter().fold(0.0, |acc, a| acc + (a * a)).powf(0.5); |
| 164 | + Ok(dot / (am * bm)) |
| 165 | +} |
0 commit comments