Commit: Merge branch 'master' into feat/optional-git-owner
timonv authored Jan 23, 2025
2 parents bd077c7 + 5bc0687 commit 22d39f4

Showing 10 changed files with 464 additions and 90 deletions.
108 changes: 102 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -85,6 +85,7 @@ tempfile = { version = "3.15.0" }
 copypasta = "0.10.1"
 strip-ansi-escapes = "0.2.1"
+inquire = "0.7.5"
 
 # Something is still pulling in libssl, this is a quickfix and should be investigated
 [target.'cfg(linux)'.dependencies]
@@ -97,6 +98,7 @@ assert_cmd = "2.0.16"
 predicates = "3.1.3"
 swiftide-core = { version = "0.17.1", features = ["test-utils"] }
 mockall = "0.13.1"
+rexpect = "0.6.0"
 
 
 [lints.rust]
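
The two new crates line up with the rest of the diff: inquire presumably backs the interactive prompts in the init onboarding flow, while rexpect is a dev-dependency for driving that flow in a pseudo-terminal from integration tests. As a rough illustration only (the test below is not part of the commit; the binary invocation, prompt text, and answers are assumptions):

// Not part of this commit: a hedged sketch of how the new rexpect dev-dependency
// could exercise the interactive init flow end to end. The binary invocation,
// prompt text, and answers are assumptions for illustration.
use rexpect::spawn;

#[test]
fn init_dry_run_walkthrough() -> Result<(), Box<dyn std::error::Error>> {
    // Spawn the CLI in a pseudo-terminal with a 10 second timeout.
    let mut session = spawn("cargo run -- init --dry-run", Some(10_000))?;

    // Answer a hypothetical onboarding prompt; the real wording lives in the
    // onboarding module, which is not rendered in this view.
    session.exp_string("project")?;
    session.send_line("demo")?;

    // With --dry-run the config is expected to be printed rather than written.
    session.exp_eof()?;
    Ok(())
}
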
5 changes: 4 additions & 1 deletion src/cli.rs
@@ -21,7 +21,10 @@ pub struct Args {
 #[derive(Subcommand, Debug, Clone, Default)]
 pub enum Commands {
     /// Initializes a new kwaak project in the current directory
-    Init,
+    Init {
+        #[arg(long, default_value_t = false)]
+        dry_run: bool,
+    },
     /// Start the TUI (default)
     #[default]
     Tui,
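
Clap derives the flag from the variant above, so init keeps working without arguments and gains an opt-in --dry-run. A self-contained sketch (not the project's actual cli.rs, which has more fields and commands) of how the new variant parses:

// Reduced stand-in for the project's Args/Commands types, for illustration only.
use clap::{Parser, Subcommand};

#[derive(Parser, Debug)]
struct Args {
    #[command(subcommand)]
    command: Option<Commands>,
}

#[derive(Subcommand, Debug, Clone)]
enum Commands {
    /// Initializes a new kwaak project in the current directory
    Init {
        #[arg(long, default_value_t = false)]
        dry_run: bool,
    },
    /// Start the TUI (default)
    Tui,
}

fn main() {
    // `kwaak init --dry-run` sets the flag; plain `kwaak init` leaves it false.
    let args = Args::try_parse_from(["kwaak", "init", "--dry-run"]).unwrap();
    assert!(matches!(
        args.command,
        Some(Commands::Init { dry_run: true })
    ));
}
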
13 changes: 12 additions & 1 deletion src/config/llm_configuration.rs
@@ -24,8 +24,17 @@ pub struct LLMConfigurations {
 // Custom deserialize for LLMConfigurations so it gives better errors (i.e. on partial match llm
 // configuration or missing 'query' from multiple)
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(
+    Debug,
+    Clone,
+    Deserialize,
+    Serialize,
+    strum_macros::EnumString,
+    strum_macros::VariantNames,
+    strum_macros::Display,
+)]
 #[serde(tag = "provider")]
+#[strum(ascii_case_insensitive)]
 pub enum LLMConfiguration {
     OpenAI {
         api_key: Option<ApiKey>,
@@ -95,6 +104,7 @@ impl LLMConfiguration {
     PartialEq,
     strum_macros::EnumString,
     strum_macros::Display,
+    strum_macros::VariantNames,
     Default,
 )]
 pub enum OpenAIPromptModel {
@@ -114,6 +124,7 @@ pub enum OpenAIPromptModel {
     Serialize,
     strum_macros::EnumString,
     strum_macros::Display,
+    strum_macros::VariantNames,
     PartialEq,
     Default,
 )]
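
The added strum derives give these configuration enums a string round-trip: VariantNames exposes the list of provider/model names, Display prints them, and EnumString with ascii_case_insensitive parses user input back. A reduced sketch of what that enables (the stand-in enum below is not the real LLMConfiguration, which carries fields per provider, and the Ollama variant is assumed):

use std::str::FromStr;
use strum::VariantNames;
use strum_macros::{Display, EnumString, VariantNames};

// Stand-in for LLMConfiguration; the real variants carry api_key, model, etc.
#[derive(Debug, Display, EnumString, VariantNames, PartialEq)]
#[strum(ascii_case_insensitive)]
enum Provider {
    OpenAI,
    Ollama, // assumed second variant for illustration
}

fn main() {
    // VariantNames yields the provider names, e.g. to feed an inquire::Select
    // prompt during `kwaak init`.
    for name in Provider::VARIANTS {
        println!("available provider: {name}");
    }

    // ascii_case_insensitive lets lowercased user input resolve to a variant,
    // and Display turns it back into the canonical name.
    let parsed = Provider::from_str("openai").unwrap();
    assert_eq!(parsed, Provider::OpenAI);
    assert_eq!(parsed.to_string(), "OpenAI");
}
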
6 changes: 3 additions & 3 deletions src/main.rs
@@ -39,8 +39,8 @@ async fn main() -> Result<()> {
     let args = cli::Args::parse();
 
     // Handle the `init` command immediately after parsing args
-    if let Some(cli::Commands::Init) = args.command {
-        if let Err(error) = onboarding::run() {
+    if let Some(cli::Commands::Init { dry_run }) = args.command {
+        if let Err(error) = onboarding::run(dry_run) {
             eprintln!("Error: {error}");
             std::process::exit(1);
         }
@@ -99,7 +99,7 @@ async fn main() -> Result<()> {
                 println!("{}", toml::to_string_pretty(repository.config())?);
                 Ok(())
             }
-            cli::Commands::Init => unreachable!(),
+            cli::Commands::Init { .. } => unreachable!(),
         }
     };
 
The remaining changed files in this commit are not rendered in this view.
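Among those unrendered files is the onboarding module, which main.rs now calls with the new flag. Purely as a hypothetical sketch of the shape of that call (the implementation, error type, config file name, and output are assumptions, not taken from this commit):

use anyhow::Result;

// Hypothetical signature matching the call site in main.rs; assumes dry_run
// prints the generated configuration instead of writing it to disk.
pub fn run(dry_run: bool) -> Result<()> {
    let config = "# generated kwaak.toml (placeholder contents)".to_string();

    if dry_run {
        // Show what would be written, leave the repository untouched.
        println!("{config}");
    } else {
        std::fs::write("kwaak.toml", &config)?;
        println!("Wrote kwaak.toml");
    }
    Ok(())
}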
