use std::{error::Error, process, time::Duration};

use chrono::Local;
use rand::Rng;

use crate::{
    bot::IzzilisBot,
    publish::FediversePublisher,
    selection::{ConsoleSelector, SelectorExt},
};
use futures::StreamExt;
use futures_timer::Delay;
use model::SampleModelExt;

mod bot;
mod config;
mod model;
mod publish;
mod selection;

const CONFIG_PATH: &str = "bot_config.json";

fn main() -> Result<(), Box<dyn Error>> {
    smol::block_on(async {
        // Load the bot configuration; if it cannot be read, write a default
        // config to disk so the user can fill it in, then exit.
        let cfg = match config::Config::from(CONFIG_PATH.to_string()) {
            Ok(cfg) => cfg,
            Err(_) => {
                println!(
                    "Failed reading config at [{}], writing default",
                    CONFIG_PATH
                );
                if let Some(err) = config::Config::default().save(CONFIG_PATH.to_string()) {
                    println!("Failed writing file to {}: {}", CONFIG_PATH, err);
                }
                process::exit(1);
            }
        };

        // Build the GPT sample model, wrap its output stream in the console
        // selector, and cap the stream at 10 samples.
        let mut gpt_model = ConsoleSelector.filter(
            model::GPTSampleModel::new(
                cfg.python_path(),
                cfg.gpt_code_path(),
                vec![
                    "generate_unconditional_samples.py".to_string(),
                    "--model_name".to_string(),
                    cfg.model_name(),
                    "--temperature".to_string(),
                    cfg.temperature(),
                    "--top_k".to_string(),
                    cfg.top_k(),
                    "--nsamples".to_string(),
                    "1".to_string(),
                ],
            )
            .into_stream()
            .take(10),
        );

        // Print each successfully generated sample to stdout.
        while let Some(Ok(sample)) = gpt_model.next().await {
            println!("{}", sample);
        }

        return Ok(());

        // let publisher = FediversePublisher::new(cfg.fediverse_base_url())?;
        // // let publisher = ConsolePublisher::new();
        // let console_selector = ConsoleSelector::new();
        // let mut bot = IzzilisBot::new(gen, publisher, console_selector);
        // bot.generate_samples();
        // let cfg_interval = cfg.interval_seconds();
        // loop {
        //     let wait_seconds =
        //         rand::thread_rng().gen_range(cfg_interval.min()..cfg_interval.max());
        //     let wait_time = Duration::from_secs(wait_seconds);
        //     let now = Local::now();
        //     println!("[{}] Next post is in [{}] seconds", now, wait_seconds);
        //     Delay::new(wait_time).await;
        //     match bot.publish() {
        //         Err(err) => println!("Got error from publish: [{}]; continuing", err),
        //         Ok(()) => println!("publish() call successful"),
        //     }
        // }
    })
}