use std::fs::read_to_string;
use std::io::{BufRead, BufReader, Read, Write};
use std::net::{TcpListener, TcpStream};

// External crate: native-tls, used for the HTTPS connection to the Gemini API.
use native_tls::TlsConnector;

fn main() {
    let listener = TcpListener::bind("127.0.0.1:7878").unwrap();
    for stream in listener.incoming() {
        let mut stream: TcpStream = stream.unwrap();

        // Read the raw HTTP request into a fixed-size buffer and parse out the
        // method and path (`Response` is the project's request-parser type).
        let mut buffer = [0; 1024];
        stream.read(&mut buffer).unwrap();
        let request_text = String::from_utf8_lossy(&buffer);
        let request = Response::new(&request_text);
        println!("{}", request.path());

        // Everything outside /api is answered with the static page h.html.
        if !request.path().starts_with("/api") {
            let file = read_to_string("h.html").unwrap();
            let response = format!(
                "HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nContent-Length: {}\r\n\r\n{}",
                file.len(),
                file
            );
            stream.write_all(response.as_bytes()).unwrap();
            stream.flush().unwrap();
        }

        // GET /api/gemini forwards a request to the Gemini API.
        if request.method().starts_with("GET") && request.path().starts_with("/api/gemini") {
            println!("{}", request.path());
            gemini(stream);
        }
    }
    println!("Hello, world!");
}
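
// Rough usage sketch, under the assumptions made in this file (h.html sits in the
// working directory and the API key is supplied via the GEMINI_API_KEY environment
// variable introduced below):
//
//   GEMINI_API_KEY=... cargo run
//   curl http://127.0.0.1:7878/            -> returns the contents of h.html
//   curl http://127.0.0.1:7878/api/gemini  -> triggers the Gemini call; the reply
//                                             is printed on the server's console
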
fn gemini(_streams: TcpStream) {
    // The client connection is taken by value so the Gemini reply could later be
    // relayed back to the browser; for now the reply is only printed below.

    // JSON body for the generateContent endpoint.
    let payload = r#"{"contents":[{"parts":[{"text":"Explain how AI works"}]}]}"#;

    // Open a TLS connection to the Gemini API host.
    let connector = TlsConnector::new().unwrap();
    let tcp = TcpStream::connect("generativelanguage.googleapis.com:443").unwrap();
    let mut stream = connector
        .connect("generativelanguage.googleapis.com", tcp)
        .unwrap();

    // The API key is assumed to be supplied via the GEMINI_API_KEY environment variable.
    let api_key = std::env::var("GEMINI_API_KEY").unwrap();

    // Build the raw HTTP request. `Connection: close` makes the server close the
    // socket once it has answered, so the read loop below terminates.
    let request = format!(
        "POST /v1beta/models/gemini-1.5-flash-latest:generateContent?key={} HTTP/1.1\r\nHost: generativelanguage.googleapis.com\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
        api_key,
        payload.len(),
        payload
    );
    stream.write_all(request.as_bytes()).unwrap();
    stream.flush().unwrap();

    // Collect the response line by line until the server closes the connection.
    let reader = BufReader::new(stream);
    let mut response_body = String::new();
    for line in reader.lines() {
        let line = line.unwrap();
        response_body.push_str(&line);
    }
    println!("{:?}", response_body);
}
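
// A minimal sketch of the `Response` type used by `main`, in case it is not defined
// elsewhere in the project. This is an assumption: only the constructor plus the
// `method()` and `path()` accessors are used above, so this hypothetical version just
// splits the request line (e.g. "GET /api/gemini HTTP/1.1") on whitespace.
struct Response {
    method: String,
    path: String,
}

impl Response {
    fn new(raw: &str) -> Response {
        let mut parts = raw.split_whitespace();
        Response {
            method: parts.next().unwrap_or("").to_string(),
            path: parts.next().unwrap_or("").to_string(),
        }
    }

    fn method(&self) -> &str {
        &self.method
    }

    fn path(&self) -> &str {
        &self.path
    }
}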