From e1fcd95be33f379465c5c56db72d76c90a980d9e Mon Sep 17 00:00:00 2001 From: Paul Masurel Date: Thu, 7 Apr 2016 10:23:35 +0900 Subject: [PATCH] switched for iron --- Cargo.toml | 6 +++ src/main.rs | 126 +++++++++++++++++++++++++++++++++++----------------- 2 files changed, 91 insertions(+), 41 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index c84bba3..7d02614 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,3 +7,9 @@ authors = ["Paul Masurel "] [dependencies] tantivy = { path = "../tantivy" } time = "0.1.34" +iron = "0.3.0" +staticfile = "*" +mount= "*" + +[dependencies.urlencoded] +version = "*" diff --git a/src/main.rs b/src/main.rs index b9bfb11..44f5d34 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,18 +1,35 @@ extern crate tantivy; extern crate time; +extern crate urlencoded; -use tantivy::core::collector::{CountCollector, FirstNCollector, MultiCollector}; -use tantivy::core::schema::*; -use tantivy::core::searcher::Searcher; -use tantivy::core::directory::Directory; +use tantivy::collector::{CountCollector, FirstNCollector, MultiCollector}; +use tantivy::schema::*; +use tantivy::Searcher; +use tantivy::Directory; use std::io; use std::convert::From; use std::path::PathBuf; -use tantivy::core::analyzer::*; use std::io::BufRead; use time::PreciseTime; +use urlencoded::UrlEncodedQuery; +use iron::status; -fn handle_query(searcher: &Searcher, terms: &Vec, print_fields: &Vec) -> usize { +extern crate iron; +extern crate staticfile; +extern crate mount; + +// This example serves the docs from target/doc/staticfile at /doc/ +// +// Run `cargo doc && cargo test && ./target/doc_server`, then +// point your browser to http://127.0.0.1:3000/doc/ + +use std::path::Path; + +use staticfile::Static; +use mount::Mount; +use iron::prelude::*; + +fn handle_query(searcher: &Searcher, terms: &Vec, print_fields: &Vec) -> usize { let mut count_collector = CountCollector::new(); let mut first_3_collector = FirstNCollector::with_limit(3); { @@ -21,9 +38,9 @@ fn 
handle_query(searcher: &Searcher, terms: &Vec, print_fields: &Vec, print_fields: &Vec IronResult { +// Ok(Response::with((iron::status::Ok, "Hello World"))) +// } - let mut directory = Directory::open(&PathBuf::from("/data/wiki-index/")).unwrap(); - let searcher = Searcher::for_directory(directory); - let tokenizer = SimpleTokenizer::new(); - println!("Ready"); - let stdin = io::stdin(); - loop { - let mut input = String::new(); - print!("> "); - stdin.read_line(&mut input); - if input == "exit\n" { - break; - } - let mut terms: Vec = Vec::new(); - let mut token_it = tokenizer.tokenize(&input); - loop { - match token_it.next() { - Some(token) => { - terms.push(Term::from_field_text(&body_field, &token)); +fn search(req: &mut Request) -> IronResult { + // Extract the decoded data as hashmap, using the UrlEncodedQuery plugin. + match req.get_ref::() { + Ok(ref qs_map) => { + println!("Parsed GET request query string:\n {:?}", qs_map); + println!("{:?}", qs_map.get("q")); + match qs_map.get("q") { + Some(qs) => { + Ok(Response::with((status::Ok, format!("Hello!, {:?}", qs)) )) + } + None => { + Ok(Response::with((status::BadRequest, "Query not defined"))) } - None => { break; } } } - println!("Input: {:?}", input); - println!("Keywords {:?}", terms); - let start = PreciseTime::now(); - let num_docs = handle_query(&searcher, &terms, &print_fields); - let stop = PreciseTime::now(); - println!("Elasped time {:?} microseconds", start.to(stop).num_microseconds().unwrap()); - println!("Num_docs {:?}", num_docs); + Err(ref e) => Ok(Response::with((status::BadRequest, "Failed to parse query string"))) } +} + +fn main() { + // let directory = Directory::open(&PathBuf::from("/data/wiki-index/")).unwrap(); + // let schema = directory.schema(); + // let url_field = schema.field("url").unwrap(); + // let title_field = schema.field("title").unwrap(); + // let body_field = schema.field("body").unwrap(); + // let print_fields = vec!(title_field, url_field); + // + // let mut 
directory = Directory::open(&PathBuf::from("/data/wiki-index/")).unwrap(); + // let searcher = Searcher::for_directory(directory); + // let tokenizer = SimpleTokenizer::new(); + // + // println!("Ready"); + // let stdin = io::stdin(); + // loop { + // let mut input = String::new(); + // print!("> "); + // stdin.read_line(&mut input); + // if input == "exit\n" { + // break; + // } + // let mut terms: Vec = Vec::new(); + // let mut token_it = tokenizer.tokenize(&input); + // loop { + // match token_it.next() { + // Some(token) => { + // terms.push(Term::from_field_text(&body_field, &token)); + // } + // None => { break; } + // } + // } + // println!("Input: {:?}", input); + // println!("Keywords {:?}", terms); + // let start = PreciseTime::now(); + // let num_docs = handle_query(&searcher, &terms, &print_fields); + // let stop = PreciseTime::now(); + // println!("Elapsed time {:?} microseconds", start.to(stop).num_microseconds().unwrap()); + // println!("Num_docs {:?}", num_docs); + let mut mount = Mount::new(); + mount.mount("/", search); + mount.mount("/static/", Static::new(Path::new("static/"))); + Iron::new(mount).http("127.0.0.1:3000").unwrap(); }