Commit ee60767: Merge branch 'q3' into alpha

fuyutarow committed May 24, 2021
2 parents: ffceb65 + 34eec57
Showing 21 changed files with 281 additions and 459 deletions.
7 changes: 7 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -21,6 +21,7 @@ path = "src/bin/main.rs"

[dependencies]
anyhow = "1.0.40"
+collect-mac = "0.1.0"
itertools = "0.10.0"
nom = "6.1.2"
regex = "1.5.4"
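The new collect-mac dependency provides the collect! macro used by examples/product.rs below: a collection literal that names its target type up front. A minimal sketch of the syntax (the map contents here are illustrative, not from the repo):

use std::collections::HashMap;

use collect_mac::collect;

fn main() {
    // collect! builds the HashMap inline; the type after the as keyword
    // picks the target collection.
    let ages = collect! {
        as HashMap<String, u32>:
        "alice".to_owned() => 30,
        "bob".to_owned() => 25,
    };
    assert_eq!(ages["alice"], 30);
}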
16 changes: 14 additions & 2 deletions Makefile.toml
@@ -35,19 +35,31 @@ cargo fix
[tasks.run-q1]
script = '''
alias pq="./target/debug/partiql-cli"
-cat samples/q1.env | pq from --to json | jq > samples/q1.json
pq sql -f samples/q1.json -q "$(cat samples/q1.sql)" | pq from -t json | jq
'''
dependencies = ["build"]

[tasks.run-q2]
script = '''
alias pq="./target/debug/partiql-cli"
-cat samples/q2.env | pq from --to json | jq > samples/q2.json
pq sql -f samples/q2.json -q "$(cat samples/q2.sql)" | pq from -t json | jq
'''
dependencies = ["build"]

+[tasks.run-q3]
+script = '''
+alias pq="./target/debug/partiql-cli"
+pq sql -f samples/q3.json -q "$(cat samples/q3.sql)" | pq from -t json | jq
+'''
+dependencies = ["build"]
+
+[tasks.run-q5]
+script = '''
+alias pq="./target/debug/partiql-cli"
+pq sql -f samples/q5.json -q "$(cat samples/q5.sql)" | pq from -t json | jq
+'''
+dependencies = ["build"]
+
[tasks.generate-json]
script = '''
alias pq="./target/debug/partiql-cli"
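Makefile.toml is a cargo-make manifest, so the new targets would presumably be invoked as cargo make run-q3 and cargo make run-q5. Unlike run-q1 and run-q2, these tasks read a pre-generated samples/q3.json / samples/q5.json instead of converting the .env file inline, which is presumably why the corresponding cat ... | pq from lines were dropped and the generate-json task (truncated just above) is expected to produce those files first.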
50 changes: 50 additions & 0 deletions examples/product.rs
@@ -0,0 +1,50 @@
use std::collections::HashMap;

use collect_mac::collect;
use itertools::Itertools;

use partiql::models::JsonValue;

fn main() {
    let mut records = Vec::<HashMap<String, Vec<JsonValue>>>::new();

    let record = collect! {
        as HashMap<String, Vec<JsonValue>>:
        "id".to_owned() => vec![
            JsonValue::Num(
                3.0,
            ),
        ],
        "employeeName".to_owned() => vec![
            JsonValue::Str(
                "Bob Smith".to_owned(),
            ),
        ],
        "projectName".to_owned() => vec![
            JsonValue::Str(
                "AWS Redshift Spectrum querying".to_owned(),
            ),
            JsonValue::Str(
                "AWS Redshift security".to_owned(),
            ),
            JsonValue::Str(
                "AWS Aurora security".to_owned(),
            ),
        ],
        "title".to_owned() => vec![
            JsonValue::Null,
        ],
    };
    dbg!(&record);

    let record = record
        .into_iter()
        .filter_map(|(k, v)| if v.len() > 0 { Some((k, v)) } else { None })
        .collect::<HashMap<String, Vec<JsonValue>>>();

    let it = record.values().into_iter().multi_cartesian_product();

    for prod in it {
        dbg!(prod);
    }
}
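For reference, itertools' multi_cartesian_product, used above and again in examples/r3.rs, yields every combination that takes one element from each inner collection; this is what fans the three projectName values out into one row per project. A minimal standalone sketch with plain integers:

use itertools::Itertools;

fn main() {
    let columns = vec![vec![1, 2], vec![10, 20, 30]];
    // One element from each inner vec: 2 * 3 = 6 combinations, printed as
    // [1, 10], [1, 20], [1, 30], [2, 10], [2, 20], [2, 30].
    for prod in columns.into_iter().multi_cartesian_product() {
        println!("{:?}", prod);
    }
}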
63 changes: 0 additions & 63 deletions examples/q1.rs

This file was deleted.

28 changes: 18 additions & 10 deletions examples/q2.rs → examples/q3.rs
@@ -6,70 +6,78 @@ use partiql::dsql_parser
use partiql::models::JsonValue;
use partiql::pqlir_parser;
use partiql::sql::to_list;
-use partiql::sql::Bingings;
+use partiql::sql::Bindings;
use partiql::sql::DField;
use partiql::sql::DSql as Sql;
use partiql::sql::DWhereCond;
use partiql::sql::Dpath;
use partiql::sql_parser;

fn main() {
    parse();
}

-fn run(sql: Sql, data: JsonValue) -> JsonValue {
+fn run(sql: &Sql, data: &JsonValue) -> JsonValue {
    let fields = sql
        .select_clause
        .iter()
        .chain(sql.from_clause.iter())
        .chain(sql.left_join_clause.iter())
        .map(|e| e.to_owned())
        .collect::<Vec<_>>();
-    let bindings = Bingings::from(fields.as_slice());
+    let bindings = Bindings::from(fields.as_slice());

    let select_fields = sql
        .select_clause
        .iter()
        .map(|field| field.to_owned().full(&bindings))
        .collect::<Vec<_>>();
-    let bindings_for_select = Bingings::from(select_fields.as_slice());
+    let bindings_for_select = Bindings::from(select_fields.as_slice());

    let value = data.select_by_fields(&select_fields).unwrap();
    let list = to_list(value);
    dbg!(&list);

    let filtered_list = list
        .iter()
        .filter_map(|value| match &sql.where_clause {
            Some(cond) if cond.eval(&value.to_owned(), &bindings, &bindings_for_select) => {
                Some(value.to_owned())
            }
-            _ => None,
+            Some(_) => None,
+            _ => Some(value.to_owned()),
        })
        .collect::<Vec<JsonValue>>();
    dbg!(&filtered_list);

    JsonValue::Array(filtered_list)
}

fn parse() -> anyhow::Result<()> {
    let sql = {
-        let input = std::fs::read_to_string("samples/q2.sql").unwrap();
+        let input = std::fs::read_to_string("samples/q3.sql").unwrap();
        let sql = dsql_parser::sql(&input)?;
        sql
    };

    let data = {
-        let input = std::fs::read_to_string("samples/q2.env").unwrap();
+        let input = std::fs::read_to_string("samples/q3.env").unwrap();
        let model = pqlir_parser::pql_model(&input)?;
        model
    };

    let output = {
-        let input = std::fs::read_to_string("samples/q2.output").unwrap();
+        let input = std::fs::read_to_string("samples/q3.output").unwrap();
        let v = input.split("---").collect::<Vec<_>>();
        let input = v.first().unwrap();
        let model = pqlir_parser::pql_model(&input)?;
        model
    };

-    let res = run(sql, data);
+    let res = run(&sql, &data);

    dbg!(&res);
    dbg!(&output);

    assert_eq!(res, output);

    dbg!("END OF FILE");
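Beyond the q2 -> q3 rename, the run signature taking references, and the Bingings -> Bindings typo fix, note the behavioral change in the filter (as reconstructed above): the old fallback arm _ => None dropped every row when the query had no WHERE clause, while the new arms keep such rows (_ => Some(value.to_owned())) and still drop rows that fail an existing condition (Some(_) => None), which is presumably what q3 needs to pass.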
121 changes: 121 additions & 0 deletions examples/r3.rs
@@ -0,0 +1,121 @@
use std::collections::HashMap;

use itertools::Itertools;

use partiql::dsql_parser as sql_parser;
use partiql::models::JsonValue;
use partiql::pqlir_parser;
use partiql::sql::Bindings;
use partiql::sql::DField;
use partiql::sql::Dpath;

fn main() {
    parse();
}

fn parse() -> anyhow::Result<()> {
    let sql = {
        let input = std::fs::read_to_string("samples/q3.sql").unwrap();
        let sql = sql_parser::sql(&input)?;
        sql
    };

    let data = {
        let input = std::fs::read_to_string("samples/q3.env").unwrap();
        let model = pqlir_parser::pql_model(&input)?;
        model
    };

    let fields = sql
        .select_clause
        .iter()
        .chain(sql.from_clause.iter())
        .chain(sql.left_join_clause.iter())
        .map(|e| e.to_owned())
        .collect::<Vec<_>>();
    let bindings = Bindings::from(fields.as_slice());

    let select_fields = sql
        .clone()
        .select_clause
        .iter()
        .map(|field| field.to_owned().full(&bindings))
        .collect::<Vec<_>>();
    dbg!(&select_fields);
    let value = data.select_by_fields(&select_fields).unwrap();
    dbg!(&value);

    let (tables, n, keys) = {
        let mut tables = HashMap::<String, Vec<JsonValue>>::new();
        let mut n = 0;
        let mut keys = vec![];
        if let JsonValue::Object(map) = value {
            keys = map
                .keys()
                .into_iter()
                .map(|s| s.to_string())
                .collect::<Vec<String>>();
            for (key, value) in map {
                if let JsonValue::Array(array) = value {
                    if n == 0 {
                        n = array.len();
                    }
                    tables.insert(key, array);
                }
            }
        }
        (tables, n, keys)
    };
    dbg!(&tables);

    let records = {
        let mut records = Vec::<HashMap<String, Vec<JsonValue>>>::new();
        for i in 0..n {
            let mut record = HashMap::<String, Vec<JsonValue>>::new();
            for key in &keys {
                let v = tables.get(key.as_str()).unwrap().get(i).unwrap();
                // record.insert(key.to_string(), v.to_owned());
                match v {
                    JsonValue::Array(array) => {
                        record.insert(key.to_string(), array.to_owned());
                    }
                    _ => {
                        record.insert(key.to_string(), vec![v.to_owned()]);
                    }
                }
            }
            records.push(record);
        }
        records
    };
    dbg!(&records);

    let list = records
        .into_iter()
        .map(|record| {
            let record = record
                .into_iter()
                .filter_map(|(k, v)| if v.len() > 0 { Some((k, v)) } else { None })
                .collect::<HashMap<String, Vec<JsonValue>>>();

            let keys = record.keys();
            let it = record.values().into_iter().multi_cartesian_product();
            it.map(|prod| {
                let map = keys
                    .clone()
                    .into_iter()
                    .zip(prod.into_iter())
                    .map(|(key, p)| (key.to_owned(), p.to_owned()))
                    .collect::<HashMap<String, _>>();
                let v = JsonValue::Object(map);
                v
            })
            .collect::<Vec<JsonValue>>()
        })
        .flatten()
        .collect::<Vec<JsonValue>>();
    dbg!(list);

    dbg!("END OF FILE");
    Ok(())
}
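Taken together, r3.rs hand-rolls the unnesting that the q3 query needs: the selected value is split into per-key column arrays, regrouped into one record per row index, and each record's array-valued fields are expanded with multi_cartesian_product, zipping the record's keys back onto every combination to emit one flat JsonValue::Object per result row. Filtering out empty fields first matters, because a cartesian product over any empty collection would otherwise yield no combinations for that record at all.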
2 changes: 1 addition & 1 deletion examples/sql.rs
@@ -1,4 +1,4 @@
-use partiql::sql_parser as parser;
+use partiql::dsql_parser as parser;

fn main() {
    parse();
(Diffs for the remaining changed files were not loaded.)