Skip to content

Commit 06e9f53

Browse files
authored
Minor: consolidate test data (#6217)

* Minor: consolidate test data directories
* Move JSON test data from `tests/jsons` to `tests/data`
* Move Parquet test data from `tests/parquet/data` to `tests/data`
1 parent 7ef4de5 commit 06e9f53

File tree

15 files changed

+11
-11
lines changed

15 files changed

+11
-11
lines changed

datafusion/core/src/datasource/file_format/json.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -256,7 +256,7 @@ mod tests {
256256
projection: Option<Vec<usize>>,
257257
limit: Option<usize>,
258258
) -> Result<Arc<dyn ExecutionPlan>> {
259-
let filename = "tests/jsons/2.json";
259+
let filename = "tests/data/2.json";
260260
let format = JsonFormat::default();
261261
scan_format(state, &format, ".", filename, projection, limit).await
262262
}
@@ -266,7 +266,7 @@ mod tests {
266266
let session = SessionContext::new();
267267
let ctx = session.state();
268268
let store = Arc::new(LocalFileSystem::new()) as _;
269-
let filename = "tests/jsons/schema_infer_limit.json";
269+
let filename = "tests/data/schema_infer_limit.json";
270270
let format = JsonFormat::default().with_schema_infer_max_rec(Some(3));
271271

272272
let file_schema = format

datafusion/core/src/physical_plan/file_format/csv.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -785,7 +785,7 @@ mod tests {
785785
let options = CsvReadOptions::default()
786786
.schema_infer_max_records(2)
787787
.has_header(true);
788-
let df = ctx.read_csv("tests/csv/corrupt.csv", options).await?;
788+
let df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
789789
let tmp_dir = TempDir::new()?;
790790
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
791791
let e = df

datafusion/core/src/physical_plan/file_format/json.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -309,7 +309,7 @@ mod tests {
309309

310310
use super::*;
311311

312-
const TEST_DATA_BASE: &str = "tests/jsons";
312+
const TEST_DATA_BASE: &str = "tests/data";
313313

314314
async fn prepare_store(
315315
state: &SessionState,
@@ -707,7 +707,7 @@ mod tests {
707707
let options = CsvReadOptions::default()
708708
.schema_infer_max_records(2)
709709
.has_header(true);
710-
let df = ctx.read_csv("tests/csv/corrupt.csv", options).await?;
710+
let df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
711711
let tmp_dir = TempDir::new()?;
712712
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
713713
let e = df

datafusion/core/src/physical_plan/file_format/parquet.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -940,7 +940,7 @@ mod tests {
940940
let options = CsvReadOptions::default()
941941
.schema_infer_max_records(2)
942942
.has_header(true);
943-
let df = ctx.read_csv("tests/csv/corrupt.csv", options).await?;
943+
let df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
944944
let tmp_dir = TempDir::new()?;
945945
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
946946
let e = df

datafusion/core/tests/sql/json.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
use super::*;
1919

20-
const TEST_DATA_BASE: &str = "tests/jsons";
20+
const TEST_DATA_BASE: &str = "tests/data";
2121

2222
#[tokio::test]
2323
async fn json_query() {
@@ -92,7 +92,7 @@ async fn json_explain() {
9292
\n CoalescePartitionsExec\
9393
\n AggregateExec: mode=Partial, gby=[], aggr=[COUNT(UInt8(1))]\
9494
\n RepartitionExec: partitioning=RoundRobinBatch(NUM_CORES), input_partitions=1\
95-
\n JsonExec: file_groups={1 group: [[WORKING_DIR/tests/jsons/2.json]]}, projection=[a]\n",
95+
\n JsonExec: file_groups={1 group: [[WORKING_DIR/tests/data/2.json]]}, projection=[a]\n",
9696
],
9797
];
9898
assert_eq!(expected, actual);

datafusion/core/tests/sql/order.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ use test_utils::{batches_to_vec, partitions_to_sorted_vec};
2525
#[tokio::test]
2626
async fn sort_with_lots_of_repetition_values() -> Result<()> {
2727
let ctx = SessionContext::new();
28-
let filename = "tests/parquet/data/repeat_much.snappy.parquet";
28+
let filename = "tests/data/repeat_much.snappy.parquet";
2929

3030
ctx.register_parquet("rep", filename, ParquetReadOptions::default())
3131
.await?;

datafusion/core/tests/sql/parquet.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ async fn fixed_size_binary_columns() {
151151
let ctx = SessionContext::new();
152152
ctx.register_parquet(
153153
"t0",
154-
"tests/parquet/data/test_binary.parquet",
154+
"tests/data/test_binary.parquet",
155155
ParquetReadOptions::default(),
156156
)
157157
.await
@@ -170,7 +170,7 @@ async fn window_fn_timestamp_tz() {
170170
let ctx = SessionContext::new();
171171
ctx.register_parquet(
172172
"t0",
173-
"tests/parquet/data/timestamp_with_tz.parquet",
173+
"tests/data/timestamp_with_tz.parquet",
174174
ParquetReadOptions::default(),
175175
)
176176
.await

0 commit comments

Comments (0)