Skip to content

Commit f1a81a7

Browse files
author
zhuliquan
committed
Merge branch 'main' into feature-scalar_regexp_match_expr
2 parents aa2eed2 + 444a673 commit f1a81a7

File tree

759 files changed

+46200
-19569
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

759 files changed

+46200
-19569
lines changed

.github/actions/setup-builder/action.yaml

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -28,16 +28,18 @@ runs:
2828
- name: Install Build Dependencies
2929
shell: bash
3030
run: |
31-
apt-get update
32-
apt-get install -y protobuf-compiler
31+
RETRY="ci/scripts/retry"
32+
"${RETRY}" apt-get update
33+
"${RETRY}" apt-get install -y protobuf-compiler
3334
- name: Setup Rust toolchain
3435
shell: bash
3536
# rustfmt is needed for the substrait build script
3637
run: |
38+
RETRY="ci/scripts/retry"
3739
echo "Installing ${{ inputs.rust-version }}"
38-
rustup toolchain install ${{ inputs.rust-version }}
39-
rustup default ${{ inputs.rust-version }}
40-
rustup component add rustfmt
40+
"${RETRY}" rustup toolchain install ${{ inputs.rust-version }}
41+
"${RETRY}" rustup default ${{ inputs.rust-version }}
42+
"${RETRY}" rustup component add rustfmt
4143
- name: Configure rust runtime env
4244
uses: ./.github/actions/setup-rust-runtime
4345
- name: Fixup git permissions

.github/workflows/rust.yml

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -521,7 +521,7 @@ jobs:
521521
run: taplo format --check
522522

523523
config-docs-check:
524-
name: check configs.md is up-to-date
524+
name: check configs.md and ***_functions.md are up-to-date
525525
needs: [ linux-build-lib ]
526526
runs-on: ubuntu-latest
527527
container:
@@ -542,6 +542,11 @@ jobs:
542542
# If you encounter an error, run './dev/update_config_docs.sh' and commit
543543
./dev/update_config_docs.sh
544544
git diff --exit-code
545+
- name: Check if any of the ***_functions.md has been modified
546+
run: |
547+
# If you encounter an error, run './dev/update_function_docs.sh' and commit
548+
./dev/update_function_docs.sh
549+
git diff --exit-code
545550
546551
# Verify MSRV for the crates which are directly used by other projects:
547552
# - datafusion
@@ -569,9 +574,9 @@ jobs:
569574
#
570575
# To reproduce:
571576
# 1. Install the version of Rust that is failing. Example:
572-
# rustup install 1.78.0
577+
# rustup install 1.79.0
573578
# 2. Run the command that failed with that version. Example:
574-
# cargo +1.78.0 check -p datafusion
579+
# cargo +1.79.0 check -p datafusion
575580
#
576581
# To resolve, either:
577582
# 1. Change your code to use older Rust features,

Cargo.toml

Lines changed: 36 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -58,8 +58,8 @@ homepage = "https://datafusion.apache.org"
5858
license = "Apache-2.0"
5959
readme = "README.md"
6060
repository = "https://github.com/apache/datafusion"
61-
rust-version = "1.78"
62-
version = "42.0.0"
61+
rust-version = "1.79"
62+
version = "42.1.0"
6363

6464
[workspace.dependencies]
6565
# We turn off default-features for some dependencies here so the workspaces which inherit them can
@@ -70,51 +70,51 @@ version = "42.0.0"
7070
ahash = { version = "0.8", default-features = false, features = [
7171
"runtime-rng",
7272
] }
73-
arrow = { version = "53.0.0", features = [
73+
arrow = { version = "53.2.0", features = [
7474
"prettyprint",
7575
] }
76-
arrow-array = { version = "53.0.0", default-features = false, features = [
76+
arrow-array = { version = "53.2.0", default-features = false, features = [
7777
"chrono-tz",
7878
] }
79-
arrow-buffer = { version = "53.0.0", default-features = false }
80-
arrow-flight = { version = "53.0.0", features = [
79+
arrow-buffer = { version = "53.2.0", default-features = false }
80+
arrow-flight = { version = "53.2.0", features = [
8181
"flight-sql-experimental",
8282
] }
83-
arrow-ipc = { version = "53.0.0", default-features = false, features = [
83+
arrow-ipc = { version = "53.2.0", default-features = false, features = [
8484
"lz4",
8585
] }
86-
arrow-ord = { version = "53.0.0", default-features = false }
87-
arrow-schema = { version = "53.0.0", default-features = false }
88-
arrow-string = { version = "53.0.0", default-features = false }
86+
arrow-ord = { version = "53.2.0", default-features = false }
87+
arrow-schema = { version = "53.2.0", default-features = false }
88+
arrow-string = { version = "53.2.0", default-features = false }
8989
async-trait = "0.1.73"
9090
bigdecimal = "=0.4.1"
9191
bytes = "1.4"
9292
chrono = { version = "0.4.38", default-features = false }
9393
ctor = "0.2.0"
9494
dashmap = "6.0.1"
95-
datafusion = { path = "datafusion/core", version = "42.0.0", default-features = false }
96-
datafusion-catalog = { path = "datafusion/catalog", version = "42.0.0" }
97-
datafusion-common = { path = "datafusion/common", version = "42.0.0", default-features = false }
98-
datafusion-common-runtime = { path = "datafusion/common-runtime", version = "42.0.0" }
99-
datafusion-execution = { path = "datafusion/execution", version = "42.0.0" }
100-
datafusion-expr = { path = "datafusion/expr", version = "42.0.0" }
101-
datafusion-expr-common = { path = "datafusion/expr-common", version = "42.0.0" }
102-
datafusion-functions = { path = "datafusion/functions", version = "42.0.0" }
103-
datafusion-functions-aggregate = { path = "datafusion/functions-aggregate", version = "42.0.0" }
104-
datafusion-functions-aggregate-common = { path = "datafusion/functions-aggregate-common", version = "42.0.0" }
105-
datafusion-functions-nested = { path = "datafusion/functions-nested", version = "42.0.0" }
106-
datafusion-functions-window = { path = "datafusion/functions-window", version = "42.0.0" }
107-
datafusion-functions-window-common = { path = "datafusion/functions-window-common", version = "42.0.0" }
108-
datafusion-optimizer = { path = "datafusion/optimizer", version = "42.0.0", default-features = false }
109-
datafusion-physical-expr = { path = "datafusion/physical-expr", version = "42.0.0", default-features = false }
110-
datafusion-physical-expr-common = { path = "datafusion/physical-expr-common", version = "42.0.0", default-features = false }
111-
datafusion-physical-optimizer = { path = "datafusion/physical-optimizer", version = "42.0.0" }
112-
datafusion-physical-plan = { path = "datafusion/physical-plan", version = "42.0.0" }
113-
datafusion-proto = { path = "datafusion/proto", version = "42.0.0" }
114-
datafusion-proto-common = { path = "datafusion/proto-common", version = "42.0.0" }
115-
datafusion-sql = { path = "datafusion/sql", version = "42.0.0" }
116-
datafusion-sqllogictest = { path = "datafusion/sqllogictest", version = "42.0.0" }
117-
datafusion-substrait = { path = "datafusion/substrait", version = "42.0.0" }
95+
datafusion = { path = "datafusion/core", version = "42.1.0", default-features = false }
96+
datafusion-catalog = { path = "datafusion/catalog", version = "42.1.0" }
97+
datafusion-common = { path = "datafusion/common", version = "42.1.0", default-features = false }
98+
datafusion-common-runtime = { path = "datafusion/common-runtime", version = "42.1.0" }
99+
datafusion-execution = { path = "datafusion/execution", version = "42.1.0" }
100+
datafusion-expr = { path = "datafusion/expr", version = "42.1.0" }
101+
datafusion-expr-common = { path = "datafusion/expr-common", version = "42.1.0" }
102+
datafusion-functions = { path = "datafusion/functions", version = "42.1.0" }
103+
datafusion-functions-aggregate = { path = "datafusion/functions-aggregate", version = "42.1.0" }
104+
datafusion-functions-aggregate-common = { path = "datafusion/functions-aggregate-common", version = "42.1.0" }
105+
datafusion-functions-nested = { path = "datafusion/functions-nested", version = "42.1.0" }
106+
datafusion-functions-window = { path = "datafusion/functions-window", version = "42.1.0" }
107+
datafusion-functions-window-common = { path = "datafusion/functions-window-common", version = "42.1.0" }
108+
datafusion-optimizer = { path = "datafusion/optimizer", version = "42.1.0", default-features = false }
109+
datafusion-physical-expr = { path = "datafusion/physical-expr", version = "42.1.0", default-features = false }
110+
datafusion-physical-expr-common = { path = "datafusion/physical-expr-common", version = "42.1.0", default-features = false }
111+
datafusion-physical-optimizer = { path = "datafusion/physical-optimizer", version = "42.1.0" }
112+
datafusion-physical-plan = { path = "datafusion/physical-plan", version = "42.1.0" }
113+
datafusion-proto = { path = "datafusion/proto", version = "42.1.0" }
114+
datafusion-proto-common = { path = "datafusion/proto-common", version = "42.1.0" }
115+
datafusion-sql = { path = "datafusion/sql", version = "42.1.0" }
116+
datafusion-sqllogictest = { path = "datafusion/sqllogictest", version = "42.1.0" }
117+
datafusion-substrait = { path = "datafusion/substrait", version = "42.1.0" }
118118
doc-comment = "0.3"
119119
env_logger = "0.11"
120120
futures = "0.3"
@@ -126,7 +126,7 @@ log = "^0.4"
126126
num_cpus = "1.13.0"
127127
object_store = { version = "0.11.0", default-features = false }
128128
parking_lot = "0.12"
129-
parquet = { version = "53.0.0", default-features = false, features = [
129+
parquet = { version = "53.2.0", default-features = false, features = [
130130
"arrow",
131131
"async",
132132
"object_store",
@@ -137,7 +137,7 @@ prost = "0.13.1"
137137
prost-derive = "0.13.1"
138138
rand = "0.8"
139139
regex = "1.8"
140-
rstest = "0.22.0"
140+
rstest = "0.23.0"
141141
serde_json = "1"
142142
sqlparser = { version = "0.51.0", features = ["visitor"] }
143143
tempfile = "3"
@@ -169,3 +169,4 @@ large_futures = "warn"
169169

170170
[workspace.lints.rust]
171171
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(tarpaulin)"] }
172+
unused_qualifications = "deny"

README.md

Lines changed: 21 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -42,14 +42,23 @@
4242
</a>
4343

4444
DataFusion is an extensible query engine written in [Rust] that
45-
uses [Apache Arrow] as its in-memory format. DataFusion's target users are
46-
developers building fast and feature rich database and analytic systems,
47-
customized to particular workloads. See [use cases] for examples.
45+
uses [Apache Arrow] as its in-memory format.
4846

49-
"Out of the box," DataFusion offers [SQL] and [`Dataframe`] APIs,
50-
excellent [performance], built-in support for CSV, Parquet, JSON, and Avro,
51-
extensive customization, and a great community.
52-
[Python Bindings] are also available.
47+
This crate provides libraries and binaries for developers building fast and
48+
feature rich database and analytic systems, customized to particular workloads.
49+
See [use cases] for examples. The following related subprojects target end users:
50+
51+
- [DataFusion Python](https://github.com/apache/datafusion-python/) offers a Python interface for SQL and DataFrame
52+
queries.
53+
- [DataFusion Ray](https://github.com/apache/datafusion-ray/) provides a distributed version of DataFusion that scales
54+
out on Ray clusters.
55+
- [DataFusion Comet](https://github.com/apache/datafusion-comet/) is an accelerator for Apache Spark based on
56+
DataFusion.
57+
58+
"Out of the box,"
59+
DataFusion offers [SQL] and [`Dataframe`] APIs, excellent [performance],
60+
built-in support for CSV, Parquet, JSON, and Avro, extensive customization, and
61+
a great community.
5362

5463
DataFusion features a full query planner, a columnar, streaming, multi-threaded,
5564
vectorized execution engine, and partitioned data sources. You can
@@ -125,3 +134,8 @@ For example, given the releases `1.78.0`, `1.79.0`, `1.80.0`, `1.80.1` and `1.81
125134
If a hotfix is released for the minimum supported Rust version (MSRV), the MSRV will be the minor version with all hotfixes, even if it surpasses the four-month window.
126135

127136
We enforce this policy using a [MSRV CI Check](https://github.com/search?q=repo%3Aapache%2Fdatafusion+rust-version+language%3ATOML+path%3A%2F%5ECargo.toml%2F&type=code)
137+
138+
## DataFusion API evolution policy
139+
140+
Public methods in Apache DataFusion evolve as part of the API lifecycle.
141+
Deprecated methods will be phased out in accordance with the [policy](https://datafusion.apache.org/library-user-guide/api-health.html), ensuring the API is stable and healthy.

benchmarks/README.md

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -330,6 +330,16 @@ steps.
330330
The tests sort the entire dataset using several different sort
331331
orders.
332332

333+
## IMDB
334+
335+
Run Join Order Benchmark (JOB) on IMDB dataset.
336+
337+
The Internet Movie Database (IMDB) dataset contains real-world movie data. Unlike synthetic datasets like TPCH, which assume uniform data distribution and uncorrelated columns, the IMDB dataset includes skewed data and correlated columns (which are common in real datasets), making it more suitable for testing query optimizers, particularly for cardinality estimation.
338+
339+
This benchmark is derived from [Join Order Benchmark](https://github.com/gregrahn/join-order-benchmark).
340+
341+
See the paper [How Good Are Query Optimizers, Really?](http://www.vldb.org/pvldb/vol9/p204-leis.pdf) for more details.
342+
333343
## TPCH
334344

335345
Run the tpch benchmark.
@@ -342,6 +352,34 @@ This benchmarks is derived from the [TPC-H][1] version
342352
[2]: https://github.com/databricks/tpch-dbgen.git,
343353
[2.17.1]: https://www.tpc.org/tpc_documents_current_versions/pdf/tpc-h_v2.17.1.pdf
344354

355+
## External Aggregation
356+
357+
Run the benchmark for aggregations with limited memory.
358+
359+
When the memory limit is exceeded, the aggregation intermediate results will be spilled to disk, and finally read back with sort-merge.
360+
361+
External aggregation benchmarks run several aggregation queries with different memory limits, on TPCH `lineitem` table. Queries can be found in [`external_aggr.rs`](src/bin/external_aggr.rs).
362+
363+
This benchmark is inspired by [DuckDB's external aggregation paper](https://hannes.muehleisen.org/publications/icde2024-out-of-core-kuiper-boncz-muehleisen.pdf), specifically Section VI.
364+
365+
### External Aggregation Example Runs
366+
1. Run all queries with predefined memory limits:
367+
```bash
368+
# Under 'benchmarks/' directory
369+
cargo run --release --bin external_aggr -- benchmark -n 4 --iterations 3 -p '....../data/tpch_sf1' -o '/tmp/aggr.json'
370+
```
371+
372+
2. Run a query with specific memory limit:
373+
```bash
374+
cargo run --release --bin external_aggr -- benchmark -n 4 --iterations 3 -p '....../data/tpch_sf1' -o '/tmp/aggr.json' --query 1 --memory-limit 30M
375+
```
376+
377+
3. Run all queries with `bench.sh` script:
378+
```bash
379+
./bench.sh data external_aggr
380+
./bench.sh run external_aggr
381+
```
382+
345383

346384
# Older Benchmarks
347385

benchmarks/bench.sh

Lines changed: 39 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ sort: Benchmark of sorting speed
7878
clickbench_1: ClickBench queries against a single parquet file
7979
clickbench_partitioned: ClickBench queries against a partitioned (100 files) parquet
8080
clickbench_extended: ClickBench \"inspired\" queries against a single parquet (DataFusion specific)
81+
external_aggr: External aggregation benchmark
8182
8283
**********
8384
* Supported Configuration (Environment Variables)
@@ -170,6 +171,10 @@ main() {
170171
imdb)
171172
data_imdb
172173
;;
174+
external_aggr)
175+
# same data as for tpch
176+
data_tpch "1"
177+
;;
173178
*)
174179
echo "Error: unknown benchmark '$BENCHMARK' for data generation"
175180
usage
@@ -211,6 +216,8 @@ main() {
211216
run_clickbench_1
212217
run_clickbench_partitioned
213218
run_clickbench_extended
219+
run_imdb
220+
run_external_aggr
214221
;;
215222
tpch)
216223
run_tpch "1"
@@ -239,6 +246,12 @@ main() {
239246
clickbench_extended)
240247
run_clickbench_extended
241248
;;
249+
imdb)
250+
run_imdb
251+
;;
252+
external_aggr)
253+
run_external_aggr
254+
;;
242255
*)
243256
echo "Error: unknown benchmark '$BENCHMARK' for run"
244257
usage
@@ -353,15 +366,15 @@ run_parquet() {
353366
RESULTS_FILE="${RESULTS_DIR}/parquet.json"
354367
echo "RESULTS_FILE: ${RESULTS_FILE}"
355368
echo "Running parquet filter benchmark..."
356-
$CARGO_COMMAND --bin parquet -- filter --path "${DATA_DIR}" --prefer_hash_join "${PREFER_HASH_JOIN}" --scale-factor 1.0 --iterations 5 -o "${RESULTS_FILE}"
369+
$CARGO_COMMAND --bin parquet -- filter --path "${DATA_DIR}" --scale-factor 1.0 --iterations 5 -o "${RESULTS_FILE}"
357370
}
358371

359372
# Runs the sort benchmark
360373
run_sort() {
361374
RESULTS_FILE="${RESULTS_DIR}/sort.json"
362375
echo "RESULTS_FILE: ${RESULTS_FILE}"
363376
echo "Running sort benchmark..."
364-
$CARGO_COMMAND --bin parquet -- sort --path "${DATA_DIR}" --prefer_hash_join "${PREFER_HASH_JOIN}" --scale-factor 1.0 --iterations 5 -o "${RESULTS_FILE}"
377+
$CARGO_COMMAND --bin parquet -- sort --path "${DATA_DIR}" --scale-factor 1.0 --iterations 5 -o "${RESULTS_FILE}"
365378
}
366379

367380

@@ -510,7 +523,31 @@ data_imdb() {
510523
fi
511524
}
512525

526+
# Runs the imdb benchmark
527+
run_imdb() {
528+
IMDB_DIR="${DATA_DIR}/imdb"
529+
530+
RESULTS_FILE="${RESULTS_DIR}/imdb.json"
531+
echo "RESULTS_FILE: ${RESULTS_FILE}"
532+
echo "Running imdb benchmark..."
533+
$CARGO_COMMAND --bin imdb -- benchmark datafusion --iterations 5 --path "${IMDB_DIR}" --prefer_hash_join "${PREFER_HASH_JOIN}" --format parquet -o "${RESULTS_FILE}"
534+
}
513535

536+
# Runs the external aggregation benchmark
537+
run_external_aggr() {
538+
# Use TPC-H SF1 dataset
539+
TPCH_DIR="${DATA_DIR}/tpch_sf1"
540+
RESULTS_FILE="${RESULTS_DIR}/external_aggr.json"
541+
echo "RESULTS_FILE: ${RESULTS_FILE}"
542+
echo "Running external aggregation benchmark..."
543+
544+
# Only parquet is supported.
545+
# Since per-operator memory limit is calculated as (total-memory-limit /
546+
# number-of-partitions), and by default `--partitions` is set to number of
547+
# CPU cores, we set a constant number of partitions to prevent this
548+
# benchmark from failing on some machines.
549+
$CARGO_COMMAND --bin external_aggr -- benchmark --partitions 4 --iterations 5 --path "${TPCH_DIR}" -o "${RESULTS_FILE}"
550+
}
514551

515552

516553
compare_benchmarks() {

benchmarks/queries/imdb/10a.sql

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- IMDB/JOB query 10a: minimal character name and title for uncredited
-- voice roles in movies of Russian companies produced after 2005.
SELECT MIN(chn.name) AS uncredited_voiced_character,
       MIN(t.title) AS russian_movie
FROM char_name AS chn,
     cast_info AS ci,
     company_name AS cn,
     company_type AS ct,
     movie_companies AS mc,
     role_type AS rt,
     title AS t
WHERE ci.note LIKE '%(voice)%'
  AND ci.note LIKE '%(uncredited)%'
  AND cn.country_code = '[ru]'
  AND rt.role = 'actor'
  AND t.production_year > 2005
  AND t.id = mc.movie_id
  AND t.id = ci.movie_id
  AND ci.movie_id = mc.movie_id
  AND chn.id = ci.person_role_id
  AND rt.id = ci.role_id
  AND cn.id = mc.company_id
  AND ct.id = mc.company_type_id;

benchmarks/queries/imdb/10b.sql

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- IMDB/JOB query 10b: minimal character name and title for producer-noted
-- actor roles in movies of Russian companies produced after 2010.
SELECT MIN(chn.name) AS character,
       MIN(t.title) AS russian_mov_with_actor_producer
FROM char_name AS chn,
     cast_info AS ci,
     company_name AS cn,
     company_type AS ct,
     movie_companies AS mc,
     role_type AS rt,
     title AS t
WHERE ci.note LIKE '%(producer)%'
  AND cn.country_code = '[ru]'
  AND rt.role = 'actor'
  AND t.production_year > 2010
  AND t.id = mc.movie_id
  AND t.id = ci.movie_id
  AND ci.movie_id = mc.movie_id
  AND chn.id = ci.person_role_id
  AND rt.id = ci.role_id
  AND cn.id = mc.company_id
  AND ct.id = mc.company_type_id;

benchmarks/queries/imdb/10c.sql

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- IMDB/JOB query 10c: minimal character name and title for producer-noted
-- roles in movies of US companies produced after 1990.
SELECT MIN(chn.name) AS character,
       MIN(t.title) AS movie_with_american_producer
FROM char_name AS chn,
     cast_info AS ci,
     company_name AS cn,
     company_type AS ct,
     movie_companies AS mc,
     role_type AS rt,
     title AS t
WHERE ci.note LIKE '%(producer)%'
  AND cn.country_code = '[us]'
  AND t.production_year > 1990
  AND t.id = mc.movie_id
  AND t.id = ci.movie_id
  AND ci.movie_id = mc.movie_id
  AND chn.id = ci.person_role_id
  AND rt.id = ci.role_id
  AND cn.id = mc.company_id
  AND ct.id = mc.company_type_id;

benchmarks/queries/imdb/11a.sql

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- IMDB/JOB query 11a: minimal company name, link type, and title for
-- non-Polish "sequel"-keyworded movies (1950-2000) linked via a
-- follow-style movie link to production companies named Film/Warner.
SELECT MIN(cn.name) AS from_company,
       MIN(lt.link) AS movie_link_type,
       MIN(t.title) AS non_polish_sequel_movie
FROM company_name AS cn,
     company_type AS ct,
     keyword AS k,
     link_type AS lt,
     movie_companies AS mc,
     movie_keyword AS mk,
     movie_link AS ml,
     title AS t
WHERE cn.country_code != '[pl]'
  AND (cn.name LIKE '%Film%' OR cn.name LIKE '%Warner%')
  AND ct.kind = 'production companies'
  AND k.keyword = 'sequel'
  AND lt.link LIKE '%follow%'
  AND mc.note IS NULL
  AND t.production_year BETWEEN 1950 AND 2000
  AND lt.id = ml.link_type_id
  AND ml.movie_id = t.id
  AND t.id = mk.movie_id
  AND mk.keyword_id = k.id
  AND t.id = mc.movie_id
  AND mc.company_type_id = ct.id
  AND mc.company_id = cn.id
  AND ml.movie_id = mk.movie_id
  AND ml.movie_id = mc.movie_id
  AND mk.movie_id = mc.movie_id;

benchmarks/queries/imdb/11b.sql

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- IMDB/JOB query 11b: minimal company name, link type, and title for
-- 1998 "sequel"-keyworded movies titled like '%Money%' that are linked
-- via a 'follows' movie link to production companies named Film/Warner.
SELECT MIN(cn.name) AS from_company,
       MIN(lt.link) AS movie_link_type,
       MIN(t.title) AS sequel_movie
FROM company_name AS cn,
     company_type AS ct,
     keyword AS k,
     link_type AS lt,
     movie_companies AS mc,
     movie_keyword AS mk,
     movie_link AS ml,
     title AS t
WHERE cn.country_code != '[pl]'
  AND (cn.name LIKE '%Film%' OR cn.name LIKE '%Warner%')
  AND ct.kind = 'production companies'
  AND k.keyword = 'sequel'
  AND lt.link LIKE '%follows%'
  AND mc.note IS NULL
  AND t.production_year = 1998
  AND t.title LIKE '%Money%'
  AND lt.id = ml.link_type_id
  AND ml.movie_id = t.id
  AND t.id = mk.movie_id
  AND mk.keyword_id = k.id
  AND t.id = mc.movie_id
  AND mc.company_type_id = ct.id
  AND mc.company_id = cn.id
  AND ml.movie_id = mk.movie_id
  AND ml.movie_id = mc.movie_id
  AND mk.movie_id = mc.movie_id;

0 commit comments

Comments
 (0)