lapx
Advanced tools
+431
-426
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://github.com/rathaROG/lapx/actions/workflows/benchmark_single.yaml) | ||
@@ -30,7 +30,12 @@ [](https://github.com/rathaROG/lapx/actions/workflows/benchmark_batch.yaml) | ||
| 📊 Some benchmark results using `lapx` [v0.9.0](https://github.com/rathaROG/lapx/releases/tag/v0.9.0) (2025/10/27): | ||
| 📊 Some benchmark results using `lapx` [v0.9.1](https://github.com/rathaROG/lapx/releases/tag/v0.9.1) (2025/10/31): | ||
| <details><summary>🗂️ Batch on my local Windows 11 i9-13900KS (8 p-core + 8 e-core) + python 3.11.9:</summary> | ||
| <details><summary>🗂️ Batch on my local Windows 11 i9-13900KS (8 p-core + 8 e-core) + python 3.11.9:</summary><br> | ||
| ``` | ||
| numpy==2.2.6 | ||
| lapx @ git+https://github.com/rathaROG/lapx.git@8e1a5c5cbe1a813d5ee80570b285e316fcc99f7a # 0.9.1 | ||
| ``` | ||
| ``` | ||
| Microsoft Windows [Version 10.0.26200.7019] | ||
@@ -43,49 +48,49 @@ (c) Microsoft Corporation. All rights reserved. | ||
| CPU lapx-batch-jvx : cost=16.48859572, time=0.67588449s | ||
| CPU lapx-batch-jvs : cost=16.48859572, time=0.46411657s | ||
| CPU lapx-batch-jvxa : cost=16.48859572, time=0.71385884s | ||
| CPU lapx-batch-jvsa : cost=16.48859572, time=0.45670390s | ||
| CPU lapx-batch-jvsa64 : cost=16.48859572, time=0.70847058s | ||
| CPU lapx-loop-jvx : cost=16.48859572, time=3.95986462s | ||
| CPU lapx-loop-jvs : cost=16.48859572, time=2.66866994s | ||
| CPU lapx-batch-jvx : cost=16.48732425, time=0.70944118s | ||
| CPU lapx-batch-jvs : cost=16.48732425, time=0.43542552s | ||
| CPU lapx-batch-jvxa : cost=16.48732425, time=0.69259763s | ||
| CPU lapx-batch-jvsa : cost=16.48732425, time=0.44047427s | ||
| CPU lapx-batch-jvsa64 : cost=16.48732425, time=0.78757620s | ||
| CPU lapx-loop-jvx : cost=16.48732425, time=4.28805971s | ||
| CPU lapx-loop-jvs : cost=16.48732425, time=2.85956860s | ||
| # 20 x (3000x2000) | n_threads = 24 | ||
| CPU lapx-batch-jvx : cost=16.65042067, time=0.18923616s | ||
| CPU lapx-batch-jvs : cost=16.65042067, time=0.17624354s | ||
| CPU lapx-batch-jvxa : cost=16.65042067, time=0.18447852s | ||
| CPU lapx-batch-jvsa : cost=16.65042067, time=0.18925667s | ||
| CPU lapx-batch-jvsa64 : cost=16.65042067, time=0.18949389s | ||
| CPU lapx-loop-jvx : cost=16.65042067, time=0.85662770s | ||
| CPU lapx-loop-jvs : cost=16.65042067, time=1.05569839s | ||
| CPU lapx-batch-jvx : cost=16.69374066, time=0.19042516s | ||
| CPU lapx-batch-jvs : cost=16.69374066, time=0.19088888s | ||
| CPU lapx-batch-jvxa : cost=16.69374066, time=0.17689967s | ||
| CPU lapx-batch-jvsa : cost=16.69374066, time=0.18332553s | ||
| CPU lapx-batch-jvsa64 : cost=16.69374066, time=0.18913651s | ||
| CPU lapx-loop-jvx : cost=16.69374066, time=0.85293603s | ||
| CPU lapx-loop-jvs : cost=16.69374066, time=1.09705830s | ||
| # 50 x (2000x2000) | n_threads = 24 | ||
| CPU lapx-batch-jvx : cost=82.12386385, time=0.56725645s | ||
| CPU lapx-batch-jvs : cost=82.12386385, time=0.37664533s | ||
| CPU lapx-batch-jvxa : cost=82.12386385, time=0.57265162s | ||
| CPU lapx-batch-jvsa : cost=82.12386385, time=0.37772393s | ||
| CPU lapx-batch-jvsa64 : cost=82.12386385, time=0.61493921s | ||
| CPU lapx-loop-jvx : cost=82.12386385, time=4.46092606s | ||
| CPU lapx-loop-jvs : cost=82.12386385, time=3.49988031s | ||
| CPU lapx-batch-jvx : cost=81.88714629, time=0.40948844s | ||
| CPU lapx-batch-jvs : cost=81.88714629, time=0.34669971s | ||
| CPU lapx-batch-jvxa : cost=81.88714629, time=0.39700556s | ||
| CPU lapx-batch-jvsa : cost=81.88714629, time=0.33096647s | ||
| CPU lapx-batch-jvsa64 : cost=81.88714629, time=0.44180655s | ||
| CPU lapx-loop-jvx : cost=81.88714629, time=3.34968209s | ||
| CPU lapx-loop-jvs : cost=81.88714629, time=3.22587180s | ||
| # 100 x (1000x2000) | n_threads = 24 | ||
| CPU lapx-batch-jvx : cost=58.19636934, time=0.18971944s | ||
| CPU lapx-batch-jvs : cost=58.19636934, time=0.16700149s | ||
| CPU lapx-batch-jvxa : cost=58.19636934, time=0.18943620s | ||
| CPU lapx-batch-jvsa : cost=58.19636934, time=0.16706610s | ||
| CPU lapx-batch-jvsa64 : cost=58.19636934, time=0.25204611s | ||
| CPU lapx-loop-jvx : cost=58.19636934, time=1.02838278s | ||
| CPU lapx-loop-jvs : cost=58.19636934, time=1.21967244s | ||
| CPU lapx-batch-jvx : cost=57.81402817, time=0.18936110s | ||
| CPU lapx-batch-jvs : cost=57.81402817, time=0.16963148s | ||
| CPU lapx-batch-jvxa : cost=57.81402817, time=0.18951726s | ||
| CPU lapx-batch-jvsa : cost=57.81402817, time=0.16521573s | ||
| CPU lapx-batch-jvsa64 : cost=57.81402817, time=0.25324345s | ||
| CPU lapx-loop-jvx : cost=57.81402817, time=0.96780610s | ||
| CPU lapx-loop-jvs : cost=57.81402817, time=1.20634890s | ||
| # 500 x (1000x1000) | n_threads = 24 | ||
| CPU lapx-batch-jvx : cost=821.97407482, time=0.59273267s | ||
| CPU lapx-batch-jvs : cost=821.97407482, time=0.58274126s | ||
| CPU lapx-batch-jvxa : cost=821.97407482, time=0.58346224s | ||
| CPU lapx-batch-jvsa : cost=821.97407482, time=0.58098578s | ||
| CPU lapx-batch-jvsa64 : cost=821.97407482, time=0.61520362s | ||
| CPU lapx-loop-jvx : cost=821.97407482, time=6.64442897s | ||
| CPU lapx-loop-jvs : cost=821.97407482, time=7.03527546s | ||
| CPU lapx-batch-jvx : cost=820.77561875, time=0.55759573s | ||
| CPU lapx-batch-jvs : cost=820.77561875, time=0.56053782s | ||
| CPU lapx-batch-jvxa : cost=820.77561875, time=0.55279994s | ||
| CPU lapx-batch-jvsa : cost=820.77561875, time=0.56725907s | ||
| CPU lapx-batch-jvsa64 : cost=820.77561875, time=0.58956695s | ||
| CPU lapx-loop-jvx : cost=820.77561875, time=6.52994561s | ||
| CPU lapx-loop-jvs : cost=820.77561875, time=7.08902001s | ||
| ``` | ||
@@ -97,3 +102,3 @@ | ||
| https://github.com/rathaROG/lapx/actions/runs/18961613065/job/54149890164 | ||
| https://github.com/rathaROG/lapx/actions/runs/18984608559/job/54225293380 | ||
@@ -104,17 +109,17 @@ ``` | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.64 x slower | ||
| * lapjv : ✅ Passed 🐌 5.53 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.68 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.81 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.56 x slower | ||
| * lapjvc : ✅ Passed 🐌 1.53 x slower | ||
| * lapjv : ✅ Passed 🐌 5.17 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.58 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.69 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.41 x slower | ||
| * lapjvsa : ✅ Passed 🐌 3.36 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. scipy ⭐ : 0.00001024s | ||
| 2. lapjvc : 0.00001677s | ||
| 3. lapjvxa : 0.00001853s | ||
| 4. lapjvx : 0.00002740s | ||
| 5. lapjvsa : 0.00003439s | ||
| 6. lapjvs : 0.00003648s | ||
| 7. lapjv : 0.00005660s | ||
| 1. scipy ⭐ : 0.00001055s | ||
| 2. lapjvc : 0.00001615s | ||
| 3. lapjvxa : 0.00001785s | ||
| 4. lapjvx : 0.00002719s | ||
| 5. lapjvsa : 0.00003547s | ||
| 6. lapjvs : 0.00003601s | ||
| 7. lapjv : 0.00005454s | ||
| ------------------------------- | ||
@@ -125,17 +130,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 2.03 x slower | ||
| * lapjv : ✅ Passed 🐌 5.72 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.28 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.75 x slower | ||
| * lapjvs : ✅ Passed 🐌 2.7 x slower | ||
| * lapjvsa : ✅ Passed 🐌 1.04 x slower | ||
| * lapjvc : ✅ Passed 🐌 1.65 x slower | ||
| * lapjv : ✅ Passed 🐌 4.75 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.0 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.98 x slower | ||
| * lapjvs : ✅ Passed 🐌 2.43 x slower | ||
| * lapjvsa : ✅ Passed 🏆 1.11 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. scipy ⭐ : 0.00000664s | ||
| 2. lapjvsa : 0.00000690s | ||
| 3. lapjvxa : 0.00001165s | ||
| 4. lapjvc : 0.00001346s | ||
| 5. lapjvx : 0.00001517s | ||
| 6. lapjvs : 0.00001796s | ||
| 7. lapjv : 0.00003800s | ||
| 1. lapjvsa : 0.00000672s | ||
| 2. scipy ⭐ : 0.00000748s | ||
| 3. lapjvc : 0.00001232s | ||
| 4. lapjvxa : 0.00001484s | ||
| 5. lapjvx : 0.00001497s | ||
| 6. lapjvs : 0.00001819s | ||
| 7. lapjv : 0.00003557s | ||
| ------------------------------- | ||
@@ -146,17 +151,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 2.3 x slower | ||
| * lapjv : ✅ Passed 🐌 9.53 x slower | ||
| * lapjvx : ✅ Passed 🐌 3.62 x slower | ||
| * lapjvxa : ✅ Passed 🐌 3.04 x slower | ||
| * lapjvs : ✅ Passed 🐌 4.63 x slower | ||
| * lapjvsa : ✅ Passed 🐌 5.32 x slower | ||
| * lapjvc : ✅ Passed 🐌 2.16 x slower | ||
| * lapjv : ✅ Passed 🐌 9.32 x slower | ||
| * lapjvx : ✅ Passed 🐌 4.22 x slower | ||
| * lapjvxa : ✅ Passed 🐌 2.93 x slower | ||
| * lapjvs : ✅ Passed 🐌 4.38 x slower | ||
| * lapjvsa : ✅ Passed 🐌 4.89 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. scipy ⭐ : 0.00000537s | ||
| 2. lapjvc : 0.00001233s | ||
| 3. lapjvxa : 0.00001631s | ||
| 4. lapjvx : 0.00001947s | ||
| 5. lapjvs : 0.00002489s | ||
| 6. lapjvsa : 0.00002857s | ||
| 7. lapjv : 0.00005116s | ||
| 1. scipy ⭐ : 0.00000558s | ||
| 2. lapjvc : 0.00001205s | ||
| 3. lapjvxa : 0.00001633s | ||
| 4. lapjvx : 0.00002352s | ||
| 5. lapjvs : 0.00002447s | ||
| 6. lapjvsa : 0.00002730s | ||
| 7. lapjv : 0.00005201s | ||
| ------------------------------- | ||
@@ -167,17 +172,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.94 x slower | ||
| * lapjv : ✅ Passed 🐌 1.34 x slower | ||
| * lapjvx : ✅ Passed 🏆 1.15 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.41 x faster | ||
| * lapjvs : ✅ Passed 🐌 1.98 x slower | ||
| * lapjvsa : ✅ Passed 🏆 1.13 x faster | ||
| * lapjvc : ✅ Passed 🐌 1.41 x slower | ||
| * lapjv : ✅ Passed 🐌 1.04 x slower | ||
| * lapjvx : ✅ Passed 🏆 1.42 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.77 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.37 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.5 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.00003351s | ||
| 2. lapjvx : 0.00004110s | ||
| 3. lapjvsa : 0.00004164s | ||
| 4. scipy ⭐ : 0.00004721s | ||
| 5. lapjv : 0.00006310s | ||
| 6. lapjvc : 0.00009150s | ||
| 7. lapjvs : 0.00009357s | ||
| 1. lapjvxa : 0.00003649s | ||
| 2. lapjvsa : 0.00004314s | ||
| 3. lapjvx : 0.00004534s | ||
| 4. lapjvs : 0.00004701s | ||
| 5. scipy ⭐ : 0.00006450s | ||
| 6. lapjv : 0.00006738s | ||
| 7. lapjvc : 0.00009109s | ||
| ------------------------------- | ||
@@ -188,17 +193,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🏆 1.43 x faster | ||
| * lapjv : ✅ Passed 🏆 1.44 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.25 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.94 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.27 x faster | ||
| * lapjvsa : ✅ Passed 🏆 3.99 x faster | ||
| * lapjvc : ✅ Passed 🏆 1.21 x faster | ||
| * lapjv : ✅ Passed 🏆 1.03 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.54 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.0 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.46 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.49 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvsa : 0.00002166s | ||
| 2. lapjvxa : 0.00002932s | ||
| 3. lapjvs : 0.00003803s | ||
| 4. lapjvx : 0.00003831s | ||
| 5. lapjv : 0.00005988s | ||
| 6. lapjvc : 0.00006028s | ||
| 7. scipy ⭐ : 0.00008634s | ||
| 1. lapjvsa : 0.00002653s | ||
| 2. lapjvxa : 0.00003293s | ||
| 3. lapjvx : 0.00004293s | ||
| 4. lapjvs : 0.00004526s | ||
| 5. lapjvc : 0.00005453s | ||
| 6. lapjv : 0.00006407s | ||
| 7. scipy ⭐ : 0.00006601s | ||
| ------------------------------- | ||
@@ -209,17 +214,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.22 x slower | ||
| * lapjv : ✅ Passed 🏆 1.07 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.54 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.88 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.63 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.47 x faster | ||
| * lapjvc : ✅ Passed 🐌 1.45 x slower | ||
| * lapjv : ✅ Passed 🏆 1.11 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.79 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.13 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.73 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.41 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.00004026s | ||
| 2. lapjvs : 0.00004654s | ||
| 3. lapjvx : 0.00004924s | ||
| 4. lapjvsa : 0.00005152s | ||
| 5. lapjv : 0.00007051s | ||
| 6. scipy ⭐ : 0.00007566s | ||
| 7. lapjvc : 0.00009201s | ||
| 1. lapjvxa : 0.00003505s | ||
| 2. lapjvx : 0.00004173s | ||
| 3. lapjvs : 0.00004336s | ||
| 4. lapjvsa : 0.00005296s | ||
| 5. lapjv : 0.00006746s | ||
| 6. scipy ⭐ : 0.00007480s | ||
| 7. lapjvc : 0.00010855s | ||
| ------------------------------- | ||
@@ -230,17 +235,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 4.97 x slower | ||
| * lapjv : ✅ Passed 🏆 2.02 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.34 x faster | ||
| * lapjvxa : ✅ Passed 🏆 3.09 x faster | ||
| * lapjvs : ✅ Passed 🏆 3.95 x faster | ||
| * lapjvsa : ✅ Passed 🏆 3.89 x faster | ||
| * lapjvc : ✅ Passed 🐌 3.8 x slower | ||
| * lapjv : ✅ Passed 🏆 2.17 x faster | ||
| * lapjvx : ✅ Passed 🏆 3.0 x faster | ||
| * lapjvxa : ✅ Passed 🏆 4.33 x faster | ||
| * lapjvs : ✅ Passed 🏆 4.67 x faster | ||
| * lapjvsa : ✅ Passed 🏆 4.63 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvs : 0.00116294s | ||
| 2. lapjvsa : 0.00117967s | ||
| 3. lapjvxa : 0.00148459s | ||
| 4. lapjvx : 0.00196006s | ||
| 5. lapjv : 0.00227485s | ||
| 6. scipy ⭐ : 0.00458916s | ||
| 7. lapjvc : 0.02279758s | ||
| 1. lapjvs : 0.00102049s | ||
| 2. lapjvsa : 0.00102816s | ||
| 3. lapjvxa : 0.00110050s | ||
| 4. lapjvx : 0.00158621s | ||
| 5. lapjv : 0.00219654s | ||
| 6. scipy ⭐ : 0.00476269s | ||
| 7. lapjvc : 0.01809850s | ||
| ------------------------------- | ||
@@ -251,17 +256,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🏆 1.46 x faster | ||
| * lapjv : ✅ Passed 🏆 1.19 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.19 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.21 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.58 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.6 x faster | ||
| * lapjvc : ✅ Passed 🏆 1.24 x faster | ||
| * lapjv : ✅ Passed 🏆 1.07 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.4 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.41 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.43 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.44 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvsa : 0.00610949s | ||
| 2. lapjvs : 0.00617076s | ||
| 3. lapjvc : 0.00669765s | ||
| 4. lapjvxa : 0.00807378s | ||
| 5. lapjvx : 0.00817340s | ||
| 6. lapjv : 0.00822582s | ||
| 7. scipy ⭐ : 0.00976096s | ||
| 1. lapjvsa : 0.00626876s | ||
| 2. lapjvs : 0.00630340s | ||
| 3. lapjvxa : 0.00640438s | ||
| 4. lapjvx : 0.00642594s | ||
| 5. lapjvc : 0.00729600s | ||
| 6. lapjv : 0.00845927s | ||
| 7. scipy ⭐ : 0.00901426s | ||
| ------------------------------- | ||
@@ -272,17 +277,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 4.66 x slower | ||
| * lapjv : ✅ Passed 🏆 2.07 x faster | ||
| * lapjvx : ✅ Passed 🏆 3.44 x faster | ||
| * lapjvxa : ✅ Passed 🏆 3.42 x faster | ||
| * lapjvs : ✅ Passed 🏆 4.27 x faster | ||
| * lapjvsa : ✅ Passed 🏆 4.37 x faster | ||
| * lapjvc : ✅ Passed 🐌 4.58 x slower | ||
| * lapjv : ✅ Passed 🏆 2.04 x faster | ||
| * lapjvx : ✅ Passed 🏆 4.01 x faster | ||
| * lapjvxa : ✅ Passed 🏆 4.14 x faster | ||
| * lapjvs : ✅ Passed 🏆 4.42 x faster | ||
| * lapjvsa : ✅ Passed 🏆 4.49 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvsa : 0.00128856s | ||
| 2. lapjvs : 0.00131904s | ||
| 3. lapjvx : 0.00163978s | ||
| 4. lapjvxa : 0.00164817s | ||
| 5. lapjv : 0.00272247s | ||
| 6. scipy ⭐ : 0.00563737s | ||
| 7. lapjvc : 0.02625532s | ||
| 1. lapjvsa : 0.00114634s | ||
| 2. lapjvs : 0.00116270s | ||
| 3. lapjvxa : 0.00124253s | ||
| 4. lapjvx : 0.00128147s | ||
| 5. lapjv : 0.00252214s | ||
| 6. scipy ⭐ : 0.00514199s | ||
| 7. lapjvc : 0.02353958s | ||
| ------------------------------- | ||
@@ -293,17 +298,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 257.47 x slower | ||
| * lapjvc : ✅ Passed 🐌 228.4 x slower | ||
| * lapjv : ✅ Passed 🏆 1.09 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.09 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.08 x faster | ||
| * lapjvs : ✅ Passed 🐌 1.11 x slower | ||
| * lapjvsa : ✅ Passed 🐌 1.1 x slower | ||
| * lapjvx : ✅ Passed 🏆 1.24 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.25 x faster | ||
| * lapjvs : ✅ Passed 🐌 1.1 x slower | ||
| * lapjvsa : ✅ Passed 🐌 1.12 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvx : 0.09400308s | ||
| 2. lapjv : 0.09424586s | ||
| 3. lapjvxa : 0.09509772s | ||
| 4. scipy ⭐ : 0.10258777s | ||
| 5. lapjvsa : 0.11241747s | ||
| 6. lapjvs : 0.11372150s | ||
| 7. lapjvc : 26.41295845s | ||
| 1. lapjvxa : 0.08090518s | ||
| 2. lapjvx : 0.08157910s | ||
| 3. lapjv : 0.09252072s | ||
| 4. scipy ⭐ : 0.10097560s | ||
| 5. lapjvs : 0.11067445s | ||
| 6. lapjvsa : 0.11269509s | ||
| 7. lapjvc : 23.06289135s | ||
| ------------------------------- | ||
@@ -314,17 +319,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.02 x slower | ||
| * lapjv : ✅ Passed 🐌 1.62 x slower | ||
| * lapjvx : ✅ Passed 🐌 1.62 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.62 x slower | ||
| * lapjvs : ✅ Passed 🏆 1.76 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.76 x faster | ||
| * lapjvc : ✅ Passed 🏆 1.33 x faster | ||
| * lapjv : ✅ Passed 🐌 1.02 x slower | ||
| * lapjvx : ✅ Passed 🏆 1.42 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.42 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.3 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.3 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvs : 1.34793133s | ||
| 2. lapjvsa : 1.34966543s | ||
| 3. scipy ⭐ : 2.37237136s | ||
| 4. lapjvc : 2.41397720s | ||
| 5. lapjvxa : 3.84284193s | ||
| 6. lapjvx : 3.84922083s | ||
| 7. lapjv : 3.85101395s | ||
| 1. lapjvsa : 0.97763377s | ||
| 2. lapjvs : 0.97772870s | ||
| 3. lapjvx : 1.58527767s | ||
| 4. lapjvxa : 1.58615075s | ||
| 5. lapjvc : 1.69588961s | ||
| 6. scipy ⭐ : 2.24908994s | ||
| 7. lapjv : 2.28853597s | ||
| ------------------------------- | ||
@@ -335,17 +340,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 273.6 x slower | ||
| * lapjv : ✅ Passed 🏆 2.03 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.04 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.05 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.59 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.63 x faster | ||
| * lapjvc : ✅ Passed 🐌 217.35 x slower | ||
| * lapjv : ✅ Passed 🏆 2.07 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.4 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.42 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.65 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.64 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.19721307s | ||
| 2. lapjvx : 0.19786040s | ||
| 3. lapjv : 0.19958704s | ||
| 4. lapjvsa : 0.24835583s | ||
| 5. lapjvs : 0.25347052s | ||
| 6. scipy ⭐ : 0.40418303s | ||
| 7. lapjvc : 110.58635478s | ||
| 1. lapjvxa : 0.16547692s | ||
| 2. lapjvx : 0.16660061s | ||
| 3. lapjv : 0.19367327s | ||
| 4. lapjvs : 0.24263112s | ||
| 5. lapjvsa : 0.24431215s | ||
| 6. scipy ⭐ : 0.40064618s | ||
| 7. lapjvc : 87.08241680s | ||
| ------------------------------- | ||
@@ -358,3 +363,3 @@ ``` | ||
| https://github.com/rathaROG/lapx/actions/runs/18961613065/job/54149890234 | ||
| https://github.com/rathaROG/lapx/actions/runs/18984608559/job/54225293427 | ||
@@ -365,17 +370,17 @@ ``` | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.71 x slower | ||
| * lapjv : ✅ Passed 🐌 5.14 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.32 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.57 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.57 x slower | ||
| * lapjvsa : ✅ Passed 🐌 3.25 x slower | ||
| * lapjvc : ✅ Passed 🐌 2.43 x slower | ||
| * lapjv : ✅ Passed 🐌 5.19 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.7 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.48 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.41 x slower | ||
| * lapjvsa : ✅ Passed 🐌 3.07 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. scipy ⭐ : 0.00000567s | ||
| 2. lapjvxa : 0.00000888s | ||
| 3. lapjvc : 0.00000971s | ||
| 4. lapjvx : 0.00001317s | ||
| 5. lapjvsa : 0.00001842s | ||
| 6. lapjvs : 0.00002025s | ||
| 7. lapjv : 0.00002913s | ||
| 1. scipy ⭐ : 0.00000625s | ||
| 2. lapjvxa : 0.00000925s | ||
| 3. lapjvc : 0.00001521s | ||
| 4. lapjvx : 0.00001688s | ||
| 5. lapjvsa : 0.00001917s | ||
| 6. lapjvs : 0.00002129s | ||
| 7. lapjv : 0.00003242s | ||
| ------------------------------- | ||
@@ -386,17 +391,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.83 x slower | ||
| * lapjv : ✅ Passed 🐌 4.14 x slower | ||
| * lapjvx : ✅ Passed 🐌 1.83 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.59 x slower | ||
| * lapjvs : ✅ Passed 🐌 2.01 x slower | ||
| * lapjvsa : ✅ Passed 🏆 1.32 x faster | ||
| * lapjvc : ✅ Passed 🐌 3.64 x slower | ||
| * lapjv : ✅ Passed 🐌 4.33 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.05 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.41 x slower | ||
| * lapjvs : ✅ Passed 🐌 2.8 x slower | ||
| * lapjvsa : ✅ Passed 🏆 1.17 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvsa : 0.00000283s | ||
| 2. scipy ⭐ : 0.00000375s | ||
| 3. lapjvxa : 0.00000596s | ||
| 4. lapjvc : 0.00000687s | ||
| 5. lapjvx : 0.00000687s | ||
| 6. lapjvs : 0.00000754s | ||
| 7. lapjv : 0.00001554s | ||
| 1. lapjvsa : 0.00000346s | ||
| 2. scipy ⭐ : 0.00000404s | ||
| 3. lapjvxa : 0.00000571s | ||
| 4. lapjvx : 0.00000829s | ||
| 5. lapjvs : 0.00001133s | ||
| 6. lapjvc : 0.00001471s | ||
| 7. lapjv : 0.00001750s | ||
| ------------------------------- | ||
@@ -407,17 +412,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 2.66 x slower | ||
| * lapjv : ✅ Passed 🐌 5.96 x slower | ||
| * lapjvx : ✅ Passed 🐌 3.03 x slower | ||
| * lapjvxa : ✅ Passed 🐌 2.6 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.77 x slower | ||
| * lapjvsa : ✅ Passed 🐌 4.37 x slower | ||
| * lapjvc : ✅ Passed 🐌 2.76 x slower | ||
| * lapjv : ✅ Passed 🐌 5.61 x slower | ||
| * lapjvx : ✅ Passed 🐌 2.84 x slower | ||
| * lapjvxa : ✅ Passed 🐌 2.87 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.57 x slower | ||
| * lapjvsa : ✅ Passed 🐌 4.02 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. scipy ⭐ : 0.00000292s | ||
| 2. lapjvxa : 0.00000758s | ||
| 3. lapjvc : 0.00000775s | ||
| 4. lapjvx : 0.00000883s | ||
| 5. lapjvs : 0.00001100s | ||
| 6. lapjvsa : 0.00001275s | ||
| 7. lapjv : 0.00001738s | ||
| 1. scipy ⭐ : 0.00000333s | ||
| 2. lapjvc : 0.00000921s | ||
| 3. lapjvx : 0.00000946s | ||
| 4. lapjvxa : 0.00000958s | ||
| 5. lapjvs : 0.00001192s | ||
| 6. lapjvsa : 0.00001342s | ||
| 7. lapjv : 0.00001871s | ||
| ------------------------------- | ||
@@ -428,17 +433,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 2.19 x slower | ||
| * lapjv : ✅ Passed 🏆 1.34 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.83 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.24 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.74 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.22 x faster | ||
| * lapjvc : ✅ Passed 🐌 1.9 x slower | ||
| * lapjv : ✅ Passed 🏆 1.31 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.86 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.32 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.63 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.91 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.00001796s | ||
| 2. lapjvx : 0.00002200s | ||
| 3. lapjvs : 0.00002308s | ||
| 4. lapjv : 0.00002992s | ||
| 5. lapjvsa : 0.00003283s | ||
| 6. scipy ⭐ : 0.00004017s | ||
| 7. lapjvc : 0.00008783s | ||
| 1. lapjvxa : 0.00002146s | ||
| 2. lapjvsa : 0.00002608s | ||
| 3. lapjvx : 0.00002679s | ||
| 4. lapjvs : 0.00003046s | ||
| 5. lapjv : 0.00003796s | ||
| 6. scipy ⭐ : 0.00004979s | ||
| 7. lapjvc : 0.00009475s | ||
| ------------------------------- | ||
@@ -449,17 +454,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🏆 1.16 x faster | ||
| * lapjv : ✅ Passed 🏆 2.13 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.69 x faster | ||
| * lapjvxa : ✅ Passed 🏆 3.29 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.35 x faster | ||
| * lapjvsa : ✅ Passed 🏆 3.41 x faster | ||
| * lapjvc : ✅ Passed 🐌 1.61 x slower | ||
| * lapjv : ✅ Passed 🏆 1.41 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.05 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.66 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.69 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.87 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvsa : 0.00002146s | ||
| 2. lapjvxa : 0.00002221s | ||
| 3. lapjvx : 0.00002721s | ||
| 4. lapjvs : 0.00003108s | ||
| 5. lapjv : 0.00003437s | ||
| 6. lapjvc : 0.00006300s | ||
| 7. scipy ⭐ : 0.00007317s | ||
| 1. lapjvsa : 0.00001892s | ||
| 2. lapjvxa : 0.00002046s | ||
| 3. lapjvx : 0.00002650s | ||
| 4. lapjvs : 0.00003208s | ||
| 5. lapjv : 0.00003850s | ||
| 6. scipy ⭐ : 0.00005437s | ||
| 7. lapjvc : 0.00008771s | ||
| ------------------------------- | ||
@@ -470,17 +475,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 2.31 x slower | ||
| * lapjv : ✅ Passed 🏆 1.15 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.54 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.89 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.47 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.53 x faster | ||
| * lapjvc : ✅ Passed 🐌 2.02 x slower | ||
| * lapjv : ✅ Passed 🏆 1.27 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.86 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.27 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.63 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.83 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.00001904s | ||
| 2. lapjvx : 0.00002342s | ||
| 3. lapjvsa : 0.00002350s | ||
| 4. lapjvs : 0.00002458s | ||
| 5. lapjv : 0.00003133s | ||
| 6. scipy ⭐ : 0.00003604s | ||
| 7. lapjvc : 0.00008329s | ||
| 1. lapjvxa : 0.00002217s | ||
| 2. lapjvx : 0.00002708s | ||
| 3. lapjvsa : 0.00002746s | ||
| 4. lapjvs : 0.00003079s | ||
| 5. lapjv : 0.00003954s | ||
| 6. scipy ⭐ : 0.00005025s | ||
| 7. lapjvc : 0.00010162s | ||
| ------------------------------- | ||
@@ -491,17 +496,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 6.57 x slower | ||
| * lapjv : ✅ Passed 🏆 3.31 x faster | ||
| * lapjvx : ✅ Passed 🏆 3.69 x faster | ||
| * lapjvxa : ✅ Passed 🏆 3.81 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.86 x faster | ||
| * lapjvsa : ✅ Passed 🏆 3.05 x faster | ||
| * lapjvc : ✅ Passed 🐌 7.42 x slower | ||
| * lapjv : ✅ Passed 🏆 2.99 x faster | ||
| * lapjvx : ✅ Passed 🏆 3.78 x faster | ||
| * lapjvxa : ✅ Passed 🏆 3.7 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.95 x faster | ||
| * lapjvsa : ✅ Passed 🏆 3.03 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.00073746s | ||
| 2. lapjvx : 0.00076150s | ||
| 3. lapjv : 0.00085017s | ||
| 4. lapjvsa : 0.00092200s | ||
| 5. lapjvs : 0.00098329s | ||
| 6. scipy ⭐ : 0.00281312s | ||
| 7. lapjvc : 0.01849563s | ||
| 1. lapjvx : 0.00077642s | ||
| 2. lapjvxa : 0.00079467s | ||
| 3. lapjvsa : 0.00096896s | ||
| 4. lapjv : 0.00098104s | ||
| 5. lapjvs : 0.00099629s | ||
| 6. scipy ⭐ : 0.00293721s | ||
| 7. lapjvc : 0.02179187s | ||
| ------------------------------- | ||
@@ -512,17 +517,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.37 x slower | ||
| * lapjv : ✅ Passed 🏆 1.09 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.11 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.18 x faster | ||
| * lapjvs : ✅ Passed 🏆 1.06 x faster | ||
| * lapjvsa : ✅ Passed 🏆 1.04 x faster | ||
| * lapjvc : ✅ Passed 🐌 1.22 x slower | ||
| * lapjv : ✅ Passed 🏆 2.3 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.42 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.39 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.04 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.17 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.00629000s | ||
| 2. lapjvx : 0.00669658s | ||
| 3. lapjv : 0.00680446s | ||
| 4. lapjvs : 0.00702367s | ||
| 5. lapjvsa : 0.00716958s | ||
| 6. scipy ⭐ : 0.00744075s | ||
| 7. lapjvc : 0.01022246s | ||
| 1. lapjvx : 0.00280804s | ||
| 2. lapjvxa : 0.00284217s | ||
| 3. lapjv : 0.00295687s | ||
| 4. lapjvsa : 0.00313346s | ||
| 5. lapjvs : 0.00333604s | ||
| 6. scipy ⭐ : 0.00679725s | ||
| 7. lapjvc : 0.00829000s | ||
| ------------------------------- | ||
@@ -533,17 +538,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 5.87 x slower | ||
| * lapjv : ✅ Passed 🏆 2.05 x faster | ||
| * lapjvx : ✅ Passed 🏆 3.86 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.83 x faster | ||
| * lapjvs : ✅ Passed 🏆 3.29 x faster | ||
| * lapjvsa : ✅ Passed 🏆 3.35 x faster | ||
| * lapjvc : ✅ Passed 🐌 6.83 x slower | ||
| * lapjv : ✅ Passed 🏆 2.53 x faster | ||
| * lapjvx : ✅ Passed 🏆 3.11 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.9 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.57 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.52 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvx : 0.00102792s | ||
| 2. lapjvsa : 0.00118358s | ||
| 3. lapjvs : 0.00120550s | ||
| 4. lapjvxa : 0.00140042s | ||
| 5. lapjv : 0.00192958s | ||
| 6. scipy ⭐ : 0.00396425s | ||
| 7. lapjvc : 0.02326779s | ||
| 1. lapjvx : 0.00112583s | ||
| 2. lapjvxa : 0.00120517s | ||
| 3. lapjvs : 0.00136033s | ||
| 4. lapjv : 0.00138454s | ||
| 5. lapjvsa : 0.00138996s | ||
| 6. scipy ⭐ : 0.00349771s | ||
| 7. lapjvc : 0.02389029s | ||
| ------------------------------- | ||
@@ -554,17 +559,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 349.67 x slower | ||
| * lapjv : ✅ Passed 🐌 3.52 x slower | ||
| * lapjvx : ✅ Passed 🐌 1.43 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.45 x slower | ||
| * lapjvs : ✅ Passed 🐌 3.18 x slower | ||
| * lapjvsa : ✅ Passed 🐌 2.84 x slower | ||
| * lapjvc : ✅ Passed 🐌 303.21 x slower | ||
| * lapjv : ✅ Passed 🐌 4.3 x slower | ||
| * lapjvx : ✅ Passed 🐌 1.65 x slower | ||
| * lapjvxa : ✅ Passed 🐌 1.37 x slower | ||
| * lapjvs : ✅ Passed 🐌 2.67 x slower | ||
| * lapjvsa : ✅ Passed 🐌 3.06 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. scipy ⭐ : 0.07873675s | ||
| 2. lapjvx : 0.11293429s | ||
| 3. lapjvxa : 0.11401713s | ||
| 4. lapjvsa : 0.22370100s | ||
| 5. lapjvs : 0.25039458s | ||
| 6. lapjv : 0.27737229s | ||
| 7. lapjvc : 27.53160242s | ||
| 1. scipy ⭐ : 0.08946058s | ||
| 2. lapjvxa : 0.12264183s | ||
| 3. lapjvx : 0.14727325s | ||
| 4. lapjvs : 0.23845862s | ||
| 5. lapjvsa : 0.27356104s | ||
| 6. lapjv : 0.38505029s | ||
| 7. lapjvc : 27.12492925s | ||
| ------------------------------- | ||
@@ -575,17 +580,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 1.35 x slower | ||
| * lapjv : ✅ Passed 🏆 2.36 x faster | ||
| * lapjvx : ✅ Passed 🏆 2.15 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.23 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.52 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.89 x faster | ||
| * lapjvc : ✅ Passed 🐌 1.61 x slower | ||
| * lapjv : ✅ Passed 🏆 1.6 x faster | ||
| * lapjvx : ✅ Passed 🏆 1.86 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.89 x faster | ||
| * lapjvs : ✅ Passed 🏆 2.06 x faster | ||
| * lapjvsa : ✅ Passed 🏆 2.6 x faster | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvsa : 0.90473763s | ||
| 2. lapjvs : 1.03581571s | ||
| 3. lapjv : 1.10758192s | ||
| 4. lapjvxa : 1.17104621s | ||
| 5. lapjvx : 1.21566396s | ||
| 6. scipy ⭐ : 2.61083117s | ||
| 7. lapjvc : 3.53453567s | ||
| 1. lapjvsa : 0.94210742s | ||
| 2. lapjvs : 1.19320912s | ||
| 3. lapjvxa : 1.29491192s | ||
| 4. lapjvx : 1.32094417s | ||
| 5. lapjv : 1.53106525s | ||
| 6. scipy ⭐ : 2.45269650s | ||
| 7. lapjvc : 3.93988354s | ||
| ------------------------------- | ||
@@ -596,17 +601,17 @@ | ||
| ----------------------------------------- | ||
| * lapjvc : ✅ Passed 🐌 308.42 x slower | ||
| * lapjv : ✅ Passed 🐌 2.1 x slower | ||
| * lapjvx : ✅ Passed 🏆 1.32 x faster | ||
| * lapjvxa : ✅ Passed 🏆 1.87 x faster | ||
| * lapjvs : ✅ Passed 🐌 2.3 x slower | ||
| * lapjvsa : ✅ Passed 🐌 1.98 x slower | ||
| * lapjvc : ✅ Passed 🐌 313.47 x slower | ||
| * lapjv : ✅ Passed 🐌 1.34 x slower | ||
| * lapjvx : ✅ Passed 🏆 1.46 x faster | ||
| * lapjvxa : ✅ Passed 🏆 2.76 x faster | ||
| * lapjvs : ✅ Passed 🐌 1.47 x slower | ||
| * lapjvsa : ✅ Passed 🐌 1.38 x slower | ||
| ----- 🎉 SPEED RANKING 🎉 ----- | ||
| 1. lapjvxa : 0.18422821s | ||
| 2. lapjvx : 0.26109171s | ||
| 3. scipy ⭐ : 0.34502992s | ||
| 4. lapjvsa : 0.68365575s | ||
| 5. lapjv : 0.72371579s | ||
| 6. lapjvs : 0.79274062s | ||
| 7. lapjvc : 106.41484879s | ||
| 1. lapjvxa : 0.13445975s | ||
| 2. lapjvx : 0.25395613s | ||
| 3. scipy ⭐ : 0.37139379s | ||
| 4. lapjv : 0.49666225s | ||
| 5. lapjvsa : 0.51077446s | ||
| 6. lapjvs : 0.54710338s | ||
| 7. lapjvc : 116.42058821s | ||
| ------------------------------- | ||
@@ -647,3 +652,3 @@ ``` | ||
| scipy==1.16.3 | ||
| lapx @ git+https://github.com/rathaROG/lapx.git@ca0bbee8e319fe005c557d5a2bcce1148d89797c | ||
| lapx @ git+https://github.com/rathaROG/lapx.git@8e1a5c5cbe1a813d5ee80570b285e316fcc99f7a # 0.9.1 | ||
| ``` | ||
@@ -664,11 +669,11 @@ | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
| 10x10 | 0.000063s 4th | 0.000057s ✓ 2nd | 0.000063s ✓ 5th | 0.000069s ✓ 6th | 0.000061s ✓ 3rd | 0.000057s ✓ 1st | ||
| 25x20 | 0.000058s 4th | 0.000104s ✗ 6th | 0.000062s ✓ 5th | 0.000052s ✓ 2nd | 0.000058s ✓ 3rd | 0.000051s ✓ 1st | ||
| 50x50 | 0.000083s 4th | 0.000086s ✗ 5th | 0.000068s ✓ 3rd | 0.000058s ✓ 1st | 0.000103s ✓ 6th | 0.000063s ✓ 2nd | ||
| 100x150 | 0.000131s 2nd | 0.000828s ✗ 6th | 0.000132s ✓ 3rd | 0.000144s ✓ 4th | 0.000680s ✓ 5th | 0.000123s ✓ 1st | ||
| 250x250 | 0.001126s 4th | 0.001218s ✓ 5th | 0.000557s ✓ 2nd | 0.000537s ✓ 1st | 0.001516s ✓ 6th | 0.000605s ✓ 3rd | ||
| 550x500 | 0.003531s 4th | 0.011714s ✓ 5th | 0.001424s ✓ 2nd | 0.001358s ✓ 1st | 0.017545s ✓ 6th | 0.001511s ✓ 3rd | ||
| 1000x1000 | 0.022934s 4th | 0.026359s ✓ 5th | 0.010415s ✓ 2nd | 0.010320s ✓ 1st | 0.031669s ✓ 6th | 0.012068s ✓ 3rd | ||
| 2000x2500 | 0.034198s 4th | 1.627013s ✓ 6th | 0.013647s ✓ 1st | 0.015660s ✓ 2nd | 1.531048s ✓ 5th | 0.022275s ✓ 3rd | ||
| 5000x5000 | 1.095034s 3rd | 2.335637s ✓ 6th | 1.082954s ✓ 2nd | 1.103870s ✓ 4th | 1.140890s ✓ 5th | 0.496765s ✓ 1st | ||
| 10x10 | 0.000056s 3rd | 0.000056s ✗ 4th | 0.000061s ✓ 6th | 0.000050s ✓ 1st | 0.000060s ✓ 5th | 0.000052s ✓ 2nd | ||
| 25x20 | 0.000052s 3rd | 0.000061s ✗ 5th | 0.000061s ✓ 6th | 0.000049s ✓ 1st | 0.000056s ✓ 4th | 0.000051s ✓ 2nd | ||
| 50x50 | 0.000084s 4th | 0.000085s ✗ 5th | 0.000072s ✓ 2nd | 0.000063s ✓ 1st | 0.000105s ✓ 6th | 0.000073s ✓ 3rd | ||
| 100x150 | 0.000148s 4th | 0.000564s ✓ 5th | 0.000135s ✓ 3rd | 0.000110s ✓ 1st | 0.000671s ✓ 6th | 0.000120s ✓ 2nd | ||
| 250x250 | 0.001327s 4th | 0.001399s ✓ 5th | 0.000527s ✓ 2nd | 0.000510s ✓ 1st | 0.001417s ✓ 6th | 0.000579s ✓ 3rd | ||
| 550x500 | 0.003237s 4th | 0.011715s ✓ 5th | 0.001379s ✓ 2nd | 0.001344s ✓ 1st | 0.014237s ✓ 6th | 0.001463s ✓ 3rd | ||
| 1000x1000 | 0.023160s 5th | 0.020795s ✓ 4th | 0.006882s ✓ 2nd | 0.006875s ✓ 1st | 0.027876s ✓ 6th | 0.008634s ✓ 3rd | ||
| 2000x2500 | 0.036176s 4th | 1.683198s ✓ 5th | 0.014039s ✓ 1st | 0.015730s ✓ 2nd | 1.683977s ✓ 6th | 0.023320s ✓ 3rd | ||
| 5000x5000 | 1.116971s 4th | 1.807397s ✓ 6th | 0.811022s ✓ 2nd | 0.844260s ✓ 3rd | 1.125310s ✓ 5th | 0.421959s ✓ 1st | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
@@ -679,8 +684,8 @@ | ||
| 🎉 --------------------------- OVERALL RANKING --------------------------- 🎉 | ||
| 1. LAPX LAPJVS : 533.5186 ms | ✅ | 🥇x4 🥈x1 🥉x4 | ||
| 2. LAPX LAPJV : 1109.3228 ms | ✅ | 🥇x1 🥈x4 🥉x2 🏳️x2 | ||
| 3. LAPX LAPJVX : 1132.0674 ms | ✅ | 🥇x4 🥈x2 🚩x2 🥴x1 | ||
| 4. BASELINE SciPy : 1157.1577 ms | ⭐ | 🥈x1 🥉x1 🚩x7 | ||
| 5. LAPX LAPJVC : 2723.5708 ms | ✅ | 🥉x2 🏳️x3 🥴x4 | ||
| 6. LAPX LAPJV-IFT : 4003.0145 ms | ⚠️ | 🥈x1 🏳️x4 🥴x4 | ||
| 1. LAPX LAPJVS : 456.2515 ms | ✅ | 🥇x1 🥈x3 🥉x5 | ||
| 2. LAPX LAPJV : 834.1779 ms | ✅ | 🥇x1 🥈x5 🥉x1 🥴x2 | ||
| 3. LAPX LAPJVX : 868.9913 ms | ✅ | 🥇x7 🥈x1 🥉x1 | ||
| 4. BASELINE SciPy : 1181.2127 ms | ⭐ | 🥉x2 🚩x6 🏳️x1 | ||
| 5. LAPX LAPJVC : 2853.7103 ms | ✅ | 🚩x1 🏳️x2 🥴x6 | ||
| 6. LAPX LAPJV-IFT : 3525.2702 ms | ⚠️ | 🚩x2 🏳️x6 🥴x1 | ||
| 🎉 ------------------------------------------------------------------------- 🎉 | ||
@@ -696,11 +701,11 @@ | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
| 10x10 | 0.000048s 2nd | 0.000055s ✗ 5th | 0.000048s ✓ 3rd | 0.000039s ✓ 1st | 0.000054s ✓ 4th | 0.000055s ✓ 6th | ||
| 25x20 | 0.000048s 3rd | 0.000058s ✗ 6th | 0.000055s ✓ 4th | 0.000047s ✓ 2nd | 0.000055s ✓ 5th | 0.000047s ✓ 1st | ||
| 50x50 | 0.000077s 4th | 0.000080s ✗ 6th | 0.000057s ✓ 3rd | 0.000048s ✓ 1st | 0.000078s ✓ 5th | 0.000051s ✓ 2nd | ||
| 100x150 | 0.000112s 3rd | 0.000635s ✓ 6th | 0.000123s ✓ 4th | 0.000092s ✓ 1st | 0.000588s ✓ 5th | 0.000093s ✓ 2nd | ||
| 250x250 | 0.000991s 4th | 0.001352s ✓ 6th | 0.000536s ✓ 1st | 0.000536s ✓ 2nd | 0.001200s ✓ 5th | 0.000591s ✓ 3rd | ||
| 550x500 | 0.003480s 4th | 0.010844s ✓ 5th | 0.001426s ✓ 2nd | 0.001311s ✓ 1st | 0.016003s ✓ 6th | 0.001447s ✓ 3rd | ||
| 1000x1000 | 0.023240s 4th | 0.026984s ✓ 5th | 0.009923s ✓ 2nd | 0.009682s ✓ 1st | 0.027498s ✓ 6th | 0.011329s ✓ 3rd | ||
| 2000x2500 | 0.034578s 4th | 1.563681s ✓ 5th | 0.014135s ✓ 2nd | 0.014121s ✓ 1st | 1.596397s ✓ 6th | 0.022706s ✓ 3rd | ||
| 5000x5000 | 1.070328s 2nd | 3.315799s ✓ 6th | 1.622128s ✓ 4th | 1.628149s ✓ 5th | 1.100956s ✓ 3rd | 0.537018s ✓ 1st | ||
| 10x10 | 0.000058s 6th | 0.000050s ✗ 5th | 0.000048s ✓ 4th | 0.000039s ✓ 1st | 0.000045s ✓ 3rd | 0.000043s ✓ 2nd | ||
| 25x20 | 0.000045s 1st | 0.000064s ✗ 6th | 0.000056s ✓ 5th | 0.000048s ✓ 3rd | 0.000051s ✓ 4th | 0.000048s ✓ 2nd | ||
| 50x50 | 0.000080s 4th | 0.000091s ✗ 5th | 0.000077s ✓ 3rd | 0.000068s ✓ 1st | 0.000135s ✓ 6th | 0.000076s ✓ 2nd | ||
| 100x150 | 0.000149s 4th | 0.000717s ✓ 6th | 0.000123s ✓ 2nd | 0.000110s ✓ 1st | 0.000659s ✓ 5th | 0.000127s ✓ 3rd | ||
| 250x250 | 0.001166s 5th | 0.001069s ✓ 4th | 0.000287s ✓ 2nd | 0.000278s ✓ 1st | 0.002031s ✓ 6th | 0.000369s ✓ 3rd | ||
| 550x500 | 0.003814s 4th | 0.011264s ✓ 5th | 0.001395s ✓ 2nd | 0.001389s ✓ 1st | 0.016663s ✓ 6th | 0.001569s ✓ 3rd | ||
| 1000x1000 | 0.024046s 4th | 0.040244s ✓ 6th | 0.018605s ✓ 2nd | 0.018562s ✓ 1st | 0.030481s ✓ 5th | 0.020355s ✓ 3rd | ||
| 2000x2500 | 0.035922s 4th | 1.732549s ✓ 6th | 0.016267s ✓ 2nd | 0.014801s ✓ 1st | 1.713590s ✓ 5th | 0.023109s ✓ 3rd | ||
| 5000x5000 | 1.088875s 5th | 1.248801s ✓ 6th | 0.501658s ✓ 3rd | 0.484758s ✓ 2nd | 1.040230s ✓ 4th | 0.378295s ✓ 1st | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
@@ -711,8 +716,8 @@ | ||
| 🎉 --------------------------- OVERALL RANKING --------------------------- 🎉 | ||
| 1. LAPX LAPJVS : 573.3374 ms | ✅ | 🥇x2 🥈x2 🥉x4 🥴x1 | ||
| 2. BASELINE SciPy : 1132.9018 ms | ⭐ | 🥈x2 🥉x2 🚩x5 | ||
| 3. LAPX LAPJV : 1648.4320 ms | ✅ | 🥇x1 🥈x3 🥉x2 🚩x3 | ||
| 4. LAPX LAPJVX : 1654.0251 ms | ✅ | 🥇x6 🥈x2 🏳️x1 | ||
| 5. LAPX LAPJVC : 2742.8296 ms | ✅ | 🥉x1 🚩x1 🏳️x4 🥴x3 | ||
| 6. LAPX LAPJV-IFT : 4919.4888 ms | ⚠️ | 🏳️x4 🥴x5 | ||
| 1. LAPX LAPJVS : 423.9891 ms | ✅ | 🥇x1 🥈x3 🥉x5 | ||
| 2. LAPX LAPJVX : 520.0521 ms | ✅ | 🥇x7 🥈x1 🥉x1 | ||
| 3. LAPX LAPJV : 538.5159 ms | ✅ | 🥈x5 🥉x2 🚩x1 🏳️x1 | ||
| 4. BASELINE SciPy : 1154.1538 ms | ⭐ | 🥇x1 🚩x5 🏳️x2 🥴x1 | ||
| 5. LAPX LAPJVC : 2803.8866 ms | ✅ | 🥉x1 🚩x2 🏳️x3 🥴x3 | ||
| 6. LAPX LAPJV-IFT : 3034.8488 ms | ⚠️ | 🚩x1 🏳️x3 🥴x5 | ||
| 🎉 ------------------------------------------------------------------------- 🎉 | ||
@@ -728,11 +733,11 @@ | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
| 10x10 | 0.000051s 6th | 0.000045s ✓ 5th | 0.000045s ✓ 4th | 0.000040s ✓ 2nd | 0.000043s ✓ 3rd | 0.000039s ✓ 1st | ||
| 25x20 | 0.000043s 1st | 0.000055s ✓ 6th | 0.000054s ✓ 4th | 0.000046s ✓ 2nd | 0.000054s ✓ 5th | 0.000046s ✓ 3rd | ||
| 50x50 | 0.000070s 4th | 0.000076s ✓ 5th | 0.000060s ✓ 3rd | 0.000049s ✓ 1st | 0.000089s ✓ 6th | 0.000054s ✓ 2nd | ||
| 100x150 | 0.000113s 4th | 0.000646s ✓ 6th | 0.000103s ✓ 3rd | 0.000095s ✓ 2nd | 0.000616s ✓ 5th | 0.000095s ✓ 1st | ||
| 250x250 | 0.001064s 4th | 0.001522s ✓ 6th | 0.000643s ✓ 2nd | 0.000591s ✓ 1st | 0.001448s ✓ 5th | 0.000673s ✓ 3rd | ||
| 550x500 | 0.003672s 4th | 0.010797s ✓ 5th | 0.001429s ✓ 2nd | 0.001405s ✓ 1st | 0.015196s ✓ 6th | 0.001497s ✓ 3rd | ||
| 1000x1000 | 0.019571s 4th | 0.027457s ✓ 6th | 0.010368s ✓ 1st | 0.011375s ✓ 3rd | 0.024061s ✓ 5th | 0.010495s ✓ 2nd | ||
| 2000x2500 | 0.038530s 4th | 1.654156s ✓ 6th | 0.015500s ✓ 2nd | 0.014464s ✓ 1st | 1.561805s ✓ 5th | 0.022967s ✓ 3rd | ||
| 5000x5000 | 0.969325s 5th | 1.507703s ✓ 6th | 0.668259s ✓ 3rd | 0.656468s ✓ 2nd | 0.954102s ✓ 4th | 0.475278s ✓ 1st | ||
| 10x10 | 0.000056s 6th | 0.000045s ✓ 4th | 0.000046s ✓ 5th | 0.000040s ✓ 1st | 0.000044s ✓ 3rd | 0.000041s ✓ 2nd | ||
| 25x20 | 0.000045s 1st | 0.000063s ✓ 6th | 0.000058s ✓ 5th | 0.000047s ✓ 2nd | 0.000057s ✓ 4th | 0.000049s ✓ 3rd | ||
| 50x50 | 0.000082s 4th | 0.000082s ✓ 5th | 0.000067s ✓ 3rd | 0.000059s ✓ 1st | 0.000102s ✓ 6th | 0.000061s ✓ 2nd | ||
| 100x150 | 0.000145s 3rd | 0.000699s ✓ 6th | 0.000153s ✓ 4th | 0.000108s ✓ 1st | 0.000661s ✓ 5th | 0.000117s ✓ 2nd | ||
| 250x250 | 0.001362s 5th | 0.001290s ✓ 4th | 0.000435s ✓ 1st | 0.000468s ✓ 2nd | 0.001533s ✓ 6th | 0.000469s ✓ 3rd | ||
| 550x500 | 0.003384s 4th | 0.011987s ✓ 5th | 0.001601s ✓ 3rd | 0.001488s ✓ 1st | 0.016294s ✓ 6th | 0.001589s ✓ 2nd | ||
| 1000x1000 | 0.022760s 4th | 0.041152s ✓ 6th | 0.017574s ✓ 1st | 0.018122s ✓ 2nd | 0.027452s ✓ 5th | 0.019619s ✓ 3rd | ||
| 2000x2500 | 0.035389s 4th | 1.731252s ✓ 6th | 0.016473s ✓ 2nd | 0.015285s ✓ 1st | 1.536600s ✓ 5th | 0.023138s ✓ 3rd | ||
| 5000x5000 | 1.090672s 5th | 1.585820s ✓ 6th | 0.698390s ✓ 3rd | 0.681333s ✓ 2nd | 1.086945s ✓ 4th | 0.527087s ✓ 1st | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
@@ -743,8 +748,8 @@ | ||
| 🎉 --------------------------- OVERALL RANKING --------------------------- 🎉 | ||
| 1. LAPX LAPJVS : 511.1457 ms | ✅ | 🥇x3 🥈x2 🥉x4 | ||
| 2. LAPX LAPJVX : 684.5318 ms | ✅ | 🥇x4 🥈x4 🥉x1 | ||
| 3. LAPX LAPJV : 696.4601 ms | ✅ | 🥇x1 🥈x3 🥉x3 🚩x2 | ||
| 4. BASELINE SciPy : 1032.4388 ms | ⭐ | 🥇x1 🚩x6 🏳️x1 🥴x1 | ||
| 5. LAPX LAPJVC : 2557.4136 ms | ✅ | 🥉x1 🚩x1 🏳️x5 🥴x2 | ||
| 6. LAPX LAPJV-IFT : 3202.4579 ms | ✅ | 🏳️x3 🥴x6 | ||
| 1. LAPX LAPJVS : 572.1697 ms | ✅ | 🥇x1 🥈x4 🥉x4 | ||
| 2. LAPX LAPJVX : 716.9497 ms | ✅ | 🥇x5 🥈x4 | ||
| 3. LAPX LAPJV : 734.7961 ms | ✅ | 🥇x2 🥈x1 🥉x3 🚩x1 🏳️x2 | ||
| 4. BASELINE SciPy : 1153.8943 ms | ⭐ | 🥇x1 🥉x1 🚩x4 🏳️x2 🥴x1 | ||
| 5. LAPX LAPJVC : 2669.6871 ms | ✅ | 🥉x1 🚩x2 🏳️x3 🥴x3 | ||
| 6. LAPX LAPJV-IFT : 3372.3898 ms | ✅ | 🚩x2 🏳️x2 🥴x5 | ||
| 🎉 ------------------------------------------------------------------------- 🎉 | ||
@@ -760,11 +765,11 @@ | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
| 10x10 | 0.000049s 6th | 0.000046s ✓ 4th | 0.000046s ✓ 5th | 0.000038s ✓ 1st | 0.000044s ✓ 3rd | 0.000041s ✓ 2nd | ||
| 25x20 | 0.000040s 1st | 0.000055s ✓ 6th | 0.000052s ✓ 5th | 0.000043s ✓ 2nd | 0.000051s ✓ 4th | 0.000045s ✓ 3rd | ||
| 50x50 | 0.000067s 4th | 0.000074s ✓ 5th | 0.000058s ✓ 3rd | 0.000053s ✓ 2nd | 0.000081s ✓ 6th | 0.000053s ✓ 1st | ||
| 100x150 | 0.000117s 2nd | 0.000752s ✓ 6th | 0.000123s ✓ 3rd | 0.000126s ✓ 4th | 0.000721s ✓ 5th | 0.000098s ✓ 1st | ||
| 250x250 | 0.001063s 4th | 0.001545s ✓ 6th | 0.000447s ✓ 2nd | 0.000445s ✓ 1st | 0.001303s ✓ 5th | 0.000477s ✓ 3rd | ||
| 550x500 | 0.003711s 4th | 0.011309s ✓ 5th | 0.001524s ✓ 2nd | 0.001460s ✓ 1st | 0.016480s ✓ 6th | 0.001558s ✓ 3rd | ||
| 1000x1000 | 0.019167s 1st | 0.053561s ✓ 6th | 0.025616s ✓ 3rd | 0.025778s ✓ 4th | 0.023447s ✓ 2nd | 0.027353s ✓ 5th | ||
| 2000x2500 | 0.035676s 4th | 1.579856s ✓ 5th | 0.014502s ✓ 2nd | 0.014438s ✓ 1st | 1.699035s ✓ 6th | 0.023144s ✓ 3rd | ||
| 5000x5000 | 1.214213s 5th | 1.230595s ✓ 6th | 0.511229s ✓ 2nd | 0.514490s ✓ 3rd | 1.144982s ✓ 4th | 0.452692s ✓ 1st | ||
| 10x10 | 0.000040s 3rd | 0.000046s ✓ 6th | 0.000045s ✓ 5th | 0.000037s ✓ 1st | 0.000042s ✓ 4th | 0.000040s ✓ 2nd | ||
| 25x20 | 0.000042s 1st | 0.000117s ✓ 6th | 0.000058s ✓ 4th | 0.000062s ✓ 5th | 0.000056s ✓ 3rd | 0.000051s ✓ 2nd | ||
| 50x50 | 0.000080s 4th | 0.000084s ✓ 5th | 0.000062s ✓ 3rd | 0.000055s ✓ 1st | 0.000096s ✓ 6th | 0.000060s ✓ 2nd | ||
| 100x150 | 0.000142s 4th | 0.000650s ✓ 6th | 0.000131s ✓ 3rd | 0.000104s ✓ 1st | 0.000586s ✓ 5th | 0.000106s ✓ 2nd | ||
| 250x250 | 0.001134s 5th | 0.001073s ✓ 4th | 0.000330s ✓ 2nd | 0.000317s ✓ 1st | 0.001307s ✓ 6th | 0.000389s ✓ 3rd | ||
| 550x500 | 0.003360s 4th | 0.010832s ✓ 5th | 0.001444s ✓ 2nd | 0.001410s ✓ 1st | 0.015605s ✓ 6th | 0.001537s ✓ 3rd | ||
| 1000x1000 | 0.020469s 4th | 0.023088s ✓ 5th | 0.008001s ✓ 1st | 0.008134s ✓ 2nd | 0.024824s ✓ 6th | 0.010342s ✓ 3rd | ||
| 2000x2500 | 0.038874s 4th | 1.661149s ✓ 6th | 0.014831s ✓ 1st | 0.016389s ✓ 2nd | 1.640910s ✓ 5th | 0.022910s ✓ 3rd | ||
| 5000x5000 | 0.984126s 5th | 1.032316s ✓ 6th | 0.393411s ✓ 2nd | 0.380302s ✓ 1st | 0.949218s ✓ 4th | 0.401329s ✓ 3rd | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
@@ -775,8 +780,8 @@ | ||
| 🎉 --------------------------- OVERALL RANKING --------------------------- 🎉 | ||
| 1. LAPX LAPJVS : 505.4603 ms | ✅ | 🥇x3 🥈x1 🥉x4 🏳️x1 | ||
| 2. LAPX LAPJV : 553.5970 ms | ✅ | 🥈x4 🥉x3 🏳️x2 | ||
| 3. LAPX LAPJVX : 556.8710 ms | ✅ | 🥇x4 🥈x2 🥉x1 🚩x2 | ||
| 4. BASELINE SciPy : 1274.1026 ms | ⭐ | 🥇x2 🥈x1 🚩x4 🏳️x1 🥴x1 | ||
| 5. LAPX LAPJV-IFT : 2877.7913 ms | ✅ | 🚩x1 🏳️x3 🥴x5 | ||
| 6. LAPX LAPJVC : 2886.1434 ms | ✅ | 🥈x1 🥉x1 🚩x2 🏳️x2 🥴x3 | ||
| 1. LAPX LAPJVX : 406.8110 ms | ✅ | 🥇x6 🥈x2 🏳️x1 | ||
| 2. LAPX LAPJV : 418.3143 ms | ✅ | 🥇x2 🥈x3 🥉x2 🚩x1 🏳️x1 | ||
| 3. LAPX LAPJVS : 436.7637 ms | ✅ | 🥈x4 🥉x5 | ||
| 4. BASELINE SciPy : 1048.2659 ms | ⭐ | 🥇x1 🥉x1 🚩x5 🏳️x2 | ||
| 5. LAPX LAPJVC : 2632.6435 ms | ✅ | 🥉x1 🚩x2 🏳️x2 🥴x4 | ||
| 6. LAPX LAPJV-IFT : 2729.3534 ms | ✅ | 🚩x1 🏳️x3 🥴x5 | ||
| 🎉 ------------------------------------------------------------------------- 🎉 | ||
@@ -792,11 +797,11 @@ | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
| 10x10 | 0.000055s 6th | 0.000048s ✓ 5th | 0.000046s ✓ 4th | 0.000037s ✓ 1st | 0.000043s ✓ 3rd | 0.000040s ✓ 2nd | ||
| 25x20 | 0.000043s 1st | 0.000059s ✓ 6th | 0.000053s ✓ 4th | 0.000045s ✓ 2nd | 0.000055s ✓ 5th | 0.000046s ✓ 3rd | ||
| 50x50 | 0.000074s 4th | 0.000080s ✓ 5th | 0.000063s ✓ 3rd | 0.000054s ✓ 1st | 0.000088s ✓ 6th | 0.000058s ✓ 2nd | ||
| 100x150 | 0.000146s 4th | 0.000647s ✓ 5th | 0.000107s ✓ 3rd | 0.000095s ✓ 1st | 0.000714s ✓ 6th | 0.000103s ✓ 2nd | ||
| 250x250 | 0.000964s 4th | 0.001495s ✓ 6th | 0.000565s ✓ 1st | 0.000603s ✓ 2nd | 0.001220s ✓ 5th | 0.000636s ✓ 3rd | ||
| 550x500 | 0.003138s 4th | 0.010879s ✓ 5th | 0.001294s ✓ 1st | 0.001329s ✓ 2nd | 0.016092s ✓ 6th | 0.001405s ✓ 3rd | ||
| 1000x1000 | 0.020857s 3rd | 0.042133s ✓ 6th | 0.019502s ✓ 2nd | 0.019448s ✓ 1st | 0.023370s ✓ 5th | 0.021119s ✓ 4th | ||
| 2000x2500 | 0.032293s 4th | 1.575432s ✓ 6th | 0.014037s ✓ 1st | 0.014037s ✓ 2nd | 1.482075s ✓ 5th | 0.022823s ✓ 3rd | ||
| 5000x5000 | 0.974974s 4th | 1.340142s ✓ 6th | 0.564158s ✓ 2nd | 0.570803s ✓ 3rd | 1.116583s ✓ 5th | 0.442339s ✓ 1st | ||
| 10x10 | 0.000051s 6th | 0.000045s ✓ 4th | 0.000049s ✓ 5th | 0.000037s ✓ 1st | 0.000042s ✓ 3rd | 0.000038s ✓ 2nd | ||
| 25x20 | 0.000044s 1st | 0.000057s ✓ 6th | 0.000055s ✓ 4th | 0.000045s ✓ 2nd | 0.000056s ✓ 5th | 0.000046s ✓ 3rd | ||
| 50x50 | 0.000068s 4th | 0.000076s ✓ 5th | 0.000064s ✓ 3rd | 0.000055s ✓ 1st | 0.000089s ✓ 6th | 0.000059s ✓ 2nd | ||
| 100x150 | 0.000152s 4th | 0.000662s ✓ 5th | 0.000140s ✓ 3rd | 0.000107s ✓ 1st | 0.000692s ✓ 6th | 0.000112s ✓ 2nd | ||
| 250x250 | 0.001222s 4th | 0.001813s ✓ 6th | 0.000859s ✓ 2nd | 0.000820s ✓ 1st | 0.001422s ✓ 5th | 0.000873s ✓ 3rd | ||
| 550x500 | 0.003388s 4th | 0.010608s ✓ 5th | 0.001394s ✓ 2nd | 0.001381s ✓ 1st | 0.014955s ✓ 6th | 0.001531s ✓ 3rd | ||
| 1000x1000 | 0.023853s 4th | 0.036422s ✓ 6th | 0.016408s ✓ 2nd | 0.015504s ✓ 1st | 0.029056s ✓ 5th | 0.017412s ✓ 3rd | ||
| 2000x2500 | 0.033767s 4th | 1.643829s ✓ 6th | 0.014560s ✓ 2nd | 0.014478s ✓ 1st | 1.421325s ✓ 5th | 0.022940s ✓ 3rd | ||
| 5000x5000 | 1.026469s 4th | 2.122379s ✓ 6th | 0.946314s ✓ 2nd | 0.947657s ✓ 3rd | 1.053784s ✓ 5th | 0.458383s ✓ 1st | ||
| ----------------------------------------------------------------------------------------------------------------------- | ||
@@ -807,8 +812,8 @@ | ||
| 🎉 --------------------------- OVERALL RANKING --------------------------- 🎉 | ||
| 1. LAPX LAPJVS : 488.5671 ms | ✅ | 🥇x1 🥈x3 🥉x4 🚩x1 | ||
| 2. LAPX LAPJV : 599.8239 ms | ✅ | 🥇x3 🥈x2 🥉x2 🚩x2 | ||
| 3. LAPX LAPJVX : 606.4511 ms | ✅ | 🥇x4 🥈x4 🥉x1 | ||
| 4. BASELINE SciPy : 1032.5424 ms | ⭐ | 🥇x1 🥉x1 🚩x6 🥴x1 | ||
| 5. LAPX LAPJVC : 2640.2397 ms | ✅ | 🥉x1 🏳️x5 🥴x3 | ||
| 6. LAPX LAPJV-IFT : 2970.9133 ms | ✅ | 🏳️x4 🥴x5 | ||
| 1. LAPX LAPJVS : 501.3927 ms | ✅ | 🥇x1 🥈x3 🥉x5 | ||
| 2. LAPX LAPJV : 979.8427 ms | ✅ | 🥈x5 🥉x2 🚩x1 🏳️x1 | ||
| 3. LAPX LAPJVX : 980.0842 ms | ✅ | 🥇x7 🥈x1 🥉x1 | ||
| 4. BASELINE SciPy : 1089.0140 ms | ⭐ | 🥇x1 🚩x7 🥴x1 | ||
| 5. LAPX LAPJVC : 2521.4207 ms | ✅ | 🥉x1 🏳️x5 🥴x3 | ||
| 6. LAPX LAPJV-IFT : 3815.8903 ms | ✅ | 🚩x1 🏳️x3 🥴x5 | ||
| 🎉 ------------------------------------------------------------------------- 🎉 | ||
@@ -815,0 +820,0 @@ ``` |
+2
-1
@@ -110,3 +110,3 @@ # Copyright (c) 2025 Ratha SIV | MIT License | ||
| __version__ = '0.9.0' | ||
| __version__ = '0.9.1' | ||
| __author__ = 'Ratha SIV' | ||
@@ -123,1 +123,2 @@ __description__ = 'Linear assignment problem solvers, including single and batch solvers.' | ||
| ] | ||
| Metadata-Version: 2.4 | ||
| Name: lapx | ||
| Version: 0.9.0 | ||
| Version: 0.9.1 | ||
| Summary: Linear assignment problem solvers, including single and batch solvers. | ||
@@ -67,5 +67,5 @@ Home-page: https://github.com/rathaROG/lapx | ||
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://pypi.org/project/lapx/#files) | ||
| [](https://pypi.org/project/lapx/) | ||
| [](https://pypi.org/project/lapx/) | ||
@@ -99,3 +99,3 @@ [](https://github.com/rathaROG/lapx/actions/workflows/benchmark_single.yaml) | ||
| [](https://pypi.org/project/lapx/) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://pepy.tech/project/lapx) | ||
@@ -131,2 +131,12 @@ [](https://pepy.tech/project/lapx) | ||
| <details><summary>⚡ Extra performance</summary><br> | ||
| Since [v0.9.1](https://github.com/rathaROG/lapx/releases/tag/v0.9.1), `lapx` enables safe optimizations by default. For source builds, you can opt into extra flags via environment variables which may boost performance further: | ||
| - `LAPX_FASTMATH=1` — enable fast-math (may change floating-point semantics) | ||
| - `LAPX_NATIVE=1` — GCC/Clang only; tune for the CPU of the build machine (resulting binaries are not portable) | ||
| - `LAPX_LTO=0` — disable link-time optimization if link time/memory is an issue | ||
| See the [setup.py](https://github.com/rathaROG/lapx/blob/main/setup.py) for details. | ||
| </details> | ||
| ## 🧪 Usage | ||
@@ -232,3 +242,3 @@ | ||
| `lapjvs()` is an enhanced version of Vadim Markovtsev's [`lapjv`](https://github.com/src-d/lapjv). While `lapjvs()` does not use CPU special instruction sets like the original implementation, it still delivers comparable performance. It natively supports both square and rectangular cost matrices and can produce output either in SciPy's [`linear_sum_assignment`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linear_sum_assignment.html) style or `(x, y)` mappings. See the [docstring here](https://github.com/rathaROG/lapx/blob/main/lap/lapjvs.py) for more details. | ||
| `lapjvs()` is an enhanced version of Vadim Markovtsev's [`lapjv`](https://github.com/src-d/lapjv). While `lapjvs()` does not use CPU special instruction sets like the original implementation, it still delivers comparable performance. It natively supports both square and rectangular cost matrices and can produce output either in SciPy's [`linear_sum_assignment`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linear_sum_assignment.html) style or `(x, y)` mappings. See the [docstring here](https://github.com/rathaROG/lapx/blob/main/lap/_lapjvs_wp.py) for more details. | ||
@@ -235,0 +245,0 @@ ```python |
+15
-5
| Metadata-Version: 2.4 | ||
| Name: lapx | ||
| Version: 0.9.0 | ||
| Version: 0.9.1 | ||
| Summary: Linear assignment problem solvers, including single and batch solvers. | ||
@@ -67,5 +67,5 @@ Home-page: https://github.com/rathaROG/lapx | ||
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://pypi.org/project/lapx/#files) | ||
| [](https://pypi.org/project/lapx/) | ||
| [](https://pypi.org/project/lapx/) | ||
@@ -99,3 +99,3 @@ [](https://github.com/rathaROG/lapx/actions/workflows/benchmark_single.yaml) | ||
| [](https://pypi.org/project/lapx/) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://pepy.tech/project/lapx) | ||
@@ -131,2 +131,12 @@ [](https://pepy.tech/project/lapx) | ||
| <details><summary>⚡ Extra performance</summary><br> | ||
| Since [v0.9.1](https://github.com/rathaROG/lapx/releases/tag/v0.9.1), `lapx` enables safe optimizations by default. For source builds, you can opt into extra flags via environment variables which may boost performance further: | ||
| - `LAPX_FASTMATH=1` — enable fast-math (may change floating-point semantics) | ||
| - `LAPX_NATIVE=1` — GCC/Clang only; tune for the CPU of the build machine (resulting binaries are not portable) | ||
| - `LAPX_LTO=0` — disable link-time optimization if link time/memory is an issue | ||
| See the [setup.py](https://github.com/rathaROG/lapx/blob/main/setup.py) for details. | ||
| </details> | ||
| ## 🧪 Usage | ||
@@ -232,3 +242,3 @@ | ||
| `lapjvs()` is an enhanced version of Vadim Markovtsev's [`lapjv`](https://github.com/src-d/lapjv). While `lapjvs()` does not use CPU special instruction sets like the original implementation, it still delivers comparable performance. It natively supports both square and rectangular cost matrices and can produce output either in SciPy's [`linear_sum_assignment`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linear_sum_assignment.html) style or `(x, y)` mappings. See the [docstring here](https://github.com/rathaROG/lapx/blob/main/lap/lapjvs.py) for more details. | ||
| `lapjvs()` is an enhanced version of Vadim Markovtsev's [`lapjv`](https://github.com/src-d/lapjv). While `lapjvs()` does not use CPU special instruction sets like the original implementation, it still delivers comparable performance. It natively supports both square and rectangular cost matrices and can produce output either in SciPy's [`linear_sum_assignment`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linear_sum_assignment.html) style or `(x, y)` mappings. See the [docstring here](https://github.com/rathaROG/lapx/blob/main/lap/_lapjvs_wp.py) for more details. | ||
@@ -235,0 +245,0 @@ ```python |
+14
-4
@@ -16,5 +16,5 @@ <details><summary>🆕 What's new</summary><hr> | ||
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://github.com/rathaROG/lapx/releases) | ||
| [](https://pypi.org/project/lapx/#files) | ||
| [](https://pypi.org/project/lapx/) | ||
| [](https://pypi.org/project/lapx/) | ||
@@ -48,3 +48,3 @@ [](https://github.com/rathaROG/lapx/actions/workflows/benchmark_single.yaml) | ||
| [](https://pypi.org/project/lapx/) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://badge.fury.io/py/lapx) | ||
| [](https://pepy.tech/project/lapx) | ||
@@ -80,2 +80,12 @@ [](https://pepy.tech/project/lapx) | ||
| <details><summary>⚡ Extra performance</summary><br> | ||
| Since [v0.9.1](https://github.com/rathaROG/lapx/releases/tag/v0.9.1), `lapx` enables safe optimizations by default. For source builds, you can opt into extra flags via environment variables which may boost performance further: | ||
| - `LAPX_FASTMATH=1` — enable fast-math (may change floating-point semantics) | ||
| - `LAPX_NATIVE=1` — GCC/Clang only; tune for the CPU of the build machine (resulting binaries are not portable) | ||
| - `LAPX_LTO=0` — disable link-time optimization if link time/memory is an issue | ||
| See the [setup.py](https://github.com/rathaROG/lapx/blob/main/setup.py) for details. | ||
| </details> | ||
| ## 🧪 Usage | ||
@@ -181,3 +191,3 @@ | ||
| `lapjvs()` is an enhanced version of Vadim Markovtsev's [`lapjv`](https://github.com/src-d/lapjv). While `lapjvs()` does not use CPU special instruction sets like the original implementation, it still delivers comparable performance. It natively supports both square and rectangular cost matrices and can produce output either in SciPy's [`linear_sum_assignment`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linear_sum_assignment.html) style or `(x, y)` mappings. See the [docstring here](https://github.com/rathaROG/lapx/blob/main/lap/lapjvs.py) for more details. | ||
| `lapjvs()` is an enhanced version of Vadim Markovtsev's [`lapjv`](https://github.com/src-d/lapjv). While `lapjvs()` does not use CPU special instruction sets like the original implementation, it still delivers comparable performance. It natively supports both square and rectangular cost matrices and can produce output either in SciPy's [`linear_sum_assignment`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.linear_sum_assignment.html) style or `(x, y)` mappings. See the [docstring here](https://github.com/rathaROG/lapx/blob/main/lap/_lapjvs_wp.py) for more details. | ||
@@ -184,0 +194,0 @@ ```python |
+102
-1
| # Copyright (c) 2025 Ratha SIV | MIT License | ||
| from setuptools import Extension, setup, find_packages | ||
| from setuptools.command.build_ext import build_ext # custom build_ext for high-perf flags | ||
@@ -28,2 +29,78 @@ LICENSE = "MIT" | ||
| class BuildExt(build_ext): | ||
| """ | ||
| Add portable, high-performance compiler/linker flags and allow | ||
| optional opt-ins via env vars: | ||
| - LAPX_FASTMATH=1 -> -ffast-math (or /fp:fast) | ||
| - LAPX_NATIVE=1 -> -march=native -mtune=native | ||
| - LAPX_LTO=0 -> disable LTO if needed | ||
| """ | ||
| def has_flag(self, flag): | ||
| import tempfile, os | ||
| with tempfile.NamedTemporaryFile('w', suffix='.cpp', delete=False) as f: | ||
| f.write("int main(){return 0;}") | ||
| fname = f.name | ||
| try: | ||
| self.compiler.compile([fname], extra_postargs=[flag]) | ||
| except Exception: | ||
| try: os.remove(fname) | ||
| except OSError: pass | ||
| return False | ||
| try: os.remove(fname) | ||
| except OSError: pass | ||
| return True | ||
| def build_extensions(self): | ||
| import os, sys | ||
| ctype = self.compiler.compiler_type | ||
| is_msvc = (ctype == 'msvc') | ||
| compile_opts = [] | ||
| link_opts = [] | ||
| if is_msvc: | ||
| compile_opts += ['/O2', '/DNDEBUG'] | ||
| # Link-time optimization (LTO) | ||
| if self.has_flag('/GL'): | ||
| compile_opts += ['/GL'] | ||
| link_opts += ['/LTCG'] | ||
| # Optional fast-math (opt-in) | ||
| if os.environ.get('LAPX_FASTMATH') == '1': | ||
| compile_opts += ['/fp:fast'] | ||
| else: | ||
| compile_opts += ['-O3', '-DNDEBUG'] | ||
| if sys.version_info >= (3, 9) and self.has_flag('-fvisibility=hidden'): | ||
| compile_opts += ['-fvisibility=hidden'] | ||
| if self.has_flag('-fno-math-errno'): | ||
| compile_opts += ['-fno-math-errno'] | ||
| # Link-time optimization (prefer ThinLTO when available) | ||
| if os.environ.get('LAPX_LTO', '1') == '1': | ||
| if self.has_flag('-flto=thin'): | ||
| compile_opts += ['-flto=thin'] | ||
| link_opts += ['-flto=thin'] | ||
| elif self.has_flag('-flto'): | ||
| compile_opts += ['-flto'] | ||
| link_opts += ['-flto'] | ||
| # Optional fast-math (opt-in) | ||
| if os.environ.get('LAPX_FASTMATH') == '1' and self.has_flag('-ffast-math'): | ||
| compile_opts += ['-ffast-math'] | ||
| # Optional native tuning (opt-in; avoid for portable wheels) | ||
| if os.environ.get('LAPX_NATIVE') == '1': | ||
| if self.has_flag('-march=native'): | ||
| compile_opts += ['-march=native'] | ||
| if self.has_flag('-mtune=native'): | ||
| compile_opts += ['-mtune=native'] | ||
| # Minor call overhead reduction on Linux/glibc (if supported) | ||
| if sys.platform.startswith('linux') and self.has_flag('-fno-plt'): | ||
| compile_opts += ['-fno-plt'] | ||
| # Apply to all extensions | ||
| for ext in self.extensions: | ||
| prev_cargs = list(getattr(ext, 'extra_compile_args', []) or []) | ||
| prev_largs = list(getattr(ext, 'extra_link_args', []) or []) | ||
| ext.extra_compile_args = prev_cargs + compile_opts | ||
| ext.extra_link_args = prev_largs + link_opts | ||
| super().build_extensions() | ||
| def main_setup(): | ||
@@ -96,4 +173,17 @@ import os | ||
| # Safe, high-performance Cython directives | ||
| cython_directives = dict( | ||
| language_level=3, | ||
| boundscheck=False, | ||
| wraparound=False, | ||
| nonecheck=False, | ||
| initializedcheck=False, | ||
| cdivision=True, | ||
| infer_types=True, | ||
| profile=False, | ||
| linetrace=False, | ||
| ) | ||
| # Merge all extensions | ||
| ext_modules = cythonize([ext_jv, ext_jvx]) + [ext_jvc, ext_jvs] | ||
| ext_modules = cythonize([ext_jv, ext_jvx], compiler_directives=cython_directives) + [ext_jvc, ext_jvs] | ||
@@ -144,2 +234,3 @@ setup( | ||
| ext_modules=ext_modules, | ||
| cmdclass={'build_ext': BuildExt}, | ||
| ) | ||
@@ -155,3 +246,13 @@ | ||
| >>> python -m build --wheel | ||
| Base optimizations are applied automatically (e.g., optimized build | ||
| [/O2 on MSVC or -O3 on GCC/Clang], -DNDEBUG, and LTO when supported). | ||
| Extra opt-ins can be enabled via environment variables: | ||
| - LAPX_FASTMATH=1 -> enables fast-math (/fp:fast on MSVC, -ffast-math on GCC/Clang) | ||
| - LAPX_NATIVE=1 -> enables -march=native -mtune=native (GCC/Clang only) | ||
| - LAPX_LTO=0 -> disables LTO if needed | ||
| Note: Cython compiler directives (boundscheck=False, wraparound=False, cdivision=True, etc.) | ||
| are enabled by default for Cython modules. | ||
| """ | ||
| main_setup() |
+24
-10
@@ -110,3 +110,3 @@ #include <functional> | ||
| if (typ != NPY_FLOAT32 && typ != NPY_FLOAT64) { | ||
| PyErr_SetString(PyExc_TypeError, "\"cost_matrix\" must be float32 or float64 for lapjvs_native()"); | ||
| PyErr_SetString(PyExc_TypeError, "\"cost_matrix\" must be float32 or float64"); | ||
| return NULL; | ||
@@ -125,7 +125,14 @@ } | ||
| int dim = static_cast<int>(dims[0]); | ||
| if (dim <= 0) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is invalid or too large"); | ||
| if (dim < 0) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is too large or invalid"); | ||
| return NULL; | ||
| } | ||
| if (dim == 0) { | ||
| npy_intp ret_dims[] = {0}; | ||
| pyarray row_ind_array(PyArray_SimpleNew(1, ret_dims, NPY_INT)); | ||
| pyarray col_ind_array(PyArray_SimpleNew(1, ret_dims, NPY_INT)); | ||
| return Py_BuildValue("(OO)", row_ind_array.get(), col_ind_array.get()); | ||
| } | ||
| auto cost_matrix = PyArray_DATA(cost_matrix_array.get()); | ||
@@ -179,7 +186,14 @@ | ||
| int dim = static_cast<int>(dims[0]); | ||
| if (dim <= 0) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is invalid or too large"); | ||
| if (dim < 0) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is too large or invalid"); | ||
| return NULL; | ||
| } | ||
| if (dim == 0) { | ||
| npy_intp ret_dims[] = {0}; | ||
| pyarray row_ind_array(PyArray_SimpleNew(1, ret_dims, NPY_INT)); | ||
| pyarray col_ind_array(PyArray_SimpleNew(1, ret_dims, NPY_INT)); | ||
| return Py_BuildValue("(OO)", row_ind_array.get(), col_ind_array.get()); | ||
| } | ||
| auto cost_matrix = PyArray_DATA(cost_matrix_array.get()); | ||
@@ -217,3 +231,3 @@ | ||
| if (typ != NPY_FLOAT32 && typ != NPY_FLOAT64) { | ||
| PyErr_SetString(PyExc_TypeError, "\"cost_matrix\" must be float32 or float64 for lapjvsa()"); | ||
| PyErr_SetString(PyExc_TypeError, "\"cost_matrix\" must be float32 or float64"); | ||
| return NULL; | ||
@@ -233,3 +247,3 @@ } | ||
| if (dim < 0) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is invalid"); | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is too large or invalid"); | ||
| return NULL; | ||
@@ -296,3 +310,3 @@ } | ||
| if (!cost_matrix_array) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\" must be convertible to float32 for lapjvsa_float32()"); | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\" must be convertible to float32"); | ||
| return NULL; | ||
@@ -312,3 +326,3 @@ } | ||
| if (dim < 0) { | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is invalid"); | ||
| PyErr_SetString(PyExc_ValueError, "\"cost_matrix\"'s shape is too large or invalid"); | ||
| return NULL; | ||
@@ -353,2 +367,2 @@ } | ||
| return reinterpret_cast<PyObject*>(pairs.release()); | ||
| } | ||
| } |
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
1666092
0.41%1525
6.42%