Optimize memory when chroma denoise is not used

Desmis 2020-04-06 17:33:01 +02:00
parent 9f5e81b026
commit ded866f5af
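In outline, the change computes a single usechrom flag from the two chroma denoise parameters (cp.chromfi, cp.chromco) and uses it to gate the one call to Ldecomp->reconstruct(): the luminance decomposition is reconstructed immediately when no chroma denoise is requested, and only after the chroma stage otherwise. Below is a minimal, self-contained C++ sketch of that control flow; the names WaveletDecomp and lumaChromaSketch are hypothetical stand-ins, not the real rtengine API.

#include <memory>

// Illustrative stand-in for rtengine's wavelet_decomposition (hypothetical).
struct WaveletDecomp {
    void reconstruct(float *data, float strength) {
        // inverse wavelet transform back into `data` (omitted)
        (void)data; (void)strength;
    }
};

void lumaChromaSketch(float *data, float strength, float chromfi, float chromco) {
    // Same test the commit introduces: is any chroma denoise requested?
    const bool usechrom = chromfi > 0.f || chromco > 0.f;

    std::unique_ptr<WaveletDecomp> Ldecomp(new WaveletDecomp);
    // ... luminance wavelet processing on Ldecomp ...

    if (!usechrom) {
        // No chroma stage will run: reconstruct the L channel right away,
        // so the rest of the chroma path can be skipped entirely.
        Ldecomp->reconstruct(data, strength);
    } else {
        // ... chroma denoise (variC / variCb, a and b decompositions) ...
        // Reconstruct L only once the chroma stage has finished.
        Ldecomp->reconstruct(data, strength);
    }
}   // Ldecomp is freed here when the unique_ptr goes out of scope.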


@@ -838,6 +838,7 @@ void ImProcFunctions::ip_wavelet(LabImage * lab, LabImage * dst, int kall, const
     if (levwavL < 4) {
         levwavL = 4; // to allow edge detection => always allocate at least 4 levels, because if the user selects wavelet, it is to do something!
     }
+    bool usechrom = cp.chromfi > 0.f || cp.chromco > 0.f;

     if (levwavL > 0) {
         const std::unique_ptr<wavelet_decomposition> Ldecomp(new wavelet_decomposition(labco->data, labco->W, labco->H, levwavL, 1, skip, rtengine::max(1, wavNestedLevels), DaubLen));
@@ -970,7 +971,9 @@ void ImProcFunctions::ip_wavelet(LabImage * lab, LabImage * dst, int kall, const
             }
         }
     */
+    if (!usechrom) {
         Ldecomp->reconstruct(labco->data, cp.strength);
+    }

     float variC[7];
     float variCb[7];
@@ -1299,9 +1302,11 @@ void ImProcFunctions::ip_wavelet(LabImage * lab, LabImage * dst, int kall, const
         delete hhCurve;
     }

+    if (usechrom) {
+        Ldecomp->reconstruct(labco->data, cp.strength);
+    }
     }
 }

 if (numtiles > 1 || (numtiles == 1 /*&& cp.avoi*/)) { //in all cases, since I add the contrast curve
     //calculate mask for feathering output tile overlaps
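Note that the two guards are mutually exclusive, so reconstruct() still runs exactly once per pass; only its position changes. When no chroma denoise is requested, reconstructing early presumably lets the chroma section and the buffers it would need (variC, variCb and the a/b decompositions) be skipped or released sooner, which would be the source of the memory saving named in the commit title.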