allow translation of demosaic methods; hide Sub-Image selector for files with only 1 raw file; ...

heckflosse
2017-03-22 14:13:51 +01:00
parent a7511f754e
commit 9bb5164cce
18 changed files with 149 additions and 56 deletions

View File

@@ -202,7 +202,7 @@ public:
}
// use with indices
T * operator[](int index)
T * operator[](int index) const
{
assert((index >= 0) && (index < y));
return ptr[index];
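
The const overload added above is what later lets helpers such as invertMask() and xorMasks() take their input mask by const reference. A minimal, hedged sketch of the same pattern on a simplified stand-in class (Simple2D is not RawTherapee's array2D):

#include <cassert>
#include <vector>

template <class T>
class Simple2D {
    int w, h;
    std::vector<T> data;
    std::vector<T*> rows;
public:
    Simple2D(int width, int height)
        : w(width), h(height),
          data(static_cast<size_t>(width) * height),
          rows(height)
    {
        for (int i = 0; i < h; ++i) {
            rows[i] = data.data() + static_cast<size_t>(i) * w;
        }
    }
    // non-const and const row access, mirroring the hunk above
    T* operator[](int index) {
        assert(index >= 0 && index < h);
        return rows[index];
    }
    T* operator[](int index) const {
        assert(index >= 0 && index < h);
        return rows[index];
    }
};

// With the const overload, read-only consumers can take a const reference,
// as invertMask()/xorMasks() do later in this commit:
inline long long sumAll(const Simple2D<int>& m, int w, int h)
{
    long long s = 0;
    for (int i = 0; i < h; ++i) {
        for (int j = 0; j < w; ++j) {
            s += m[i][j];
        }
    }
    return s;
}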

View File

@@ -81,14 +81,12 @@ public:
virtual bool IsrgbSourceModified() const = 0; // tracks whether cached rgb output of demosaic has been modified
virtual void setCurrentFrame(unsigned int frameNum) = 0;
virtual int getFrameCount() = 0;
// use right after demosaicing image, add coarse transformation and put the result in the provided Imagefloat*
virtual void getImage (const ColorTemp &ctemp, int tran, Imagefloat* image, const PreviewProps &pp, const ToneCurveParams &hlp, const ColorManagementParams &cmp, const RAWParams &raw) = 0;
virtual eSensorType getSensorType ()
{
return ST_NONE;
}
virtual eSensorType getSensorType () const = 0;
// true if ready to provide the AutoWB, i.e. when the image has been demosaiced for RawImageSource
virtual bool isWBProviderReady () = 0;
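
The hunk above makes getSensorType() pure virtual and const and adds setCurrentFrame()/getFrameCount() to the ImageSource contract. A hedged sketch of how a multi-frame source might satisfy it; ExampleMultiFrameSource and its enum are simplified stand-ins, not RawTherapee classes:

#include <algorithm>

enum eSensorType { ST_NONE, ST_BAYER, ST_FUJI_XTRANS };  // simplified stand-in

class ExampleMultiFrameSource /* : public ImageSource */ {
    unsigned int numFrames = 4;   // e.g. a Pixel Shift file carrying 4 sub-images
    unsigned int currFrame = 0;
public:
    eSensorType getSensorType() const { return ST_BAYER; }
    void setCurrentFrame(unsigned int frameNum) {
        // clamp to the available frames, as RawImageSource does further down in this commit
        currFrame = std::min(numFrames - 1, frameNum);
    }
    int getFrameCount() { return static_cast<int>(numFrames); }
};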

View File

@@ -87,7 +87,7 @@ ImProcCoordinator::ImProcCoordinator ()
fw(0), fh(0), tr(0),
fullw(1), fullh(1),
pW(-1), pH(-1),
plistener(nullptr), imageListener(nullptr), aeListener(nullptr), acListener(nullptr), abwListener(nullptr), awbListener(nullptr), actListener(nullptr), adnListener(nullptr), awavListener(nullptr), dehaListener(nullptr), hListener(nullptr),
plistener(nullptr), imageListener(nullptr), aeListener(nullptr), acListener(nullptr), abwListener(nullptr), awbListener(nullptr), actListener(nullptr), adnListener(nullptr), awavListener(nullptr), dehaListener(nullptr), frameCountListener(nullptr), imageTypeListener(nullptr), hListener(nullptr),
resultValid(false), lastOutputProfile("BADFOOD"), lastOutputIntent(RI__COUNT), lastOutputBPC(false), thread(nullptr), changeSinceLast(0), updaterRunning(false), destroying(false), utili(false), autili(false), wavcontlutili(false),
butili(false), ccutili(false), cclutili(false), clcutili(false), opautili(false), conversionBuffer(1, 1), colourToningSatLimit(0.f), colourToningSatLimitOpacity(0.f)
{}
@@ -183,9 +183,14 @@ void ImProcCoordinator::updatePreviewImage (int todo, Crop* cropCall)
progress ("Applying white balance, color correction & sRGB conversion...", 100 * readyphase / numofphases);
if(frameCountListener) {
frameCountListener->FrameCountChanged(imgsrc->getFrameCount(), params.raw.bayersensor.imageNum);
}
// raw auto CA is bypassed if no high detail is needed, so we have to compute it when high detail is needed
if ( (todo & M_PREPROC) || (!highDetailPreprocessComputed && highDetailNeeded)) {
imgsrc->setCurrentFrame(params.raw.bayersensor.imageNum);
imgsrc->preprocess( rp, params.lensProf, params.coarse );
imgsrc->getRAWHistogram( histRedRaw, histGreenRaw, histBlueRaw );
@@ -208,6 +213,10 @@ void ImProcCoordinator::updatePreviewImage (int todo, Crop* cropCall)
*/
// If high detail (=100%) is newly selected, do a demosaic update, since the last was just with FAST
if(imageTypeListener) {
imageTypeListener->imageTypeChanged(imgsrc->isRAW(), imgsrc->getSensorType() == ST_BAYER, imgsrc->getSensorType() == ST_FUJI_XTRANS);
}
if ( (todo & M_RAW)
|| (!highDetailRawComputed && highDetailNeeded)
|| ( params.toneCurve.hrenabled && params.toneCurve.method != "Color" && imgsrc->IsrgbSourceModified())
@@ -834,6 +843,7 @@ void ImProcCoordinator::updatePreviewImage (int todo, Crop* cropCall)
updateLRGBHistograms ();
hListener->histogramChanged (histRed, histGreen, histBlue, histLuma, histToneCurve, histLCurve, histCCurve, /*histCLurve, histLLCurve,*/ histLCAM, histCCAM, histRedRaw, histGreenRaw, histBlueRaw, histChroma, histLRETI);
}
}

View File

@@ -158,6 +158,8 @@ protected:
AutoCamListener* acListener;
AutoBWListener* abwListener;
AutoWBListener* awbListener;
FrameCountListener *frameCountListener;
ImageTypeListener *imageTypeListener;
AutoColorTonListener* actListener;
AutoChromaListener* adnListener;
@@ -335,6 +337,16 @@ public:
awavListener = awa;
}
void setFrameCountListener (FrameCountListener* fcl)
{
frameCountListener = fcl;
}
void setImageTypeListener (ImageTypeListener* itl)
{
imageTypeListener = itl;
}
void saveInputICCReference (const Glib::ustring& fname, bool apply_wb);
InitialImage* getInitialImage ()

View File

@@ -133,7 +133,7 @@ void paintMotionMask(int index, bool showMotion, float gridMax, bool showOnlyMas
}
}
void invertMask(int xStart, int xEnd, int yStart, int yEnd, array2D<uint8_t> &maskIn, array2D<uint8_t> &maskOut)
void invertMask(int xStart, int xEnd, int yStart, int yEnd, const array2D<uint8_t> &maskIn, array2D<uint8_t> &maskOut)
{
#pragma omp parallel for schedule(dynamic,16)
@@ -146,7 +146,7 @@ void invertMask(int xStart, int xEnd, int yStart, int yEnd, array2D<uint8_t> &ma
}
}
void xorMasks(int xStart, int xEnd, int yStart, int yEnd, array2D<uint8_t> &maskIn, array2D<uint8_t> &maskOut)
void xorMasks(int xStart, int xEnd, int yStart, int yEnd, const array2D<uint8_t> &maskIn, array2D<uint8_t> &maskOut)
{
#pragma omp parallel for schedule(dynamic,16)
@@ -375,7 +375,7 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
for(int i=0, frameIndex = 0;i<4;++i) {
if(i != currFrame) {
if(bayerParams.pixelShiftLmmse) {
lmmse_interpolate_omp(winw, winh, *(rawDataFrames[i]), redTmp[frameIndex], greenTmp[frameIndex], blueTmp[frameIndex], raw.bayersensor.lmmse_iterations);
lmmse_interpolate_omp(winw, winh, *(rawDataFrames[i]), redTmp[frameIndex], greenTmp[frameIndex], blueTmp[frameIndex], bayerParams.lmmse_iterations);
} else {
amaze_demosaic_RT (0, 0, winw, winh, *(rawDataFrames[i]), redTmp[frameIndex], greenTmp[frameIndex], blueTmp[frameIndex]);
}
@@ -689,9 +689,9 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
#ifdef PIXELSHIFTDEV
std::cout << "WL: " << c_white[0] << " BL: " << c_black[0] << " ePerIso multiplicator: " << (65535.f / (c_white[0] - c_black[0])) << std::endl;
#endif
float eperIsoRed = (eperIso / scale_mul[0]) * (65535.f / (c_white[0] - c_black[0]));
float eperIsoGreen = (eperIso * scaleGreen) * (65535.f / (c_white[1] - c_black[1]));
float eperIsoBlue = (eperIso / scale_mul[2]) * (65535.f / (c_white[2] - c_black[2]));
const float eperIsoRed = (eperIso / scale_mul[0]) * (65535.f / (c_white[0] - c_black[0]));
const float eperIsoGreen = (eperIso * scaleGreen) * (65535.f / (c_white[1] - c_black[1]));
const float eperIsoBlue = (eperIso / scale_mul[2]) * (65535.f / (c_white[2] - c_black[2]));
const float clippedRed = 65535.f / scale_mul[0];
const float clippedBlue = 65535.f / scale_mul[2];
@@ -705,7 +705,7 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
nRead *= nRead;
// If the values of two corresponding green pixels differ by more than motionThreshold %, the pixel will be treated as a badGreen pixel
float motionThreshold = 1.f - (motion / 100.f);
const float motionThreshold = 1.f - (motion / 100.f);
// For shades of green motion indicators
const float blendFactor = ((adaptive || motion == 0.f) ? 1.f : 1.f / (1.f - motionThreshold));
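
A worked example of the two constants above, using a hypothetical motion slider value of 20 %:

// Hypothetical slider value, only to make the arithmetic concrete:
constexpr float motionExample = 20.f;
constexpr float motionThresholdExample = 1.f - motionExample / 100.f;       // 0.8
constexpr float blendFactorExample = 1.f / (1.f - motionThresholdExample);  // 5.0 in the non-adaptive case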
@@ -733,12 +733,6 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
}
}
const float thresh = adaptive ? 0.f : motionThreshold;
array2D<float> psRed(winw + 32, winh); // increase width to avoid cache conflicts
array2D<float> psG1(winw + 32, winh);
array2D<float> psG2(winw + 32, winh);
array2D<float> psBlue(winw + 32, winh);
// calculate average green brightness for each frame
float greenBrightness[4] = {1.f, 1.f, 1.f, 1.f};
@@ -766,18 +760,9 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
#endif
for(int i = winy + 1; i < winh - 1; ++i) {
int j = winx + 1;
int c = FC(i, j);
// offset to keep the code short. It changes its value between 0 and 1 for each iteration of the loop
unsigned int offset = c & 1;
for(; j < winw - 1; ++j) {
float green1 = (*rawDataFrames[1 - offset])[i - offset + 1][j];
float green2 = (*rawDataFrames[3 - offset])[i + offset][j + 1];
(*histoThr[1 - offset])[green1]++;
(*histoThr[3 - offset])[green2]++;
offset ^= 1; // 0 => 1 or 1 => 0
for(int j = winx + 1, offset = FC(i, j) & 1; j < winw - 1; ++j, offset ^= 1) {
(*histoThr[1 - offset])[(*rawDataFrames[1 - offset])[i - offset + 1][j]]++;
(*histoThr[3 - offset])[(*rawDataFrames[3 - offset])[i + offset][j + 1]]++;
}
}
@@ -817,6 +802,12 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
}
const float thresh = adaptive ? 0.f : motionThreshold;
array2D<float> psRed(winw + 32, winh); // increase width to avoid cache conflicts
array2D<float> psG1(winw + 32, winh);
array2D<float> psG2(winw + 32, winh);
array2D<float> psBlue(winw + 32, winh);
// fill channels psRed, psG1, psG2 and psBlue
#ifdef _OPENMP
#pragma omp parallel for schedule(dynamic,16)
@@ -1336,7 +1327,6 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
}
array2D<uint8_t> mask(W, H, ARRAY2D_CLEAR_DATA);
array2D<uint8_t> maskInv(W, H, ARRAY2D_CLEAR_DATA);
#pragma omp parallel for schedule(dynamic,16)
@@ -1346,7 +1336,7 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
for(int v = -1; v <= 1; v++) {
for(int h = -1; h < 1; h++) {
v3sum[1 + h] += (psMask[i + v][j + h]);
v3sum[1 + h] += psMask[i + v][j + h];
}
}
@@ -1366,6 +1356,7 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
}
if(holeFill) {
array2D<uint8_t> maskInv(W, H);
invertMask(winx + border - offsX, winw - (border + offsX), winy + border - offsY, winh - (border + offsY), mask, maskInv);
floodFill4(winx + border - offsX, winw - (border + offsX), winy + border - offsY, winh - (border + offsY), maskInv);
xorMasks(winx + border - offsX, winw - (border + offsX), winy + border - offsY, winh - (border + offsY), maskInv, mask);
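
The holeFill branch above chains three helpers: invert the motion mask, flood-fill the inverted mask from the window border, then XOR the result back, so that enclosed gaps in the motion mask get filled. A self-contained, hedged sketch of the same idea on plain std::vector buffers (not the actual RawTherapee routines, which work on array2D and restrict themselves to the passed window):

#include <cstdint>
#include <utility>
#include <vector>

using Mask = std::vector<std::vector<uint8_t>>;  // 0 = no motion, 255 = motion

void fillHolesSketch(Mask& mask)
{
    const int h = static_cast<int>(mask.size());
    const int w = static_cast<int>(mask[0].size());

    // 1) invert the mask (cf. invertMask above)
    Mask inv(h, std::vector<uint8_t>(w));
    for (int i = 0; i < h; ++i) {
        for (int j = 0; j < w; ++j) {
            inv[i][j] = mask[i][j] ? 0 : 255;
        }
    }

    // 2) flood-fill the inverted mask from its border (cf. floodFill4 above):
    //    everything reachable from outside is cleared, so only enclosed holes
    //    remain set in 'inv'
    std::vector<std::pair<int, int>> stack;
    auto push = [&](int i, int j) {
        if (i >= 0 && i < h && j >= 0 && j < w && inv[i][j]) {
            inv[i][j] = 0;
            stack.emplace_back(i, j);
        }
    };
    for (int i = 0; i < h; ++i) { push(i, 0); push(i, w - 1); }
    for (int j = 0; j < w; ++j) { push(0, j); push(h - 1, j); }
    while (!stack.empty()) {
        const int i = stack.back().first;
        const int j = stack.back().second;
        stack.pop_back();
        push(i - 1, j); push(i + 1, j); push(i, j - 1); push(i, j + 1);
    }

    // 3) XOR back (cf. xorMasks above): the surviving pixels of 'inv' are
    //    exactly the holes, so XOR-ing sets them to 255 in the motion mask
    for (int i = 0; i < h; ++i) {
        for (int j = 0; j < w; ++j) {
            mask[i][j] ^= inv[i][j];
        }
    }
}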
@@ -1403,21 +1394,21 @@ void RawImageSource::pixelshift(int winx, int winy, int winw, int winh, const RA
if(mask[i][j] == 255) {
paintMotionMask(j + offsX, showMotion, 0.5f, showOnlyMask, greenDest, redDest, blueDest);
} else if(showOnlyMask) { // we want only motion mask => paint areas without motion in pure black
red[i + offsY][j + offsX] = green[i + offsY][j + offsX] = blue[i + offsY][j + offsX] = 0.f;
redDest[j + offsX] = greenDest[j + offsX] = blueDest[j + offsX] = 0.f;
} else {
if(smoothTransitions) {
#ifdef __SSE2__
float blend = psMask[i][j];
const float blend = psMask[i][j];
#else
float blend = smoothFactor == 0.f ? 1.f : pow_F(std::max(psMask[i][j] - 1.f, 0.f), smoothFactor);
const float blend = smoothFactor == 0.f ? 1.f : pow_F(std::max(psMask[i][j] - 1.f, 0.f), smoothFactor);
#endif
red[i + offsY][j + offsX] = intp(blend, red[i + offsY][j + offsX], psRed[i][j] );
green[i + offsY][j + offsX] = intp(blend, green[i + offsY][j + offsX], (psG1[i][j] + psG2[i][j]) * 0.5f);
blue[i + offsY][j + offsX] = intp(blend, blue[i + offsY][j + offsX], psBlue[i][j]);
redDest[j + offsX] = intp(blend, redDest[j + offsX], psRed[i][j] );
greenDest[j + offsX] = intp(blend, greenDest[j + offsX], (psG1[i][j] + psG2[i][j]) * 0.5f);
blueDest[j + offsX] = intp(blend, blueDest[j + offsX], psBlue[i][j]);
} else {
red[i + offsY][j + offsX] = psRed[i][j];
green[i + offsY][j + offsX] = (psG1[i][j] + psG2[i][j]) * 0.5f;
blue[i + offsY][j + offsX] = psBlue[i][j];
redDest[j + offsX] = psRed[i][j];
greenDest[j + offsX] = (psG1[i][j] + psG2[i][j]) * 0.5f;
blueDest[j + offsX] = psBlue[i][j];
}
}
}
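
In the smoothTransitions branch above, the blend weight is fed to intp(). Assuming intp(a, b, c) is the usual rtengine linear-interpolation helper of the form a * b + (1 - a) * c, the effect appears to be: blend == 1 keeps the single-frame demosaiced value already in redDest/greenDest/blueDest (motion detected), blend == 0 keeps the combined pixel-shift value from psRed/psG1/psG2/psBlue. A hedged restatement:

// Hedged reading of the blend above; the formula for intp() is an assumption.
inline float intpSketch(float blend, float demosaiced, float pixelShift)
{
    return blend * demosaiced + (1.f - blend) * pixelShift;
}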

View File

@@ -204,6 +204,8 @@ public:
currFrame = std::min(numFrames - 1, frameNum);
ri = riFrames[currFrame];
}
int getFrameCount() {return numFrames;}
protected:
typedef unsigned short ushort;
void processFalseColorCorrection (Imagefloat* i, const int steps);

View File

@@ -317,6 +317,20 @@ public :
virtual void WBChanged(double temp, double green) = 0;
};
class FrameCountListener
{
public :
virtual ~FrameCountListener() = default;
virtual void FrameCountChanged(int n, int frameNum) = 0;
};
class ImageTypeListener
{
public :
virtual ~ImageTypeListener() = default;
virtual void imageTypeChanged(bool isRaw, bool isBayer, bool isXtrans) = 0;
};
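
The two interfaces above are what the GUI hooks into. A hedged sketch of a consumer (SensorPanelSketch and its member are hypothetical, not RawTherapee's actual Gtk code, and the rtengine:: qualification assumes these interfaces live in the rtengine namespace like the rest of rtengine.h). It mirrors the commit message: show the Sub-Image selector only when more than one raw frame is present.

class SensorPanelSketch : public rtengine::FrameCountListener,
                          public rtengine::ImageTypeListener
{
    bool subImageSelectorVisible = false;   // stands in for showing/hiding the selector widget
public:
    void FrameCountChanged(int n, int frameNum) override
    {
        // hide the Sub-Image selector for files with only one raw frame
        subImageSelectorVisible = (n > 1);
        (void) frameNum;
    }
    void imageTypeChanged(bool isRaw, bool isBayer, bool isXtrans) override
    {
        // e.g. enable the Bayer-only demosaic / pixel-shift controls accordingly
        (void) isRaw; (void) isBayer; (void) isXtrans;
    }
};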
class WaveletListener
{
public :
@@ -417,12 +431,14 @@ public:
virtual void setHistogramListener (HistogramListener *l) = 0;
virtual void setPreviewImageListener (PreviewImageListener* l) = 0;
virtual void setAutoCamListener (AutoCamListener* l) = 0;
virtual void setFrameCountListener (FrameCountListener* l) = 0;
virtual void setAutoBWListener (AutoBWListener* l) = 0;
virtual void setAutoWBListener (AutoWBListener* l) = 0;
virtual void setAutoColorTonListener (AutoColorTonListener* l) = 0;
virtual void setAutoChromaListener (AutoChromaListener* l) = 0;
virtual void setRetinexListener (RetinexListener* l) = 0;
virtual void setWaveletListener (WaveletListener* l) = 0;
virtual void setImageTypeListener (ImageTypeListener* l) = 0;
virtual void setMonitorProfile (const Glib::ustring& monitorProfile, RenderingIntent intent) = 0;
virtual void getMonitorProfile (Glib::ustring& monitorProfile, RenderingIntent& intent) const = 0;
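
Registration would then go through the two new setters above. A usage sketch, assuming the enclosing interface is rtengine::StagedImageProcessor (its name is not shown in this hunk) and reusing the hypothetical SensorPanelSketch from the sketch above:

// 'ipc' and 'panel' are hypothetical names; the interface type is an assumption.
void registerSensorPanel(rtengine::StagedImageProcessor* ipc, SensorPanelSketch* panel)
{
    ipc->setFrameCountListener(panel);
    ipc->setImageTypeListener(panel);
}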

View File

@@ -51,6 +51,8 @@ public:
void getAutoWBMultipliers (double &rm, double &gm, double &bm);
ColorTemp getSpotWB (std::vector<Coord2D> &red, std::vector<Coord2D> &green, std::vector<Coord2D> &blue, int tran, double equal);
eSensorType getSensorType() const {return ST_NONE;}
bool isWBProviderReady ()
{
return true;
@@ -96,6 +98,8 @@ public:
return rgbSourceModified;
}
void setCurrentFrame(unsigned int frameNum) {}
int getFrameCount() {return 1;}
void getRawValues(int x, int y, int rotate, int &R, int &G, int &B) { R = G = B = 0;}