#pragma once

#include "actiondep.h"
#include "ncfilew.h"
#include "ncfuncs.h"

// Standard headers used directly below (they may also arrive transitively).
#include <concepts>
#include <cstddef>
#include <vector>

// Shared types and helpers for the GRAD action.
class GradMethods
{
   public:
   using DataType = float;

   // Parse a string into DataType via michlib's real-number parser.
   static DataType ToNum(const MString& str) { return michlib_internal::RealType<sizeof(DataType)>::String2Real(str.Buf()); }

   // Per-variable scaling settings. automin/automax are set when the
   // corresponding bound was given as "auto" (derive it from the data),
   // log is set when the <var>_log option is present, and fill holds the
   // source's fill (missing-data) value.
   struct MinMax
   {
      bool automin, automax, log;
      DataType min, max;
      DataType fill;
   };

   class Matrix;
   class NCFileW;
};

// One variable's values on an nx-by-ny grid, stored as MDataType;
// Grad() (only declared here) computes the gradient in place.
class GradMethods::Matrix
{
   public:
   using MDataType = michlib::uint2;

   private:
   size_t nx, ny;
   std::vector<MDataType> data;

   public:
   Matrix(const std::vector<DataType>& in, size_t nx_, size_t ny_, MinMax minmax);

   void Grad();

   auto Nx() const { return nx; }
   auto Ny() const { return ny; }

   // Element access; storage is row-major (ix varies fastest).
   const auto& V(size_t ix, size_t iy) const { return data[iy * nx + ix]; }
   auto& V(size_t ix, size_t iy) { return data[iy * nx + ix]; }

   const auto& Data() const { return data; }
};
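
// Grad() is only declared in this header. As an illustration of the
// general technique (an assumption, not this project's actual
// implementation), a centred finite-difference gradient magnitude on
// such a grid could be computed as:
//
//   for(size_t iy = 1; iy + 1 < ny; iy++)
//      for(size_t ix = 1; ix + 1 < nx; ix++)
//      {
//         auto dx = f(ix + 1, iy) - f(ix - 1, iy);   // d/dx, unit spacing
//         auto dy = f(ix, iy + 1) - f(ix, iy - 1);   // d/dy, unit spacing
//         g(ix, iy) = std::sqrt(dx * dx + dy * dy) / 2;
//      }
//
// where f reads the original field and g writes the result; boundary
// cells and fill values would need separate handling.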

// Writer for the action's NetCDF output file.
class GradMethods::NCFileW: public NCFileWBase
{
   public:
   MString Create(const MString& name, const MString& history, const std::vector<MString>& vnames, const std::vector<MString>& lnames, const std::vector<GradMethods::DataType>& lons,
                  const std::vector<GradMethods::DataType>& lats, int compress);
   MString WriteVariable(const MString& name, const GradMethods::Matrix& data);
};

// A source supports the GRAD action when it can read a variable into a
// flat vector of DataType.
template<class T>
concept GradSupported = requires(T t, const MString& vname) {
   { t.ReadVar(vname) } -> std::same_as<std::vector<GradMethods::DataType>>;
};
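
// Illustrative only: the minimal shape a hypothetical source type needs
// in order to satisfy GradSupported (real sources also provide Open(),
// NVar(), VarNames() and the other members used by DoAction below):
//
//   struct MySource
//   {
//      std::vector<GradMethods::DataType> ReadVar(const MString& vname);
//   };
//   static_assert(GradSupported<MySource>);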

// Register the GRAD action (method set GradMethods), available for any
// source type satisfying GradSupported.
ADD_ACTION(GRAD, grad, GradSupported<Source>, GradMethods);

template<class D> MString ActionGRAD::DoAction(const CLArgs& args, D& ds)
{
   auto resop = ds.Open(args);
   if(resop.Exist()) return "Can't open source: " + resop;

   // Output file and global defaults; per-variable options override them.
   MString name = args.contains("out") ? args.at("out") : "out.nc";

   MString min = args.contains("min") ? args.at("min") : "auto";
   MString max = args.contains("max") ? args.at("max") : "auto";

   int compress = args.contains("compress") ? args.at("compress").ToInt() : 3;

   std::vector<Matrix> data;
   std::vector<MString> lnames;

   // Read data
   for(size_t i = 0; i < ds.NVar(); i++)
   {
      const MString& vname = ds.VarNames()[i];
      const MString& lname = ds.LongNames()[i];
      bool hmin = args.contains(vname + "_min");
      bool hmax = args.contains(vname + "_max");
      MinMax minmax;

      minmax.log = args.contains(vname + "_log");

      // Per-variable bound if given, otherwise the global one; "auto"
      // means the bound is derived from the data.
      if(hmin)
      {
         MString vmin = args.at(vname + "_min");
         minmax.automin = (vmin == "auto");
         minmax.min = ToNum(vmin);
      }
      else
      {
         minmax.automin = (min == "auto");
         minmax.min = ToNum(min);
      }

      if(hmax)
      {
         MString vmax = args.at(vname + "_max");
         minmax.automax = (vmax == "auto");
         minmax.max = ToNum(vmax);
      }
      else
      {
         minmax.automax = (max == "auto");
         minmax.max = ToNum(max);
      }

      minmax.fill = ds.FillVal(vname);

      data.emplace_back(ds.ReadVar(vname), ds.Nx(), ds.Ny(), minmax);
      lnames.emplace_back(lname + ", gradient");
   }

   // Create the output file, then compute and write the gradients
   NCFileW fw;
   MString res = fw.Create(name, (ds.History().Exist() ? (ds.History() + "; ") : "") + args.at("_cmdline"), ds.VarNames(), lnames, ds.ReadLons(), ds.ReadLats(), compress);
   if(res.Exist()) return res;

   for(size_t i = 0; i < ds.NVar(); i++)
   {
      data[i].Grad();
      res = fw.WriteVariable(ds.VarNames()[i], data[i]);
      if(res.Exist()) return res;
   }

   return "";
}
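
// Options recognised above: out= (output file, default "out.nc"),
// min=/max= (global bounds, "auto" derives them from the data),
// compress= (compression level, default 3), and per-variable
// <var>_min=, <var>_max= and <var>_log. A hypothetical invocation
// (the exact CLI syntax is an assumption; only the option names come
// from DoAction) might look like:
//
//   <prog> grad out=grad.nc min=0 max=auto temp_log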