
Commit

Fixing API utils (#4651)
I made a mistake in #4642: the required information there is about input properties, not output properties. This commit fixes that mistake and also adds an in_ prefix to the function names to make this clearer.

Signed-off-by: szalpal <mszolucha@nvidia.com>
szalpal authored and stiepan committed Feb 13, 2023
1 parent 3af5f68 commit 513c685
Showing 5 changed files with 27 additions and 27 deletions.
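
To see why the commit message's input/output distinction matters, consider VideoInput: what it consumes (an encoded byte stream) and what it produces (decoded frames) have different layouts and dimensionality, so unprefixed layout()/ndim()/dtype() accessors were ambiguous. Below is a purely illustrative summary of the two sides; the struct is made up for this note, and the values are taken from the video_input.h diff further down.

// Illustrative only -- this struct does not exist in the repository; the
// values come from the video_input.h changes in this commit.
struct VideoInputSides {
  // Input side: what users feed into the operator, now reported by
  // in_layout() / in_ndim() / in_dtype().
  const char *in_layout = "B";      // a flat byte stream
  int         in_ndim   = 1;
  const char *in_dtype  = "uint8";  // DALIDataType::DALI_UINT8 in the real code
  // Output side: decoded frames laid out as "FHWC"
  // (frame, height, width, channel), as the pre-change values suggest.
  const char *out_layout = "FHWC";
};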
dali/operators/input/video_input.h (12 changes: 6 additions & 6 deletions)
@@ -179,17 +179,17 @@ class VideoInput : public VideoDecoderBase<Backend, FramesDecoder>, public Input
   }


-  const TensorLayout& layout() const override {
-    return layout_;
+  const TensorLayout& in_layout() const override {
+    return in_layout_;
   }


-  int ndim() const override {
-    return 4;
+  int in_ndim() const override {
+    return 1;
   }


-  DALIDataType dtype() const override {
+  DALIDataType in_dtype() const override {
     return DALIDataType::DALI_UINT8;
   }

@@ -311,7 +311,7 @@ class VideoInput : public VideoDecoderBase<Backend, FramesDecoder>, public Input
   /// CPU operators have default Thread Pool inside Workspace. Mixed and GPU ops don't.
   std::optional<ThreadPool> thread_pool_ = std::nullopt;

-  TensorLayout layout_ = "FHWC";
+  TensorLayout in_layout_ = "B";  // Byte stream.
 };


dali/pipeline/operator/builtin/external_source.h (6 changes: 3 additions & 3 deletions)
@@ -55,15 +55,15 @@ class ExternalSource : public InputOperator<Backend> {
     return "ExternalSource (" + output_name_ + ")";
   }

-  const TensorLayout& layout() const override {
+  const TensorLayout& in_layout() const override {
     return layout_;
   }

-  int ndim() const override {
+  int in_ndim() const override {
     return ndim_;
   }

-  DALIDataType dtype() const override {
+  DALIDataType in_dtype() const override {
     return dtype_;
   }

dali/pipeline/operator/builtin/input_operator.h (12 changes: 6 additions & 6 deletions)
@@ -232,19 +232,19 @@ class InputOperator : public Operator<Backend>, virtual public BatchSizeProvider
   }

   /**
-   * Returns the layout of the output from this Operator.
+   * Returns the layout at the input of this Operator.
    */
-  virtual const TensorLayout& layout() const = 0;
+  virtual const TensorLayout& in_layout() const = 0;

   /**
-   * Returns the number of dimensions at the output from this Operator.
+   * Returns the number of dimensions at the input of this Operator.
    */
-  virtual int ndim() const = 0;
+  virtual int in_ndim() const = 0;

   /**
-   * Returns the type of the data at the output from this Operator.
+   * Returns the type of the data at the input of this Operator.
    */
-  virtual DALIDataType dtype() const = 0;
+  virtual DALIDataType in_dtype() const = 0;


 protected:
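
Not part of this commit, but as a minimal sketch of how a concrete input operator implements the renamed pure-virtual getters: the class name RawByteInput and its fixed descriptors are hypothetical, and the rest of the Operator machinery (constructor, SetupImpl, RunImpl, ...) is omitted.

#include "dali/pipeline/operator/builtin/input_operator.h"

namespace dali {

// Hypothetical example -- not part of the DALI code base. The getters
// describe the data that is fed INTO the operator, per the renamed
// interface above.
template <typename Backend>
class RawByteInput : public InputOperator<Backend> {
 public:
  const TensorLayout &in_layout() const override {
    return in_layout_;
  }

  int in_ndim() const override {
    return 1;  // a flat byte stream has a single dimension
  }

  DALIDataType in_dtype() const override {
    return DALIDataType::DALI_UINT8;
  }

  // Constructor, SetupImpl, RunImpl, etc. omitted for brevity.

 private:
  TensorLayout in_layout_ = "B";  // "B" = byte stream, as in VideoInput above
};

}  // namespace dali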
dali/pipeline/pipeline.cc (18 changes: 9 additions & 9 deletions)
@@ -797,17 +797,17 @@ const TensorLayout &Pipeline::GetInputLayout(const std::string &name) {
   if (node->op_type == OpType::CPU) {
     const auto *in_op = dynamic_cast<InputOperator<CPUBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->layout();
+      return in_op->in_layout();
     }
   } else if (node->op_type == OpType::MIXED) {
     const auto *in_op = dynamic_cast<InputOperator<MixedBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->layout();
+      return in_op->in_layout();
     }
   } else if (node->op_type == OpType::GPU) {
     const auto *in_op = dynamic_cast<InputOperator<GPUBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->layout();
+      return in_op->in_layout();
     }
   }
   DALI_FAIL(make_string("Could not find an input operator named \"", name, "\"."));
@@ -820,17 +820,17 @@ int Pipeline::GetInputNdim(const std::string &name) {
   if (node->op_type == OpType::CPU) {
     const auto *in_op = dynamic_cast<InputOperator<CPUBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->ndim();
+      return in_op->in_ndim();
     }
   } else if (node->op_type == OpType::MIXED) {
     const auto *in_op = dynamic_cast<InputOperator<MixedBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->ndim();
+      return in_op->in_ndim();
     }
   } else if (node->op_type == OpType::GPU) {
     const auto *in_op = dynamic_cast<InputOperator<GPUBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->ndim();
+      return in_op->in_ndim();
     }
   }
   DALI_FAIL(make_string("Could not find an input operator named \"", name, "\"."));
@@ -843,17 +843,17 @@ DALIDataType Pipeline::GetInputDtype(const std::string &name) {
   if (node->op_type == OpType::CPU) {
     const auto *in_op = dynamic_cast<InputOperator<CPUBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->dtype();
+      return in_op->in_dtype();
     }
   } else if (node->op_type == OpType::MIXED) {
     const auto *in_op = dynamic_cast<InputOperator<MixedBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->dtype();
+      return in_op->in_dtype();
     }
   } else if (node->op_type == OpType::GPU) {
     const auto *in_op = dynamic_cast<InputOperator<GPUBackend> *>(node->op.get());
     if (in_op) {
-      return in_op->dtype();
+      return in_op->in_dtype();
     }
   }
   DALI_FAIL(make_string("Could not find an input operator named \"", name, "\"."));
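
For completeness, a hypothetical usage sketch of the Pipeline accessors touched above (not part of this commit; the input name "raw_input" is made up): given a built pipeline, the input-side metadata now flows from the renamed InputOperator getters.

#include "dali/pipeline/pipeline.h"

// Hypothetical helper -- "raw_input" is an illustrative input name, not one
// that exists in the repository.
void InspectInput(dali::Pipeline &pipe) {
  const auto &layout = pipe.GetInputLayout("raw_input");  // TensorLayout, e.g. "B"
  auto ndim = pipe.GetInputNdim("raw_input");             // int, e.g. 1
  auto dtype = pipe.GetInputDtype("raw_input");           // DALIDataType, e.g. DALI_UINT8
  (void)layout; (void)ndim; (void)dtype;                  // use as needed
}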
dali/test/operators/identity_input.h (6 changes: 3 additions & 3 deletions)
@@ -80,15 +80,15 @@ class IdentityInput : public InputOperator<Backend> {
   }


-  const TensorLayout& layout() const override {
+  const TensorLayout& in_layout() const override {
     DALI_FAIL("Not implemented");
   }

-  int ndim() const override {
+  int in_ndim() const override {
     DALI_FAIL("Not implemented");
   }

-  DALIDataType dtype() const override {
+  DALIDataType in_dtype() const override {
     DALI_FAIL("Not implemented");
   }

