ONE - On-device Neural Engine
DecodeCommand.cpp
/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "DecodeCommand.hpp"
#include "Support.hpp"

#include <tensorflow/core/framework/graph.pb.h>

#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>

#include <iostream>

namespace tfkit
{

int DecodeCommand::run(int argc, const char *const *argv) const
{
  tensorflow::GraphDef graph_def;

  // Resolve the input/output streams from the command-line arguments.
  CmdArguments cmdargs(argc, argv);

  auto ioconfig = make_ioconfig(cmdargs);

  // Parse the serialized (binary) GraphDef from the configured input stream.
  google::protobuf::io::IstreamInputStream is{ioconfig->in()};
  google::protobuf::io::CodedInputStream coded_is{&is};

  if (!graph_def.ParseFromCodedStream(&coded_is))
  {
    std::cerr << "ERROR: Failed to parse tensorflow model" << std::endl;
    return 255;
  }

  // Emit the graph as text-format protobuf (.pbtxt) on the configured output stream.
  google::protobuf::io::OstreamOutputStream os{ioconfig->out()};
  google::protobuf::TextFormat::Print(graph_def, &os);

  return 0;
}

} // namespace tfkit
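The conversion above is the standard protobuf binary-to-text round trip: parse a serialized GraphDef with a CodedInputStream, then render it with TextFormat::Print. The standalone sketch below (not part of tfkit; the tiny placeholder graph and the program itself are illustrative assumptions) builds a minimal GraphDef in memory, serializes it to the binary wire format a .pb file would contain, and then applies the same two decode steps that DecodeCommand::run performs on its input stream.

// Illustrative sketch (not part of tfkit): exercise the same decode sequence
// used by DecodeCommand::run, but on an in-memory buffer instead of ioconfig streams.
#include <tensorflow/core/framework/graph.pb.h>

#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>

#include <iostream>
#include <string>

int main()
{
  // A minimal graph with a single placeholder node (illustrative content only).
  tensorflow::GraphDef original;
  auto *node = original.add_node();
  node->set_name("input");
  node->set_op("Placeholder");

  // Serialize to the binary form that a .pb model file would contain.
  std::string wire;
  original.SerializeToString(&wire);

  // Decode: parse the binary bytes back, then print them as text-format
  // protobuf -- the same two steps DecodeCommand performs.
  google::protobuf::io::ArrayInputStream raw{wire.data(), static_cast<int>(wire.size())};
  google::protobuf::io::CodedInputStream coded{&raw};

  tensorflow::GraphDef decoded;
  if (!decoded.ParseFromCodedStream(&coded))
  {
    std::cerr << "ERROR: Failed to parse tensorflow model" << std::endl;
    return 255;
  }

  google::protobuf::io::OstreamOutputStream os{&std::cout};
  google::protobuf::TextFormat::Print(decoded, &os);

  return 0;
}

Compiled against the TensorFlow proto headers and libprotobuf, this prints roughly node { name: "input" op: "Placeholder" } in text-format protobuf, i.e. the same .pbtxt representation that DecodeCommand writes to its output stream.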