main.cpp (forked from Kei18/lacam)
#include <argparse/argparse.hpp>
#include <lacam.hpp>
#include <torch/script.h>
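
// Illustrative invocation (binary name, map/scenario paths, and the agent
// count are placeholders; the other values mirror the defaults declared below):
//   ./build/main -m <map_file> -i <scen_file> -N 100 -v 1 -t 10 \
//                -M ./models/91_val_paris.pt -k 4 -n true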
int main(int argc, char* argv[])
{
  // arguments parser
  argparse::ArgumentParser program("lacam", "0.1.0");
  program.add_argument("-m", "--map").help("map file").required();
  program.add_argument("-i", "--scen")
      .help("scenario file")
      .default_value(std::string(""));
  program.add_argument("-N", "--num").help("number of agents").required();
  program.add_argument("-s", "--seed")
      .help("seed")
      .default_value(std::string("0"));
  program.add_argument("-v", "--verbose")
      .help("verbose")
      .default_value(std::string("0"));
  program.add_argument("-t", "--time_limit_sec")
      .help("time limit sec")
      .default_value(std::string("10"));
  program.add_argument("-o", "--output")
      .help("output file")
      .default_value(std::string("./build/result.txt"));
  program.add_argument("-l", "--log_short")
      .default_value(false)
      .implicit_value(true);
  program.add_argument("-M", "--model")
      .help("model file")
      .default_value(std::string("./models/91_val_paris.pt"));
  program.add_argument("-k", "--kval")
      .default_value(std::string("4"));  // TODO <-- this could cause problems
  program.add_argument("-n", "--neural_flag")
      .default_value(std::string("true"));  // TODO <-- this could cause problems

  try {
    program.parse_known_args(argc, argv);
  } catch (const std::runtime_error& err) {
    std::cerr << err.what() << std::endl;
    std::cerr << program;
    std::exit(1);
  }
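
  // note: the numeric options above are declared with string defaults (see the
  // TODO markers), hence the std::stoi conversions below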
  // setup instance
  const auto verbose = std::stoi(program.get<std::string>("verbose"));
  const auto time_limit_sec =
      std::stoi(program.get<std::string>("time_limit_sec"));
  const auto scen_name = program.get<std::string>("scen");
  const auto seed = std::stoi(program.get<std::string>("seed"));
  auto MT = std::mt19937(seed);
  const auto map_name = program.get<std::string>("map");
  const auto output_name = program.get<std::string>("output");
  const auto log_short = program.get<bool>("log_short");
  const auto model_name = program.get<std::string>("model");
  const auto k = std::stoi(program.get<std::string>("kval"));
  const auto N = std::stoi(program.get<std::string>("num"));
  const auto ins = scen_name.size() > 0 ? Instance(scen_name, map_name, N)
                                        : Instance(map_name, &MT, N);
  bool neural_flag = program.get<std::string>("neural_flag") == "true";
  if (!ins.is_valid(1)) return 1;
  // setup model
  torch::jit::script::Module module;
  try {
    // Deserialize the ScriptModule from a file using torch::jit::load().
    module = torch::jit::load(model_name);
  } catch (const c10::Error& e) {
    std::cerr << "error loading the model\n";
    return -1;
  }
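
  // note: solve() here receives the loaded TorchScript module, the k value,
  // and the neural-guidance flag in addition to the usual instance, verbosity,
  // deadline, and RNG arguments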
  // solve
  const auto deadline = Deadline(time_limit_sec * 1000);
  const auto solution =
      solve(ins, verbose - 1, &deadline, &MT, &module, k, neural_flag);
  const auto comp_time_ms = deadline.elapsed_ms();

  // failure
  if (solution.empty()) info(1, verbose, "failed to solve");

  // check feasibility
  if (!is_feasible_solution(ins, solution, verbose)) {
    info(0, verbose, "invalid solution");
    return 1;
  }

  // post processing
  print_stats(verbose, ins, solution, comp_time_ms);
  make_log(ins, solution, output_name, comp_time_ms, map_name, seed,
           log_short);

  return 0;
}