bool CState::SetLocalDir(wxString dir, wxString *error /*=0*/)
{
	dir = Canonicalize(m_localDir, dir, error);
	if (dir == _T(""))
		return false;

	m_localDir = dir;

	COptions::Get()->SetOption(OPTION_LASTLOCALDIR, dir);

	NotifyHandlers(STATECHANGE_LOCAL_DIR);

	return true;
}
CFilePath &CFilePath::Clean(DWORD dwCleanup) { if (dwCleanup & epcRemoveArgs) { if (dwCleanup & epcTrim) msPath.TrimLeft(); PathRemoveArgs(CStringLock(msPath)); } if (dwCleanup & epcRemoveIconLocation) PathParseIconLocation(CStringLock(msPath)); if (dwCleanup & epcTrim) Trim(); if (dwCleanup & epcUnquote) { Unquote(); if (dwCleanup & epcTrimInQuote) Trim(); } if (dwCleanup & epcExpandEnvStrings) ExpandEnvStrings(); if (dwCleanup & epcCanonicalize) Canonicalize(); if (dwCleanup & epcRemoveXXL) ShrinkXXLPath(); if (dwCleanup & epcSlashToBackslash) msPath.Replace('/', '\\'); if (dwCleanup & epcMakePretty) MakePretty(); return *this; }
void TestPath()
{
	printf("TestPath()\n");

	assert(Canonicalize("", "") == "");
	assert(Canonicalize("", "baker") == "baker");
	assert(Canonicalize("able", "") == "able");
	assert(Canonicalize("able", "baker") == "able/baker");
	assert(Canonicalize("able/", "baker") == "able/baker");
	assert(Canonicalize("baker/charlie", "#delta") == "delta");
	assert(Canonicalize("baker/charlie", "#") == "");
	assert(Canonicalize("baker/charlie", "#../external") == "../external");
	assert(Canonicalize("baker/charlie", "..") == "baker");
	assert(Canonicalize("baker/charlie", "delta") == "baker/charlie/delta");
	assert(Canonicalize("baker/charlie", "../delta") == "baker/delta");
	assert(Canonicalize("baker/charlie/", "../delta") == "baker/delta");
	assert(Canonicalize("baker/charlie", "../../delta") == "delta");
	assert(Canonicalize("baker/charlie", "../..") == "");
	assert(Canonicalize("baker/charlie", "../../../external") == "../external");
	assert(Canonicalize("baker/charlie", "#:test") == ":test");

	assert(PathJoin("a/b/c", "x/y/z") == "a/b/c/x/y/z");
	assert(PathJoin("a/b/..", "x/y/z") == "a/b/../x/y/z");
	assert(PathJoin("../..", "../x/y/z") == "../../../x/y/z");

	assert(IsAbsolute("hello") == false);
	assert(IsAbsolute("hello/there") == false);
	assert(IsAbsolute("../hello") == false);
	assert(IsAbsolute("../../hello/there") == false);
	assert(IsAbsolute("/hello") == true);
	assert(IsAbsolute("c:/hello/there") == true);
	assert(IsAbsolute("C:/hello/there") == true);
	assert(IsAbsolute("c:\\hello\\there") == true);
	assert(IsAbsolute("C:\\hello\\there") == true);
}
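// The asserts in TestPath() above pin down the expected behaviour of
// Canonicalize, PathJoin and IsAbsolute. What follows is a minimal,
// self-contained sketch (not the original implementation) that satisfies
// those asserts, assuming std::string paths with '/' separators and a
// leading '#' in the relative part meaning "resolve from the root".

#include <cctype>
#include <string>
#include <vector>

// Split `path` on '/' and resolve "." and ".." segments, keeping any ".."
// that would climb above the (implicit) starting directory.
static std::string ResolveSegments(const std::string &path)
{
	std::vector<std::string> stack;
	std::string segment;
	for (size_t i = 0; i <= path.size(); ++i)
	{
		if (i == path.size() || path[i] == '/')
		{
			if (segment == "..")
			{
				if (!stack.empty() && stack.back() != "..")
					stack.pop_back();
				else
					stack.push_back("..");
			}
			else if (!segment.empty() && segment != ".")
				stack.push_back(segment);
			segment.clear();
		}
		else
			segment += path[i];
	}
	std::string result;
	for (size_t i = 0; i < stack.size(); ++i)
	{
		if (i)
			result += '/';
		result += stack[i];
	}
	return result;
}

// Resolve `rel` against `base`; a leading '#' in `rel` discards `base`.
std::string Canonicalize(const std::string &base, const std::string &rel)
{
	if (!rel.empty() && rel[0] == '#')
		return ResolveSegments(rel.substr(1));
	return ResolveSegments(base + "/" + rel);
}

// Join two paths verbatim (no ".." resolution), as the PathJoin asserts expect.
std::string PathJoin(const std::string &left, const std::string &right)
{
	if (left.empty())
		return right;
	if (right.empty())
		return left;
	return left + "/" + right;
}

// A path is absolute if it starts with a separator or a drive-letter prefix.
bool IsAbsolute(const std::string &path)
{
	if (path.empty())
		return false;
	if (path[0] == '/' || path[0] == '\\')
		return true;
	return path.size() > 1 && std::isalpha(static_cast<unsigned char>(path[0])) && path[1] == ':';
}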
void initJITBindings(PyObject *module) {
  auto m = py::handle(module).cast<py::module>();

  py::class_<python::IODescriptor>(m, "IODescriptor");

  m.def("_jit_init", loadPythonClasses)
   .def("_jit_pass_onnx", ToONNX)
   .def("_jit_pass_onnx_peephole", PeepholeOptimizeONNX)
   .def("_jit_pass_fuse", FuseGraph)
   .def("_jit_pass_dce", [](std::shared_ptr<Graph>& g) {
     return EliminateDeadCode(g); // overload resolution
   })
   .def("_jit_pass_cse", EliminateCommonSubexpression)
   .def("_jit_pass_peephole", PeepholeOptimize)
   .def("_jit_pass_canonicalize", [](const std::shared_ptr<Graph>& g) {
     return Canonicalize(g);
   })
   .def("_jit_pass_lint", LintGraph)
   .def("_jit_pass_shape_analysis", [](Graph& graph, py::tuple inputs, bool with_grad) {
     auto tensor_inputs = createVariableTensorList(inputs);
     PropagateInputShapes(graph, ArgumentSpec(with_grad, tensor_inputs));
   })
   .def("_jit_pass_loop_unrolling", UnrollLoops)
   .def("_jit_run_cpp_tests", [] {
     // We have to release the GIL inside this method, because if we happen to
     // initialize the autograd engine in these tests, the newly spawned worker threads will
     // try to initialize their PyThreadState*, and they need the GIL for this.
     AutoNoGIL _no_gil;
     return runJITCPPTests();
   })
   .def("_jit_flatten", [](py::handle& obj) {
     auto res = python::flatten(obj);
     return std::make_pair(res.vars, res.desc);
   })
   .def("_jit_unflatten", [](autograd::variable_list vars, python::IODescriptor& desc) {
     return py::reinterpret_steal<py::object>(python::unflatten(vars, desc));
   })
   .def("_jit_pass_onnx_block", BlockToONNX)
   .def("_jit_pass_fixup_onnx_loops", FixupONNXLoops)
   .def("_jit_pass_decompose_addmm", DecomposeAddmm);

  py::class_<ArgumentSpec>(m, "ArgumentSpec")
    .def("__repr__", [](ArgumentSpec& self) {
      std::ostringstream s;
      s << self;
      return s.str();
    });

  py::class_<Code>(m, "Code")
    .def("executors", [](Code& c) {
      return py::make_iterator(c.executors().begin(), c.executors().end());
    });

  py::class_<ExecutionPlanState>(m, "ExecutionPlanState")
    .def_property_readonly("graph", [](ExecutionPlanState& s) {
      return s.graph;
    })
    .def_property_readonly("code", [](ExecutionPlanState& s) {
      return s.f;
    })
    .def_property_readonly("grad_executor", [](ExecutionPlanState& s) {
      return s.grad_executor.get();
    });

  py::class_<GraphExecutorState>(m, "GraphExecutorState")
    .def_property_readonly("graph", [](GraphExecutorState& s) {
      return s.graph;
    })
    .def_property_readonly("execution_plans", [](GraphExecutorState& s) {
      return s.execution_plans;
    })
    .def_property_readonly("autograd_fallback", [](GraphExecutorState& s) {
      return s.autograd_fallback;
    })
    .def_property_readonly("autograd_fallback_graph", [](GraphExecutorState& s) {
      return s.autograd_fallback_graph;
    });

  py::class_<GraphExecutor>(m, "GraphExecutor", py::dynamic_attr())
    .def(
      py::init([](py::function func, variable_list inputs, bool optimize) {
        size_t num_inputs = inputs.size();
        auto graph = tracer::createGraphByTracing(func, std::move(inputs), num_inputs);
        return GraphExecutor(graph, optimize);
      }),
      py::arg("func"),
      py::arg("inputs"),
      py::arg("optimize") = true)
    .def(
      py::init([](std::shared_ptr<Graph> graph, bool optimize) {
        return GraphExecutor(std::move(graph), optimize);
      }),
      py::arg("graph"),
      py::arg("optimize") = true)
    .def_property_readonly("graph", [](GraphExecutor& ge) {
      return ge.graph();
    })
    .def("graph_for", [](GraphExecutor& ge, py::args args) {
      return ge.graphFor(createVariableTensorList(args));
    })
    .def("get_debug_state", [](GraphExecutor& ge) {
      return ge.getDebugState();
    })
    .def("__call__", [](GraphExecutor& ge, py::args args) -> py::object {
      auto inputs = createVariableTensorList(args);
      auto outputs = ge.run(std::move(inputs));
      // if we don't tell pybind these are variables it chokes on the
      // conversion.
      // TODO: fix conversions to be sane and make sure this works.
      if (outputs.size() == 0) {
        return py::none();
      } else if (outputs.size() == 1) {
        return py::cast(static_cast<autograd::Variable&>(outputs[0]));
      } else {
        py::tuple tuple(outputs.size());
        for (size_t i = 0; i < outputs.size(); i++) {
          tuple[i] = py::cast(static_cast<autograd::Variable&>(outputs[i]));
        }
        return tuple;
      }
    });

  initPythonIRBindings(module);
  tracer::initPythonTracerBindings(module);
  script::initTreeViewBindings(module);
  script::initJitScriptBindings(module);
  registerPythonInterpreterOps();
}