Commit af2778fe authored by Nguyen, Thien Minh
Browse files

Work on adding a shorthand for gradient calculation for a kernel



i.e. in the OptFunction itself rather than the ObjectiveFunction

Signed-off-by: Thien Nguyen <nguyentm@ornl.gov>
parent f74ef694
Loading
Loading
Loading
Loading
+37 −0
Original line number Diff line number Diff line
// Variational ansatz kernel for the 2-qubit Deuteron VQE example,
// parameterized by a single rotation angle `theta`.
__qpu__ void ansatz(qreg q, double theta) {
  X(q[0]);          // put qubit 0 into |1>
  Ry(q[1], theta);  // variational Y-rotation on qubit 1
  CX(q[1], q[0]);   // entangle: CNOT, qubit 1 controls qubit 0
}

int main(int argc, char **argv) {
  // Allocate 2 qubits
  auto q = qalloc(2);

  // Create the Deuteron Hamiltonian
  auto H = 5.907 - 2.1433 * X(0) * X(1) - 2.1433 * Y(0) * Y(1) + .21829 * Z(0) -
           6.125 * Z(1);

  // Create the Optimizer: gradient-based L-BFGS from NLOpt.
  auto optimizer = createOptimizer("nlopt", {{"nlopt-optimizer", "l-bfgs"}});

  // Wrap the VQE cost directly in an OptFunction over 1 variational
  // parameter. ansatz::autograd evaluates <H> at x[0] and fills dx with
  // the gradient, so no explicit ObjectiveFunction is needed (the unused
  // createObjectiveFunction call was removed).
  OptFunction opt_function(
      [&](const std::vector<double> &x, std::vector<double> &dx) {
        auto q = qalloc(2);
        auto exp = ansatz::autograd(H, dx, q, x[0]);
        print("<E(", x[0], ") = ", exp);
        return exp;
      },
      1);

  // Run the optimization and report the minimum energy found.
  auto [energy, opt_params] = optimizer->optimize(opt_function);
  print(energy);
}
+18 −0
Original line number Diff line number Diff line
@@ -187,6 +187,24 @@ class QuantumKernel {
    return observe(*obs, args...);
  }

  // Simple autograd support for kernel with simple type: double or
  // vector<double>. Other signatures must provide a translator...
  //
  // Evaluates <obs> for this kernel at parameter `x` on qreg `q`, fills
  // `dx` with the gradient computed via the default gradient strategy,
  // and returns the cost (expectation) value at `x`.
  static double autograd(Observable &obs, std::vector<double> &dx, qreg q,
                         double x) {
    // Re-instantiates the kernel at a given parameter vector so the
    // gradient service can evaluate shifted parameter values.
    std::function<std::shared_ptr<xacc::CompositeInstruction>(
        std::vector<double>)>
        kernel_eval = [q](std::vector<double> x) {
          Derived derived(q, x[0]);
          return derived.parent_kernel;
        };

    // Fixed local typo: "gradiend_method" -> "gradient_method".
    auto gradient_method = qcor::__internal__::get_gradient_method(
        qcor::__internal__::DEFAULT_GRADIENT_METHOD, kernel_eval, obs);
    // Cost at the requested parameter; also handed to the gradient
    // method so finite-difference strategies can reuse it.
    const double cost_val = observe(obs, q, x);
    dx = (*gradient_method)({x}, cost_val);
    return cost_val;
  }

  static std::string openqasm(Args... args) {
    Derived derived(args...);
    KernelSignature<Args...> callable(derived);
+13 −0
Original line number Diff line number Diff line
@@ -15,5 +15,18 @@ get_gradient_method(const std::string &type,
  service->initialize(obj_func, std::move(options));
  return service;
}

// Look up the kernel-based gradient service registered under `type` and
// initialize it with the kernel evaluator and the cost observable.
std::shared_ptr<GradientFunction>
get_gradient_method(const std::string &type,
                    std::function<std::shared_ptr<xacc::CompositeInstruction>(
                        std::vector<double>)>
                        kernel_eval,
                    xacc::Observable &obs) {
  // The framework must be up before any plugin can be requested.
  if (!xacc::isInitialized()) {
    xacc::internal_compiler::compiler_InitializeXACC();
  }
  auto gradient_service = xacc::getService<KernelGradientService>(type);
  gradient_service->initialize(kernel_eval, obs);
  return gradient_service;
}
} // namespace __internal__
} // namespace qcor
 No newline at end of file
+12 −0
Original line number Diff line number Diff line
@@ -35,6 +35,13 @@ std::shared_ptr<GradientFunction>
get_gradient_method(const std::string &type,
                    std::shared_ptr<ObjectiveFunction> obj_func,
                    xacc::HeterogeneousMap options = {});

// Create a GradientFunction for the gradient strategy named by `type`
// (e.g. "forward", "backward", "central") from a kernel-evaluation
// functor and the Observable whose expectation defines the cost.
std::shared_ptr<GradientFunction>
get_gradient_method(const std::string &type,
                    std::function<std::shared_ptr<xacc::CompositeInstruction>(
                        std::vector<double>)>
                        kernel_eval,
                    xacc::Observable &obs);
} // namespace __internal__

// Interface for gradient calculation services.
@@ -47,5 +54,10 @@ class KernelGradientService : public GradientFunction,
public:
  virtual void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                          xacc::HeterogeneousMap &&options = {}) = 0;
  virtual void
  initialize(std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)>
                 kernel_eval,
             xacc::Observable &obs, xacc::HeterogeneousMap &&options = {}) = 0;
};
} // namespace qcor
 No newline at end of file
+56 −1
Original line number Diff line number Diff line
@@ -44,10 +44,27 @@ class KernelForwardDifferenceGradient : public KernelGradientService {
protected:
  std::shared_ptr<ObjectiveFunction> m_objFunc;
  double m_step = 1e-3;

  std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> m_kernel_eval;
public:
  const std::string name() const override { return "forward"; }
  const std::string description() const override { return ""; }

  // Kernel-based initialization: build the forward-difference gradient
  // functor from a kernel evaluator and the cost observable. An optional
  // "step" (double) entry in `options` overrides the default step size.
  virtual void
  initialize(std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> kernel_eval, xacc::Observable &obs,
             xacc::HeterogeneousMap &&options = {}) override {
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
    m_kernel_eval = kernel_eval;
    // NOTE(review): the lambda captures `obs` and `this` by reference; it
    // assumes the Observable and this service outlive every gradient
    // evaluation — confirm callers guarantee that (cf. the central-
    // difference service, which retains the observable as a member).
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
      return run_gradient_strategy(x, cost_val, "forward", m_step,
                                   xacc::as_shared_ptr(&obs), m_kernel_eval);
    };
  }

  void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                  HeterogeneousMap &&options) override {
    m_objFunc = obj_func;
@@ -67,10 +84,28 @@ class KernelBackwardDifferenceGradient : public KernelGradientService {
protected:
  std::shared_ptr<ObjectiveFunction> m_objFunc;
  double m_step = 1e-3;
  std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> m_kernel_eval;

public:
  const std::string name() const override { return "backward"; }
  const std::string description() const override { return ""; }

  // Kernel-based initialization: build the backward-difference gradient
  // functor from a kernel evaluator and the cost observable. An optional
  // "step" (double) entry in `options` overrides the default step size.
  virtual void
  initialize(std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> kernel_eval, xacc::Observable &obs,
             xacc::HeterogeneousMap &&options = {}) override {
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
    m_kernel_eval = kernel_eval;
    // NOTE(review): `obs` is captured by reference; the Observable must
    // outlive all gradient evaluations — confirm callers guarantee that.
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
      // BUG FIX: this is the *backward*-difference service (name() ==
      // "backward") but it previously requested the "forward" strategy —
      // a copy-paste slip from the forward-difference class.
      return run_gradient_strategy(x, cost_val, "backward", m_step,
                                   xacc::as_shared_ptr(&obs), m_kernel_eval);
    };
  }

  void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                  HeterogeneousMap &&options) override {
    m_objFunc = obj_func;
@@ -90,10 +125,30 @@ class KernelCentralDifferenceGradient : public KernelGradientService {
protected:
  std::shared_ptr<ObjectiveFunction> m_objFunc;
  double m_step = 1e-3;
  std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> m_kernel_eval;
  std::shared_ptr<xacc::Observable> m_obs;

public:
  const std::string name() const override { return "central"; }
  const std::string description() const override { return ""; }
  
  // Kernel-based initialization: build the central-difference gradient
  // functor from a kernel evaluator and the cost observable. An optional
  // "step" (double) entry in `options` overrides the default step size.
  virtual void
  initialize(std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> kernel_eval, xacc::Observable &obs,
             xacc::HeterogeneousMap &&options = {}) override {
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
    m_kernel_eval = kernel_eval;
    // Retain the observable as a member so the stored functor does not
    // depend on the `obs` reference parameter's lifetime.
    m_obs = xacc::as_shared_ptr(&obs);
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
      // BUG FIX: this is the *central*-difference service (name() ==
      // "central") but it previously requested the "forward" strategy —
      // a copy-paste slip from the forward-difference class.
      return run_gradient_strategy(x, cost_val, "central", m_step, m_obs,
                                   m_kernel_eval);
    };
  }

  void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                  HeterogeneousMap &&options) override {
    m_objFunc = obj_func;