Commit fea975ed authored by Thien Minh Nguyen
Browse files

Refactor default gradients impl to be service-based



Signed-off-by: Thien Nguyen <nguyentm@ornl.gov>
parent cf2aadb6
Loading
Loading
Loading
Loading
+1 −0
Original line number Diff line number Diff line
@@ -8,6 +8,7 @@ set(LIBRARY_NAME qcor)
# Source list for the qcor runtime library.
# Listed explicitly rather than via file(GLOB): CMake does not re-run a glob at
# build time, so a newly added source file would be silently omitted until the
# next manual reconfigure (see CMake file(GLOB) documentation).
set(SRC observable/qcor_observable.cpp
        optimizer/qcor_optimizer.cpp
        objectives/objective_function.cpp
        objectives/gradient_function.cpp
        execution/taskInitiate.cpp
        utils/qcor_utils.cpp)

+2 −1
Original line number Diff line number Diff line
# VQE example/plugin tree.
add_subdirectory(vqe)
# Gradient-calculation service plugins (service-based default gradients).
add_subdirectory(gradients)
 No newline at end of file
+13 −35
Original line number Diff line number Diff line
#define US_BUNDLE_NAME
#include "gradient_function.hpp"
#include "AlgorithmGradientStrategy.hpp"
#include "cppmicroservices/ServiceProperties.h"
#include "qcor.hpp"
#include "xacc.hpp"
#include "xacc_internal_compiler.hpp"
#include "xacc_plugin.hpp"
#include "xacc_service.hpp"
using namespace cppmicroservices;
#include "qcor_utils.hpp"

namespace qcor {
// Builds a forward-difference gradient evaluator for a variational kernel.
// kernel_evaluator: maps a parameter vector to a compiled circuit
//                   (held by reference -- must outlive this object).
// observable:       operator whose expectation value defines the cost.
// step_size:        finite-difference step size.
KernelForwardDifferenceGradient::KernelForwardDifferenceGradient(
    std::function<std::shared_ptr<xacc::CompositeInstruction>(
        std::vector<double>)> &kernel_evaluator,
    std::shared_ptr<xacc::Observable> observable, double step_size)
    : m_kernelEval(kernel_evaluator), m_step(step_size), m_obs(observable) {
  // NOTE(review): the lambda captures by reference (including `this`) and is
  // stored in gradient_func, so it must not outlive this object.
  gradient_func = [&](const std::vector<double> &x,
                      double cost_val) -> std::vector<double> {
    std::vector<double> gradients(x.size(), 0.0);
    // TODO: port the implementation here as well.
    // Delegate to XACC's "forward" finite-difference gradient strategy.
    auto gradient_strategy =
        xacc::getService<xacc::AlgorithmGradientStrategy>("forward");

    // Numerical strategies need the current cost value; strip the identity
    // sub-term's coefficient since it is a constant offset with zero gradient.
    if (gradient_strategy->isNumerical() && m_obs->getIdentitySubTerm()) {
      gradient_strategy->setFunctionValue(
          cost_val - std::real(m_obs->getIdentitySubTerm()->coefficient()));
    }
    // Evaluate the kernel at the current parameters, then generate the
    // shifted-parameter circuits required by the finite-difference scheme.
    auto kernel = m_kernelEval(x);
    gradient_strategy->initialize({{"observable", m_obs}, {"step", m_step}});
    auto grad_kernels = gradient_strategy->getGradientExecutions(kernel, x);
    // Allocate enough qubits for both the observable and the kernel.
    const size_t nb_qubits =
        std::max(static_cast<size_t>(m_obs->nBits()), kernel->nPhysicalBits());
    auto tmp_grad = qalloc(nb_qubits);
    // Execute all shifted circuits, then let the strategy assemble the
    // gradient vector from the per-circuit child results.
    xacc::internal_compiler::execute(tmp_grad.results(), grad_kernels);
    auto tmp_grad_children = tmp_grad.results()->getChildren();
    gradient_strategy->compute(gradients, tmp_grad_children);
    return gradients;
  };
  // NOTE(review): the constructor's closing brace is not visible in this diff
  // view (collapsed context); the body continues past this hunk.
namespace __internal__ {
// Factory for gradient services: resolves the registered KernelGradientService
// named `type`, wires it to the objective function and kernel evaluator, and
// hands it back through the base GradientFunction interface.
std::shared_ptr<GradientFunction> get_gradient_method(
    const std::string &type, std::shared_ptr<ObjectiveFunction> obj_func,
    std::function<std::shared_ptr<xacc::CompositeInstruction>(std::vector<double>)>
        &kernel_eval) {
  // The service registry is only usable once the framework is up.
  if (!xacc::isInitialized()) {
    xacc::internal_compiler::compiler_InitializeXACC();
  }
  auto gradient_service = xacc::getService<KernelGradientService>(type);
  gradient_service->initialize(obj_func, kernel_eval);
  return gradient_service;
}
} // namespace __internal__
} // namespace qcor
 No newline at end of file
+23 −14
Original line number Diff line number Diff line
#pragma once

#include <functional>
#include <memory>
#include <vector>

#include "Identifiable.hpp"
#include "heterogeneous.hpp"

namespace xacc {
class CompositeInstruction;
class Observable;
}
} // namespace xacc
namespace qcor {
class ObjectiveFunction;
// Gradient function type:
// Input: set of current parameters (std::vector<double>) and the current
// objective (cost) function value. Output: gradients (std::vector<double>)
@@ -27,18 +29,25 @@ public:
  }
};

// Evaluate the Forward Difference gradients of a variational kernel.
class KernelForwardDifferenceGradient : public GradientFunction {
protected:
  std::function<std::shared_ptr<xacc::CompositeInstruction>(std::vector<double>)>
      &m_kernelEval;
  double m_step;
  std::shared_ptr<xacc::Observable> m_obs;
namespace __internal__ {
// Factory: resolves the registered gradient service named `type`, initializes
// it with the objective function and the kernel evaluator, and returns it via
// the base GradientFunction interface. (Defined in gradient_function.cpp.)
std::shared_ptr<GradientFunction>
get_gradient_method(const std::string &type,
                    std::shared_ptr<ObjectiveFunction> obj_func,
                    std::function<std::shared_ptr<xacc::CompositeInstruction>(
                        std::vector<double>)> &kernel_eval);
} // namespace __internal__

// Interface for gradient calculation services.
// Note: we keep the base GradientFunction API as simple as possible (just a
// thin wrapper around std::function, i.e. C++ lambda) so that users can define
// it in-place if need be. We also provide a set of registered gradient
// services implementing this interface.
// Implementations are looked up by name via the xacc service registry
// (hence xacc::Identifiable) and must populate gradient_func in initialize().
class KernelGradientService : public GradientFunction, public xacc::Identifiable {
public:
  // NOTE(review): this declaration names KernelForwardDifferenceGradient, not
  // this class -- it appears to be a leftover line from the replaced class in
  // this diff view; confirm against the actual file.
  KernelForwardDifferenceGradient(
      std::function<std::shared_ptr<xacc::CompositeInstruction>(std::vector<double>)>
          &kernel_evaluator,
      std::shared_ptr<xacc::Observable> observable, double step_size = 1.0e-7);
  // Binds the service to an objective function and a kernel evaluator;
  // `options` carries service-specific settings (e.g. step size).
  virtual void
  initialize(std::shared_ptr<ObjectiveFunction> obj_func,
             std::function<std::shared_ptr<xacc::CompositeInstruction>(
                 std::vector<double>)> &kernel_eval,
             xacc::HeterogeneousMap &&options = {}) = 0;
};
} // namespace qcor
 No newline at end of file
+1 −0
Original line number Diff line number Diff line
# Forward-difference gradient service plugin.
add_subdirectory(FiniteDifference)
Loading