FiniteDifferenceGradients.cpp 7.91 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
/*******************************************************************************
 * Copyright (c) 2018-, UT-Battelle, LLC.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the BSD 3-Clause License 
 * which accompanies this distribution. 
 *
 * Contributors:
 *   Alexander J. McCaskey - initial API and implementation
 *   Thien Nguyen - implementation
 *******************************************************************************/
11
12
#include "AlgorithmGradientStrategy.hpp"
#include "cppmicroservices/ServiceProperties.h"
Mccaskey, Alex's avatar
Mccaskey, Alex committed
13
#include "objective_function.hpp"
14
15
#include "gradient_function.hpp"
#include "qcor.hpp"
16
#include "qcor_observable.hpp"
17
18
19
20
#include "xacc.hpp"
#include "xacc_internal_compiler.hpp"
#include "xacc_plugin.hpp"
#include "xacc_service.hpp"
21

22
23
using namespace cppmicroservices;

24
25
26
27
28
namespace {
// Bridges a QCOR gradient request to the XACC numerical
// AlgorithmGradientStrategy service (plugin names: "forward", "backward",
// "central").
// TODO: implement QCOR native methods
//
// x            : current parameter vector
// cost_val     : objective value at x (consumed by numerical strategies)
// name         : XACC gradient-strategy plugin name
// step         : finite-difference step (sign convention set by the caller)
// obs          : observable whose expectation defines the cost
// _kernel_eval : evaluates the ansatz kernel at a parameter vector
std::vector<double> run_gradient_strategy(
    const std::vector<double> &x, double cost_val, const std::string &name,
    double step, qcor::Operator &obs,
    std::function<
        std::shared_ptr<qcor::CompositeInstruction>(std::vector<double>)>
        _kernel_eval) {
  auto xacc_observable =
      std::dynamic_pointer_cast<xacc::Observable>(obs.get_as_opaque());
  auto strategy = xacc::getService<xacc::AlgorithmGradientStrategy>(name);

  // Numerical strategies want the cost value with any identity-term offset
  // subtracted out (that term contributes nothing to the gradient).
  if (strategy->isNumerical() && xacc_observable->getIdentitySubTerm()) {
    strategy->setFunctionValue(
        cost_val -
        std::real(xacc_observable->getIdentitySubTerm()->coefficient()));
  }

  // Adapt the QCOR kernel evaluator to the xacc::CompositeInstruction type
  // expected by the strategy service.
  std::function<std::shared_ptr<xacc::CompositeInstruction>(
      std::vector<double>)>
      xacc_kernel_eval = [&](std::vector<double> params)
      -> std::shared_ptr<xacc::CompositeInstruction> {
    // _kernel_eval produces a qcor::CompositeInstruction ptr
    return _kernel_eval(params)->as_xacc();
  };

  auto base_kernel = xacc_kernel_eval(x);
  strategy->initialize({{"observable", xacc_observable},
                        {"step", step},
                        {"kernel-evaluator", xacc_kernel_eval}});

  // Build the shifted circuits and run them on a scratch buffer sized for
  // both the observable support and the kernel's physical qubits.
  auto shifted_kernels = strategy->getGradientExecutions(base_kernel, x);
  const size_t nb_qubits =
      std::max(static_cast<size_t>(xacc_observable->nBits()),
               base_kernel->nPhysicalBits());
  auto scratch_buffer = qalloc(nb_qubits);
  xacc::internal_compiler::execute(scratch_buffer.results(), shifted_kernels);

  // Let the strategy fold the shifted-circuit results into the gradient.
  std::vector<double> gradients(x.size(), 0.0);
  auto child_buffers = scratch_buffer.results()->getChildren();
  strategy->compute(gradients, child_buffers);
  return gradients;
}
}  // namespace
65

66
67
namespace qcor {
class KernelForwardDifferenceGradient : public KernelGradientService {
68
 protected:
69
  std::shared_ptr<ObjectiveFunction> m_objFunc;
70
  double m_step = 1e-3;
71
72
73
74
75
  std::function<std::shared_ptr<qcor::CompositeInstruction>(
      std::vector<double>)>
      m_kernel_eval;

 public:
76
77
  const std::string name() const override { return "forward"; }
  const std::string description() const override { return ""; }
78

79
80
81
82
83
  virtual void initialize(
      std::function<
          std::shared_ptr<qcor::CompositeInstruction>(std::vector<double>)>
          _kernel_eval,
      Operator &obs, xacc::HeterogeneousMap &&options = {}) override {
84
85
86
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
87
    m_kernel_eval = _kernel_eval;
88
89
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
90
      return run_gradient_strategy(x, cost_val, "forward", -m_step, obs,
91
                                   m_kernel_eval);
92
93
94
    };
  }

95
96
97
98
99
100
101
102
  void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                  HeterogeneousMap &&options) override {
    m_objFunc = obj_func;
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
103
      auto obs = m_objFunc->get_observable();
104
      return run_gradient_strategy(x, cost_val, "forward", -m_step, obs,
105
106
107
108
109
110
                                   m_objFunc->get_kernel_evaluator());
    };
  }
};

class KernelBackwardDifferenceGradient : public KernelGradientService {
111
 protected:
112
  std::shared_ptr<ObjectiveFunction> m_objFunc;
113
  double m_step = 1e-3;
114
115
116
  std::function<std::shared_ptr<qcor::CompositeInstruction>(
      std::vector<double>)>
      m_kernel_eval;
117

118
 public:
119
120
  const std::string name() const override { return "backward"; }
  const std::string description() const override { return ""; }
121

122
123
124
125
  virtual void initialize(
      std::function<std::shared_ptr<CompositeInstruction>(std::vector<double>)>
          kernel_eval,
      Operator &obs, xacc::HeterogeneousMap &&options = {}) override {
126
127
128
129
130
131
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
    m_kernel_eval = kernel_eval;
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
132
      return run_gradient_strategy(x, cost_val, "backward", m_step, obs,
133
                                   m_kernel_eval);
134
135
136
    };
  }

137
138
139
  void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                  HeterogeneousMap &&options) override {
    m_objFunc = obj_func;
140
141
142
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
143
144
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
145
      auto obs = m_objFunc->get_observable();
146
      return run_gradient_strategy(x, cost_val, "backward", m_step, obs,
147
148
149
150
151
152
                                   m_objFunc->get_kernel_evaluator());
    };
  }
};

class KernelCentralDifferenceGradient : public KernelGradientService {
153
 protected:
154
  std::shared_ptr<ObjectiveFunction> m_objFunc;
155
  double m_step = 1e-3;
156
157
158
  std::function<std::shared_ptr<CompositeInstruction>(std::vector<double>)>
      m_kernel_eval;
  Operator m_obs;
159

160
 public:
161
162
  const std::string name() const override { return "central"; }
  const std::string description() const override { return ""; }
163
164
165
166
167

  virtual void initialize(
      std::function<std::shared_ptr<CompositeInstruction>(std::vector<double>)>
          kernel_eval,
      Operator &obs, xacc::HeterogeneousMap &&options = {}) override {
168
169
170
171
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
    m_kernel_eval = kernel_eval;
172
    m_obs = obs;
173
174
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
175
      return run_gradient_strategy(x, cost_val, "central", m_step, m_obs,
176
177
178
179
                                   m_kernel_eval);
    };
  }

180
181
182
183
184
185
  void initialize(std::shared_ptr<ObjectiveFunction> obj_func,
                  HeterogeneousMap &&options) override {
    m_objFunc = obj_func;
    if (options.keyExists<double>("step")) {
      m_step = options.get<double>("step");
    }
186
187
    gradient_func = [&](const std::vector<double> &x,
                        double cost_val) -> std::vector<double> {
188
      auto obs = m_objFunc->get_observable();
189
      return run_gradient_strategy(x, cost_val, "central", m_step, obs,
190
                                   m_objFunc->get_kernel_evaluator());
191
192
193
    };
  }
};
194
}  // namespace qcor
195
196
197
namespace {
// Register all three diff plugins
class US_ABI_LOCAL FiniteDiffActivator : public BundleActivator {
198
 public:
199
200
201
202
203
204
205
206
207
208
209
  FiniteDiffActivator() {}
  void Start(BundleContext context) {
    context.RegisterService<qcor::KernelGradientService>(
        std::make_shared<qcor::KernelForwardDifferenceGradient>());
    context.RegisterService<qcor::KernelGradientService>(
        std::make_shared<qcor::KernelBackwardDifferenceGradient>());
    context.RegisterService<qcor::KernelGradientService>(
        std::make_shared<qcor::KernelCentralDifferenceGradient>());
  }
  void Stop(BundleContext /*context*/) {}
};
210
CPPMICROSERVICES_EXPORT_BUNDLE_ACTIVATOR(FiniteDiffActivator)
211
}  // namespace