Add Expression, InputNode, NaryReduceNode
wbernoudy committed Dec 5, 2024
1 parent ea7a6f3 commit aacee34
Showing 24 changed files with 1,192 additions and 38 deletions.
1 change: 1 addition & 0 deletions dwave/optimization/_model.pxd
@@ -28,6 +28,7 @@ cdef class _Graph:
cpdef bool is_locked(self) noexcept
cpdef Py_ssize_t num_constraints(self) noexcept
cpdef Py_ssize_t num_decisions(self) noexcept
cpdef Py_ssize_t num_inputs(self) noexcept
cpdef Py_ssize_t num_nodes(self) noexcept
cpdef Py_ssize_t num_symbols(self) noexcept

1 change: 1 addition & 0 deletions dwave/optimization/_model.pyi
@@ -53,6 +53,7 @@ class _Graph:
def is_locked(self) -> bool: ...
def iter_constraints(self) -> collections.abc.Iterator[ArraySymbol]: ...
def iter_decisions(self) -> collections.abc.Iterator[Symbol]: ...
def iter_inputs(self) -> collections.abc.Iterator[Symbol]: ...
def iter_symbols(self) -> collections.abc.Iterator[Symbol]: ...
def lock(self): ...
def minimize(self, value: ArraySymbol): ...
10 changes: 9 additions & 1 deletion dwave/optimization/_model.pyx
@@ -31,7 +31,8 @@ from libcpp.utility cimport move
from libcpp.vector cimport vector

from dwave.optimization.libcpp.array cimport Array as cppArray
from dwave.optimization.libcpp.graph cimport DecisionNode as cppDecisionNode
from dwave.optimization.libcpp.graph cimport DecisionNode as cppDecisionNode, Node as cppNode
from dwave.optimization.libcpp.nodes cimport InputNode as cppInputNode
from dwave.optimization.states cimport States
from dwave.optimization.states import StateView
from dwave.optimization.symbols cimport symbol_from_ptr
@@ -402,6 +403,10 @@ cdef class _Graph:
for ptr in self._graph.decisions():
yield symbol_from_ptr(self, ptr)

def iter_inputs(self):
for ptr in self._graph.inputs():
yield symbol_from_ptr(self, ptr)

def iter_symbols(self):
"""Iterate over all symbols in the model.
@@ -522,6 +527,9 @@ cdef class _Graph:
num_edges += self._graph.nodes()[i].get().successors().size()
return num_edges

cpdef Py_ssize_t num_inputs(self) noexcept:
return self._graph.num_inputs()

cpdef Py_ssize_t num_nodes(self) noexcept:
"""Number of nodes in the directed acyclic graph for the model.
4 changes: 4 additions & 0 deletions dwave/optimization/include/dwave-optimization/array.hpp
@@ -935,6 +935,10 @@ std::ostream& operator<<(std::ostream& os, const Array::View& view);
bool array_shape_equal(const Array* lhs_ptr, const Array* rhs_ptr);
bool array_shape_equal(const Array& lhs, const Array& rhs);

// Test whether multiple arrays all have the same shape.
bool array_shape_equal(const std::span<const Array* const> array_ptrs);
bool array_shape_equal(const std::vector<const Array*>& array_ptrs);

/// Get the shape induced by broadcasting two arrays together.
/// See https://numpy.org/doc/stable/user/basics.broadcasting.html.
/// Raises an exception if the two arrays cannot be broadcast together
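As a usage sketch (not part of the commit), the new overloads take a container of const Array pointers and report whether every shape matches; the helper function below and its operands are hypothetical:

// Hypothetical helper, assuming only the array_shape_equal overloads declared above.
#include <vector>

#include "dwave-optimization/array.hpp"

using dwave::optimization::Array;
using dwave::optimization::array_shape_equal;

// Returns true only when every operand reports the same shape.
bool operands_compatible(const std::vector<const Array*>& operands) {
    return array_shape_equal(operands);
}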
11 changes: 11 additions & 0 deletions dwave/optimization/include/dwave-optimization/graph.hpp
@@ -34,6 +34,7 @@ namespace dwave::optimization {
class ArrayNode;
class Node;
class DecisionNode;
class InputNode;

// We don't want this interface to be opinionated about what type of rng we're using.
// So we create this class to do type erasure on RNGs.
@@ -73,6 +74,7 @@ class Graph {
public:
Graph();
~Graph();
Graph(Graph&&);

template <class NodeType, class... Args>
NodeType* emplace_node(Args&&... args);
@@ -138,6 +140,9 @@ class Graph {
// The number of constraints in the model.
ssize_t num_constraints() const noexcept { return constraints_.size(); }

// The number of input nodes in the model.
ssize_t num_inputs() const noexcept { return inputs_.size(); }

// Specify the objective node. Must be an array with a single element.
// To unset the objective provide nullptr.
void set_objective(ArrayNode* objective_ptr);
@@ -158,6 +163,9 @@
std::span<DecisionNode* const> decisions() noexcept { return decisions_; }
std::span<const DecisionNode* const> decisions() const noexcept { return decisions_; }

std::span<InputNode* const> inputs() noexcept { return inputs_; }
std::span<const InputNode* const> inputs() const noexcept { return inputs_; }

// Remove unused nodes from the graph.
//
// This method will reset the topological sort if there is one.
@@ -181,6 +189,7 @@
ArrayNode* objective_ptr_ = nullptr;
std::vector<ArrayNode*> constraints_;
std::vector<DecisionNode*> decisions_;
std::vector<InputNode*> inputs_;

// Track whether the model is currently topologically sorted
bool topologically_sorted_ = false;
@@ -331,6 +340,8 @@ NodeType* Graph::emplace_node(Args&&... args) {
static_assert(std::is_base_of_v<DecisionNode, NodeType>);
ptr->topological_index_ = decisions_.size();
decisions_.emplace_back(ptr);
} else if constexpr (std::is_base_of_v<InputNode, NodeType>) {
inputs_.emplace_back(ptr);
}

return ptr; // return the observing pointer
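A sketch of what this specialization means for callers, assuming only the Graph and InputNode declarations added in this commit; the function name, shape, and bounds are illustrative:

// Illustrative only: emplacing an InputNode-derived node registers it with the
// graph's input list rather than its decision list.
#include <cassert>
#include <vector>

#include "dwave-optimization/graph.hpp"
#include "dwave-optimization/nodes/lambda.hpp"

using namespace dwave::optimization;

void track_inputs_sketch() {
    Graph graph;

    // emplace_node forwards its arguments to the InputNode constructor.
    InputNode* in = graph.emplace_node<InputNode>(
            std::vector<ssize_t>{3}, 0.0, 1.0, /*integral=*/false);

    assert(graph.num_inputs() == 1);
    assert(graph.inputs()[0] == in);
}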
1 change: 1 addition & 0 deletions dwave/optimization/include/dwave-optimization/nodes.hpp
@@ -18,6 +18,7 @@
#include "dwave-optimization/nodes/constants.hpp"
#include "dwave-optimization/nodes/flow.hpp"
#include "dwave-optimization/nodes/indexing.hpp"
#include "dwave-optimization/nodes/lambda.hpp"
#include "dwave-optimization/nodes/manipulation.hpp"
#include "dwave-optimization/nodes/mathematical.hpp"
#include "dwave-optimization/nodes/numbers.hpp"
20 changes: 11 additions & 9 deletions dwave/optimization/include/dwave-optimization/nodes/constants.hpp
@@ -100,6 +100,17 @@ class ConstantNode : public ArrayOutputMixin<ArrayNode> {
void commit(State&) const noexcept override {}
void revert(State&) const noexcept override {}

protected:
// Information about the values in the buffer
struct BufferStats {
BufferStats() = delete;
explicit BufferStats(std::span<const double> buffer);

bool integral;
double min;
double max;
};

private:
// Allocate the memory to hold shape worth of doubles, but don't populate it
explicit ConstantNode(std::initializer_list<ssize_t> shape)
@@ -118,15 +129,6 @@ class ConstantNode : public ArrayOutputMixin<ArrayNode> {
// holds its values on the object itself rather than in a State.
double* buffer_ptr_;

// Information about the values in the buffer
struct BufferStats {
BufferStats() = delete;
explicit BufferStats(std::span<const double> buffer);

bool integral;
double min;
double max;
};
mutable std::optional<BufferStats> buffer_stats_;
};

109 changes: 109 additions & 0 deletions dwave/optimization/include/dwave-optimization/nodes/lambda.hpp
@@ -0,0 +1,109 @@
// Copyright 2023 D-Wave Systems Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <vector>

#include "dwave-optimization/array.hpp"
#include "dwave-optimization/graph.hpp"

namespace dwave::optimization {

// InputNode acts like a placeholder or store of data very similar to ConstantNode,
// with the key difference being that its contents *may* change in between propagations.
// However, it is not a decision variable--instead it is intended to act as an "input"
// for "models as functions", or as a placeholder in large models where (otherwise constant)
// data changes infrequently (e.g. a scheduling problem with a preference matrix).
//
// Currently there is no "default" way to initialize the state, so it must be initialized
// explicitly with some data.
class InputNode : public ArrayOutputMixin<ArrayNode> {
public:
explicit InputNode(std::span<const ssize_t> shape, double min, double max, bool integral)
: ArrayOutputMixin(shape), min_(min), max_(max), integral_(integral) {};

explicit InputNode(std::initializer_list<ssize_t> shape, double min, double max, bool integral)
: ArrayOutputMixin(shape), min_(min), max_(max), integral_(integral) {};

explicit InputNode()
: InputNode({}, -std::numeric_limits<double>::infinity(),
std::numeric_limits<double>::infinity(), false) {};

bool integral() const override { return integral_; };

double max() const override { return max_; };
double min() const override { return min_; };

void initialize_state(State& state) const override {
throw std::logic_error(
"InputNode must have state explicity initialized (with `initialize_state(state, "
"data)`)");
}

void initialize_state(State& state, std::span<const double> data) const;

double const* buff(const State&) const override;

std::span<const Update> diff(const State& state) const noexcept override;

void propagate(State& state) const noexcept override {};
void commit(State& state) const noexcept override;
void revert(State& state) const noexcept override;

void assign(State& state, const std::vector<double>& new_values) const;
void assign(State& state, std::span<const double> new_values) const;

private:
double min_, max_;
bool integral_;
};

class NaryReduceNode : public ArrayOutputMixin<ArrayNode> {
public:
// Runtime constructor that can be used from Cython/Python
NaryReduceNode(Graph&& expression, const std::vector<InputNode*>& inputs,
const ArrayNode* output, const std::vector<double>& initial_values,
const std::vector<ArrayNode*>& operands);

// Array overloads
double const* buff(const State& state) const override;
std::span<const Update> diff(const State& state) const override;
ssize_t size(const State& state) const override;
std::span<const ssize_t> shape(const State& state) const override;
ssize_t size_diff(const State& state) const override;
SizeInfo sizeinfo() const override;

// Information about the values is all inherited from the array
bool integral() const override;
double min() const override;
double max() const override;

// Node overloads
void commit(State& state) const override;
void initialize_state(State& state) const override;
void propagate(State& state) const override;
void revert(State& state) const override;

private:
double evaluate_expression(State& register_) const;

const Graph expression_;
const std::vector<InputNode*> inputs_;
const ArrayNode* output_;
const std::vector<ArrayNode*> operands_;
const std::vector<double> initial_values_;
};

} // namespace dwave::optimization
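Based only on the declarations above, a sketch of the intended InputNode lifecycle: its state has to be seeded explicitly and can then be reassigned between propagations. The function name, the values, and the origin of `state` are assumptions, not part of the commit:

#include <vector>

#include "dwave-optimization/nodes/lambda.hpp"

using namespace dwave::optimization;

// `state` is assumed to have been created and sized by the Graph that owns `input`.
void input_node_sketch(const InputNode& input, State& state) {
    // InputNode has no default state, so it must be seeded with data explicitly.
    input.initialize_state(state, std::vector<double>{0.5, 1.0, 1.5});

    // Between propagations the contents may be reassigned, e.g. when treating
    // the model as a function of its inputs.
    input.assign(state, std::vector<double>{2.0, 2.5, 3.0});
}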
17 changes: 17 additions & 0 deletions dwave/optimization/include/dwave-optimization/utils.hpp
@@ -16,6 +16,7 @@

#include <iostream>
#include <numeric>
#include <variant>
#include <vector>

namespace dwave::optimization {
@@ -166,4 +167,20 @@ void deduplicate_diff(std::vector<Update>& diff);
// Return whether the given double encodes an integer.
bool is_integer(const double& value);

template <class T, class... Ts, class node_type>
bool is_variant(const node_type* node_ptr) {
// If the pointer can be dynamically cast to this type, return true
if (dynamic_cast<const T*>(node_ptr)) {
return true;
}

// If there are still types left to check then "recurse"
if constexpr (sizeof...(Ts) > 0) {
return is_variant<Ts...>(node_ptr);
}

// If none match, then this Node didn't belong to the list of types
return false;
}

} // namespace dwave::optimization
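A brief usage sketch (assumed, not from the commit): is_variant tries a dynamic_cast against each listed type in turn, so it can be used to ask whether a node belongs to any of several node classes:

#include "dwave-optimization/graph.hpp"
#include "dwave-optimization/nodes/lambda.hpp"
#include "dwave-optimization/utils.hpp"

using namespace dwave::optimization;

// Returns true if node_ptr points at an InputNode or a DecisionNode;
// each type in the parameter pack is checked with dynamic_cast in order.
bool is_input_or_decision(const Node* node_ptr) {
    return is_variant<InputNode, DecisionNode>(node_ptr);
}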
13 changes: 12 additions & 1 deletion dwave/optimization/libcpp/graph.pxd
@@ -23,6 +23,7 @@ from dwave.optimization.libcpp cimport span
from dwave.optimization.libcpp.array cimport Array
from dwave.optimization.libcpp.state cimport State


cdef extern from "dwave-optimization/graph.hpp" namespace "dwave::optimization" nogil:
cdef cppclass Node:
struct SuccessorView:
@@ -38,22 +39,32 @@ cdef extern from "dwave-optimization/graph.hpp" namespace "dwave::optimization" nogil:
cdef cppclass DecisionNode(Node):
pass

cdef cppclass InputNode(Node, Array):
pass

# Sometimes Cython isn't able to reason about pointers as template inputs, so
# we make a few aliases for convenience
ctypedef Node* NodePtr
ctypedef ArrayNode* ArrayNodePtr
ctypedef DecisionNode* DecisionNodePtr

# This seems to be necessary to allow Cython to iterate over the returned
# span from `inputs()` directly. Otherwise it tries to cast it to a non-const
# version of span before iterating, which the C++ compiler will complain about.
ctypedef InputNode* const constInputNodePtr

cdef extern from "dwave-optimization/graph.hpp" namespace "dwave::optimization" nogil:
cdef cppclass Graph:
T* emplace_node[T](...) except+
void initialize_state(State&) except+
span[const unique_ptr[Node]] nodes() const
span[const ArrayNodePtr] constraints()
span[const DecisionNodePtr] decisions()
span[constInputNodePtr] inputs()
Py_ssize_t num_constraints()
Py_ssize_t num_nodes()
Py_ssize_t num_decisions()
Py_ssize_t num_constraints()
Py_ssize_t num_inputs()
@staticmethod
void recursive_initialize(State&, Node*) except+
@staticmethod
8 changes: 8 additions & 0 deletions dwave/optimization/libcpp/nodes.pxd
@@ -56,6 +56,9 @@ cdef extern from "dwave-optimization/nodes/constants.hpp" namespace "dwave::optimization" nogil:
cdef cppclass ConstantNode(ArrayNode):
const double* buff() const

cdef cppclass InputNode(ArrayNode):
const double* buff() const


cdef extern from "dwave-optimization/nodes/flow.hpp" namespace "dwave::optimization" nogil:
cdef cppclass WhereNode(ArrayNode):
@@ -77,6 +80,11 @@ cdef extern from "dwave-optimization/nodes/indexing.hpp" namespace "dwave::optimization" nogil:
pass


cdef extern from "dwave-optimization/nodes/lambda.hpp" namespace "dwave::optimization" nogil:
cdef cppclass NaryReduceNode(ArrayNode):
pass


cdef extern from "dwave-optimization/nodes/manipulation.hpp" namespace "dwave::optimization" nogil:
cdef cppclass ConcatenateNode(ArrayNode):
Py_ssize_t axis()