# Design notes: related datasets that can add and update records

# Self-referential tree: each node points at its parent row, and owns the
# child rows whose parent_id references it.
class Node < Sequel::Model(:nodes)
  belongs_to(:parent, :key => :parent_id, :class => Node)
  has_many(:children, :key => :parent_id, :class => Node)
end

# Example: navigating the self-referential association.
n = Node.root
n.parent #=> nil - because it's the root node
n.children #=> dataset of children

# Plain one-to-many association onto the attributes table.
class Node < Sequel::Model(:nodes)
  has_many(:attributes)
end

# Example: appending through the association dataset.
n = Node.root
n.attributes #=> dataset
n.attributes << {:kind => 1, :value => 'true'}
# implies (the association supplies the foreign key):
  n.attributes << {:node_id => n.id, :kind => 1, :value => 'true'}
  
# Association dataset customized with hash-style access: values are looked
# up (falling back to Attributes::Defaults) and stored via update-or-insert.
class Node < Sequel::Model(:nodes)
  has_many :attributes do
    # Value stored for `kind`, or its default when no row exists.
    def [](kind)
      record = first(:kind => kind)
      record ? record[:value] : Attributes::Defaults[kind]
    end

    # Upsert: update the existing row for `kind`, inserting one when the
    # update touched nothing.
    def []=(kind, value)
      value = value.to_s
      touched = filter(:kind => kind).update(:value => value)
      insert(:kind => kind, :value => value) if touched == 0
    end

    # Hash of kind => value pairs for this node.
    def to_hash
      super(:kind, :value)
    end
  end
end

# Example: hash-style access to attribute values.
n = Node.root
n.attributes[Node::Attributes::Point::SampleRate] #=> ...
n.attributes[2] = 'true'

# OK. How do we cache this thing?

# First, memoize the dataset: since we use Sequel, the dataset need only be
# constructed once and can then be reused for every query.

# What has_many expands to: a memoized dataset scoped to this node's key.
class Node
  has_many :attributes

  # implies:

  def attributes
    return @rel_attributes if @rel_attributes
    @rel_attributes = db[:attributes].filter(:node_id => @pkey)
  end
end

# Now for the caching - it really has to happen at the Dataset level, and we 
# need to support it right in there. We also should minimize the number of 
# methods that the adapter needs to define.

# Sketch of dataset-level extension hooks. Each feature layers itself on by
# extending the dataset instance with an anonymous module that wraps
# #fetch_records, so the features compose via `super`.
class Sequel::Dataset
  # Adapter interface: yields each raw record (a hash) to the block.
  # Intentionally empty here -- concrete adapters implement it.
  def fetch_records(opts = nil, &block)
  end

  # Wraps #fetch_records so records come back as instances of `c` instead of
  # hashes (unless opts[:naked] is set).
  # NOTE: define_method (not `def`) is required so the body closes over `c`;
  # a plain `def` inside Module.new cannot see surrounding locals.
  def model_class=(c)
    extend(Module.new do
      define_method(:fetch_records) do |opts = nil, &block|
        super(opts) do |r|
          # Guard opts with && -- opts defaults to nil, so opts[:naked]
          # alone would raise NoMethodError on the common path.
          r = c.new(r) unless opts && opts[:naked]
          block.call(r)
        end
      end
    end)
  end

  # Extends the dataset to return polymorphic models: `column` selects the
  # model class via `hash`, falling back to `default`. Raises when no class
  # matches and no default was given.
  def polymorphic(column, hash, default = nil)
    extend(Module.new do
      define_method(:fetch_records) do |opts = nil, &block|
        super(opts) do |r|
          if opts && opts[:naked]
            block.call(r)
          else
            c = hash[r[column]] || default
            unless c
              raise SequelError, "No matching model class for record (#{column} = #{r[column].inspect})"
            end
            block.call(c.new(r))
          end
        end
      end
    end)
  end

  # Caches unqualified fetches: the first plain #fetch_records materializes
  # @cached_results; later calls replay it without hitting the database.
  # Fetches that pass opts bypass the cache entirely.
  def cache_results
    @use_cache = true # NOTE(review): flag is set but not consulted anywhere yet
    extend(Module.new do
      def fetch_records(opts = nil, &block)
        if opts
          super(opts, &block)
        elsif @cached_results
          @cached_results.each(&block)
        else
          @cached_results = []
          super do |r|
            @cached_results << r
            block.call(r)
          end
        end
      end
    end)
  end

  # NOTE(review): the draft left this method without a body or `end`, which
  # unbalanced the whole class. Presumably it should stop the replay
  # behavior; for now just drop the flag and the cached rows.
  def disable_caching
    @use_cache = false
    clear_cache
  end

  # Forgets any materialized results so the next fetch hits the database.
  def clear_cache
    @cached_results = nil
  end

  # Enumeration goes through the (possibly wrapped) adapter interface.
  def each(opts = nil, &block)
    fetch_records(opts, &block)
  end
end










# Consolidated sketch: attribute kind ids plus a customized association
# dataset with hash-style and typed access.
class Node < Sequel::Model(:nodes)
  # Attribute "kinds" and their default values.
  class Attributes
    Defaults = {
      # ...
    }
    SampleRate = 1
    # NOTE(review): the draft had Deadband = 1, colliding with SampleRate;
    # distinct kinds need distinct ids -- assuming 2 was intended. Confirm.
    Deadband = 2
  end

  one_to_many(:attributes).on(:node_id) do
    # Value stored for `kind`, or its default when no row exists.
    def [](kind)
      if r = first(:kind => kind)
        r[:value]
      else
        Attributes::Defaults[kind]
      end
    end

    # Upsert: update the row for `kind`, inserting it when absent.
    # The association supplies :node_id on insert (self << {...} semantics).
    # NOTE(review): the draft contained a half-written `insert(:node_id => )`
    # and an alternative update_or_create call; kept the one coherent form.
    def []=(kind, value)
      value = value.to_s
      if filter(:kind => kind).update(:value => value) == 0
        insert(:kind => kind, :value => value)
      end
    end

    # Value for `kind` coerced to Integer, or nil when absent and no default.
    def get_integer(kind)
      (v = self[kind]).nil? ? nil : v.to_i
    end

    # Value for `kind` coerced via to_bool, or nil when absent and no default.
    def get_bool(kind)
      (v = self[kind]).nil? ? nil : v.to_bool
    end
  end
end

# Example: typed attribute access through the customized association.
node = Node.root
rate = node.attributes.get_integer(Node::Attributes::SampleRate)


# The next idea relates to pre-fetching results and keeping them around

class Node < Sequel::Model(:nodes)
  # :prefetch => :on_demand defers the query until first access, then keeps
  # all fetched rows around for subsequent lookups (see sketch below).
  one_to_many(:attributes, :prefetch => :on_demand) do
    # ...
  end
end

# Example: lazy prefetch -- one query on first access, none afterwards.
node = Node.root # attributes are not yet fetched
rate = node.attributes.get_integer(Node::Attributes::SampleRate) # all attributes are fetched
deadband = node.attributes.get_integer(Node::Attributes::Deadband) # no additional query

# The prefetch feature only overrides the [] and []= methods of the attributes dataset.
# It might be beneficial to add a :prefetch_key option and store the attribute records
# in a hash instead of an array to make searching for a single record faster.

# Another issue is caching. It looks like any support for memcached would have to be implemented
# at the dataset layer.

