initialise repo
author     Steven McDonald <steven@steven-mcdonald.id.au>
           Sun, 25 Sep 2011 13:45:24 +0000 (23:45 +1000)
committer  Steven McDonald <steven@steven-mcdonald.id.au>
           Sun, 25 Sep 2011 13:45:24 +0000 (23:45 +1000)
29 files changed:
INSTALL [new file with mode: 0644]
LICENSE [new file with mode: 0644]
QUICKSTART [new file with mode: 0644]
README [new file with mode: 0644]
TODO [new file with mode: 0644]
config.py [new file with mode: 0644]
core/__init__.py [new file with mode: 0644]
core/bits.py [new file with mode: 0755]
core/deptools.py [new file with mode: 0644]
core/marshal.py [new file with mode: 0644]
core/store.py [new file with mode: 0644]
doc/01-what.txt [new file with mode: 0644]
doc/02-flow.txt [new file with mode: 0644]
doc/03-items.txt [new file with mode: 0644]
doc/example_python_items.py [new file with mode: 0644]
doc/sample_items.json [new file with mode: 0644]
lib/__init__.py [new file with mode: 0644]
lib/httpd.py [new file with mode: 0755]
lib/loaders.py [new file with mode: 0644]
lib/magic.py [new file with mode: 0644]
lib/vis.py [new file with mode: 0644]
local_config.py [new file with mode: 0644]
magic_httpd.py [new file with mode: 0755]
mclient.py [new file with mode: 0755]
tests/breakfast.py [new file with mode: 0644]
tests/gjunktest.py [new file with mode: 0755]
tests/groupedbfast.py [new file with mode: 0644]
tests/junktest.py [new file with mode: 0755]
tests/test.py [new file with mode: 0755]

diff --git a/INSTALL b/INSTALL
new file mode 100644 (file)
index 0000000..603be1f
--- /dev/null
+++ b/INSTALL
@@ -0,0 +1,12 @@
+- Install python2.6 or newer
+
+- Install mongodb (version 1.8 works)
+
+- Install cherrypy3 (either from your distro or from PyPI)
+
+- Install the following from PyPI using easy_install or pip:
+
+       pip install digraphtools
+       pip install requests
+
+- Read QUICKSTART
diff --git a/LICENSE b/LICENSE
new file mode 100644 (file)
index 0000000..7eca0f5
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2011, Anchor Systems Pty Ltd
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in the
+      documentation and/or other materials provided with the distribution.
+    * Neither the name of Anchor Systems Pty Ltd nor the
+      names of its contributors may be used to endorse or promote products
+      derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL ANCHOR SYSTEMS PTY LTD BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/QUICKSTART b/QUICKSTART
new file mode 100644 (file)
index 0000000..36e82f9
--- /dev/null
+++ b/QUICKSTART
@@ -0,0 +1,302 @@
+Take a look in INSTALL and install all the dependencies.
+
+Look at config.py and make sure it's pointing at your MongoDB instance.
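+
+The settings in config.py are just Python: the MongoDB server, port and
+database to use, and the address/port the HTTP API should listen on.
+Anything you drop into local_config.py overrides them.  As a sketch, a
+local_config.py pointing at a different MongoDB box might look like this
+(the hostname here is made up):
+
+	# local_config.py -- values defined here override config.py
+	mongodb_server = 'mongo.example.com'
+	mongodb_port = 27017
+	mongodb_database = 'magic'
+
+	# where magic_httpd.py should listen
+	httpd_listen_address = '0.0.0.0'
+	httpd_listen_port = 4554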
+
+Kick off the HTTP API server by running: ./magic_httpd.py
+
+That's it! You're up and running!  Okay, so you probably want to actually
+do stuff with it.  There is a simple shell client for talking to the HTTP
+API for testing. For production, you'll want to use mudpuppy (or your own
+agent) to automate the individual items.
+
+
+OKAY, IT'S RUNNING, SO NOW SHOW ME MORE THAN JUST A BLINKING CURSOR!
+
+By default, make-magic uses an example set of possible items to do, read
+from doc/sample_items.json, which is a simple list of steps for getting
+out of bed in the morning and going to work.  When you make a new task from
+this, the only requirement that it recognises is 'coffee'. If you say you
+require coffee, the steps to make it and drink it will be included.
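+
+Each entry in that file is a small JSON object naming an item, what it
+depends on, and (optionally) an "if" predicate saying when it applies.
+Roughly along these lines (see core/marshal.py for the full format; the
+exact entries in doc/sample_items.json may differ a little):
+
+	{
+		"name":    "make_coffee",
+		"depends": ["get_up"],
+		"if":      "coffee"
+	}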
+
+Let's get started. First, we can list the tasks that are currently being
+handled by make-magic:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py tasks
+       []
+       davidb@kelp:~/src/make-magic$ 
+
+Not very interesting. Let's create a task, and say we require coffee.
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py task_create '{"requirements": [ "coffee" ]}'
+       {
+        "items": [
+         {
+          "state": "INCOMPLETE", 
+          "name": "TaskComplete", 
+          "depends": [
+           "go_to_work"
+          ]
+         }, 
+
+       (... many more items... )
+
+         {
+          "state": "INCOMPLETE", 
+          "name": "make_coffee", 
+          "depends": [
+           "get_up"
+          ]
+         }
+        ], 
+        "metadata": {
+         "requirements": [
+          "coffee"
+         ], 
+         "uuid": "1ede91f0-6b39-4da9-8fe6-cc0b028ed349", 
+         "metadata": true
+        }
+       }
+       davidb@kelp:~/src/make-magic$ 
+
+mclient is pretty simple. Most of the time it will just talk the JSON that is
+part of the (vaguely RESTful) HTTP API.  make-magic has created a new task
+based on the requirements (in this case make_coffee and drink_coffee are in 
+there because we said we required coffee).  It's also allocated a uuid for the
+task. If we had wanted to, we could have added more key/value pairs, and they
+would have been added into the task metadata.
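+
+For example, something like this works too -- the extra key simply ends up
+in the metadata alongside "requirements" and "uuid" (the key name here is
+made up):
+
+	./mclient.py task_create '{"requirements": ["coffee"], "owner": "davidb"}'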
+
+If we hadn't told it we required coffee, the coffee-related items would have
+been left out, and nothing would depend on the removed items:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py task_create '{"requirements": []}'
+       {
+        "items": [
+
+            (... items in here but no making or drinking coffee ...)
+
+        ], 
+        "metadata": {
+         "requirements": [], 
+         "uuid": "301b925c-cf35-4195-8bfa-0fa41ccaf8c8", 
+         "metadata": true
+        }
+       }
+       davidb@kelp:~/src/make-magic$
+
+
+Let's list the tasks again:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py tasks
+       [
+       "1ede91f0-6b39-4da9-8fe6-cc0b028ed349",
+       "301b925c-cf35-4195-8bfa-0fa41ccaf8c8"
+       ]
+       davidb@kelp:~/src/make-magic$ 
+
+Now it's showing the UUID for the tasks we just created. If we want, we can now
+ask for all the information associated with a task with: 
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py task 1ede91f0-6b39-4da9-8fe6-cc0b028ed349
+
+Okay, so there are a whole lot of things that need to be done, but some things
+depend on other things. What we really want is a list of things that need to be
+done and have already had all their dependencies satisfied. We can get that with:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py items_ready 1ede91f0-6b39-4da9-8fe6-cc0b028ed349
+       [
+        {
+         "state": "INCOMPLETE", 
+         "name": "wake_up"
+        }
+       ]
+       davidb@kelp:~/src/make-magic$ 
+
+At the moment, the only thing we can do is wake up. If you were inclined to
+look through all the items and follow their dependencies, you'd see why: you're
+not going to be able to get out of bed till you're awake, and you're not going
+to be able to do the rest of getting ready for work until you're out of bed,
+etc.
+
+What we'll do is change the state from INCOMPLETE to IN_PROGRESS, and finally to
+COMPLETE:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py update_item_state 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 wake_up INCOMPLETE IN_PROGRESS
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py update_item_state 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 wake_up IN_PROGRESS COMPLETE
+
+Before a client does any work on an item, it first sets its state to IN_PROGRESS so that
+other workers know not to try to do it as well. When the client has finished successfully it
+sets the state to COMPLETE.
+
+	(Some notes on concurrency: update_item_state is pretty much the only time the client does
+	anything other than a single HTTP request passing along what it gets from the command line.
+	The reason is that you will likely have multiple agents (or a single agent with
+	multiple threads or processes) looking for items to do, and then moving them to
+	IN_PROGRESS to do them.  To guard against race conditions, the state
+	will only be changed if it already matches the one that we've told it we're changing
+	from (the server enforces this atomically), and the client also passes a random token to the
+	server that will only come back if its request was the one that succeeded.)
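+
+If you're curious what that compare-and-set looks like on the server side,
+it boils down to a conditional MongoDB update.  Here's a rough sketch in
+terms of core/store.py from this repo (the uuid and token values are made
+up, and the real server wraps this in a bit more bookkeeping):
+
+	from core.store import Store
+
+	store = Store()
+	uuid = '1ede91f0-6b39-4da9-8fe6-cc0b028ed349'
+	token = 12345
+
+	# only flips wake_up to IN_PROGRESS if it is still INCOMPLETE;
+	# whoever sees their own token in the result won the race
+	after = store.update_item(uuid, 'wake_up',
+		{'state': 'IN_PROGRESS', '_change_state_token': token},
+		existingstate={'state': 'INCOMPLETE'})
+	we_won = (after['state'] == 'IN_PROGRESS'
+		and after.get('_change_state_token') == token)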
+
+
+Now that we've woken up, if we check to see what items are ready again:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py items_ready 1ede91f0-6b39-4da9-8fe6-cc0b028ed349
+
+it will show the next step is 'get_up'. Let's complete that as well:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py update_item_state 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 get_up INCOMPLETE IN_PROGRESS
+       davidb@kelp:~/src/make-magic$ ./mclient.py update_item_state 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 get_up IN_PROGRESS COMPLETE
+
+(Yes, this gets boring pretty quickly, but the whole point is to automate all this
+stuff. Mudpuppy, which is also published on Anchor's GitHub account, will do all this
+state handling for you and let you write simple modules to do the steps.)
+
+Now let's check what items are ready to go, because it's slightly more interesting:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py items_ready 1ede91f0-6b39-4da9-8fe6-cc0b028ed349
+       [
+        {
+         "state": "INCOMPLETE", 
+         "name": "make_breakfast", 
+         "depends": [
+          "get_up"
+         ]
+        }, 
+        {
+         "state": "INCOMPLETE", 
+         "name": "make_coffee", 
+         "depends": [
+          "get_up"
+         ]
+        }
+       ]
+       davidb@kelp:~/src/make-magic$ 
+
+Now that we're out of bed, there are two things available to do. Make breakfast, or
+(because we said we needed it), make coffee. The important thing to note is that
+both of these can be done at the same time! Both of them have had all their dependencies
+completed; if one depended on the other finishing, it wouldn't show up in the
+list.   One of the cool things about make-magic is that you can do multiple steps at the
+same time, and make-magic will keep track of which items are completed, which ones
+are needed by other items still, and figure out on the fly what has to be done next.
+
+You can now (if you desire) go through and do all the items in order. Myself, I'd recommend
+getting something like mudpuppy to automate them, which is indeed the whole point.
+See: https://github.com/anchor/mudpuppy
+
+There is a single special item that is created automatically for each task, and
+that's called TaskComplete.
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py item 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 TaskComplete
+       {
+        "state": "INCOMPLETE", 
+        "name": "TaskComplete", 
+        "depends": [
+         "go_to_work"
+        ]
+       }
+
+TaskComplete depends (indirectly) on every item in the task. If you ask make-magic
+for items that are ready, and it sees that the only item ready to go is TaskComplete,
+the server will set it to COMPLETE itself, and return an empty list of things
+to do.
+
+
+MORE STUFF
+
+There are actually a few more useful things you can do. You can add and update entries
+in the task metadata:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py metadata 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 
+       {
+        "requirements": [
+         "coffee"
+        ], 
+        "uuid": "1ede91f0-6b39-4da9-8fe6-cc0b028ed349", 
+        "metadata": true
+       }
+       davidb@kelp:~/src/make-magic$ ./mclient.py update_metadata 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 '{"mornings_are": "meh"}'
+       (... previous metadata contents...) 
+
+       {
+        "mornings_are": "meh", 
+        "requirements": [
+         "coffee"
+        ], 
+        "uuid": "1ede91f0-6b39-4da9-8fe6-cc0b028ed349", 
+        "metadata": true
+       }
+       davidb@kelp:~/src/make-magic$ 
+
+
+We use it for things like saving the IP addresses that we've allocated to a
+server so that other items later on (like the ones setting up networking)
+have easy access to them without having to store them somewhere else.
+
+You can also add metadata to items in a very similar way:
+
+       davidb@kelp:~/src/make-magic$ ./mclient.py item 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 get_up
+       {
+        "_change_state_token": 18106579636852, 
+        "depends": [
+         "wake_up"
+        ], 
+        "name": "get_up", 
+        "state": "COMPLETE"
+       }
+       davidb@kelp:~/src/make-magic$ ./mclient.py update_item 1ede91f0-6b39-4da9-8fe6-cc0b028ed349 get_up '{"bed was": "comfy", "sleep": "good"}'
+       {
+        "_change_state_token": 18106579636852, 
+        "depends": [
+         "wake_up"
+        ], 
+        "name": "get_up", 
+        "state": "COMPLETE"
+       }
+
+       {
+        "bed was": "comfy", 
+        "name": "get_up", 
+        "state": "COMPLETE", 
+        "depends": [
+         "wake_up"
+        ], 
+        "sleep": "good", 
+        "_change_state_token": 18106579636852
+       }
+       davidb@kelp:~/src/make-magic$ 
+
+You can use this for pretty much anything. For example, if automation fails and
+an item has to be moved to the FAILED state, you can add in debugging information
+about why it failed.
+
+Now that you've gone through this, it's probably going to be more interesting to
+define your own items (we have hundreds in our own production environment).
+
+Rather than just filtering on a single requirement, you can filter an
+individual item on many different ones, e.g.:
+
+       {
+               "name":         "do_something",
+               "if":           "(os.debian | os.rhel5) & hardware.vm & ( ! support_level.basic )",
+               "depends": [
+                       "reboot_debian", "reboot_rhel"
+               ]
+       }
+
+would only turn up in a task if the requirements included "os.debian" or "os.rhel5", 
+also included "hardware.vm", but didn't include "support_level.basic". 
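+
+If you want to play with these predicates from Python, the converter in
+core/marshal.py will turn the string into a callable that takes a list of
+requirements (a sketch; the requirement lists below are made up, and the
+exact parsing is up to digraphtools.predicate):
+
+	from core.marshal import ItemConverter
+
+	pred = ItemConverter().predicate_string_to_callable(
+		'(os.debian | os.rhel5) & hardware.vm & ( ! support_level.basic )')
+	assert pred(['os.debian', 'hardware.vm']) == True
+	assert pred(['os.rhel5', 'hardware.vm', 'support_level.basic']) == False
+	assert pred(['os.debian']) == False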
+
+It might at first seem a bit weird that it depends on both
+reboot_debian and reboot_rhel, but the definition of reboot_debian
+will almost certainly include "if": "os.debian" at the very least, and
+similarly for RHEL; any items that are filtered out by their own
+'if' entries are also removed from the dependency lists of everything else
+when a task is created.
+
+This works much better than you would first expect; it also gives you
+the ability to build a complex list of item dependencies without having
+to explicitly define every single permutation of tasks that can be
+generated from requirements (in our case we would die of old age before
+being able to define them by hand).  This is part of what makes
+make-magic so cool.
diff --git a/README b/README
new file mode 100644 (file)
index 0000000..5c196c2
--- /dev/null
+++ b/README
@@ -0,0 +1,52 @@
+If you want to build a new server, and you're talking to someone who isn't
+a sysadmin, the stuff that you know is generally going to go something like
+this:
+
+       Someone needs a server. They want one of those
+       Linux ones, have a gopher server, and they want
+       it blue. Several other requirements specific to
+       them
+
+          |
+          V
+
+       A miracle happens
+
+          |
+          V
+
+       Really specific list of things to do to make it happen: 
+       Figure out a hostname. Allocate an IP. Put the IP in DNS.
+       Get the right VLAN. Go to the store and get some blue paint.
+       Get a hypervisor with enough free capacity. Put the host in 
+       the accounting system. Setup the guest. Configure the installer 
+       for the required distro.  kick off the install. Add some user 
+       accounts. Setup a gopher server. Firewall the gopher server 
+       from Jeremy because he doesn't like blue (and dozens more)
+
+          |
+          V
+
+       Do the stuff in the list
+
+
+The miracle is normally a sysadmin. She's probably also got about 50
+scripts to automate the rest as well, because like any good sysadmin,
+her first priority was to automate herself out of a job. It will still
+take a while to go through and run them in the right order, but for
+more complicated builds, there's no one-size-fits-all script, so bits
+and pieces around the place will have to be set off by hand.
+
+make-magic is designed to be the miracle.
+
+It figures out what has to be done and what order to do it in. It
+keeps track of which steps have been done, and makes sure that
+everything is done in a correct order.   It can hook into
+systems like mudpuppy and orchestra to automate the steps 
+themselves, the combination of which can do complex, unique
+builds without any human intervention.
+
+mudpuppy (a python based automation agent that does the tasks
+make-magic sets for it) is available at:
+
+https://github.com/anchor/mudpuppy
diff --git a/TODO b/TODO
new file mode 100644 (file)
index 0000000..3ff9dc4
--- /dev/null
+++ b/TODO
@@ -0,0 +1,21 @@
+- predicate implementation
+- state machine implementation
+- external interface (e.g. http, xml-rpc, carrier pigeon)
+- storage of currently active tasks
+	* Can likely be something quite simple like mongodb
+       * Have a unique task id
+       * Have a few objects below that:
+               - items
+               - dependencies
+               - task state
+               - item state etc.
+  Note: ONLY currently active tasks. Anything else can be archived / dumped to
+  logs once a task is complete!  (preferably as JSON or something).   If people
+  want access to old task data, it would be better to have it as part of a
+  separate database rather than cluttering the design of the ephemeral data
+
+- Replace DeprecatedDependencyMagic with a TaskDependencyManager
+  This can know about internal structure of tasks, and might be able to offload
+  some of the magic from TaskFactory
+
+- Clean up many, many things
diff --git a/config.py b/config.py
new file mode 100644 (file)
index 0000000..3aa57bc
--- /dev/null
+++ b/config.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+'''config file for make-magic
+
+This is currently written in pure python, but should be pretty
+easy to deal with
+
+You can also override things in local_config.py
+'''
+
+# Where do we read our item definitions from
+#
+items_file = 'doc/sample_items.json'
+
+
+# MongoDB database to store information about tasks in
+mongodb_server = 'localhost'
+mongodb_port = 27017
+mongodb_database = 'magic' 
+
+# Where the webserver should listen
+httpd_listen_address = '127.0.0.1'
+httpd_listen_port = 4554
+
+
+# Attempt to import a local config to override stuff
+try:
+       from local_config import *
+except ImportError:
+       pass
diff --git a/core/__init__.py b/core/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/core/bits.py b/core/bits.py
new file mode 100755 (executable)
index 0000000..2183797
--- /dev/null
+++ b/core/bits.py
@@ -0,0 +1,87 @@
+#! /usr/bin/env python
+
+from uuid import uuid4
+
+class Scheme(object):
+       def __init__(self, items):
+               self.items = items
+
+class BaseItem(object):
+       description = ""
+       depends = ()
+	predicate = lambda requirements: True
+       name = property(lambda self: self.__class__.__name__)
+       def __repr__(self):
+               return '<'+self.name+'('+self.__class__.__bases__[0].__name__+') instance>'
+
+class Item(BaseItem):
+       INCOMPLETE = 'INCOMPLETE'
+       FAILED = 'FAILED'
+       IN_PROGRESS = 'IN_PROGRESS'
+       CANNOT_AUTOMATE = 'CANNOT_AUTOMATE'
+       COMPLETE = 'COMPLETE'
+
+       allowed_states = set((INCOMPLETE,FAILED,IN_PROGRESS,CANNOT_AUTOMATE,COMPLETE))
+       def __init__(self, data=None):
+               if data == None:
+                       data = dict(state=self.INCOMPLETE)
+               self.data = data
+       def isComplete(self): 
+               return self.data['state'] == self.COMPLETE
+
+class TaskComplete(Item):
+	'''sentinel task that contains all items required for completion'''
+       def __init__(self, goals=None, data=None):
+               if goals != None:
+                       self.depends = tuple(goals)
+               if len(self.depends) == 0:
+                       raise ValueError('MUST provide goals to create TaskComplete')
+               Item.__init__(self,data)
+
+class DataItem(Item):
+       def __init__(self):
+               Item.__init__(self)
+               self.data = None
+
+class Group(BaseItem):
+       '''"logical" groups containing items
+       groups are sets of items that cannot be started until the group's dependencies are satisfied.
+
+       This sounds perfectly sane until you realise that what you're actually
+       doing is defining every item in the group to be dependent on every dependency of the
+       group.  As groups can contain groups, every item in the whole graph is actually
+       dependent on all dependencies of any groups that contain it, any groups that contain that
+       group, and so on recursively.  This is actually a digraph not a chain so a naive implementation
+       to see which groups contain an item is not going to work.
+
+       Groups make intuitive sense to humans, and make things so much more hackish to implement.
+       They do make it easier for humans to build the original set of dependencies, at the expense
+       of using somewhat of a shotgun approach in dependencies at times.  It can also be used to
+       effectively partition the dependency digraph through critical nodes.
+
+	One solution to applying the dependencies of groups to the items they contain is to:
+               - For every group, find the group contents plus the contents of any groups
+                 contained by the group recursively
+		- For each item, add to its dependencies the union of the set of dependencies of all
+                 groups it is contained in.
+                 
+       Once that is complete, it is then (if needed) possible to perform a reduction to remove the
+	groups. A simple implementation would be, for each group, to find each dependency on that group
+       and replace it with the contents of the group.
+
+	Groups are currently not serialised and stored as state; they should be removed as quickly
+       as possible after task creation
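+
+	An informal example (borrowed from doc/example_python_items.py): the
+	group have_breakfast depends on get_up and contains eat_breakfast and
+	drink_coffee.  Once group dependencies are made explicit, eat_breakfast
+	and drink_coffee each gain a direct dependency on get_up, and
+	walk_out_door (which depended on the group) ends up depending on
+	eat_breakfast and drink_coffee instead.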
+       '''
+       contains = NotImplemented
+
+class Task(object):
+       def __init__(self, items, requirements, goal, uuid=None, data=None):
+               if uuid == None:
+                       uuid = str(uuid4())
+               if data == None:
+                       data = dict()
+               self.uuid, self.items, self.requirements, self.data = uuid, items, requirements, data
+               assert isinstance(goal, TaskComplete)
+               self.goal = goal
+       def __repr__(self):
+               return "Task(uuid='%s')" % (self.uuid,)
diff --git a/core/deptools.py b/core/deptools.py
new file mode 100644 (file)
index 0000000..8687c30
--- /dev/null
+++ b/core/deptools.py
@@ -0,0 +1,347 @@
+#! /usr/bin/env python 
+from collections import deque,defaultdict
+from itertools import ifilter
+import digraphtools
+
+from core.bits import Item,Group
+
+def unbound(func):
+       '''always return the underlying function from a bound method'''
+       return getattr(func, 'im_func', func)
+
+class BaseDependencyStrategy:
+       '''Base class for strategies for dependency resolution'''
+       @classmethod
+       def iterate_pruned_item_dependencies(cls, requirements, item):
+		'''return an ordered list of dependencies for an item such that an item's dependencies are before it in the list,
+		pruned based on predicate
+               Not used, but left here because at some point it might be needed to do 
+               this in a way that does not alter the items at all (as per the below filter)'''
+               raise NotImplementedError
+
+       @classmethod
+       def iterate_item_dependencies(cls, item):
+		'''return an ordered list of dependencies for an item such that an item's dependencies are before it in the list'''
+               raise NotImplementedError
+
+       @classmethod
+       def make_group_dependencies_explicit(cls, item):
+               '''applying dependencies of groups to those it contains'''
+		'''apply the dependencies of groups to the items they contain'''
+       
+       @classmethod
+       def filter_dependency_graph(cls, requirements, item):
+               '''filter items in the dependency graph INPLACE based on the supplied requirements
+               It's highly recommended you only do this on item instances and not classes as
+		it alters the depends attribute on all items in the supplied DAG '''
+               raise NotImplementedError
+
+       @classmethod
+       def item_factory(cls, goal_item):
+               '''instantiate all items in a dependency tree from an end-goal item
+
+		for every instance in the dependency tree, the depends attribute on the instance overrides the class attribute;
+		returns an instance of the end-goal where, recursively, all dependencies are item instances
+               '''
+               raise NotImplementedError
+
+       @classmethod
+       def early_iter_all_items(cls, item):
+		'''get a list of all items, including all groups, contents and dependencies of same
+               before groups are unrolled into something that can be represented by a digraph we
+               need a way of getting all items, including traversing group contents
+               '''
+               raise NotImplementedError
+
+
+class SimpleDependencyStrategy(BaseDependencyStrategy):
+       '''Reasonably generic strategy for working with dependencies
+       Requires implementation of some things by other strategies
+       '''
+
+       @classmethod
+       def iterate_pruned_item_dependencies(cls, requirements, item, seen=None):
+		'''return an ordered list of dependencies for an item such that an item's dependencies are before it in the list
+               This is equivalent to treating the dependencies as a DAG and traversing while:
+                       - reducing it to a tree by ignoring any nodes seen in the traversal
+                       - pruning branches where the requirements do not meet an item's predicate
+			- doing a post-order traversal to maintain the invariant that a node's
+			  dependencies precede it in the traversal.
+               '''
+               if seen is None: seen = set()
+               if item in seen: raise StopIteration
+               seen.add(item)
+               filtereddeps = ifilter(lambda i: unbound(i.predicate)(requirements), item.depends)
+               for dep in filtereddeps:
+                       for cdep in cls.iterate_pruned_item_dependencies(requirements,dep,seen):
+                               yield cdep
+               yield item
+
+       @classmethod
+       def iterate_item_dependencies(cls, item, seen=None):
+               if seen is None: seen = set()
+               if item in seen: raise StopIteration
+               seen.add(item)
+               for dep in item.depends:
+                       for cdep in cls.iterate_item_dependencies(dep,seen):
+                               yield cdep
+               yield item
+
+       @classmethod
+       def early_iter_all_items(cls, item, seen=None):
+		'''get a list of all items, including all groups, contents and dependencies of same
+		before groups are unrolled into something that can be represented by a digraph we
+               need a way of getting all items, including traversing group contents
+               '''
+               if seen is None: seen = set()
+               if item in seen: raise StopIteration
+               seen.add(item)
+               for dep in item.depends:
+                       for cdep in cls.early_iter_all_items(dep,seen):
+                               yield cdep
+               if isinstance(item,Group) or type(item) == type and issubclass(item,Group):
+                       for member in item.contains:
+                               for cdep in cls.early_iter_all_items(member,seen):
+                                       yield cdep
+               yield item
+
+       @classmethod
+       def filter_dependency_graph(cls, requirements, item):
+               '''filter items in the dependency graph INPLACE based on the supplied requirements
+               It's highly recommended you only do this on item instances and not classes as
+		it alters the depends attribute on all items in the supplied DAG '''
+		items = list(cls.iterate_item_dependencies(item))	# realise the generator; it is iterated more than once below
+               keptitems = set(filter(lambda i: unbound(i.predicate)(requirements), items))
+               droppeditems = set(items).difference(keptitems)
+
+               for dropped in droppeditems: del(dropped.depends) # Drop possible circular refs to filtered instances
+               for survivor in keptitems:
+                       # Drop references to filtered instances
+                       survivor.depends = tuple(dep for dep in survivor.depends if dep in keptitems)
+               if item not in keptitems:
+                       return None
+               return item
+
+       @classmethod
+       def instantiate_items(cls, items):
+               '''returns a map from classes to instances'''
+               # Pre: All items, including all deps, are in 'items'
+               instancemap = dict((item, item()) for item in items) # Only ever instantiate each item once
+               iteminstances = map(instancemap.get, items)
+               for inst in iteminstances:
+                       inst.depends = tuple(map(instancemap.get, inst.depends))
+                       if isinstance(inst,Group):
+                               inst.contains = tuple(map(instancemap.get, inst.contains))
+               return instancemap
+
+       @classmethod
+       def item_factory(cls, goal_item):
+               '''instantiate all items in a dependency tree from an end-goal item
+
+		for every instance in the dependency tree, the depends attribute on the instance overrides the class attribute;
+		returns an instance of the end-goal where, recursively, all dependencies are item instances
+               '''
+               # Flatten the dep graph to a topsort, instantiate all items, then override depends attributes
+               items = set(cls.early_iter_all_items(goal_item))
+               instancemap = cls.instantiate_items(items)
+               return instancemap[goal_item]
+
+       @classmethod
+       def make_group_dependencies_explicit(cls, item):
+               items = set(cls.early_iter_all_items(item))     # Gotta catch them all
+               items = cls.make_group_dependencies_explicit_for_items(items)
+               assert item in items
+               return item
+
+       @classmethod
+       def make_group_dependencies_explicit_for_items(cls, items):
+               allitems = items
+               items = set(k for k in allitems if isinstance(k,Item) or type(k) == type and issubclass(k,Item))
+               groups = set(k for k in allitems if isinstance(k,Group) or type(k) == type and issubclass(k,Group))
+               origitems,origgroups = set(items),set(groups)   # For later testing
+               assert allitems == items.union(groups)
+               assert items.isdisjoint(groups)
+
+               contained_by = defaultdict(set)
+
+               # First find out what groups an item is contained by
+               def iter_group_contents(group):
+                       for k in group.contains:
+                               if k in groups: 
+                                       for kk in iter_group_contents(k): yield kk
+                               else: yield k
+               for group in groups:
+                       for k in iter_group_contents(group):
+                               contained_by[k].add(group)
+
+               # Item dependencies are the explicit dependencies of the items themselves
+               # plus the dependencies of the groups they are contained by
+               for item,ingroups in contained_by.items(): 
+                       assert ingroups.issubset(groups)
+                       new_deps = set(item.depends)
+                       for g in ingroups:
+                               new_deps.update(g.depends)
+                       item.depends = tuple(new_deps)
+
+               # Now the dependencies of the items inside groups are unrolled, reduce any
+               # references of groups to the contents of the groups
+
+               # First do the group contents themselves recursively, and store as sets
+               for group in groups:
+                       group.contains = set(group.contains)
+                       while not group.contains.isdisjoint(groups):
+                               for containedgroup in group.contains.intersection(groups):
+                                       assert containedgroup != group
+                                       group.contains.update(containedgroup.contains)
+                                       group.contains.remove(containedgroup)
+
+               # Now that for group.contains has been reduced to non-group items,
+               # replace any reference to groups in the dependencies of any item with
+               # the contents of that group
+               for item in items.difference(groups):
+                       assert item not in groups
+                       if not groups.isdisjoint(item.depends):
+                               item.depends = set(item.depends)
+                               for groupdep in item.depends.intersection(groups):
+                                       item.depends.update(groupdep.contains)
+                                       item.depends.remove(groupdep)
+                               item.depends = tuple(item.depends)
+                       assert groups.isdisjoint(item.depends)
+
+               assert items == origitems
+               assert groups == origgroups
+               assert allitems == items.union(groups)
+               assert items.isdisjoint(groups)
+               assert items == allitems.difference(groups)
+
+               return items
+
+
+not_none = lambda n: n is not None
+
+class GraphDependencyStrategy(SimpleDependencyStrategy):
+       '''deal with item dependencies by treating them as a directed acyclic graph
+	a graph is represented as a 2-tuple of a node, and a list of nodes connected to its outgoing edges
+
+	graph calls are generally passed the goal node, with the goal's dependencies being
+	its outgoing edges'''
+
+       @classmethod
+       def get_graph(cls, item, seen=None):
+		'''return a DAG from a base item
+		no pruning is done here; see get_pruned_graph for the version that prunes on predicates
+		'''
+               if seen is None: seen = dict()
+               if item in seen: return seen[item]
+               branches = []
+               seen[item] = [item, branches]
+               for dep in item.depends:
+                       branch = cls.get_graph(dep, seen)
+                       if branch: 
+                               branches.append(branch)
+               return seen[item]
+
+       @classmethod
+       def get_pruned_graph(cls, requirements, item, seen=None):
+               '''return a DAG from a base item and a set of requirements
+               items are pruned from the graph if their predicates are false for the requirements
+               '''
+               if seen is None: seen = dict()
+               if item in seen: return seen[item]
+               branches = []
+               seen[item] = [item, branches]
+               for dep in filter(lambda i: unbound(i.predicate)(requirements), item.depends):
+                       branch = cls.get_pruned_graph(requirements, dep, seen)
+                       if branch: 
+                               branches.append(branch)
+               return seen[item]
+
+       @classmethod
+       def tree_from_graph(cls, graph, seen=None):
+		'''convert a DAG into an arborescence by removing edges to seen nodes'''
+               node,connected = graph
+               if seen is None: seen=set()
+               if node in seen: return None
+               seen.add(node)
+               connected = [cls.tree_from_graph(sub,seen) for sub in connected]
+               return [ node,filter(not_none,connected) ]
+
+
+       @classmethod
+       def postorder_traversal(cls, tree):
+               '''traverse tree post-order and return a list of nodes'''
+               root,branches = tree
+               ret = map(cls.postorder_traversal,branches)
+               return sum(ret,[]) + [root]
+               
+       @classmethod
+       def iterate_pruned_item_dependencies(cls, requirements, item):
+               tree = cls.tree_from_graph( cls.get_pruned_graph(requirements,item) )
+               return cls.postorder_traversal(tree)
+
+       @classmethod
+       def iterate_item_dependencies(cls, item):
+               tree = cls.tree_from_graph( cls.get_graph(item) )
+               return cls.postorder_traversal(tree)
+
+
+class DigraphDependencyStrategy(SimpleDependencyStrategy):
+       @classmethod
+       def edges_from_item_deps(cls, item):
+		'''iterate transitively over the dependency edges reachable from item'''
+               tovisit = deque([item])
+               while len(tovisit):
+                       item = tovisit.pop()
+                       for dep in item.depends:
+                               yield (item,dep)
+                       tovisit.extendleft(item.depends)
+
+       @classmethod
+       def graph_from_item_deps(cls, item):
+               return digraphtools.graph_from_edges( cls.edges_from_item_deps(item) )
+
+       @classmethod
+       def iterate_item_dependencies(cls, item):
+               g = cls.graph_from_item_deps(item)
+               return digraphtools.dfs_topsort_traversal(g, item)
+
+       @classmethod
+       def graph_from_items(cls, items):
+               # pre: all items in graph are passed
+               def edges_from_items(items):
+                       for i in items:
+                               for d in i.depends: yield (i,d)
+               return digraphtools.graph_from_edges( edges_from_items(items) )
+
+       @classmethod
+       def find_goal_nodes(cls, items):
+               '''return the set of all nodes that aren't depended on'''
+               graph = cls.graph_from_items(items)
+               start_nodes,dependencies = zip(*list(digraphtools.iter_edges(graph)))
+               return set(start_nodes).difference(dependencies)
+
+       @classmethod
+       def needed_dependencies(cls, graph, item):
+               '''return which of an item's dependencies are incomplete'''
+               return [dep for dep in graph[item] if dep.data['state'] != dep.COMPLETE]
+
+       @classmethod
+       def ready_to_run(cls, items):
+               '''return items that are incomplete who have no uncompleted dependencies'''
+               graph = cls.graph_from_items(items)
+               incomplete =  [item for item in items if item.data['state'] == item.INCOMPLETE]
+               return [item for item in incomplete if len(cls.needed_dependencies(graph,item)) == 0]
+
+class DeprecatedDependencyMagic(object):
+       #TODO: Figure out the object structure for *Magic. Likely it shouldn't even be in this module
+       def __init__(self, strategy=DigraphDependencyStrategy):
+               self.strategy = strategy
+       def make_new_dep_graph(self, goal_node):
+               goal_node = self.strategy.item_factory(goal_node)
+               self.strategy.make_group_dependencies_explicit(goal_node)
+               return goal_node
+       def item_list_for_task(self, task):
+               # FIXME: Doesn't look at all goal nodes. If we want only 1 goal node we should enforce it
+               goal = self.make_new_dep_graph(task.will[0])
+               goal = self.strategy.filter_dependency_graph(task.wants, goal)
+               return self.strategy.iterate_item_dependencies(goal)
diff --git a/core/marshal.py b/core/marshal.py
new file mode 100644 (file)
index 0000000..23fc8b2
--- /dev/null
+++ b/core/marshal.py
@@ -0,0 +1,182 @@
+#! /usr/bin/env python
+
+'''Marshal in and out of internal object representations
+
+Internally, we use the object types defined in core.bits,
+however we need some way of getting stuff in and out of that format
+both for IPC, and so that people don't have to write their
+item definitions in Python[0].  We also need to be able to talk in 
+different formats to build APIs with.
+
+To do this, we're going to use a simple, data based common format that
+should be able to be represented in several different formats (e.g.
+python objects, json, xml etc). Bonus points for being able to use 
+off-the-shelf encoders and decoders.
+
+The internal format for item classes is a list of dicts of
+the form:
+
+items = [
+   { 'name':         'itemname',                           # required
+     'depends':      ['itemname2', 'itemname43', 'groupname']  # optional
+     'depends':      ['itemname2', 'itemname43', 'groupname'],  # optional
+     'if':           '<predicate definition>'              # optional
+   },
+
+   { 'name':         'itemname2',
+     'depends':      []
+   },
+
+   { 'group':        'groupname',                          # required
+     'contains':     ['itemname43', 'itemname32', 'groupname5'],  # required
+     'depends':      ['itemname47', 'groupname2' ...],      # optional
+     'description':  'multi-line description of the item', # optional
+     'if':           '<predicate definition>'              # optional
+   },
+   ...
+]
+
+where all dependencies referred to must be defined in the list.
+This is equivalent to the internal definition:
+
+       class itemname(bits.Item):
+               description = 'multi-line description of the item'
+               depends = (itemname2, itemname43, groupname)
+               predicate = <callable that returns True iff predicate holds over passed requirements>
+
+       class itemname2(bits.Item):
+               pass
+
+       class groupname(bits.Group):
+               description = 'multi-line description of the item'
+               depends = (itemname47, groupname2)
+               contains = (itemname43, itemname32, groupname5)
+               predicate = <callable that returns True iff predicate holds over passed requirements>
+
+       items = [ itemname, itemname2, groupname ]
+
+Item instances are represented in the same way, but the dicts can have 
+extra key/value pairs for item state and metadata. These are available
+as a dict via the 'data' property on the python object instances.
+
+Group instances are not currently able to be marshalled; only classes are.
+Groups should be reduced out during the process of Task creation.
+
+Predicate definitions as strings are currently as defined in the digraphtools.predicate module.
+We use the PredicateContainsFactory to generate the predicates. This also allows us to marshal
+them back and forth pretty easily to and from strings.
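+
+A rough usage sketch of the converter itself (using only what is defined
+below; the dict here is hand-made for illustration):
+
+	>>> from core.marshal import ItemConverter
+	>>> conv = ItemConverter()
+	>>> wake_up = conv.itemdict_to_item_class({'name': 'wake_up'})
+	>>> conv.itemclass_to_itemdict(wake_up)
+	{'name': 'wake_up'}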
+
+[0] Although it's extensible inside so that you can do things like
+write predicates in pure python, the whole system has to be usable
+by someone that doesn't know a line of python.
+'''
+import core.bits
+from digraphtools.predicate import PredicateContainsFactory
+
+class ItemConverter(object):
+       '''Convert items to and from Item objects
+       '''
+
+       identifier_chrs = set("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890_")
+       reserved_keys = set(('name','group','depends','description','contains','if'))
+       def normalise_item_name(self, name):
+		'''normalise a passed string so that it can be used as a python class name'''
+               name = str(name)
+               name = filter(self.identifier_chrs.__contains__, name)
+               if name[:1].isdigit(): 
+                       name = '_'+name
+               return name
+
+       def predicate_string_to_callable(self, predicate):
+		'''turn a predicate string into a callable'''
+               pf = PredicateContainsFactory()
+               pred = pf.predicate_from_string(predicate)
+               pred._predicate_string = predicate  # Save for marshalling back the other way
+               return pred
+
+       def predicate_callable_to_string(self, predicate):
+		'''turn a predicate callable back into a string'''
+               if hasattr(predicate, '_predicate_string'):
+                       return predicate._predicate_string      # Restore previous knowledge. Mwahahahah
+               raise ValueError('Cannot marshal strange predicate into a string.')
+
+       def itemdict_to_group(self, itemdict):
+               '''return a Group subclass from an item dict datastructure
+               This does not unroll dependencies or group contents from strings into classes
+               pre: itemdict is valid
+               '''
+               assert not itemdict.has_key('name')
+               name = self.normalise_item_name(itemdict['group'])
+               attrs = dict(contains=itemdict['contains'])
+               if itemdict.has_key('depends'): attrs['depends'] = tuple(itemdict['depends'])
+               if itemdict.has_key('description'): attrs['description'] = itemdict['description']
+               if itemdict.has_key('if'): attrs['predicate'] = self.predicate_string_to_callable(itemdict['if'])
+               return type.__new__(type, name, (core.bits.Group,), attrs)
+               
+       def itemdict_to_item_class(self, itemdict):
+               '''return an Item subclass from an item dict datastructure
+               This does not unroll item dependencies from strings into classes
+
+               pre: itemdict is valid
+               '''
+               if itemdict.has_key('group'): 
+                       return self.itemdict_to_group(itemdict)
+
+               name = self.normalise_item_name(itemdict['name'])
+               if name == 'TaskComplete':
+                       itemsuper = core.bits.TaskComplete
+               else:
+                       itemsuper = core.bits.Item
+               attrs = dict()
+               if itemdict.has_key('depends'): attrs['depends'] = tuple(itemdict['depends'])
+               if itemdict.has_key('description'): attrs['description'] = itemdict['description']
+               if itemdict.has_key('if'): attrs['predicate'] = self.predicate_string_to_callable(itemdict['if'])
+               return type.__new__(type, name, (itemsuper,), attrs)
+
+       def itemdict_to_item_instance(self, itemdict):
+               cl = self.itemdict_to_item_class(itemdict)
+               data = dict((k,v) for k,v in itemdict.items() if k not in self.reserved_keys)
+               return cl(data=data)
+
+       def itemclass_to_itemdict(self, item):
+               '''return an item dict datastructure from an Item or Group subclass'''
+               if issubclass(item,core.bits.Group):
+                       itemdict = dict(group=item.__name__, contains=[c.__name__ for c in item.contains])
+               else:
+                       itemdict = dict(name=item.__name__)
+               if item.depends: itemdict['depends'] = list(d.__name__ for d in item.depends)
+               if item.description: itemdict['description'] = item.description
+               if item.predicate != core.bits.BaseItem.predicate:
+                       # This might fail if someone has put their own callable in as a predicate
+                       # That's okay; it just means they can't marshal their classes back to json
+                       itemdict['if'] = self.predicate_callable_to_string(item.predicate)
+               return itemdict
+
+       def item_to_itemdict(self, item):
+               '''return an item dict datastructure from an Item instance
+               Note: Does not work on groups and does not convert predicates
+               '''
+               assert not isinstance(item, core.bits.Group)
+               itemdict = dict(name=item.name)
+               itemdict.update(dict((k,v) for k,v in item.data.items() if k not in self.reserved_keys))
+               if item.description: itemdict['description'] = item.description
+               if len(item.depends):
+                       itemdict['depends'] = [d.name for d in item.depends]
+               return itemdict
+
+class TaskConverter(ItemConverter):
+       def taskdict_to_task(self, taskdict):
+               # turn the items into instances
+               items = map(self.itemdict_to_item_instance, taskdict['items'])
+
+               # reference them to each other correctly
+               item_by_name = dict((item.name,item) for item in items)
+               for item in items:
+                       item.depends = tuple(item_by_name[dep] for dep in item.depends)
+
+               # Find the goal node
+               metadata = taskdict['metadata']
+               goal = item_by_name['TaskComplete']
+               requirements = metadata['requirements']
+               uuid = metadata['uuid']
+               return core.bits.Task(items, requirements, goal, uuid, metadata)
diff --git a/core/store.py b/core/store.py
new file mode 100644 (file)
index 0000000..6c8c1ab
--- /dev/null
+++ b/core/store.py
@@ -0,0 +1,81 @@
+#! /usr/bin/env python
+
+'''persistent storage of state data
+
+We need some way to keep track of Tasks that are being worked on,
+the state of items etc. It would be even cooler if that data was
+to hang around when the process was gone.
+'''
+
+try: import pymongo
+except ImportError: pass   # allow import where no one is using the MongoStore
+
+import config
+import random
+
+class MongoStore(object):
+	'''persistent mongodb store'''
+
+       def __init__(self):
+               self.connection = pymongo.Connection(config.mongodb_server,config.mongodb_port)
+               self.db = self.connection[config.mongodb_database]
+       def get_tasks(self):
+               return [name for name in self.db.collection_names() if 'system.' not in name]
+       def new_task(self, uuid, items, metadata=None):
+               if metadata == None: metadata = {}
+               metadata['uuid'] = uuid
+               metadata['metadata'] = True
+               self.db[uuid].create_index('name')
+               self.db[uuid].create_index('metadata')
+               self.db[uuid].insert(items)
+               self.db[uuid].insert(metadata)
+       def _noid(self, item):
+               if item is None or '_id' not in item: return item
+               del item['_id']
+               return item
+       def item(self, uuid, name):
+               '''get a specific item for a task'''
+               return self._noid( self.db[uuid].find_one({'name': name}) )
+       def items(self, uuid):
+               '''get all the items for a task'''
+               # ALL THE THINGS!
+               return [self._noid(item) for item in self.db[uuid].find({'name': {'$exists': True},'metadata': {'$exists': False}})]
+       def metadata(self, uuid):
+               '''get metadata for a task'''
+               metadata = self.db[uuid].find_one({'metadata': {'$exists': True}})
+               return self._noid(metadata)
+       def update_item(self, uuid, name, updatedict, existingstate={}):
+               '''updates an item similar to dict.update()
+
+		if 'existingstate' is supplied, the update will only succeed if
+		the entries in existingstate match what is in the item already
+
+               returns the contents of the item after the attempt is made. 
+               It is up to the caller to check if the update worked or failed.
+               '''
+               matchon = dict(existingstate)
+               matchon['name'] = name
+               self.db[uuid].update(matchon, {'$set': updatedict})
+               return self.item(uuid, name)
+       def update_metadata(self, uuid, updatedict, existingstate={}):
+		'''updates a task's metadata similar to dict.update()
+
+		if 'existingstate' is supplied, the update will only succeed if
+		the entries in existingstate match what is in the metadata already
+
+               returns the contents of the metadata after the attempt is made. 
+               It is up to the caller to check if the update worked or failed.
+               '''
+               matchon = dict(existingstate)
+               matchon['metadata'] = {'$exists': True}
+               self.db[uuid].update(matchon, {'$set': updatedict})
+               return self.metadata(uuid)
+       def delete_task(self, uuid):
+		'''delete a task, all its items, and all its metadata
+
+               This is not recoverable.
+               '''
+               self.db[uuid].drop()
+
+# Define the default Store here
+Store = MongoStore
diff --git a/doc/01-what.txt b/doc/01-what.txt
new file mode 100644 (file)
index 0000000..f8d41a9
--- /dev/null
+++ b/doc/01-what.txt
@@ -0,0 +1,49 @@
+If you want to build a new server, and you're talking to someone who isn't
+a sysadmin, the stuff that you know is generally going to go something like
+this:
+
+       Someone needs a server. They want one of those
+       Linux ones, have a gopher server, and they want
+       it blue. Several other requirements specific to
+       them
+
+          |
+          V
+
+       A miracle happens
+
+          |
+          V
+
+       Really specific list of things to do to make it happen: 
+       Figure out a hostname. Allocate an IP. Put the IP in DNS.
+       Get the right VLAN. Go to the store and get some blue paint.
+       Get a hypervisor with enough free capacity. Put the host in 
+       the accounting system. Setup the guest. Configure the installer 
+       for the required distro.  kick off the install. Add some user 
+       accounts. Setup a gopher server. Firewall the gopher server 
+       from Jeremy because he doesn't like blue (and dozens more)
+
+          |
+          V
+
+       Do the stuff in the list
+
+
+The miracle is normally a sysadmin. She's probably also got about 50
+scripts to automate the rest as well, because like any good sysadmin,
+her first priority was to automate herself out of a job. It will still
+take a while to go through and run them in the right order, but for
+more complicated builds, there's no one-size-fits-all script, so bits
+and pieces around the place will have to be set off by hand.
+
+make magic is designed to be the miracle.
+
+It figures out what has to be done and what order to do it in. It
+keeps track of which steps have been done, and makes sure that
+everything is done in a correct order.   It can hook into
+systems like mudpuppy and orchestra to automate the steps 
+themselves, the combination of which can do complex, unique
+builds without any human intervention.
+
+
diff --git a/doc/02-flow.txt b/doc/02-flow.txt
new file mode 100644 (file)
index 0000000..718c910
--- /dev/null
+++ b/doc/02-flow.txt
@@ -0,0 +1,17 @@
+When we want to create a new build, to start with we:
+
+1) Get some requirements
+2) Get the list of anything we know how to do
+3) Use the requirements to cut down the list to just what we need to do
+4) Figure out what order things should be done in
+
+We get this together, and also assign some state to each item that
+has to be done for the build (initially just that it isn't done yet).
+
+A client can come along and ask which steps need to be done next.
+We look for a step that hasn't been done yet, but which has already
+had all the steps it depends on done.
+
+The client can then say it's working on the step. It can later come
+back saying that it managed to do it, failed to do it, or just doesn't
+know how to do that one.
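+
+As a rough sketch of how that conversation might look over the HTTP
+API (this assumes the httpd API from lib/httpd.py is running on port
+4554 as in local_config.py; the do_the_item() helper, the 'uuid'
+metadata key and the 'COMPLETED' state name are stand-ins for
+illustration):
+
+	import json, requests
+
+	base = 'http://localhost:4554/task/'
+
+	# create a task from some requirements and note its uuid
+	created = requests.post(base + 'create',
+			headers={'Content-Type': 'application/json'},
+			data=json.dumps({'requirements': ['coffee']}))
+	uuid = json.loads(created.content)['metadata']['uuid']
+
+	# keep asking for items that are ready, do them, and report back
+	while True:
+		ready = json.loads(requests.get(base + uuid + '/available').content)
+		if not ready:
+			break
+		for item in ready:
+			do_the_item(item)	# stand-in for whatever does the work
+			requests.post(base + uuid + '/' + item['name'],
+					headers={'Content-Type': 'application/json'},
+					data='{"state": "COMPLETED"}')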
diff --git a/doc/03-items.txt b/doc/03-items.txt
new file mode 100644 (file)
index 0000000..dd32bfd
--- /dev/null
@@ -0,0 +1,43 @@
+Items are just something that has to be done, like rebooting
+a server, finding an IP address, or installing some software.
+
+What the items can be is very flexible; make magic only cares
+that there is something to do, whether it needs to be done in this
+case, what needs to be done first, and whether it's done yet.
+
+If you want to automate any of the items, though (and that's
+pretty much the point), these requirements have to be met
+for every item:
+
+
+* It's well defined. 
+
+Basically, ask yourself: can I automate this on its own?
+If not, maybe it needs to be split up into multiple steps,
+or thought of more in terms of what you're trying to get
+done.
+
+* You know what needs to happen first
+
+You need to be clear about what information you need to do the
+item, and what needs to happen before it can be done. For example,
+before setting up the network on a system you probably need its
+hostname, and need to have allocated an IP address (see the
+sketch after this list).
+
+* There is a clear point at which it finishes.
+
+In almost every case, other items are going to be waiting
+until this item has finished, because they rely on it
+having been done.  If you have a step called "Reboot the
+server", it's more likely that what you really want is "Reboot
+the server and wait for it to come up again".
+
+* You can tell if it worked or not
+
+If you assume an item completed successfully without checking,
+other items that depend on it will run under the assumption that
+everything is in a well-known and consistent state, because
+everything before them ran correctly.  Blindly going ahead and
+doing stuff when things are already in a broken state will
+at best make it hard to debug, and at worst do damage.
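+
+As a tiny illustration of knowing what needs to happen first, here is
+roughly what the network example above might look like as items in the
+pure-python style of doc/example_python_items.py (the item names here
+are invented for the example; doc/sample_items.json shows the
+equivalent JSON form):
+
+	from core.bits import Item
+
+	class allocate_ip(Item):
+		pass
+
+	class set_hostname(Item):
+		pass
+
+	class setup_network(Item):
+		# can't start until the hostname is known and an IP is allocated
+		depends = (set_hostname, allocate_ip)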
diff --git a/doc/example_python_items.py b/doc/example_python_items.py
new file mode 100644 (file)
index 0000000..205a1b3
--- /dev/null
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+'''simple example of how to define items in pure python
+
+These are the same items as in doc/sample_items.json,
+but rather than converting them from JSON, they are defined
+using pure python (actually, the supplied doc/sample_items.json
+was generated using this module and core.marshal.ItemConverter)
+
+breakfast_task_factory is a task factory exactly like the
+one used in lib/magic.py. If you want to define your items using
+python rather than JSON, this is a pretty good way to do it
+'''
+
+import lib.loaders
+from core.bits import *
+from core.marshal import ItemConverter
+from digraphtools.predicate import predicate
+
+want_coffee = ItemConverter().predicate_string_to_callable(' coffee ')
+assert want_coffee(['coffee','tv']) == True
+assert want_coffee(['fish','tv']) == False
+
+# Items 
+class wake_up(Item):
+       pass
+
+class get_up(Item):
+       depends = (wake_up,)
+
+class make_breakfast(Item):
+       depends = (get_up,)
+
+class eat_breakfast(Item):
+       depends = (make_breakfast,)
+
+class make_coffee(Item):
+       depends = (get_up,)
+       predicate = want_coffee
+
+class drink_coffee(Item):
+       depends = (make_coffee,)
+       predicate = want_coffee
+
+class have_breakfast(Group):
+       depends = (get_up,)
+       contains = (eat_breakfast, drink_coffee)
+
+class walk_out_door(Item):
+       depends = (get_up, have_breakfast)
+
+class go_to_work(Item):
+       description = 'Leave to go to work'
+       depends = (walk_out_door,)
+
+# Tasks
+
+items = [wake_up, get_up, have_breakfast, make_breakfast, eat_breakfast, make_coffee, drink_coffee, walk_out_door, go_to_work]
+breakfast_task_factory = lib.loaders.TaskFactory(items)
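+
+# A minimal usage sketch (assumes it is run from the repository root so the
+# imports above resolve; see tests/junktest.py for a fuller example): build
+# a task from some requirements and walk its items in dependency order.
+if __name__ == '__main__':
+	from core.deptools import DigraphDependencyStrategy
+	task = breakfast_task_factory.task_from_requirements(('coffee',))
+	for item in DigraphDependencyStrategy.iterate_item_dependencies(task.goal):
+		print item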
diff --git a/doc/sample_items.json b/doc/sample_items.json
new file mode 100644 (file)
index 0000000..ed8be7e
--- /dev/null
@@ -0,0 +1,42 @@
+[
+ {
+  "name": "wake_up"
+ }, 
+
+ {
+  "name": "get_up",
+  "depends": [ "wake_up" ]
+ }, 
+
+ {
+  "name": "make_breakfast",
+  "depends": [ "get_up" ]
+ }, 
+
+ {
+  "name": "eat_breakfast",
+  "depends": [ "make_breakfast" ]
+ }, 
+
+ {
+  "name": "make_coffee",
+  "depends": [ "get_up" ], 
+  "if": " coffee " 
+ }, 
+ {
+  "name": "drink_coffee",
+  "depends": [ "make_coffee" ], 
+  "if": " coffee "
+ }, 
+
+ {
+  "name": "walk_out_door",
+  "depends": [ "get_up", "eat_breakfast", "make_coffee" ]
+ }, 
+
+ {
+  "name": "go_to_work", 
+  "description": "Leave to go to work",
+  "depends": [ "walk_out_door", "eat_breakfast", "drink_coffee" ]
+ }
+]
diff --git a/lib/__init__.py b/lib/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/lib/httpd.py b/lib/httpd.py
new file mode 100755 (executable)
index 0000000..fd2b949
--- /dev/null
@@ -0,0 +1,157 @@
+#! /usr/bin/env python
+
+'''make magic httpd API interface
+
+This provides a very, very lightweight WSGI-based HTTP server
+to expose a JSON-based API.
+
+This is by no means the only way to interact with make magic
+from the outside world, but it is a convenient way to make a
+solid point of demarcation, and give something like mudpuppy
+something to talk to.
+
+There is NO user security implemented here. If you want some
+(and you really, really do), use your favourite web server's
+WSGI interface rather than cherrypy's own one
+'''
+
+import cherrypy
+import json
+
+import config
+import lib.magic
+
+from contextlib import contextmanager
+
+def expose_json(func):
+	'''decorator to set the response Content-Type to application/json and to expose the method to cherrypy'''
+       def wrapper(*argc,**argd):
+               cherrypy.response.headers['Content-Type'] = 'application/json'
+               return func(*argc,**argd)
+       wrapper.exposed = True
+       return wrapper
+
+@contextmanager
+def http_resource():
+	'''propagate KeyErrors and ValueErrors as HTTP errors'''
+       try:
+               yield
+       except KeyError as err:
+               raise cherrypy.HTTPError(404, str(err)) # Resource not found
+       except ValueError as err:
+               raise cherrypy.HTTPError(400, str(err)) # Bad request
+
+def simple_error_page(status, message, traceback, version):
+       return '{"error": "%s", "message": "%s"}' % (status, message)
+def error_page(status, message, traceback, version):
+       '''simple error page for HTTP service rather than default overblown HTML one'''
+       return '{"error": "%s", "message": "%s",\n"traceback": "%s"}' % (status, message,traceback)
+
+cherrypy.config.update({'error_page.400': simple_error_page,
+                       'error_page.404': simple_error_page,
+                       'error_page.405': simple_error_page,
+                       'error_page.default': error_page
+                       })
+
+class Task(object):
+
+       @expose_json
+       def index(self):
+               if cherrypy.request.method == 'GET':
+                       # List ALL THE TASKS!
+                       return json.dumps(self.magic.get_tasks(),indent=1)
+               raise cherrypy.HTTPError(405)
+
+       @expose_json
+       def create(self):
+               if cherrypy.request.method == 'POST':
+                       # TODO: ANY sort of taint checking
+                       with http_resource():
+                               taskdata = json.load(cherrypy.request.body)
+                               task = self.magic.create_task(taskdata)
+                       return json.dumps(task,indent=1)
+               raise cherrypy.HTTPError(405)
+
+       @expose_json
+       def default(self, uuid, *args):
+		# TODO: replace this horrible, horrible spaghetti
+
+               if len(args) == 0:
+                       if cherrypy.request.method == 'GET':
+                               with http_resource(): # Show the task
+                                       return json.dumps(self.magic.get_task(uuid), indent=1)
+                       elif cherrypy.request.method == 'DELETE':
+                               with http_resource(): # wipe task
+                                       self.magic.delete_task(uuid)
+                                       return '{}'
+                       else: raise cherrypy.HTTPError(405) # Invalid method
+
+               if args[0] == 'available':
+                       # return items that we can do now
+                       if cherrypy.request.method == 'GET':
+                               with http_resource():
+                                       return json.dumps(self.magic.ready_to_run(uuid), indent=1)
+                       else: raise cherrypy.HTTPError(405) # Invalid method
+               elif args[0] == 'metadata':
+                       if cherrypy.request.method == 'GET':
+                               with http_resource():
+                                       return json.dumps(self.magic.get_metadata(uuid), indent=1)
+                       elif cherrypy.request.method == 'POST':
+                               with http_resource():
+                                       updatedata = json.load(cherrypy.request.body)
+                                       return json.dumps(self.magic.update_task_metadata(uuid,updatedata), indent=1)
+
+               # Nothing so simple as a single task.
+               args = dict(zip(['itemname','attrib'],args))
+
+               if 'attrib' in args:
+                       # Handle attribute on the item (only state so far)
+                       if args['attrib'] == 'state':
+                               if cherrypy.request.method == 'GET':
+                                       with http_resource():
+                                               return self.magic.get_item(uuid,args['itemname'])['state']
+                       raise cherrypy.HTTPError(405)
+               else:
+                       if cherrypy.request.method == 'GET':
+                               with http_resource():
+                                       return json.dumps(self.magic.get_item(uuid,args['itemname']), indent=1)
+                       elif cherrypy.request.method == 'POST':
+                               # Update stuff in the item
+                               with http_resource():
+                                       updatedata = json.load(cherrypy.request.body)
+                                       return json.dumps(self.magic.update_item(uuid,args['itemname'],updatedata), indent=1)
+               raise cherrypy.HTTPError(405)
+
+class Root(object):
+       @cherrypy.expose
+       def index(self):
+               cherrypy.response.headers['Content-Type'] = 'text/plain'
+               return '''make magic httpd API is running and happy
+		/task/			GET: list tasks
+		/task/create		POST: create new task  (takes { 'requirements': [] } at minimum)
+		/task/uuid/		GET: show task   DELETE: delete task
+		/task/uuid/available	GET: list items ready to run
+		/task/uuid/metadata	GET: show metadata   POST: update metadata
+		/task/uuid/itemname	GET: show item   POST: update item
+		/task/uuid/itemname/state	GET: show item state
+'''
+
+def get_cherrypy_root(magiclib):
+       '''return the root object to be given to cherrypy
+
+       also defines which URLs work. Fun times :)
+       '''
+       root = Root()
+       root.task = Task()
+
+       # Allow the objects to access magic
+       # this may not be the optimal way to do it; might want to start more
+       # instances so everything isn't using the same Store connection
+       root.magic = magiclib
+       root.task.magic = magiclib
+
+       return root
+
+def run_httpd():
+       magiclib = lib.magic.Magic() 
+       cpconfig = {'global': {'server.socket_host': config.httpd_listen_address, 'server.socket_port': config.httpd_listen_port}}
+       cherrypy.quickstart(get_cherrypy_root(magiclib), config=cpconfig)
+
+if __name__ == '__main__':
+       run_httpd()
diff --git a/lib/loaders.py b/lib/loaders.py
new file mode 100644 (file)
index 0000000..0d24db3
--- /dev/null
@@ -0,0 +1,96 @@
+#! /usr/bin/env python
+
+'''Loader of item groups'''
+
+import json
+
+import core.marshal
+import core.deptools
+from core.bits import *
+
+# FIXME: Need to implement predicate and requirement stuff and
+#        all that implies!
+
+class TaskFactory(object):
+       '''Factory to generate tasks'''
+       def __init__(self, classes, dependency_strategy=core.deptools.DigraphDependencyStrategy):
+               self.classes = classes
+               self.dependency_strategy = dependency_strategy
+
+       def task_from_requirements(self, requirements):
+		'''Create a new task given a set of requirements
+		'''
+               # Instantiate items
+               items = self.dependency_strategy.instantiate_items(self.classes)
+               items = set(items.values())     # TODO: Fix instantiate_items return. this is unintuitive
+
+               # Define a clear goal node that all existing goals depend on
+               goal = TaskComplete(self.dependency_strategy.find_goal_nodes(items))
+               items.add(goal)
+               assert set(self.dependency_strategy.early_iter_all_items(goal)) == items
+
+               # Filter out things that aren't for this task
+               goal = self.dependency_strategy.filter_dependency_graph(requirements, goal)
+               assert goal.name == 'TaskComplete'
+
+               # Unroll groups
+               items = set(self.dependency_strategy.early_iter_all_items(goal))
+               items = self.dependency_strategy.make_group_dependencies_explicit_for_items(items)
+               assert goal in items
+
+               # Create task for great justice
+               return Task(items, requirements, goal)
+
+class ObjectItemLoader(object):
+       '''Load in items defined by python objects and make a TaskFactory from them'''
+
+       @classmethod
+       def taskfactory_from_objects(self, objects):
+		'''process item classes represented by simple python objects and return a TaskFactory'''
+               classes = []
+               marsh = core.marshal.ItemConverter()
+               for o in objects:
+                       self.check_sanity(o)
+                       classes.append(marsh.itemdict_to_item_class(o))
+
+               # Have dependencies refer to the classes they depend on, not just the names
+               # FIXME: This should probably be in the marshaller module
+               class_dict = dict((cls.__name__, cls) for cls in classes)
+               for cls in classes:
+			# TODO: Warn more sanely about dependencies on non-existent items
+                       cls.depends = tuple(class_dict[c] for c in cls.depends)
+                       if issubclass(cls, Group):
+                               cls.contains = tuple(class_dict[c] for c in cls.contains)
+
+               return TaskFactory(classes)
+
+       @classmethod
+       def check_sanity(self, objdict):
+               '''check that objdict is actually in the correct form
+               if objdict isn't in the correct form for marshalling, raise
+               a ValueError'''
+               name,group = objdict.get('name'),objdict.get('group')
+               if not name and not group: 
+                       raise ValueError('dict has neither name nor group keys',objdict)
+               if name and group:
+                       raise ValueError('dict has both name and group keys')
+               if group:
+                       contains = objdict.get('contains')
+                       if not contains:
+                               raise ValueError('group dict has no contains key')
+                       if len(contains) == 0:
+                               raise ValueError('group contains list is empty')
+               return True
+
+class JSONItemLoader(ObjectItemLoader):
+       '''Load in items defined by some json and make a TaskFactory from them'''
+
+       @classmethod
+       def load_item_classes_from_file(cls, f):
+		'''load json items from a file and return a TaskFactory'''
+               return cls.taskfactory_from_objects(json.load(f))
+
+       @classmethod
+       def load_item_classes_from_string(cls, data):
+		'''load json items from a string and return a TaskFactory'''
+               return cls.taskfactory_from_objects(json.loads(data))
diff --git a/lib/magic.py b/lib/magic.py
new file mode 100644 (file)
index 0000000..bfcf991
--- /dev/null
@@ -0,0 +1,114 @@
+#! /usr/bin/env python
+
+'''main python API for make-magic
+
+This is where most of the magic happens. Any APIs you write (e.g. for HTTP)
+may very well just call this much of the time
+'''
+
+import config
+
+from core.store import Store
+from core.marshal import ItemConverter,TaskConverter
+from lib.loaders import JSONItemLoader
+from core.deptools import DigraphDependencyStrategy
+from core.bits import Item
+
+class Magic(object):
+       def __init__(self,store_factory=Store,dependency_strategy=DigraphDependencyStrategy):
+               # Load items
+               self.load_items()
+               self.store = store_factory()
+               self.dependency_strategy = dependency_strategy
+
+       def load_items(self):
+               itemsf = open(config.items_file)
+               self.taskfactory = JSONItemLoader.load_item_classes_from_file(itemsf)
+               itemsf.close()
+
+       reload_items = load_items
+
+       #
+       # This stuff is pretty easy. Get information about existing tasks
+       # and update them: Just pass it off to the storage module
+       #
+       def get_tasks(self):
+               return self.store.get_tasks()
+       def get_task(self, uuid):
+               metadata = self.store.metadata(uuid)
+               items = self.store.items(uuid)
+               if not metadata and not items:
+                       raise KeyError('uuid '+str(uuid)+' not found')
+               return {'items': items, 'metadata': metadata}
+       def get_item(self, uuid, itemname):
+               item = self.store.item(uuid, itemname)
+               if item is None:
+                       raise KeyError(uuid+'/'+itemname)
+               return item
+       def get_metadata(self, uuid):
+               metadata = self.store.metadata(uuid)
+               if not metadata:
+                       raise KeyError('uuid '+str(uuid)+' not found')
+               return metadata
+       def update_item(self, uuid, name, updatedict, onlyif={}):
+               cannot_update = set(('name','depends','if'))
+               onlyif = dict(onlyif)
+               for k,v in updatedict.items():
+                       if k in cannot_update:
+                               raise ValueError('cannot modify item attribute "%s"' %(k,))
+               if 'onlyif' in updatedict:
+                       if not getattr(updatedict['onlyif'], 'items', None): 
+                               raise ValueError('can only set "onlyif" to a dictionary')
+                       onlyif.update(updatedict['onlyif'])
+                       updatedict.pop('onlyif')
+               if 'state' in updatedict:
+                       if updatedict['state'] not in Item.allowed_states:
+                               raise ValueError('can only change state to '+','.join(Item.allowed_states))
+               return self.store.update_item(uuid,name,updatedict,onlyif)
+       def update_item_state(self, uuid, name, oldstate, newstate):
+               '''helper to update a state with a guard against the old one
+               WARNING: This actually sucks quite a bit. It doesn't guard against race conditions
+               from multiple agents.  This is deliberately not exposed to the HTTP interface for
+               that reason.
+               '''
+               return self.update_item(uuid, name, {'state': newstate}, {'state': oldstate})
+       def update_task_metadata(self, uuid, updatedict, onlyif={}):
+               if 'uuid' in updatedict and uuid != updatedict['uuid']:
+                       raise ValueError('cannot change uuid for a task')
+               return self.store.update_metadata(uuid,updatedict,onlyif)
+       def delete_task(self, uuid):
+               self.store.delete_task(uuid)
+
+       #
+       # Creating a new task is almost as easy!
+       #
+       def create_task(self, task_data):
+               if 'requirements' not in task_data:
+                       raise ValueError('No requirements supplied to create task')
+               task = self.taskfactory.task_from_requirements(task_data['requirements'])
+
+               # FIXME: Should be in core.marshal
+               # This is also an awful hack
+               task.data.update(task_data)
+               ic = ItemConverter()
+               items = [ic.item_to_itemdict(item) for item in task.items]
+
+               # Save!
+               self.store.new_task(task.uuid, items, metadata=task.data)
+               return self.get_task(task.uuid)
+
+       #
+       #  What can we do?
+       #
+       def ready_to_run(self, uuid):
+               '''return all the items that we can run'''
+               task = self.get_task(uuid)
+               converter = TaskConverter()
+               task = converter.taskdict_to_task(task)
+               ready =  self.dependency_strategy.ready_to_run(task.items)
+               # FIXME: Evil, Evil hack
+               if 'TaskComplete' in (r.name for r in ready):
+                       self.update_item(uuid, 'TaskComplete', {'data': {'state': 'COMPLETED'}})
+                       return self.ready_to_run(uuid)
+		return [converter.item_to_itemdict(item) for item in ready]
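+
+# A minimal end-to-end sketch (assumptions: mongodb is running for the
+# default Store, config.items_file points at something like
+# doc/sample_items.json, and the task metadata carries a 'uuid' key as
+# update_task_metadata above suggests):
+if __name__ == '__main__':
+	m = Magic()
+	task = m.create_task({'requirements': []})
+	uuid = task['metadata']['uuid']
+	print m.ready_to_run(uuid)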
diff --git a/lib/vis.py b/lib/vis.py
new file mode 100644 (file)
index 0000000..0adf107
--- /dev/null
@@ -0,0 +1,21 @@
+#! /usr/bin/env python 
+
+'''visualisation tools for make-magic'''
+
+def write_dot_from_items(items, outfile):
+       '''generate dot file output for dependencies amongst supplied items
+       writes output to a supplied file already opened for writing.
+
+       Does not have any idea what a group is and will treat it just like anything else
+       '''
+       print >> outfile, 'digraph depgraph {'
+       for item in items:
+		print >> outfile, '\t"%s";' % (item.name,)
+		for dep in item.depends:
+			print >> outfile, '\t\t"%s" -> "%s";' % (item.name, dep.name)
+       print >> outfile, '}'
+
+
+if __name__ == '__main__':
+       import sys
+       write_dot_from_items([], sys.stdout)
diff --git a/local_config.py b/local_config.py
new file mode 100644 (file)
index 0000000..57bd7e4
--- /dev/null
@@ -0,0 +1,6 @@
+#! /usr/bin/env python
+
+#items_file = 'doc/anchor_items.json'
+
+httpd_listen_address = '0.0.0.0'
+httpd_listen_port = 4554
diff --git a/magic_httpd.py b/magic_httpd.py
new file mode 100755 (executable)
index 0000000..a302656
--- /dev/null
@@ -0,0 +1,8 @@
+#! /usr/bin/env python
+
+'''Run the make-magic httpd API'''
+
+import lib.httpd
+
+if __name__ == '__main__':
+       lib.httpd.run_httpd()
diff --git a/mclient.py b/mclient.py
new file mode 100755 (executable)
index 0000000..59ffd0a
--- /dev/null
@@ -0,0 +1,72 @@
+#! /usr/bin/env python
+
+'''overly simple command line client to access the magic HTTP API
+
+TODO: Make useful as something other than an unintuitive debugging tool
+'''
+
+import sys
+import requests
+import random
+
+base_url = 'http://localhost:4554/'
+
+class CLIhandler(object):
+       def cmd_tasks(self):
+               print requests.get(base_url+'task').content
+       def cmd_task_create(self, json_task_data):
+               print requests.post(base_url+'task/create', headers={'Content-Type':'application/json'}, data=json_task_data).content
+       def cmd_metadata(self, uuid):
+               print requests.get(base_url+'task/'+uuid+'/metadata').content
+       def cmd_update_metadata(self, uuid, json_item_data):
+               print requests.get(base_url+'task/'+uuid+'/metadata').content
+               print
+               print requests.post(base_url+'task/'+uuid+'/metadata', headers={'Content-Type':'application/json'}, data=json_item_data).content
+       def cmd_task(self, uuid):
+               print requests.get(base_url+'task/'+uuid).content
+       def cmd_item(self, uuid, item):
+               print requests.get(base_url+'task/'+uuid+'/'+item).content
+       def cmd_item_state(self, uuid, item):
+               print requests.get(base_url+'task/'+uuid+'/'+item+'/state').content
+       def cmd_update_item(self, uuid, item, json_item_data):
+               print requests.get(base_url+'task/'+uuid+'/'+item).content
+               print
+               print requests.post(base_url+'task/'+uuid+'/'+item, headers={'Content-Type':'application/json'}, data=json_item_data).content
+       def cmd_update_item_state(self, uuid, item, old_state, new_state):
+               '''Example state update
+               This is a good example of where we can do item updates that will only work if the item
+		is in the same state as we think it is by passing an 'onlyif' dict.
+               
+               Also, if we are doing something like changing state to lock an item for work, we want to
+               make sure that if someone else is doing the same, we have a way of figuring out who actually
+               got the lock (simply checking if the state changed is not enough information as someone else
+		may be trying to change the state to the same thing; if we're both changing it to IN_PROGRESS,
+               we don't want to both assume that we were the one to change it to that)
+
+               We guard against this by setting an attribute to a random value, and checking when we get
+               a response that that random value is what we expect.  There is nothing magical about the
+		attribute we set, but if workers don't all use the same attribute name, it's not going to be
+               as useful.  This should only be a problem if you're using different worker codebases against
+               the same task
+               '''
+               # show the existing state just for demonstration purposes. We don't actually use it
+               print "existing state:",
+               self.cmd_item_state(uuid,item)
+               token = random.randint(1,2**48)
+               updatewith = '{"state": "%s", "_change_state_token": %d, "onlyif": {"state": "%s"}}' % (new_state, token, old_state)
+               print "updating request:",updatewith
+               print requests.post(base_url+'task/'+uuid+'/'+item, headers={'Content-Type':'application/json'}, data=updatewith).content
+       def cmd_items_ready(self, uuid):
+               print requests.get(base_url+'task/'+uuid+'/available').content
+       def cmd_task_delete(self, uuid):
+               print requests.delete(base_url+'task/'+uuid).content
+
+       commands = property(lambda self: [n[4:] for n in dir(self) if n[:4] == 'cmd_'])
+       def call(self, cmd, *args):  return getattr(self, 'cmd_'+cmd)(*args)
+
+if __name__ == '__main__':
+       clih = CLIhandler()
+       if len(sys.argv) < 2:
+               print >> sys.stderr, sys.argv[0],'[',' | '.join(clih.commands),']'
+       else:
+               clih.call(sys.argv[1], *sys.argv[2:])
diff --git a/tests/breakfast.py b/tests/breakfast.py
new file mode 100644 (file)
index 0000000..87d2799
--- /dev/null
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+'''simple example of how to define items in pure python'''
+
+import lib.loaders
+from core.bits import *
+from core.marshal import ItemConverter
+from digraphtools.predicate import predicate
+
+want_coffee = ItemConverter().predicate_string_to_callable(' coffee ')
+assert want_coffee(['coffee','tv']) == True
+assert want_coffee(['fish','tv']) == False
+
+# Items 
+class wake_up(Item):
+       pass
+
+class get_up(Item):
+       depends = (wake_up,)
+
+class make_breakfast(Item):
+       depends = (get_up,)
+
+class eat_breakfast(Item):
+       depends = (make_breakfast,)
+
+class make_coffee(Item):
+       depends = (get_up,)
+       predicate = want_coffee
+
+class drink_coffee(Item):
+       depends = (make_coffee,)
+       predicate = want_coffee
+
+class walk_out_door(Item):
+       depends = (get_up, eat_breakfast, make_coffee)
+
+class go_to_work(Item):
+       description = 'Leave to go to work'
+       depends = (walk_out_door, eat_breakfast, drink_coffee)
+
+# Tasks
+
+items = [wake_up, get_up, make_breakfast, eat_breakfast, make_coffee, drink_coffee, walk_out_door, go_to_work]
+breakfast_task_factory = lib.loaders.TaskFactory(items)
diff --git a/tests/gjunktest.py b/tests/gjunktest.py
new file mode 100755 (executable)
index 0000000..9b39d0c
--- /dev/null
@@ -0,0 +1,22 @@
+#! /usr/bin/env python
+from core.deptools import *
+from groupedbfast import *
+
+coffee_drinker = WageSlave( wants=('coffee','hugs') )
+caffeine_free = WageSlave()
+
+dm = DependencyMagic(DigraphDependencyStrategy)
+
+root = dm.make_new_dep_graph(coffee_drinker.will[0])
+items = dm.strategy.early_iter_all_items(root)
+print "goal nodes are",dm.strategy.find_goal_nodes(items),"\n"
+
+
+for i in dm.item_list_for_task(coffee_drinker):
+       print i
+
+print 
+
+for i in dm.item_list_for_task(caffeine_free):
+       print i
+
diff --git a/tests/groupedbfast.py b/tests/groupedbfast.py
new file mode 100644 (file)
index 0000000..1849d6f
--- /dev/null
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+import lib.loaders
+from core.bits import *
+from core.marshal import ItemConverter
+from digraphtools.predicate import predicate
+
+want_coffee = ItemConverter().predicate_string_to_callable(' coffee ')
+assert want_coffee(['coffee','tv']) == True
+assert want_coffee(['fish','tv']) == False
+
+# Items 
+class wake_up(Item):
+       pass
+
+class get_up(Item):
+       depends = (wake_up,)
+
+class make_breakfast(Item):
+       depends = (get_up,)
+
+class eat_breakfast(Item):
+       depends = (make_breakfast,)
+
+class make_coffee(Item):
+       depends = (get_up,)
+       predicate = want_coffee
+
+class drink_coffee(Item):
+       depends = (make_coffee,)
+       predicate = want_coffee
+
+class have_breakfast(Group):
+       depends = (get_up,)
+       contains = (eat_breakfast, drink_coffee)
+
+class walk_out_door(Item):
+       depends = (get_up, have_breakfast)
+
+class go_to_work(Item):
+       description = 'Leave to go to work'
+       depends = (walk_out_door,)
+
+# Tasks
+
+items = [wake_up, get_up, have_breakfast, make_breakfast, eat_breakfast, make_coffee, drink_coffee, walk_out_door, go_to_work]
+breakfast_task_factory = lib.loaders.TaskFactory(items)
diff --git a/tests/junktest.py b/tests/junktest.py
new file mode 100755 (executable)
index 0000000..930aeed
--- /dev/null
@@ -0,0 +1,56 @@
+#! /usr/bin/env python
+from core.deptools import *
+from breakfast import *
+
+# Pure python defined item test code
+#
+# Tests that we can define items in pure python code and have the marshallers
+# convert it back and forth several times to simple dicts and JSON without
+# any loss of information
+#
+# THIS IS NOT AN EXAMPLE OF GENERAL USAGE!
+#
+# TODO: Move this into unit testing
+
+coffee_drinker = breakfast_task_factory.task_from_requirements( ('coffee','hugs') )
+caffeine_free = breakfast_task_factory.task_from_requirements( ('hugs',) )
+
+ds = DigraphDependencyStrategy
+
+print coffee_drinker
+for item in ds.iterate_item_dependencies(coffee_drinker.goal):
+       print item
+
+print
+print caffeine_free
+for item in ds.iterate_item_dependencies(caffeine_free.goal):
+       print item
+
+# Marshalling
+
+import core.marshal
+import lib.loaders
+import json
+
+print "\nconverting breakfast task to json:"
+ic = core.marshal.ItemConverter()
+objitems = map(ic.itemclass_to_itemdict, breakfast_task_factory.classes)
+
+jsondata = json.dumps(objitems)
+print jsondata
+
+
+print "\nimporting back in"
+task_factory = lib.loaders.JSONItemLoader.load_item_classes_from_string(jsondata)
+for c in task_factory.classes:
+       print c()
+
+print "\nand back again"
+objitems = map(ic.itemclass_to_itemdict, breakfast_task_factory.classes)
+jsondata = json.dumps(objitems)
+task_factory = lib.loaders.JSONItemLoader.load_item_classes_from_string(jsondata)
+
+print "\nNow trying another pruning example:"
+another_caffeine_free = task_factory.task_from_requirements( [] )
+for item in ds.iterate_item_dependencies(another_caffeine_free.goal):
+       print item
diff --git a/tests/test.py b/tests/test.py
new file mode 100755 (executable)
index 0000000..aef286d
--- /dev/null
@@ -0,0 +1,91 @@
+#! /usr/bin/env python
+
+import unittest2 as unittest
+import digraphtools
+import digraphtools.topsort as topsort
+
+import core.bits as bits
+import core.deptools as deptools
+import core.store
+
+class BitTests(unittest.TestCase):
+       def testTask(self):
+               task = bits.Task(tuple(), None, bits.TaskComplete(['fnord']))
+               task = bits.Task(('fish','heads'), None, bits.TaskComplete(['fnord']))
+
+class GraphStrategyTests(unittest.TestCase):
+       def setUp(self):
+               class C(bits.Item): pass
+               class B(bits.Item): depends = (C,)
+               class A(bits.Item): depends = (B,C)
+               self.A,self.B,self.C = A,B,C
+       def test_get_graph(self):
+               A,B,C = self.A,self.B,self.C
+               g = deptools.GraphDependencyStrategy.get_graph(A)
+               ref = [A,[ [B,[ [C, []] ]], [C, []] ]]  # Reasons to not use this representation
+               self.assertEqual(g,ref)
+       def test_iterate_item_dependencies(self):
+               toporder = deptools.GraphDependencyStrategy.iterate_item_dependencies(self.A)
+               self.assertEqual(list(toporder), [self.C,self.B,self.A])
+       def test_item_factory(self):
+               a = deptools.GraphDependencyStrategy.item_factory(self.A)
+               self.assertIsInstance(a, self.A)
+               clstoporder = deptools.GraphDependencyStrategy.iterate_item_dependencies(self.A)
+               insttoporder = deptools.GraphDependencyStrategy.iterate_item_dependencies(a)
+               for cls,inst in zip(clstoporder,insttoporder):
+                       self.assertIsInstance(inst,cls)
+
+class DigraphStrategyTests(unittest.TestCase):
+       def setUp(self):
+               class C(bits.Item): pass
+               class B(bits.Item): depends = (C,)
+               class A(bits.Item): depends = (B,C)
+               self.A,self.B,self.C = A,B,C
+       def test_iterate_item_dependencies(self):
+               toporder = deptools.DigraphDependencyStrategy.iterate_item_dependencies(self.A)
+               self.assertEqual(list(toporder), [self.C,self.B,self.A])
+       def test_item_factory(self):
+               a = deptools.DigraphDependencyStrategy.item_factory(self.A)
+               self.assertIsInstance(a, self.A)
+               clstoporder = deptools.DigraphDependencyStrategy.iterate_item_dependencies(self.A)
+               insttoporder = deptools.DigraphDependencyStrategy.iterate_item_dependencies(a)
+               for cls,inst in zip(clstoporder,insttoporder):
+                       self.assertIsInstance(inst,cls)
+       def test_find_goal_nodes(self):
+               goals = deptools.DigraphDependencyStrategy.find_goal_nodes([self.A,self.B,self.C])
+               self.assertEqual(set([self.A]), goals)
+
+class SimpleStrategyTests(unittest.TestCase):
+       def setUp(self):
+               class C(bits.Item): pass
+               class B(bits.Item): depends = (C,)
+               class A(bits.Item): depends = (B,C)
+               self.A,self.B,self.C = A,B,C
+       def test_iterate_item_dependencies(self):
+               toporder = deptools.SimpleDependencyStrategy.iterate_item_dependencies(self.A)
+               self.assertEqual(list(toporder), [self.C,self.B,self.A])
+
+class TopsortTests(unittest.TestCase):
+       def test_vr_topsort(self):
+               n = 5
+               partial_order = [(1,2), (2,3), (1,5)]
+               g = digraphtools.graph_from_edges(digraphtools.from_partial_order(partial_order))
+               grid = topsort.partial_order_to_grid(partial_order,n)
+               for le in topsort.vr_topsort(n,grid):
+                       digraphtools.verify_partial_order(digraphtools.iter_partial_order(g), le)
+
+class StoreTests(unittest.TestCase):
+       sample_json = '[{"name": "wake_up"}, {"depends": ["wake_up"], "name": "get_up"}, {"depends": ["get_up"], "name": "make_coffee"}, {"depends": ["make_coffee"], "name": "drink_coffee"}, {"depends": ["get_up"], "name": "make_breakfast"}, {"depends": ["make_breakfast"], "name": "eat_breakfast"}, {"depends": ["get_up", "eat_breakfast", "make_coffee"], "name": "walk_out_door"}, {"depends": ["walk_out_door", "eat_breakfast", "drink_coffee"], "name": "go_to_work", "description": "Leave to go to work"}]'
+       def _dont_test_memorystore(self):
+               uuid = '123456'
+               ms = core.store.MemoryStore()
+               self.assertFalse(ms.exists(uuid))
+               self.assertFalse(ms.exists(uuid, 'items'))
+               ms.new_task(uuid)
+               self.assertTrue(ms.exists(uuid))
+               self.assertFalse(ms.exists(uuid, 'items'))
+               ms.store(uuid, 'items', self.sample_json)
+               self.assertEqual(ms.retrieve(uuid, 'items'), self.sample_json)
+
+if __name__ == '__main__':
+       unittest.main()