.gitignore | 1
docs/main/Deployment.rst | 53
docs/main/Development.rst | 89
docs/main/GettingStarted.rst | 79
docs/main/IntegratingWithTG2.rst | 100
docs/main/MokshaApplications.rst | 2
docs/main/RPMInstallation.rst | 87
docs/main/VirtualenvInstallation.rst | 69
moksha.spec | 27
moksha/__init__.py | 2
moksha/api/widgets/live/__init__.py | 2
moksha/api/widgets/live/live.py | 21
moksha/api/widgets/stomp/stomp.py | 12
moksha/apps/chat/moksha/apps/__init__.py | 1
moksha/apps/chat/pavement.py | 2
moksha/apps/docs/moksha/apps/__init__.py | 1
moksha/apps/docs/pavement.py | 2
moksha/apps/menus/moksha/apps/__init__.py | 1
moksha/apps/menus/pavement.py | 2
moksha/apps/metrics/moksha/apps/__init__.py | 1
moksha/apps/metrics/moksha/apps/metrics/widgets/metrics.py | 2
moksha/apps/metrics/pavement.py | 2
moksha/controllers/widgets.py | 14
moksha/lib/paver_tasks.py | 4
moksha/middleware/middleware.py | 10
moksha/public/javascript/moksha.js | 28
moksha/templates/index.mak | 2
moksha/widgets/container/container.py | 42
moksha/widgets/container/static/css/mbContainer.css | 50
moksha/widgets/container/static/js/mbContainer.js | 607 +-
moksha/widgets/container/static/js/mbContainer.min.js | 2
moksha/widgets/container/templates/container.mak | 2
moksha/widgets/source.py | 16
normal-reqs.txt | 69
pavement.py | 6
pip.py | 3857 -------------
production/apache/moksha.wsgi | 9
production/moksha-hub | 20
production/moksha-hub.init | 52
production/sample-production.ini | 2
production/stable-reqs.txt | 62
requirements.txt | 69
setup.py | 2
43 files changed, 1108 insertions(+), 4375 deletions(-)
New commits:
commit 1431f373507b560a5cd25e3ef3be55eec4de5bab
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:45:57 2009 -0400
update the .gitignore
diff --git a/.gitignore b/.gitignore
index 86ad0f1..cd34329 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,3 +18,4 @@ rabbitmq-stomp
moksha/apps/*/*.spec
moksha/apps/*/build
moksha/apps/*/*egg-info
+tg2-bootstrap.py
commit fce72b78f1a7187f77e1f9cc57d753c8d57d27e5
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:45:16 2009 -0400
Add a separate RPMInstallation doc
diff --git a/docs/main/RPMInstallation.rst b/docs/main/RPMInstallation.rst
new file mode 100644
index 0000000..32f7ca9
--- /dev/null
+++ b/docs/main/RPMInstallation.rst
@@ -0,0 +1,87 @@
+==============================================
+Setting up a Moksha RPM & mod_wsgi environment
+==============================================
+
+Set up the TurboGears2/Moksha yum repo
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Not all of Moksha's dependencies are in Fedora yet. They are all
+currently under review, but in the meantime these instructions will
+install Moksha and its dependencies as RPM packages from a third-party
+yum repository.
+
+You can track the progress of getting TurboGears2 into Fedora `here
+<http://fedoraproject.org/wiki/TurboGears2>`_.
+
+To set up Luke Macken's TurboGears2 yum repository, run the following
+commands as root, replacing `$DISTRO` with one of `fedora-rawhide`,
+`fedora-11`, `fedora-10`, or `epel-5`.
+
+.. code-block:: bash
+
+ cd /etc/yum.repos.d/
+    curl -O http://lmacken.fedorapeople.org/rpms/tg2/$DISTRO/tg2.repo
+ yum -y install TurboGears2 python-tg-devtools
+
+.. note::
+
+ It is recommended that you perform a `yum update` after installing
+ the Moksha/TurboGears2 stack, to ensure that you have the latest
+ versions of all the dependencies.
+
+.. note::
+
+    At the moment the full TurboGears2 stack is not yet in Fedora/EPEL,
+    so you'll have to hook up a third-party repository. You can track
+    the status of TurboGears2 in Fedora here:
+
+    http://fedoraproject.org/wiki/TurboGears2
+
+Install the dependencies and set up your RPM tree
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ $ sudo yum install rpmdevtools python-paver python-tg-devtools TurboGears2
+ $ sudo yum-builddep -y moksha
+ $ rpmdev-setuptree
+
+Installing the Moksha Apache/mod_wsgi server
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ $ sudo yum install moksha-server
+ $ sudo semanage fcontext -a -t httpd_cache_t '/var/cache/moksha(/.*)?'
+ $ sudo restorecon -Rv /var/cache/moksha
+
+
+Running Moksha
+~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ $ sudo /sbin/service httpd restart
+
+
+Running Orbited
+~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ $ orbited
+
+
+Running the Moksha Hub
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ $ sudo /sbin/service moksha-hub restart
+
+
+Watching the Error Log
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ $ sudo tail -f /var/log/httpd/moksha_error_log
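[Editor's note: the doc above stops short of showing the WSGI entry point that mod_wsgi loads. Below is a minimal sketch of what a script like production/apache/moksha.wsgi typically contains, assuming PasteDeploy is installed; the ini path and egg-cache location are illustrative, not taken from this commit.]

    import os

    # mod_wsgi runs with a sparse environment; give setuptools a writable
    # egg cache (the /var/cache/moksha context was labeled for httpd above).
    os.environ['PYTHON_EGG_CACHE'] = '/var/cache/moksha/egg-cache'

    from paste.deploy import loadapp

    # Load the TurboGears2/Moksha application described by the ini file.
    # mod_wsgi looks for a module-level name called `application`.
    application = loadapp('config:/etc/moksha/production.ini')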
commit b5f51c83faa2612d9dfc00af46580cac43a8bf99
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:44:58 2009 -0400
Add a separate VirtualenvInstallation doc
diff --git a/docs/main/VirtualenvInstallation.rst b/docs/main/VirtualenvInstallation.rst
new file mode 100644
index 0000000..daafa61
--- /dev/null
+++ b/docs/main/VirtualenvInstallation.rst
@@ -0,0 +1,69 @@
+Virtualenv installation
+-----------------------
+
+This guide will help you quickly get up and running with a local copy of
+Moksha. It will run the Moksha WSGI application using the Paste threaded HTTP
+server, a single Orbited daemon with an embedded MorbidQ STOMP message broker,
+SQLite SQLAlchemy and Feed databases, and an in-memory cache. This setup is
+meant to be dead-simple to get up and running, and is not designed for
+production deployments.
+
+This installation method has been tested on OS X, Fedora, and RHEL.
+See the :doc:`RPMInstallation` guide for deploying with RPM and mod_wsgi.
+
+Installing the necessary dependencies
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You'll need the `virtualenv <http://pypi.python.org/pypi/virtualenv>`_
+package.
+
+.. code-block:: bash
+
+ # yum -y install python-virtualenv python-memcached gcc
+
+The `start-moksha` script mentioned below should install all of the necessary
+dependencies. However, it will attempt to compile a few things, such as
+lxml, so you may need to install additional build dependencies like `libxml2`
+and `libxslt`. If you're using yum, you can easily install all of the build
+requirements by doing:
+
+.. code-block:: bash
+
+ # yum-builddep -y python-lxml PyOpenSSL python-sqlite2
+
+
+Getting the code
+~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+    $ git clone git://git.fedorahosted.org/git/moksha
+
+
+Starting
+~~~~~~~~
+
+.. code-block:: bash
+
+ $ ./start-moksha
+
+.. note::
+    This script takes care of setting up your TurboGears2 virtual environment
+    the first time it is run. To work in the virtualenv manually, run
+    `source tg2env/bin/activate` to enter it, and `deactivate` to leave it.
+
+
+Stopping
+~~~~~~~~
+
+.. code-block:: bash
+
+ $ ./stop-moksha
+
+
+Using Moksha
+~~~~~~~~~~~~
+
+Now you can navigate your web browser to the following url:
+
+`http://localhost:8080 <http://localhost:8080>`_
+
+.. note::
+ Going to `127.0.0.1` will not work properly with the current Orbited setup,
+ so you must make sure to go to `localhost`.
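[Editor's note: once `./start-moksha` is running, a quick way to confirm the Paste server is answering on the URL above; a sketch only, using the Python 2 standard library of this codebase's era.]

    import urllib2

    # Hits the same URL the doc tells you to open in a browser.
    response = urllib2.urlopen('http://localhost:8080/')
    print response.getcode()  # 200 means the Moksha WSGI app is up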
commit bc596d475c1be6c6e366064a69168fa308a0db0e
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:44:40 2009 -0400
Some minor container tweaks for our source widget
diff --git a/moksha/widgets/source.py b/moksha/widgets/source.py
index e119cc5..88176a4 100644
--- a/moksha/widgets/source.py
+++ b/moksha/widgets/source.py
@@ -41,9 +41,9 @@ class SourceCodeWidget(Widget):
template = "${code}"
engine_name = 'mako'
container_options = {'width': 600, 'height': 500, 'title':
'View Source',
- 'icon': 'comment.png', 'top': 80,
'left': 250,
- 'iconize': False, 'minimize': False}
+ 'icon': 'comment.png', 'top': 80,
'left': 250}
hidden = True
+ module = False
def update_params(self, d):
super(SourceCodeWidget, self).update_params(d)
commit 51b284f2750f89d0ea49fa72da55bf295901ca7b
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:43:35 2009 -0400
Detect if we're dealing with a LiveWidget, and set some container
callbacks to unsubscribe from any message topics.
diff --git a/moksha/widgets/container/container.py b/moksha/widgets/container/container.py
index 70d004a..3ce8526 100644
--- a/moksha/widgets/container/container.py
+++ b/moksha/widgets/container/container.py
@@ -66,6 +66,18 @@ class MokshaContainer(Widget):
 
         if isinstance(d.content, Widget):
             d.widget_name = d.content.__class__.__name__
+
+        if isinstance(d.content, LiveWidget):
+            topics = d.content.get_topics()
+            # FIXME: also unregister the moksha callback functions.  Handle
+            # cases where multiple widgets are listening to the same topics
+            d.onClose = js_callback("function(o){%s $(o).remove();}" %
+                    unsubscribe_topics(topics))
+            d.onIconize = d.onCollapse = js_callback("function(o){%s}" %
+                    unsubscribe_topics(topics))
+            d.onRestore = js_callback("function(o){%s}" %
+                    subscribe_topics(topics))
+
         d.content = d.content.display()
 
         for option in self.options:
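[Editor's note: the subscribe_topics/unsubscribe_topics helpers used above come from moksha.api.widgets.live, but their implementation isn't shown in this series. They only need to return a string of JavaScript statements to splice into "function(o){%s}". A hypothetical sketch of their shape; the moksha.topic_subscribe/moksha.topic_unsubscribe JS names are assumptions, not confirmed by this commit.]

    def subscribe_topics(topics):
        # Emit one JS statement per topic; helper names are assumed.
        return ''.join(["moksha.topic_subscribe('%s');" % t for t in topics])

    def unsubscribe_topics(topics):
        return ''.join(["moksha.topic_unsubscribe('%s');" % t for t in topics])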
commit 27452c56197621243c179a6c0768b4a6a21168a6
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:41:07 2009 -0400
Update our MokshaContainer API to match the functionality of the mbContainers.
This adds a lot of useful callbacks, and will allow us to properly handle live
widgets. Right now when you close a LiveWidget, it will break JavaScript for
everything :( This will allow us to detect when a window is closed, collapsed,
or iconified, and let the widget unsubscribe from any message topics.
diff --git a/moksha/widgets/container/container.py b/moksha/widgets/container/container.py
index 754229a..70d004a 100644
--- a/moksha/widgets/container/container.py
+++ b/moksha/widgets/container/container.py
@@ -16,9 +16,12 @@
 #
 # Authors: Luke Macken <lmacken(a)redhat.com>
 
-from tw.api import Widget, JSLink, CSSLink
+from tw.api import Widget, JSLink, CSSLink, js_callback
 from tw.jquery import jquery_js, jQuery
 
+from moksha.api.widgets.live import LiveWidget
+from moksha.api.widgets.live import subscribe_topics, unsubscribe_topics
+
 container_js = JSLink(filename='static/js/mbContainer.min.js', modname=__name__)
 container_css = CSSLink(filename='static/css/mbContainer.css', modname=__name__)
 
@@ -29,9 +32,11 @@ class MokshaContainer(Widget):
     options = ['draggable', 'resizable']
     button_options = ['iconize', 'minimize', 'close']
     params = ['buttons', 'skin', 'height', 'width', 'left', 'top', 'id',
-              'title', 'icon', 'content', 'widget_name', 'view_source'] + \
-              options[:]
-    draggable = droppable = resizable = True
+              'title', 'icon', 'content', 'widget_name', 'view_source', 'dock',
+              'onResize', 'onClose', 'onCollapse', 'onIconize', 'onDrag',
+              'onRestore'] + options[:]
+    draggable = droppable = True
+    resizable = False
     iconize = minimize = close = True
     hidden = True # hide from the moksha menu
     content = '' # either text, or a Widget instance
@@ -48,6 +53,14 @@ class MokshaContainer(Widget):
     left = 170
     top = 270
 
+    # Javascript callbacks
+    onResize = js_callback("function(o){}")
+    onClose = js_callback("function(o){}")
+    onCollapse = js_callback("function(o){}")
+    onIconize = js_callback("function(o){}")
+    onDrag = js_callback("function(o){}")
+    onRestore = js_callback("function(o){}")
+
     def update_params(self, d):
         super(MokshaContainer, self).update_params(d)
 
@@ -65,7 +78,13 @@ class MokshaContainer(Widget):
             d.buttons = d.buttons[:-1]
 
         self.add_call(jQuery('#%s' % d.id).buildContainers({
-            'elementsPath': '/toscawidgets/resources/moksha.widgets.container.container/static/css/elements/'
+            'elementsPath': '/toscawidgets/resources/moksha.widgets.container.container/static/css/elements/',
+            'onClose': d.onClose,
+            'onResize': d.onResize,
+            'onCollapse': d.onCollapse,
+            'onIconize': d.onIconize,
+            'onDrag': d.onDrag,
+            'onRestore': d.onRestore,
         }))
diff --git a/moksha/widgets/container/templates/container.mak b/moksha/widgets/container/templates/container.mak
index 71147f6..07a26cb 100644
--- a/moksha/widgets/container/templates/container.mak
+++ b/moksha/widgets/container/templates/container.mak
@@ -1,4 +1,4 @@
-<div id="${id}" class="containerPlus ${draggable} ${resizable}" style="top:${top}px;left:${left}px" buttons="${buttons}" skin="${skin}" icon="${icon}" width="${width}" height="${height}">
+<div id="${id}" class="containerPlus ${draggable} ${resizable}" style="top:${top}px;left:${left}px" buttons="${buttons}" skin="${skin}" icon="${icon}" width="${width}" height="${height}" dock="${dock}">
   <div class="no">
     <div class="ne">
       <div class="n">${title}</div>
commit f52d4487e62af3941e9011d79fad157cf57c3d12
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:40:40 2009 -0400
Add a moksha container dock
diff --git a/moksha/templates/index.mak b/moksha/templates/index.mak
index 255da73..101f88b 100644
--- a/moksha/templates/index.mak
+++ b/moksha/templates/index.mak
@@ -29,6 +29,8 @@
       </div>
     </a>
 
+    <div id="moksha_dock" style="display:block; padding-top: 10px; height: 30px; bottom:5px; position: absolute;" />
+
     ## Inject our global resources
     ${tmpl_context.moksha_global_resources()}
diff --git a/moksha/widgets/container/container.py
b/moksha/widgets/container/container.py
index e50d293..754229a 100644
--- a/moksha/widgets/container/container.py
+++ b/moksha/widgets/container/container.py
@@ -39,6 +39,7 @@ class MokshaContainer(Widget):
     title = 'Moksha Container'
     skin = 'default' # default, black, white, stiky, alert
     view_source = True
+    dock = 'moksha_dock'
     #icon = 'chart.png'
 
     # Pixel tweaking
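[Editor's note: the new `dock` param defaults to the "moksha_dock" div added to index.mak above; any element id should work. A hypothetical one-liner, assuming params can be overridden at construction time as usual for ToscaWidgets.]

    # Iconized containers will dock into <div id="my_custom_dock"> instead.
    container = MokshaContainer('docked_widget', dock='my_custom_dock')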
commit 9917c5f569d8c479e8a26773e2ed2e1874886852
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:40:07 2009 -0400
Update to the latest mbContainer release
diff --git a/moksha/widgets/container/static/css/mbContainer.css b/moksha/widgets/container/static/css/mbContainer.css
index 13125ab..c7ff84d 100644
--- a/moksha/widgets/container/static/css/mbContainer.css
+++ b/moksha/widgets/container/static/css/mbContainer.css
@@ -1,24 +1,48 @@
+/*
+* UI
+*/
+.ui-resizable { position: relative;}
+.ui-resizable-handle { position: absolute;font-size: 0.1px;z-index: 99999; display: block;}
+.ui-resizable-disabled .ui-resizable-handle, .ui-resizable-autohide .ui-resizable-handle { display: none; }
+.ui-resizable-n { cursor: n-resize; height: 7px; width: 100%; top: -5px; left: 0px; }
+.ui-resizable-s { cursor: s-resize; height: 7px; width: 100%; bottom: -5px; left: 0px; }
+.ui-resizable-e { cursor: e-resize; width: 7px; right: -5px; top: 0px; height: 100%; }
+.ui-resizable-w { cursor: w-resize; width: 7px; left: -5px; top: 0px; height: 100%; }
+.ui-resizable-se { cursor: se-resize; width: 12px; height: 12px; right: 1px; bottom: 1px; }
+.ui-resizable-sw { cursor: sw-resize; width: 9px; height: 9px; left: -5px; bottom: -5px; }
+.ui-resizable-nw { cursor: nw-resize; width: 9px; height: 9px; left: -5px; top: -5px; }
+.ui-resizable-ne { cursor: ne-resize; width: 9px; height: 9px; right: -5px; top: -5px;}
+
+
 .mb-resize{
-background:transparent !important;
-border:none !important;
-}
-.mb-resize.ui-resizable-se {
-width: 20px !important;
-height: 20px !important;
+  display:block;
+  background-color:transparent !important;
+  border:3px solid transparent !important;
+  *border:5px solid transparent !important;
+  margin:0 !important;
 }
+.mb-resize-resizable-n { cursor: e-resize; width: 100% !important; height: 50px !important; right: 0 !important; top: -10px !important; z-index:auto !important;}
+.mb-resize-resizable-e { cursor: e-resize; width: 20px !important; height: 100% !important; right: -20px !important; top: 0 !important;z-index:auto !important;}
+.mb-resize-resizable-w { cursor: e-resize; width: 20px !important; height: 100% !important; left: -20px !important; top: 0 !important;z-index:auto !important;}
+.mb-resize-resizable-s { cursor: s-resize; width: 100% !important; height: 10px !important; bottom: -10px !important; left: 0 !important;z-index:auto !important; }
+.mb-resize-resizable-se { cursor: se-resize; width: 30px !important; height: 30px !important; right:0 !important; bottom: -10px !important; z-index:auto !important;}
+
 .mbproxy{border:1px dotted gray;background: url(elements/proxy.png)}
-.containerPlus {font-family:Verdana; font-size:13px};
+.containerPlus {font-family:Verdana; font-size:13px; visibility:hidden;};
 .containerPlus .spacer {margin-top:10px}
 .containerPlus .buttonBar { position:relative;top:0;float:right;margin-top:-23px; margin-right:15px}
 .containerPlus .buttonBar img {margin-left:5px}
-.containerPlus .content {margin-right:-15px; padding-right:10px;overflow:auto;}
+.containerPlus .mbcontainercontent {margin-right:-15px; padding-right:10px;overflow:auto;}
+
+/*.iconLabel{font-family:sans-serif;font-size:10px;padding:5px;background:black;color:white;display:block;-moz-border-radius:5px;-webkit-border-radius:5px;}*/
+
+.iconLabel{font-family:sans-serif;font-size:10px;padding:5px;background:#f3f3f3;color:gray;display:block;-moz-border-radius:5px;-webkit-border-radius:5px;border:1px solid #ccc}
 /*
 * default
 */
-.containerPlus{}
 .containerPlus .no { background: url('elements/default/no.png') top left no-repeat; }
 .containerPlus .ne { background: url('elements/default/ne.png') top right no-repeat; margin-left: 21px;}
 .containerPlus .ne img {left:-10px; top:-10px}
@@ -47,8 +71,8 @@ height: 20px !important;
 .containerPlus.black .o { background: url('elements/black/o.png') top left repeat-y;}
 .containerPlus.black .e { background: url('elements/black/e.png') top right repeat-y; margin-left: 21px;}
 .containerPlus.black .c { background: url('elements/black/c.png'); margin-right: 25px; padding-top: 1px; padding-bottom: 1px; }
-.containerPlus.black .c .content {color:white;}
-.containerPlus.black .c .content a {color:white;}
+.containerPlus.black .c .mbcontainercontent {color:white;}
+.containerPlus.black .c .mbcontainercontent a {color:white;}
 .containerPlus.black .so { background: url('elements/black/so.png') bottom left no-repeat;}
 .containerPlus.black .se { background: url('elements/black/se.png') bottom right no-repeat; margin-left: 21px;}
@@ -65,7 +89,7 @@ height: 20px !important;
 .containerPlus.white .o { background: url('elements/white/o.png') top left repeat-y;}
 .containerPlus.white .e { background: url('elements/white/e.png') top right repeat-y; margin-left: 21px;}
 .containerPlus.white .c { background: url('elements/white/c.png'); margin-right: 25px; padding-top: 1px; padding-bottom: 1px; }
-.containerPlus.white .c .content {color:#000;}
+.containerPlus.white .c .mbcontainercontent {color:#000;}
 .containerPlus.white .n a{}
 .containerPlus.white .n a:hover{ text-decoration:underline}
@@ -85,7 +109,7 @@ height: 20px !important;
 .containerPlus.alert .o { background: url('elements/alert/o.png') top left repeat-y;}
 .containerPlus.alert .e { background: url('elements/alert/e.png') top right repeat-y; margin-left: 21px;}
 .containerPlus.alert .c { background: url('elements/alert/c.png'); margin-right: 25px; padding-top: 1px; padding-bottom: 1px; }
-.containerPlus.alert .c .content {color:#000;}
+.containerPlus.alert .c .mbcontainercontent {color:#000;}
 .containerPlus.alert .n a{}
 .containerPlus.alert .n a:hover{ text-decoration:underline}
diff --git a/moksha/widgets/container/static/js/mbContainer.js b/moksha/widgets/container/static/js/mbContainer.js
index 974d98a..79479dc 100644
--- a/moksha/widgets/container/static/js/mbContainer.js
+++ b/moksha/widgets/container/static/js/mbContainer.js
@@ -1,193 +1,436 @@
/*
* developed by Matteo Bicocchi on JQuery
- * © 2002-2009 Open Lab srl, Matteo Bicocchi
+ * © 2002-2009 Open Lab srl, Matteo Bicocchi
  * www.open-lab.com - info(a)open-lab.com
- * version 1.0.4
+ * version 2.2
* tested on: Explorer and FireFox for PC
* FireFox and Safari for Mac Os X
* FireFox for Linux
- * GPL (GPL-LICENSE.txt) licenses.
+ * MIT - GPL (GPL-LICENSE.txt) licenses.
*
* CONTAINERS BUILD WITH BLOCK ELEMENTS
*/
+(function($){
+ jQuery.fn.buildContainers = function (options){
+ return this.each (function ()
+ {
+ if ($(this).is("[inited=true]")) return;
+
+ this.options = {
+ containment:"document",
+ elementsPath:"elements/",
+ onCollapse:function(){},
+ onIconize:function(){},
+ onClose: function(){},
+ onResize: function(){},
+ onDrag: function(){},
+ onRestore:function(){},
+ minimizeEffect:"slide", //or "fade"
+ effectDuration:300
+ };
+
+ $.extend (this.options, options);
+ var container=$(this);
+
+ container.attr("inited","true");
+ container.attr("iconized","false");
+ container.attr("collapsed","false");
+ container.attr("closed","false");
+ container.attr("options",this.options);
+ container.css({position: "relative"});
+
+ if ($.metadata){
+ $.metadata.setType("class");
+ if (container.metadata().skin)
container.attr("skin",container.metadata().skin);
+ if (container.metadata().collapsed)
container.attr("collapsed",container.metadata().collapsed);
+ if (container.metadata().iconized)
container.attr("iconized",container.metadata().iconized);
+ if (container.metadata().icon)
container.attr("icon",container.metadata().icon);
+ if (container.metadata().buttons)
container.attr("buttons",container.metadata().buttons);
+ if (container.metadata().content)
container.attr("content",container.metadata().content); //ajax
+ if (container.metadata().aspectRatio)
container.attr("aspectRatio",container.metadata().aspectRatio); //ui.resize
+ if (container.metadata().grid)
container.attr("grid",container.metadata().grid); //ui.grid
+ if (container.metadata().gridx)
container.attr("gridx",container.metadata().gridx); //ui.grid
+ if (container.metadata().gridy)
container.attr("gridy",container.metadata().gridy); //ui.grid
+ if (container.metadata().handles)
container.attr("handles",container.metadata().handles); //ui.resize
+ if (container.metadata().dock)
container.attr("dock",container.metadata().dock);
+
+ if (container.metadata().width)
container.attr("width",container.metadata().width);
+ if (container.metadata().height)
container.attr("height",container.metadata().height);
+ }
+
+ if (container.attr("content"))
+ container.mb_changeContainerContent(container.attr("content"));
+
+ container.addClass(container.attr("skin"));
+
container.find(".n:first").attr("unselectable","on");
+ if (!container.find(".n:first").html())
container.find(".n:first").html(" ");
+ container.containerSetIcon(container.attr("icon"),
this.options.elementsPath);
+ if (container.attr("buttons"))
container.containerSetButtons(container.attr("buttons"),this.options);
+ container.css({width:"99.9%"});
+ if (container.attr("width")){
+ var cw= $.browser.msie?
container.attr("width"):container.attr("width")+"px";
+ container.css({width:cw});
+ }
+
+ if (container.attr("height")){
+ container.find(".c:first ,
.mbcontainercontent:first").css("height",container.attr("height")-container.find(".n:first").outerHeight()-(container.find(".s:first").outerHeight()));
+ }
+
+ if (container.hasClass("draggable")){
+ container.css({position:"absolute", margin:0});
+ container.find(".n:first").css({cursor:"move"});
+ container.mb_BringToFront();
+ container.draggable({
+ handle:".n:first",
+ delay:0,
+ containment:this.options.containment,
+ stop:function(){
+ var opt=$(this).attr("options");
+ if(opt.onDrag) opt.onDrag($(this));
+ }
+ });
+ if (container.attr("grid") || (container.attr("gridx")
&& container.attr("gridy"))){
+ var grid= container.attr("grid")?
[container.attr("grid"),container.attr("grid")]:[container.attr("gridx"),container.attr("gridy")];
+ container.draggable('option', 'grid', grid);
+ }
+ container.bind("mousedown",function(){
+ $(this).mb_BringToFront();
+ });
+ }
+ if (container.hasClass("resizable")){
+ container.containerResize();
+ }
+ if (container.attr("collapsed")=="true"){
+ container.attr("collapsed","false");
+ container.containerCollapse(this.options);
+ }
+ if (container.attr("iconized")=="true"){
+ container.attr("iconized","false");
+ container.containerIconize(this.options);
+ }
+ setTimeout(function(){
+ container.css("visibility","visible");
+ },500);
+ });
+ };
+
+ jQuery.fn.containerResize = function (){
+
+ var isDraggable=$(this).hasClass("draggable");
+ var handles=
$(this).attr("handles")?$(this).attr("handles"):"s";
+ var aspectRatio=
$(this).attr("aspectRatio")?$(this).attr("aspectRatio"):false;
+
+ $(this).resizable({
+ handles:isDraggable ? "":handles,
+ aspectRatio:aspectRatio,
+ minWidth: 350,
+ minHeight: 150,
+ iframeFix:true,
+ helper: "mbproxy",
+ start:function(e,o){
+ o.helper.mb_BringToFront();
+ },
+ stop:function(){
+ var resCont= $(this);//$.browser.msie || Opera ?o.helper:
+ var elHeight=
resCont.outerHeight()-$(this).find(".n:first").outerHeight()-($(this).find(".s:first").outerHeight());
+ $(this).find(".c:first , .mbcontainercontent:first").css({height:
elHeight});
+ if (!isDraggable && !$(this).attr("handles")){
+ var elWidth=$(this).attr("width") &&
$(this).attr("width")>0 ?$(this).attr("width"):"99.9%";
+ $(this).css({width: elWidth});
+ }
+ var opt=$(this).attr("options");
+ if(opt.onResize) opt.onResize($(this));
+ }
+ });
+
+ /*
+ *TO SOLVE UI CSS CONFLICT I REDEFINED A SPECIFIC CLASS FOR HANDLERS
+ */
+
+
$(this).find(".ui-resizable-n").addClass("mb-resize").addClass("mb-resize-resizable-n");
+
$(this).find(".ui-resizable-e").addClass("mb-resize").addClass("mb-resize-resizable-e");
+
$(this).find(".ui-resizable-w").addClass("mb-resize").addClass("mb-resize-resizable-w");
+
$(this).find(".ui-resizable-s").addClass("mb-resize").addClass("mb-resize-resizable-s");
+
$(this).find(".ui-resizable-se").addClass("mb-resize").addClass("mb-resize-resizable-se");
+
+ };
+
+ jQuery.fn.containerSetIcon = function (icon,path){
+ if (icon && icon!="" ){
+ $(this).find(".ne:first").prepend("<img class='icon'
src='"+path+"icons/"+icon+"'
style='position:absolute'/>");
+ $(this).find(".n:first").css({paddingLeft:25});
+ }else{
+ $(this).find(".n:first").css({paddingLeft:0});
+ }
+ };
+
+ jQuery.fn.containerSetButtons = function (buttons,opt){
+ if (!opt) opt=$(this).attr("options");
+ var path= opt.elementsPath;
+ var container=$(this);
+ if (buttons !=""){
+ var btn=buttons.split(",");
+ $(this).find(".ne:first").append("<div
class='buttonBar'></div>");
+ for (var i in btn){
+ if (btn[i]=="c"){
+ $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/close.png'
class='close'/>");
+ $(this).find(".close:first").bind("click",function(){
+ if (!$.browser.msie) container.fadeOut(opt.effectDuration);
+ else container.hide();
+ container.attr("closed","true");
+ if (opt.onClose) opt.onClose(container);
+ });
+ }
+ if (btn[i]=="m"){
+ $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/min.png'
class='collapsedContainer'/>");
+
$(this).find(".collapsedContainer:first").bind("click",function(){container.containerCollapse(opt);});
+
$(this).find(".n:first").bind("dblclick",function(){container.containerCollapse(opt);});
+ }
+ if (btn[i]=="p"){
+ $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/print.png'
class='printContainer'/>");
+
$(this).find(".printContainer:first").bind("click",function(){});
+ }
+ if (btn[i]=="i"){
+ $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/iconize.png'
class='iconizeContainer'/>");
+
$(this).find(".iconizeContainer:first").bind("click",function(){container.containerIconize(opt);});
+ }
+ }
+ var fadeOnClose=$.browser.mozilla || $.browser.safari;
+ if (fadeOnClose) $(this).find(".buttonBar:first img")
+ .css({opacity:.5, cursor:"pointer","mozUserSelect":
"none", "khtmlUserSelect": "none"})
+ .mouseover(function(){$(this).fadeTo(200,1);})
+ .mouseout(function(){if (fadeOnClose)$(this).fadeTo(200,.5);});
+ $(this).find(".buttonBar:first
img").attr("unselectable","on");
+ }
+ };
+
+ jQuery.fn.containerCollapse = function (opt){
+ this.each (function () {
+ if (!opt) opt=$(this).attr("options");
+ var container=$(this);
+ if ($(this).attr("collapsed")=="false"){
+ container.attr("w" , container.outerWidth());
+ container.attr("h" , container.outerHeight());
+ if (opt.minimizeEffect=="fade")
+ container.find(".o:first").fadeOut(opt.effectDuration,function(){});
+ else{
+ container.find(".icon:first").hide();
+ container.find(".o:first").slideUp(opt.effectDuration,function(){});
+
container.animate({height:container.find(".n:first").outerHeight()+container.find(".s:first").outerHeight()},opt.effectDuration,function(){container.find(".icon:first").show();});
+ }
+ container.attr("collapsed","true");
+
container.find(".collapsedContainer:first").attr("src",opt.elementsPath+$(this).attr('skin')+"/max.png");
+ container.resizable("disable");
+ if (opt.onCollapse) opt.onCollapse(container);
+ }else{
+ if (opt.minimizeEffect=="fade")
+ container.find(".o:first").fadeIn(opt.effectDuration,function(){});
+ else{
+
container.find(".o:first").slideDown(opt.effectDuration,function(){});
+ container.find(".icon:first").hide();
+
container.animate({height:container.attr("h")},opt.effectDuration,function(){container.find(".icon:first").show();});
+ }
+ if (container.hasClass("resizable"))
container.resizable("enable");
+ container.attr("collapsed","false");
+
container.find(".collapsedContainer:first").attr("src",opt.elementsPath+$(this).attr('skin')+"/min.png");
+
container.find(".mbcontainercontent:first").css("overflow","auto");
+ }
+ });
+ };
+
+ jQuery.fn.containerIconize = function (opt){
+ if (!opt) opt=$(this).attr("options");
+ return this.each (function ()
+ {
+ var container=$(this);
+ container.attr("iconized","true");
+ if(container.attr("collapsed")=="false"){
+ container.attr("h",container.outerHeight());
+ }
+ container.attr("w",container.attr("width") &&
container.attr("width")>0 ? (!container.hasClass("resizable")?
container.attr("width"):container.width()):!$(this).attr("handles")?"99.9%":container.width());
+ container.attr("t",container.css("top"));
+ container.attr("l",container.css("left"));
+ container.resizable("disable");
+ var l=0;
+ var t= container.css("top");
+ var dockPlace= container;
+ if (container.attr("dock")){
+ dockPlace = $("#"+container.attr("dock"));
+ var icns= dockPlace.find("img").size();
+ l=$("#"+container.attr("dock")).offset().left+(32*icns);
+ t=$("#"+container.attr("dock")).offset().top;
+ };
+ /*
+ ICONIZING CONTAINER
+ */
+ this.dockIcon= $("<img
src='"+opt.elementsPath+"icons/"+(container.attr("icon")?container.attr("icon"):"restore.png")+"'
class='restoreContainer' width='32'/>").appendTo(dockPlace)
+ .css("cursor","pointer")
+ .hide()
+ .attr("contTitle",container.find(".n:first").html())
+ .bind("click",function(){
+
+
+ container.attr("iconized","false");
+ if (container.is(".draggable"))
+ container.css({top:$(this).offset().top, left:$(this).offset().left});
+ else
+ container.css({left:"auto",top:"auto"});
+ container.show();
+
+ if (!$.browser.msie) {
+ container.find(".no:first").fadeIn("fast");
+ if(container.attr("collapsed")=="false"){
+ container.animate({height:container.attr("h"),
width:container.attr("w"),left:container.attr("l"),top:container.attr("t")},opt.effectDuration,function(){
+
container.find(".mbcontainercontent:first").css("overflow","auto");
+ if(container.hasClass("draggable")) {
+ container.mb_BringToFront();
+ }
+ });
+ container.find(".c:first ,
.mbcontainercontent:first").css("height",container.attr("h")-container.find(".n:first").outerHeight()-(container.find(".s:first").outerHeight()));
+ }
+ else
+ container.animate({height:"60px",
width:container.attr("w"),
left:container.attr("l"),top:container.attr("t")},opt.effectDuration);
+ } else {
+ container.find(".no:first").show();
+ if(container.attr("collapsed")=="false"){
+ container.css({height:container.attr("h"),
width:container.attr("w"),left:container.attr("l"),top:container.attr("t")},opt.effectDuration);
+ container.find(".c:first ,
.mbcontainercontent:first").css("height",container.attr("h")-container.find(".n:first").outerHeight()-(container.find(".s:first").outerHeight()));
+ }
+ else
+ container.css({height:"60px",
width:container.attr("w"),left:container.attr("l"),top:container.attr("t")},opt.effectDuration);
+ }
+ if (container.hasClass("resizable") &&
container.attr("collapsed")=="false")
container.resizable("enable");
+;
+ $(this).remove();
+ if(container.hasClass("draggable")) container.mb_BringToFront();
+ $(".iconLabel").remove();
+ if(opt.onRestore) opt.onRestore(container);
+ })
+ .bind("mouseenter",function(){
+ var label="<div
class='iconLabel'>"+$(this).attr("contTitle")+"</div>";
+ $("body").append(label);
+ $(".iconLabel").hide().css({
+ position:"absolute",
+ top:$(this).offset().top-15,
+ left:$(this).offset().left+15,
+ opacity:.9
+ }).fadeIn("slow").mb_BringToFront();
+ })
+ .bind("mouseleave",function(){
+ $(".iconLabel").remove();
+ });
+
+
+ if (!$.browser.msie) {
+
container.find(".mbcontainercontent:first").css("overflow","hidden");
+ container.find(".no:first").slideUp("fast");
+ container.animate({ height:"32px",
width:"32px",left:l,top:t},opt.effectDuration,function(){
+ $(this.dockIcon).show();
+ if (container.attr("dock")) container.hide();
+ });
+ }else{
+ container.find(".no:first").hide();
+ container.css({ height:"32px", width:"32px",left:l,top:t});
+ $(this.dockIcon).show();
+ if (container.attr("dock")) container.hide();
+ }
+ if (opt.onIconize) opt.onIconize(container);
+ });
+ };
+
+ jQuery.fn.mb_resizeTo = function (h,w){
+ if (!w) w=$(this).outerWidth();
+ if (!h) h=$(this).outerHeight();
+ $(this).animate({"height":h,"width":w},500,function(){
+ var elHeight=
$(this).outerHeight()-$(this).find(".n:first").outerHeight()-($(this).find(".s:first").outerHeight());
+ $(this).find(".c:first , .mbcontainercontent:first").animate({height:
elHeight});
+ });
+ };
+
+ jQuery.fn.mb_iconize = function (){
+ if ($(this).attr("closed")=="false"){
+ if ($(this).attr("iconized")=="true"){
+ var icon=$(this)[0].dockIcon;
+ $(icon).click();
+ $(this).mb_BringToFront();
+ }else{
+ $(this).containerIconize();
+ }
+ }
+ };
+
+ jQuery.fn.mbOpenBox = function (url,data){
+ if ($(this).attr("closed")=="true"){
+ if (!data) data="";
+ if (url){
+ $(this).mb_changeContainerContent(url,data);
+ }
+ if (!$.browser.msie) $(this).fadeIn(300);
+ else $(this).show();
+ $(this).attr("closed","false");
+ $(this).mb_BringToFront();
+ }
+ };
+
+ jQuery.fn.mbCloseBox = function (){
+ if ($(this).attr("closed")=="false"){
+ $(this).find(".close:first").click();
+ }
+ };
+
+ jQuery.fn.mb_toggle = function (){
+ if ($(this).attr("closed")=="false" &&
$(this).attr("iconized")=="false"){
+ $(this).containerCollapse();
+ }
+ };
+
+ jQuery.fn.mb_BringToFront= function(){
+ var zi=10;
+ $('*').each(function() {
+ if($(this).css("position")=="absolute"){
+ var cur = parseInt($(this).css('zIndex'));
+ zi = cur > zi ? parseInt($(this).css('zIndex')) : zi;
+ }
+ });
+ $(this).css('zIndex',zi+=1);
+ };
+
+ jQuery.fn.mb_changeContent= function(url, data){
+ var where=$(this);
+ if (!data) data="";
+ $.ajax({
+ type: "POST",
+ url: url,
+ data: data,
+ success: function(html){
+ where.html(html);
+ }
+ });
+ };
+
+ jQuery.fn.mb_changeContainerContent=function(url, data){
+ $(this).find(".mbcontainercontent:first").mb_changeContent(url,data);
+ };
+
+ jQuery.fn.mb_getState= function(attr){
+ var state = $(this).attr(attr);
+ state= state == "true";
+ return state;
+ };
+
+ jQuery.fn.mb_fullscreen= function(){
+ //
console.log(!$(this).is(".draggable"),$(this).is("[iconized='true']"),$(this).is("[collapsed='true']"))
+ if (!$(this).is(".draggable") ||
$(this).is("[iconized='true']") ||
$(this).is("[collapsed='true']")) return;
+
+ $(this).attr("w",$(this).width());
+ $(this).attr("h",$(this).height());
+ $(this).attr("t",$(this).css("top"));
+ $(this).attr("l",$(this).css("left"));
+
+ $(this).animate({top:10,left:10, position:"relative"});
+ $(this).mb_resizeTo("98%", "98%");
+ };
+
+})(jQuery);
-var msie6=$.browser.msie && $.browser.version=="6.0";
-var Opera=$.browser.opera;
-var zi=100;
-jQuery.fn.buildContainers = function (options){
- return this.each (function ()
- {
- if ($(this).is("[inited=true]")) return;
-
- this.options = {
- containment:"document",
- elementsPath:"elements/"
- }
- $.extend (this.options, options);
- var container=$(this);
- container.attr("inited","true");
- container.addClass(container.attr("skin"));
- if (!container.attr("minimized"))
container.attr("minimized","false");
- container.find(".n:first").attr("unselectable","on");
- if (!container.find(".n:first").html())
container.find(".n:first").html(" ")
- var
icon=container.attr("icon")?container.attr("icon"):"";
- var
buttons=container.attr("buttons")?container.attr("buttons"):"";
- container.setIcon(icon, this.options.elementsPath);
- container.setButtons(buttons,this.options.elementsPath);
- if (container.attr("width")){
- container.css({width:container.attr("width")+"px"});
- }
-
- if (container.attr("height")){
- container.find(".c:first ,
.content:first").css("height",container.attr("height")-container.find(".n:first").outerHeight()-(container.find(".s:first").outerHeight()));
- }
-
- if (container.hasClass("draggable")){
- container.css({position:"absolute", margin:0});
- container.find(".n:first").css({cursor:"move"});
- container.css({zIndex:zi++});
-
- container.draggable({handle:".n:first",cancel:".c",delay:0,
containment:this.options.containment});
- container.mousedown(function(){
- $(this).css({zIndex:zi++});
- });
- }
- if (container.hasClass("resizable")){
- container.containerResize();
- }
- if (container.attr("minimized")=="true"){
- container.attr("minimized","false");
- container.minimize(this.options.elementsPath);
- }
- if (container.attr("iconized")=="true"){
- container.attr("iconized","false");
- container.iconize();
- }
-
- });
-}
-jQuery.fn.containerResize = function (){
- var isDraggable=$(this).hasClass("draggable");
- $(this).resizable({
- handles:isDraggable ? "":"s",
- minWidth: 150,
- minHeight: 150,
- iframeFix:true,
- helper: "mbproxy",
- stop:function(e,o){
- var resCont= msie6 || Opera ?o.helper:$(this);
- var elHeight=
resCont.outerHeight()-$(this).find(".n:first").outerHeight()-($(this).find(".s:first").outerHeight());
- $(this).find(".c:first , .content:first").css({height: elHeight});
-
- if (!isDraggable){
-
- var elWidth=$(this).attr("width") &&
$(this).attr("width")>0 ?$(this).attr("width"):"100%";
- $(this).css({width: elWidth});
- }
- }
- });
- $(this).find(".ui-resizable-s").addClass("mb-resize");
- $(this).find(".ui-resizable-se").addClass("mb-resize");
- $(this).find(".ui-resizable-w").addClass("mb-resize");
- $(this).find(".ui-resizable-e").addClass("mb-resize");
-
-}
-jQuery.fn.setIcon = function (icon,path){
- if (icon !="" ){
- $(this).find(".ne:first").prepend("<img class='icon'
src='"+path+"icons/"+icon+"'
style='position:absolute'>");
- $(this).find(".n:first").css({paddingLeft:15});
- }else{
- $(this).find(".n:first").css({paddingLeft:0});
- }
-}
-jQuery.fn.setButtons = function (buttons,path){
- var container=$(this);
- if (buttons !=""){
- var btn=buttons.split(",");
- $(this).find(".ne:first").append("<div
class='buttonBar'></div>");
- for (var i in btn){
- if (btn[i]=="c"){
- $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/close.png'
class='close'>");
- $(this).find(".close:first").bind("click",function(){container.fadeOut(200)});
- }
- if (btn[i]=="m"){
- $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/min.png'
class='minimizeContainer'>");
- $(this).find(".minimizeContainer:first").bind("click",function(){container.minimize(path)});
- $(this).find(".n:first").bind("dblclick",function(){container.minimize(path)});
- }
- if (btn[i]=="p"){
- $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/print.png'
class='printContainer'>");
- $(this).find(".printContainer:first").bind("click",function(){});
- }
- if (btn[i]=="i"){
- $(this).find(".buttonBar:first").append("<img
src='"+path+$(this).attr('skin')+"/iconize.png'
class='iconizeContainer'>");
- $(this).find(".iconizeContainer:first").bind("click",function(){container.iconize()});
- }
- }
- var fadeOnClose=$.browser.mozilla || $.browser.safari;
- $(this).find(".buttonBar:first img").css({opacity:.5,
cursor:"pointer"}).mouseover(function(){if
(fadeOnClose)$(this).fadeTo(200,1)}).mouseout(function(){if
(fadeOnClose)$(this).fadeTo(200,.5)});
- }
-}
-jQuery.fn.minimize = function (path){
- this.each (function ()
- {
- var container=$(this);
- if ($(this).attr("minimized")=="false"){
- this.w = container.outerWidth();
- this.h = container.outerHeight();
- container.find(".icon:first").hide();
- container.find(".o:first").slideUp(100,function(){});
- container.animate({height:container.find(".n:first").outerHeight()+container.find(".s:first").outerHeight()},100,function(){container.find(".icon:first").show()});
- container.attr("minimized","true");
- container.find(".minimizeContainer:first").attr("src",path+$(this).attr('skin')+"/max.png");
- container.resizable("destroy");
- }else{
- container.find(".o:first").slideDown(100,function(){});
- if (container.hasClass("resizable")) container.containerResize();
- container.attr("minimized","false");
- container.find(".icon:first").hide();
- container.animate({height:this.h},100,function(){container.find(".icon:first").show()});
- container.find(".minimizeContainer:first").attr("src",path+$(this).attr('skin')+"/min.png");
-
- }
- })
-}
-jQuery.fn.iconize = function (){
- return this.each (function ()
- {
- var container=$(this);
- if(container.attr("minimized")=="false"){
- container.attr("w",container.attr("width") &&
container.attr("width")>0 ? (!container.hasClass("resizable")?
container.attr("width"):container.width()):"100%");
- container.attr("h",container.height());
- }
- container.attr("t",container.css("top"));
- container.attr("l",container.css("left"));
- container.resizable("destroy");
- if (!$.browser.msie) {
- container.find(".no:first").fadeOut("fast");
- container.animate({ height:"32px", width:"32px",left:0},200);
- }else{
- container.find(".no:first").hide();
- container.css({ height:"32px", width:"32px",left:0});
- }
- container.append("<img
src='elements/icons/"+(container.attr("icon")?container.attr("icon"):"restore.png")+"'
class='restoreContainer' width='32'>");
- container.find(".restoreContainer:first").bind("click",function(){
- if (!$.browser.msie) {
- container.find(".no:first").fadeIn("fast");
- if(container.attr("minimized")=="false")
- container.animate({height:container.attr("h"),
width:container.attr("w"),left:container.attr("l")},200);
- else
- container.animate({height:"60px",
width:container.attr("w"),left:container.attr("l")},200);
-
- container.find(".c:first ,
.content:first").css("height",container.attr("h")-container.find(".n:first").outerHeight()-(container.find(".s:first").outerHeight()));
- } else {
- container.find(".no:first").show();
- container.css({
width:container.attr("w"),left:container.attr("l")});
- container.find(".c:first ,
.content:first").css("height",container.attr("h")-container.find(".n:first").outerHeight()-(container.find(".s:first").outerHeight()));
- }
- container.find(".restoreContainer:first").remove();
- if (container.hasClass("resizable") &&
container.attr("minimized")=="false") container.containerResize();
- });
- });
-}
\ No newline at end of file
diff --git a/moksha/widgets/container/static/js/mbContainer.min.js b/moksha/widgets/container/static/js/mbContainer.min.js
index 007f733..7946723 100644
--- a/moksha/widgets/container/static/js/mbContainer.min.js
+++ b/moksha/widgets/container/static/js/mbContainer.min.js
@@ -1 +1 @@
-var msie6=$.browser.msie&&$.browser.version=="6.0";var
Opera=$.browser.opera;var zi=100;jQuery.fn.buildContainers=function(A){return
this.each(function(){if($(this).is("[inited=true]")){return
}this.options={containment:"document",elementsPath:"elements/"};$.extend(this.options,A);var
B=$(this);B.attr("inited","true");B.addClass(B.attr("skin"));if(!B.attr("minimized")){B.attr("minimized","false")}B.find(".n:first").attr("unselectable","on");if(!B.find(".n:first").html()){B.find(".n:first").html(" ")}var
C=B.attr("icon")?B.attr("icon"):"";var
D=B.attr("buttons")?B.attr("buttons"):"";B.setIcon(C,this.options.elementsPath);B.setButtons(D,this.options.elementsPath);if(B.attr("width")){B.css({width:B.attr("width")+"px"})}if(B.attr("height")){B.find(".c:first
,
.content:first").css("height",B.attr("height")-B.find(".n:first").outerHeight()-(B.find(".s:first").outerHeight()))}if(B.hasClass("draggable")){B.css({position:"absolute",margin:0});B.find(".n:first").css({cursor:"move"});B.css({zIndex:zi++});B.draggable({handle:".n:first",cancel:".c",delay:0,containment:this.options.containment});B.mousedown(function(){$(this).css({zIndex:zi++})})}if(B.hasClass("resizable")){B.containerResize()}if(B.attr("minimized")=="true"){B.attr("minimized","false");B.minimize(this.options.elementsPath)}if(B.attr("iconized")=="true"){B.attr("iconized","false");B.iconize()}})};jQuery.fn.containerResize=function(){var
A=$(this).hasClass("draggable");$(this).resizable({handles:A?"":"s",minWidth:150,minHeight:150,iframeFix:true,helper:"mbproxy",stop:function(E,F){var
D=msie6||Opera?F.helper:$(this);var
B=D.outerHeight()-$(this).find(".n:first").outerHeight()-($(this).find(".s:first").outerHeight());$(this).find(".c:first
, .content:first").css({height:B});if(!A){var
C=$(this).attr("width")&&$(this).attr("width")>0?$(this).attr("width"):"100%";$(this).css({width:C})}}});$(this).find(".ui-resizable-s").addClass("mb-resize");$(this).find(".ui-resizable-se").addClass("mb-resize");$(this).find(".ui-resizable-w").addClass("mb-resize");$(this).find(".ui-resizable-e").addClass("mb-resize")};jQuery.fn.setIcon=function(A,B){if(A!=""){$(this).find(".ne:first").prepend("<img
class='icon' src='"+B+"icons/"+A+"'
style='position:absolute'>");$(this).find(".n:first").css({paddingLeft:15})}else{$(this).find(".n:first").css({paddingLeft:0})}};jQuery.fn.setButtons=function(E,F){var
A=$(this);if(E!=""){var
D=E.split(",");$(this).find(".ne:first").append("<div
class='buttonBar'></div>");for(var C in
D){if(D[C]=="c"){$(this).find(".buttonBar:first").append("<img
src='"+F+$(this).attr("skin")+"/close.png'
class='close'>");$(this).find(".close:first").bind("click",function(){A.fadeOut(200)})}if(D[C]=="m"){$(this).find(".buttonBar:first").append("<img
src='"+F+$(this).attr("skin")+"/min.png'
class='minimizeContainer'>");$(this).find(".minimizeContainer:first").bind("click",function(){A.minimize(F)});$(this).find(".n:first").bind("dblclick",function(){A.minimize(F)})}if(D[C]=="p"){$(this).find(".buttonBar:first").append("<img
src='"+F+$(this).attr("skin")+"/print.png'
class='printContainer'>");$(this).find(".printContainer:first").bind("click",function(){})}if(D[C]=="i"){$(this).find(".buttonBar:first").append("<img
src='"+F+$(this).attr("skin")+"/iconize.png'
class='iconizeContainer'>");$(this).find(".iconizeContainer:first").bind("click",function(){A.iconize()})}}var
B=$.browser.mozilla||$.browser.safari;$(this).find(".buttonBar:first
img").css({opacity:0.5,cursor:"pointer"}).mouseover(function(){if(B){$(this).fadeTo(200,1)}}).mouseout(function(){if(B){$(this).fadeTo(200,0.5)}})}};jQuery.fn.minimize=function(A){this.each(function(){var
B=$(this);if($(this).attr("minimized")=="false"){this.w=B.outerWidth();this.h=B.outerHeight();B.find(".icon:first").hide();B.find(".o:first").slideUp(100,function(){});B.animate({height:B.find(".n:first").outerHeight()+B.find(".s:first").outerHeight()},100,function(){B.find(".icon:first").show()});B.attr("minimized","true");B.find(".minimizeContainer:first").attr("src",A+$(this).attr("skin")+"/max.png");B.resizable("destroy")}else{B.find(".o:first").slideDown(100,function(){});if(B.hasClass("resizable")){B.containerResize()}B.attr("minimized","false");B.find(".icon:first").hide();B.animate({height:this.h},100,function(){B.find(".icon:first").show()});B.find(".minimizeContainer:first").attr("src",A+$(this).attr("skin")+"/min.png")}})};jQuery.fn.iconize=function(){return
this.each(function(){var
A=$(this);if(A.attr("minimized")=="false"){A.attr("w",A.attr("width")&&A.attr("width")>0?(!A.hasClass("resizable")?A.attr("width"):A.width()):"100%");A.attr("h",A.height())}A.attr("t",A.css("top"));A.attr("l",A.css("left"));A.resizable("destroy");if(!$.browser.msie){A.find(".no:first").fadeOut("fast");A.animate({height:"32px",width:"32px",left:0},200)}else{A.find(".no:first").hide();A.css({height:"32px",width:"32px",left:0})}A.append("<img
src='elements/icons/"+(A.attr("icon")?A.attr("icon"):"restore.png")+"'
class='restoreContainer'
width='32'>");A.find(".restoreContainer:first").bind("click",function(){if(!$.browser.msie){A.find(".no:first").fadeIn("fast");if(A.attr("minimized")=="false"){A.animate({height:A.attr("h"),width:A.attr("w"),left:A.attr("l")},200)}else{A.animate({height:"60px",width:A.attr("w"),left:A.attr("l")},200)}A.find(".c:first
,
.content:first").css("height",A.attr("h")-A.find(".n:first").outerHeight()-(A.find(".s:first").outerHeight()))}else{A.find(".no:first").show();A.css({width:A.attr("w"),left:A.attr("l")});A.find(".c:first
,
.content:first").css("height",A.attr("h")-A.find(".n:first").outerHeight()-(A.find(".s:first").outerHeight()))}A.find(".restoreContainer:first").remove();if(A.hasClass("resizable")&&A.attr("minimized")=="false"){A.containerResize()}})})};
\ No newline at end of file
+(function(a){jQuery.fn.buildContainers=function(b){return
this.each(function(){if(a(this).is("[inited=true]")){return}this.options={containment:"document",elementsPath:"elements/",onCollapse:function(){},onIconize:function(){},onClose:function(){},onResize:function(){},onDrag:function(){},onRestore:function(){},minimizeEffect:"slide",effectDuration:300};a.extend(this.options,b);var
d=a(this);d.attr("inited","true");d.attr("iconized","false");d.attr("collapsed","false");d.attr("closed","false");d.attr("options",this.options);d.css({position:"relative"});if(a.metadata){a.metadata.setType("class");if(d.metadata().skin){d.attr("skin",d.metadata().skin)}if(d.metadata().collapsed){d.attr("collapsed",d.metadata().collapsed)}if(d.metadata().iconized){d.attr("iconized",d.metadata().iconized)}if(d.metadata().icon){d.attr("icon",d.metadata().icon)}if(d.metadata().buttons){d.attr("buttons",d.metadata().buttons)}if(d.metadata().content){d.attr("content",d.metadata().content)}if(d.metadata().aspectRatio){d.attr("aspectRatio",d.metadata().aspectRatio)}if(d.metadata().grid){d.attr("grid",d.metadata().grid)}if(d.metadata().gridx){d.attr("gridx",d.metadata().gridx)}if(d.metadata().gridy){d.attr("gridy",d.metadata().gridy)}if(d.metadata().handles){d.attr("handles",d.metadata().handles)}if(d.metadata().dock){d.attr("dock",d.metadata().dock)}if(d.metadata().width){d.attr("width",d.metadata().width)}if(d.metadata().height){d.attr("height",d.metadata().height)}}if(d.attr("content")){d.mb_changeContainerContent(d.attr("content"))}d.addClass(d.attr("skin"));d.find(".n:first").attr("unselectable","on");if(!d.find(".n:first").html()){d.find(".n:first").html(" ")}d.containerSetIcon(d.attr("icon"),this.options.elementsPath);if(d.attr("buttons")){d.containerSetButtons(d.attr("buttons"),this.options)}d.css({width:"99.9%"});if(d.attr("width")){var
c=a.browser.msie?d.attr("width"):d.attr("width")+"px";d.css({width:c})}if(d.attr("height")){d.find(".c:first
,
.mbcontainercontent:first").css("height",d.attr("height")-d.find(".n:first").outerHeight()-(d.find(".s:first").outerHeight()))}if(d.hasClass("draggable")){d.css({position:"absolute",margin:0});d.find(".n:first").css({cursor:"move"});d.mb_BringToFront();d.draggable({handle:".n:first",delay:0,containment:this.options.containment,stop:function(){var
f=a(this).attr("options");if(f.onDrag){f.onDrag(a(this))}}});if(d.attr("grid")||(d.attr("gridx")&&d.attr("gridy"))){var
e=d.attr("grid")?[d.attr("grid"),d.attr("grid")]:[d.attr("gridx"),d.attr("gridy")];d.draggable("option","grid",e)}d.bind("mousedown",function(){a(this).mb_BringToFront()})}if(d.hasClass("resizable")){d.containerResize()}if(d.attr("collapsed")=="true"){d.attr("collapsed","false");d.containerCollapse(this.options)}if(d.attr("iconized")=="true"){d.attr("iconized","false");d.containerIconize(this.options)}setTimeout(function(){d.css("visibility","visible")},500)})};jQuery.fn.containerResize=function(){var
c=a(this).hasClass("draggable");var
b=a(this).attr("handles")?a(this).attr("handles"):"s";var
d=a(this).attr("aspectRatio")?a(this).attr("aspectRatio"):false;a(this).resizable({handles:c?"":b,aspectRatio:d,minWidth:350,minHeight:150,iframeFix:true,helper:"mbproxy",start:function(f,g){g.helper.mb_BringToFront()},stop:function(){var
h=a(this);var
e=h.outerHeight()-a(this).find(".n:first").outerHeight()-(a(this).find(".s:first").outerHeight());a(this).find(".c:first
,
.mbcontainercontent:first").css({height:e});if(!c&&!a(this).attr("handles")){var
g=a(this).attr("width")&&a(this).attr("width")>0?a(this).attr("width"):"99.9%";a(this).css({width:g})}var
f=a(this).attr("options");if(f.onResize){f.onResize(a(this))}}});a(this).find(".ui-resizable-n").addClass("mb-resize").addClass("mb-resize-resizable-n");a(this).find(".ui-resizable-e").addClass("mb-resize").addClass("mb-resize-resizable-e");a(this).find(".ui-resizable-w").addClass("mb-resize").addClass("mb-resize-resizable-w");a(this).find(".ui-resizable-s").addClass("mb-resize").addClass("mb-resize-resizable-s");a(this).find(".ui-resizable-se").addClass("mb-resize").addClass("mb-resize-resizable-se")};jQuery.fn.containerSetIcon=function(b,c){if(b&&b!=""){a(this).find(".ne:first").prepend("<img
class='icon' src='"+c+"icons/"+b+"'
style='position:absolute'/>");a(this).find(".n:first").css({paddingLeft:25})}else{a(this).find(".n:first").css({paddingLeft:0})}};jQuery.fn.containerSetButtons=function(g,f){if(!f){f=a(this).attr("options")}var
h=f.elementsPath;var b=a(this);if(g!=""){var
e=g.split(",");a(this).find(".ne:first").append("<div
class='buttonBar'></div>");for(var d in
e){if(e[d]=="c"){a(this).find(".buttonBar:first").append("<img
src='"+h+a(this).attr("skin")+"/close.png'
class='close'/>");a(this).find(".close:first").bind("click",function(){if(!a.browser.msie){b.fadeOut(f.effectDuration)}else{b.hide()}b.attr("closed","true");if(f.onClose){f.onClose(b)}})}if(e[d]=="m"){a(this).find(".buttonBar:first").append("<img
src='"+h+a(this).attr("skin")+"/min.png'
class='collapsedContainer'/>");a(this).find(".collapsedContainer:first").bind("click",function(){b.containerCollapse(f)});a(this).find(".n:first").bind("dblclick",function(){b.containerCollapse(f)})}if(e[d]=="p"){a(this).find(".buttonBar:first").append("<img
src='"+h+a(this).attr("skin")+"/print.png'
class='printContainer'/>");a(this).find(".printContainer:first").bind("click",function(){})}if(e[d]=="i"){a(this).find(".buttonBar:first").append("<img
src='"+h+a(this).attr("skin")+"/iconize.png'
class='iconizeContainer'/>");a(this).find(".iconizeContainer:first").bind("click",function(){b.containerIconize(f)})}}var
c=a.browser.mozilla||a.browser.safari;if(c){a(this).find(".buttonBar:first
img").css({opacity:0.5,cursor:"pointer",mozUserSelect:"none",khtmlUserSelect:"none"}).mouseover(function(){a(this).fadeTo(200,1)}).mouseout(function(){if(c){a(this).fadeTo(200,0.5)}})}a(this).find(".buttonBar:first
img").attr("unselectable","on")}};jQuery.fn.containerCollapse=function(b){this.each(function(){if(!b){b=a(this).attr("options")}var
c=a(this);if(a(this).attr("collapsed")=="false"){c.attr("w",c.outerWidth());c.attr("h",c.outerHeight());if(b.minimizeEffect=="fade"){c.find(".o:first").fadeOut(b.effectDuration,function(){})}else{c.find(".icon:first").hide();c.find(".o:first").slideUp(b.effectDuration,function(){});c.animate({height:c.find(".n:first").outerHeight()+c.find(".s:first").outerHeight()},b.effectDuration,function(){c.find(".icon:first").show()})}c.attr("collapsed","true");c.find(".collapsedContainer:first").attr("src",b.elementsPath+a(this).attr("skin")+"/max.png");c.resizable("disable");if(b.onCollapse){b.onCollapse(c)}}else{if(b.minimizeEffect=="fade"){c.find(".o:first").fadeIn(b.effectDuration,function(){})}else{c.find(".o:first").slideDown(b.effectDuration,function(){});c.find(".icon:first").hide();c.animate({height:c.attr("h")},b.effectDuration,function(){c.find(".icon:first").show()})}if(c.hasClass("resizable")){c.resizable("enable")}c.attr("collapsed","false");c.find(".collapsedContainer:first").attr("src",b.elementsPath+a(this).attr("skin")+"/min.png");c.find(".mbcontainercontent:first").css("overflow","auto")}})};jQuery.fn.containerIconize=function(b){if(!b){b=a(this).attr("options")}return
this.each(function(){var
d=a(this);d.attr("iconized","true");if(d.attr("collapsed")=="false"){d.attr("h",d.outerHeight())}d.attr("w",d.attr("width")&&d.attr("width")>0?(!d.hasClass("resizable")?d.attr("width"):d.width()):!a(this).attr("handles")?"99.9%":d.width());d.attr("t",d.css("top"));d.attr("l",d.css("left"));d.resizable("disable");var
c=0;var e=d.css("top");var
f=d;if(d.attr("dock")){f=a("#"+d.attr("dock"));var
g=f.find("img").size();c=a("#"+d.attr("dock")).offset().left+(32*g);e=a("#"+d.attr("dock")).offset().top}this.dockIcon=a("<img
src='"+b.elementsPath+"icons/"+(d.attr("icon")?d.attr("icon"):"restore.png")+"'
class='restoreContainer'
width='32'/>").appendTo(f).css("cursor","pointer").hide().attr("contTitle",d.find(".n:first").html()).bind("click",function(){d.attr("iconized","false");if(d.is(".draggable")){d.css({top:a(this).offset().top,left:a(this).offset().left})}else{d.css({left:"auto",top:"auto"})}d.show();if(!a.browser.msie){d.find(".no:first").fadeIn("fast");if(d.attr("collapsed")=="false"){d.animate({height:d.attr("h"),width:d.attr("w"),left:d.attr("l"),top:d.attr("t")},b.effectDuration,function(){d.find(".mbcontainercontent:first").css("overflow","auto");if(d.hasClass("draggable")){d.mb_BringToFront()}});d.find(".c:first
,
.mbcontainercontent:first").css("height",d.attr("h")-d.find(".n:first").outerHeight()-(d.find(".s:first").outerHeight()))}else{d.animate({height:"60px",width:d.attr("w"),left:d.attr("l"),top:d.attr("t")},b.effectDuration)}}else{d.find(".no:first").show();if(d.attr("collapsed")=="false"){d.css({height:d.attr("h"),width:d.attr("w"),left:d.attr("l"),top:d.attr("t")},b.effectDuration);d.find(".c:first
,
.mbcontainercontent:first").css("height",d.attr("h")-d.find(".n:first").outerHeight()-(d.find(".s:first").outerHeight()))}else{d.css({height:"60px",width:d.attr("w"),left:d.attr("l"),top:d.attr("t")},b.effectDuration)}}if(d.hasClass("resizable")&&d.attr("collapsed")=="false"){d.resizable("enable")}a(this).remove();if(d.hasClass("draggable")){d.mb_BringToFront()}a(".iconLabel").remove();if(b.onRestore){b.onRestore(d)}}).bind("mouseenter",function(){var
h="<div
class='iconLabel'>"+a(this).attr("contTitle")+"</div>";a("body").append(h);a(".iconLabel").hide().css({position:"absolute",top:a(this).offset().top-15,left:a(this).offset().left+15,opacity:0.9}).fadeIn("slow").mb_BringToFront()}).bind("mouseleave",function(){a(".iconLabel").remove()});if(!a.browser.msie){d.find(".mbcontainercontent:first").css("overflow","hidden");d.find(".no:first").slideUp("fast");d.animate({height:"32px",width:"32px",left:c,top:e},b.effectDuration,function(){a(this.dockIcon).show();if(d.attr("dock")){d.hide()}})}else{d.find(".no:first").hide();d.css({height:"32px",width:"32px",left:c,top:e});a(this.dockIcon).show();if(d.attr("dock")){d.hide()}}if(b.onIconize){b.onIconize(d)}})};jQuery.fn.mb_resizeTo=function(c,b){if(!b){b=a(this).outerWidth()}if(!c){c=a(this).outerHeight()}a(this).animate({height:c,width:b},500,function(){var
d=a(this).outerHeight()-a(this).find(".n:first").outerHeight()-(a(this).find(".s:first").outerHeight());a(this).find(".c:first
,
.mbcontainercontent:first").animate({height:d})})};jQuery.fn.mb_iconize=function(){if(a(this).attr("closed")=="false"){if(a(this).attr("iconized")=="true"){var
b=a(this)[0].dockIcon;a(b).click();a(this).mb_BringToFront()}else{a(this).containerIconize()}}};jQuery.fn.mbOpenBox=function(b,c){if(a(this).attr("closed")=="true"){if(!c){c=""}if(b){a(this).mb_changeContainerContent(b,c)}if(!a.browser.msie){a(this).fadeIn(300)}else{a(this).show()}a(this).attr("closed","false");a(this).mb_BringToFront()}};jQuery.fn.mbCloseBox=function(){if(a(this).attr("closed")=="false"){a(this).find(".close:first").click()}};jQuery.fn.mb_toggle=function(){if(a(this).attr("closed")=="false"&&a(this).attr("iconized")=="false"){a(this).containerCollapse()}};jQuery.fn.mb_BringToFront=function(){var
b=10;a("*").each(function(){if(a(this).css("position")=="absolute"){var
c=parseInt(a(this).css("zIndex"));b=c>b?parseInt(a(this).css("zIndex")):b}});a(this).css("zIndex",b+=1)};jQuery.fn.mb_changeContent=function(c,d){var
b=a(this);if(!d){d=""}a.ajax({type:"POST",url:c,data:d,success:function(e){b.html(e)}})};jQuery.fn.mb_changeContainerContent=function(b,c){a(this).find(".mbcontainercontent:first").mb_changeContent(b,c)};jQuery.fn.mb_getState=function(b){var
c=a(this).attr(b);c=c=="true";return
c};jQuery.fn.mb_fullscreen=function(){if(!a(this).is(".draggable")||a(this).is("[iconized='true']")||a(this).is("[collapsed='true']")){return}a(this).attr("w",a(this).width());a(this).attr("h",a(this).height());a(this).attr("t",a(this).css("top"));a(this).attr("l",a(this).css("left"));a(this).animate({top:10,left:10,position:"relative"});a(this).mb_resizeTo("98%","98%")}})(jQuery);
\ No newline at end of file
commit f0b086415a51a1a3ca9e044f2946a6db783b0ac8
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:39:40 2009 -0400
Tweak some of our container options
diff --git a/moksha/apps/metrics/moksha/apps/metrics/widgets/metrics.py
b/moksha/apps/metrics/moksha/apps/metrics/widgets/metrics.py
index 9204b51..5b30155 100644
--- a/moksha/apps/metrics/moksha/apps/metrics/widgets/metrics.py
+++ b/moksha/apps/metrics/moksha/apps/metrics/widgets/metrics.py
@@ -39,7 +39,6 @@ class MokshaMemoryUsageWidget(LiveFlotWidget):
topic = 'moksha_mem_metrics'
container_options = {
'icon': 'chart.png', 'top': 400, 'left': 80,
'height': 325,
- 'iconize': False, 'minimize': False,
}
@@ -48,7 +47,6 @@ class MokshaCPUUsageWidget(LiveFlotWidget):
topic = 'moksha_cpu_metrics'
container_options = {
'icon': 'chart.png', 'top': 80, 'left': 80,
'height': 325,
- 'iconize': False, 'minimize': False,
}
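For reference, the container options that recur across these commits,
collected into one sketch (the values here are illustrative, not project
defaults):

    container_options = {
        'title': 'My Widget', 'icon': 'chart.png',
        'top': 80, 'left': 80, 'width': 600, 'height': 325,
        'iconize': False, 'minimize': False,  # hide those titlebar buttons
    }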
commit 22c83cb53fee9b06de00933f380b332eb1341a96
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sun Aug 30 21:38:06 2009 -0400
Add a moksha.api.widgets.live.{,un}subscribe_topics API.
This allows us to plug the leaky abstraction in the LiveWidget API
that requires us to use stomp-specific methods everywhere. Now, we
can easily swap STOMP out for AMQP without pain.
This also adds a useful LiveWidget.get_topics method
diff --git a/moksha/api/widgets/live/__init__.py b/moksha/api/widgets/live/__init__.py
index 0c822bf..0776958 100644
--- a/moksha/api/widgets/live/__init__.py
+++ b/moksha/api/widgets/live/__init__.py
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <
http://www.gnu.org/licenses/>.
-from live import LiveWidget
+from live import LiveWidget, subscribe_topics, unsubscribe_topics
# At the moment we're using the StompWidget as our primary publish/subscribe
# messaging standard. If we want to change the default to an AMQP widget
diff --git a/moksha/api/widgets/live/live.py b/moksha/api/widgets/live/live.py
index 8325a46..a6588ca 100644
--- a/moksha/api/widgets/live/live.py
+++ b/moksha/api/widgets/live/live.py
@@ -20,7 +20,7 @@ import moksha
from tw.api import Widget
from moksha.exc import MokshaException
-from moksha.api.widgets.stomp import stomp_widget, stomp_subscribe
+from moksha.api.widgets.stomp import stomp_widget, stomp_subscribe, stomp_unsubscribe
class LiveWidget(Widget):
""" A live streaming widget.
@@ -46,3 +46,22 @@ class LiveWidget(Widget):
moksha.stomp['onconnectedframe'].append(stomp_subscribe(topics))
elif callback in self.params:
moksha.stomp[callback].append(getattr(self, callback))
+
+ def get_topics(self):
+ topics = []
+ for key in ('topic', 'topics'):
+ if hasattr(self, key):
+ topic = getattr(self, key)
+ if topic:
+ if isinstance(topic, basestring):
+ map(topics.append, topic.split())
+ else:
+ topics += topic
+ return topics
+
+
+# Moksha Topic subscription handling methods
+subscribe_topics = stomp_subscribe
+unsubscribe_topics = stomp_unsubscribe
+
+
diff --git a/moksha/api/widgets/stomp/stomp.py b/moksha/api/widgets/stomp/stomp.py
index fcddd92..15e3bf6 100644
--- a/moksha/api/widgets/stomp/stomp.py
+++ b/moksha/api/widgets/stomp/stomp.py
@@ -39,6 +39,18 @@ def stomp_subscribe(topic):
return sub
+def stomp_unsubscribe(topic):
+ """ Return a javascript callback that unsubscribes to a given topic,
+ or a list of topics.
+ """
+ sub = 'stomp.unsubscribe("%s");'
+ if isinstance(topic, list):
+ sub = ''.join([sub % t for t in topic])
+ else:
+ sub = sub % topic
+ return sub
+
+
class StompWidget(Widget):
callbacks = ['onopen', 'onerror', 'onerrorframe',
'onclose',
'onconnectedframe', 'onmessageframe']
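Taken together, a minimal sketch of the new topic API (the widget class
below is hypothetical; the functions are the ones added in this commit):

    from moksha.api.widgets.live import (LiveWidget, subscribe_topics,
                                         unsubscribe_topics)

    class CPUGraph(LiveWidget):
        topic = 'moksha_cpu_metrics'   # get_topics() also accepts a `topics`
                                       # list or a space-separated string

    widget = CPUGraph('cpu_graph')
    topics = widget.get_topics()            # ['moksha_cpu_metrics']
    on_connect = subscribe_topics(topics)   # javascript callbacks for the
    on_close = unsubscribe_topics(topics)   # client side

These are still the STOMP helpers under the hood for now, but callers no
longer need to know that.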
commit 4ae34ffdca290e1da8b23b58930bc67a07fa328f
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 23:40:04 2009 -0400
Add the ability to display a widget's entire module.
This also adds moksha.view_source and moksha.view_module_source
javascript functions
diff --git a/moksha/controllers/widgets.py b/moksha/controllers/widgets.py
index 62a7ebb..e8a4258 100644
--- a/moksha/controllers/widgets.py
+++ b/moksha/controllers/widgets.py
@@ -38,8 +38,10 @@ class WidgetController(Controller):
'live': validators.StringBool(),
'chrome': validators.StringBool(),
'source': validators.UnicodeString(),
+ 'module': validators.StringBool(),
})
- def default(self, widget, chrome=False, live=False, source=False, **kw):
+ def default(self, widget, chrome=False, live=False, source=False,
+ module=False, **kw):
""" Display a single widget.
:chrome: Display in a Moksha Container
@@ -66,13 +68,17 @@ class WidgetController(Controller):
if live:
tmpl_context.moksha_socket = moksha_socket
if source:
- options['content'] = iframe_widget(url='/widgets/code/' +
source,
+ options['content'] = iframe_widget(url='/widgets/code/' +
source +
+ '?module=%s' % module,
height='90%')
options['id'] += source + '_source'
options['view_source'] = False
return dict(options=options)
@expose('mako:moksha.templates.widget')
- def code(self, widget):
+ @validate({
+ 'module': validators.StringBool(),
+ })
+ def code(self, widget, module=False):
tmpl_context.widget = code_widget
- return dict(options={'widget': widget})
+ return dict(options={'widget': widget, 'module': module})
diff --git a/moksha/public/javascript/moksha.js b/moksha/public/javascript/moksha.js
index 50ed366..e2ed1b2 100644
--- a/moksha/public/javascript/moksha.js
+++ b/moksha/public/javascript/moksha.js
@@ -758,7 +758,33 @@ moksha = {
document.title = title_str;
moksha.title = title;
- }
+ },
+
+ view_source: function(widget) {
+ $('#footer').append($('<div/>').attr('id', widget +
'_loader'));
+ $.ajax({
+ url: '/widgets/code_widget?chrome=True&source='+widget,
+ success: function(r, s) {
+ var $panel = $('#' + widget + '_loader');
+ var $stripped = moksha.filter_resources(r);
+ $panel.html($stripped);
+ }
+ });
+ return false;
+ },
+
+ view_module_source: function(widget) {
+ $('#footer').append($('<div/>').attr('id', widget +
'_loader'));
+ $.ajax({
+ url:
'/widgets/code_widget?chrome=True&module=True&source='+widget,
+ success: function(r, s) {
+ var $panel = $('#' + widget + '_loader');
+ var $stripped = moksha.filter_resources(r);
+ $panel.html($stripped);
+ }
+ });
+ return false;
+ },
}
})();
diff --git a/moksha/widgets/source.py b/moksha/widgets/source.py
index 3453cc6..e119cc5 100644
--- a/moksha/widgets/source.py
+++ b/moksha/widgets/source.py
@@ -33,18 +33,28 @@ from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
class SourceCodeWidget(Widget):
- params = ['widget', 'code']
+ params = {
+ 'widget': 'The name of the widget',
+ 'module': 'Whether to display the entire module',
+ 'code': 'The actual source code',
+ }
template = "${code}"
engine_name = 'mako'
container_options = {'width': 600, 'height': 500, 'title':
'View Source',
- 'icon': 'comment.png'}
+ 'icon': 'comment.png', 'top': 80,
'left': 250,
+ 'iconize': False, 'minimize': False}
hidden = True
def update_params(self, d):
super(SourceCodeWidget, self).update_params(d)
d.widget = moksha.get_widget(d.widget)
title = d.widget.__class__.__name__
- source = inspect.getsource(d.widget.__class__)
+ if d.module:
+ obj = __import__(d.widget.__module__, globals(), locals(),
+ [d.widget.__module__])
+ else:
+ obj = d.widget.__class__
+ source = inspect.getsource(obj)
d.code = highlight(source, PythonLexer(),
HtmlFormatter(full=True))
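A sketch of the module=True lookup above, with a stand-in class so it runs
on its own (names are illustrative; run it as a script so inspect can find
the file):

    import inspect

    class DemoWidget(object):
        pass  # stand-in for a real widget class

    # module=False: just the class source
    print inspect.getsource(DemoWidget)

    # module=True: the entire module the widget lives in
    module = __import__(DemoWidget.__module__, globals(), locals(),
                        [DemoWidget.__module__])
    print inspect.getsource(module)

On the client side, moksha.view_module_source(widget) drives this path
through /widgets/code_widget?chrome=True&module=True&source=<widget>, as the
moksha.js hunk above shows.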
commit a152c42d4da37eb4755e3d7133e02f27b2bec166
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 21:05:23 2009 -0400
Prep for 0.3.4
diff --git a/moksha.spec b/moksha.spec
index 34683d4..6da9b2c 100644
--- a/moksha.spec
+++ b/moksha.spec
@@ -2,7 +2,7 @@
%{!?pyver: %define pyver %(%{__python} -c "import sys ; print
sys.version[:3]")}
Name: moksha
-Version: 0.3.3
+Version: 0.3.4
Release: 1%{?dist}
Summary: A flexable platform for creating live collaborative web applications
Group: Applications/Internet
diff --git a/moksha/__init__.py b/moksha/__init__.py
index ffadd30..0d3251f 100644
--- a/moksha/__init__.py
+++ b/moksha/__init__.py
@@ -16,8 +16,6 @@
__import__('pkg_resources').declare_namespace(__name__)
-version = '0.3.3'
-
from paste.registry import StackedObjectProxy
# The central feed cache, used by the Feed widget.
diff --git a/pavement.py b/pavement.py
index 8caa416..0239825 100644
--- a/pavement.py
+++ b/pavement.py
@@ -25,7 +25,7 @@ from paver.setuputils import find_packages, find_package_data
import paver.misctasks
import paver.virtual
-VERSION = '0.3.3'
+VERSION = '0.3.4'
HEADER = """This file is part of Moksha.
Copyright (C) 2008-2009 Red Hat, Inc.
diff --git a/setup.py b/setup.py
index 1ccff85..b105363 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ from paver.setuputils import find_package_data
setup(
name='moksha',
- version='0.3.3',
+ version='0.3.4',
description='',
author='',
author_email='',
commit 40bfa42203350cb38b7bc3f3c414ab3faef480da
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 21:04:16 2009 -0400
Add a moksha-hub subpackage
diff --git a/moksha.spec b/moksha.spec
index 5b18d2d..34683d4 100644
--- a/moksha.spec
+++ b/moksha.spec
@@ -32,8 +32,6 @@ Requires: python-tw-jquery >= 0.9.4.1
#Requires: python-repoze-squeeze
#Requires: python-repoze-profile
Requires: orbited
-Requires: python-twisted
-Requires: python-stomper
Requires: python-sphinx
Requires: python-paver
Requires: python-tw-forms
@@ -68,6 +66,15 @@ Requires: mod_wsgi httpd
%description server
This package contains an Apache mod_wsgi configuration for Moksha.
+%package hub
+Summary: Moksha Hub
+Group: Applications/Internet
+Requires: %{name} = %{version}-%{release}
+Requires: python-twisted
+Requires: python-stomper
+
+%description hub
+This package contains the Moksha Hub.
%prep
%setup -q
@@ -90,6 +97,7 @@ make -C docs html
%{__mkdir_p} -m 0755 %{buildroot}%{_sysconfdir}/httpd/conf.d
%{__mkdir_p} -m 0755 %{buildroot}/%{_sysconfdir}/%{name}/
%{__mkdir_p} -m 0755 %{buildroot}/%{_sysconfdir}/%{name}/conf.d
+%{__mkdir_p} -m 0755 %{buildroot}/%{_sysconfdir}/init.d/
%{__install} production/*.* %{buildroot}%{_datadir}/%{name}/production/
%{__install} production/apache/* %{buildroot}%{_datadir}/%{name}/production/apache
@@ -100,6 +108,9 @@ make -C docs html
%{__sed} -i -e 's/$VERSION/%{version}/g'
%{buildroot}%{_sysconfdir}/%{name}/production.ini
%{__cp} orbited.cfg %{buildroot}%{_sysconfdir}/%{name}/orbited.cfg
+%{__install} production/moksha-hub %{buildroot}%{_bindir}/moksha-hub
+%{__install} production/moksha-hub.init %{buildroot}%{_sysconfdir}/init.d/moksha-hub
+
%clean
%{__rm} -rf %{buildroot}
@@ -110,7 +121,6 @@ make -C docs html
%{python_sitelib}/%{name}/
%{python_sitelib}/%{name}-%{version}-py%{pyver}.egg-info/
%attr(-,apache,apache) %dir %{_localstatedir}/lib/%{name}
-%{_bindir}/moksha-hub
%files server
%attr(-,apache,root) %{_datadir}/%{name}
@@ -119,11 +129,19 @@ make -C docs html
%config(noreplace) %{_sysconfdir}/%{name}/orbited.cfg
%attr(-,apache,apache) %dir %{_localstatedir}/cache/%{name}/
+%files hub
+%defattr(-,root,root,-)
+%{_bindir}/moksha-hub
+%{_sysconfdir}/init.d/moksha-hub
+
%files docs
%defattr(-,root,root)
%doc docs/_build/html
%changelog
+* Sat Aug 29 2009 Luke Macken <lmacken(a)redhat.com> - 0.3.4-1
+- Add a moksha-hub subpackage
+
* Mon Aug 24 2009 Luke Macken <lmacken(a)redhat.com> - 0.3.3-1
- Include our orbited configuration file in the moksha-server subpackage
- Create a /etc/moksha/conf.d for our app configs
diff --git a/pavement.py b/pavement.py
index 84d143f..8caa416 100644
--- a/pavement.py
+++ b/pavement.py
@@ -177,13 +177,13 @@ def test():
@task
def reinstall():
print "Removing existing Moksha install"
- sh('sudo rpm -e --nodeps moksha moksha-docs moksha-server',
ignore_error=True)
+ sh('sudo rpm -e --nodeps moksha{,-docs,-server,-hub}', ignore_error=True)
sh('rm -fr dist/')
sh('python setup.py sdist --format=bztar')
sh('mv dist/* ~/rpmbuild/SOURCES/')
sh('cp moksha.spec ~/rpmbuild/SPECS/')
sh('rpmbuild -ba ~/rpmbuild/SPECS/moksha.spec')
- sh('sudo rpm -ivh
~/rpmbuild/RPMS/noarch/moksha{,-docs,-server}-%s-1.*noarch.rpm' %
options.version.number)
+ sh('sudo rpm -ivh
~/rpmbuild/RPMS/noarch/moksha{,-docs,-server,-hub}-%s-1.*noarch.rpm' %
options.version.number)
@task
def restart_httpd():
commit 557c8a70db79e0ba91c793b425d87e6f63daefd9
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:15:59 2009 -0400
Add a custom moksha-hub script that can daemonize itself
diff --git a/production/moksha-hub b/production/moksha-hub
new file mode 100755
index 0000000..8ba6650
--- /dev/null
+++ b/production/moksha-hub
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+
+__requires__ = 'moksha'
+
+import os
+import sys
+
+def main():
+ from pkg_resources import load_entry_point
+ sys.exit(load_entry_point('moksha', 'console_scripts',
'moksha-hub')())
+
+if '--daemon' in sys.argv:
+ pid = os.fork()
+ if pid == 0:
+ os.setsid()
+ pid = os.fork()
+ if pid == 0:
+ main()
+else:
+ main()
commit 6c3433aacd7a4a04cc3d79b2fbdb920708bb8257
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:11:23 2009 -0400
Add a moksha-hub init script
diff --git a/production/moksha-hub.init b/production/moksha-hub.init
new file mode 100644
index 0000000..2290c11
--- /dev/null
+++ b/production/moksha-hub.init
@@ -0,0 +1,52 @@
+#!/bin/bash
+#
+# /etc/rc.d/init.d/moksha-hub
+#
+#
+#
+#
+#
+
+# Source function library.
+. /etc/init.d/functions
+
+start() {
+ echo -n "Starting the Moksha Hub: "
+ if [ -f /var/lock/subsys/moksha-hub ]; then
+ echo Moksha Hub already running
+ exit 2;
+ fi
+ touch /var/lock/subsys/moksha-hub
+ daemon moksha-hub --daemon
+ return $?
+}
+
+stop() {
+ echo -n "Shutting down : "
+ echo
+ killproc moksha-hub
+ echo
+ rm -f /var/lock/subsys/moksha-hub
+ return
+}
+
+case "$1" in
+ start)
+ start
+ ;;
+ stop)
+ stop
+ ;;
+ status)
+ status moksha-hub
+ ;;
+ restart)
+ stop
+ start
+ ;;
+ *)
+ echo "Usage: {start|stop|status|reload|restart[|probe]"
+ exit 1
+ ;;
+esac
+exit $?
commit 3eccba04f337440d5ca95e5f27d0ed7bd521657d
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:10:23 2009 -0400
Don't instantiate our WSGI app_class
diff --git a/moksha/middleware/middleware.py b/moksha/middleware/middleware.py
index 6ac4e6d..51693de 100644
--- a/moksha/middleware/middleware.py
+++ b/moksha/middleware/middleware.py
@@ -149,11 +149,11 @@ class MokshaMiddleware(object):
log.info('Loading moksha WSGI applications')
for app_entry in pkg_resources.iter_entry_points('moksha.wsgiapp'):
log.info('Loading %s WSGI application' % app_entry.name)
- app_class = app_entry.load()
app_path = app_entry.dist.location
+ app_class = app_entry.load()
moksha._apps[app_entry.name] = {
'name': getattr(app_class, 'name', app_entry.name),
- 'controller': WSGIAppController(app_class()),
+ 'controller': WSGIAppController(app_class),
'path': app_path,
'model': None,
}
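In other words, a `moksha.wsgiapp` entry point should now resolve to
something WSGIAppController can use directly; a minimal sketch with a
hypothetical app:

    # In the app's setup.py:
    #
    #   [moksha.wsgiapp]
    #   myapp = myapp.wsgi:application
    #
    # which the middleware loads without instantiating:
    import pkg_resources

    for app_entry in pkg_resources.iter_entry_points('moksha.wsgiapp'):
        application = app_entry.load()  # the WSGI callable itself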
commit 67f38ba491f56e330308553bdead6fca1d01a2e2
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:10:06 2009 -0400
Update our MokshaMiddleware docstring
diff --git a/moksha/middleware/middleware.py b/moksha/middleware/middleware.py
index 7c44f32..6ac4e6d 100644
--- a/moksha/middleware/middleware.py
+++ b/moksha/middleware/middleware.py
@@ -46,7 +46,11 @@ class MokshaMiddleware(object):
environment, as well as handling every request/response in the application.
If a request for an application comes in (/apps/$NAME), it will dispatch to
- the RootController of that application as defined in it's egg-info.
+ the WSGI Application or RootController of that application as defined in
+ its egg-info entry-points.
+
+ This middleware also sets up the `moksha.stomp` StackedObjectProxy, which
+ acts as a registry for Moksha LiveWidget topic callbacks.
"""
def __init__(self, application):
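The registry the new docstring mentions is the same one LiveWidget uses
earlier in this series; a minimal sketch of a callback being registered
during a request (the topic name is illustrative):

    import moksha
    from moksha.api.widgets.stomp import stomp_subscribe

    # LiveWidget appends javascript callbacks keyed by STOMP event name; the
    # socket widget later renders them into the page:
    moksha.stomp['onconnectedframe'].append(stomp_subscribe('my_topic'))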
commit bdc1565537bdffc1400f4b5dc6937d21c69ce6b8
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:09:24 2009 -0400
Have our paver tasks remove Moksha & apps before installing.
The rpm --replace* commands are dangerous and can leave stray files around, so
it's
best to just remove the packages from disk first.
diff --git a/moksha/lib/paver_tasks.py b/moksha/lib/paver_tasks.py
index bf40b5f..92bbc3a 100644
--- a/moksha/lib/paver_tasks.py
+++ b/moksha/lib/paver_tasks.py
@@ -64,7 +64,9 @@ paver install -O1 --skip-build --root %%{buildroot}
@task
@needs(['rpm'])
def reinstall():
- sh('sudo rpm -ivh --replacefiles --replacepkgs
~/rpmbuild/RPMS/noarch/%s-%s-%s.*noarch.rpm' % (options.rpm_name, options.version,
options.release))
+ sh('sudo rpm -e %s' % options.rpm_name, ignore_error=True)
+ sh('sudo rpm -ivh ~/rpmbuild/RPMS/noarch/%s-%s-%s.*noarch.rpm' % (
+ options.rpm_name, options.version, options.release))
@task
@needs('setuptools.command.install')
diff --git a/pavement.py b/pavement.py
index 4beab3a..84d143f 100644
--- a/pavement.py
+++ b/pavement.py
@@ -176,12 +176,14 @@ def test():
@task
def reinstall():
+ print "Removing existing Moksha install"
+ sh('sudo rpm -e --nodeps moksha moksha-docs moksha-server',
ignore_error=True)
sh('rm -fr dist/')
sh('python setup.py sdist --format=bztar')
sh('mv dist/* ~/rpmbuild/SOURCES/')
sh('cp moksha.spec ~/rpmbuild/SPECS/')
sh('rpmbuild -ba ~/rpmbuild/SPECS/moksha.spec')
- sh('sudo rpm -ivh --replacefiles --replacepkgs
~/rpmbuild/RPMS/noarch/moksha{,-docs,-server}-%s-1.*noarch.rpm' %
options.version.number)
+ sh('sudo rpm -ivh
~/rpmbuild/RPMS/noarch/moksha{,-docs,-server}-%s-1.*noarch.rpm' %
options.version.number)
@task
def restart_httpd():
commit 72488ce9f2bb0c3cd0f09c3f6eceb1c5497b7b08
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:08:49 2009 -0400
Back to having our apps use the `moksha.apps` namespace, so they don't provide
moksha/apps/__init__.py
diff --git a/moksha/apps/chat/moksha/apps/__init__.py
b/moksha/apps/chat/moksha/apps/__init__.py
index e69de29..de40ea7 100644
--- a/moksha/apps/chat/moksha/apps/__init__.py
+++ b/moksha/apps/chat/moksha/apps/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/moksha/apps/chat/pavement.py b/moksha/apps/chat/pavement.py
index 97fb4cb..6aee196 100644
--- a/moksha/apps/chat/pavement.py
+++ b/moksha/apps/chat/pavement.py
@@ -19,7 +19,7 @@ options(
rpm_name='moksha-apps-chat',
packages=find_packages(),
package_data=find_package_data(),
- namespace_packages=['moksha'],
+ namespace_packages=['moksha', 'moksha.apps'],
install_requires=["Moksha"],
entry_points={
'moksha.application': (
diff --git a/moksha/apps/docs/moksha/apps/__init__.py
b/moksha/apps/docs/moksha/apps/__init__.py
index e69de29..de40ea7 100644
--- a/moksha/apps/docs/moksha/apps/__init__.py
+++ b/moksha/apps/docs/moksha/apps/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/moksha/apps/docs/pavement.py b/moksha/apps/docs/pavement.py
index 91fd88d..2db9dcf 100644
--- a/moksha/apps/docs/pavement.py
+++ b/moksha/apps/docs/pavement.py
@@ -21,7 +21,7 @@ options(
package_data=find_package_data(),
include_package_data=True,
install_requires=["Moksha"],
- namespace_packages=['moksha'],
+ namespace_packages=['moksha', 'moksha.apps'],
entry_points={
'moksha.application': (
'docs = moksha.apps.docs:docs',
diff --git a/moksha/apps/menus/moksha/apps/__init__.py
b/moksha/apps/menus/moksha/apps/__init__.py
index e69de29..de40ea7 100644
--- a/moksha/apps/menus/moksha/apps/__init__.py
+++ b/moksha/apps/menus/moksha/apps/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/moksha/apps/menus/pavement.py b/moksha/apps/menus/pavement.py
index 12812d8..d81be98 100644
--- a/moksha/apps/menus/pavement.py
+++ b/moksha/apps/menus/pavement.py
@@ -19,7 +19,7 @@ options(
rpm_name='moksha-menus',
packages=find_packages(),
package_data=find_package_data(),
- namespace_packages=['moksha'],
+ namespace_packages=['moksha', 'moksha.apps'],
install_requires=["Moksha"],
entry_points={
'moksha.menu': (
diff --git a/moksha/apps/metrics/moksha/apps/__init__.py
b/moksha/apps/metrics/moksha/apps/__init__.py
index e69de29..de40ea7 100644
--- a/moksha/apps/metrics/moksha/apps/__init__.py
+++ b/moksha/apps/metrics/moksha/apps/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/moksha/apps/metrics/pavement.py b/moksha/apps/metrics/pavement.py
index c6fe8fb..0cb1a74 100644
--- a/moksha/apps/metrics/pavement.py
+++ b/moksha/apps/metrics/pavement.py
@@ -19,7 +19,7 @@ options(
rpm_name='moksha-metrics',
packages=find_packages(),
package_data=find_package_data(),
- namespace_packages=['moksha'],
+ namespace_packages=['moksha', 'moksha.apps'],
install_requires=["Moksha"],
entry_points={
'moksha.stream': (
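The pattern each of these diffs repeats, so that separately packaged apps
can all install modules under the moksha.apps namespace:

    # moksha/apps/__init__.py, shipped by every app distribution:
    __import__('pkg_resources').declare_namespace(__name__)

together with the matching namespace_packages=['moksha', 'moksha.apps']
entry in each pavement.py, as shown above.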
commit 129a15e40e003da0b6b83525cbe51bad9cc0bae5
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:08:34 2009 -0400
Replace the moksha version in our production.ini
diff --git a/moksha.spec b/moksha.spec
index f095a7d..5b18d2d 100644
--- a/moksha.spec
+++ b/moksha.spec
@@ -97,6 +97,7 @@ make -C docs html
%{__install} production/nginx/* %{buildroot}%{_datadir}/%{name}/production/nginx
%{__install} production/rabbitmq/* %{buildroot}%{_datadir}/%{name}/production/rabbitmq
%{__cp} production/sample-production.ini
%{buildroot}%{_sysconfdir}/%{name}/production.ini
+%{__sed} -i -e 's/$VERSION/%{version}/g'
%{buildroot}%{_sysconfdir}/%{name}/production.ini
%{__cp} orbited.cfg %{buildroot}%{_sysconfdir}/%{name}/orbited.cfg
%clean
diff --git a/production/sample-production.ini b/production/sample-production.ini
index 3f5c1df..4e1068d 100644
--- a/production/sample-production.ini
+++ b/production/sample-production.ini
@@ -52,7 +52,7 @@ stomp_pass = guest
#amqp_broker = guest/guest@localhost
# Documentation directory
-docs_dir = /usr/share/doc/moksha-docs-0.3.2/html/
+docs_dir = /usr/share/doc/moksha-docs-$VERSION/html/
# Moksha chat configuration
commit a5e2e2caabfd6f2e7be68a48f109fdfa75749e61
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:08:20 2009 -0400
Update the MokshaApplication docs to reflect new config paths
diff --git a/docs/main/MokshaApplications.rst b/docs/main/MokshaApplications.rst
index 82ea7e0..6e226ee 100644
--- a/docs/main/MokshaApplications.rst
+++ b/docs/main/MokshaApplications.rst
@@ -52,7 +52,7 @@ Configuration
-------------
Moksha will reads every application's ``production.ini`` or ``development.ini``
-upon startup and loads all of the ``[DEFAULT]`` variables into the global
+from ``/etc/moksha/conf.d/$APPNAME/`` upon startup and loads all of the ``[DEFAULT]``
variables into the global
:class:`pylons.config` object. This enables TG2/Pylons Moksha applications to
use the config object as they would do normally. However, this requires that
applications do not have conflicting configuration variable names. Moksha will
commit ec4fd76be2ba7c07e1e605db7b1e206ac7ec1ead
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:07:41 2009 -0400
Elaborate on IntegratingWithTG2 about running your app inside of Moksha
diff --git a/docs/main/IntegratingWithTG2.rst b/docs/main/IntegratingWithTG2.rst
index ad92a1a..048f6b8 100644
--- a/docs/main/IntegratingWithTG2.rst
+++ b/docs/main/IntegratingWithTG2.rst
@@ -6,6 +6,8 @@ The best way to learn how to use Moksha within TurboGears2 by example.
.. image:: ../_static/jqplotdemo.png
+:Running demo:
http://moksha.csh.rit.edu/apps/jqplotdemo
+
We'll take a demo written to show off the :mod:`tw.jquery.jqplot` module for
example. This demo shows off the JQPlotWidget, which queries the server every
2 seconds and reloads the graphs.
@@ -218,3 +220,101 @@ TODO: link to docs on entry points
The source code for the MokshaJQPlotDemo can be found here:
http://lmacken.fedorapeople.org/MokshaJQPlotDemo.tar.bz2
+
+
+At this point, you're all set to run `paster serve development.ini` and enjoy
+your shiny new live web app.
+
+You are now free to go and deploy your application however you please.
+However, Moksha can run it for you if you wish...
+
+--------------------------------------------------------------------------------
+
+Running your app inside of Moksha
+---------------------------------
+
+The above example shows how you can easily use Moksha within your existing app.
+Moksha also lets you run your app inside of it. Moksha is preconfigured
+to run in an Apache & mod_wsgi environment, which will handle loading and
+mounting your apps within itself.
+
+
+.. warning::
+
+ If you're running your app inside of Moksha, you must ensure that you
+ are not running the MokshaMiddleware inside of your app first. This
+ currently leads to a fun infinite WSGI middleware loop :)
+
+ So if you're creating a new app, don't worry about this, but for the above
+ example, just remove the `wrap_app=make_moksha_middleware` from your
+ `jqplotdemo/config/middleware.py`
+
+
+Create your WSGI app
+~~~~~~~~~~~~~~~~~~~~
+
+If your app is already WSGI-mountable, then don't worry about this. For a
TurboGears2 app, it's as easy as:
+
+.. code-block:: diff
+
+ --- /dev/null
+ +++ b/jqplotdemo/wsgi.py
+ @@ -0,0 +1,2 @@
+ +from paste.deploy import loadapp
+ +application = loadapp('config:/etc/moksha/conf.d/jqplotdemo/production.ini')
+
+
+Make a production configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In production we want to make sure any caches are set up in the right spot.
+We base the `production.ini` on our existing `development.ini`, and make a tiny tweak.
+
+.. code-block:: diff
+
+ --- development.ini
+ +++ production.ini
+ @@ -23,7 +23,7 @@
+ use = egg:JQPlotDemo
+ lang = en
+ -cache_dir = %(here)s/data
+ +cache_dir = /var/cache/moksha/jqplotdemo/data
+ beaker.session.key = jqplotdemo
+
+.. code-block:: diff
+
+ --- a/MANIFEST.in
+ +++ b/MANIFEST.in
+ @@ -2,3 +2,4 @@ recursive-include jqplotdemo/public *
+ include jqplotdemo/public/favicon.ico
+ recursive-include jqplotdemo/i18n *
+ recursive-include jqplotdemo/templates *
+ +include production.ini
+
+
+Integrate your TG2/WSGI app into Moksha
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can plug your WSGI application into Moksha by using the `moksha.wsgiapp`
+entry-point.
+
+.. code-block:: diff
+
+ --- a/setup.py
+ +++ b/setup.py
+ @@ -47,6 +47,9 @@ setup(
+ [moksha.stream]
+ jqplot_stream = jqplotdemo.streams:JQPlotDemoStream
+
+ + [moksha.wsgiapp]
+ + jqplotdemo = jqplotdemo.wsgi:application
+ +
+ [moksha.global]
+ moksha_socket = moksha.api.widgets:moksha_socket
+
+
+.. seealso::
+
+ :doc:`MokshaApplications`
+
+
commit a89fa07fcfdab18f1ea28482f5fb1020d06023ef
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:07:09 2009 -0400
Improve our Getting Started documentation
diff --git a/docs/main/GettingStarted.rst b/docs/main/GettingStarted.rst
index ebcae16..331ad91 100644
--- a/docs/main/GettingStarted.rst
+++ b/docs/main/GettingStarted.rst
@@ -1,76 +1,11 @@
-===============
-Getting Started
-===============
+===========================
+Getting Started with Moksha
+===========================
-:Status: Draft.
+There are currently two ways of installing and running Moksha.
-This guide will help quickly get you up and running with a local copy of
-Moksha. It will run the Moksha WSGI application using the Paste threaded http
-server, a single orbited daemon with an embeded MorbidQ stomp message broker,
-SQLite SQLAlchemy and Feed databases, and an in-memory cache. This setup is
-meant to be dead-simple to get up and running, and is not designed for
-production deployments.
+.. toctree::
-At the moment, all of Moksha's dependencies are not all in Fedora. They are
-all currently under review, but in the mean time these instructions will run
-Moksha within a virtual Python environment, without changing your global
-site-packages.
+ RPMInstallation
+ VirtualenvInstallation
-You can track the progress of getting TurboGears2 into Fedora `here
<
http://fedoraproject.org/wiki/TurboGears2>`_.
-
-Installing the necessary dependencies
--------------------------------------
-
-You'll need the `virtualenv <
http://pypi.python.org/pypi/virtualenv>`_
package.
-
-.. code-block:: bash
-
- # yum -y install python-virtualenv python-memcached gcc
-
-The `start-moksha` script mentioned below should install all of the necessary
-dependencies. However, it will attempt to compile a few things, such as lxml. So, you
may need to install some additional dependencies like `libxml2` and `libxslt` in order to
build it. If you're using yum, you can easily install all of the build requirements
by doing:
-
-.. code-block:: bash
-
- # yum-builddep -y python-lxml PyOpenSSL python-sqlite2
-
-Getting the Moksha source
--------------------------
-
-.. code-block:: bash
-
- $ git clone
git://git.fedorahosted.org/git/moksha
- $ cd moksha
-
-Starting Moksha
----------------
-
-.. code-block:: bash
-
- $ ./start-moksha
-
-.. note::
- Do not run this command as root.
-
-.. note::
- This script takes care of setting up your TurboGears2 virtual environment
- the first time it is run. To drop into the virtualenv manually you can run
- `source tg2env/bin/activate` to enter it, and `deactivate` to leave it.
-
-Stopping Moksha
----------------
-
-.. code-block:: bash
-
- $ ./stop-moksha
-
-Using Moksha
-------------
-
-Now you can navigate your web browser to the following url:
-
-`http://localhost:8080 <
http://localhost:8080>`_
-
-.. note::
- Going to `127.0.0.1` will not work properly with the current Orbited setup,
- so you must make sure to go to `localhost`.
commit cf56c9a736f68940cf1c306734e219ec4e661b04
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:06:55 2009 -0400
Improve our Development documentation
diff --git a/docs/main/Development.rst b/docs/main/Development.rst
index b52ed39..f59dc24 100644
--- a/docs/main/Development.rst
+++ b/docs/main/Development.rst
@@ -2,34 +2,11 @@
Hacking with Moksha
===================
-RPM Installation
-----------------
+Setting up your RPM/virtualenv development environments
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Install the dependencies
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Run the following commands as root, replacing `$DISTRO` with either
-`fedora-11`, `fedora-10`, or `epel-5`.
-
-.. code-block:: bash
-
- # cd /etc/yum.repos.d/
- # wget
http://lmacken.fedorapeople.org/rpms/tg2/$DISTRO/tg2.repo
- # yum -y install TurboGears2 python-tg-devtools
-
-.. note::
-
- It is recommended that you perform a `yum update` after installing the
- Moksha/TurboGears2 stack, to ensure that you have the latest versions
- of all the dependencies.
-
-.. note::
-
- At the moment the full TurboGears2 stack is not yet fully in Fedora/EPEL,
- so you'll have to hook up a third party repository. You can track the
- status of TurboGears2 in Fedora here:
-
-
http://fedoraproject.org/wiki/TurboGears2
+:RPM mod_wsgi installation (Red Hat, Fedora, etc.): :doc:`RPMInstallation`
+:virtualenv installation (OSX, Ubuntu, etc.): :doc:`VirtualenvInstallation`
Getting the code
~~~~~~~~~~~~~~~~
@@ -38,62 +15,12 @@ Getting the code
$ git clone
git://git.fedorahosted.org/git/moksha
-Rebuilding and Reinstalling the RPM
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Rebuild and reinstall *everything*, and restart Apache
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: bash
- $ paver reinstall
-
-Reinstalling *all* apps
-~~~~~~~~~~~~~~~~~~~~~~~
-
-.. code-block:: bash
-
- $ paver reinstall_apps
-
-
-Reinstall everything, and restart apache
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-.. code-block:: bash
-
- $ paver reinstall reinstall_apps restart_httpd
-
-.. note::
-
- These instructions assume that you already have an RPM development
- environment setup. To do this, simply install `rpmdevtools` and run
- `rpmdev-setuptree`
-
-
-
-Non-RPM installation (OSX)
---------------------------
-
-This installation method should work on non
-
-Getting the code
-~~~~~~~~~~~~~~~~
-
-.. code-block:: bash
-
- $ git clone
git://git.fedorahosted.org/git/moksha
-
-Starting
-~~~~~~~~
-
-.. code-block:: bash
-
- $ ./start-moksha
-
-Stopping
---------
-
-.. code-block:: bash
-
- $ ./stop-moksha
-
+ $ paver reinstall reinstall_apps restart_httpd
Generating documentation
~~~~~~~~~~~~~~~~~~~~~~~~
@@ -114,7 +41,7 @@ Freezing requirements
.. code-block:: bash
- $ ./pip.py freeze -E tg2env -r normal-reqs.txt production/stable-reqs.txt
+ $ pip freeze -E tg2env -r requirements.txt production/stable-reqs.txt
Profiling the WSGI stack
------------------------
commit efa9efb5bc504c4be4e31de23edf4db9bdc6dc71
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 19:06:30 2009 -0400
Improve our Deployment documentation
diff --git a/docs/main/Deployment.rst b/docs/main/Deployment.rst
index d8b6b4c..677117a 100644
--- a/docs/main/Deployment.rst
+++ b/docs/main/Deployment.rst
@@ -2,51 +2,52 @@
Deployment
==========
-:Status: Incomplete
+Installing and configuring the Moksha mod_wsgi environment
+----------------------------------------------------------
-Installing and configuring Moksha
----------------------------------
-
-TODO
-
-Setting up mod_wsgi
--------------------
-
-TODO
+:doc:`RPMInstallation`
Serving ToscaWidgets static resources
-------------------------------------
`Deploying projects which use ToscaWidgets
<
http://toscawidgets.org/documentation/ToscaWidgets/deploy.html>`_
-Setting up orbited
-------------------
+Once extracted, comment out the ToscaWidgets alias in your
+`/etc/httpd/conf.d/moksha.conf`.
-TODO
-Choosing a message broker
--------------------------
+Running orbited
+---------------
-By default Moksha utilizes the embeded MorbidQ message broker inside of Orbited. This
allows for widgets to communicate with the server using the Stomp protocol. In production
you can easily switch to an enterprise-grade message broker, such as RabbitMQ, and
[eventually] Qpid.
+.. code-block:: bash
-.. toctree::
- :maxdepth: 2
+ $ orbited -c /etc/moksha/orbited.cfg
- RabbitMQ
+Setting up an AMQP message broker
+---------------------------------
+
+By default Moksha utilizes the embedded MorbidQ message broker inside of
+Orbited. This allows for widgets to communicate with the server using the
+Stomp protocol. In production you can easily switch to an enterprise-grade
+message broker, such as `Apache Qpid <
http://qpid.apache.org>`_.
-Using Qpid with MokshA
-~~~~~~~~~~~~~~~~~~~~~~
+See the documentation on :doc:`MessageBrokers` for how to hook up an AMQP broker.
-:Status: Coming soon
+.. seealso::
+
+ If you're interested in using RabbitMQ with Moksha, see the :doc:`RabbitMQ`
+ docs. Warning: it's not very well tested or supported, yet.
Setting up memcached
--------------------
-TODO
+After installing memcached, you'll want to update your `production.ini`
+configuration to utilize the memcached Beaker extension. This example uses
+two memcached servers.
+
+.. code-block:: ini
-Setting up the database
------------------------
+ beaker.cache.type = ext:memcached
+ beaker.cache.url = memcached1;memcached2
-TODO
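Once configured, application code reaches the cache through Beaker's Pylons
integration; a minimal sketch (the region name and key are hypothetical):

    from pylons import cache

    def fetch_planet():
        return 'some expensive result'

    feeds = cache.get_cache('moksha_feeds')        # backed by memcached
    latest = feeds.get_value('planet', createfunc=fetch_planet,
                             expiretime=300)       # seconds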
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..cbe244c
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,69 @@
+AddOns
+Beaker
+BytecodeAssembler
+DecoratorTools
+Extremes
+FormEncode
+Genshi
+Jinja
+Mako
+## FIXME: could not find svn URL in dependency_links for this package:
+PEAK-Rules
+Paste
+PasteDeploy
+PasteScript
+Paver
+Pygments
+Pylons
+Routes
+SQLAlchemy
+Sphinx
+SymbolType
+Tempita
+## FIXME: could not find svn URL in dependency_links for this package:
+ToscaWidgets==0.9.4
+TurboGears2
+TurboJson
+Twisted
+WebError
+WebFlash
+WebHelpers
+WebOb
+WebTest
+## FIXME: could not find svn URL in dependency_links for this package:
+#WidgetBrowser
+boto
+decorator
+demjson
+docutils
+feedcache
+feedparser
+lxml
+meld3
+#-e
git+git+ssh://git.fedorahosted.org/git/moksha#egg=moksha-0.1dev-py2.5-dev
+morbid
+nose
+orbited
+#psycopg2
+PyOpenSSL
+pyprof2calltree
+repoze.tm2
+repoze.what
+repoze.what-pylons
+repoze.what-quickstart
+repoze.what.plugins.sql
+repoze.who
+repoze.who-friendlyform
+repoze.who-testutil
+repoze.who.plugins.sa
+shove
+simplejson
+## FIXME: could not find svn URL in dependency_links for this package:
+sqlalchemy-migrate
+stomper
+tg.devtools
+transaction
+tw.forms
+tw.jquery
+zope.interface
+zope.sqlalchemy
commit 62e10ac2527c869ffbb868cbc42fefeca409c308
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 15:47:39 2009 -0400
Update our frozen stable requirements
diff --git a/production/stable-reqs.txt b/production/stable-reqs.txt
index f6433d1..782df05 100644
--- a/production/stable-reqs.txt
+++ b/production/stable-reqs.txt
@@ -1,11 +1,10 @@
-AddOns==0.6
-Beaker==1.2.3
+AddOns==0.7
+Beaker==1.3.1
BytecodeAssembler==0.5.1
DecoratorTools==1.7
Extremes==1.1.1
-FormEncode==1.2.2
+FormEncode==1.2.1
Genshi==0.5.1
-Jinja==1.2
Mako==0.2.4
## FIXME: could not find svn URL in dependency_links for this package:
## FIXME: could not find svn URL in dependency_links for this package:
@@ -13,18 +12,17 @@ PEAK-Rules==0.5a1.dev-r2582
Paste==1.7.2
PasteDeploy==1.3.3
PasteScript==1.7.3
-Paver==1.0b1
+Paver==1.0.1
Pygments==1.0
Pylons==0.9.7
Routes==1.10.3
-SQLAlchemy==0.5.2
-Sphinx==0.6b1
+SQLAlchemy==0.5.5
+Sphinx==0.6.2
SymbolType==1.0
-Tempita==0.3
+Tempita==0.2
## FIXME: could not find svn URL in dependency_links for this package:
-## FIXME: could not find svn URL in dependency_links for this package:
-ToscaWidgets==0.9.5dev-20081026
-TurboGears2==2.0b7
+ToscaWidgets==0.9.7.2
+TurboGears2==2.0.3
TurboJson==1.2.1
Twisted==8.2.0
WebError==0.10.1
@@ -34,46 +32,40 @@ WebOb==0.9.6.1
WebTest==1.1
## FIXME: could not find svn URL in dependency_links for this package:
#WidgetBrowser
-boto==1.6b
-decorator==3.0.1
+boto==1.8d
+decorator==3.0.0
demjson==1.4
docutils==0.5
-feedcache==1.3
+feedcache==1.3.1
feedparser==4.1
-lxml==2.2beta4
-meld3==0.6.5
#-e
git+git+ssh://git.fedorahosted.org/git/moksha#egg=moksha-0.1dev-py2.5-dev
-morbid==0.8.4
+morbid==0.8.7.1
nose==0.10.4
-orbited==0.7.7
-psycopg2==2.0.9
-pyprof2calltree==1.1.0
-repoze.profile==0.8
-repoze.squeeze==0.3
+orbited==0.7.10
+#psycopg2
repoze.tm2==1.0a4
repoze.what==1.0.8
-repoze.what-pylons==1.0rc4
+repoze.what-pylons==1.0rc3
repoze.what-quickstart==1.0
repoze.what.plugins.sql==1.0rc1
-repoze.who==1.0.10
+repoze.who==1.0.15
repoze.who-friendlyform==1.0b3
repoze.who-testutil==1.0rc1
-## FIXME: could not find svn URL in dependency_links for this package:
-repoze.who.plugins.sa==1.0rc2-r3742
-shove==0.1.5
-simplejson==2.0.9
+repoze.who.plugins.sa==1.0rc1
+shove==0.2.1
+simplejson==2.0.8
## FIXME: could not find svn URL in dependency_links for this package:
sqlalchemy-migrate==0.5.2
stomper==0.2.2
-tg.devtools==2.0b7
+tg.devtools==2.0.2
transaction==1.0a1
-tw.forms==0.9.2
-tw.jquery==0.9.4.2
-zope.interface==3.5.1
+tw.forms==0.9.7.2
+tw.jquery==0.9.5
+zope.interface==3.4.1
zope.sqlalchemy==0.4
## The following requirements were added by pip --freeze:
Jinja2==2.1.1
-#WidgetBrowser==0.1
-#-e
git+git+ssh://git.fedorahosted.org/git/moksha@999ac3b5e39ef22d9407c4faf00...
+-e
git+git+ssh://git.fedorahosted.org/git/moksha@da3c1723e9b4a896d769c353404...
prioritized-methods==0.2.1
-pyOpenSSL==0.8
+pytz==2009l
+uuid==1.30
commit da3c1723e9b4a896d769c3534044a6be5f573f35
Author: Luke Macken <lmacken(a)redhat.com>
Date: Sat Aug 29 15:33:13 2009 -0400
Remove the pip.py and old reqs.txt
Right now we don't specifically use pip anywhere, but it's mentioned
in the docs for freezing requirements. If we wish to utilize
pip in the future for further app/widget packaging, then we should
use the paver-minilib to pull it in for us.
diff --git a/normal-reqs.txt b/normal-reqs.txt
deleted file mode 100644
index cbe244c..0000000
--- a/normal-reqs.txt
+++ /dev/null
@@ -1,69 +0,0 @@
-AddOns
-Beaker
-BytecodeAssembler
-DecoratorTools
-Extremes
-FormEncode
-Genshi
-Jinja
-Mako
-## FIXME: could not find svn URL in dependency_links for this package:
-PEAK-Rules
-Paste
-PasteDeploy
-PasteScript
-Paver
-Pygments
-Pylons
-Routes
-SQLAlchemy
-Sphinx
-SymbolType
-Tempita
-## FIXME: could not find svn URL in dependency_links for this package:
-ToscaWidgets==0.9.4
-TurboGears2
-TurboJson
-Twisted
-WebError
-WebFlash
-WebHelpers
-WebOb
-WebTest
-## FIXME: could not find svn URL in dependency_links for this package:
-#WidgetBrowser
-boto
-decorator
-demjson
-docutils
-feedcache
-feedparser
-lxml
-meld3
-#-e
git+git+ssh://git.fedorahosted.org/git/moksha#egg=moksha-0.1dev-py2.5-dev
-morbid
-nose
-orbited
-#psycopg2
-PyOpenSSL
-pyprof2calltree
-repoze.tm2
-repoze.what
-repoze.what-pylons
-repoze.what-quickstart
-repoze.what.plugins.sql
-repoze.who
-repoze.who-friendlyform
-repoze.who-testutil
-repoze.who.plugins.sa
-shove
-simplejson
-## FIXME: could not find svn URL in dependency_links for this package:
-sqlalchemy-migrate
-stomper
-tg.devtools
-transaction
-tw.forms
-tw.jquery
-zope.interface
-zope.sqlalchemy
diff --git a/pip.py b/pip.py
deleted file mode 100755
index 356435b..0000000
--- a/pip.py
+++ /dev/null
@@ -1,3857 +0,0 @@
-#!/usr/bin/env python
-import sys
-import os
-import errno
-import stat
-import optparse
-import pkg_resources
-import urllib2
-import urllib
-import mimetypes
-import zipfile
-import tarfile
-import tempfile
-import subprocess
-import posixpath
-import re
-import shutil
-import fnmatch
-try:
- from hashlib import md5
-except ImportError:
- import md5 as md5_module
- md5 = md5_module.new
-import urlparse
-from email.FeedParser import FeedParser
-import traceback
-from cStringIO import StringIO
-import socket
-from Queue import Queue
-from Queue import Empty as QueueEmpty
-import threading
-import httplib
-import time
-import logging
-import ConfigParser
-
-class InstallationError(Exception):
- """General exception during installation"""
-
-class DistributionNotFound(InstallationError):
- """Raised when a distribution cannot be found to satisfy a
requirement"""
-
-if getattr(sys, 'real_prefix', None):
- ## FIXME: is build/ a good name?
- base_prefix = os.path.join(sys.prefix, 'build')
- base_src_prefix = os.path.join(sys.prefix, 'src')
-else:
- ## FIXME: this isn't a very good default
- base_prefix = os.path.join(os.getcwd(), 'build')
- base_src_prefix = os.path.join(os.getcwd(), 'src')
-
-pypi_url = "http://pypi.python.org/simple"
-
-default_timeout = 15
-
-# Choose a Git command based on platform.
-if sys.platform == 'win32':
- GIT_CMD = 'git.cmd'
- BZR_CMD = 'bzr.bat'
-else:
- GIT_CMD = 'git'
- BZR_CMD = 'bzr'
-
-## FIXME: this shouldn't be a module setting
-default_vcs = None
-if os.environ.get('PIP_DEFAULT_VCS'):
- default_vcs = os.environ['PIP_DEFAULT_VCS']
-
-
-try:
- pip_dist = pkg_resources.get_distribution('pip')
- version = '%s from %s (python %s)' % (
- pip_dist, pip_dist.location, sys.version[:3])
-except pkg_resources.DistributionNotFound:
- # when running pip.py without installing
- version=None
-
-def rmtree_errorhandler(func, path, exc_info):
- typ, val, tb = exc_info
- if issubclass(typ, OSError) and val.errno == errno.EACCES:
- os.chmod(path, stat.S_IWRITE)
- func(path)
- else:
- raise typ, val, tb
-
-class VcsSupport(object):
- _registry = {}
- # Register more schemes with urlparse for the versio control support
- schemes = ['ssh', 'git', 'hg', 'bzr',
'sftp']
-
- def __init__(self):
- urlparse.uses_netloc.extend(self.schemes)
- urlparse.uses_fragment.extend(self.schemes)
- super(VcsSupport, self).__init__()
-
- def __iter__(self):
- return self._registry.__iter__()
-
- @property
- def backends(self):
- return self._registry.values()
-
- @property
- def dirnames(self):
- return [backend.dirname for backend in self.backends]
-
- def register(self, cls):
- if not hasattr(cls, 'name'):
- logger.warn('Cannot register VCS %s' % cls.__name__)
- return
- if cls.name not in self._registry:
- self._registry[cls.name] = cls
-
- def unregister(self, cls=None, name=None):
- if name in self._registry:
- del self._registry[name]
- elif cls in self._registry.values():
- del self._registry[cls.name]
- else:
- logger.warn('Cannot unregister because no class or name given')
-
- def get_backend_name(self, location):
- """
- Return the name of the version control backend if found at given
- location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
- """
- for vc_type in self._registry:
- path = os.path.join(location, '.%s' % vc_type)
- if os.path.exists(path):
- return vc_type
- return None
-
- def get_backend(self, name):
- if name in self._registry:
- return self._registry[name]
-
- def get_backend_from_location(self, location):
- vc_type = self.get_backend_name(location)
- if vc_type:
- return self.get_backend(vc_type)
- return None
-
-
-vcs = VcsSupport()
-
-parser = optparse.OptionParser(
- usage='%prog COMMAND [OPTIONS]',
- version=version,
- add_help_option=False)
-
-parser.add_option(
- '-h', '--help',
- dest='help',
- action='store_true',
- help='Show help')
-parser.add_option(
- '-E', '--environment',
- dest='venv',
- metavar='DIR',
- help='virtualenv environment to run pip in (either give the '
- 'interpreter or the environment base directory)')
-parser.add_option(
- '-v', '--verbose',
- dest='verbose',
- action='count',
- default=0,
- help='Give more output')
-parser.add_option(
- '-q', '--quiet',
- dest='quiet',
- action='count',
- default=0,
- help='Give less output')
-parser.add_option(
- '--log',
- dest='log',
- metavar='FILENAME',
- help='Log file where a complete (maximum verbosity) record will be kept')
-parser.add_option(
- '--proxy',
- dest='proxy',
- type='str',
- default='',
- help="Specify a proxy in the form user:passwd@proxy.server:port. "
- "Note that the user:password@ is optional and required only if you "
- "are behind an authenticated proxy. If you provide "
- "user@proxy.server:port then you will be prompted for a password."
- )
-parser.add_option(
- '--timeout',
- metavar='SECONDS',
- dest='timeout',
- type='float',
- default=default_timeout,
- help='Set the socket timeout (default %s seconds)' % default_timeout)
-
-parser.disable_interspersed_args()
-
-
-_commands = {}
-
-class Command(object):
- name = None
- usage = None
- def __init__(self):
- assert self.name
- self.parser = optparse.OptionParser(
- usage=self.usage,
- prog='%s %s' % (sys.argv[0], self.name),
- version=parser.version)
- for option in parser.option_list:
- if not option.dest or option.dest == 'help':
- # -h, --version, etc
- continue
- self.parser.add_option(option)
- _commands[self.name] = self
-
- def merge_options(self, initial_options, options):
- for attr in ['log', 'venv', 'proxy']:
- setattr(options, attr, getattr(initial_options, attr) or getattr(options,
attr))
- options.quiet += initial_options.quiet
- options.verbose += initial_options.verbose
-
- def main(self, complete_args, args, initial_options):
- global logger
- options, args = self.parser.parse_args(args)
- self.merge_options(initial_options, options)
-
- if args and args[-1] == '___VENV_RESTART___':
- ## FIXME: We don't do anything this this value yet:
- venv_location = args[-2]
- args = args[:-2]
- options.venv = None
- level = 1 # Notify
- level += options.verbose
- level -= options.quiet
- level = Logger.level_for_integer(4-level)
- complete_log = []
- logger = Logger([(level, sys.stdout),
- (Logger.DEBUG, complete_log.append)])
- if os.environ.get('PIP_LOG_EXPLICIT_LEVELS'):
- logger.explicit_levels = True
- if options.venv:
- if options.verbose > 0:
- # The logger isn't setup yet
- print 'Running in environment %s' % options.venv
- restart_in_venv(options.venv, complete_args)
- # restart_in_venv should actually never return, but for clarity...
- return
- ## FIXME: not sure if this sure come before or after venv restart
- if options.log:
- log_fp = open_logfile_append(options.log)
- logger.consumers.append((logger.DEBUG, log_fp))
- else:
- log_fp = None
-
- socket.setdefaulttimeout(options.timeout or None)
-
- setup_proxy_handler(options.proxy)
-
- exit = 0
- try:
- self.run(options, args)
- except InstallationError, e:
- logger.fatal(str(e))
- logger.info('Exception information:\n%s' % format_exc())
- exit = 1
- except:
- logger.fatal('Exception:\n%s' % format_exc())
- exit = 2
-
- if log_fp is not None:
- log_fp.close()
- if exit:
- log_fn = './pip-log.txt'
- text = '\n'.join(complete_log)
- logger.fatal('Storing complete log in %s' % log_fn)
- log_fp = open_logfile_append(log_fn)
- log_fp.write(text)
- log_fp.close()
- return exit
-
-class HelpCommand(Command):
- name = 'help'
- usage = '%prog'
- summary = 'Show available commands'
-
- def run(self, options, args):
- if args:
- ## FIXME: handle errors better here
- command = args[0]
- if command not in _commands:
- raise InstallationError('No command with the name: %s' %
command)
- command = _commands[command]
- command.parser.print_help()
- return
- parser.print_help()
- print
- print 'Commands available:'
- commands = list(set(_commands.values()))
- commands.sort(key=lambda x: x.name)
- for command in commands:
- print ' %s: %s' % (command.name, command.summary)
-
-HelpCommand()
-
-class InstallCommand(Command):
- name = 'install'
- usage = '%prog [OPTIONS] PACKAGE_NAMES...'
- summary = 'Install packages'
- bundle = False
-
- def __init__(self):
- super(InstallCommand, self).__init__()
- self.parser.add_option(
- '-e', '--editable',
- dest='editables',
- action='append',
- default=[],
- metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
- help='Install a package directly from a checkout. Source will be checked
'
- 'out into src/PACKAGE (lower-case) and installed in-place (using '
- 'setup.py develop). You can run this on an existing directory/checkout
(like '
- 'pip install -e src/mycheckout). This option may be provided multiple
times. '
- 'Possible values for VCS are: svn, git, hg and bzr.')
- self.parser.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='FILENAME',
- help='Install all the packages listed in the given requirements file.
'
- 'This option can be used multiple times.')
- self.parser.add_option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='URL',
- help='URL to look for packages at')
- self.parser.add_option(
- '-i', '--index-url',
- dest='index_url',
- metavar='URL',
- default=pypi_url,
- help='base URL of Python Package Index')
- self.parser.add_option(
- '--extra-index-url',
- dest='extra_index_urls',
- metavar='URL',
- action='append',
- default=[],
- help='extra URLs of package indexes to use in addition to
--index-url')
-
- self.parser.add_option(
- '-b', '--build', '--build-dir',
'--build-directory',
- dest='build_dir',
- metavar='DIR',
- default=None,
- help='Unpack packages into DIR (default %s) and build from there' %
base_prefix)
- self.parser.add_option(
- '--src', '--source',
- dest='src_dir',
- metavar='DIR',
- default=None,
- help='Check out --editable packages into DIR (default %s)' %
base_src_prefix)
-
- self.parser.add_option(
- '-U', '--upgrade',
- dest='upgrade',
- action='store_true',
- help='Upgrade all packages to the newest available version')
- self.parser.add_option(
- '-I', '--ignore-installed',
- dest='ignore_installed',
- action='store_true',
- help='Ignore the installed packages (reinstalling instead)')
- self.parser.add_option(
- '--no-install',
- dest='no_install',
- action='store_true',
- help="Download and unpack all packages, but don't actually install
them")
-
- self.parser.add_option(
- '--install-option',
- dest='install_options',
- action='append',
- help="Extra arguments to be supplied to the setup.py install "
- "command (use like
--install-option=\"--install-scripts=/usr/local/bin\"). "
- "Use multiple --install-option options to pass multiple options to
setup.py install. "
- "If you are using an option with a directory path, be sure to use
absolute path."
- )
-
- def run(self, options, args):
- if not options.build_dir:
- options.build_dir = base_prefix
- if not options.src_dir:
- options.src_dir = base_src_prefix
- options.build_dir = os.path.abspath(options.build_dir)
- options.src_dir = os.path.abspath(options.src_dir)
- install_options = options.install_options or []
- index_urls = [options.index_url] + options.extra_index_urls
- finder = PackageFinder(
- find_links=options.find_links,
- index_urls=index_urls)
- requirement_set = RequirementSet(
- build_dir=options.build_dir,
- src_dir=options.src_dir,
- upgrade=options.upgrade,
- ignore_installed=options.ignore_installed)
- for name in args:
- requirement_set.add_requirement(
- InstallRequirement.from_line(name, None))
- for name in options.editables:
- requirement_set.add_requirement(
- InstallRequirement.from_editable(name))
- for filename in options.requirements:
- for req in parse_requirements(filename, finder=finder):
- requirement_set.add_requirement(req)
- requirement_set.install_files(finder, force_root_egg_info=self.bundle)
- if not options.no_install and not self.bundle:
- requirement_set.install(install_options)
- logger.notify('Successfully installed %s' % requirement_set)
- elif not self.bundle:
- logger.notify('Successfully downloaded %s' % requirement_set)
- return requirement_set
-
-InstallCommand()
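
Each command class registers itself in the module-level _commands dict when
instantiated, which is why every class definition is followed by a bare
XCommand() call. A minimal sketch of driving the same code path
programmatically, assuming this file is importable as `pip`:

    import pip
    # equivalent to `pip install -r requirements.txt Moksha` on the command line
    pip.main(['install', '-r', 'requirements.txt', 'Moksha'])
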
-
-class BundleCommand(InstallCommand):
- name = 'bundle'
- usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
- summary = 'Create pybundles (archives containing multiple packages)'
- bundle = True
-
- def __init__(self):
- super(BundleCommand, self).__init__()
-
- def run(self, options, args):
- if not args:
- raise InstallationError('You must give a bundle filename')
- if not options.build_dir:
- options.build_dir = backup_dir(base_prefix, '-bundle')
- if not options.src_dir:
- options.src_dir = backup_dir(base_src_prefix, '-bundle')
- # We have to get everything when creating a bundle:
- options.ignore_installed = True
- logger.notify('Putting temporary build files in %s and source/develop files in %s'
- % (display_path(options.build_dir), display_path(options.src_dir)))
- bundle_filename = args[0]
- args = args[1:]
- requirement_set = super(BundleCommand, self).run(options, args)
- # FIXME: here it has to do something
- requirement_set.create_bundle(bundle_filename)
- logger.notify('Created bundle in %s' % bundle_filename)
- return requirement_set
-
-BundleCommand()
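
BundleCommand reuses the install machinery with `bundle = True`, so packages
are downloaded, checked out, and unpacked but never installed; create_bundle
then zips the build and source trees into the named .pybundle. A hedged
sketch of the round trip, with `myapp.pybundle` as an illustrative filename:

    import pip
    pip.main(['bundle', 'myapp.pybundle', '-r', 'requirements.txt'])  # create
    pip.main(['install', 'myapp.pybundle'])                           # consume
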
-
-class FreezeCommand(Command):
- name = 'freeze'
- usage = '%prog [OPTIONS] FREEZE_NAME.txt'
- summary = 'Put all currently installed packages (exact versions) into a requirements file'
-
- def __init__(self):
- super(FreezeCommand, self).__init__()
- self.parser.add_option(
- '-r', '--requirement',
- dest='requirement',
- action='store',
- default=None,
- metavar='FILENAME',
- help='Use the given requirements file as a hint about how to generate the new frozen requirements')
- self.parser.add_option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='URL',
- help='URL for finding packages, which will be added to the frozen requirements file')
-
- def run(self, options, args):
- if args:
- filename = args[0]
- else:
- filename = '-'
- requirement = options.requirement
- find_links = options.find_links or []
- ## FIXME: Obviously this should be settable:
- find_tags = False
-
- if filename == '-':
- logger.move_stdout_to_stderr()
- dependency_links = []
- if filename == '-':
- f = sys.stdout
- else:
- ## FIXME: should be possible to overwrite requirement file
- logger.notify('Writing frozen requirements to %s' % filename)
- f = open(filename, 'w')
- for dist in pkg_resources.working_set:
- if dist.has_metadata('dependency_links.txt'):
- dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
- for link in find_links:
- if '#egg=' in link:
- dependency_links.append(link)
- for link in find_links:
- f.write('-f %s\n' % link)
- installations = {}
- for dist in pkg_resources.working_set:
- if dist.key in ('setuptools', 'pip', 'python'):
- ## FIXME: also skip virtualenv?
- continue
- req = FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
- installations[req.name] = req
- if requirement:
- req_f = open(requirement)
- for line in req_f:
- if not line.strip() or line.strip().startswith('#'):
- f.write(line)
- continue
- elif line.startswith('-e') or line.startswith('--editable'):
- if line.startswith('-e'):
- line = line[2:].strip()
- else:
- line = line[len('--editable'):].strip().lstrip('=')
- line_req = InstallRequirement.from_editable(line)
- elif (line.startswith('-r') or line.startswith('--requirement')
- or line.startswith('-Z') or line.startswith('--always-unzip')):
- logger.debug('Skipping line %r' % line.strip())
- continue
- else:
- line_req = InstallRequirement.from_line(line)
- if not line_req.name:
- logger.notify("Skipping line because it's not clear what it
would install: %s"
- % line.strip())
- continue
- if line_req.name not in installations:
- logger.warn("Requirement file contains %s, but that package is
not installed"
- % line.strip())
- continue
- f.write(str(installations[line_req.name]))
- del installations[line_req.name]
- f.write('## The following requirements were added by pip --freeze:\n')
- for installation in sorted(installations.values(), key=lambda x: x.name):
- f.write(str(installation))
- if filename != '-':
- logger.notify('Put requirements in %s' % filename)
- f.close()
-
-FreezeCommand()
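
FreezeCommand writes one `name==version` line per installed distribution
(plus `-f` lines for any find-links), so its output is itself a valid
requirements file. A sketch of the round trip, assuming `frozen.txt` is a
writable path:

    import pip
    pip.main(['freeze', 'frozen.txt'])         # pin the current environment
    pip.main(['install', '-r', 'frozen.txt'])  # reproduce it elsewhere
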
-
-class ZipCommand(Command):
- name = 'zip'
- usage = '%prog [OPTIONS] PACKAGE_NAMES...'
- summary = 'Zip individual packages'
-
- def __init__(self):
- super(ZipCommand, self).__init__()
- if self.name == 'zip':
- self.parser.add_option(
- '--unzip',
- action='store_true',
- dest='unzip',
- help='Unzip (rather than zip) a package')
- else:
- self.parser.add_option(
- '--zip',
- action='store_false',
- dest='unzip',
- default=True,
- help='Zip (rather than unzip) a package')
- self.parser.add_option(
- '--no-pyc',
- action='store_true',
- dest='no_pyc',
- help='Do not include .pyc files in zip files (useful on Google App Engine)')
- self.parser.add_option(
- '-l', '--list',
- action='store_true',
- dest='list',
- help='List the packages available, and their zip status')
- self.parser.add_option(
- '--sort-files',
- action='store_true',
- dest='sort_files',
- help='With --list, sort packages according to how many files they contain')
- self.parser.add_option(
- '--path',
- action='append',
- dest='paths',
- help='Restrict operations to the given paths (may include wildcards)')
- self.parser.add_option(
- '-n', '--simulate',
- action='store_true',
- help='Do not actually perform the zip/unzip operation')
-
- def paths(self):
- """All the entries of sys.path, possibly restricted by
--path"""
- if not self.select_paths:
- return sys.path
- result = []
- match_any = set()
- for path in sys.path:
- path = os.path.normcase(os.path.abspath(path))
- for match in self.select_paths:
- match = os.path.normcase(os.path.abspath(match))
- if '*' in match:
- if re.search(fnmatch.translate(match+'*'), path):
- result.append(path)
- match_any.add(match)
- break
- else:
- if path.startswith(match):
- result.append(path)
- match_any.add(match)
- break
- else:
- logger.debug("Skipping path %s because it doesn't match
%s"
- % (path, ', '.join(self.select_paths)))
- for match in self.select_paths:
- if match not in match_any and '*' not in match:
- result.append(match)
- logger.debug("Adding path %s because it doesn't match anything
already on sys.path"
- % match)
- return result
-
- def run(self, options, args):
- self.select_paths = options.paths
- self.simulate = options.simulate
- if options.list:
- return self.list(options, args)
- if not args:
- raise InstallationError(
- 'You must give at least one package to zip or unzip')
- packages = []
- for arg in args:
- module_name, filename = self.find_package(arg)
- if options.unzip and os.path.isdir(filename):
- raise InstallationError(
- 'The module %s (in %s) is not a zip file; cannot be unzipped'
- % (module_name, filename))
- elif not options.unzip and not os.path.isdir(filename):
- raise InstallationError(
- 'The module %s (in %s) is not a directory; cannot be zipped'
- % (module_name, filename))
- packages.append((module_name, filename))
- last_status = None
- for module_name, filename in packages:
- if options.unzip:
- last_status = self.unzip_package(module_name, filename)
- else:
- last_status = self.zip_package(module_name, filename, options.no_pyc)
- return last_status
-
- def unzip_package(self, module_name, filename):
- zip_filename = os.path.dirname(filename)
- if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
- raise InstallationError(
- 'Module %s (in %s) isn\'t located in a zip file in %s'
- % (module_name, filename, zip_filename))
- package_path = os.path.dirname(zip_filename)
- if not package_path in self.paths():
- logger.warn(
- 'Unpacking %s into %s, but %s is not on sys.path'
- % (display_path(zip_filename), display_path(package_path),
- display_path(package_path)))
- logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
- if self.simulate:
- logger.notify('Skipping remaining operations because of --simulate')
- return
- logger.indent += 2
- try:
- ## FIXME: this should be undoable:
- zip = zipfile.ZipFile(zip_filename)
- to_save = []
- for name in zip.namelist():
- if name.startswith('%s/' % module_name):
- content = zip.read(name)
- dest = os.path.join(package_path, name)
- if not os.path.exists(os.path.dirname(dest)):
- os.makedirs(os.path.dirname(dest))
- if not content and dest.endswith('/'):
- if not os.path.exists(dest):
- os.makedirs(dest)
- else:
- f = open(dest, 'wb')
- f.write(content)
- f.close()
- else:
- to_save.append((name, zip.read(name)))
- zip.close()
- if not to_save:
- logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
- os.unlink(zip_filename)
- self.remove_filename_from_pth(zip_filename)
- else:
- logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
- zip = zipfile.ZipFile(zip_filename, 'w')
- for name, content in to_save:
- zip.writestr(name, content)
- zip.close()
- finally:
- logger.indent -= 2
-
- def zip_package(self, module_name, filename, no_pyc):
- orig_filename = filename
- logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
- logger.indent += 2
- if filename.endswith('.egg'):
- dest_filename = filename
- else:
- dest_filename = filename + '.zip'
- try:
- ## FIXME: I think this needs to be undoable:
- if filename == dest_filename:
- filename = backup_dir(orig_filename)
- logger.notify('Moving %s aside to %s' % (orig_filename, filename))
- if not self.simulate:
- shutil.move(orig_filename, filename)
- try:
- logger.info('Creating zip file in %s' % display_path(dest_filename))
- if not self.simulate:
- zip = zipfile.ZipFile(dest_filename, 'w')
- zip.writestr(module_name + '/', '')
- for dirpath, dirnames, filenames in os.walk(filename):
- if no_pyc:
- filenames = [f for f in filenames
- if not f.lower().endswith('.pyc')]
- for fns, is_dir in [(dirnames, True), (filenames, False)]:
- for fn in fns:
- full = os.path.join(dirpath, fn)
- dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
- if is_dir:
- zip.writestr(dest+'/', '')
- else:
- zip.write(full, dest)
- zip.close()
- logger.info('Removing old directory %s' % display_path(filename))
- if not self.simulate:
- shutil.rmtree(filename)
- except:
- ## FIXME: need to do an undo here
- raise
- ## FIXME: should also be undone:
- self.add_filename_to_pth(dest_filename)
- finally:
- logger.indent -= 2
-
- def remove_filename_from_pth(self, filename):
- for pth in self.pth_files():
- f = open(pth, 'r')
- lines = f.readlines()
- f.close()
- new_lines = [
- l for l in lines if l.strip() != filename]
- if lines != new_lines:
- logger.info('Removing reference to %s from .pth file %s'
- % (display_path(filename), display_path(pth)))
- if not filter(None, new_lines):
- logger.info('%s file would be empty: deleting' % display_path(pth))
- if not self.simulate:
- os.unlink(pth)
- else:
- if not self.simulate:
- f = open(pth, 'w')
- f.writelines(new_lines)
- f.close()
- return
- logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
-
- def add_filename_to_pth(self, filename):
- path = os.path.dirname(filename)
- dest = os.path.join(path, filename + '.pth')
- if path not in self.paths():
- logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
- if not self.simulate:
- if os.path.exists(dest):
- f = open(dest)
- lines = f.readlines()
- f.close()
- if lines and not lines[-1].endswith('\n'):
- lines[-1] += '\n'
- lines.append(filename+'\n')
- else:
- lines = [filename + '\n']
- f = open(dest, 'w')
- f.writelines(lines)
- f.close()
-
- def pth_files(self):
- for path in self.paths():
- if not os.path.exists(path) or not os.path.isdir(path):
- continue
- for filename in os.listdir(path):
- if filename.endswith('.pth'):
- yield os.path.join(path, filename)
-
- def find_package(self, package):
- for path in self.paths():
- full = os.path.join(path, package)
- if os.path.exists(full):
- return package, full
- if not os.path.isdir(path) and zipfile.is_zipfile(path):
- zip = zipfile.ZipFile(path, 'r')
- try:
- zip.read('%s/__init__.py' % package)
- except KeyError:
- pass
- else:
- zip.close()
- return package, full
- zip.close()
- ## FIXME: need special error for package.py case:
- raise InstallationError(
- 'No package with the name %s found' % package)
-
- def list(self, options, args):
- if args:
- raise InstallationError(
- 'You cannot give an argument with --list')
- for path in sorted(self.paths()):
- if not os.path.exists(path):
- continue
- basename = os.path.basename(path.rstrip(os.path.sep))
- if os.path.isfile(path) and zipfile.is_zipfile(path):
- if os.path.dirname(path) not in self.paths():
- logger.notify('Zipped egg: %s' % display_path(path))
- continue
- if (basename != 'site-packages'
- and not path.replace('\\', '/').endswith('lib/python')):
- continue
- logger.notify('In %s:' % display_path(path))
- logger.indent += 2
- zipped = []
- unzipped = []
- try:
- for filename in sorted(os.listdir(path)):
- ext = os.path.splitext(filename)[1].lower()
- if ext in ('.pth', '.egg-info', '.egg-link'):
- continue
- if ext == '.py':
- logger.info('Not displaying %s: not a package' % display_path(filename))
- continue
- full = os.path.join(path, filename)
- if os.path.isdir(full):
- unzipped.append((filename, self.count_package(full)))
- elif zipfile.is_zipfile(full):
- zipped.append(filename)
- else:
- logger.info('Unknown file: %s' % display_path(filename))
- if zipped:
- logger.notify('Zipped packages:')
- logger.indent += 2
- try:
- for filename in zipped:
- logger.notify(filename)
- finally:
- logger.indent -= 2
- else:
- logger.notify('No zipped packages.')
- if unzipped:
- if options.sort_files:
- unzipped.sort(key=lambda x: -x[1])
- logger.notify('Unzipped packages:')
- logger.indent += 2
- try:
- for filename, count in unzipped:
- logger.notify('%s (%i files)' % (filename, count))
- finally:
- logger.indent -= 2
- else:
- logger.notify('No unzipped packages.')
- finally:
- logger.indent -= 2
-
- def count_package(self, path):
- total = 0
- for dirpath, dirnames, filenames in os.walk(path):
- filenames = [f for f in filenames
- if not f.lower().endswith('.pyc')]
- total += len(filenames)
- return total
-
-ZipCommand()
-
-class UnzipCommand(ZipCommand):
- name = 'unzip'
- summary = 'Unzip individual packages'
-
-UnzipCommand()
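
`zip` moves a package directory aside, rewrites it as a .zip next to its
original location, and appends a .pth entry so the zipped form stays
importable; `unzip` reverses the transformation. A sketch, with `simplejson`
standing in for any pure-Python package on sys.path:

    import pip
    pip.main(['zip', '--no-pyc', 'simplejson'])  # zip it, skipping .pyc files
    pip.main(['zip', '-l'])                      # list zipped/unzipped packages
    pip.main(['unzip', 'simplejson'])            # restore the directory form
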
-
-
-def main(initial_args=None):
- if initial_args is None:
- initial_args = sys.argv[1:]
- options, args = parser.parse_args(initial_args)
- if options.help and not args:
- args = ['help']
- if not args:
- parser.error('You must give a command (use "pip help" to see a list of commands)')
- command = args[0].lower()
- ## FIXME: search for a command match?
- if command not in _commands:
- parser.error('No command by the name %s %s' % (os.path.basename(sys.argv[0]), command))
- command = _commands[command]
- return command.main(initial_args, args[1:], options)
-
-def get_proxy(proxystr=''):
- """Get the proxy given the option passed on the command line. If an
- empty string is passed it looks at the HTTP_PROXY environment
- variable."""
- if not proxystr:
- proxystr = os.environ.get('HTTP_PROXY', '')
- if proxystr:
- if '@' in proxystr:
- user_password, server_port = proxystr.split('@', 1)
- if ':' in user_password:
- user, password = user_password.split(':', 1)
- else:
- user = user_password
- import getpass
- prompt = 'Password for %s@%s: ' % (user, server_port)
- password = urllib.quote(getpass.getpass(prompt))
- return '%s:%s@%s' % (user, password, server_port)
- else:
- return proxystr
- else:
- return None
-
-def setup_proxy_handler(proxystr=''):
- """Set the proxy handler given the option passed on the command
- line. If an empty string is passed it looks at the HTTP_PROXY
- environment variable. """
- proxy = get_proxy(proxystr)
- if proxy:
- proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp":
proxy})
- opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
- urllib2.install_opener(opener)
-
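
get_proxy normalizes a `user:password@host:port` proxy string (prompting for
the password when only a user is given), and setup_proxy_handler installs the
result globally into urllib2, so every later urlopen in this module goes
through it. A sketch of the contract; the host name is illustrative:

    setup_proxy_handler('user:secret@proxy.example.com:3128')
    # get_proxy('') would instead fall back to $HTTP_PROXY,
    # returning None if that is unset too.
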
-def format_exc(exc_info=None):
- if exc_info is None:
- exc_info = sys.exc_info()
- out = StringIO()
- traceback.print_exception(*exc_info, **dict(file=out))
- return out.getvalue()
-
-def restart_in_venv(venv, args):
- """
- Restart this script using the interpreter in the given virtual environment
- """
- venv = os.path.abspath(venv)
- if not os.path.exists(venv):
- try:
- import virtualenv
- except ImportError:
- print 'The virtual environment does not exist: %s' % venv
- print 'and virtualenv is not installed, so a new environment cannot be created'
- sys.exit(3)
- print 'Creating new virtualenv environment in %s' % venv
- virtualenv.logger = logger
- logger.indent += 2
- ## FIXME: always have no_site_packages?
- virtualenv.create_environment(venv, site_packages=False)
- if sys.platform == 'win32':
- python = os.path.join(venv, 'Scripts', 'python.exe')
- else:
- python = os.path.join(venv, 'bin', 'python')
- if not os.path.exists(python):
- python = venv
- if not os.path.exists(python):
- raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
- base = os.path.dirname(os.path.dirname(python))
- file = __file__
- if file.endswith('.pyc'):
- file = file[:-1]
- call_subprocess([python, file] + args + [base, '___VENV_RESTART___'])
- sys.exit(0)
- #~ os.execv(python, )
-
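
restart_in_venv re-executes this same script with the virtualenv's
interpreter (the `___VENV_RESTART___` marker argument signals the restart),
creating the environment first via virtualenv, without site-packages, when it
does not yet exist. A sketch under those assumptions, with an illustrative
path; this is presumably what pip's virtualenv option of this era invokes:

    # roughly what an install redirected into a virtualenv performs
    restart_in_venv('/srv/moksha-venv', ['install', 'Moksha'])
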
-class PackageFinder(object):
- """This finds packages.
-
- This is meant to match easy_install's technique for looking for
- packages, by reading pages and looking for appropriate links
- """
-
- failure_limit = 3
-
- def __init__(self, find_links, index_urls):
- self.find_links = find_links
- self.index_urls = index_urls
- self.dependency_links = []
- self.cache = PageCache()
- # These are boring links that have already been logged somehow:
- self.logged_links = set()
-
- def add_dependency_links(self, links):
- ## FIXME: this shouldn't be a global list; it should only
- ## apply to requirements of the package that specifies the
- ## dependency_links value
- ## FIXME: also, we should track comes_from (i.e., use Link)
- self.dependency_links.extend(links)
-
- def find_requirement(self, req, upgrade):
- url_name = req.url_name
- # Check that we have the url_name correctly spelled:
- main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
- # This will also cache the page, so it's okay that we get it again later:
- page = self._get_page(main_index_url, req)
- if page is None:
- url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
- def mkurl_pypi_url(url):
- loc = posixpath.join(url, url_name)
- # For maximum compatibility with easy_install, ensure the path
- # ends in a trailing slash. Although this isn't in the spec
- # (and PyPI can handle it without the slash) some other index
- # implementations might break if they relied on easy_install's behavior.
- if not loc.endswith('/'):
- loc = loc + '/'
- return loc
- if url_name is not None:
- locations = [
- mkurl_pypi_url(url)
- for url in self.index_urls] + self.find_links
- else:
- locations = list(self.find_links)
- locations.extend(self.dependency_links)
- for version in req.absolute_versions:
- if url_name is not None:
- locations = [posixpath.join(url, url_name, version)] + locations
- locations = [Link(url) for url in locations]
- logger.debug('URLs to search for versions for %s:' % req)
- for location in locations:
- logger.debug('* %s' % location)
- found_versions = []
- found_versions.extend(
- self._package_versions(
- [Link(url, '-f') for url in self.find_links], req.name.lower()))
- for page in self._get_pages(locations, req):
- logger.debug('Analyzing links from page %s' % page.url)
- logger.indent += 2
- try:
- found_versions.extend(self._package_versions(page.links, req.name.lower()))
- finally:
- logger.indent -= 2
- dependency_versions = list(self._package_versions([Link(url) for url in self.dependency_links], req.name.lower()))
- if dependency_versions:
- logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
- found_versions.extend(dependency_versions)
- if not found_versions:
- logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
- raise DistributionNotFound('No distributions at all found for %s' % req)
- if req.satisfied_by is not None:
- found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
- found_versions.sort(reverse=True)
- applicable_versions = []
- for (parsed_version, link, version) in found_versions:
- if version not in req.req:
- logger.info("Ignoring link %s, version %s doesn't match
%s"
- % (link, version, ','.join([''.join(s) for s
in req.req.specs])))
- continue
- applicable_versions.append((link, version))
- existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
- if not upgrade and existing_applicable:
- if applicable_versions[0][1] is Inf:
- logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
- % req.satisfied_by.version)
- else:
- logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
- % (req.satisfied_by.version, applicable_versions[0][1]))
- return None
- if not applicable_versions:
- logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
- % (req, ', '.join([version for parsed_version, link, version in found_versions])))
- raise DistributionNotFound('No distributions matching the version for %s' % req)
- if applicable_versions[0][0] is Inf:
- # We have an existing version, and it's the best version
- logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
- % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
- return None
- if len(applicable_versions) > 1:
- logger.info('Using version %s (newest of versions: %s)' %
- (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
- return applicable_versions[0][0]
-
- def _find_url_name(self, index_url, url_name, req):
- """Finds the true URL name of a package, when the given name
isn't quite correct.
- This is usually used to implement case-insensitivity."""
- if not index_url.url.endswith('/'):
- # Vaguely part of the PyPI API... weird but true.
- ## FIXME: bad to modify this?
- index_url.url += '/'
- page = self._get_page(index_url, req)
- if page is None:
- logger.fatal('Cannot fetch index base URL %s' % index_url)
- raise DistributionNotFound('Cannot find requirement %s, nor fetch index URL %s' % (req, index_url))
- norm_name = normalize_name(req.url_name)
- for link in page.links:
- base = posixpath.basename(link.path.rstrip('/'))
- if norm_name == normalize_name(base):
- logger.notify('Real name of requirement %s is %s' % (url_name, base))
- return base
- return None
-
- def _get_pages(self, locations, req):
- """Yields (page, page_url) from the given locations, skipping
- locations that have errors, and adding download/homepage links"""
- pending_queue = Queue()
- for location in locations:
- pending_queue.put(location)
- done = []
- seen = set()
- threads = []
- for i in range(min(10, len(locations))):
- t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
- t.setDaemon(True)
- threads.append(t)
- t.start()
- for t in threads:
- t.join()
- return done
-
- _log_lock = threading.Lock()
-
- def _get_queued_page(self, req, pending_queue, done, seen):
- while 1:
- try:
- location = pending_queue.get(False)
- except QueueEmpty:
- return
- if location in seen:
- continue
- seen.add(location)
- page = self._get_page(location, req)
- if page is None:
- continue
- done.append(page)
- for link in page.rel_links():
- pending_queue.put(link)
-
- _egg_fragment_re = re.compile(r'#egg=([^&]*)')
- _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
- _py_version_re = re.compile(r'-py([123]\.[0-9])$')
-
- def _package_versions(self, links, search_name):
- seen_links = {}
- for link in links:
- if link.url in seen_links:
- continue
- seen_links[link.url] = None
- if link.egg_fragment:
- egg_info = link.egg_fragment
- else:
- path = link.path
- egg_info, ext = link.splitext()
- if not ext:
- if link not in self.logged_links:
- logger.debug('Skipping link %s; not a file' % link)
- self.logged_links.add(link)
- continue
- if egg_info.endswith('.tar'):
- # Special double-extension case:
- egg_info = egg_info[:-4]
- ext = '.tar' + ext
- if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
- if link not in self.logged_links:
- logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
- self.logged_links.add(link)
- continue
- version = self._egg_info_matches(egg_info, search_name, link)
- if version is None:
- logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
- continue
- match = self._py_version_re.search(version)
- if match:
- version = version[:match.start()]
- py_version = match.group(1)
- if py_version != sys.version[:3]:
- logger.debug('Skipping %s because Python version is incorrect' % link)
- continue
- logger.debug('Found link %s, version: %s' % (link, version))
- yield (pkg_resources.parse_version(version),
- link,
- version)
-
- def _egg_info_matches(self, egg_info, search_name, link):
- match = self._egg_info_re.search(egg_info)
- if not match:
- logger.debug('Could not parse version from link: %s' % link)
- return None
- name = match.group(0).lower()
- # To match the "safe" name that pkg_resources creates:
- name = name.replace('_', '-')
- if name.startswith(search_name.lower()):
- return match.group(0)[len(search_name):].lstrip('-')
- else:
- return None
-
- def _get_page(self, link, req):
- return HTMLPage.get_page(link, req, cache=self.cache)
-
-
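
PackageFinder.find_requirement fetches the index and find-links pages, parses
candidate versions out of link filenames (via _egg_info_re and
_py_version_re), filters them against the requirement's specs, and returns
the Link for the newest acceptable candidate, or None when the installed
version already wins. A standalone sketch, assuming the names from this
module are in scope; the requirement spec is illustrative:

    finder = PackageFinder(find_links=[], index_urls=[pypi_url])
    req = InstallRequirement.from_line('Pylons>=0.9.7')
    link = finder.find_requirement(req, upgrade=False)
    # e.g. a Link whose filename parses as Pylons-0.9.7.tar.gz
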
-class InstallRequirement(object):
-
- def __init__(self, req, comes_from, source_dir=None, editable=False, url=None, update=True):
- if isinstance(req, basestring):
- req = pkg_resources.Requirement.parse(req)
- self.req = req
- self.comes_from = comes_from
- self.source_dir = source_dir
- self.editable = editable
- self.url = url
- self._egg_info_path = None
- # This holds the pkg_resources.Distribution object if this requirement
- # is already available:
- self.satisfied_by = None
- self._temp_build_dir = None
- self._is_bundle = None
- # True if the editable should be updated:
- self.update = update
-
- @classmethod
- def from_editable(cls, editable_req, comes_from=None):
- name, url = parse_editable(editable_req)
- if url.startswith('file:'):
- source_dir = url_to_filename(url)
- else:
- source_dir = None
- return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
-
- @classmethod
- def from_line(cls, name, comes_from=None):
- """Creates an InstallRequirement from a name, which might be a
- requirement, filename, or URL.
- """
- url = None
- name = name.strip()
- req = name
- if is_url(name):
- url = name
- ## FIXME: I think getting the requirement here is a bad idea:
- #req = get_requirement_from_url(url)
- req = None
- elif is_filename(name):
- if not os.path.exists(name):
- logger.warn('Requirement %r looks like a filename, but the file does not exist'
- % name)
- url = filename_to_url(name)
- #req = get_requirement_from_url(url)
- req = None
- return cls(req, comes_from, url=url)
-
- def __str__(self):
- if self.req:
- s = str(self.req)
- if self.url:
- s += ' from %s' % self.url
- else:
- s = self.url
- if self.satisfied_by is not None:
- s += ' in %s' % display_path(self.satisfied_by.location)
- if self.comes_from:
- if isinstance(self.comes_from, basestring):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += ' (from %s)' % comes_from
- return s
-
- def from_path(self):
- s = str(self.req)
- if self.comes_from:
- if isinstance(self.comes_from, basestring):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- s += '->' + comes_from
- return s
-
- def build_location(self, build_dir):
- if self._temp_build_dir is not None:
- return self._temp_build_dir
- if self.req is None:
- self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
- self._ideal_build_dir = build_dir
- return self._temp_build_dir
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
- if not os.path.exists(build_dir):
- os.makedirs(build_dir)
- return os.path.join(build_dir, name)
-
- def correct_build_location(self):
- """If the build location was a temporary directory, this will move
it
- to a new more permanent location"""
- if self.source_dir is not None:
- return
- assert self.req is not None
- assert self._temp_build_dir
- old_location = self._temp_build_dir
- new_build_dir = self._ideal_build_dir
- del self._ideal_build_dir
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- new_location = os.path.join(new_build_dir, name)
- if not os.path.exists(new_build_dir):
- logger.debug('Creating directory %s' % new_build_dir)
- os.makedirs(new_build_dir)
- if os.path.exists(new_location):
- raise InstallationError(
- 'A package already exists in %s; please remove it to continue'
- % display_path(new_location))
- logger.debug('Moving package %s from %s to new location %s'
- % (self, display_path(old_location), display_path(new_location)))
- shutil.move(old_location, new_location)
- self._temp_build_dir = new_location
- self.source_dir = new_location
- self._egg_info_path = None
-
- @property
- def name(self):
- if self.req is None:
- return None
- return self.req.project_name
-
- @property
- def url_name(self):
- if self.req is None:
- return None
- return urllib.quote(self.req.unsafe_name)
-
- @property
- def setup_py(self):
- return os.path.join(self.source_dir, 'setup.py')
-
- def run_egg_info(self, force_root_egg_info=False):
- assert self.source_dir
- if self.name:
- logger.notify('Running setup.py egg_info for package %s' % self.name)
- else:
- logger.notify('Running setup.py egg_info for package from %s' % self.url)
- logger.indent += 2
- try:
- script = self._run_setup_py
- script = script.replace('__SETUP_PY__', repr(self.setup_py))
- script = script.replace('__PKG_NAME__', repr(self.name))
- # We can't put the .egg-info files at the root, because then the source code will be mistaken
- # for an installed egg, causing problems
- if self.editable or force_root_egg_info:
- egg_base_option = []
- else:
- egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
- if not os.path.exists(egg_info_dir):
- os.makedirs(egg_info_dir)
- egg_base_option = ['--egg-base', 'pip-egg-info']
- call_subprocess(
- [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
- cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
- command_level=Logger.VERBOSE_DEBUG,
- command_desc='python setup.py egg_info')
- finally:
- logger.indent -= 2
- if not self.req:
- self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
- self.correct_build_location()
-
- ## FIXME: This is a lame hack, entirely for PasteScript which has
- ## a self-provided entry point that causes this awkwardness
- _run_setup_py = """
-__file__ = __SETUP_PY__
-from setuptools.command import egg_info
-def replacement_run(self):
- self.mkpath(self.egg_info)
- installer = self.distribution.fetch_build_egg
- for ep in egg_info.iter_entry_points('egg_info.writers'):
- # require=False is the change we're making:
- writer = ep.load(require=False)
- writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
- self.find_sources()
-egg_info.egg_info.run = replacement_run
-execfile(__file__)
-"""
-
- def egg_info_data(self, filename):
- if self.satisfied_by is not None:
- if not self.satisfied_by.has_metadata(filename):
- return None
- return self.satisfied_by.get_metadata(filename)
- assert self.source_dir
- filename = self.egg_info_path(filename)
- if not os.path.exists(filename):
- return None
- fp = open(filename, 'r')
- data = fp.read()
- fp.close()
- return data
-
- def egg_info_path(self, filename):
- if self._egg_info_path is None:
- if self.editable:
- base = self.source_dir
- else:
- base = os.path.join(self.source_dir, 'pip-egg-info')
- filenames = os.listdir(base)
- if self.editable:
- filenames = []
- for root, dirs, files in os.walk(base):
- for dir in vcs.dirnames:
- if dir in dirs:
- dirs.remove(dir)
- filenames.extend([os.path.join(root, dir)
- for dir in dirs])
- filenames = [f for f in filenames if f.endswith('.egg-info')]
- assert len(filenames) == 1, "Unexpected files/directories in %s: %s" % (base, ' '.join(filenames))
- self._egg_info_path = os.path.join(base, filenames[0])
- return os.path.join(self._egg_info_path, filename)
-
- def egg_info_lines(self, filename):
- data = self.egg_info_data(filename)
- if not data:
- return []
- result = []
- for line in data.splitlines():
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- result.append(line)
- return result
-
- def pkg_info(self):
- p = FeedParser()
- data = self.egg_info_data('PKG-INFO')
- if not data:
- logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
- p.feed(data or '')
- return p.close()
-
- @property
- def dependency_links(self):
- return self.egg_info_lines('dependency_links.txt')
-
- _requirements_section_re = re.compile(r'\[(.*?)\]')
-
- def requirements(self, extras=()):
- in_extra = None
- for line in self.egg_info_lines('requires.txt'):
- match = self._requirements_section_re.match(line)
- if match:
- in_extra = match.group(1)
- continue
- if in_extra and in_extra not in extras:
- # Skip requirement for an extra we aren't requiring
- continue
- yield line
-
- @property
- def absolute_versions(self):
- for qualifier, version in self.req.specs:
- if qualifier == '==':
- yield version
-
- @property
- def installed_version(self):
- return self.pkg_info()['version']
-
- def assert_source_matches_version(self):
- assert self.source_dir
- if self.comes_from == 'command line':
- # We don't check the versions of things explicitly installed.
- # This makes, e.g., "pip Package==dev" possible
- return
- version = self.installed_version
- if version not in self.req:
- logger.fatal(
- 'Source in %s has the version %s, which does not match the requirement %s'
- % (display_path(self.source_dir), version, self))
- raise InstallationError(
- 'Source in %s has version %s that conflicts with %s'
- % (display_path(self.source_dir), version, self))
- else:
- logger.debug('Source in %s has version %s, which satisfies requirement %s'
- % (display_path(self.source_dir), version, self))
-
- def update_editable(self):
- if not self.url:
- logger.info("Cannot update repository at %s; repository location is
unknown" % self.source_dir)
- return
- assert self.editable
- assert self.source_dir
- if self.url.startswith('file:'):
- # Static paths don't get updated
- return
- assert '+' in self.url, "bad url: %r" % self.url
- if not self.update:
- return
- vc_type, url = self.url.split('+', 1)
- vc_type = vc_type.lower()
- version_control = vcs.get_backend(vc_type)
- if version_control:
- version_control(self.url).obtain(self.source_dir)
- else:
- assert 0, (
- 'Unexpected version control type (in %s): %s'
- % (self.url, vc_type))
-
- def install(self, install_options):
- if self.editable:
- self.install_editable()
- return
- ## FIXME: this is not a useful record:
- ## Also a bad location
- if sys.platform == 'win32':
- install_location = os.path.join(sys.prefix, 'Lib')
- else:
- install_location = os.path.join(sys.prefix, 'lib', 'python%s' % sys.version[:3])
- temp_location = tempfile.mkdtemp('-record', 'pip-')
- record_filename = os.path.join(temp_location, 'install-record.txt')
- ## FIXME: I'm not sure if this is a reasonable location; probably not
- ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
- header_dir = os.path.join(os.path.dirname(os.path.dirname(self.source_dir)), 'lib', 'include')
- logger.notify('Running setup.py install for %s' % self.name)
- logger.indent += 2
- try:
- call_subprocess(
- [sys.executable, '-c',
- "import setuptools; __file__=%r; execfile(%r)" %
(self.setup_py, self.setup_py),
- 'install', '--single-version-externally-managed',
'--record', record_filename,
- '--install-headers', header_dir] + install_options,
- cwd=self.source_dir, filter_stdout=self._filter_install,
show_stdout=False)
- finally:
- logger.indent -= 2
- f = open(record_filename)
- for line in f:
- line = line.strip()
- if line.endswith('.egg-info'):
- egg_info_dir = line
- break
- else:
- logger.warn('Could not find .egg-info directory in install record for %s' % self)
- ## FIXME: put the record somewhere
- return
- f.close()
- new_lines = []
- f = open(record_filename)
- for line in f:
- filename = line.strip()
- if os.path.isdir(filename):
- filename += os.path.sep
- new_lines.append(make_path_relative(filename, egg_info_dir))
- f.close()
- f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
- f.write('\n'.join(new_lines)+'\n')
- f.close()
-
- def remove_temporary_source(self):
- """Remove the source files from this requirement, if they are
marked
- for deletion"""
- if self.is_bundle or os.path.exists(self.delete_marker_filename):
- logger.info('Removing source in %s' % self.source_dir)
- if self.source_dir:
- shutil.rmtree(self.source_dir, ignore_errors=True, onerror=rmtree_errorhandler)
- self.source_dir = None
- if self._temp_build_dir and os.path.exists(self._temp_build_dir):
- shutil.rmtree(self._temp_build_dir, ignore_errors=True, onerror=rmtree_errorhandler)
- self._temp_build_dir = None
-
- def install_editable(self):
- logger.notify('Running setup.py develop for %s' % self.name)
- logger.indent += 2
- try:
- ## FIXME: should we do --install-headers here too?
- call_subprocess(
- [sys.executable, '-c',
- "import setuptools; __file__=%r; execfile(%r)" %
(self.setup_py, self.setup_py),
- 'develop', '--no-deps'], cwd=self.source_dir,
filter_stdout=self._filter_install,
- show_stdout=False)
- finally:
- logger.indent -= 2
-
- def _filter_install(self, line):
- level = Logger.NOTIFY
- for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
- r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
- r'^byte-compiling ',
- # Not sure what this warning is, but it seems harmless:
- r"^warning: manifest_maker: standard file '-c' not found$"]:
- if re.search(regex, line.strip()):
- level = Logger.INFO
- break
- return (level, line)
-
- def check_if_exists(self):
- """Checks if this requirement is satisfied by something already
installed"""
- if self.req is None:
- return False
- try:
- dist = pkg_resources.get_distribution(self.req)
- except pkg_resources.DistributionNotFound:
- return False
- self.satisfied_by = dist
- return True
-
- @property
- def is_bundle(self):
- if self._is_bundle is not None:
- return self._is_bundle
- base = self._temp_build_dir
- if not base:
- ## FIXME: this doesn't seem right:
- return False
- self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
- or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
- return self._is_bundle
-
- def bundle_requirements(self):
- base = self._temp_build_dir
- assert base
- src_dir = os.path.join(base, 'src')
- build_dir = os.path.join(base, 'build')
- if os.path.exists(src_dir):
- for package in os.listdir(src_dir):
- ## FIXME: svnism:
- for vcs_backend in vcs.backends:
- url = rev = None
- vcs_bundle_file = os.path.join(
- src_dir, package, vcs_backend.bundle_file)
- if os.path.exists(vcs_bundle_file):
- vc_type = vcs_backend.name
- fp = open(vcs_bundle_file)
- content = fp.read()
- fp.close()
- url, rev = vcs_backend().parse_vcs_bundle_file(content)
- break
- if url:
- url = '%s+%s@%s' % (vc_type, url, rev)
- else:
- url = None
- yield InstallRequirement(
- package, self, editable=True, url=url,
- update=False, source_dir=os.path.join(src_dir, package))
- if os.path.exists(build_dir):
- for package in os.listdir(build_dir):
- yield InstallRequirement(
- package, self,
- source_dir=os.path.join(build_dir, package))
-
- def move_bundle_files(self, dest_build_dir, dest_src_dir):
- base = self._temp_build_dir
- assert base
- src_dir = os.path.join(base, 'src')
- build_dir = os.path.join(base, 'build')
- for source_dir, dest_dir in [(src_dir, dest_src_dir),
- (build_dir, dest_build_dir)]:
- if os.path.exists(source_dir):
- for dirname in os.listdir(source_dir):
- dest = os.path.join(dest_dir, dirname)
- if os.path.exists(dest):
- logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
- % (dest, dirname, self))
- continue
- if not os.path.exists(dest_dir):
- logger.info('Creating directory %s' % dest_dir)
- os.makedirs(dest_dir)
- shutil.move(os.path.join(source_dir, dirname), dest)
-
- @property
- def delete_marker_filename(self):
- assert self.source_dir
- return os.path.join(self.source_dir, 'pip-delete-this-directory.txt')
-
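
InstallRequirement wraps a single thing to install, however it was specified:
from_line accepts a requirement spec, a local filename, or a URL, while
from_editable handles the VCS+URL#egg=name form used by -e. A sketch of the
entry points; the names and URL below are illustrative only:

    r1 = InstallRequirement.from_line('Moksha==0.5')
    r2 = InstallRequirement.from_line('Moksha-0.5.tar.gz')
    r3 = InstallRequirement.from_editable(
        'git+git://git.fedorahosted.org/git/moksha#egg=moksha')
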
-DELETE_MARKER_MESSAGE = '''\
-This file is placed here by pip to indicate the source was put
-here by pip.
-
-Once this package is successfully installed this source code will be
-deleted (unless you remove this file).
-'''
-
-class RequirementSet(object):
-
- def __init__(self, build_dir, src_dir, upgrade=False, ignore_installed=False):
- self.build_dir = build_dir
- self.src_dir = src_dir
- self.upgrade = upgrade
- self.ignore_installed = ignore_installed
- self.requirements = {}
- # Mapping of alias: real_name
- self.requirement_aliases = {}
- self.unnamed_requirements = []
-
- def __str__(self):
- reqs = [req for req in self.requirements.values()
- if not req.comes_from]
- reqs.sort(key=lambda req: req.name.lower())
- return ' '.join([str(req.req) for req in reqs])
-
- def add_requirement(self, install_req):
- name = install_req.name
- if not name:
- self.unnamed_requirements.append(install_req)
- else:
- if self.has_requirement(name):
- raise InstallationError(
- 'Double requirement given: %s (already in %s, name=%r)'
- % (install_req, self.get_requirement(name), name))
- self.requirements[name] = install_req
- ## FIXME: what about other normalizations? E.g., _ vs. -?
- if name.lower() != name:
- self.requirement_aliases[name.lower()] = name
-
- def has_requirement(self, project_name):
- for name in project_name, project_name.lower():
- if name in self.requirements or name in self.requirement_aliases:
- return True
- return False
-
- def get_requirement(self, project_name):
- for name in project_name, project_name.lower():
- if name in self.requirements:
- return self.requirements[name]
- if name in self.requirement_aliases:
- return self.requirements[self.requirement_aliases[name]]
- raise KeyError("No project with the name %r" % project_name)
-
- def install_files(self, finder, force_root_egg_info=False):
- unnamed = list(self.unnamed_requirements)
- reqs = self.requirements.values()
- while reqs or unnamed:
- if unnamed:
- req_to_install = unnamed.pop(0)
- else:
- req_to_install = reqs.pop(0)
- install = True
- if not self.ignore_installed and not req_to_install.editable and not self.upgrade:
- if req_to_install.check_if_exists():
- install = False
- if req_to_install.satisfied_by is not None and not self.upgrade:
- logger.notify('Requirement already satisfied: %s' % req_to_install)
- elif req_to_install.editable:
- logger.notify('Obtaining %s' % req_to_install)
- else:
- if req_to_install.url and req_to_install.url.lower().startswith('file:'):
- logger.notify('Unpacking %s' % display_path(url_to_filename(req_to_install.url)))
- else:
- logger.notify('Downloading/unpacking %s' % req_to_install)
- logger.indent += 2
- is_bundle = False
- try:
- if req_to_install.editable:
- if req_to_install.source_dir is None:
- location = req_to_install.build_location(self.src_dir)
- req_to_install.source_dir = location
- else:
- location = req_to_install.source_dir
- req_to_install.update_editable()
- req_to_install.run_egg_info()
- elif install:
- location = req_to_install.build_location(self.build_dir)
- ## FIXME: is the existence of the checkout good enough to use it? I don't think so.
- unpack = True
- if not os.path.exists(os.path.join(location, 'setup.py')):
- ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
- if req_to_install.url is None:
- url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
- else:
- ## FIXME: should req_to_install.url already be a link?
- url = Link(req_to_install.url)
- assert url
- if url:
- try:
- self.unpack_url(url, location)
- except urllib2.HTTPError, e:
- logger.fatal('Could not install requirement %s because of error %s'
- % (req_to_install, e))
- raise InstallationError(
- 'Could not install requirement %s because of HTTP error %s for URL %s'
- % (req_to_install, e, url))
- else:
- unpack = False
- if unpack:
- is_bundle = req_to_install.is_bundle
- if is_bundle:
- for subreq in req_to_install.bundle_requirements():
- reqs.append(subreq)
- self.add_requirement(subreq)
- req_to_install.move_bundle_files(self.build_dir, self.src_dir)
- else:
- req_to_install.source_dir = location
- req_to_install.run_egg_info()
- if force_root_egg_info:
- # We need to run this to make sure that the .egg-info/
- # directory is created for packing in the bundle
- req_to_install.run_egg_info(force_root_egg_info=True)
- req_to_install.assert_source_matches_version()
- f = open(req_to_install.delete_marker_filename, 'w')
- f.write(DELETE_MARKER_MESSAGE)
- f.close()
- if not is_bundle:
- ## FIXME: shouldn't be globally added:
- finder.add_dependency_links(req_to_install.dependency_links)
- ## FIXME: add extras in here:
- for req in req_to_install.requirements():
- try:
- name = pkg_resources.Requirement.parse(req).project_name
- except ValueError, e:
- ## FIXME: proper warning
- logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
- continue
- if self.has_requirement(name):
- ## FIXME: check for conflict
- continue
- subreq = InstallRequirement(req, req_to_install)
- reqs.append(subreq)
- self.add_requirement(subreq)
- if req_to_install.name not in self.requirements:
- self.requirements[req_to_install.name] = req_to_install
- else:
- req_to_install.remove_temporary_source()
- finally:
- logger.indent -= 2
-
- def unpack_url(self, link, location):
- for backend in vcs.backends:
- if link.scheme in backend.schemes:
- backend(link).unpack(location)
- return
- dir = tempfile.mkdtemp()
- if link.url.lower().startswith('file:'):
- source = url_to_filename(link.url)
- content_type = mimetypes.guess_type(source)
- self.unpack_file(source, location, content_type, link)
- return
- md5_hash = link.md5_hash
- target_url = link.url.split('#', 1)[0]
- target_file = None
- if os.environ.get('PIP_DOWNLOAD_CACHE'):
- target_file = os.path.join(os.environ['PIP_DOWNLOAD_CACHE'], urllib.quote(target_url, ''))
- if (target_file and os.path.exists(target_file)
- and os.path.exists(target_file+'.content-type')):
- fp = open(target_file+'.content-type')
- content_type = fp.read().strip()
- fp.close()
- if md5_hash:
- download_hash = md5()
- fp = open(target_file, 'rb')
- while 1:
- chunk = fp.read(4096)
- if not chunk:
- break
- download_hash.update(chunk)
- fp.close()
- temp_location = target_file
- logger.notify('Using download cache from %s' % target_file)
- else:
- try:
- resp = urllib2.urlopen(target_url)
- except urllib2.HTTPError, e:
- logger.fatal("HTTP error %s while getting %s" % (e.code,
link))
- raise
- except IOError, e:
- # Typically an FTP error
- logger.fatal("Error %s while getting %s" % (e, link))
- raise
- content_type = resp.info()['content-type']
- filename = link.filename
- ext = splitext(filename)
- if not ext:
- ext = mimetypes.guess_extension(content_type)
- filename += ext
- temp_location = os.path.join(dir, filename)
- fp = open(temp_location, 'wb')
- if md5_hash:
- download_hash = md5()
- try:
- total_length = int(resp.info()['content-length'])
- except (ValueError, KeyError):
- total_length = 0
- downloaded = 0
- show_progress = total_length > 40*1000 or not total_length
- show_url = link.show_url
- try:
- if show_progress:
- ## FIXME: the URL can get really long in this message:
- if total_length:
- logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
- else:
- logger.start_progress('Downloading %s (unknown size): ' % show_url)
- else:
- logger.notify('Downloading %s' % show_url)
- logger.debug('Downloading from URL %s' % link)
- while 1:
- chunk = resp.read(4096)
- if not chunk:
- break
- downloaded += len(chunk)
- if show_progress:
- if not total_length:
- logger.show_progress('%s' % format_size(downloaded))
- else:
- logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded)))
- if md5_hash:
- download_hash.update(chunk)
- fp.write(chunk)
- fp.close()
- finally:
- if show_progress:
- logger.end_progress('%s downloaded' % format_size(downloaded))
- if md5_hash:
- download_hash = download_hash.hexdigest()
- if download_hash != md5_hash:
- logger.fatal("MD5 hash of the package %s (%s) doesn't match the
expected hash %s!"
- % (link, download_hash, md5_hash))
- raise InstallationError('Bad MD5 hash for package %s' % link)
- self.unpack_file(temp_location, location, content_type, link)
- if target_file and target_file != temp_location:
- logger.notify('Storing download in cache at %s' % display_path(target_file))
- shutil.copyfile(temp_location, target_file)
- fp = open(target_file+'.content-type', 'w')
- fp.write(content_type)
- fp.close()
- os.unlink(temp_location)
-
- def unpack_file(self, filename, location, content_type, link):
- if (content_type == 'application/zip'
- or filename.endswith('.zip')
- or filename.endswith('.pybundle')
- or zipfile.is_zipfile(filename)):
- self.unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
- elif (content_type == 'application/x-gzip'
- or tarfile.is_tarfile(filename)
- or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz')):
- self.untar_file(filename, location)
- elif (content_type.startswith('text/html')
- and is_svn_page(file_contents(filename))):
- # We don't really care about this
- Subversion('svn+' + link.url).unpack(location)
- else:
- ## FIXME: handle?
- ## FIXME: magic signatures?
- logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
- % (filename, location, content_type))
- raise InstallationError('Cannot determine archive format of %s' % location)
-
- def unzip_file(self, filename, location, flatten=True):
- """Unzip the file (zip file located at filename) to the
destination
- location"""
- if not os.path.exists(location):
- os.makedirs(location)
- zipfp = open(filename, 'rb')
- try:
- zip = zipfile.ZipFile(zipfp)
- leading = has_leading_dir(zip.namelist()) and flatten
- for name in zip.namelist():
- data = zip.read(name)
- fn = name
- if leading:
- fn = split_leading_dir(name)[1]
- fn = os.path.join(location, fn)
- dir = os.path.dirname(fn)
- if not os.path.exists(dir):
- os.makedirs(dir)
- if fn.endswith('/') or fn.endswith('\\'):
- # A directory
- if not os.path.exists(fn):
- os.makedirs(fn)
- else:
- fp = open(fn, 'wb')
- try:
- fp.write(data)
- finally:
- fp.close()
- finally:
- zipfp.close()
-
- def untar_file(self, filename, location):
- """Untar the file (tar file located at filename) to the
destination location"""
- if not os.path.exists(location):
- os.makedirs(location)
- if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
- mode = 'r:gz'
- elif filename.lower().endswith('.bz2'):
- mode = 'r:bz2'
- elif filename.lower().endswith('.tar'):
- mode = 'r'
- else:
- logger.warn('Cannot determine compression type for file %s' % filename)
- mode = 'r:*'
- tar = tarfile.open(filename, mode)
- try:
- leading = has_leading_dir([member.name for member in tar.getmembers()])
- for member in tar.getmembers():
- fn = member.name
- if leading:
- fn = split_leading_dir(fn)[1]
- path = os.path.join(location, fn)
- if member.isdir():
- if not os.path.exists(path):
- os.makedirs(path)
- else:
- try:
- fp = tar.extractfile(member)
- except KeyError, e:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warn(
- 'In the tar file %s the member %s is invalid: %s'
- % (filename, member.name, e))
- continue
- if not os.path.exists(os.path.dirname(path)):
- os.makedirs(os.path.dirname(path))
- destfp = open(path, 'wb')
- try:
- shutil.copyfileobj(fp, destfp)
- finally:
- destfp.close()
- fp.close()
- finally:
- tar.close()
-
- def install(self, install_options):
- """Install everything in this set (after having downloaded and
unpacked the packages)"""
- requirements = sorted(self.requirements.values(), key=lambda p: p.name.lower())
- logger.notify('Installing collected packages: %s' % (',
'.join([req.name for req in requirements])))
- logger.indent += 2
- try:
- for requirement in self.requirements.values():
- if requirement.satisfied_by is not None:
- # Already installed
- continue
- requirement.install(install_options)
- requirement.remove_temporary_source()
- finally:
- logger.indent -= 2
-
- def create_bundle(self, bundle_filename):
- ## FIXME: can't decide which is better; zip is easier to read
- ## random files from, but tar.bz2 is smaller and not as lame a
- ## format.
-
- ## FIXME: this file should really include a manifest of the
- ## packages, maybe some other metadata files. It would make
- ## it easier to detect as well.
- zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
- vcs_dirs = []
- for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
- dir = os.path.normcase(os.path.abspath(dir))
- for dirpath, dirnames, filenames in os.walk(dir):
- for backend in vcs.backends:
- vcs_backend = backend()
- vcs_url = vcs_rev = None
- if vcs_backend.dirname in dirnames:
- for vcs_dir in vcs_dirs:
- if dirpath.startswith(vcs_dir):
- # vcs bundle file already in parent directory
- break
- else:
- vcs_url, vcs_rev = vcs_backend.get_info(
- os.path.join(dir, dirpath))
- vcs_dirs.append(dirpath)
- vcs_bundle_file = vcs_backend.bundle_file
- vcs_guide = vcs_backend.guide % {'url': vcs_url, 'rev': vcs_rev}
- dirnames.remove(vcs_backend.dirname)
- break
- if 'pip-egg-info' in dirnames:
- dirnames.remove('pip-egg-info')
- for dirname in dirnames:
- dirname = os.path.join(dirpath, dirname)
- name = self._clean_zip_name(dirname, dir)
- zip.writestr(basename + '/' + name + '/', '')
- for filename in filenames:
- if filename == 'pip-delete-this-directory.txt':
- continue
- filename = os.path.join(dirpath, filename)
- name = self._clean_zip_name(filename, dir)
- zip.write(filename, basename + '/' + name)
- if vcs_url:
- name = os.path.join(dirpath, vcs_bundle_file)
- name = self._clean_zip_name(name, dir)
- zip.writestr(basename + '/' + name, vcs_guide)
-
- zip.writestr('pip-manifest.txt', self.bundle_requirements())
- zip.close()
- # Unlike installation, this will always delete the build directories
- logger.info('Removing temporary build dir %s and source dir %s'
- % (self.build_dir, self.src_dir))
- for dir in self.build_dir, self.src_dir:
- if os.path.exists(dir):
- shutil.rmtree(dir)
-
-
- BUNDLE_HEADER = '''\
-# This is a pip bundle file, that contains many source packages
-# that can be installed as a group. You can install this like:
-# pip this_file.zip
-# The rest of the file contains a list of all the packages included:
-'''
-
- def bundle_requirements(self):
- parts = [self.BUNDLE_HEADER]
- for req in sorted(
- [req for req in self.requirements.values()
- if not req.comes_from],
- key=lambda x: x.name):
- parts.append('%s==%s\n' % (req.name, req.installed_version))
- parts.append('# These packages were installed to satisfy the above requirements:\n')
- for req in sorted(
- [req for req in self.requirements.values()
- if req.comes_from],
- key=lambda x: x.name):
- parts.append('%s==%s\n' % (req.name, req.installed_version))
- ## FIXME: should we do something with self.unnamed_requirements?
- return ''.join(parts)
-
- def _clean_zip_name(self, name, prefix):
- assert name.startswith(prefix+'/'), (
- "name %r doesn't start with prefix %r" % (name, prefix))
- name = name[len(prefix)+1:]
- name = name.replace(os.path.sep, '/')
- return name
-
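
RequirementSet drives a whole run: install_files loops over a growing
worklist, downloading, unpacking, and running egg_info on each requirement
while queueing any dependencies found in its requires.txt; install then runs
setup.py install over everything collected. A condensed sketch of what
InstallCommand.run wires together, with illustrative directories and package
name:

    finder = PackageFinder(find_links=[], index_urls=[pypi_url])
    requirement_set = RequirementSet(build_dir='build', src_dir='src',
                                     upgrade=False, ignore_installed=False)
    requirement_set.add_requirement(InstallRequirement.from_line('Moksha'))
    requirement_set.install_files(finder)  # download, unpack, resolve deps
    requirement_set.install([])            # setup.py install for each
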
-class HTMLPage(object):
- """Represents one page, along with its URL"""
-
- ## FIXME: these regexes are horrible hacks:
- _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
- _download_re = re.compile(r'<th>\s*download\s+url', re.I)
- ## These aren't so awful:
- _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
- _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
-
- def __init__(self, content, url, headers=None):
- self.content = content
- self.url = url
- self.headers = headers
-
- def __str__(self):
- return self.url
-
- @classmethod
- def get_page(cls, link, req, cache=None, skip_archives=True):
- url = link.url
- url = url.split('#', 1)[0]
-        if cache is not None and cache.too_many_failures(url):
- return None
- if url.lower().startswith('svn'):
- logger.debug('Cannot look at svn URL %s' % link)
- return None
- if cache is not None:
- inst = cache.get_page(url)
- if inst is not None:
- return inst
- try:
- if skip_archives:
- if cache is not None:
- if cache.is_archive(url):
- return None
- filename = link.filename
-                for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
- if filename.endswith(bad_ext):
- content_type = cls._get_content_type(url)
- if content_type.lower().startswith('text/html'):
- break
- else:
-                            logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
- if cache is not None:
- cache.set_is_archive(url)
- return None
- logger.debug('Getting page %s' % url)
- resp = urllib2.urlopen(url)
- real_url = resp.geturl()
- headers = resp.info()
- inst = cls(resp.read(), real_url, headers)
- except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error), e:
- desc = str(e)
- if isinstance(e, socket.timeout):
- log_meth = logger.warn
-                level = 1
- desc = 'timed out'
- elif isinstance(e, urllib2.URLError):
- log_meth = logger.warn
-                if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
- desc = 'timed out'
- level = 1
- else:
- level = 2
- elif isinstance(e, urllib2.HTTPError) and e.code == 404:
- ## FIXME: notify?
- log_meth = logger.info
- level = 2
- else:
- log_meth = logger.warn
- level = 1
- log_meth('Could not fetch URL %s: %s' % (link, desc))
-            log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
- if cache is not None:
- cache.add_page_failure(url, level)
- return None
- if cache is not None:
- cache.add_page([url, real_url], inst)
- return inst
-
- @staticmethod
- def _get_content_type(url):
- """Get the Content-Type of the given url, using a HEAD
request"""
- scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
- if scheme == 'http':
- ConnClass = httplib.HTTPConnection
- elif scheme == 'https':
- ConnClass = httplib.HTTPSConnection
- else:
- ## FIXME: some warning or something?
- ## assertion error?
- return ''
- if query:
- path += '?' + query
- conn = ConnClass(netloc)
- try:
- conn.request('HEAD', path, headers={'Host': netloc})
- resp = conn.getresponse()
- if resp.status != 200:
- ## FIXME: doesn't handle redirects
- return ''
- return resp.getheader('Content-Type') or ''
- finally:
- conn.close()
-
- @property
- def links(self):
- """Yields all links in the page"""
- for match in self._href_re.finditer(self.content):
- url = match.group(1) or match.group(2) or match.group(3)
- url = self.clean_link(urlparse.urljoin(self.url, url))
- yield Link(url, self)
-
- def rel_links(self):
- for url in self.explicit_rel_links():
- yield url
- for url in self.scraped_rel_links():
- yield url
-
- def explicit_rel_links(self, rels=('homepage', 'download')):
- """Yields all links with the given relations"""
- for match in self._rel_re.finditer(self.content):
- found_rels = match.group(1).lower().split()
- for rel in rels:
- if rel in found_rels:
- break
- else:
- continue
- match = self._href_re.search(match.group(0))
- if not match:
- continue
- url = match.group(1) or match.group(2) or match.group(3)
- url = self.clean_link(urlparse.urljoin(self.url, url))
- yield Link(url, self)
-
- def scraped_rel_links(self):
- for regex in (self._homepage_re, self._download_re):
- match = regex.search(self.content)
- if not match:
- continue
- href_match = self._href_re.search(self.content, pos=match.end())
- if not href_match:
- continue
-            url = href_match.group(1) or href_match.group(2) or href_match.group(3)
- if not url:
- continue
- url = self.clean_link(urlparse.urljoin(self.url, url))
- yield Link(url, self)
-
- _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
- def clean_link(self, url):
- """Makes sure a link is fully encoded. That is, if a ' '
shows up in
- the link, it will be rewritten to %20 (while not over-quoting
- % or other characters)."""
- return self._clean_re.sub(
- lambda match: '%%%2x' % ord(match.group(0)), url)
-
-class PageCache(object):
- """Cache of HTML pages"""
-
- failure_limit = 3
-
- def __init__(self):
- self._failures = {}
- self._pages = {}
- self._archives = {}
-
- def too_many_failures(self, url):
- return self._failures.get(url, 0) >= self.failure_limit
-
- def get_page(self, url):
- return self._pages.get(url)
-
- def is_archive(self, url):
- return self._archives.get(url, False)
-
- def set_is_archive(self, url, value=True):
- self._archives[url] = value
-
- def add_page_failure(self, url, level):
- self._failures[url] = self._failures.get(url, 0)+level
-
- def add_page(self, urls, page):
- for url in urls:
- self._pages[url] = page
-
-class Link(object):
-
- def __init__(self, url, comes_from=None):
- self.url = url
- self.comes_from = comes_from
-
- def __str__(self):
- if self.comes_from:
- return '%s (from %s)' % (self.url, self.comes_from)
- else:
- return self.url
-
- def __repr__(self):
- return '<Link %s>' % self
-
- @property
- def filename(self):
- url = self.url
- url = url.split('#', 1)[0]
- url = url.split('?', 1)[0]
- url = url.rstrip('/')
- name = posixpath.basename(url)
- assert name, (
- 'URL %r produced no filename' % url)
- return name
-
- @property
- def scheme(self):
- return urlparse.urlsplit(self.url)[0]
-
- @property
- def path(self):
- return urlparse.urlsplit(self.url)[2]
-
- def splitext(self):
- return splitext(posixpath.basename(self.path.rstrip('/')))
-
- _egg_fragment_re = re.compile(r'#egg=([^&]*)')
-
- @property
- def egg_fragment(self):
- match = self._egg_fragment_re.search(self.url)
- if not match:
- return None
- return match.group(1)
-
- _md5_re = re.compile(r'md5=([a-f0-9]+)')
-
- @property
- def md5_hash(self):
- match = self._md5_re.search(self.url)
- if match:
- return match.group(1)
- return None
-
- @property
- def show_url(self):
-        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
-
-############################################################
-## Writing freeze files
-
-
-class FrozenRequirement(object):
-
- def __init__(self, name, req, editable, comments=()):
- self.name = name
- self.req = req
- self.editable = editable
- self.comments = comments
-
- _rev_re = re.compile(r'-r(\d+)$')
- _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
-
- @classmethod
- def from_dist(cls, dist, dependency_links, find_tags=False):
- location = os.path.normcase(os.path.abspath(dist.location))
- comments = []
- if vcs.get_backend_name(location):
- editable = True
- req = get_src_requirement(dist, location, find_tags)
- if req is None:
-                logger.warn('Could not determine repository location of %s' % location)
- comments.append('## !! Could not determine repository location')
- req = dist.as_requirement()
- editable = False
- else:
- editable = False
- req = dist.as_requirement()
- specs = req.specs
- assert len(specs) == 1 and specs[0][0] == '=='
- version = specs[0][1]
- ver_match = cls._rev_re.search(version)
- date_match = cls._date_re.search(version)
- if ver_match or date_match:
- svn_backend = vcs.get_backend('svn')
- if svn_backend:
- svn_location = svn_backend(
- ).get_location(dist, dependency_links)
- if not svn_location:
- logger.warn(
- 'Warning: cannot find svn location for %s' % req)
-                        comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
- else:
-                        comments.append('# Installing as editable to satisfy requirement %s:' % req)
- if ver_match:
- rev = ver_match.group(1)
- else:
- rev = '{%s}' % date_match.group(1)
- editable = True
-                        req = 'svn+%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
- return cls(dist.project_name, req, editable, comments)
-
- @staticmethod
- def egg_name(dist):
- name = dist.egg_name()
- match = re.search(r'-py\d\.\d$', name)
- if match:
- name = name[:match.start()]
- return name
-
- def __str__(self):
- req = self.req
- if self.editable:
- req = '-e %s' % req
- return '\n'.join(list(self.comments)+[str(req)])+'\n'
-
-class VersionControl(object):
- name = ''
-
- def __init__(self, url=None, *args, **kwargs):
- self.url = url
- super(VersionControl, self).__init__(*args, **kwargs)
-
- def _filter(self, line):
- return (Logger.INFO, line)
-
- def get_url_rev(self):
- """
- Returns the correct repository URL and revision by parsing the given
- repository URL
- """
- url = self.url.split('+', 1)[1]
- scheme, netloc, path, query, frag = urlparse.urlsplit(url)
- rev = None
- if '@' in path:
- path, rev = path.rsplit('@', 1)
- url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
- return url, rev
-
- def parse_vcs_bundle_file(self, content):
- """
- Takes the contents of the bundled text file that explains how to revert
- the stripped off version control data of the given package and returns
- the URL and revision of it.
- """
- raise NotImplementedError
-
- def obtain(self, dest):
- """
- Called when installing or updating an editable package, takes the
- source path of the checkout.
- """
- raise NotImplementedError
-
- def unpack(self, location):
- raise NotImplementedError
-
- def get_src_requirement(self, dist, location, find_tags=False):
- raise NotImplementedError
-
-_svn_xml_url_re = re.compile('url="([^"]+)"')
-_svn_rev_re = re.compile('committed-rev="(\d+)"')
-_svn_url_re = re.compile(r'URL: (.+)')
-_svn_revision_re = re.compile(r'Revision: (.+)')
-
-class Subversion(VersionControl):
- name = 'svn'
- dirname = '.svn'
- schemes = ('svn', 'svn+ssh')
- bundle_file = 'svn-checkout.txt'
- guide = ('# This was an svn checkout; to make it a checkout again run:\n'
- 'svn checkout --force -r %(rev)s %(url)s .\n')
-
- def get_info(self, location):
- """Returns (url, revision), where both are
strings"""
- assert not location.rstrip('/').endswith('.svn'), 'Bad
directory: %s' % location
- output = call_subprocess(
-            ['svn', 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
- match = _svn_url_re.search(output)
- if not match:
-            logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
- logger.info('Output that cannot be parsed: \n%s' % output)
- return 'unknown', 'unknown'
- url = match.group(1).strip()
- match = _svn_revision_re.search(output)
- if not match:
-            logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
- logger.info('Output that cannot be parsed: \n%s' % output)
- return url, 'unknown'
- return url, match.group(1)
-
- def parse_vcs_bundle_file(self, content):
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- match = re.search(r'^-r\s*([^ ])?', line)
- if not match:
- return None, None
- rev = match.group(1)
- rest = line[match.end():].strip().split(None, 1)[0]
- return rest, rev
- return None, None
-
- def unpack(self, location):
- """Check out the svn repository at the url to the destination
location"""
- url, rev = self.get_url_rev()
- logger.notify('Checking out svn repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- # Subversion doesn't like to check out over an existing directory
- # --force fixes this, but was only added in svn 1.5
- shutil.rmtree(location, onerror=rmtree_errorhandler)
- call_subprocess(
- ['svn', 'checkout', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = ['-r', rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = []
- rev_display = ''
- checkout = True
- if os.path.exists(os.path.join(dest, '.svn')):
- existing_url = self.get_info(dest)[0]
- checkout = False
- if existing_url == url:
- logger.info('Checkout in %s exists, and has correct URL (%s)'
- % (display_path(dest), url))
- logger.notify('Updating checkout %s%s'
- % (display_path(dest), rev_display))
- call_subprocess(
- ['svn', 'update'] + rev_options + [dest])
- else:
- logger.warn('svn checkout in %s exists with URL %s'
- % (display_path(dest), existing_url))
- logger.warn('The plan is to install the svn repository %s'
- % url)
-            response = ask('What to do? (s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
- if response == 's':
- logger.notify('Switching checkout %s to %s%s'
- % (display_path(dest), url, rev_display))
- call_subprocess(
- ['svn', 'switch'] + rev_options + [url, dest])
- elif response == 'i':
- # do nothing
- pass
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(dest))
- shutil.rmtree(dest)
- checkout = True
- elif response == 'b':
- dest_dir = backup_dir(dest)
- logger.warn('Backing up %s to %s'
-                            % (display_path(dest), dest_dir))
- shutil.move(dest, dest_dir)
- checkout = True
- if checkout:
- logger.notify('Checking out %s%s to %s'
- % (url, rev_display, display_path(dest)))
- call_subprocess(
-                ['svn', 'checkout', '-q'] + rev_options + [url, dest])
-
- def get_location(self, dist, dependency_links):
- egg_fragment_re = re.compile(r'#egg=(.*)$')
- for url in dependency_links:
- egg_fragment = Link(url).egg_fragment
- if not egg_fragment:
- continue
- if '-' in egg_fragment:
- ## FIXME: will this work when a package has - in the name?
- key = '-'.join(egg_fragment.split('-')[:-1]).lower()
- else:
- key = egg_fragment
- if key == dist.key:
- return url.split('#', 1)[0]
- return None
-
- def get_revision(self, location):
- """
- Return the maximum revision for all files under a given location
- """
- # Note: taken from setuptools.command.egg_info
- revision = 0
-
- for base, dirs, files in os.walk(location):
- if '.svn' not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove('.svn')
- entries_fn = os.path.join(base, '.svn', 'entries')
- if not os.path.exists(entries_fn):
- ## FIXME: should we warn?
- continue
- f = open(entries_fn)
- data = f.read()
- f.close()
-
- if data.startswith('8') or data.startswith('9'):
- data = map(str.splitlines,data.split('\n\x0c\n'))
- del data[0][0] # get rid of the '8'
- dirurl = data[0][3]
- revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
- if revs:
- localrev = max(revs)
- else:
- localrev = 0
- elif data.startswith('<?xml'):
- dirurl = _svn_xml_url_re.search(data).group(1) # get repository URL
- revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
- if revs:
- localrev = max(revs)
- else:
- localrev = 0
- else:
- logger.warn("Unrecognized .svn/entries format; skipping %s",
base)
- dirs[:] = []
- continue
- if base == location:
- base_url = dirurl+'/' # save the root url
- elif not dirurl.startswith(base_url):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- revision = max(revision, localrev)
- return revision
-
- def get_url(self, location):
- # In cases where the source is in a subdirectory, not alongside setup.py
- # we have to look up in the location until we find a real setup.py
- orig_location = location
- while not os.path.exists(os.path.join(location, 'setup.py')):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
-                # We've traversed up to the root of the filesystem without finding setup.py
- logger.warn("Could not find setup.py for directory %s (tried all
parent directories)"
- % orig_location)
- return None
- f = open(os.path.join(location, '.svn', 'entries'))
- data = f.read()
- f.close()
- if data.startswith('8') or data.startswith('9'):
- data = map(str.splitlines,data.split('\n\x0c\n'))
- del data[0][0] # get rid of the '8'
- return data[0][3]
- elif data.startswith('<?xml'):
- match = _svn_xml_url_re.search(data)
- if not match:
- raise ValueError('Badly formatted data: %r' % data)
- return match.group(1) # get repository URL
- else:
- logger.warn("Unrecognized .svn/entries format in %s" % location)
- # Or raise exception?
- return None
-
- def get_tag_revs(self, svn_tag_url):
- stdout = call_subprocess(
- ['svn', 'ls', '-v', svn_tag_url], show_stdout=False)
- results = []
- for line in stdout.splitlines():
- parts = line.split()
- rev = int(parts[0])
- tag = parts[-1].strip('/')
- results.append((tag, rev))
- return results
-
- def find_tag_match(self, rev, tag_revs):
- best_match_rev = None
- best_tag = None
- for tag, tag_rev in tag_revs:
- if (tag_rev > rev and
- (best_match_rev is None or best_match_rev > tag_rev)):
- # FIXME: Is best_match > tag_rev really possible?
- # or is it a sign something is wacky?
- best_match_rev = tag_rev
- best_tag = tag
- return best_tag
-
- def get_src_requirement(self, dist, location, find_tags=False):
- repo = self.get_url(location)
- if repo is None:
- return None
- parts = repo.split('/')
- ## FIXME: why not project name?
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if parts[-2] in ('tags', 'tag'):
- # It's a tag, perfect!
- return 'svn+%s#egg=%s-%s' % (repo, egg_project_name, parts[-1])
- elif parts[-2] in ('branches', 'branch'):
- # It's a branch :(
- rev = self.get_revision(location)
-            return 'svn+%s@%s#egg=%s%s-r%s' % (repo, rev, dist.egg_name(), parts[-1], rev)
- elif parts[-1] == 'trunk':
- # Trunk :-/
- rev = self.get_revision(location)
- if find_tags:
- tag_url = '/'.join(parts[:-1]) + '/tags'
- tag_revs = self.get_tag_revs(tag_url)
- match = self.find_tag_match(rev, tag_revs)
- if match:
-                    logger.notify('trunk checkout %s seems to be equivalent to tag %s' % (location, match))
-                    return 'svn+%s/%s#egg=%s-%s' % (tag_url, match, egg_project_name, match)
- return 'svn+%s@%s#egg=%s-dev' % (repo, rev, dist.egg_name())
- else:
- # Don't know what it is
-            logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
- rev = self.get_revision(location)
- return 'svn+%s@%s#egg=%s-dev' % (repo, rev, egg_project_name)
-
-vcs.register(Subversion)
-
-
-class Git(VersionControl):
- name = 'git'
- dirname = '.git'
- schemes = ('git', 'git+http', 'git+ssh')
- bundle_file = 'git-clone.txt'
- guide = ('# This was a Git repo; to make it a repo again run:\n'
- 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
-
- def get_info(self, location):
- """Returns (url, revision), where both are
strings"""
- assert not location.rstrip('/').endswith('.git'), 'Bad
directory: %s' % location
- return self.get_url(location), self.get_revision(location)
-
- def parse_vcs_bundle_file(self, content):
- url = rev = None
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
- if url_match:
- url = url_match.group(1).strip()
- rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
- if rev_match:
- rev = rev_match.group(1).strip()
- if url and rev:
- return url, rev
- return None, None
-
- def unpack(self, location):
- """Clone the Git repository at the url to the destination
location"""
- url, rev = self.get_url_rev()
- logger.notify('Cloning Git repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- os.rmdir(location)
- call_subprocess(
- [GIT_CMD, 'clone', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = [rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = ['master']
- rev_display = ''
- clone = True
- if os.path.exists(os.path.join(dest, '.git')):
- existing_url = self.get_url(dest)
- clone = False
- if existing_url == url:
- logger.info('Clone in %s exists, and has correct URL (%s)'
- % (display_path(dest), url))
- logger.notify('Updating clone %s%s'
- % (display_path(dest), rev_display))
- call_subprocess([GIT_CMD, 'fetch', '-q'], cwd=dest)
- call_subprocess(
-                    [GIT_CMD, 'checkout', '-q', '-f'] + rev_options, cwd=dest)
- else:
- logger.warn('Git clone in %s exists with URL %s'
- % (display_path(dest), existing_url))
- logger.warn('The plan is to install the Git repository %s'
- % url)
-                response = ask('What to do? (s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
- if response == 's':
- logger.notify('Switching clone %s to %s%s'
- % (display_path(dest), url, rev_display))
- call_subprocess(
-                        [GIT_CMD, 'config', 'remote.origin.url', url], cwd=dest)
- call_subprocess(
-                        [GIT_CMD, 'checkout', '-q'] + rev_options, cwd=dest)
- elif response == 'i':
- # do nothing
- pass
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(dest))
- shutil.rmtree(dest)
- clone = True
- elif response == 'b':
- dest_dir = backup_dir(dest)
-                logger.warn('Backing up %s to %s' % (display_path(dest), dest_dir))
- shutil.move(dest, dest_dir)
- clone = True
- if clone:
-            logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
- call_subprocess(
- [GIT_CMD, 'clone', '-q', url, dest])
- call_subprocess(
- [GIT_CMD, 'checkout', '-q'] + rev_options, cwd=dest)
-
- def get_url(self, location):
- url = call_subprocess(
- [GIT_CMD, 'config', 'remote.origin.url'],
- show_stdout=False, cwd=location)
- return url.strip()
-
- def get_revision(self, location):
- current_rev = call_subprocess(
-            [GIT_CMD, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
- return current_rev.strip()
-
- def get_master_revision(self, location):
- master_rev = call_subprocess(
-            [GIT_CMD, 'rev-parse', 'master'], show_stdout=False, cwd=location)
- return master_rev.strip()
-
- def get_tag_revs(self, location):
- tags = call_subprocess(
- [GIT_CMD, 'tag'], show_stdout=False, cwd=location)
- tag_revs = []
- for line in tags.splitlines():
- tag = line.strip()
- rev = call_subprocess(
- [GIT_CMD, 'rev-parse', tag], show_stdout=False, cwd=location)
- tag_revs.append((rev.strip(), tag))
- tag_revs = dict(tag_revs)
- return tag_revs
-
- def get_branch_revs(self, location):
- branches = call_subprocess(
- [GIT_CMD, 'branch', '-r'], show_stdout=False, cwd=location)
- branch_revs = []
- for line in branches.splitlines():
- branch = "".join([b for b in line.split() if b != '*'])
- rev = call_subprocess(
- [GIT_CMD, 'rev-parse', branch], show_stdout=False, cwd=location)
- branch_revs.append((rev.strip(), branch))
- branch_revs = dict(branch_revs)
- return branch_revs
-
- def get_src_requirement(self, dist, location, find_tags):
- repo = self.get_url(location)
- if not repo.lower().startswith('git:'):
- repo = 'git+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
- current_rev = self.get_revision(location)
- tag_revs = self.get_tag_revs(location)
- master_rev = self.get_master_revision(location)
- branch_revs = self.get_branch_revs(location)
-
- if current_rev in tag_revs:
- # It's a tag, perfect!
- tag = tag_revs.get(current_rev, current_rev)
- return '%s@%s#egg=%s-%s' % (repo, tag, egg_project_name, tag)
- elif current_rev in branch_revs:
- # It's the head of a branch, nice too.
- branch = branch_revs.get(current_rev, current_rev)
-            return '%s@%s#egg=%s-%s' % (repo, current_rev, dist.egg_name(), current_rev)
- elif current_rev == master_rev:
- if find_tags:
- if current_rev in tag_revs:
- tag = tag_revs.get(current_rev, current_rev)
-                    logger.notify('Revision %s seems to be equivalent to tag %s' % (current_rev, tag))
-                    return '%s@%s#egg=%s-%s' % (repo, tag, egg_project_name, tag)
- return '%s@%s#egg=%s-dev' % (repo, master_rev, dist.egg_name())
- else:
- # Don't know what it is
- logger.warn('Git URL does not fit normal structure: %s' % repo)
- return '%s@%s#egg=%s-dev' % (repo, current_rev, egg_project_name)
-
- def get_url_rev(self):
- """
-        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
-        That's required because although they use SSH they sometimes don't
-        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
-        parsing. Hence we remove it again afterwards and return it as a stub.
- """
- if not '://' in self.url:
- self.url = self.url.replace('git+', 'git+ssh://')
- url, rev = super(Git, self).get_url_rev()
- url = url.replace('ssh://', '')
- return url, rev
- return super(Git, self).get_url_rev()
-
-vcs.register(Git)
-
-class Mercurial(VersionControl):
- name = 'hg'
- dirname = '.hg'
- schemes = ('hg', 'hg+http', 'hg+ssh')
- bundle_file = 'hg-clone.txt'
- guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
- 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
-
- def get_info(self, location):
- """Returns (url, revision), where both are
strings"""
- assert not location.rstrip('/').endswith('.hg'), 'Bad
directory: %s' % location
- return self.get_url(location), self.get_revision(location)
-
- def parse_vcs_bundle_file(self, content):
- url = rev = None
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
- if url_match:
- url = url_match.group(1).strip()
- rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
- if rev_match:
- rev = rev_match.group(1).strip()
- if url and rev:
- return url, rev
- return None, None
-
- def unpack(self, location):
- """Clone the Hg repository at the url to the destination
location"""
- url, rev = self.get_url_rev()
- logger.notify('Cloning Mercurial repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- os.rmdir(location)
- call_subprocess(
- ['hg', 'clone', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = [rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = ['default']
- rev_display = ''
- clone = True
- if os.path.exists(os.path.join(dest, '.hg')):
- existing_url = self.get_url(dest)
- clone = False
- if existing_url == url:
- logger.info('Clone in %s exists, and has correct URL (%s)'
- % (display_path(dest), url))
- logger.notify('Updating clone %s%s'
- % (display_path(dest), rev_display))
- call_subprocess(['hg', 'fetch', '-q'], cwd=dest)
- call_subprocess(
-                    ['hg', 'update', '-q'] + rev_options, cwd=dest)
- else:
- logger.warn('Mercurial clone in %s exists with URL %s'
- % (display_path(dest), existing_url))
- logger.warn('The plan is to install the Mercurial repository %s'
- % url)
-                response = ask('What to do? (s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
- if response == 's':
- logger.notify('Switching clone %s to %s%s'
- % (display_path(dest), url, rev_display))
- repo_config = os.path.join(dest, '.hg/hgrc')
- config = ConfigParser.SafeConfigParser()
- try:
-                        config_file = open(repo_config, 'r+b')
-                        config.readfp(config_file)
-                        config.set('paths', ''.join(rev_options), url)
-                        config_file.seek(0)
-                        config.write(config_file)
-                    except (IOError, OSError, ConfigParser.NoSectionError), e:
- logger.warn(
- 'Could not switch Mercurial repository to %s: %s'
- % (url, e))
- else:
- call_subprocess(
-                        ['hg', 'update', '-q'] + rev_options, cwd=dest)
- elif response == 'i':
- # do nothing
- pass
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(dest))
- shutil.rmtree(dest)
- clone = True
- elif response == 'b':
- dest_dir = backup_dir(dest)
-                logger.warn('Backing up %s to %s' % (display_path(dest), dest_dir))
- shutil.move(dest, dest_dir)
- clone = True
- if clone:
- logger.notify('Cloning hg %s%s to %s'
- % (url, rev_display, display_path(dest)))
- call_subprocess(['hg', 'clone', '-q', url, dest])
-            call_subprocess(['hg', 'update', '-q'] + rev_options, cwd=dest)
-
- def get_url(self, location):
- url = call_subprocess(
- ['hg', 'showconfig', 'paths.default'],
- show_stdout=False, cwd=location).strip()
- if url.startswith('/') or url.startswith('\\'):
- url = filename_to_url(url)
- return url.strip()
-
- def get_tip_revision(self, location):
- current_rev = call_subprocess(
-            ['hg', 'tip', '--template={rev}'], show_stdout=False, cwd=location)
- return current_rev.strip()
-
- def get_tag_revs(self, location):
- tags = call_subprocess(
- ['hg', 'tags'], show_stdout=False, cwd=location)
- tag_revs = []
- for line in tags.splitlines():
- tags_match = re.search(r'([\w-]+)\s*([\d]+):.*$', line)
- if tags_match:
- tag = tags_match.group(1)
- rev = tags_match.group(2)
- tag_revs.append((rev.strip(), tag.strip()))
- return dict(tag_revs)
-
- def get_branch_revs(self, location):
- branches = call_subprocess(
- ['hg', 'branches'], show_stdout=False, cwd=location)
- branch_revs = []
- for line in branches.splitlines():
- branches_match = re.search(r'([\w-]+)\s*([\d]+):.*$', line)
- if branches_match:
- branch = branches_match.group(1)
- rev = branches_match.group(2)
- branch_revs.append((rev.strip(), branch.strip()))
- return dict(branch_revs)
-
- def get_revision(self, location):
- current_branch = call_subprocess(
- ['hg', 'branch'], show_stdout=False, cwd=location).strip()
- branch_revs = self.get_branch_revs(location)
- for branch in branch_revs:
- if current_branch == branch_revs[branch]:
- return branch
- return self.get_tip_revision(location)
-
- def get_src_requirement(self, dist, location, find_tags):
- repo = self.get_url(location)
- if not repo.lower().startswith('hg:'):
- repo = 'hg+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
- current_rev = self.get_revision(location)
- tag_revs = self.get_tag_revs(location)
- branch_revs = self.get_branch_revs(location)
- tip_rev = self.get_tip_revision(location)
- if current_rev in tag_revs:
- # It's a tag, perfect!
- tag = tag_revs.get(current_rev, current_rev)
- return '%s@%s#egg=%s-%s' % (repo, tag, egg_project_name, tag)
- elif current_rev in branch_revs:
- # It's the tip of a branch, nice too.
- branch = branch_revs.get(current_rev, current_rev)
-            return '%s@%s#egg=%s-%s' % (repo, branch, dist.egg_name(), current_rev)
- elif current_rev == tip_rev:
- if find_tags:
- if current_rev in tag_revs:
- tag = tag_revs.get(current_rev, current_rev)
-                    logger.notify('Revision %s seems to be equivalent to tag %s' % (current_rev, tag))
-                    return '%s@%s#egg=%s-%s' % (repo, tag, egg_project_name, tag)
- return '%s@%s#egg=%s-dev' % (repo, tip_rev, dist.egg_name())
- else:
- # Don't know what it is
-            logger.warn('Mercurial URL does not fit normal structure: %s' % repo)
- return '%s@%s#egg=%s-dev' % (repo, current_rev, egg_project_name)
-
-vcs.register(Mercurial)
-
-
-class Bazaar(VersionControl):
- name = 'bzr'
- dirname = '.bzr'
- bundle_file = 'bzr-branch.txt'
-    schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp')
- guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
- 'bzr branch -r %(rev)s %(url)s .\n')
-
- def get_info(self, location):
- """Returns (url, revision), where both are
strings"""
- assert not location.rstrip('/').endswith('.bzr'), 'Bad
directory: %s' % location
- return self.get_url(location), self.get_revision(location)
-
- def parse_vcs_bundle_file(self, content):
- url = rev = None
- for line in content.splitlines():
- if not line.strip() or line.strip().startswith('#'):
- continue
- match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
- if match:
- rev = match.group(1).strip()
- url = line[match.end():].strip().split(None, 1)[0]
- if url and rev:
- return url, rev
- return None, None
-
- def unpack(self, location):
- """Get the bzr branch at the url to the destination
location"""
- url, rev = self.get_url_rev()
- logger.notify('Checking out bzr repository %s to %s' % (url, location))
- logger.indent += 2
- try:
- if os.path.exists(location):
- os.rmdir(location)
- call_subprocess(
- [BZR_CMD, 'branch', url, location],
- filter_stdout=self._filter, show_stdout=False)
- finally:
- logger.indent -= 2
-
- def obtain(self, dest):
- url, rev = self.get_url_rev()
- if rev:
- rev_options = ['-r', rev]
- rev_display = ' (to revision %s)' % rev
- else:
- rev_options = []
- rev_display = ''
- branch = True
- update = False
- if os.path.exists(os.path.join(dest, '.bzr')):
- existing_url = self.get_url(dest)
- branch = False
- if existing_url == url:
- logger.info('Checkout in %s exists, and has correct URL (%s)'
- % (display_path(dest), url))
- logger.notify('Updating branch %s%s'
- % (display_path(dest), rev_display))
- branch = update = True
- else:
- logger.warn('Bazaar branch in %s exists with URL %s'
- % (display_path(dest), existing_url))
- logger.warn('The plan is to install the Bazaar repository %s'
- % url)
-                response = ask('What to do? (s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
- if response == 's':
- logger.notify('Switching branch %s to %s%s'
- % (display_path(dest), url, rev_display))
- call_subprocess([BZR_CMD, 'switch', url], cwd=dest)
- elif response == 'i':
- # do nothing
- pass
- elif response == 'w':
- logger.warn('Deleting %s' % display_path(dest))
- shutil.rmtree(dest)
- branch = True
- elif response == 'b':
- dest_dir = backup_dir(dest)
-                logger.warn('Backing up %s to %s' % (display_path(dest), dest_dir))
- shutil.move(dest, dest_dir)
- branch = True
- if branch:
- logger.notify('Checking out %s%s to %s'
- % (url, rev_display, display_path(dest)))
- # FIXME: find a better place to hotfix the URL scheme
- # after removing bzr+ from bzr+ssh:// readd it
- if url.startswith('ssh://'):
- url = 'bzr+' + url
- if update:
- call_subprocess(
-                    [BZR_CMD, 'pull', '-q'] + rev_options + [url], cwd=dest)
- else:
- call_subprocess(
-                    [BZR_CMD, 'branch', '-q'] + rev_options + [url, dest])
-
- def get_url(self, location):
- urls = call_subprocess(
- [BZR_CMD, 'info'], show_stdout=False, cwd=location)
- for line in urls.splitlines():
- line = line.strip()
- for x in ('checkout of branch: ',
- 'repository branch: ',
- 'parent branch: '):
- if line.startswith(x):
- return line.split(x)[1]
- return None
-
- def get_revision(self, location):
- revision = call_subprocess(
- [BZR_CMD, 'revno'], show_stdout=False, cwd=location)
- return revision.strip()
-
- def get_newest_revision(self, location):
- url = self.get_url(location)
- revision = call_subprocess(
- [BZR_CMD, 'revno', url], show_stdout=False, cwd=location)
- return revision.strip()
-
- def get_tag_revs(self, location):
- tags = call_subprocess(
- [BZR_CMD, 'tags'], show_stdout=False, cwd=location)
- tag_revs = []
- for line in tags.splitlines():
- tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
- if tags_match:
- tag = tags_match.group(1)
- rev = tags_match.group(2)
- tag_revs.append((rev.strip(), tag.strip()))
- return dict(tag_revs)
-
- def get_src_requirement(self, dist, location, find_tags):
- repo = self.get_url(location)
- if not repo.lower().startswith('bzr:'):
- repo = 'bzr+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
- current_rev = self.get_revision(location)
- tag_revs = self.get_tag_revs(location)
- newest_rev = self.get_newest_revision(location)
- if current_rev in tag_revs:
- # It's a tag, perfect!
- tag = tag_revs.get(current_rev, current_rev)
- return '%s@%s#egg=%s-%s' % (repo, tag, egg_project_name, tag)
- elif current_rev == newest_rev:
- if find_tags:
- if current_rev in tag_revs:
- tag = tag_revs.get(current_rev, current_rev)
-                    logger.notify('Revision %s seems to be equivalent to tag %s' % (current_rev, tag))
-                    return '%s@%s#egg=%s-%s' % (repo, tag, egg_project_name, tag)
- return '%s@%s#egg=%s-dev' % (repo, newest_rev, dist.egg_name())
- else:
- # Don't know what it is
- logger.warn('Bazaar URL does not fit normal structure: %s' % repo)
- return '%s@%s#egg=%s-dev' % (repo, current_rev, egg_project_name)
-
-vcs.register(Bazaar)
-
-def get_src_requirement(dist, location, find_tags):
- version_control = vcs.get_backend_from_location(location)
- if version_control:
- return version_control().get_src_requirement(dist, location, find_tags)
-    logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
- return dist.as_requirement()
-
-############################################################
-## Requirement files
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
-def get_file_content(url, comes_from=None):
- """Gets the content of a file; it may be a filename, file: URL, or
- http: URL. Returns (location, content)"""
- match = _scheme_re.search(url)
- if match:
- scheme = match.group(1).lower()
- if (scheme == 'file' and comes_from
- and comes_from.startswith('http')):
- raise InstallationError(
- 'Requirements file %s references URL %s, which is local'
- % (comes_from, url))
- if scheme == 'file':
- path = url.split(':', 1)[1]
- path = path.replace('\\', '/')
- match = _url_slash_drive_re.match(path)
- if match:
- path = match.group(1) + ':' + path.split('|', 1)[1]
- path = urllib.unquote(path)
- if path.startswith('/'):
- path = '/' + path.lstrip('/')
- url = path
- else:
- ## FIXME: catch some errors
- resp = urllib2.urlopen(url)
- return resp.geturl(), resp.read()
- f = open(url)
- content = f.read()
- f.close()
- return url, content
-
-def parse_requirements(filename, finder, comes_from=None):
- skip_match = None
- if os.environ.get('PIP_SKIP_REQUIREMENTS_REGEX'):
- skip_match = re.compile(os.environ['PIP_SKIP_REQUIREMENTS_REGEX'])
- filename, content = get_file_content(filename, comes_from=comes_from)
- for line_number, line in enumerate(content.splitlines()):
- line_number += 1
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- if skip_match and skip_match.search(line):
- continue
- if line.startswith('-r') or line.startswith('--requirement'):
- if line.startswith('-r'):
- req_url = line[2:].strip()
- else:
- req_url = line[len('--requirement'):].strip().strip('=')
- if _scheme_re.search(filename):
- # Relative to a URL
-                req_url = urlparse.urljoin(filename, req_url)
- elif not _scheme_re.search(req_url):
- req_url = os.path.join(os.path.dirname(filename), req_url)
- for item in parse_requirements(req_url, finder, comes_from=filename):
- yield item
- elif line.startswith('-Z') or line.startswith('--always-unzip'):
- # No longer used, but previously these were used in
- # requirement files, so we'll ignore.
- pass
- elif line.startswith('-f') or line.startswith('--find-links'):
- if line.startswith('-f'):
- line = line[2:].strip()
- else:
- line = line[len('--find-links'):].strip().lstrip('=')
- ## FIXME: it would be nice to keep track of the source of
- ## the find_links:
- finder.find_links.append(line)
- else:
- comes_from = '-r %s (line %s)' % (filename, line_number)
- if line.startswith('-e') or line.startswith('--editable'):
- if line.startswith('-e'):
- line = line[2:].strip()
- else:
- line = line[len('--editable'):].strip()
- req = InstallRequirement.from_editable(
- line, comes_from)
- else:
- req = InstallRequirement.from_line(line, comes_from)
- yield req
-
-############################################################
-## Logging
-
-
-
-class Logger(object):
-
- """
- Logging object for use in command-line script. Allows ranges of
- levels, to avoid some redundancy of displayed information.
- """
-
- VERBOSE_DEBUG = logging.DEBUG-1
- DEBUG = logging.DEBUG
- INFO = logging.INFO
- NOTIFY = (logging.INFO+logging.WARN)/2
- WARN = WARNING = logging.WARN
- ERROR = logging.ERROR
- FATAL = logging.FATAL
-
- LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
-
- def __init__(self, consumers):
- self.consumers = consumers
- self.indent = 0
- self.explicit_levels = False
- self.in_progress = None
- self.in_progress_hanging = False
-
- def debug(self, msg, *args, **kw):
- self.log(self.DEBUG, msg, *args, **kw)
- def info(self, msg, *args, **kw):
- self.log(self.INFO, msg, *args, **kw)
- def notify(self, msg, *args, **kw):
- self.log(self.NOTIFY, msg, *args, **kw)
- def warn(self, msg, *args, **kw):
- self.log(self.WARN, msg, *args, **kw)
- def error(self, msg, *args, **kw):
-        self.log(self.ERROR, msg, *args, **kw)
- def fatal(self, msg, *args, **kw):
- self.log(self.FATAL, msg, *args, **kw)
- def log(self, level, msg, *args, **kw):
- if args:
- if kw:
- raise TypeError(
- "You may give positional or keyword arguments, not both")
- args = args or kw
- rendered = None
- for consumer_level, consumer in self.consumers:
- if self.level_matches(level, consumer_level):
- if (self.in_progress_hanging
- and consumer in (sys.stdout, sys.stderr)):
- self.in_progress_hanging = False
- sys.stdout.write('\n')
- sys.stdout.flush()
- if rendered is None:
- if args:
- rendered = msg % args
- else:
- rendered = msg
- rendered = ' '*self.indent + rendered
- if self.explicit_levels:
- ## FIXME: should this be a name, not a level number?
- rendered = '%02i %s' % (level, rendered)
- if hasattr(consumer, 'write'):
- consumer.write(rendered+'\n')
- else:
- consumer(rendered)
-
- def start_progress(self, msg):
- assert not self.in_progress, (
- "Tried to start_progress(%r) while in_progress %r"
- % (msg, self.in_progress))
- if self.level_matches(self.NOTIFY, self._stdout_level()):
- sys.stdout.write(' '*self.indent + msg)
- sys.stdout.flush()
- self.in_progress_hanging = True
- else:
- self.in_progress_hanging = False
- self.in_progress = msg
- self.last_message = None
-
- def end_progress(self, msg='done.'):
- assert self.in_progress, (
- "Tried to end_progress without start_progress")
- if self.stdout_level_matches(self.NOTIFY):
- if not self.in_progress_hanging:
- # Some message has been printed out since start_progress
- sys.stdout.write('...' + self.in_progress + msg + '\n')
- sys.stdout.flush()
- else:
- # These erase any messages shown with show_progress (besides .'s)
- logger.show_progress('')
- logger.show_progress('')
- sys.stdout.write(msg + '\n')
- sys.stdout.flush()
- self.in_progress = None
- self.in_progress_hanging = False
-
- def show_progress(self, message=None):
- """If we are in a progress scope, and no log messages have been
- shown, write out another '.'"""
- if self.in_progress_hanging:
- if message is None:
- sys.stdout.write('.')
- sys.stdout.flush()
- else:
- if self.last_message:
- padding = ' ' * max(0, len(self.last_message)-len(message))
- else:
- padding = ''
-                sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
- sys.stdout.flush()
- self.last_message = message
-
- def stdout_level_matches(self, level):
- """Returns true if a message at this level will go to
stdout"""
- return self.level_matches(level, self._stdout_level())
-
- def _stdout_level(self):
- """Returns the level that stdout runs at"""
- for level, consumer in self.consumers:
- if consumer is sys.stdout:
- return level
- return self.FATAL
-
- def level_matches(self, level, consumer_level):
- """
-        >>> l = Logger([])
- >>> l.level_matches(3, 4)
- False
- >>> l.level_matches(3, 2)
- True
- >>> l.level_matches(slice(None, 3), 3)
- False
- >>> l.level_matches(slice(None, 3), 2)
- True
- >>> l.level_matches(slice(1, 3), 1)
- True
- >>> l.level_matches(slice(2, 3), 1)
- False
- """
- if isinstance(level, slice):
- start, stop = level.start, level.stop
- if start is not None and start > consumer_level:
- return False
-            if stop is not None and stop <= consumer_level:
- return False
- return True
- else:
- return level >= consumer_level
-
- @classmethod
- def level_for_integer(cls, level):
- levels = cls.LEVELS
- if level < 0:
- return levels[0]
- if level >= len(levels):
- return levels[-1]
- return levels[level]
-
- def move_stdout_to_stderr(self):
- to_remove = []
- to_add = []
- for consumer_level, consumer in self.consumers:
- if consumer == sys.stdout:
- to_remove.append((consumer_level, consumer))
- to_add.append((consumer_level, sys.stderr))
- for item in to_remove:
- self.consumers.remove(item)
- self.consumers.extend(to_add)
-
-
-def call_subprocess(cmd, show_stdout=True,
- filter_stdout=None, cwd=None,
- raise_on_returncode=True,
- command_level=Logger.DEBUG, command_desc=None,
- extra_environ=None):
- if command_desc is None:
- cmd_parts = []
- for part in cmd:
- if ' ' in part or '\n' in part or '"' in part or
"'" in part:
- part = '"%s"' % part.replace('"',
'\\"')
- cmd_parts.append(part)
- command_desc = ' '.join(cmd_parts)
- if show_stdout:
- stdout = None
- else:
- stdout = subprocess.PIPE
- logger.log(command_level, "Running command %s" % command_desc)
- env = os.environ.copy()
- if extra_environ:
- env.update(extra_environ)
- try:
- proc = subprocess.Popen(
- cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
- cwd=cwd, env=env)
- except Exception, e:
- logger.fatal(
- "Error %s while executing command %s" % (e, command_desc))
- raise
- all_output = []
- if stdout is not None:
- stdout = proc.stdout
- while 1:
- line = stdout.readline()
- if not line:
- break
- line = line.rstrip()
- all_output.append(line + '\n')
- if filter_stdout:
- level = filter_stdout(line)
- if isinstance(level, tuple):
- level, line = level
- logger.log(level, line)
- if not logger.stdout_level_matches(level):
- logger.show_progress()
- else:
- logger.info(line)
- else:
- returned_stdout, returned_stderr = proc.communicate()
- all_output = [returned_stdout or '']
- proc.wait()
- if proc.returncode:
- if raise_on_returncode:
- if all_output:
- logger.notify('Complete output from command %s:' % command_desc)
-                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
- raise InstallationError(
- "Command %s failed with error code %s"
- % (command_desc, proc.returncode))
- else:
- logger.warn(
- "Command %s had error code %s"
- % (command_desc, proc.returncode))
- if stdout is not None:
- return ''.join(all_output)
-
-############################################################
-## Utility functions
-
-def is_svn_page(html):
- """Returns true if the page appears to be the index page of an svn
repository"""
- return (re.search(r'<title>[^<]*Revision \d+:', html)
-            and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
-
-def file_contents(filename):
- fp = open(filename, 'rb')
- try:
- return fp.read()
- finally:
- fp.close()
-
-def split_leading_dir(path):
- path = str(path)
- path = path.lstrip('/').lstrip('\\')
-    if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
- or '\\' not in path):
- return path.split('/', 1)
- elif '\\' in path:
- return path.split('\\', 1)
- else:
- return path, ''
-
-def has_leading_dir(paths):
- """Returns true if all the paths have the same leading path name
- (i.e., everything is in one subdirectory in an archive)"""
- common_prefix = None
- for path in paths:
- prefix, rest = split_leading_dir(path)
- if not prefix:
- return False
- elif common_prefix is None:
- common_prefix = prefix
- elif prefix != common_prefix:
- return False
- return True
-
-def format_size(bytes):
- if bytes > 1000*1000:
- return '%.1fMb' % (bytes/1000.0/1000)
- elif bytes > 10*1000:
- return '%iKb' % (bytes/1000)
- elif bytes > 1000:
- return '%.1fKb' % (bytes/1000.0)
- else:
- return '%ibytes' % bytes
-
-_normalize_re = re.compile(r'[^a-z]', re.I)
-
-def normalize_name(name):
- return _normalize_re.sub('-', name.lower())
-
-def make_path_relative(path, rel_to):
- """
-    Make a filename relative, given the filename's path and the
-    directory (rel_to) that it should be made relative to
-
-    >>> make_path_relative('/usr/share/something/a-file.pth',
-    ...     '/usr/share/another-place/src/Directory')
-    '../../../something/a-file.pth'
-    >>> make_path_relative('/usr/share/something/a-file.pth',
-    ...     '/home/user/src/Directory')
-    '../../../usr/share/something/a-file.pth'
-    >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
-    'a-file.pth'
- """
- path_filename = os.path.basename(path)
- path = os.path.dirname(path)
- path = os.path.normpath(os.path.abspath(path))
- rel_to = os.path.normpath(os.path.abspath(rel_to))
- path_parts = path.strip(os.path.sep).split(os.path.sep)
- rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
- while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
- path_parts.pop(0)
- rel_to_parts.pop(0)
- full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
- if full_parts == ['']:
- return '.' + os.path.sep
- return os.path.sep.join(full_parts)
-
-def display_path(path):
- """Gives the display value for a given path, making it relative to
cwd
- if possible."""
- path = os.path.normcase(os.path.abspath(path))
- if path.startswith(os.getcwd() + os.path.sep):
- path = '.' + path[len(os.getcwd()):]
- return path
-
-def parse_editable(editable_req):
- """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
- (Foobar) and a URL"""
- url = editable_req
- if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
- # Treating it as code that has already been checked out
- url = filename_to_url(url)
- if url.lower().startswith('file:'):
- return None, url
- for version_control in vcs:
- if url.lower().startswith('%s:' % version_control):
- url = '%s+%s' % (version_control, url)
- if '+' not in url:
- if default_vcs:
- url = default_vcs + '+' + url
- else:
- raise InstallationError(
-                '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
- vc_type = url.split('+', 1)[0].lower()
- if not vcs.get_backend(vc_type):
- raise InstallationError(
-            'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) are currently supported' % editable_req)
- match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
- if (not match or not match.group(1)) and vcs.get_backend(vc_type):
-        parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
-        if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
- req = parts[-3]
- elif parts[-1] == 'trunk':
- req = parts[-2]
- else:
- raise InstallationError(
-                '--editable=%s is not the right format; it must have #egg=Package'
-                % editable_req)
- else:
- req = match.group(1)
- ## FIXME: use package_to_requirement?
- match = re.search(r'^(.*?)(?:-dev|-\d.*)', req)
- if match:
- # Strip off -dev, -0.2, etc.
- req = match.group(1)
- return req, url
-
-def backup_dir(dir, ext='.bak'):
- """Figure out the name of a directory to back up the given dir to
- (adding .bak, .bak2, etc)"""
- n = 1
- extension = ext
- while os.path.exists(dir + extension):
- n += 1
- extension = ext + str(n)
- return dir + extension
-
-def ask(message, options):
- """Ask the message interactively, with the given possible
responses"""
- while 1:
- response = raw_input(message)
- response = response.strip().lower()
- if response not in options:
-            print 'Your response (%r) was not one of the expected responses: %s' % (
-                response, ', '.join(options))
- else:
- return response
-
-def open_logfile_append(filename):
- """Open the named log file in append mode.
-
- If the file already exists, a separator will also be printed to
- the file to separate past activity from current activity.
- """
- exists = os.path.exists(filename)
- log_fp = open(filename, 'a')
- if exists:
- print >> log_fp, '-'*60
-    print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
- return log_fp
-
-def is_url(name):
- """Returns true if the name looks like a URL"""
- if ':' not in name:
- return False
- scheme = name.split(':', 1)[0].lower()
- return scheme in ('http', 'https', 'file', 'ftp')
-
-def is_filename(name):
-    if (splitext(name)[1].lower() in ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
- and os.path.exists(name)):
- return True
- if os.path.sep not in name and '/' not in name:
-        # Doesn't have any path components, probably a requirement like 'Foo'
- return False
- return True
-
-_drive_re = re.compile('^([a-z]):', re.I)
-_url_drive_re = re.compile('^([a-z])[|]', re.I)
-
-def filename_to_url(filename):
- """
- Convert a path to a file: URL. The path will be made absolute.
- """
- filename = os.path.normcase(os.path.abspath(filename))
- if _drive_re.match(filename):
- filename = filename[0] + '|' + filename[2:]
- url = urllib.quote(filename)
- url = url.replace(os.path.sep, '/')
- url = url.lstrip('/')
- return 'file:///' + url
-
-def url_to_filename(url):
- """
- Convert a file: URL to a path.
- """
- assert url.startswith('file:'), (
- "You can only turn file: urls into filenames (not %r)" % url)
- filename = url[len('file:'):].lstrip('/')
- filename = urllib.unquote(filename)
- if _url_drive_re.match(filename):
- filename = filename[0] + ':' + filename[2:]
- else:
- filename = '/' + filename
- return filename
-
-def get_requirement_from_url(url):
- """Get a requirement from the URL, if possible. This looks for #egg
- in the URL"""
- link = Link(url)
- egg_info = link.egg_fragment
- if not egg_info:
- egg_info = splitext(link.filename)[0]
- return package_to_requirement(egg_info)
-
-def package_to_requirement(package_name):
- """Translate a name like Foo-1.2 to Foo==1.3"""
- match = re.search(r'^(.*?)(-dev|-\d.*)', package_name)
- if match:
- name = match.group(1)
- version = match.group(2)
- else:
- name = package_name
- version = ''
- if version:
- return '%s==%s' % (name, version)
- else:
- return name
-
-def splitext(path):
- """Like os.path.splitext, but take off .tar too"""
- base, ext = posixpath.splitext(path)
- if base.lower().endswith('.tar'):
- ext = base[-4:] + ext
- base = base[:-4]
- return base, ext
-
-class _Inf(object):
- """I am bigger than everything!"""
- def __cmp__(self, a):
- if self is a:
- return 0
- return 1
- def __repr__(self):
- return 'Inf'
-Inf = _Inf()
-del _Inf
-
-if __name__ == '__main__':
- exit = main()
- if exit:
- sys.exit(exit)
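
The pip.py removed above also defined pip's editable-requirement convention: a URL of
the form vcs+URL#egg=Package, handled by parse_editable() and Link.egg_fragment. A rough
standalone sketch of that parsing rule (the helper name and example URL below are
illustrative, not part of the deleted module):

    import re

    _EGG_RE = re.compile(r'#egg=([^&]*)')

    def split_editable(editable_url):
        # Mirrors the parse_editable()/egg_fragment logic deleted above:
        # 'svn+http://host/repo/trunk#egg=Foo' -> ('Foo', 'svn', 'http://host/repo/trunk')
        vc_type, sep, rest = editable_url.partition('+')
        if not sep:
            raise ValueError('%s should be formatted as vcs+URL' % editable_url)
        match = _EGG_RE.search(rest)
        if not match:
            raise ValueError('%s has no #egg=Package fragment' % editable_url)
        return match.group(1), vc_type, rest.split('#', 1)[0]

    print split_editable('svn+http://svn.example.com/Foo/trunk#egg=Foo')
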
commit 86ea48c164cc571a5ae2c9835cd6341bce33b5d1
Author: Luke Macken <lmacken(a)redhat.com>
Date: Fri Aug 28 13:43:32 2009 -0400
Hack around pkg_resources/setuptools for SQLAlchemy>=0.5 in our WSGI app
diff --git a/production/apache/moksha.wsgi b/production/apache/moksha.wsgi
index cdc76b6..a27ee17 100644
--- a/production/apache/moksha.wsgi
+++ b/production/apache/moksha.wsgi
@@ -1,9 +1,14 @@
-import sys
+#import sys
#import os
#sys.path.append('/srv/moksha')
#os.environ['PYTHON_EGG_CACHE'] = '/srv/moksha/.python-eggs'
-
#sys.stdout = sys.stderr
+import __main__
+__main__.__requires__ = 'SQLAlchemy>=0.5'
+import pkg_resources
+pkg_resources.require("SQLAlchemy>=0.5")
+import sqlalchemy
+
from paste.deploy import loadapp
application = loadapp('config:/etc/moksha/production.ini')
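
The __main__.__requires__ assignment above is what makes the hack work: pkg_resources
consults __main__.__requires__ when it first builds its default working set, so setting
it before pkg_resources is ever imported keeps an older system-wide SQLAlchemy egg from
shadowing the >=0.5 requirement. Under mod_wsgi there is no console-script wrapper to
declare this, which is why the WSGI file sets it by hand. The same pattern, sketched for
an arbitrary pinned dependency ('SomeLib' is an illustrative placeholder, not part of
this commit):

    import __main__
    __main__.__requires__ = 'SomeLib>=1.0'  # must be set before pkg_resources is imported
    import pkg_resources
    pkg_resources.require('SomeLib>=1.0')   # activate the egg satisfying the pin
    import somelib                          # now resolves to the pinned version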