Initial version of the Windows DataCenter project. It is based on the openstack-skeleton.

parent 750b94c09d
commit 7d7d480d79

2  windc/.gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
*.swp
*.pyc
8  windc/README  Normal file
@@ -0,0 +1,8 @@
This is the Windows DataCenter project. It serves two main purposes:

* Prove the proposed architecture for the Windows DC service
* Provide a demo for the Windows Environment Management features

This is not a final project. It is a proof of concept for demo and architecture-verification purposes.
64  windc/bin/windc-api  Executable file
@@ -0,0 +1,64 @@
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Windows DataCenter API Server
"""

import optparse
import os
import sys

# If ../windc/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'windc', '__init__.py')):
    sys.path.insert(0, possible_topdir)

from openstack.common import config
from openstack.common import wsgi
from windc import version


def create_options(parser):
    """
    Sets up the CLI and config-file options that may be
    parsed and program commands run.

    :param parser: The option parser
    """
    config.add_common_options(parser)
    config.add_log_options(parser)


if __name__ == '__main__':
    oparser = optparse.OptionParser(version='%%prog %s'
                                    % version.version_string())
    create_options(oparser)
    (options, args) = config.parse_options(oparser)

    try:
        conf, app = config.load_paste_app('windc-api', options, args)

        server = wsgi.Server()
        server.start(app, int(conf['bind_port']), conf['bind_host'])
        server.wait()
    except RuntimeError, e:
        sys.exit("ERROR: %s" % e)
97  windc/doc/Makefile  Normal file
@@ -0,0 +1,97 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
SPHINXSOURCE  = source
PAPER         =
BUILDDIR      = build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SPHINXSOURCE)

.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest

.DEFAULT_GOAL = html

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html      to make standalone HTML files"
	@echo "  dirhtml   to make HTML files named index.html in directories"
	@echo "  pickle    to make pickle files"
	@echo "  json      to make JSON files"
	@echo "  htmlhelp  to make HTML files and a HTML help project"
	@echo "  qthelp    to make HTML files and a qthelp project"
	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  changes   to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck to check all external links for integrity"
	@echo "  doctest   to run all doctests embedded in the documentation (if enabled)"

clean:
	-rm -rf $(BUILDDIR)/*
	-rm -rf nova.sqlite
	if [ -f .autogenerated ] ; then \
		cat .autogenerated | xargs rm ; \
		rm .autogenerated ; \
	fi

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/nova.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/nova.qhc"

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
	      "run these through (pdf)latex."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
416  windc/doc/source/_static/basic.css  Normal file
@@ -0,0 +1,416 @@
[Sphinx stylesheet -- basic theme: 416 lines of the stock Sphinx "basic" theme CSS covering main layout, relbar, sidebar, search page, index pages, general body styles, sidebars, topics, admonitions, tables, other body styles, code displays, math display and printout rules.]
230  windc/doc/source/_static/default.css  Normal file
@@ -0,0 +1,230 @@
[Sphinx stylesheet -- default theme: 230 lines of the stock Sphinx "default" theme CSS; it imports basic.css and defines the page layout, sidebar, heading, admonition and code-block colours used by the generated docs.]
154  windc/doc/source/_static/jquery.tweet.js  Normal file
@@ -0,0 +1,154 @@
[jquery.tweet.js: 154 lines of the third-party "seaofclouds" jQuery Tweet plugin. It fetches recent tweets for a username, list or search query from the Twitter API, linkifies URLs, @users and #hashtags, and renders them as a <ul class="tweet_list">; the docs layout uses it for the "Twitter Feed" box on the index page.]
65  windc/doc/source/_static/tweaks.css  Normal file
@@ -0,0 +1,65 @@
ul.todo_list {
    list-style-type: none;
    margin: 0;
    padding: 0;
}

ul.todo_list li {
    display: block;
    margin: 0;
    padding: 7px 0;
    border-top: 1px solid #eee;
}

ul.todo_list li p {
    display: inline;
}

ul.todo_list li p.link {
    font-weight: bold;
}

ul.todo_list li p.details {
    font-style: italic;
}

ul.todo_list li {
}

div.admonition {
    border: 1px solid #8F1000;
}

div.admonition p.admonition-title {
    background-color: #8F1000;
    border-bottom: 1px solid #8E8E8E;
}

a {
    color: #CF2F19;
}

div.related ul li a {
    color: #CF2F19;
}

div.sphinxsidebar h4 {
    background-color:#8E8E8E;
    border:1px solid #255E6E;
    color:white;
    font-size:1em;
    margin:1em 0 0.5em;
    padding:0.1em 0 0.1em 0.5em;
}

em {
    font-style: normal;
}

table.docutils {
    font-size: 11px;
}

a tt {
    color:#CF2F19;
}
0  windc/doc/source/_templates/.placeholder  Normal file
86  windc/doc/source/_theme/layout.html  Normal file
@@ -0,0 +1,86 @@
{% extends "sphinxdoc/layout.html" %}
{% set css_files = css_files + ['_static/tweaks.css'] %}
{% set script_files = script_files + ['_static/jquery.tweet.js'] %}
{% block extrahead %}
  <script type='text/javascript'>
    $(document).ready(function(){
      $("#twitter_feed").tweet({
        username: "openstack",
        query: "from:openstack",
        avatar_size: 32,
        count: 10,
        loading_text: "loading tweets..."
      });
    });
  </script>
{% endblock %}

{%- macro sidebar() %}
  {%- if not embedded %}{% if not theme_nosidebar|tobool %}
  <div class="sphinxsidebar">
    <div class="sphinxsidebarwrapper">
      {%- block sidebarlogo %}
      {%- if logo %}
      <p class="logo"><a href="{{ pathto(master_doc) }}">
        <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
      </a></p>
      {%- endif %}
      {%- endblock %}
      {%- block sidebartoc %}
      {%- if display_toc %}
      <h3><a href="{{ pathto(master_doc) }}">{{ _('Table Of Contents') }}</a></h3>
      {{ toc }}
      {%- endif %}
      {%- endblock %}
      {%- block sidebarrel %}
      {%- if prev %}
      <h4>{{ _('Previous topic') }}</h4>
      <p class="topless"><a href="{{ prev.link|e }}"
                            title="{{ _('previous chapter') }}">{{ prev.title }}</a></p>
      {%- endif %}
      {%- if next %}
      <h4>{{ _('Next topic') }}</h4>
      <p class="topless"><a href="{{ next.link|e }}"
                            title="{{ _('next chapter') }}">{{ next.title }}</a></p>
      {%- endif %}
      {%- endblock %}
      {%- block sidebarsourcelink %}
      {%- if show_source and has_source and sourcename %}
      <h3>{{ _('This Page') }}</h3>
      <ul class="this-page-menu">
        <li><a href="{{ pathto('_sources/' + sourcename, true)|e }}"
               rel="nofollow">{{ _('Show Source') }}</a></li>
      </ul>
      {%- endif %}
      {%- endblock %}
      {%- if customsidebar %}
      {% include customsidebar %}
      {%- endif %}
      {%- block sidebarsearch %}
      {%- if pagename != "search" %}
      <div id="searchbox" style="display: none">
        <h3>{{ _('Quick search') }}</h3>
        <form class="search" action="{{ pathto('search') }}" method="get">
          <input type="text" name="q" size="18" />
          <input type="submit" value="{{ _('Go') }}" />
          <input type="hidden" name="check_keywords" value="yes" />
          <input type="hidden" name="area" value="default" />
        </form>
        <p class="searchtip" style="font-size: 90%">
          {{ _('Enter search terms or a module, class or function name.') }}
        </p>
      </div>
      <script type="text/javascript">$('#searchbox').show(0);</script>
      {%- endif %}

      {%- if pagename == "index" %}
      <h3>{{ _('Twitter Feed') }}</h3>
      <div id="twitter_feed" class='twitter_feed'></div>
      {%- endif %}

      {%- endblock %}
    </div>
  </div>
  {%- endif %}{% endif %}
{%- endmacro %}
5  windc/doc/source/_theme/theme.conf  Normal file
@@ -0,0 +1,5 @@
[theme]
inherit = sphinxdoc
stylesheet = sphinxdoc.css
pygments_style = friendly
252  windc/doc/source/conf.py  Normal file
@@ -0,0 +1,252 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#
# Skeleton documentation build configuration file, created by
# sphinx-quickstart on Tue May 18 13:50:15 2010.
#
# This file is execfile()'d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import os
import sys

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append([os.path.abspath('../skeleton'),
                 os.path.abspath('..'),
                 os.path.abspath('../bin')
                 ])

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.coverage',
              'sphinx.ext.ifconfig',
              'sphinx.ext.intersphinx',
              'sphinx.ext.pngmath',
              'sphinx.ext.graphviz',
              'sphinx.ext.todo']

todo_include_todos = True

# Add any paths that contain templates here, relative to this directory.
templates_path = []
if os.getenv('HUDSON_PUBLISH_DOCS'):
    templates_path = ['_ga', '_templates']
else:
    templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Skeleton'
copyright = u'2011-present, OpenStack, LLC.'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from skeleton import version as skeleton_version
# The full version, including alpha/beta/rc tags.
release = skeleton_version.version_string()
# The short X.Y version.
version = skeleton_version.canonical_version_string()

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []

# The reST default role (for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['skeleton.']

# -- Options for man page output --------------------------------------------

# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'

man_pages = [
    ('man/skeletonapi', 'skeleton-api', u'Skeleton API Server',
     [u'OpenStack'], 1),
    ('man/skeletonregistry', 'skeleton-registry', u'Skeleton Registry Server',
     [u'OpenStack'], 1),
    ('man/skeletonmanage', 'skeleton-manage', u'Skeleton Management Utility',
     [u'OpenStack'], 1)
]


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme_path = ["."]
html_theme = '_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['_theme']

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'skeletondoc'


# -- Options for LaTeX output ------------------------------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
    ('index', 'Skeleton.tex', u'Skeleton Documentation',
     u'Skeleton Team', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('http://docs.python.org/', None),
                       'dashboard': ('http://dashboard.openstack.org', None),
                       'glance': ('http://glance.openstack.org', None),
                       'keystone': ('http://keystone.openstack.org', None),
                       'nova': ('http://nova.openstack.org', None),
                       'swift': ('http://swift.openstack.org', None)}
53  windc/doc/source/index.rst  Normal file
@@ -0,0 +1,53 @@
..
      Copyright 2011 OpenStack, LLC.
      All Rights Reserved.

      Licensed under the Apache License, Version 2.0 (the "License"); you may
      not use this file except in compliance with the License. You may obtain
      a copy of the License at

          http://www.apache.org/licenses/LICENSE-2.0

      Unless required by applicable law or agreed to in writing, software
      distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
      WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
      License for the specific language governing permissions and limitations
      under the License.

Welcome to Skeleton's documentation!
====================================

Description of Skeleton project

Concepts
========

.. toctree::
   :maxdepth: 1

Using Skeleton
==============

.. toctree::
   :maxdepth: 1

   gettingstarted
   installing

Developer Docs
==============

.. toctree::
   :maxdepth: 1

Outstanding Documentation Tasks
===============================

.. todolist::

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
34  windc/etc/windc-api.conf  Normal file
@@ -0,0 +1,34 @@
[DEFAULT]
# Show more verbose log output (sets INFO log level output)
verbose = True

# Show debugging output in logs (sets DEBUG log level output)
debug = False

# Address to bind the server to
bind_host = 0.0.0.0

# Port to bind the server to
bind_port = 8082

# Log to this file. Make sure the user running windc-api has
# permissions to write to this file!
log_file = api.log

[pipeline:windc-api]
pipeline = versionnegotiation context apiv1app

[pipeline:versions]
pipeline = versionsapp

[app:versionsapp]
paste.app_factory = windc.api.versions:app_factory

[app:apiv1app]
paste.app_factory = windc.api.v1:app_factory

[filter:versionnegotiation]
paste.filter_factory = windc.api.middleware.version_negotiation:filter_factory

[filter:context]
paste.filter_factory = openstack.common.middleware.context:filter_factory
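For orientation (not part of this commit): the [pipeline:windc-api] section above is what bin/windc-api hands to paste.deploy via config.load_paste_app. A minimal sketch of that loading step, assuming the config file path, looks like:

    # Hypothetical illustration only -- the path is an assumption.
    from paste.deploy import loadapp

    # 'config:' tells paste.deploy to parse the INI file; name= selects the
    # [pipeline:windc-api] section, which chains the versionnegotiation and
    # context filters in front of the apiv1app application defined above.
    app = loadapp('config:/path/to/windc/etc/windc-api.conf', name='windc-api')
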
24  windc/openstack/__init__.py  Normal file
@@ -0,0 +1,24 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# This ensures the openstack namespace is defined
try:
    import pkg_resources
    pkg_resources.declare_namespace(__name__)
except ImportError:
    import pkgutil
    __path__ = pkgutil.extend_path(__path__, __name__)
19  windc/openstack/common/__init__.py  Normal file
@@ -0,0 +1,19 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# TODO(jaypipes) Code in this module is intended to be ported to the eventual
# openstack-common library
337
windc/openstack/common/config.py
Normal file
337
windc/openstack/common/config.py
Normal file
@ -0,0 +1,337 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Routines for configuring Openstack Projects
"""

import logging
import logging.config
import logging.handlers
import optparse
import os
import sys

from paste import deploy

DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"


def parse_options(parser, cli_args=None):
    """
    Returns the parsed CLI options, command to run and its arguments, merged
    with any same-named options found in a configuration file.

    The function returns a tuple of (options, args), where options is a
    mapping of option key/str(value) pairs, and args is the set of arguments
    (not options) supplied on the command-line.

    The reason that the option values are returned as strings only is that
    ConfigParser and paste.deploy only accept string values...

    :param parser: The option parser
    :param cli_args: (Optional) Set of arguments to process. If not present,
                     sys.argv[1:] is used.
    :retval tuple of (options, args)
    """

    (options, args) = parser.parse_args(cli_args)

    return (vars(options), args)


def add_common_options(parser):
    """
    Given a supplied optparse.OptionParser, adds an OptionGroup that
    represents all common configuration options.

    :param parser: optparse.OptionParser
    """
    help_text = "The following configuration options are common to "\
                "this app's programs."

    group = optparse.OptionGroup(parser, "Common Options", help_text)
    group.add_option('-v', '--verbose', default=False, dest="verbose",
                     action="store_true",
                     help="Print more verbose output")
    group.add_option('-d', '--debug', default=False, dest="debug",
                     action="store_true",
                     help="Print debugging output")
    group.add_option('--config-file', default=None, metavar="PATH",
                     help="Path to the config file to use. When not specified "
                          "(the default), we generally look at the first "
                          "argument specified to be a config file, and if "
                          "that is also missing, we search standard "
                          "directories for a config file.")
    parser.add_option_group(group)


def add_log_options(parser):
    """
    Given a supplied optparse.OptionParser, adds an OptionGroup that
    represents all the configuration options around logging.

    :param parser: optparse.OptionParser
    """
    help_text = "The following configuration options are specific to logging "\
                "functionality for this program."

    group = optparse.OptionGroup(parser, "Logging Options", help_text)
    group.add_option('--log-config', default=None, metavar="PATH",
                     help="If this option is specified, the logging "
                          "configuration file specified is used and overrides "
                          "any other logging options specified. Please see "
                          "the Python logging module documentation for "
                          "details on logging configuration files.")
    group.add_option('--log-date-format', metavar="FORMAT",
                     default=DEFAULT_LOG_DATE_FORMAT,
                     help="Format string for %(asctime)s in log records. "
                          "Default: %default")
    group.add_option('--log-file', default=None, metavar="PATH",
                     help="(Optional) Name of log file to output to. "
                          "If not set, logging will go to stdout.")
    group.add_option("--log-dir", default=None,
                     help="(Optional) The directory to keep log files in "
                          "(will be prepended to --logfile)")
    group.add_option('--use-syslog', default=False, dest="use_syslog",
                     action="store_true",
                     help="Use syslog for logging.")
    parser.add_option_group(group)


def setup_logging(options, conf):
    """
    Sets up the logging options for a log with supplied name

    :param options: Mapping of typed option key/values
    :param conf: Mapping of untyped key/values from config file
    """

    if options.get('log_config', None):
        # Use a logging configuration file for all settings...
        if os.path.exists(options['log_config']):
            logging.config.fileConfig(options['log_config'])
            return
        else:
            raise RuntimeError("Unable to locate specified logging "
                               "config file: %s" % options['log_config'])

    # If either the CLI option or the conf value
    # is True, we set to True
    debug = options.get('debug') or \
        get_option(conf, 'debug', type='bool', default=False)
    verbose = options.get('verbose') or \
        get_option(conf, 'verbose', type='bool', default=False)
    root_logger = logging.root
    if debug:
        root_logger.setLevel(logging.DEBUG)
    elif verbose:
        root_logger.setLevel(logging.INFO)
    else:
        root_logger.setLevel(logging.WARNING)

    # Set log configuration from options...
    # Note that we use a hard-coded log format in the options
    # because of Paste.Deploy bug #379
    # http://trac.pythonpaste.org/pythonpaste/ticket/379
    log_format = options.get('log_format', DEFAULT_LOG_FORMAT)
    log_date_format = options.get('log_date_format', DEFAULT_LOG_DATE_FORMAT)
    formatter = logging.Formatter(log_format, log_date_format)

    logfile = options.get('log_file')
    if not logfile:
        logfile = conf.get('log_file')

    use_syslog = options.get('use_syslog') or \
        get_option(conf, 'use_syslog', type='bool', default=False)

    if use_syslog:
        handler = logging.handlers.SysLogHandler(address='/dev/log')
    elif logfile:
        logdir = options.get('log_dir')
        if not logdir:
            logdir = conf.get('log_dir')
        if logdir:
            logfile = os.path.join(logdir, logfile)
        handler = logging.FileHandler(logfile)
    else:
        handler = logging.StreamHandler(sys.stdout)

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)


def fix_path(path):
    """
    Return the full absolute path
    """
    return os.path.abspath(os.path.expanduser(path))


def find_config_file(app_name, options, args, config_dir=None):
    """
    Return the first config file found for an application.

    We search for the paste config file in the following order:
    * If --config-file option is used, use that
    * If args[0] is a file, use that
    * Search for $app.conf in standard directories:
        * .
        * ~.config_dir/
        * ~
        * /etc/config_dir
        * /etc

    :retval Full path to config file, or None if no config file found
    """
    config_dir = config_dir or app_name

    if options.get('config_file'):
        if os.path.exists(options['config_file']):
            return fix_path(options['config_file'])
    elif args:
        if os.path.exists(args[0]):
            return fix_path(args[0])

    # Handle standard directory search for $app_name.conf
    config_file_dirs = [fix_path(os.getcwd()),
                        fix_path(os.path.join('~', '.' + config_dir)),
                        fix_path('~'),
                        os.path.join('/etc', config_dir),
                        '/etc']

    for cfg_dir in config_file_dirs:
        cfg_file = os.path.join(cfg_dir, '%s.conf' % app_name)
        if os.path.exists(cfg_file):
            return cfg_file


def load_paste_config(app_name, options, args, config_dir=None):
    """
    Looks for a config file to use for an app and returns the
    config file path and a configuration mapping from a paste config file.

    We search for the paste config file in the following order:
    * If --config-file option is used, use that
    * If args[0] is a file, use that
    * Search for $app_name.conf in standard directories:
        * .
        * ~.config_dir/
        * ~
        * /etc/config_dir
        * /etc

    :param app_name: Name of the application to load config for, or None.
                     None signifies to only load the [DEFAULT] section of
                     the config file.
    :param options: Set of typed options returned from parse_options()
    :param args: Command line arguments from argv[1:]
    :retval Tuple of (conf_file, conf)

    :raises RuntimeError when config file cannot be located or there was a
            problem loading the configuration file.
    """
    conf_file = find_config_file(app_name, options, args, config_dir)
    if not conf_file:
        raise RuntimeError("Unable to locate any configuration file. "
                           "Cannot load application %s" % app_name)
    try:
        conf = deploy.appconfig("config:%s" % conf_file, name=app_name)
        return conf_file, conf
    except Exception, e:
        raise RuntimeError("Error trying to load config %s: %s"
                           % (conf_file, e))


def load_paste_app(app_name, options, args, config_dir=None):
    """
    Builds and returns a WSGI app from a paste config file.

    We search for the paste config file in the following order:
    * If --config-file option is used, use that
    * If args[0] is a file, use that
    * Search for $app_name.conf in standard directories:
        * .
        * ~.config_dir/
        * ~
        * /etc/config_dir
        * /etc

    :param app_name: Name of the application to load
    :param options: Set of typed options returned from parse_options()
    :param args: Command line arguments from argv[1:]

    :raises RuntimeError when config file cannot be located or application
            cannot be loaded from config file
    """
    conf_file, conf = load_paste_config(app_name, options,
                                        args, config_dir)

    try:
        # Setup logging early, supplying both the CLI options and the
        # configuration mapping from the config file
        setup_logging(options, conf)

        # We only update the conf dict for the verbose and debug
        # flags. Everything else must be set up in the conf file...
        debug = options.get('debug') or \
            get_option(conf, 'debug', type='bool', default=False)
        verbose = options.get('verbose') or \
            get_option(conf, 'verbose', type='bool', default=False)
        conf['debug'] = debug
        conf['verbose'] = verbose

        # Log the options used when starting if we're in debug mode...
        if debug:
            logger = logging.getLogger(app_name)
            logger.debug("*" * 80)
            logger.debug("Configuration options gathered from config file:")
            logger.debug(conf_file)
            logger.debug("================================================")
            items = dict([(k, v) for k, v in conf.items()
                          if k not in ('__file__', 'here')])
            for key, value in sorted(items.items()):
                logger.debug("%(key)-30s %(value)s" % locals())
            logger.debug("*" * 80)
        app = deploy.loadapp("config:%s" % conf_file, name=app_name)
    except (LookupError, ImportError), e:
        raise RuntimeError("Unable to load %(app_name)s from "
                           "configuration file %(conf_file)s."
                           "\nGot: %(e)r" % locals())
    return conf, app


def get_option(options, option, **kwargs):
    if option in options:
        value = options[option]
        type_ = kwargs.get('type', 'str')
        if type_ == 'bool':
            if hasattr(value, 'lower'):
                return value.lower() == 'true'
            else:
                return value
        elif type_ == 'int':
            return int(value)
        elif type_ == 'float':
            return float(value)
        else:
            return value
    elif 'default' in kwargs:
        return kwargs['default']
    else:
        raise KeyError("option '%s' not found" % option)
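The get_option helper is what keeps the rest of the module honest about types: everything read from a paste config arrives as a string, and callers ask for the coercion they want. A minimal sketch of its behaviour (the sample conf values below are hypothetical):

# Hypothetical conf mapping, as paste.deploy's appconfig would return it:
# every value is a string.
conf = {'verbose': 'True', 'bind_port': '8082'}

verbose = get_option(conf, 'verbose', type='bool', default=False)  # True
port = get_option(conf, 'bind_port', type='int')                   # 8082
debug = get_option(conf, 'debug', type='bool', default=False)      # False, key absent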
40
windc/openstack/common/context.py
Normal file
@ -0,0 +1,40 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Simple class that stores security context information in the web request.

Projects should subclass this class if they wish to enhance the request
context or provide additional information in their specific WSGI pipeline.
"""


class RequestContext(object):

    """
    Stores information about the security context under which the user
    accesses the system, as well as additional request information.
    """

    def __init__(self, auth_tok=None, user=None, tenant=None, is_admin=False,
                 read_only=False, show_deleted=False):
        self.auth_tok = auth_tok
        self.user = user
        self.tenant = tenant
        self.is_admin = is_admin
        self.read_only = read_only
        self.show_deleted = show_deleted
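As the module docstring says, projects are expected to subclass RequestContext when they need more than the stock fields. A minimal, hypothetical sketch of such a subclass:

class DataCenterContext(RequestContext):
    # Hypothetical subclass: carry one extra piece of request data and
    # delegate everything else to the base class.
    def __init__(self, datacenter_id=None, **kwargs):
        super(DataCenterContext, self).__init__(**kwargs)
        self.datacenter_id = datacenter_id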
147
windc/openstack/common/exception.py
Normal file
@ -0,0 +1,147 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Exceptions common to OpenStack projects
"""

import logging


class ProcessExecutionError(IOError):
    def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
                 description=None):
        if description is None:
            description = "Unexpected error while running command."
        if exit_code is None:
            exit_code = '-'
        message = "%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" % (
            description, cmd, exit_code, stdout, stderr)
        IOError.__init__(self, message)


class Error(Exception):
    def __init__(self, message=None):
        super(Error, self).__init__(message)


class ApiError(Error):
    def __init__(self, message='Unknown', code='Unknown'):
        self.message = message
        self.code = code
        super(ApiError, self).__init__('%s: %s' % (code, message))


class NotFound(Error):
    pass


class UnknownScheme(Error):

    msg = "Unknown scheme '%s' found in URI"

    def __init__(self, scheme):
        msg = self.__class__.msg % scheme
        super(UnknownScheme, self).__init__(msg)


class BadStoreUri(Error):

    msg = "The Store URI %s was malformed. Reason: %s"

    def __init__(self, uri, reason):
        msg = self.__class__.msg % (uri, reason)
        super(BadStoreUri, self).__init__(msg)


class Duplicate(Error):
    pass


class NotAuthorized(Error):
    pass


class NotEmpty(Error):
    pass


class Invalid(Error):
    pass


class BadInputError(Exception):
    """Error resulting from a client sending bad input to a server"""
    pass


class MissingArgumentError(Error):
    pass


class DatabaseMigrationError(Error):
    pass


class ClientConnectionError(Exception):
    """Error resulting from a client connecting to a server"""
    pass


def wrap_exception(f):
    def _wrap(*args, **kw):
        try:
            return f(*args, **kw)
        except Exception, e:
            if not isinstance(e, Error):
                #exc_type, exc_value, exc_traceback = sys.exc_info()
                logging.exception('Uncaught exception')
                #logging.error(traceback.extract_stack(exc_traceback))
                raise Error(str(e))
            raise
    _wrap.func_name = f.func_name
    return _wrap


class OpenstackException(Exception):
    """
    Base Exception

    To correctly use this class, inherit from it and define
    a 'message' property. That message will get printf'd
    with the keyword arguments provided to the constructor.
    """
    message = "An unknown exception occurred"

    def __init__(self, **kwargs):
        try:
            self._error_string = self.message % kwargs

        except Exception:
            # at least get the core message out if something happened
            self._error_string = self.message

    def __str__(self):
        return self._error_string


class MalformedRequestBody(OpenstackException):
    message = "Malformed message body: %(reason)s"


class InvalidContentType(OpenstackException):
    message = "Invalid content type %(content_type)s"
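OpenstackException interpolates its message attribute with the keyword arguments given to the constructor, falling back to the raw template if interpolation fails. A short sketch of the intended usage, assuming Python 2 as in the rest of the module:

# The %(reason)s placeholder is filled from the constructor kwargs.
try:
    raise MalformedRequestBody(reason='body is not valid JSON')
except OpenstackException, e:
    print e    # Malformed message body: body is not valid JSON

# wrap_exception logs any unexpected exception and re-raises it as Error.
@wrap_exception
def risky_call():
    return 1 / 0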
538
windc/openstack/common/extensions.py
Normal file
@ -0,0 +1,538 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# Copyright 2011 Justin Santa Barbara
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os
|
||||||
|
import routes
|
||||||
|
import webob.dec
|
||||||
|
import webob.exc
|
||||||
|
import logging
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
from openstack.common import exception
|
||||||
|
from openstack.common import wsgi
|
||||||
|
|
||||||
|
LOG = logging.getLogger('extensions')
|
||||||
|
DEFAULT_XMLNS = "http://docs.openstack.org/"
|
||||||
|
XMLNS_ATOM = "http://www.w3.org/2005/Atom"
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionDescriptor(object):
|
||||||
|
"""Base class that defines the contract for extensions.
|
||||||
|
|
||||||
|
Note that you don't have to derive from this class to have a valid
|
||||||
|
extension; it is purely a convenience.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_name(self):
|
||||||
|
"""The name of the extension.
|
||||||
|
|
||||||
|
e.g. 'Fox In Socks'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_alias(self):
|
||||||
|
"""The alias for the extension.
|
||||||
|
|
||||||
|
e.g. 'FOXNSOX'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_description(self):
|
||||||
|
"""Friendly description for the extension.
|
||||||
|
|
||||||
|
e.g. 'The Fox In Socks Extension'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_namespace(self):
|
||||||
|
"""The XML namespace for the extension.
|
||||||
|
|
||||||
|
e.g. 'http://www.fox.in.socks/api/ext/pie/v1.0'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_updated(self):
|
||||||
|
"""The timestamp when the extension was last updated.
|
||||||
|
|
||||||
|
e.g. '2011-01-22T13:25:27-06:00'
|
||||||
|
|
||||||
|
"""
|
||||||
|
# NOTE(justinsb): Not sure of the purpose of this is, vs the XML NS
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_resources(self):
|
||||||
|
"""List of extensions.ResourceExtension extension objects.
|
||||||
|
|
||||||
|
Resources define new nouns, and are accessible through URLs.
|
||||||
|
|
||||||
|
"""
|
||||||
|
resources = []
|
||||||
|
return resources
|
||||||
|
|
||||||
|
def get_actions(self):
|
||||||
|
"""List of extensions.ActionExtension extension objects.
|
||||||
|
|
||||||
|
Actions are verbs callable from the API.
|
||||||
|
|
||||||
|
"""
|
||||||
|
actions = []
|
||||||
|
return actions
|
||||||
|
|
||||||
|
def get_request_extensions(self):
|
||||||
|
"""List of extensions.RequestException extension objects.
|
||||||
|
|
||||||
|
Request extensions are used to handle custom request data.
|
||||||
|
|
||||||
|
"""
|
||||||
|
request_exts = []
|
||||||
|
return request_exts
|
||||||
|
|
||||||
|
|
||||||
|
class ActionExtensionController(object):
|
||||||
|
def __init__(self, application):
|
||||||
|
self.application = application
|
||||||
|
self.action_handlers = {}
|
||||||
|
|
||||||
|
def add_action(self, action_name, handler):
|
||||||
|
self.action_handlers[action_name] = handler
|
||||||
|
|
||||||
|
def action(self, req, id, body):
|
||||||
|
for action_name, handler in self.action_handlers.iteritems():
|
||||||
|
if action_name in body:
|
||||||
|
return handler(body, req, id)
|
||||||
|
# no action handler found (bump to downstream application)
|
||||||
|
res = self.application
|
||||||
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
class ActionExtensionResource(wsgi.Resource):
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
controller = ActionExtensionController(application)
|
||||||
|
wsgi.Resource.__init__(self, controller)
|
||||||
|
|
||||||
|
def add_action(self, action_name, handler):
|
||||||
|
self.controller.add_action(action_name, handler)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestExtensionController(object):
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
self.application = application
|
||||||
|
self.handlers = []
|
||||||
|
|
||||||
|
def add_handler(self, handler):
|
||||||
|
self.handlers.append(handler)
|
||||||
|
|
||||||
|
def process(self, req, *args, **kwargs):
|
||||||
|
res = req.get_response(self.application)
|
||||||
|
# currently request handlers are un-ordered
|
||||||
|
for handler in self.handlers:
|
||||||
|
res = handler(req, res)
|
||||||
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
class RequestExtensionResource(wsgi.Resource):
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
controller = RequestExtensionController(application)
|
||||||
|
wsgi.Resource.__init__(self, controller)
|
||||||
|
|
||||||
|
def add_handler(self, handler):
|
||||||
|
self.controller.add_handler(handler)
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionsResource(wsgi.Resource):
|
||||||
|
|
||||||
|
def __init__(self, extension_manager):
|
||||||
|
self.extension_manager = extension_manager
|
||||||
|
body_serializers = {'application/xml': ExtensionsXMLSerializer()}
|
||||||
|
serializer = wsgi.ResponseSerializer(body_serializers=body_serializers)
|
||||||
|
super(ExtensionsResource, self).__init__(self, None, serializer)
|
||||||
|
|
||||||
|
def _translate(self, ext):
|
||||||
|
ext_data = {}
|
||||||
|
ext_data['name'] = ext.get_name()
|
||||||
|
ext_data['alias'] = ext.get_alias()
|
||||||
|
ext_data['description'] = ext.get_description()
|
||||||
|
ext_data['namespace'] = ext.get_namespace()
|
||||||
|
ext_data['updated'] = ext.get_updated()
|
||||||
|
ext_data['links'] = [] # TODO(dprince): implement extension links
|
||||||
|
return ext_data
|
||||||
|
|
||||||
|
def index(self, req):
|
||||||
|
extensions = []
|
||||||
|
for _alias, ext in self.extension_manager.extensions.iteritems():
|
||||||
|
extensions.append(self._translate(ext))
|
||||||
|
return dict(extensions=extensions)
|
||||||
|
|
||||||
|
def show(self, req, id):
|
||||||
|
# NOTE(dprince): the extensions alias is used as the 'id' for show
|
||||||
|
ext = self.extension_manager.extensions.get(id, None)
|
||||||
|
if not ext:
|
||||||
|
raise webob.exc.HTTPNotFound(
|
||||||
|
_("Extension with alias %s does not exist") % id)
|
||||||
|
|
||||||
|
return dict(extension=self._translate(ext))
|
||||||
|
|
||||||
|
def delete(self, req, id):
|
||||||
|
raise webob.exc.HTTPNotFound()
|
||||||
|
|
||||||
|
def create(self, req):
|
||||||
|
raise webob.exc.HTTPNotFound()
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionMiddleware(wsgi.Middleware):
|
||||||
|
"""Extensions middleware for WSGI."""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def factory(cls, global_config, **local_config):
|
||||||
|
"""Paste factory."""
|
||||||
|
def _factory(app):
|
||||||
|
return cls(app, global_config, **local_config)
|
||||||
|
return _factory
|
||||||
|
|
||||||
|
def _action_ext_resources(self, application, ext_mgr, mapper):
|
||||||
|
"""Return a dict of ActionExtensionResource-s by collection."""
|
||||||
|
action_resources = {}
|
||||||
|
for action in ext_mgr.get_actions():
|
||||||
|
if not action.collection in action_resources.keys():
|
||||||
|
resource = ActionExtensionResource(application)
|
||||||
|
mapper.connect("/%s/:(id)/action.:(format)" %
|
||||||
|
action.collection,
|
||||||
|
action='action',
|
||||||
|
controller=resource,
|
||||||
|
conditions=dict(method=['POST']))
|
||||||
|
mapper.connect("/%s/:(id)/action" %
|
||||||
|
action.collection,
|
||||||
|
action='action',
|
||||||
|
controller=resource,
|
||||||
|
conditions=dict(method=['POST']))
|
||||||
|
action_resources[action.collection] = resource
|
||||||
|
|
||||||
|
return action_resources
|
||||||
|
|
||||||
|
def _request_ext_resources(self, application, ext_mgr, mapper):
|
||||||
|
"""Returns a dict of RequestExtensionResource-s by collection."""
|
||||||
|
request_ext_resources = {}
|
||||||
|
for req_ext in ext_mgr.get_request_extensions():
|
||||||
|
if not req_ext.key in request_ext_resources.keys():
|
||||||
|
resource = RequestExtensionResource(application)
|
||||||
|
mapper.connect(req_ext.url_route + '.:(format)',
|
||||||
|
action='process',
|
||||||
|
controller=resource,
|
||||||
|
conditions=req_ext.conditions)
|
||||||
|
|
||||||
|
mapper.connect(req_ext.url_route,
|
||||||
|
action='process',
|
||||||
|
controller=resource,
|
||||||
|
conditions=req_ext.conditions)
|
||||||
|
request_ext_resources[req_ext.key] = resource
|
||||||
|
|
||||||
|
return request_ext_resources
|
||||||
|
|
||||||
|
def __init__(self, application, config, ext_mgr=None):
|
||||||
|
ext_mgr = ext_mgr or ExtensionManager(
|
||||||
|
config['api_extensions_path'])
|
||||||
|
mapper = routes.Mapper()
|
||||||
|
|
||||||
|
# extended resources
|
||||||
|
for resource_ext in ext_mgr.get_resources():
|
||||||
|
LOG.debug(_('Extended resource: %s'), resource_ext.collection)
|
||||||
|
controller_resource = wsgi.Resource(resource_ext.controller,
|
||||||
|
resource_ext.deserializer,
|
||||||
|
resource_ext.serializer)
|
||||||
|
self._map_custom_collection_actions(resource_ext, mapper,
|
||||||
|
controller_resource)
|
||||||
|
kargs = dict(controller=controller_resource,
|
||||||
|
collection=resource_ext.collection_actions,
|
||||||
|
member=resource_ext.member_actions)
|
||||||
|
if resource_ext.parent:
|
||||||
|
kargs['parent_resource'] = resource_ext.parent
|
||||||
|
mapper.resource(resource_ext.collection,
|
||||||
|
resource_ext.collection, **kargs)
|
||||||
|
|
||||||
|
# extended actions
|
||||||
|
action_resources = self._action_ext_resources(application, ext_mgr,
|
||||||
|
mapper)
|
||||||
|
for action in ext_mgr.get_actions():
|
||||||
|
LOG.debug(_('Extended action: %s'), action.action_name)
|
||||||
|
resource = action_resources[action.collection]
|
||||||
|
resource.add_action(action.action_name, action.handler)
|
||||||
|
|
||||||
|
# extended requests
|
||||||
|
req_controllers = self._request_ext_resources(application, ext_mgr,
|
||||||
|
mapper)
|
||||||
|
for request_ext in ext_mgr.get_request_extensions():
|
||||||
|
LOG.debug(_('Extended request: %s'), request_ext.key)
|
||||||
|
controller = req_controllers[request_ext.key]
|
||||||
|
controller.add_handler(request_ext.handler)
|
||||||
|
|
||||||
|
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
|
||||||
|
mapper)
|
||||||
|
|
||||||
|
super(ExtensionMiddleware, self).__init__(application)
|
||||||
|
|
||||||
|
def _map_custom_collection_actions(self, resource_ext, mapper,
|
||||||
|
controller_resource):
|
||||||
|
for action, method in resource_ext.collection_actions.iteritems():
|
||||||
|
parent = resource_ext.parent
|
||||||
|
conditions = dict(method=[method])
|
||||||
|
path = "/%s/%s" % (resource_ext.collection, action)
|
||||||
|
|
||||||
|
path_prefix = ""
|
||||||
|
if parent:
|
||||||
|
path_prefix = "/%s/{%s_id}" % (parent["collection_name"],
|
||||||
|
parent["member_name"])
|
||||||
|
|
||||||
|
with mapper.submapper(controller=controller_resource,
|
||||||
|
action=action,
|
||||||
|
path_prefix=path_prefix,
|
||||||
|
conditions=conditions) as submap:
|
||||||
|
submap.connect(path)
|
||||||
|
submap.connect("%s.:(format)" % path)
|
||||||
|
|
||||||
|
@webob.dec.wsgify(RequestClass=wsgi.Request)
|
||||||
|
def __call__(self, req):
|
||||||
|
"""Route the incoming request with router."""
|
||||||
|
req.environ['extended.app'] = self.application
|
||||||
|
return self._router
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@webob.dec.wsgify(RequestClass=wsgi.Request)
|
||||||
|
def _dispatch(req):
|
||||||
|
"""Dispatch the request.
|
||||||
|
|
||||||
|
Returns the routed WSGI app's response or defers to the extended
|
||||||
|
application.
|
||||||
|
|
||||||
|
"""
|
||||||
|
match = req.environ['wsgiorg.routing_args'][1]
|
||||||
|
if not match:
|
||||||
|
return req.environ['extended.app']
|
||||||
|
app = match['controller']
|
||||||
|
return app
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionManager(object):
|
||||||
|
"""Load extensions from the configured extension path.
|
||||||
|
|
||||||
|
See nova/tests/api/openstack/extensions/foxinsocks/extension.py for an
|
||||||
|
example extension implementation.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, path):
|
||||||
|
LOG.debug(_('Initializing extension manager.'))
|
||||||
|
|
||||||
|
self.path = path
|
||||||
|
self.extensions = {}
|
||||||
|
self._load_all_extensions()
|
||||||
|
|
||||||
|
def get_resources(self):
|
||||||
|
"""Returns a list of ResourceExtension objects."""
|
||||||
|
resources = []
|
||||||
|
extension_resource = ExtensionsResource(self)
|
||||||
|
res_ext = ResourceExtension('extensions',
|
||||||
|
extension_resource,
|
||||||
|
serializer=extension_resource.serializer)
|
||||||
|
resources.append(res_ext)
|
||||||
|
for alias, ext in self.extensions.iteritems():
|
||||||
|
try:
|
||||||
|
resources.extend(ext.get_resources())
|
||||||
|
except AttributeError:
|
||||||
|
# NOTE(dprince): Extension aren't required to have resource
|
||||||
|
# extensions
|
||||||
|
pass
|
||||||
|
return resources
|
||||||
|
|
||||||
|
def get_actions(self):
|
||||||
|
"""Returns a list of ActionExtension objects."""
|
||||||
|
actions = []
|
||||||
|
for alias, ext in self.extensions.iteritems():
|
||||||
|
try:
|
||||||
|
actions.extend(ext.get_actions())
|
||||||
|
except AttributeError:
|
||||||
|
# NOTE(dprince): Extension aren't required to have action
|
||||||
|
# extensions
|
||||||
|
pass
|
||||||
|
return actions
|
||||||
|
|
||||||
|
def get_request_extensions(self):
|
||||||
|
"""Returns a list of RequestExtension objects."""
|
||||||
|
request_exts = []
|
||||||
|
for alias, ext in self.extensions.iteritems():
|
||||||
|
try:
|
||||||
|
request_exts.extend(ext.get_request_extensions())
|
||||||
|
except AttributeError:
|
||||||
|
# NOTE(dprince): Extension aren't required to have request
|
||||||
|
# extensions
|
||||||
|
pass
|
||||||
|
return request_exts
|
||||||
|
|
||||||
|
def _check_extension(self, extension):
|
||||||
|
"""Checks for required methods in extension objects."""
|
||||||
|
try:
|
||||||
|
LOG.debug(_('Ext name: %s'), extension.get_name())
|
||||||
|
LOG.debug(_('Ext alias: %s'), extension.get_alias())
|
||||||
|
LOG.debug(_('Ext description: %s'), extension.get_description())
|
||||||
|
LOG.debug(_('Ext namespace: %s'), extension.get_namespace())
|
||||||
|
LOG.debug(_('Ext updated: %s'), extension.get_updated())
|
||||||
|
except AttributeError as ex:
|
||||||
|
LOG.exception(_("Exception loading extension: %s"), unicode(ex))
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _load_all_extensions(self):
|
||||||
|
"""Load extensions from the configured path.
|
||||||
|
|
||||||
|
Load extensions from the configured path. The extension name is
|
||||||
|
constructed from the module_name. If your extension module was named
|
||||||
|
widgets.py the extension class within that module should be
|
||||||
|
'Widgets'.
|
||||||
|
|
||||||
|
In addition, extensions are loaded from the 'contrib' directory.
|
||||||
|
|
||||||
|
See nova/tests/api/openstack/extensions/foxinsocks.py for an example
|
||||||
|
extension implementation.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if os.path.exists(self.path):
|
||||||
|
self._load_all_extensions_from_path(self.path)
|
||||||
|
|
||||||
|
contrib_path = os.path.join(os.path.dirname(__file__), "contrib")
|
||||||
|
if os.path.exists(contrib_path):
|
||||||
|
self._load_all_extensions_from_path(contrib_path)
|
||||||
|
|
||||||
|
def _load_all_extensions_from_path(self, path):
|
||||||
|
for f in os.listdir(path):
|
||||||
|
LOG.debug(_('Loading extension file: %s'), f)
|
||||||
|
mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
|
||||||
|
ext_path = os.path.join(path, f)
|
||||||
|
if file_ext.lower() == '.py' and not mod_name.startswith('_'):
|
||||||
|
mod = imp.load_source(mod_name, ext_path)
|
||||||
|
ext_name = mod_name[0].upper() + mod_name[1:]
|
||||||
|
new_ext_class = getattr(mod, ext_name, None)
|
||||||
|
if not new_ext_class:
|
||||||
|
LOG.warn(_('Did not find expected name '
|
||||||
|
'"%(ext_name)s" in %(file)s'),
|
||||||
|
{'ext_name': ext_name,
|
||||||
|
'file': ext_path})
|
||||||
|
continue
|
||||||
|
new_ext = new_ext_class()
|
||||||
|
self.add_extension(new_ext)
|
||||||
|
|
||||||
|
def add_extension(self, ext):
|
||||||
|
# Do nothing if the extension doesn't check out
|
||||||
|
if not self._check_extension(ext):
|
||||||
|
return
|
||||||
|
|
||||||
|
alias = ext.get_alias()
|
||||||
|
LOG.debug(_('Loaded extension: %s'), alias)
|
||||||
|
|
||||||
|
if alias in self.extensions:
|
||||||
|
raise exception.Error("Found duplicate extension: %s" % alias)
|
||||||
|
self.extensions[alias] = ext
|
||||||
|
|
||||||
|
|
||||||
|
class RequestExtension(object):
|
||||||
|
"""Extend requests and responses of core nova OpenStack API resources.
|
||||||
|
|
||||||
|
Provide a way to add data to responses and handle custom request data
|
||||||
|
that is sent to core nova OpenStack API controllers.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(self, method, url_route, handler):
|
||||||
|
self.url_route = url_route
|
||||||
|
self.handler = handler
|
||||||
|
self.conditions = dict(method=[method])
|
||||||
|
self.key = "%s-%s" % (method, url_route)
|
||||||
|
|
||||||
|
|
||||||
|
class ActionExtension(object):
|
||||||
|
"""Add custom actions to core nova OpenStack API resources."""
|
||||||
|
|
||||||
|
def __init__(self, collection, action_name, handler):
|
||||||
|
self.collection = collection
|
||||||
|
self.action_name = action_name
|
||||||
|
self.handler = handler
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceExtension(object):
|
||||||
|
"""Add top level resources to the OpenStack API in nova."""
|
||||||
|
|
||||||
|
def __init__(self, collection, controller, parent=None,
|
||||||
|
collection_actions=None, member_actions=None,
|
||||||
|
deserializer=None, serializer=None):
|
||||||
|
if not collection_actions:
|
||||||
|
collection_actions = {}
|
||||||
|
if not member_actions:
|
||||||
|
member_actions = {}
|
||||||
|
self.collection = collection
|
||||||
|
self.controller = controller
|
||||||
|
self.parent = parent
|
||||||
|
self.collection_actions = collection_actions
|
||||||
|
self.member_actions = member_actions
|
||||||
|
self.deserializer = deserializer
|
||||||
|
self.serializer = serializer
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionsXMLSerializer(wsgi.XMLDictSerializer):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.nsmap = {None: DEFAULT_XMLNS, 'atom': XMLNS_ATOM}
|
||||||
|
|
||||||
|
def show(self, ext_dict):
|
||||||
|
ext = etree.Element('extension', nsmap=self.nsmap)
|
||||||
|
self._populate_ext(ext, ext_dict['extension'])
|
||||||
|
return self._to_xml(ext)
|
||||||
|
|
||||||
|
def index(self, exts_dict):
|
||||||
|
exts = etree.Element('extensions', nsmap=self.nsmap)
|
||||||
|
for ext_dict in exts_dict['extensions']:
|
||||||
|
ext = etree.SubElement(exts, 'extension')
|
||||||
|
self._populate_ext(ext, ext_dict)
|
||||||
|
return self._to_xml(exts)
|
||||||
|
|
||||||
|
def _populate_ext(self, ext_elem, ext_dict):
|
||||||
|
"""Populate an extension xml element from a dict."""
|
||||||
|
|
||||||
|
ext_elem.set('name', ext_dict['name'])
|
||||||
|
ext_elem.set('namespace', ext_dict['namespace'])
|
||||||
|
ext_elem.set('alias', ext_dict['alias'])
|
||||||
|
ext_elem.set('updated', ext_dict['updated'])
|
||||||
|
desc = etree.Element('description')
|
||||||
|
desc.text = ext_dict['description']
|
||||||
|
ext_elem.append(desc)
|
||||||
|
for link in ext_dict.get('links', []):
|
||||||
|
elem = etree.SubElement(ext_elem, '{%s}link' % XMLNS_ATOM)
|
||||||
|
elem.set('rel', link['rel'])
|
||||||
|
elem.set('href', link['href'])
|
||||||
|
elem.set('type', link['type'])
|
||||||
|
return ext_elem
|
||||||
|
|
||||||
|
def _to_xml(self, root):
|
||||||
|
"""Convert the xml object to an xml string."""
|
||||||
|
|
||||||
|
return etree.tostring(root, encoding='UTF-8')
|
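For reference, the extension loader above derives the expected class name from the module file name (a file named foxinsocks.py must define a class Foxinsocks) and then calls the descriptor methods verified by _check_extension. A bare-bones, hypothetical extension module might look like this; the names mirror the examples used in the docstrings above:

# Hypothetical contents of foxinsocks.py placed on the api_extensions_path.
# Only the five methods checked by _check_extension are required; the
# get_resources/get_actions/get_request_extensions hooks are optional.
class Foxinsocks(object):

    def get_name(self):
        return "Fox In Socks"

    def get_alias(self):
        return "FOXNSOX"

    def get_description(self):
        return "The Fox In Socks Extension"

    def get_namespace(self):
        return "http://www.fox.in.socks/api/ext/pie/v1.0"

    def get_updated(self):
        return "2011-01-22T13:25:27-06:00"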
0
windc/openstack/common/middleware/__init__.py
Normal file
64
windc/openstack/common/middleware/context.py
Normal file
@ -0,0 +1,64 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Middleware that attaches a context to the WSGI request
"""

from openstack.common import utils
from openstack.common import wsgi
from openstack.common import context


class ContextMiddleware(wsgi.Middleware):
    def __init__(self, app, options):
        self.options = options
        super(ContextMiddleware, self).__init__(app)

    def make_context(self, *args, **kwargs):
        """
        Create a context with the given arguments.
        """

        # Determine the context class to use
        ctxcls = context.RequestContext
        if 'context_class' in self.options:
            ctxcls = utils.import_class(self.options['context_class'])

        return ctxcls(*args, **kwargs)

    def process_request(self, req):
        """
        Extract any authentication information in the request and
        construct an appropriate context from it.
        """
        # Use the default empty context, with admin turned on for
        # backwards compatibility
        req.context = self.make_context(is_admin=True)


def filter_factory(global_conf, **local_conf):
    """
    Factory method for paste.deploy
    """
    conf = global_conf.copy()
    conf.update(local_conf)

    def filter(app):
        return ContextMiddleware(app, conf)

    return filter
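filter_factory is the hook paste.deploy calls when this middleware appears in a pipeline; the merged options are what make_context later consults for context_class. A hedged sketch of the equivalent wiring done by hand (my_wsgi_app is a placeholder, not part of this commit):

# Hypothetical manual wiring, bypassing a paste ini file.
options = {'context_class': 'openstack.common.context.RequestContext'}
make_filter = filter_factory(options)
app_with_context = make_filter(my_wsgi_app)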
210
windc/openstack/common/utils.py
Normal file
@ -0,0 +1,210 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
System-level utilities and helper functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import shlex
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
from eventlet import greenthread
|
||||||
|
from eventlet.green import subprocess
|
||||||
|
|
||||||
|
from openstack.common import exception
|
||||||
|
|
||||||
|
|
||||||
|
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def int_from_bool_as_string(subject):
|
||||||
|
"""
|
||||||
|
Interpret a string as a boolean and return either 1 or 0.
|
||||||
|
|
||||||
|
Any string value in:
|
||||||
|
('True', 'true', 'On', 'on', '1')
|
||||||
|
is interpreted as a boolean True.
|
||||||
|
|
||||||
|
Useful for JSON-decoded stuff and config file parsing
|
||||||
|
"""
|
||||||
|
return bool_from_string(subject) and 1 or 0
|
||||||
|
|
||||||
|
|
||||||
|
def bool_from_string(subject):
|
||||||
|
"""
|
||||||
|
Interpret a string as a boolean.
|
||||||
|
|
||||||
|
Any string value in:
|
||||||
|
('True', 'true', 'On', 'on', '1')
|
||||||
|
is interpreted as a boolean True.
|
||||||
|
|
||||||
|
Useful for JSON-decoded stuff and config file parsing
|
||||||
|
"""
|
||||||
|
if isinstance(subject, types.BooleanType):
|
||||||
|
return subject
|
||||||
|
if isinstance(subject, types.StringTypes):
|
||||||
|
if subject.strip().lower() in ('true', 'on', '1'):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def execute(*cmd, **kwargs):
|
||||||
|
"""
|
||||||
|
Helper method to execute command with optional retry.
|
||||||
|
|
||||||
|
:cmd Passed to subprocess.Popen.
|
||||||
|
:process_input Send to opened process.
|
||||||
|
:check_exit_code Defaults to 0. Raise exception.ProcessExecutionError
|
||||||
|
unless program exits with this code.
|
||||||
|
:delay_on_retry True | False. Defaults to True. If set to True, wait a
|
||||||
|
short amount of time before retrying.
|
||||||
|
:attempts How many times to retry cmd.
|
||||||
|
:run_as_root True | False. Defaults to False. If set to True,
|
||||||
|
the command is prefixed by the command specified
|
||||||
|
in the root_helper kwarg.
|
||||||
|
:root_helper command to prefix all cmd's with
|
||||||
|
|
||||||
|
:raises exception.Error on receiving unknown arguments
|
||||||
|
:raises exception.ProcessExecutionError
|
||||||
|
"""
|
||||||
|
|
||||||
|
process_input = kwargs.pop('process_input', None)
|
||||||
|
check_exit_code = kwargs.pop('check_exit_code', 0)
|
||||||
|
delay_on_retry = kwargs.pop('delay_on_retry', True)
|
||||||
|
attempts = kwargs.pop('attempts', 1)
|
||||||
|
run_as_root = kwargs.pop('run_as_root', False)
|
||||||
|
root_helper = kwargs.pop('root_helper', '')
|
||||||
|
if len(kwargs):
|
||||||
|
raise exception.Error(_('Got unknown keyword args '
|
||||||
|
'to utils.execute: %r') % kwargs)
|
||||||
|
if run_as_root:
|
||||||
|
cmd = shlex.split(root_helper) + list(cmd)
|
||||||
|
cmd = map(str, cmd)
|
||||||
|
|
||||||
|
while attempts > 0:
|
||||||
|
attempts -= 1
|
||||||
|
try:
|
||||||
|
LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
|
||||||
|
_PIPE = subprocess.PIPE # pylint: disable=E1101
|
||||||
|
obj = subprocess.Popen(cmd,
|
||||||
|
stdin=_PIPE,
|
||||||
|
stdout=_PIPE,
|
||||||
|
stderr=_PIPE,
|
||||||
|
close_fds=True)
|
||||||
|
result = None
|
||||||
|
if process_input is not None:
|
||||||
|
result = obj.communicate(process_input)
|
||||||
|
else:
|
||||||
|
result = obj.communicate()
|
||||||
|
obj.stdin.close() # pylint: disable=E1101
|
||||||
|
_returncode = obj.returncode # pylint: disable=E1101
|
||||||
|
if _returncode:
|
||||||
|
LOG.debug(_('Result was %s') % _returncode)
|
||||||
|
if type(check_exit_code) == types.IntType \
|
||||||
|
and _returncode != check_exit_code:
|
||||||
|
(stdout, stderr) = result
|
||||||
|
raise exception.ProcessExecutionError(
|
||||||
|
exit_code=_returncode,
|
||||||
|
stdout=stdout,
|
||||||
|
stderr=stderr,
|
||||||
|
cmd=' '.join(cmd))
|
||||||
|
return result
|
||||||
|
except exception.ProcessExecutionError:
|
||||||
|
if not attempts:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
LOG.debug(_('%r failed. Retrying.'), cmd)
|
||||||
|
if delay_on_retry:
|
||||||
|
greenthread.sleep(random.randint(20, 200) / 100.0)
|
||||||
|
finally:
|
||||||
|
# NOTE(termie): this appears to be necessary to let the subprocess
|
||||||
|
# call clean something up in between calls, without
|
||||||
|
# it two execute calls in a row hangs the second one
|
||||||
|
greenthread.sleep(0)
|
||||||
|
|
||||||
|
|
||||||
|
def import_class(import_str):
|
||||||
|
"""Returns a class from a string including module and class"""
|
||||||
|
mod_str, _sep, class_str = import_str.rpartition('.')
|
||||||
|
try:
|
||||||
|
__import__(mod_str)
|
||||||
|
return getattr(sys.modules[mod_str], class_str)
|
||||||
|
except (ImportError, ValueError, AttributeError):
|
||||||
|
raise exception.NotFound('Class %s cannot be found' % class_str)
|
||||||
|
|
||||||
|
|
||||||
|
def import_object(import_str):
|
||||||
|
"""Returns an object including a module or module and class"""
|
||||||
|
try:
|
||||||
|
__import__(import_str)
|
||||||
|
return sys.modules[import_str]
|
||||||
|
except ImportError:
|
||||||
|
return import_class(import_str)
|
||||||
|
|
||||||
|
|
||||||
|
def isotime(at=None):
|
||||||
|
if not at:
|
||||||
|
at = datetime.datetime.utcnow()
|
||||||
|
return at.strftime(TIME_FORMAT)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_isotime(timestr):
|
||||||
|
return datetime.datetime.strptime(timestr, TIME_FORMAT)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_mailmap(mailmap='.mailmap'):
|
||||||
|
mapping = {}
|
||||||
|
if os.path.exists(mailmap):
|
||||||
|
fp = open(mailmap, 'r')
|
||||||
|
for l in fp:
|
||||||
|
l = l.strip()
|
||||||
|
if not l.startswith('#') and ' ' in l:
|
||||||
|
canonical_email, alias = l.split(' ')
|
||||||
|
mapping[alias] = canonical_email
|
||||||
|
return mapping
|
||||||
|
|
||||||
|
|
||||||
|
def str_dict_replace(s, mapping):
|
||||||
|
for s1, s2 in mapping.iteritems():
|
||||||
|
s = s.replace(s1, s2)
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def utcnow():
|
||||||
|
"""Overridable version of utils.utcnow."""
|
||||||
|
if utcnow.override_time:
|
||||||
|
return utcnow.override_time
|
||||||
|
return datetime.datetime.utcnow()
|
||||||
|
|
||||||
|
|
||||||
|
utcnow.override_time = None
|
||||||
|
|
||||||
|
|
||||||
|
def set_time_override(override_time=datetime.datetime.utcnow()):
|
||||||
|
"""Override utils.utcnow to return a constant time."""
|
||||||
|
utcnow.override_time = override_time
|
||||||
|
|
||||||
|
|
||||||
|
def clear_time_override():
|
||||||
|
"""Remove the overridden time."""
|
||||||
|
utcnow.override_time = None
|
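The two boolean helpers at the top of this module are deliberately strict about what counts as true; a quick sketch of their behaviour:

bool_from_string('True')       # True
bool_from_string(' on ')       # True ('true', 'on' and '1', case-insensitive)
bool_from_string('yes')        # False, anything else is False
bool_from_string(True)         # True, booleans pass straight through
int_from_bool_as_string('1')   # 1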
717
windc/openstack/common/wsgi.py
Normal file
@ -0,0 +1,717 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""Utility methods for working with WSGI servers."""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import eventlet
|
||||||
|
import eventlet.wsgi
|
||||||
|
|
||||||
|
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
import routes
|
||||||
|
import routes.middleware
|
||||||
|
import webob.dec
|
||||||
|
import webob.exc
|
||||||
|
from xml.dom import minidom
|
||||||
|
from xml.parsers import expat
|
||||||
|
|
||||||
|
from openstack.common import exception
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger('wsgi')
|
||||||
|
|
||||||
|
|
||||||
|
class WritableLogger(object):
|
||||||
|
"""A thin wrapper that responds to `write` and logs."""
|
||||||
|
|
||||||
|
def __init__(self, logger, level=logging.DEBUG):
|
||||||
|
self.logger = logger
|
||||||
|
self.level = level
|
||||||
|
|
||||||
|
def write(self, msg):
|
||||||
|
self.logger.log(self.level, msg.strip("\n"))
|
||||||
|
|
||||||
|
|
||||||
|
def run_server(application, port):
|
||||||
|
"""Run a WSGI server with the given application."""
|
||||||
|
sock = eventlet.listen(('0.0.0.0', port))
|
||||||
|
eventlet.wsgi.server(sock, application)
|
||||||
|
|
||||||
|
|
||||||
|
class Server(object):
|
||||||
|
"""Server class to manage multiple WSGI sockets and applications."""
|
||||||
|
|
||||||
|
def __init__(self, threads=1000):
|
||||||
|
self.pool = eventlet.GreenPool(threads)
|
||||||
|
|
||||||
|
def start(self, application, port, host='0.0.0.0', backlog=128):
|
||||||
|
"""Run a WSGI server with the given application."""
|
||||||
|
socket = eventlet.listen((host, port), backlog=backlog)
|
||||||
|
self.pool.spawn_n(self._run, application, socket)
|
||||||
|
|
||||||
|
def wait(self):
|
||||||
|
"""Wait until all servers have completed running."""
|
||||||
|
try:
|
||||||
|
self.pool.waitall()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _run(self, application, socket):
|
||||||
|
"""Start a WSGI server in a new green thread."""
|
||||||
|
logger = logging.getLogger('eventlet.wsgi.server')
|
||||||
|
eventlet.wsgi.server(socket, application, custom_pool=self.pool,
|
||||||
|
log=WritableLogger(logger))
|
||||||
|
|
||||||
|
|
||||||
|
class Middleware(object):
|
||||||
|
"""
|
||||||
|
Base WSGI middleware wrapper. These classes require an application to be
|
||||||
|
initialized that will be called next. By default the middleware will
|
||||||
|
simply call its wrapped app, or you can override __call__ to customize its
|
||||||
|
behavior.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
self.application = application
|
||||||
|
|
||||||
|
def process_request(self, req):
|
||||||
|
"""
|
||||||
|
Called on each request.
|
||||||
|
|
||||||
|
If this returns None, the next application down the stack will be
|
||||||
|
executed. If it returns a response then that response will be returned
|
||||||
|
and execution will stop here.
|
||||||
|
"""
|
||||||
|
return None
|
||||||
|
|
||||||
|
def process_response(self, response):
        """Do whatever you'd like to the response."""
        return response

    @webob.dec.wsgify
    def __call__(self, req):
        response = self.process_request(req)
        if response:
            return response
        response = req.get_response(self.application)
        return self.process_response(response)


class Debug(Middleware):
    """
    Helper class that can be inserted into any WSGI application chain
    to get information about the request and response.
    """

    @webob.dec.wsgify
    def __call__(self, req):
        print ("*" * 40) + " REQUEST ENVIRON"
        for key, value in req.environ.items():
            print key, "=", value
        print
        resp = req.get_response(self.application)

        print ("*" * 40) + " RESPONSE HEADERS"
        for (key, value) in resp.headers.iteritems():
            print key, "=", value
        print

        resp.app_iter = self.print_generator(resp.app_iter)

        return resp

    @staticmethod
    def print_generator(app_iter):
        """
        Iterator that prints the contents of a wrapper string iterator
        when iterated.
        """
        print ("*" * 40) + " BODY"
        for part in app_iter:
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print


class Router(object):

    """
    WSGI middleware that maps incoming requests to WSGI apps.
    """

    def __init__(self, mapper):
        """
        Create a router for the given routes.Mapper.

        Each route in `mapper` must specify a 'controller', which is a
        WSGI app to call. You'll probably want to specify an 'action' as
        well and have your controller be a wsgi.Controller, who will route
        the request to the action method.

        Examples:
          mapper = routes.Mapper()
          sc = ServerController()

          # Explicit mapping of one route to a controller+action
          mapper.connect(None, "/svrlist", controller=sc, action="list")

          # Actions are all implicitly defined
          mapper.resource("server", "servers", controller=sc)

          # Pointing to an arbitrary WSGI app. You can specify the
          # {path_info:.*} parameter so the target app can be handed just that
          # section of the URL.
          mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
        """
        self.map = mapper
        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          self.map)

    @webob.dec.wsgify
    def __call__(self, req):
        """
        Route the incoming request to a controller based on self.map.
        If no match, return a 404.
        """
        return self._router

    @staticmethod
    @webob.dec.wsgify
    def _dispatch(req):
        """
        Called by self._router after matching the incoming request to a route
        and putting the information into req.environ. Either returns 404
        or the routed WSGI app's response.
        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            return webob.exc.HTTPNotFound()
        app = match['controller']
        return app


class Request(webob.Request):
    """Add some OpenStack API-specific logic to the base webob.Request."""

    default_request_content_types = ('application/json', 'application/xml')
    default_accept_types = ('application/json', 'application/xml')
    default_accept_type = 'application/json'

    def best_match_content_type(self, supported_content_types=None):
        """Determine the requested response content-type.

        Based on the query extension then the Accept header.
        Defaults to default_accept_type if we don't find a preference.

        """
        supported_content_types = (supported_content_types or
                                   self.default_accept_types)

        parts = self.path.rsplit('.', 1)
        if len(parts) > 1:
            ctype = 'application/{0}'.format(parts[1])
            if ctype in supported_content_types:
                return ctype

        bm = self.accept.best_match(supported_content_types)
        return bm or self.default_accept_type

    def get_content_type(self, allowed_content_types=None):
        """Determine content type of the request body.

        Does not do any body introspection, only checks the header.

        """
        if "Content-Type" not in self.headers:
            return None

        content_type = self.content_type
        allowed_content_types = (allowed_content_types or
                                 self.default_request_content_types)

        if content_type not in allowed_content_types:
            raise exception.InvalidContentType(content_type=content_type)
        return content_type


class Resource(object):
    """
    WSGI app that handles (de)serialization and controller dispatch.

    Reads routing information supplied by RoutesMiddleware and calls
    the requested action method upon its deserializer, controller,
    and serializer. Those three objects may implement any of the basic
    controller action methods (create, update, show, index, delete)
    along with any that may be specified in the api router. A 'default'
    method may also be implemented to be used in place of any
    non-implemented actions. Deserializer methods must accept a request
    argument and return a dictionary. Controller methods must accept a
    request argument. Additionally, they must also accept keyword
    arguments that represent the keys returned by the Deserializer. They
    may raise a webob.exc exception or return a dict, which will be
    serialized by the requested content type.
    """
    def __init__(self, controller, deserializer=None, serializer=None):
        """
        :param controller: object that implements methods created by routes lib
        :param deserializer: object that supports webob request deserialization
                             through controller-like actions
        :param serializer: object that supports webob response serialization
                           through controller-like actions
        """
        self.controller = controller
        self.serializer = serializer or ResponseSerializer()
        self.deserializer = deserializer or RequestDeserializer()

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, request):
        """WSGI method that controls (de)serialization and method dispatch."""

        try:
            action, action_args, accept = self.deserialize_request(request)
        except exception.InvalidContentType:
            msg = _("Unsupported Content-Type")
            return webob.exc.HTTPUnsupportedMediaType(explanation=msg)
        except exception.MalformedRequestBody:
            msg = _("Malformed request body")
            return webob.exc.HTTPBadRequest(explanation=msg)

        action_result = self.execute_action(action, request, **action_args)
        try:
            return self.serialize_response(action, action_result, accept)
        # return unserializable result (typically a webob exc)
        except Exception:
            return action_result

    def deserialize_request(self, request):
        return self.deserializer.deserialize(request)

    def serialize_response(self, action, action_result, accept):
        return self.serializer.serialize(action_result, accept, action)

    def execute_action(self, action, request, **action_args):
        return self.dispatch(self.controller, action, request, **action_args)

    def dispatch(self, obj, action, *args, **kwargs):
        """Find action-specific method on self and call it."""
        try:
            method = getattr(obj, action)
        except AttributeError:
            method = getattr(obj, 'default')

        return method(*args, **kwargs)

    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""
        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except Exception:
            return {}

        try:
            del args['controller']
        except KeyError:
            pass

        try:
            del args['format']
        except KeyError:
            pass

        return args


class ActionDispatcher(object):
    """Maps method name to local methods through action name."""

    def dispatch(self, *args, **kwargs):
        """Find and call local method."""
        action = kwargs.pop('action', 'default')
        action_method = getattr(self, str(action), self.default)
        return action_method(*args, **kwargs)

    def default(self, data):
        raise NotImplementedError()


class DictSerializer(ActionDispatcher):
    """Default response body serialization"""

    def serialize(self, data, action='default'):
        return self.dispatch(data, action=action)

    def default(self, data):
        return ""


class JSONDictSerializer(DictSerializer):
    """Default JSON response body serialization"""

    def default(self, data):
        def sanitizer(obj):
            if isinstance(obj, datetime.datetime):
                _dtime = obj - datetime.timedelta(microseconds=obj.microsecond)
                return _dtime.isoformat()
            return obj
        return json.dumps(data, default=sanitizer)


class XMLDictSerializer(DictSerializer):

    def __init__(self, metadata=None, xmlns=None):
        """
        :param metadata: information needed to deserialize xml into
                         a dictionary.
        :param xmlns: XML namespace to include with serialized xml
        """
        super(XMLDictSerializer, self).__init__()
        self.metadata = metadata or {}
        self.xmlns = xmlns

    def default(self, data):
        # We expect data to contain a single key which is the XML root.
        root_key = data.keys()[0]
        doc = minidom.Document()
        node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])

        return self.to_xml_string(node)

    def to_xml_string(self, node, has_atom=False):
        self._add_xmlns(node, has_atom)
        return node.toprettyxml(indent='    ', encoding='UTF-8')

    #NOTE (ameade): the has_atom flag should be removed after all of the
    # xml serializers and view builders have been updated to the current
    # spec that requires all responses to include the xmlns:atom; the
    # has_atom flag is to prevent current tests from breaking
    def _add_xmlns(self, node, has_atom=False):
        if self.xmlns is not None:
            node.setAttribute('xmlns', self.xmlns)
        if has_atom:
            node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")

    def _to_xml_node(self, doc, metadata, nodename, data):
        """Recursive method to convert data members to XML nodes."""
        result = doc.createElement(nodename)

        # Set the xml namespace if one is specified
        # TODO(justinsb): We could also use prefixes on the keys
        xmlns = metadata.get('xmlns', None)
        if xmlns:
            result.setAttribute('xmlns', xmlns)

        #TODO(bcwaldon): accomplish this without a type-check
        if type(data) is list:
            collections = metadata.get('list_collections', {})
            if nodename in collections:
                metadata = collections[nodename]
                for item in data:
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(item))
                    result.appendChild(node)
                return result
            singular = metadata.get('plurals', {}).get(nodename, None)
            if singular is None:
                if nodename.endswith('s'):
                    singular = nodename[:-1]
                else:
                    singular = 'item'
            for item in data:
                node = self._to_xml_node(doc, metadata, singular, item)
                result.appendChild(node)
        #TODO(bcwaldon): accomplish this without a type-check
        elif type(data) is dict:
            collections = metadata.get('dict_collections', {})
            if nodename in collections:
                metadata = collections[nodename]
                for k, v in data.items():
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(k))
                    text = doc.createTextNode(str(v))
                    node.appendChild(text)
                    result.appendChild(node)
                return result
            attrs = metadata.get('attributes', {}).get(nodename, {})
            for k, v in data.items():
                if k in attrs:
                    result.setAttribute(k, str(v))
                else:
                    node = self._to_xml_node(doc, metadata, k, v)
                    result.appendChild(node)
        else:
            # Type is atom
            node = doc.createTextNode(str(data))
            result.appendChild(node)
        return result

    def _create_link_nodes(self, xml_doc, links):
        link_nodes = []
        for link in links:
            link_node = xml_doc.createElement('atom:link')
            link_node.setAttribute('rel', link['rel'])
            link_node.setAttribute('href', link['href'])
            if 'type' in link:
                link_node.setAttribute('type', link['type'])
            link_nodes.append(link_node)
        return link_nodes


class ResponseHeadersSerializer(ActionDispatcher):
    """Default response headers serialization"""

    def serialize(self, response, data, action):
        self.dispatch(response, data, action=action)

    def default(self, response, data):
        response.status_int = 200


class ResponseSerializer(object):
    """Encode the necessary pieces into a response object"""

    def __init__(self, body_serializers=None, headers_serializer=None):
        self.body_serializers = {
            'application/xml': XMLDictSerializer(),
            'application/json': JSONDictSerializer(),
        }
        self.body_serializers.update(body_serializers or {})

        self.headers_serializer = headers_serializer or \
            ResponseHeadersSerializer()

    def serialize(self, response_data, content_type, action='default'):
        """Serialize a dict into a string and wrap it in a webob.Response.

        :param response_data: dict produced by the Controller
        :param content_type: expected mimetype of serialized response body

        """
        response = webob.Response()
        self.serialize_headers(response, response_data, action)
        self.serialize_body(response, response_data, content_type, action)
        return response

    def serialize_headers(self, response, data, action):
        self.headers_serializer.serialize(response, data, action)

    def serialize_body(self, response, data, content_type, action):
        response.headers['Content-Type'] = content_type
        if data is not None:
            serializer = self.get_body_serializer(content_type)
            response.body = serializer.serialize(data, action)

    def get_body_serializer(self, content_type):
        try:
            return self.body_serializers[content_type]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)


class RequestHeadersDeserializer(ActionDispatcher):
    """Default request headers deserializer"""

    def deserialize(self, request, action):
        return self.dispatch(request, action=action)

    def default(self, request):
        return {}


class RequestDeserializer(object):
    """Break up a Request object into more useful pieces."""

    def __init__(self, body_deserializers=None, headers_deserializer=None,
                 supported_content_types=None):

        self.supported_content_types = supported_content_types

        self.body_deserializers = {
            'application/xml': XMLDeserializer(),
            'application/json': JSONDeserializer(),
        }
        self.body_deserializers.update(body_deserializers or {})

        self.headers_deserializer = headers_deserializer or \
            RequestHeadersDeserializer()

    def deserialize(self, request):
        """Extract necessary pieces of the request.

        :param request: Request object
        :returns: tuple of the expected controller action name, a dictionary
                  of keyword arguments to pass to the controller, and the
                  expected content type of the response

        """
        action_args = self.get_action_args(request.environ)
        action = action_args.pop('action', None)

        action_args.update(self.deserialize_headers(request, action))
        action_args.update(self.deserialize_body(request, action))

        accept = self.get_expected_content_type(request)

        return (action, action_args, accept)

    def deserialize_headers(self, request, action):
        return self.headers_deserializer.deserialize(request, action)

    def deserialize_body(self, request, action):
        if not len(request.body) > 0:
            LOG.debug(_("Empty body provided in request"))
            return {}

        try:
            content_type = request.get_content_type()
        except exception.InvalidContentType:
            LOG.debug(_("Unrecognized Content-Type provided in request"))
            raise

        if content_type is None:
            LOG.debug(_("No Content-Type provided in request"))
            return {}

        try:
            deserializer = self.get_body_deserializer(content_type)
        except exception.InvalidContentType:
            LOG.debug(_("Unable to deserialize body as provided Content-Type"))
            raise

        return deserializer.deserialize(request.body, action)

    def get_body_deserializer(self, content_type):
        try:
            return self.body_deserializers[content_type]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)

    def get_expected_content_type(self, request):
        return request.best_match_content_type(self.supported_content_types)

    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""
        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except Exception:
            return {}

        try:
            del args['controller']
        except KeyError:
            pass

        try:
            del args['format']
        except KeyError:
            pass

        return args


class TextDeserializer(ActionDispatcher):
    """Default request body deserialization"""

    def deserialize(self, datastring, action='default'):
        return self.dispatch(datastring, action=action)

    def default(self, datastring):
        return {}


class JSONDeserializer(TextDeserializer):

    def _from_json(self, datastring):
        try:
            return json.loads(datastring)
        except ValueError:
            msg = _("cannot understand JSON")
            raise exception.MalformedRequestBody(reason=msg)

    def default(self, datastring):
        return {'body': self._from_json(datastring)}


class XMLDeserializer(TextDeserializer):

    def __init__(self, metadata=None):
        """
        :param metadata: information needed to deserialize xml into
                         a dictionary.
        """
        super(XMLDeserializer, self).__init__()
        self.metadata = metadata or {}

    def _from_xml(self, datastring):
        plurals = set(self.metadata.get('plurals', {}))

        try:
            node = minidom.parseString(datastring).childNodes[0]
            return {node.nodeName: self._from_xml_node(node, plurals)}
        except expat.ExpatError:
            msg = _("cannot understand XML")
            raise exception.MalformedRequestBody(reason=msg)

    def _from_xml_node(self, node, listnames):
        """Convert a minidom node to a simple Python type.

        :param listnames: list of XML node names whose subnodes should
                          be considered list items.

        """

        if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
            return node.childNodes[0].nodeValue
        elif node.nodeName in listnames:
            return [self._from_xml_node(n, listnames) for n in node.childNodes]
        else:
            result = dict()
            for attr in node.attributes.keys():
                result[attr] = node.attributes[attr].nodeValue
            for child in node.childNodes:
                if child.nodeType != node.TEXT_NODE:
                    result[child.nodeName] = self._from_xml_node(child,
                                                                 listnames)
            return result

    def find_first_child_named(self, parent, name):
        """Search a node's children for the first child with a given name"""
        for node in parent.childNodes:
            if node.nodeName == name:
                return node
        return None

    def find_children_named(self, parent, name):
        """Return all of a node's children that have the given name"""
        for node in parent.childNodes:
            if node.nodeName == name:
                yield node

    def extract_text(self, node):
        """Get the text field contained by the given node"""
        if len(node.childNodes) == 1:
            child = node.childNodes[0]
            if child.nodeType == child.TEXT_NODE:
                return child.nodeValue
        return ""

    def default(self, datastring):
        return {'body': self._from_xml(datastring)}
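
Taken together, the classes above form a small routing and (de)serialization stack: a routes.Mapper names the routes, Resource wraps a controller with request/response (de)serialization, and Router turns the whole thing into a WSGI app of the kind bin/windc-api serves. The sketch below is illustrative only: ComputerController and the /computers route are hypothetical and not part of this commit, and it assumes it runs in (or imports from) the module above so Resource, Router and webob are in scope.

import routes


class ComputerController(object):
    """Hypothetical controller: each action returns a dict to be serialized."""

    def index(self, request):
        return {'computers': [{'id': 1, 'name': 'dc01'}]}


mapper = routes.Mapper()
# mapper.resource() defines the usual index/show/create/update/delete routes;
# Resource handles JSON/XML (de)serialization around the controller.
mapper.resource("computer", "computers",
                controller=Resource(ComputerController()))
app = Router(mapper)

# Exercise the stack without an HTTP server; the ".json" extension makes
# Request.best_match_content_type() pick the JSON serializer.
resp = webob.Request.blank('/computers.json').get_response(app)
print resp.status, resp.body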
9
windc/setup.cfg
Normal file
@ -0,0 +1,9 @@
[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source

[egg_info]
tag_build =
tag_date = 0
tag_svn_revision = 0
90
windc/setup.py
Normal file
@ -0,0 +1,90 @@
#!/usr/bin/python
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import subprocess

from setuptools import setup, find_packages
from setuptools.command.sdist import sdist

from windc import version


if os.path.isdir('.bzr'):
    with open("windc/vcsversion.py", 'w') as version_file:
        vcs_cmd = subprocess.Popen(["bzr", "version-info", "--python"],
                                   stdout=subprocess.PIPE)
        vcsversion = vcs_cmd.communicate()[0]
        version_file.write(vcsversion)


class local_sdist(sdist):
    """Customized sdist hook - builds the ChangeLog file from VC first"""

    def run(self):
        if os.path.isdir('.bzr'):
            # We're in a bzr branch

            log_cmd = subprocess.Popen(["bzr", "log", "--gnu"],
                                       stdout=subprocess.PIPE)
            changelog = log_cmd.communicate()[0]
            with open("ChangeLog", "w") as changelog_file:
                changelog_file.write(changelog)
        sdist.run(self)

cmdclass = {'sdist': local_sdist}

# If Sphinx is installed on the box running setup.py,
# enable setup.py to build the documentation, otherwise,
# just ignore it
try:
    from sphinx.setup_command import BuildDoc

    class local_BuildDoc(BuildDoc):
        def run(self):
            for builder in ['html', 'man']:
                self.builder = builder
                self.finalize_options()
                BuildDoc.run(self)
    cmdclass['build_sphinx'] = local_BuildDoc

except:
    pass


setup(
    name='windc',
    version=version.canonical_version_string(),
    description='The WinDC project provides a simple WSGI server for Windows Environment Management',
    license='Apache License (2.0)',
    author='OpenStack',
    author_email='openstack@lists.launchpad.net',
    url='http://windc.openstack.org/',
    packages=find_packages(exclude=['tests', 'bin']),
    test_suite='nose.collector',
    cmdclass=cmdclass,
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.6',
        'Environment :: No Input/Output (Daemon)',
    ],
    scripts=['bin/windc',
             'bin/windc-api'])
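
When setup.py runs inside a bzr branch, the block at its top shells out to `bzr version-info --python` and captures the output into windc/vcsversion.py, which the version module below then tries to import. A hypothetical sketch of that generated file follows; the real contents are whatever bzr emits, and only the version_info keys that version.py actually reads are shown.

# windc/vcsversion.py -- hypothetical example of the generated file; the real
# file is captured verbatim from "bzr version-info --python" by setup.py.
version_info = {'branch_nick': u'windc',
                'revision_id': 'dev@example.com-20111118120000-0123456789abcdef',
                'revno': 1}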
0
windc/tests/__init__.py
Normal file
0
windc/tests/functional/__init__.py
Normal file
0
windc/tests/unit/__init__.py
Normal file
0
windc/windc/__init__.py
Normal file
0
windc/windc/api/__init__.py
Normal file
0
windc/windc/api/middleware/__init__.py
Normal file
0
windc/windc/api/v1/__init__.py
Normal file
49
windc/windc/version.py
Normal file
@ -0,0 +1,49 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Determine version of the windc library"""

try:
    from windc.vcsversion import version_info
except ImportError:
    version_info = {'branch_nick': u'LOCALBRANCH',
                    'revision_id': 'LOCALREVISION',
                    'revno': 0}

WINDC_VERSION = ['2011', '3']
YEAR, COUNT = WINDC_VERSION

FINAL = False   # This becomes true at Release Candidate time


def canonical_version_string():
    return '.'.join([YEAR, COUNT])


def version_string():
    if FINAL:
        return canonical_version_string()
    else:
        return '%s-dev' % (canonical_version_string(),)


def vcs_version_string():
    return "%s:%s" % (version_info['branch_nick'], version_info['revision_id'])


def version_string_with_vcs():
    return "%s-%s" % (canonical_version_string(), vcs_version_string())
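
Read together, these helpers compose the user-visible version strings. A quick interactive check is shown below; the values are illustrative and assume no generated vcsversion.py is importable, so the LOCALBRANCH fallback above is used.

>>> from windc import version
>>> version.canonical_version_string()
'2011.3'
>>> version.version_string()            # FINAL is False, so a -dev suffix is appended
'2011.3-dev'
>>> version.version_string_with_vcs()   # uses the fallback version_info
'2011.3-LOCALBRANCH:LOCALREVISION'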