Add a bunch more things to batcave01.

This commit is contained in:
Kevin Fenzi 2015-09-21 19:57:01 +00:00
parent ba4b744bcd
commit f6d0b0dee0
12 changed files with 2232 additions and 15 deletions

View file

@ -24,6 +24,7 @@
- cgit/base
- cgit/clean_lock_cron
- cgit/make_pkgs_list
- rsyncd
- apache
- httpd/mod_ssl
- { role: httpd/certificate, name: wildcard-2014.fedoraproject.org, SSLCertificateChainFile: wildcard-2014.fedoraproject.org.intermediate.cert }

View file

@ -0,0 +1,97 @@
allow from .fedoraproject.org
allow from 10.
allow from 64.34.163.94
allow from 64.34.163.95
allow from 64.34.163.96
allow from 64.34.183.130
allow from 64.34.184.178
allow from 64.34.184.179
allow from 64.34.195.12
allow from 64.34.195.13
allow from 64.34.195.14
allow from 64.34.203.162
allow from 64.34.212.36
allow from 64.34.212.37
allow from 64.34.212.38
allow from 66.135.52.16
allow from 66.135.52.17
allow from 66.135.52.84
allow from 66.135.52.85
allow from 66.135.55.241
allow from 66.135.60.113
allow from 66.35.62.160/28
allow from 80.239.144.84
allow from 80.239.156.208/28
allow from 94.76.206.175
allow from 128.197.185.42
allow from 128.197.185.45
allow from 140.211.169.192/26
allow from 209.132.178.0/23
allow from 209.132.180.0/24
allow from 209.132.181.0/24
allow from 209.132.182.51
allow from 209.132.184.0/24
allow from 213.175.193.204
allow from 213.175.193.205
allow from 213.175.193.206
allow from 213.175.193.207
# ibiblio
allow from 152.19.134.136
allow from 152.19.134.137
allow from 152.19.134.138
allow from 152.19.134.139
allow from 152.19.134.140
allow from 152.19.134.141
allow from 152.19.134.142
allow from 152.19.134.143
allow from 152.19.134.144
allow from 152.19.134.145
allow from 152.19.134.146
allow from 152.19.134.147
allow from 152.19.134.148
allow from 152.19.134.149
allow from 152.19.134.150
allow from 152.19.134.169
allow from 152.19.134.170
allow from 152.19.134.172
allow from 152.19.134.173
allow from 152.19.134.191
allow from 152.19.134.192
allow from 152.19.134.193
allow from 152.19.134.194
allow from 152.19.134.195
allow from 152.19.134.196
allow from 152.19.134.197
allow from 152.19.134.198
# internetx
allow from 85.236.55.0/28
allow from 2001:4178:2:1269::/64
# ibiblio ipv6
allow from 2610:28:3090:3001:dead:beef:cafe:fe00/120
# Private network cloud nat
allow from 209.132.180.6
# rdu - rht
allow from 204.85.14.1
allow from 204.85.14.2
allow from 204.85.14.3
# sourceware systems
allow from 209.132.180.128/27
# colocation america system.
allow from 67.203.2.64/29
# NOTE(review): "2067:f188::/64" looks like a typo for "2607:f188::/64"
# (2607:f188::/32 is a US hosting allocation consistent with the
# "colocation america" comment above) — confirm before changing.
allow from 2067:f188:0:0::0/64
# host1plus
allow from 5.175.150.48/28
# dedicatedsolutions
allow from 67.219.144.66
allow from 67.219.144.67
allow from 67.219.144.68
allow from 67.219.144.69
allow from 67.219.144.70

View file

@ -0,0 +1,141 @@
#!/usr/bin/env python
import getpass
import os
import subprocess as sp
import sys
from collections import defaultdict
import pygit2
import six
import fedmsg
import fedmsg.config
# Use $GIT_DIR to determine where this repo is.
abspath = os.path.abspath(os.environ['GIT_DIR'])
# This assumes git root dir is named "repo_name.git"
repo_name = '.'.join(abspath.split(os.path.sep)[-1].split('.')[:-1])
# The unix account performing the push (the hook runs as that user).
username = getpass.getuser()
repo = pygit2.Repository(abspath)

print("Emitting a message to the fedmsg bus.")
# Point fedmsg at the inbound relay so hook messages get onto the bus.
config = fedmsg.config.load_config([], None)
config['active'] = True
config['endpoints']['relay_inbound'] = config['relay_inbound']
fedmsg.init(name='relay_inbound', cert_prefix='scm', **config)
def revs_between(head, base):
    """Yield the revision hashes between HEAD and BASE."""
    # pygit2 can't do a rev-list yet, so we have to shell out.. silly.
    argv = ['/usr/bin/git', 'rev-list', '%s...%s' % (head.id, base.id)]
    child = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE, cwd=abspath)
    out, err = child.communicate()
    if child.returncode != 0:
        raise IOError('git rev-list failed: %r, err: %r' % (out, err))
    for rev in out.strip().split('\n'):
        yield rev.strip()
def build_stats(commit):
    """Return (per-file stats, aggregate totals) for *commit* as nested dicts."""
    per_file = defaultdict(lambda: defaultdict(int))

    # Diff against every parent; a root commit (no parents) is diffed
    # against the empty tree instead.
    diffs = [repo.diff(parent, commit) for parent in commit.parents]
    if not diffs:
        diffs = [commit.tree.diff_to_tree(swap=True)]

    for one_diff in diffs:
        for patch in one_diff:
            entry = per_file[patch.new_file_path]
            entry['additions'] += patch.additions
            entry['deletions'] += patch.deletions
            entry['lines'] += patch.additions + patch.deletions

    totals = defaultdict(int)
    for stats in per_file.values():
        totals['additions'] += stats['additions']
        totals['deletions'] += stats['deletions']
        totals['lines'] += stats['lines']
        totals['files'] += 1

    return per_file, totals
seen = []

# Read in all the rev information git-receive-pack hands us.
# Each stdin line is "<old-sha> <new-sha> <refname>".
lines = [line.split() for line in sys.stdin.readlines()]
for line in lines:
    base, head, branch = line
    # Strip "refs/heads/" to leave the bare branch name.
    branch = '/'.join(branch.split('/')[2:])

    try:
        head = repo.revparse_single(head)
    except KeyError:
        # This means they are deleting this branch.. and we don't have a fedmsg
        # for that (yet?). It is disallowed by dist-git in Fedora anyways.
        continue

    try:
        base = repo.revparse_single(base)
        revs = revs_between(head, base)
    except KeyError:
        # New ref: the old sha doesn't resolve, so just report the tip.
        revs = [head.id]

    def _build_commit(rev):
        """Build the fedmsg payload dict for one revision (None for tags)."""
        commit = repo.revparse_single(six.text_type(rev))

        # Tags are a little funny, and vary between versions of pygit2, so we'll
        # just ignore them as far as fedmsg is concerned.
        if isinstance(commit, pygit2.Tag):
            return None

        files, total = build_stats(commit)
        return dict(
            name=commit.author.name,
            email=commit.author.email,
            username=username,
            summary=commit.message.split('\n')[0],
            message=commit.message,
            stats=dict(
                files=files,
                total=total,
            ),
            rev=six.text_type(rev),
            path=abspath,
            repo=repo_name,
            branch=branch,
            agent=os.getlogin(),
        )

    # Materialize the map: on python3 map() returns an iterator, which has
    # no len() and cannot be passed to reversed() below.
    commits = list(map(_build_commit, revs))

    print("* Publishing information for %i commits" % len(commits))
    for commit in reversed(commits):
        if commit is None:
            continue

        # Keep track of whether or not we have already published this commit on
        # another branch or not. It is conceivable that someone could make a
        # commit to a number of branches, and push them all at the same time.
        # Make a note in the fedmsg payload so we can try to reduce spam at a
        # later stage.
        if commit['rev'] in seen:
            commit['seen'] = True
        else:
            commit['seen'] = False
            seen.append(commit['rev'])

        fedmsg.publish(
            topic="receive",
            msg=dict(commit=commit),
            modname="infragit",
        )

553
roles/batcave/files/git-notifier Executable file
View file

@ -0,0 +1,553 @@
#! /usr/bin/env python
import optparse
import os
import shutil
import socket
import sys
import subprocess
import tempfile
import time
Name = "git-notifier"
Version = "0.3"

# State file recording what has already been reported, kept in the repo dir.
CacheFile = ".%s.dat" % Name
# Visual divider between sections of a notification mail.
Separator = "\n>---------------------------------------------------------------\n"
# Magic markers in commit messages that suppress the diff / the whole mail.
NoDiff = "[nodiff]"
NoMail = "[nomail]"

# Detect a gitolite environment and pick the mail sender accordingly.
gitolite = "GL_USER" in os.environ
whoami = os.environ["LOGNAME"]
sender = os.environ["GL_USER"] if gitolite else whoami
# Master table of options: each entry is (name, takes-argument, default, help).
# Defaults may be overridden via "git config hooks.<name>" or the CLI.
Options = [
    # Name, argument, default, help,
    ("allchanges", True, set(), "branches for which *all* changes are to be reported"),
    ("debug", False, False, "enable debug output"),
    ("diff", True, None, "mail out diffs between two revisions"),
    ("emailprefix", True, "[git]", "Subject prefix for mails"),
    ("hostname", True, socket.gethostname(), "host where the repository is hosted"),
    ("log", True, "%s.log" % Name, "set log output"),
    ("mailinglist", True, whoami, "destination address for mails"),
    # Typo fix in user-visible help text: "notifiy" -> "notify".
    ("manual", True, None, "notify for a manually given set of revisions"),
    ("maxdiffsize", True, 50, "limit the size of diffs in mails (KB)"),
    ("noupdate", False, False, "do not update the state file"),
    ("repouri", True, None, "full URI for the repository"),
    ("sender", True, sender, "sender address for mails"),
    ("link", True, None, "Link to insert into mail, %s will be replaced with revision"),
    ("updateonly", False, False, "update state file only, no mails"),
    ("users", True, None, "location of a user-to-email mapping file"),
]
class State:
    """Persisted snapshot of the repository: heads, tags, reported revisions."""
    def __init__(self):
        self.clear()

    def clear(self):
        self.heads = {}        # branch name -> sha
        self.tags = {}         # annotated tag name -> sha
        self.revs = set()      # every revision already known/reported
        self.reported = set()  # Revs reported this run so far.

    def writeTo(self, file):
        # Keep a best-effort backup of the previous state file.
        if os.path.exists(CacheFile):
            try:
                shutil.move(CacheFile, CacheFile + ".bak")
            except IOError:
                pass

        # NOTE(review): the handle is never closed explicitly; CPython closes
        # it at GC/exit, but an explicit close() would be safer.
        out = open(file, "w")

        for (head, ref) in self.heads.items():
            print >>out, "head", head, ref

        for (tag, ref) in self.tags.items():
            print >>out, "tag", tag, ref

        for rev in self.revs:
            print >>out, "rev", rev

    def readFrom(self, file):
        """Reset and reload state from *file* (one "type key [val]" per line)."""
        self.clear()

        for line in open(file):
            line = line.strip()

            # Skip blanks and comments.
            if not line or line.startswith("#"):
                continue

            m = line.split()

            if len(m) == 3:
                (type, key, val) = (m[0], m[1], m[2])
            else:
                # No heads.
                (type, key, val) = (m[0], m[1], "")

            if type == "head":
                self.heads[key] = val
            elif type == "tag":
                self.tags[key] = val
            elif type == "rev":
                self.revs.add(key)
            else:
                error("unknown type %s in cache file" % type)
class GitConfig:
    """Runtime configuration: merges git-config "hooks.*" values and CLI flags."""
    def __init__(self, args):
        self.parseArgs(args)

        self.maxdiffsize *= 1024  # KBytes to bytes.

        # "--allchanges a,b,c" arrives as a string; normalize to a set.
        if self.allchanges and not isinstance(self.allchanges, set):
            self.allchanges = set([head.strip() for head in self.allchanges.split(",")])

        # In debug mode log lines go to stderr instead of the log file.
        if not self.debug:
            self.log = open(self.log, "a")
        else:
            self.log = sys.stderr

        # Under gitolite, fall back to the admin-dir sender map when no
        # --users file was given.
        if not self.users and "GL_ADMINDIR" in os.environ:
            users = os.path.join(os.environ["GL_ADMINDIR"], "conf/sender.cfg")
            if os.path.exists(users):
                self.users = users

        self.readUsers()

    def parseArgs(self, args):
        """Build the option parser from Options and store results as attrs."""
        parser = optparse.OptionParser(version=Version)

        for (name, arg, default, help) in Options:
            # git-config value (hooks.<name>) overrides the built-in default.
            defval = self._git_config(name, default)

            if isinstance(default, int):
                defval = int(defval)

            if not arg:
                defval = bool(defval)

            if not arg:
                # Boolean flag: the switch inverts the default.
                action = "store_true" if not default else "store_false"
                parser.add_option("--%s" % name, action=action, dest=name, default=defval, help=help)
            else:
                type = "string" if not isinstance(default, int) else "int"
                parser.add_option("--%s" % name, action="store", type=type, default=defval, dest=name, help=help)

        (options, args) = parser.parse_args(args)

        if len(args) != 0:
            parser.error("incorrect number of arguments")

        for (name, arg, default, help) in Options:
            self.__dict__[name] = options.__dict__[name]

    def readUsers(self):
        """Map the sender login to a full address via the users file, if any."""
        if self.users and os.path.exists(self.users):
            for line in open(self.users):
                line = line.strip()
                if not line or line.startswith("#"):
                    continue

                m = line.split()

                if self.sender == m[0]:
                    self.sender = " ".join(m[1:])
                    break

    def _git_config(self, key, default):
        # Returns "git config hooks.<key>" if set, else *default*.
        cfg = git(["config hooks.%s" % key])
        return cfg[0] if cfg else default
def log(msg):
    # Timestamped line to the configured sink (log file, or stderr in debug).
    print >>Config.log, "%s - %s" % (time.asctime(), msg)
def error(msg):
    """Log *msg* as an error and abort the hook."""
    log("Error: %s" % msg)
    sys.exit(1)
def git(args, stdout_to=subprocess.PIPE, all=False):
    """Run "git <args>"; returns stdout lines (empty lines kept if all=True)."""
    if isinstance(args, tuple) or isinstance(args, list):
        args = " ".join(args)

    try:
        if Config.debug:
            print >>sys.stderr, "> git " + args
    except NameError:
        # Config may not be defined yet.
        pass

    try:
        child = subprocess.Popen("git " + args, shell=True, stdin=None, stdout=stdout_to, stderr=subprocess.PIPE)
        (stdout, stderr) = child.communicate()
    except OSError, e:
        error("cannot start git: %s" % str(e))

    # NOTE(review): a non-zero exit with *empty* stderr is silently ignored.
    if child.returncode != 0 and stderr:
        msg = ": %s" % stderr if stderr else ""
        error("git child failed with exit code %d%s" % (child.returncode, msg))

    if stdout_to != subprocess.PIPE:
        # Output went to the caller's file; nothing to return.
        return []

    if not all:
        return [line.strip() for line in stdout.split("\n") if line]
    else:
        return stdout.split("\n")
def getHeads(state):
    """Record every local branch head (name -> sha) in state.heads."""
    for entry in git("show-ref --heads"):
        sha, ref = entry.split()
        # Strip the "refs/heads/" prefix to leave the bare branch name.
        if ref.startswith("refs/heads/"):
            ref = ref[11:]
        state.heads[ref] = sha
def getTags(state):
    """Record every annotated tag (name -> sha) in state.tags."""
    for entry in git("show-ref --tags"):
        sha, ref = entry.split()
        # Only annotated tags have a "tag" object; lightweight tags point
        # straight at a commit and are skipped.
        if git("cat-file -t %s" % sha)[0] == "tag":
            if ref.startswith("refs/tags/"):
                ref = ref[10:]
            state.tags[ref] = sha
def getReachableRefs(state):
    """Collect every revision reachable from any known head or tag."""
    starting_points = list(state.heads) + list(state.tags)
    for sha in git(["rev-list"] + starting_points):
        state.revs.add(sha)
def getCurrent():
    """Snapshot the repository's current heads, tags, and reachable revs."""
    snapshot = State()
    getHeads(snapshot)
    getTags(snapshot)
    getReachableRefs(snapshot)
    return snapshot
Tmps = []

def makeTmp():
    """Create a tracked temp file; returns (open file object, path)."""
    global Tmps
    (fd, path) = tempfile.mkstemp(prefix="%s-" % Name, suffix=".tmp")
    Tmps.append(path)
    return (os.fdopen(fd, "w"), path)
def deleteTmps():
    """Remove every temp file created via makeTmp()."""
    for path in Tmps:
        os.unlink(path)
def mailTag(key, value):
    """Format a "Key : value" header line, key left-padded to 11 columns."""
    return str(key).ljust(11) + ": " + str(value)
def generateMailHeader(subject):
    """Open a temp file pre-filled with mail headers; returns (file, path)."""
    repo = Config.repouri

    if not repo:
        if gitolite:
            # Gitolite version.
            repo = "ssh://%s@%s/%s" % (whoami, Config.hostname, os.path.basename(os.getcwd()))
        else:
            # Standard version.
            repo = "ssh://%s/%s" % (Config.hostname, os.path.basename(os.getcwd()))

        if repo.endswith(".git"):
            repo = repo[0:-4]

    (out, fname) = makeTmp()

    # Trailing comma suppresses print's extra newline; the template already
    # ends with one.
    print >>out, """From: %s
To: %s
Subject: %s %s
X-Git-Repository: %s
X-Mailer: %s %s
%s
""" % (Config.sender, Config.mailinglist, Config.emailprefix, subject, repo,
       Name, Version, mailTag("Repository", repo)),

    return (out, fname)
def sendMail(out, fname):
    """Close the message file and deliver it via sendmail (or dump in debug)."""
    out.close()

    if Config.debug:
        # Debug: print the message instead of mailing it.
        for line in open(fname):
            print " |", line,
        print ""
    else:
        stdin = subprocess.Popen("/usr/sbin/sendmail -t", shell=True, stdin=subprocess.PIPE).stdin
        for line in open(fname):
            print >>stdin, line,
        stdin.close()

    # Wait a bit in case we're going to send more mails. Otherwise, the mails
    # get sent back-to-back and are likely to end up with identical timestamps,
    # which may then make them appear to have arrived in the wrong order.
    if not Config.debug:
        time.sleep(2)
def entryAdded(key, value, rev):
    """Mail a notice that a new branch/tag *value* appeared, pointing at *rev*."""
    log("New %s %s" % (key, value))

    (out, fname) = generateMailHeader("%s '%s' created" % (key, value))

    print >>out, mailTag("New %s" % key, value)
    print >>out, mailTag("Referencing", rev)

    sendMail(out, fname)
def entryDeleted(key, value):
    """Mail a notice that branch/tag *value* was deleted."""
    log("Deleted %s %s" % (key, value))

    (out, fname) = generateMailHeader("%s '%s' deleted" % (key, value))

    print >>out, mailTag("Deleted %s" % key, value)

    sendMail(out, fname)
# Sends a mail for a notification consistent of two parts: (1) the output of a
# show command, and (2) the output of a diff command.
def sendChangeMail(subject, heads, show_cmd, diff_cmd):
    (out, fname) = generateMailHeader(subject)

    multi = "es" if len(heads) > 1 else ""
    heads = ",".join(heads)

    print >>out, mailTag("On branch%s" % multi, heads)

    if Config.link:
        # NOTE(review): `rev` is not defined in this function's scope, so a
        # configured --link raises NameError here; the revision needs to be
        # passed in (or derived from show_cmd). Confirm against upstream.
        url = Config.link.replace("%s", rev)
        print >>out, ""
        print >>out, mailTag("Link", url)

    footer = ""
    show = git(show_cmd)

    # Scan the show output for suppression markers; the for/else runs the
    # diff generation only when no [nodiff] marker broke out of the loop.
    for line in show:
        if NoDiff in line:
            break

        if NoMail in line:
            return
    else:
        (tmp, tname) = makeTmp()
        diff = git(diff_cmd, stdout_to=tmp)
        tmp.close()

        size = os.path.getsize(tname)

        # Over-sized diffs are replaced by a hint on how to reproduce them.
        if size > Config.maxdiffsize:
            footer = "\nDiff suppressed because of size. To see it, use:\n\n git %s" % diff_cmd
            tname = None

    # NOTE(review): if the loop above hit [nodiff] and broke, `tname` is
    # never assigned and the "if tname:" below raises NameError — verify.
    print >>out, Separator

    for line in git(show_cmd, all=True):
        if line == "---":
            print >>out, Separator
        else:
            print >>out, line

    print >>out, Separator

    if tname:
        for line in open(tname):
            print >>out, line,

    print >>out, footer

    if Config.debug:
        print >>out, "-- "
        print >>out, "debug: show_cmd = git %s" % show_cmd
        print >>out, "debug: diff_cmd = git %s" % diff_cmd

    sendMail(out, fname)
# Sends notification for a specific revision.
def commit(current, rev):
    """Mail a commit notification for *rev*, at most once per run."""
    if rev in current.reported:
        # Already reported in this run of the script.
        log("New revision %s, but already reported this time" % rev)
        # Fixed: without this return the dedup check above was a no-op and
        # the same revision was mailed once per containing branch.
        return

    log("New revision %s" % rev)
    current.reported.add(rev)

    # Branches that contain this revision.
    heads = [head.split()[-1] for head in git("branch --contains=%s" % rev)]

    if len(set(heads) - Config.allchanges) == 0:
        # We have reported full diffs for all already.
        return

    subject = git("show '--pretty=format:%%s (%%h)' -s %s" % rev)
    subject = "%s: %s" % (",".join(heads), subject[0])

    show_cmd = "show -s --no-color --find-copies-harder --pretty=medium %s" % rev
    diff_cmd = "diff --patch-with-stat --no-color --find-copies-harder --ignore-space-at-eol ^%s~1 %s " % (rev, rev)

    sendChangeMail(subject, heads, show_cmd, diff_cmd)
# Sends a diff between two revisions.
def diff(head, first, last):
    """Mail the combined show + diff for first..last on branch *head*."""
    log("Diffing %s..%s" % (first, last))

    summary = git("show '--pretty=format:%%s (%%h)' -s %s" % last)[0]
    subject = "%s diff: %s" % (head, summary)

    show_cmd = "show -s --no-color --find-copies-harder --pretty=medium %s" % last
    diff_cmd = "diff --patch-with-stat -m --no-color --find-copies-harder --ignore-space-at-eol %s %s" % (first, last)

    sendChangeMail(subject, [head], show_cmd, diff_cmd)
# Sends pair-wise diffs for a path of revisions.
def diffPath(head, revs):
    """Mail one diff for each consecutive pair of revisions in *revs*."""
    previous = None
    for current_rev in revs:
        if previous:
            diff(head, previous, current_rev)
        previous = current_rev
# Sends a commit notifications for a set of revisions, independent of whether
# they already have been reported.
def reportPath(current, revs):
    """Run commit() for every revision in *revs*."""
    for one_rev in revs:
        commit(current, one_rev)
# Sends a summary mail for a set of revisions.
def headMoved(head, path):
    """Mail a summary when *head* moved to include already-reported revisions."""
    log("Head moved: %s -> %s" % (head, path[-1]))

    subject = git("show '--pretty=format:%%s (%%h)' -s %s" % path[-1])

    (out, fname) = generateMailHeader("%s's head updated: %s" % (head, subject[0]))

    print >>out, "Branch '%s' now includes:" % head
    print >>out, ""

    # One oneline summary per revision now contained in the branch.
    for rev in path:
        print >>out, " ", git("show -s --pretty=oneline --abbrev-commit %s" % rev)[0]

    sendMail(out, fname)
# ---------------------------------------------------------------------------
# Main.
# ---------------------------------------------------------------------------

Config = GitConfig(sys.argv[1:])

log("Running for %s" % os.getcwd())

if Config.debug:
    for (name, arg, default, help) in Options:
        print >>sys.stderr, "[Option %s: %s]" % (name, Config.__dict__[name])

cache = State()

if os.path.exists(CacheFile):
    cache.readFrom(CacheFile)
    report = (not Config.updateonly)
else:
    log("Initial run. Not generating any mails, just recording current state.")
    report = False

current = getCurrent()

if Config.diff:
    # Manual diff mode. The argument must be of the form "[old-rev..]new-rev".
    path = [rev.strip() for rev in Config.diff.split("..")]

    if len(path) == 1:
        path = ("%s~2" % path[0], path[0])  # sic! ~2.
    else:
        path = ("%s~1" % path[0], path[1])

    revs = git(["rev-list", "--reverse", "--first-parent", "--date-order", path[1], "^%s" % path[0]])
    diffPath("<manual-diff>", revs)

    sys.exit(0)

if Config.manual:
    # Manual report mode. The argument must be of the form "[old-rev..]new-rev".
    path = [rev.strip() for rev in Config.manual.split("..")]

    if len(path) == 1:
        path = ("%s~1" % path[0], path[0])

    revs = git(["rev-list", "--reverse", "--date-order", path[1], "^%s" % path[0]])
    reportPath(current, revs)

    sys.exit(0)

if report:
    # Check for changes to the set of heads.
    old = set(cache.heads.keys())
    new = set(current.heads.keys())

    for head in (new - old):
        entryAdded("branch", head, current.heads[head])

    for head in (old - new):
        entryDeleted("branch", head)

    stable_heads = new & old
    # Only keep full-diff branches that still exist.
    Config.allchanges = Config.allchanges & stable_heads

    # Check tags.
    old = set(cache.tags.keys())
    new = set(current.tags.keys())

    for tag in (new - old):
        entryAdded("tag", tag, current.tags[tag])

    for tag in (old - new):
        entryDeleted("tag", tag)

    # Do complete reports for the heads we want to see everything for.
    for head in Config.allchanges:
        old_rev = cache.heads[head]
        new_rev = current.heads[head]
        revs = git(["rev-list", "--reverse", "--first-parent", "--date-order", new_rev, "^%s~1" % old_rev])
        diffPath(head, revs)

    # Check for unreported commits.

    # Sort updates by time.
    def _key(rev):
        # Commit timestamp (unix epoch) for sorting.
        ts = git("show -s '--pretty=format:%%ct' %s" % rev)
        return int(ts[0])

    old = set(cache.revs)
    new = set(current.revs)

    new_revs = new - old

    for rev in sorted(new_revs, key=_key):
        commit(current, rev)

    # See if heads have moved to include already reported revisions.
    for head in stable_heads:
        if head in Config.allchanges:
            # Already done complete diffs.
            continue

        old_rev = cache.heads[head]
        new_rev = current.heads[head]
        path = git(["rev-list", "--reverse", "--date-order", new_rev, "^%s" % old_rev])

        if len(set(path) - new_revs):
            headMoved(head, path)

if not Config.noupdate:
    current.writeTo(CacheFile)

deleteTmps()

View file

@ -0,0 +1,206 @@
<VirtualHost *:80>
ServerName infrastructure.fedoraproject.org
ServerAlias infrastructure.stg.fedoraproject.org
ServerAdmin webmaster@fedoraproject.org
TraceEnable Off
# enable git smart http cloning.
SetEnv GIT_PROJECT_ROOT /srv/web/infra/
SetEnv GIT_HTTP_EXPORT_ALL
ScriptAliasMatch \
"(?x)^/infra/(.*/(HEAD | \
info/refs | \
objects/(info/[^/]+ | \
[0-9a-f]{2}/[0-9a-f]{38} | \
pack/pack-[0-9a-f]{40}\.(pack|idx)) | \
git-(upload|receive)-pack))$" \
/usr/libexec/git-core/git-http-backend/$1
#
# redirect everyone to use https
#
# We can't do this until virt-install can handle https
# RewriteEngine on
# RewriteCond %{SERVER_PORT} !^443$
# RewriteRule ^/(.*) https://%{HTTP_HOST}/$1 [L,R]
# robots location
Alias /robots.txt /srv/web/robots.txt.lockbox01
DocumentRoot /srv/web
<Directory /srv/web>
Options Indexes FollowSymLinks
AllowOverride None
Order allow,deny
Allow from all
</Directory>
<Directory /srv/web/repo>
Options Indexes FollowSymLinks
AllowOverride None
Order allow,deny
Allow from all
</Directory>
<Directory /srv/web/repo/rhel>
Order deny,allow
Deny from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/rhel>
Order deny,allow
Deny from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/pub>
Options Indexes FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/infra>
Options Indexes FollowSymLinks
Order allow,deny
Allow from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/infra/bigfiles>
Options FollowSymLinks
Order allow,deny
Allow from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
# Needed for cgit cgi
ScriptAlias /cgi-bin/ "/var/www/cgi-bin/"
<Directory "/var/www/cgi-bin">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
<Directory "/usr/share/cgit">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
<Directory "/usr/libexec/git-core">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
</VirtualHost>
<VirtualHost *:443>
ServerName infrastructure.fedoraproject.org
ServerAlias infrastructure.stg.fedoraproject.org
ServerAdmin webmaster@fedoraproject.org
# enable git smart http cloning.
SetEnv GIT_PROJECT_ROOT /srv/web/infra/
SetEnv GIT_HTTP_EXPORT_ALL
ScriptAliasMatch \
"(?x)^/infra/(.*/(HEAD | \
info/refs | \
objects/(info/[^/]+ | \
[0-9a-f]{2}/[0-9a-f]{38} | \
pack/pack-[0-9a-f]{40}\.(pack|idx)) | \
git-(upload|receive)-pack))$" \
/usr/libexec/git-core/git-http-backend/$1
SSLEngine on
SSLCertificateFile /etc/pki/tls/certs/wildcard-2014.fedoraproject.org.cert
SSLCertificateKeyFile /etc/pki/tls/private/wildcard-2014.fedoraproject.org.key
SSLCertificateChainFile /etc/pki/tls/certs/wildcard-2014.fedoraproject.org.intermediate.cert
Header always add Strict-Transport-Security "max-age=15768000; includeSubDomains; preload"
SSLHonorCipherOrder On
# https://fedorahosted.org/fedora-infrastructure/ticket/4101#comment:14
# If you change the protocols or cipher suites, you should probably update
# modules/squid/files/squid.conf-el6 too, to keep it in sync.
SSLProtocol -All +TLSv1 +TLSv1.1 +TLSv1.2
SSLCipherSuite ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!3DES:!MD5:!PSK
# robots location
Alias /robots.txt /srv/web/robots.txt.lockbox01
DocumentRoot /srv/web
<Directory /srv/web>
Options Indexes FollowSymLinks
AllowOverride None
Order allow,deny
Allow from all
</Directory>
<Directory /srv/web/repo>
Options Indexes FollowSymLinks
AllowOverride None
Order allow,deny
Allow from all
</Directory>
<Directory /srv/web/repo/rhel>
Order deny,allow
Deny from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/rhel>
Order deny,allow
Deny from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/pub>
Options Indexes FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/infra>
Options Indexes FollowSymLinks
Order allow,deny
Allow from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
<Directory /srv/web/infra/bigfiles>
Options FollowSymLinks
Order allow,deny
Allow from all
Include "conf.d/infrastructure.fedoraproject.org/allows"
</Directory>
# Needed for cgit cgi
ScriptAlias /cgi-bin/ "/var/www/cgi-bin/"
<Directory "/var/www/cgi-bin">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
<Directory "/usr/share/cgit">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
<Directory "/usr/libexec/git-core">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
</VirtualHost>

View file

@ -0,0 +1 @@
AddType text/plain .rst

View file

@ -0,0 +1,742 @@
#!/usr/bin/env python
from __future__ import print_function
"""
This is a python interface for using Openshift-2.0 REST
version = 2.0 changed the basic support to use the new requests module
(http://docs.python-requests.org/en/latest/index.html)
Source: https://github.com/openshift/python-interface/raw/master/oshift/__init__.py
"""
import os
import sys
import logging
from optparse import OptionParser
import time
import traceback
import json
import base64
import requests
class OpenShiftException(Exception):
    """Base class for all errors raised by this module.

    Derives from Exception (not BaseException): PEP 352 reserves
    BaseException for exit-style exceptions, and generic
    ``except Exception`` handlers would otherwise miss these.
    """
    pass


class OpenShiftLoginException(OpenShiftException):
    """Authorization failed."""
    pass


class OpenShiftAppException(OpenShiftException):
    """App not found."""
    pass


class OpenShiftNullDomainException(OpenShiftException):
    """User's domain hasn't been initialized."""
    pass


class OpenShift500Exception(OpenShiftException):
    """Internal Server Error"""
    pass
#### set this to True if we want to enable performance analysis
DOING_PERFORMANCE_ANALYSIS = False

# NOTE(review): "global" at module scope is a no-op; kept for fidelity.
global log
def config_logger():
    """Build and return the module logger ("dump_logs", DEBUG to stdout).

    Idempotent: repeated calls reuse the existing handler instead of
    stacking duplicates (logging.getLogger returns a cached singleton).
    The original also built an unused second formatter; removed.
    """
    # create formatter
    formatter = logging.Formatter("%(levelname)s [%(asctime)s] %(message)s",
                                  "%H:%M:%S")
    logger = logging.getLogger("dump_logs")
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setFormatter(formatter)
        stream_handler.setLevel(logging.DEBUG)
        logger.addHandler(stream_handler)
    return logger
def config_parser():
    """Populate the module-level OptionParser and parse sys.argv.

    NOTE(review): relies on the global `parser` that is created *after*
    this def (works only because the call happens later). Credentials
    fall back to $OPENSHIFT_user_email / $OPENSHIFT_user_passwd.
    """
    # these are required options.
    parser.set_defaults(VERBOSE=False)
    parser.set_defaults(DEBUG=False)
    parser.add_option("-d", action="store_true", dest="DEBUG", help="enable DEBUG (default true)")
    parser.add_option("-i", "--ip", default="openshift.redhat.com", help="ip addaress of your devenv")
    parser.add_option("-v", action="store_true", dest="VERBOSE", help="enable VERBOSE printing")
    parser.add_option("-u", "--user", default=None, help="User name")
    parser.add_option("-p", "--password", default=None, help="RHT password")
    (options, args) = parser.parse_args()
    if options.user is None:
        options.user = os.getenv('OPENSHIFT_user_email')
    if options.password is None:
        options.password = os.getenv('OPENSHIFT_user_passwd')
    return options, args
# Module-level singletons; `parser` is populated by config_parser() on use.
log = config_logger()
parser = OptionParser()
# Decorator that measures and logs how long a call takes.
def timeit(method):
    """Wrap *method* so each call logs its duration; returns (result, seconds)."""
    def timed(*args, **kw):
        started = time.time()
        outcome = method(*args, **kw)
        elapsed = time.time() - started
        log.info("%r (%r, %r) %2.2f sec" % (method.__name__, args, kw, elapsed))
        return outcome, elapsed
    return timed
class conditional_decorator(object):
    """Apply *dec* to a function only when *condition* was truthy at creation."""
    def __init__(self, dec, condition):
        self.decorator = dec
        self.condition = condition

    def __call__(self, func):
        # Leave the function untouched unless the condition held.
        return self.decorator(func) if self.condition else func
class RestApi(object):
    """
    A base connection class to derive from.

    Connection defaults live as class attributes; instances override them
    via __init__ keyword arguments. request() stores the last response on
    self.response and returns (status_code, parsed json body).
    """

    proto = 'https'
    host = '127.0.0.1'
    port = 443
    username = None
    password = None
    headers = None
    response = None
    base_uri = None
    verbose = False
    debug = False

    def __init__(self, host=None, port=443, username=username, password=password,
                 debug=False, verbose=False, proto=None, headers=None):
        if proto is not None:
            self.proto = proto
        if host is not None:
            self.host = host
        if username:
            self.username = username
        if password:
            self.password = password
        if headers:
            self.headers = headers
        if verbose:
            self.verbose = verbose
        self.debug = debug
        # Fixed: build from self.host so the class default ('127.0.0.1')
        # applies when host is omitted; the original concatenated the bare
        # `host` parameter and raised TypeError on host=None.
        self.base_uri = self.proto + "://" + self.host + "/broker/rest"

    def _get_auth_headers(self, username=None, password=None):
        """Update stored credentials if given; returns (username, password)."""
        if username:
            self.username = username
        if password:
            self.password = password
        return (self.username, self.password)

    def request(self, url, method, headers=None, params=None):
        """
        wrapper method for Requests' methods

        Absolute URLs are used as-is; relative ones are joined to base_uri.
        Raises OpenShift500Exception on HTTP 500.
        """
        if url.startswith("https://") or url.startswith("http://"):
            self.url = url  # absolute URL given; use it unchanged
        else:
            self.url = self.base_uri + url
        log.debug("URL: %s" % self.url)
        auth = (self.username, self.password)

        _headers = self.headers or {}
        if headers:
            _headers.update(headers)
        if 'OPENSHIFT_REST_API' in os.environ:
            # Pin the broker API version requested via the environment.
            user_specified_api_version = os.environ['OPENSHIFT_REST_API']
            api_version = "application/json;version=%s" % user_specified_api_version
            _headers['Accept'] = api_version

        # NOTE: verify=False disables TLS certificate validation; kept for
        # compatibility with self-signed broker certs, but a MITM risk.
        self.response = requests.request(
            auth=None if None in auth else auth,
            method=method, url=self.url, params=params,
            headers=_headers, timeout=130, verify=False
        )

        try:
            raw_response = self.response.raw
        except Exception as e:
            print("-"*80, file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            print("-"*80, file=sys.stderr)
            raise e

        self.data = self.response.json()
        # see https://github.com/kennethreitz/requests/blob/master/requests/status_codes.py
        if self.response.status_code == requests.codes.internal_server_error:
            raise OpenShift500Exception('Internal Server Error: %s' % self.data)

        # Fixed: the original tested `== (200 or 201)`, which evaluates to
        # `== 200` and therefore never matched 201.
        if self.response.status_code in (200, 201):
            print("-"*80, file=sys.stderr)
            log.debug("status: %s" % self.response.status_code)
            print("-"*80, file=sys.stderr)

        return (self.response.status_code, self.data)
class Openshift(object):
"""
wrappers class around REST API so use can use it with python
"""
rest = None
user = None
passwd = None
    def __init__(self, host, user=None, passwd=None, debug=False, verbose=False, logger=None, proto=None, headers=None):
        if user:
            self.user = user
        if passwd:
            self.passwd = passwd
        if logger:
            # Replace the module-level logger with the caller's.
            global log
            log = logger
        self.rest = RestApi(host=host, username=self.user, password=self.passwd, debug=debug, verbose=verbose, proto=proto, headers=headers)
        if 'OPENSHIFT_REST_API' in os.environ:
            # Caller pinned an API version via the environment.
            self.REST_API_VERSION = float(os.environ['OPENSHIFT_REST_API'])
        else:
            # just get the latest version returned from the Server
            api_version, api_version_list = self.api_version()
            self.REST_API_VERSION = api_version
    def get_href(self, top_level_url, target_link, domain_name=None):
        """Resolve (href, method) for *target_link* from a top-level collection.

        With *domain_name* set, only that domain's links are searched.
        """
        status, res = self.rest.request(method='GET', url=top_level_url)
        index = target_link.upper()
        # NOTE(review): rest.request() returns an int status code, so this
        # string comparison can never be true; auth failures were probably
        # meant to be detected via status 401 — verify against the API.
        if status == 'Authorization Required':
            #log.error("Authorization failed. (Check your credentials)")
            raise OpenShiftLoginException('Authorization Required')
        if domain_name is None:
            if self.rest.response.json()['data']:
                res = self.rest.response.json()['data'][0]['links'][index]
                return (res['href'], res['method'])
            else:
                raise OpenShiftNullDomainException("No domain has been initialized.")
            #return ('Not Found', self.rest.response.json)
        else:  # domain name is specified, now find a match
            json_data = self.rest.response.json()['data']
            if json_data:
                for jd in json_data:
                    if jd['name'] == domain_name:
                        res = jd['links'][index]
                        return (res['href'], res['method'])
                ### if here, then user has given a domain name that does not match what's registered with the system
                return("Not Found", None)
            else:
                return(None, None)
    ##### /user (sshkey)
    #@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
    @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
    def get_user(self):
        """GET /user; returns (status, login) or (status, raw body)."""
        log.debug("Getting user information...")
        (status, raw_response) = self.rest.request(method='GET', url='/user')
        # NOTE(review): status is an int status code here, so == 'OK' never
        # matches and the raw-response branch is always taken; probably
        # should compare against 200 — verify before changing.
        if status == 'OK':
            return (status, self.rest.response.json()['data']['login'])
        else:
            return (status, raw_response)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def keys_list(self):
log.debug("Getting ssh key information...")
(status, raw_response) = self.rest.request(method='GET', url='/user/keys')
return (status, raw_response)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def key_add(self, kwargs):
"""
params: {name, type, key_path}
"""
ssh_key_str = None
if 'key_str' in kwargs:
ssh_key_str = kwargs['key_str']
else:
if 'key' not in kwargs:
# use a default path
sshkey = '~/.ssh/id_rsa.pub'
else:
sshkey = kwargs['key']
ssh_path = os.path.expanduser(sshkey)
ssh_key_str = open(ssh_path, 'r').read().split(' ')[1]
if 'name' not in kwargs:
kwargs['name'] = 'default'
if 'type' not in kwargs:
kwargs['type'] = 'ssh-rsa'
data_dict = {
'name': kwargs['name'],
'type': kwargs['type'],
'content': ssh_key_str
}
params = data_dict
status, raw_response = self.rest.request(method='POST', url='/user/keys', params=params)
return (status, raw_response)
##### /domains
#@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
# TODO: should the rhlogin really be hardcoded in this function?
def domain_create(self, name, rhlogin='nate@appsembler.com'):
log.debug("Creating domain '%s'" % name)
params = {
'id': name,
'rhlogin': rhlogin
}
status, res = self.rest.request(method='POST', url='/domains', params=params)
return (status, res)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def domain_delete(self, domain_name=None, force=True):
""" destroy a user's domain, if no name is given, figure it out"""
if domain_name is None:
status, domain_name = self.domain_get()
url, method = self.get_href('/domains', 'delete', domain_name)
log.info("URL: %s" % url)
#res = self.rest.response.data[0]['links']['DELETE']
if force:
params = {'force': 'true'}
if url:
return self.rest.request(method=method, url=url, params=params)
else: # problem
return (url, self.rest.response.raw)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def domain_get(self, name=None):
log.info("Getting domain information...")
url, method = self.get_href('/domains', 'get', name)
if url == 'Not Found':
return ('Not Found', None)
else:
(status, raw_response) = self.rest.request(method=method, url=url)
if status == 200:
if self.REST_API_VERSION < 1.6:
domain_index_name = 'id'
else:
domain_index_name = 'name'
return (status, self.rest.response.json()['data'][domain_index_name])
def domain_update(self, new_name):
params = {'id': new_name}
url, method = self.get_href("/domains", 'update')
(status, res) = self.rest.request(method=method, url=url, params=params)
return (status, res)
def app_list(self):
url, method = self.get_href('/domains', 'list_applications')
(status, res) = self.rest.request(method=method, url=url)
return (status, self.rest.response.json()['data'])
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_create(self, app_name, app_type, scale='false', init_git_url=None):
url, method = self.get_href('/domains', 'add_application')
valid_options = self.rest.response.json()['data'][0]['links']['ADD_APPLICATION']['optional_params'][0]['valid_options']
#if app_type not in valid_options:
# log.error("The app type you specified '%s' is not supported!" % app_type)
# log.debug("supported apps types are: %s" % valid_options)
try:
json_data = json.loads(json.dumps(app_type))
except:
json_data = None
if json_data:
# translate json data into list
is_dict = all(isinstance(i, dict) for i in json_data)
cart_info = []
if is_dict:
# need to construct a cart as a list from dictionary
for data in json_data:
cart_info.append(data['name'])
else:
cart_info = json_data
else:
cart_info.append(app_type)
data_dict = {
'name': app_name,
'cartridges[]': cart_info,
'scale': scale,
}
if init_git_url:
data_dict['initial_git_url'] = init_git_url
params = data_dict
#log.debug("URL: %s, METHOD: %s" % (url, method))
(status, res) = self.rest.request(method=method, url=url, params=params)
return (status, res)
##### /cartridges
def cartridges(self):
(status, raw_response) = self.rest.request(method='GET', url='/cartridges')
if status == 'OK':
# return a list of cartridges that are supported
return (status, self.rest.response.json()['data'])
else:
return (status, raw_response)
##### /api get a list of support operations
def api(self):
#log.debug("Getting supported APIs...")
(status, raw_response) = self.rest.request(method='GET', url='/api')
return (status, raw_response)
def api_version(self):
# return the current version being used and the list of supported versions
status, res = self.api()
return (float(res['version']), res['supported_api_versions'])
##### helper functions
def do_action(self, kwargs):
op = kwargs['op_type']
if op == 'cartridge':
status, res = self.cartridge_list(kwargs['app_name'])
elif op == 'keys':
status, res = self.keys_list()
json_data = self.rest.response.json()
action = kwargs['action']
name = kwargs['name']
raw_response = None
for data in json_data['data']:
if data['name'] == name:
params = data['links'][action]
log.debug("Action: %s" % action)
if len(params['required_params']) > 0:
# construct require parameter dictionary
data = {}
for rp in params['required_params']:
param_name = rp['name']
if kwargs['op_type'] == 'cartridge':
data[param_name] = action.lower()
else:
data[param_name] = kwargs[param_name]
data = data
else:
data = None
(status, raw_response) = self.rest.request(method=params['method'],
url=params['href'],
params=data)
return (status, self.rest.response.json())
return (status, raw_response)
#### application tempalte
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_templates(self):
(status, raw_response) = self.rest.request(method='GET', url='/application_template')
if status == 'OK':
return (status, self.rest.response.json())
else:
return (status, raw_response)
##### keys
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def key_delete(self, key_name):
"""
li.key_delete('ssh_key_name')
"""
params = {"action": 'DELETE', 'name': key_name, "op_type": 'keys'}
return self.do_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def key_update(self, kwargs): # key_name, key_path, key_type='ssh-rsa'):
"""
li.key_update({'name': 'new_key_name', 'key': new_key_path})
"""
key_path = kwargs['key']
key_name = kwargs['name']
if 'key_type' in kwargs:
key_type = kwargs['key_type']
else:
key_type = 'ssh-rsa'
ssh_path = os.path.expanduser(key_path)
ssh_key_str = open(ssh_path, 'r').read().split(' ')[1]
params = {'op_type': 'keys', 'action': 'UPDATE', 'name': key_name, 'content': ssh_key_str, 'type': key_type}
return self.do_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def key_get(self, name):
"""
li.key_get('target_key_name')
returns the actual key content :$
"""
params = {'action': 'GET', 'name': name, 'op_type': 'keys'}
url = "/user/keys/" + name
(status, raw_response) = self.rest.request(method='GET', url=url)
if status == 'OK':
return status, self.rest.response.json()['data']
else:
return (status, raw_response)
def key_action(self, kwargs):
status, res = self.keys_list()
json_data = self.rest.response.json()
action = kwargs['action']
name = kwargs['name']
for data in json_data['data']:
if data['name'] == name:
params = data['links'][action]
log.debug("Action: %s" % action)
if len(params['required_params']) > 0:
# construct require parameter dictionary
data = {}
for rp in params['required_params']:
param_name = rp['name']
data[param_name] = kwargs[param_name]
data = data
else:
data = None
break
(status, raw_response) = self.rest.request(method=params['method'],
url=params['href'],
params=data)
return (status, raw_response)
##### apps
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_create_scale(self, app_name, app_type, scale, init_git_url=None):
self.app_create(app_name=app_name, app_type=app_type, scale=scale, init_git_url=init_git_url)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_delete(self, app_name):
params = {'action': 'DELETE', 'app_name': app_name}
return self.app_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_start(self, app_name):
params = {"action": 'START', 'app_name': app_name}
return self.app_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_stop(self, app_name):
params = {"action": 'STOP', 'app_name': app_name}
return self.app_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_restart(self, app_name):
params = {"action": 'RESTART', 'app_name': app_name}
return self.app_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_force_stop(self, app_name):
params = {"action": 'FORCE_STOP', 'app_name': app_name}
return self.app_action(params)
@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)
def app_get_descriptor(self, app_name):
params = {'action': 'GET', 'app_name': app_name}
return self.app_action(params)
#############################################################
# event related functions
#############################################################
def app_scale_up(self, app_name):
params = {'action': 'SCALE_UP', 'app_name': app_name}
return self.app_action(params)
def app_scale_down(self, app_name):
params = {'action': 'SCALE_DOWN', 'app_name': app_name}
return self.app_action(params)
def app_add_alias(self, app_name, alias):
params = {'action': 'ADD_ALIAS', 'app_name': app_name, 'alias': alias}
return self.app_action(params)
def app_remove_alias(self, app_name, alias):
params = {'action': 'REMOVE_ALIAS', 'app_name': app_name, 'alias': alias}
return self.app_action(params)
def app_get_estimates(self):
url, method = self.get_href('/estimates', 'get_estimate')
(status, res) = self.rest.request(method=method, url=url)
return (status, self.rest.response.json()['data'])
#params = {'action': 'GET_ESTIMATE'}
#return self.app_action(params)
def app_action(self, params):
""" generic helper function that is capable of doing all the operations
for application
"""
# step1. find th url and method
status, res = self.app_list()
app_found = False
action = params['action']
if 'app_name' in params:
app_name = params['app_name']
if 'cartridge' in params:
cart_name = params['cartridge']
for app in res:
#for app in res['data']:
if app['name'] == app_name:
# found match, now do your stuff
params_dict = app['links'][action]
method = params_dict['method']
log.info("Action: %s" % action)
data = {}
if len(params_dict['required_params']) > 0:
param_name = params_dict['required_params'][0]['name']
rp = params_dict['required_params'][0]
#data[param_name] = cart_name #'name'] = rp['name']
for rp in params_dict['required_params']:
# construct the data
param_name = rp['name']
if param_name == 'event':
if isinstance(rp['valid_options'], list):
data[param_name] = rp['valid_options'][0]
else:
data[param_name] = rp['valid_options']
else:
data[param_name] = params[param_name] # cart_name #params['op_type']
#data[param_name] = params[param_name]
data = data
else:
data = None
req_url = params_dict['href']
(status, raw_response) = self.rest.request(method=method, url=req_url, params=data)
app_found = True
return (status, raw_response)
if not app_found:
raise OpenShiftAppException("Can not find the app matching your request")
#log.error("Can not find app matching your request '%s'" % app_name)
#return ("Error", None)
def get_gears(self, app_name, domain_name=None):
""" return gears information """
params = {"action": 'GET_GEAR_GROUPS', 'app_name': app_name}
return self.app_action(params)
################################
# cartridges
################################
def cartridge_list(self, app_name):
params = {"action": 'LIST_CARTRIDGES', 'app_name': app_name}
return self.app_action(params)
def cartridge_add(self, app_name, cartridge):
params = {"action": 'ADD_CARTRIDGE', 'app_name': app_name,
'cartridge': cartridge}
status, res = self.app_action(params)
return (status, self.rest.response.json()['messages'])
def cartridge_delete(self, app_name, name):
params = {"action": 'DELETE', 'name': name, "op_type": 'cartridge', 'app_name': app_name}
return self.do_action(params)
def cartridge_start(self, app_name, name):
params = {"action": 'START', 'name': name, "op_type": 'cartridge', 'app_name': app_name}
return self.do_action(params)
def cartridge_stop(self, app_name, name):
params = {"action": 'STOP', 'name': name, "op_type": 'cartridge', 'app_name': app_name}
return self.do_action(params)
def cartridge_restart(self, app_name, name):
params = {"action": 'RESTART', 'name': name, "op_type": 'cartridge', 'app_name': app_name}
return self.do_action(params)
def cartridge_reload(self, app_name, name):
params = {"action": 'RELOAD', 'name': name, "op_type": 'cartridge', 'app_name': app_name}
return self.do_action(params)
def cartridge_get(self, app_name, name):
params = {"action": 'GET', 'name': name, "op_type": 'cartridge', 'app_name': app_name}
return self.do_action(params)
def app_template_get(self):
""" return a list of application template from an app """
status, res = self.rest.request(method='GET', url='/application_template')
if status == 'OK':
return (status, self.rest.response.json()['data'])
else:
return (status, res)
def sortedDict(adict):
    """Return the values of *adict* ordered by key.

    Uses the sorted() builtin instead of the list.sort() dance, and returns a
    concrete list (a superset of the old map() behaviour: still iterable, now
    also indexable).
    """
    return [adict[key] for key in sorted(adict)]
def perf_test(li):
    """Drive a small timing benchmark against *li* (an Openshift instance)."""
    od = {
        1: {'name': 'app_create', 'params': {'app_name': 'perftest'}},
        #2: {'name': 'app_delete', 'params': {'app_name': 'perftest'}},
    }
    sod = sortedDict(od)
    #li.domain_create('blahblah')
    # (the earlier duplicate cart_types assignment was dead and removed)
    cart_types = ['php-5.3']  # 'php-5.3', 'ruby-1.8', 'jbossas-7']
    for cart in cart_types:
        for action in sod:
            method_call = getattr(li, action['name'])
            # first (and only) key/value pair of the params dict;
            # dict.items()[0] is not subscriptable on python 3
            k, v = next(iter(action['params'].items()))
            if action['name'] == 'app_create':
                method_call(v, cart)
            else:
                method_call(v)
if __name__ == '__main__':
    # quick manual smoke test: authenticate and show the domain
    (options, args) = config_parser()
    li = Openshift(host=options.ip, user=options.user, passwd=options.password,
                   debug=options.DEBUG, verbose=options.VERBOSE)
    status, res = li.domain_get()
    # was `self.info('xxx', 1)` -- `self` does not exist at module level
    # (NameError); log through the module logger instead
    log.info("domain_get status: %s" % status)
    #status, res = li.app_create(app_name="app1", app_type=["ruby-1.8", "mysql-5.1"], init_git_url="https://github.com/openshift/wordpress-example")
    #status, res = li.app_create(app_name="app2", app_type="php-5.3", init_git_url="https://github.com/openshift/wordpress-example")
    #status, res = li.app_create(app_name="app3", app_type=[{"name": "ruby-1.8"}, {"name": "mysql-5.1"}], init_git_url="https://github.com/openshift/wordpress-example")

View file

@ -0,0 +1,108 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# Use this script to retrieve the security_question and security_answer from FAS (requires FAS >= 0.8.14)
# Author: Patrick Uiterwijk <puiterwijk@fedoraproject.org>
#
# Copyright 2012 Patrick Uiterwijk. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE FEDORA PROJECT ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE FREEBSD PROJECT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the Fedora Project.
import os
import getpass
import sys
import gpgme
from fedora.client import AccountSystem
from fedora.client import AuthError
from fedora.client import ServerError
import argparse
from io import BytesIO
# ---------------------------------------------------------------------------
# Command-line handling.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument('admin_user', help='The user as which to log in to retrieve the question and answer')
parser.add_argument('target_user', help='The user of which to retrieve the security question and answer')
parser.add_argument('--verbose', action='store_true')
parser.add_argument('--no-answer', action='store_true', help='Only show the question, do not decrypt the answer')
parser.add_argument('--site', help='The FAS URL to get the information from')
parser.add_argument('--insecure', action='store_true', default=False,
                    help='Do not check the certificate for the server. *WARNING*: Only use this for testing')
parser.add_argument('--gpg_home', help='The directory where secring.gpg and pubring.gpg reside')
args = parser.parse_args()

# Prompt interactively so the admin password never appears in argv or history.
args.admin_pass = getpass.getpass()

if args.site == None:
    args.site = 'https://admin.fedoraproject.org/accounts/'

if args.verbose:
    print 'Using site: %(site)s' % {'site': args.site}
if args.verbose:
    if args.gpg_home == None:
        print 'Using default gpg_home'
    else:
        print 'Using gpg_home: %(gpghome)s' % {'gpghome': args.gpg_home}

# gpgme reads GNUPGHOME from the environment, hence putenv rather than an API call.
if args.gpg_home != None:
    os.putenv('GNUPGHOME', args.gpg_home)

fas = AccountSystem(args.site, username=args.admin_user, password=args.admin_pass, insecure=args.insecure)

if args.verbose:
    print 'Getting user details...'
try:
    details = fas.person_by_username(args.target_user)
except AuthError:
    print 'Failed to login to FAS. Please check admin_user and admin_pass!'
    sys.exit(2)
except ServerError:
    print 'Failed to retrieve user details: the server reported an error!'
    sys.exit(3)

# Sanity-check the response before poking at the security fields.
if not 'username' in details.keys():
    print 'Error: user %(username)s is not known on this FAS site!' % {'username': args.target_user}
    sys.exit(4)
if not 'security_question' in details.keys():
    print 'Error: security_question was not retrieved by FAS! Are you sure you are using FAS >= 0.8.14, and that admin_user has the privileges to retrieve security_question?'
    sys.exit(5)
if details.security_question == None or details.security_answer == None:
    print 'Error: unable to retrieve security_question or security_answer. Are you sure you have privileges to return this information?'
    sys.exit(6)

# The answer is stored GPG-encrypted; decrypt it with the local keyring.
if not args.no_answer:
    if args.verbose:
        print 'Decrypting answer...'
    cipher = BytesIO(details.security_answer.encode('utf-8'))
    plain = BytesIO()
    ctx = gpgme.Context()
    ctx.decrypt(cipher, plain)
    details.security_answer = plain.getvalue()

print 'Security question: %(question)s' % {'question': details.security_question}
if not args.no_answer:
    print 'Security answer: %(answer)s' % {'answer': details.security_answer}

View file

@ -0,0 +1,137 @@
#!/usr/bin/python
# Copyright 2012 Patrick Uiterwijk <puiterwijk@fedoraproject.org>
# This file has been released as LGPLv3+, see COPYING for the complete license
import sys
import string
import ConfigParser
from argparse import ArgumentParser
from fedora.client import AccountSystem
from oshift_mod import Openshift
from getpass import getpass
def get_keys(host, user, passwd):
    """Return the ssh key records currently registered in OpenShift."""
    client = Openshift(host=host, user=user, passwd=passwd)
    resp, content = client.keys_list()
    if resp != 200:
        print('ERROR! Result: %(resp)s' % {'resp': resp})
        sys.exit(1)
    return client.rest.response.json()['data']
def add_key(host, user, passwd, key_name, key_type, key_contents, verbose=False):
    """Register one ssh key with OpenShift.

    Fix: Openshift.key_add() takes a single dict argument, so calling it with
    name=/type=/key_str= keyword arguments raised TypeError.
    """
    if verbose:
        print('Adding key %(keyname)s' % {'keyname': key_name})
    client = Openshift(host=host, user=user, passwd=passwd)
    (resp, content) = client.key_add({'name': key_name,
                                      'type': key_type,
                                      'key_str': key_contents})
    if resp != 200:
        print('ERROR! Result: %(resp)s' % {'resp': resp})
        sys.exit(2)
    if verbose:
        print('Done')
    return client.rest.response.json()['data']
def remove_key(host, user, passwd, key_name, verbose=False):
    """Delete one ssh key from OpenShift by name."""
    if verbose:
        print('Removing key %(keyname)s' % {'keyname': key_name})
    client = Openshift(host=host, user=user, passwd=passwd)
    (resp, content) = client.key_delete(key_name)
    if resp != 200:
        # use print() like the rest of this script (was a py2 print statement)
        print('ERROR! Result: %(resp)s' % {'resp': resp})
        sys.exit(3)
    if verbose:
        print('Done')
    return client.rest.response.json()['data']
def get_users_to_have_access(fas, groups):
    """Return the set of usernames belonging to any of *groups* in FAS."""
    members = set()
    for group_name in groups:
        for member in fas.group_members(group_name):
            members.add(member['username'])
    return members
def get_users_ssh_keys(fas, users):
    """Map username -> {'type', 'contents', 'username'} for every user in
    *users* that has a well-formed ssh key in FAS.

    Fix: the original compared ``contents.split(' ') > 1`` (a list against an
    int, always True on python 2), then indexed field [1] -- so a key with no
    space in it crashed with IndexError instead of being skipped.
    """
    keys = {}
    user_data = fas.user_data()
    for userid in user_data.keys():
        entry = user_data[userid]
        if entry['username'] in users:
            if entry['ssh_key']:
                # key looks like "<type> <content> [comment]"
                parts = entry['ssh_key'].split(' ')
                if len(parts) > 1:
                    keys[entry['username']] = {'type': parts[0],
                                               'contents': parts[1],
                                               'username': entry['username']}
    return keys
def get_keys_to_remove(keys_in_openshift, keys_in_fas):
    """Return the names of OpenShift keys whose content matches no FAS key.

    Fix: the original called set.remove() once per matching FAS entry, so two
    FAS users sharing one key content raised KeyError on the second remove.
    """
    # every key content currently known to FAS
    fas_contents = set(keys_in_fas[username]['contents'] for username in keys_in_fas)
    return set(key['name'] for key in keys_in_openshift
               if key['content'] not in fas_contents)
def get_keys_to_add(keys_in_openshift, keys_in_fas):
    """Return the FAS key records whose content is not yet in OpenShift.

    Fix: the original collected usernames in a set and removed matches with
    set.remove(), which raises KeyError when the same content appears twice in
    OpenShift; it also emitted results in arbitrary set order.  This version
    is deterministic (keys_in_fas iteration order) and never double-removes.
    """
    # every key content already registered in OpenShift
    present = set(key['content'] for key in keys_in_openshift)
    keys_to_add = []
    for username in keys_in_fas:
        if keys_in_fas[username]['contents'] not in present:
            keys_to_add.append(keys_in_fas[username])
    return keys_to_add
def remove_keys(openshift_host, openshift_user, openshift_pass, to_remove, verbose=False):
    """Delete from OpenShift every key named in *to_remove*."""
    if verbose:
        print('Removing the following keys:')
        print(to_remove)
    for key_name in to_remove:
        remove_key(openshift_host, openshift_user, openshift_pass, key_name, verbose=verbose)
    if verbose:
        print('Done')
def add_keys(openshift_host, openshift_user, openshift_pass, to_add, prefix, verbose=False):
    """Register in OpenShift every FAS key record in *to_add*, naming each
    key with *prefix* prepended to the owner's username."""
    if verbose:
        print('Adding the following keys:')
        print(to_add)
    for record in to_add:
        key_name = '%(prefix)s%(username)s' % {'prefix': prefix, 'username': record['username']}
        add_key(openshift_host, openshift_user, openshift_pass, key_name, record['type'], record['contents'], verbose=verbose)
    if verbose:
        print('Done')
if __name__ == '__main__':
    # Sync ssh keys from FAS groups into an OpenShift account.
    parser = ArgumentParser()
    parser.add_argument('-config_file', help='The configuration file to use', default='/etc/sync-openshift-keys.conf')
    parser.add_argument('--verbose', '-v', help='Make the script more verbose', action='store_true')
    args = parser.parse_args()

    config = ConfigParser.ConfigParser()
    config.read(args.config_file)

    fas = AccountSystem(config.get('fas', 'url'), username=config.get('fas', 'user'), password=config.get('fas', 'pass'))
    fas.insecure = True

    if args.verbose:
        print('Getting users...')
    # str.split() instead of the deprecated string.split() function
    users = get_users_to_have_access(fas, config.get('general', 'groups').split(','))
    if args.verbose:
        print('Done: %s' % users)
        print('Getting keys in FAS...')
    keys_fas = get_users_ssh_keys(fas, users)
    if args.verbose:
        # was print('Done: %s') -- the interpolation argument was missing
        print('Done: %s' % keys_fas)
        print('Getting keys in Openshift...')
    keys_openshift = get_keys(config.get('openshift', 'host'), config.get('openshift', 'user'), config.get('openshift', 'pass'))
    if args.verbose:
        print('Done')
        print('Getting keys to remove...')
    keys_to_remove = get_keys_to_remove(keys_openshift, keys_fas)
    if args.verbose:
        print('Done')
        print('Getting keys to add...')
    keys_to_add = get_keys_to_add(keys_openshift, keys_fas)
    if args.verbose:
        print('Done')
    remove_keys(config.get('openshift', 'host'), config.get('openshift', 'user'), config.get('openshift', 'pass'), keys_to_remove, verbose=args.verbose)
    add_keys(config.get('openshift', 'host'), config.get('openshift', 'user'), config.get('openshift', 'pass'), keys_to_add, config.get('general', 'keyname_prefix'), verbose=args.verbose)

View file

@ -0,0 +1,138 @@
#!/usr/bin/env python
# encoding: utf-8
# (c) 2012 Red Hat, Inc.
# Authored by Ricky Elrod
# But when it breaks, don't yell at him because that's mean.
# update hook for FI repos -> zodbot.
import os
import sys
import subprocess
import shlex
import socket
import urllib
# Host/port of the zodbot supybot instance that will announce the commits.
ZODBOT_SERVER = "value01"
ZODBOT_PORT = 5050

# update-hook argv: <script> <repo dir> <irc channel> <old rev> <new rev> <branch ref>
hook = sys.argv[0]
repodir = sys.argv[1]
channel = sys.argv[2]
old = sys.argv[3]
new = sys.argv[4]
branch = sys.argv[5]

# Split on /, nuke empties from the result, use the last nonempty
# element. This lets us not care if there's a trailing slash.
# NOTE(review): py2-only -- on py3 filter() returns an iterator, which is not
# subscriptable; confirm this hook always runs under python 2.
repodir = filter(None, repodir.split('/'))[-1]
def run_command(command):
    """Run *command* and return a dict with the resulting stdout/stderr."""
    proc = subprocess.Popen(shlex.split(command),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return {"stdout": out, "stderr": err}
def construct_url(slug):
    """Return a space-padded url to the commit.

    Only when the repo is listed in the cgit project list; otherwise return
    an empty string so callers can append the result unconditionally.
    """
    # Our long url template.
    tmpl = "http://infrastructure.fedoraproject.org/cgit/{repo}/commit/?id={slug}"
    repo = repodir + ".git"
    with open('/etc/cgit-projects-lockbox01', 'r') as f:
        known_repos = [entry.strip() for entry in f]
    if repo in known_repos and slug:
        return " " + tmpl.format(repo=repo, slug=slug)
    return ""
def parse_commit(commit):
    """
    So we're given a commit in the form of:
    ---
    Ricky Elrod - test-repo:a045150 ---- add some more test files...
    A foobar/asdf/testfile.1
    A foobar/testfile.2
    ---
    Essentially, we rip out the first line and set it aside.
    Then all the other lines will begin with M/C/R/A/D/U.
    Replace those letters with fancy little symbols (like + for A).
    Combine them together in a list/array.
    Show the first 4 and if more exist, append '...' to the list.
    Lastly, replace the "----" in the original line above with these.
    """
    lines = commit.split("\n")
    # first line carries "<author> - <repo>:<hash> ---- <subject>"
    message = lines.pop(0)
    files = []
    # extract the commit hash from the first line.
    slug = None
    try:
        slug = message.split(' -')[1].strip().split(':')[1]
    except IndexError:
        # keep going without a cgit link rather than aborting the announce
        print "** Couldn't parse slug from git-rev.", message
    # The remaining lines are files changed.
    for changed_file in filter(None, lines):
        status, filename = changed_file.split()
        # map git name-status letters to one-character symbols
        if status == "M" or status == "R":
            symbol = "*"
        elif status == "C" or status == "A":
            symbol = "+"
        elif status == "D":
            symbol = "-"
        else:
            symbol = "?"
        files.append(symbol + filename)
    # Show the first 4 files changed, and if there are more, add a '...'
    # If no files were changed don't show empty [] because it looks tacky.
    fileslist = ' '.join(files[0:4])
    if len(files):
        fileslist = '[' + fileslist
        if len(files) > 4:
            fileslist += ' ...'
        fileslist += ']'
    else:
        fileslist = '-'
    # cgit link for the commit, or "" when the repo is not served by cgit
    padded_url = construct_url(slug)
    # Replace the ---- with the files list...
    return message.replace('----', fileslist, 1) + padded_url
# Get a list of commits to report.
# Only announce pushes to master.
if branch == 'master':
    # commits reachable from 'new' but not from 'old', reordered oldest-first
    revs = run_command("git rev-list ^%s %s" % (old, new))["stdout"].split("\n")
    revs = filter(None, revs)
    revs.reverse()
    for commit_hash in revs:
        # Get the commit in a format that we can deal with
        commit = run_command(
            "git show --name-status " + commit_hash + " --oneline "
            "--format='%an - " + repodir + ":%h ---- %s'")
        parsed_commit = parse_commit(commit["stdout"])
        # hand "<channel> <formatted line>" to zodbot over a plain TCP socket,
        # one connection per commit
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ZODBOT_SERVER, ZODBOT_PORT))
        s.sendall(channel+ " " + parsed_commit)
        s.close()

View file

@ -58,6 +58,78 @@
- batcave
- config
#
# Scripts
#
#
# Hook to notify on git commits used in git repos
#
- name: setup git-notifier script
copy: src=git-notifier dest=/usr/local/bin/git-notifier mode=0755
tags:
- batcave
- config
#
# Script used to gather encrypted security questions from fas
#
- name: setup /usr/local/bin/retrieve-security-question.py
copy: src=retrieve-security-question.py dest=/usr/local/bin/retrieve-security-question.py mode=0755
tags:
- batcave
- config
#
# Script to sync ssh keys from fas to openshift instances.
#
- name: setup python module for openshift sync script
copy: src=oshift_mod.py dest=/usr/local/bin/oshift_mod.py mode=0644
tags:
- batcave
- config
- name: setup sync-openshift-keys config
template: src=sync-openshift-keys.conf.j2 dest=/etc/sync-openshift-keys.conf mode=0600
tags:
- batcave
- config
- name: setup sync-openshift-keys script
copy: src=sync-openshift-keys.py dest=/usr/local/bin/sync-openshift-keys.py mode=0755
tags:
- batcave
- config
# The zodbot server must allow TCP on whatever port zodbot is listening on
# for this to work (currently TCP port 5050).
# Once that is done, you can symlink /usr/local/bin/zodbot-announce-commits.py
# to the 'hooks' directory of the bare repo you're wishing to receive commits
# for, then add a hooks.zodbotchannel to the repo's config file.
# Lastly, add the following lines to your 'update' hook:
# reposource=$(git config hooks.reposource)
# zodbot_channel=$(git config hooks.zodbotchannel)
# python $reposource/hooks/zodbot-announce-commits.py $reposource $zodbot_channel $oldrev $newrev ${1#refs/heads/}
- name: install zodbot-announce-commits script
copy: src=zodbot-announce-commits.py dest=/usr/local/bin/zodbot-announce-commits.py mode=0755
tags:
- batcave
- config
#
# This is another script to announce commits, this time to the fedmsg bus
#
- name: install fedmsg-announce-commits script
copy: src=fedmsg-announce-commits.py dest=/usr/local/bin/fedmsg-announce-commits.py mode=0755
tags:
- batcave
- config
#
# This script checks all the virthosts and logs what guests they are running.
#
@ -88,26 +160,34 @@
# - batcave
# - config
#
# Setup web server config
#
- name: install web server config for batcave (mimetypes)
copy: src=mime-typesconf dest=/etc/httpd/conf.d/mime-types.conf mode=0644
tags:
- batcave
- config
- httpd
- name: install web server config for batcave (access rules)
copy: src=allows dest=/etc/httpd/conf.d/allows mode=0644
tags:
- batcave
- config
- httpd
- name: install web server config for batcave (main config)
copy: src=infrastructure.fedoraproject.org.conf dest=/etc/httpd/conf.d/infrastructure.fedoraproject.org.conf mode=0644
tags:
- batcave
- config
- httpd
# still to convert from puppet:
# include scripts::check-sshkeys
# include scripts::git-notifier
# include scripts::retrieve-security-question
# include scripts::sync-openshift-keys
# include scripts::zodbotAnnounceCommits
# include scripts::fedmsgAnnounceCommits
# include scripts::sync-rhn
#
# include repo2json
# include ansible_utils::ansible_utils
#
# include rsync::server
# include scripts::geoip-retriever
# include geoip-retriever
#
# httpd::site { "infrastructure.fedoraproject.org": }
#
# httpd::mime-type { "restructured text docs":
# website => "infrastructure.fedoraproject.org",
# mimetype => "text/plain",
# extensions => [ ".rst" ],

View file

@ -0,0 +1,13 @@
[general]
groups=sysadmin-main,sysadmin-noc
keyname_prefix=fas
[openshift]
host=openshift.redhat.com
user={{ openshift_status_username }}
pass={{ openshift_status_password }}
[fas]
url=https://admin.fedoraproject.org/accounts/
user={{ fedorathirdpartyUser }}
pass={{ fedorathirdpartyPassword }}