[copr] skvidal-backend: - saving the jobs out per-build - so don't expect a 'builds' dictionary - fetch the jobs from the front end (2eb8d67)

skvidal at fedoraproject.org
Thu Dec 6 06:55:08 UTC 2012


Repository : http://git.fedorahosted.org/cgit/copr.git

On branch  : skvidal-backend

>---------------------------------------------------------------

commit 2eb8d67a3a2ebbc509e8c25985d44b54287d8f62
Author: Seth Vidal <skvidal at fedoraproject.org>
Date:   Thu Dec 6 01:52:44 2012 -0500

    - saving the jobs out per-build - so don't expect a 'builds' dictionary
    - fetch the jobs from the front end


>---------------------------------------------------------------

 backend/dispatcher.py |    3 +--
 copr-be.py            |   22 ++++++++++++++++++++++
 2 files changed, 23 insertions(+), 2 deletions(-)
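
For context, the dispatcher.py hunk means each file in jobsdir now holds a single build object rather than a {'builds': [...]} wrapper. A minimal sketch of the round trip, assuming only the 'id', 'pkgs' and 'repos' keys that parse_job actually reads (any field names beyond those are not specified by this patch):

    # A hypothetical per-build job file, limited to the keys parse_job reads;
    # real files are whatever JSON the frontend returns for a build.
    import json

    build = {
        'id': 42,                                  # build id assigned by the frontend
        'pkgs': 'http://example.com/foo.src.rpm',  # space-separated package URLs
        'repos': '',                               # space-separated extra repos, may be empty
    }

    jobfile = '42.json'  # illustrative path; the backend writes into opts.jobsdir
    open(jobfile, 'w').write(json.dumps(build))

    # parse_job now loads the build dict directly -- no ['builds'][0] indexing
    loaded = json.load(open(jobfile))
    pkgs = loaded['pkgs'].split(' ')
    repos = [r for r in loaded['repos'].split(' ') if r.strip()]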

diff --git a/backend/dispatcher.py b/backend/dispatcher.py
index b9c1251..fc9a958 100644
--- a/backend/dispatcher.py
+++ b/backend/dispatcher.py
@@ -153,8 +153,7 @@ class Worker(multiprocessing.Process):
     def parse_job(self, jobfile):
         # read the json of the job in
         # break out what we need return a bunch of the info we need
-        d = json.load(open(jobfile))
-        build = d['builds'][0]
+        build = json.load(open(jobfile))
         jobdata = Bunch()
         jobdata.pkgs = build['pkgs'].split(' ')
         jobdata.repos = [r for r in build['repos'].split(' ') if r.strip() ]
diff --git a/copr-be.py b/copr-be.py
index bd26444..8ab8470 100644
--- a/copr-be.py
+++ b/copr-be.py
@@ -11,6 +11,8 @@ from backend import errors
 from bunch import Bunch
 import ConfigParser
 import optparse
+import json
+import requests
 
 def _get_conf(cp, section, option, default):
     """to make returning items from config parser less irritating"""
@@ -97,10 +99,30 @@ class CoprBackend(object):
             print >>sys.stderr, 'Could not write to logfile %s - %s' % (self.logfile, str(e))
 
 
+    def fetch_jobs(self):
+        self.log('fetching jobs')
+        try:
+            r = requests.get('%s/waiting_builds/' % self.opts.frontend_url) # auth stuff here? maybe/maybenot
+        except requests.RequestException, e:
+            self.log('Error retrieving jobs from %s: %s' % (self.opts.frontend_url, e))
+        else:
+            if 'builds' in r.json:
+                self.log('%s jobs returned' % len(r.json['builds']))
+                count = 0
+                for b in r.json['builds']:
+                    if 'id' in b:
+                        jobfile = self.opts.jobsdir + '/%s.json' % b['id']
+                        if not os.path.exists(jobfile) and b['id'] not in self.added_jobs:
+                            count += 1
+                            open(jobfile, 'w').write(json.dumps(b))
+                            self.log('Wrote job: %s' % b['id'])
+                self.log('New jobs: %s' % count)
+    
     def run(self):
 
         abort = False
         while not abort:
+            self.fetch_jobs()
             for f in sorted(glob.glob(self.opts.jobsdir + '/*.json')):
                 n = os.path.basename(f).replace('.json', '')
                 if n not in self.added_jobs:

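The copr-be.py side of the change polls the frontend's /waiting_builds/ endpoint each pass through the run loop and materializes one job file per returned build. A rough standalone sketch of that flow, assuming the response shape implied by the code above (a JSON object with a 'builds' list) and using a plain dict in place of the requests response; write_new_jobs and its arguments are illustrative names, not part of the patch:

    import json
    import os

    def write_new_jobs(frontend_json, jobsdir, added_jobs):
        # frontend_json stands in for r.json; assumed shape:
        #   {'builds': [{'id': ..., 'pkgs': ..., 'repos': ...}, ...]}
        count = 0
        for b in frontend_json.get('builds', []):
            if 'id' not in b:
                continue
            jobfile = os.path.join(jobsdir, '%s.json' % b['id'])
            # skip builds already written out or already queued by the run loop
            if os.path.exists(jobfile) or b['id'] in added_jobs:
                continue
            open(jobfile, 'w').write(json.dumps(b))
            count += 1
        return count

    # e.g. with a faked frontend response:
    fake = {'builds': [{'id': 7, 'pkgs': 'bar.src.rpm', 'repos': ''}]}
    print(write_new_jobs(fake, '.', set()))   # -> 1, writes ./7.json

The run loop then picks these files up by globbing opts.jobsdir for *.json and uses the basename (the build id) as the job name.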

