[Oval-commits] r140 - trunk/oval-server/src/dsaUpdater
Pavel Vinogradov
blaze-guest at alioth.debian.org
Thu Aug 16 05:37:45 UTC 2007
Author: blaze-guest
Date: 2007-08-16 05:37:44 +0000 (Thu, 16 Aug 2007)
New Revision: 140
Modified:
trunk/oval-server/src/dsaUpdater/ftpUpdater.py
trunk/oval-server/src/dsaUpdater/httpUpdater.py
Log:
Implement http synchronization
Modified: trunk/oval-server/src/dsaUpdater/ftpUpdater.py
===================================================================
--- trunk/oval-server/src/dsaUpdater/ftpUpdater.py 2007-08-15 18:17:21 UTC (rev 139)
+++ trunk/oval-server/src/dsaUpdater/ftpUpdater.py 2007-08-16 05:37:44 UTC (rev 140)
@@ -69,7 +69,7 @@
logging.critical('Bad line format: %s' % line)
def __writer (self, data):
- file = open (self.curfile, 'a')
+ file = open (self.curfile, 'w')
file.write(data)
file.close()
Modified: trunk/oval-server/src/dsaUpdater/httpUpdater.py
===================================================================
--- trunk/oval-server/src/dsaUpdater/httpUpdater.py 2007-08-15 18:17:21 UTC (rev 139)
+++ trunk/oval-server/src/dsaUpdater/httpUpdater.py 2007-08-16 05:37:44 UTC (rev 140)
@@ -33,58 +33,50 @@
def __parse(self, data):
mtime = None
type = None
- #print data
- #year = 2007, in this case LIST return time instead of year
- patern = re.compile(r'<td><a href="\d+/">(\d+/)</a></td>.*(\d+)\-(\w+)\-(\d+) (\d+:\d+) *</td>')
+
+ paternDIR = re.compile(r'<td><a href="\d+/">(\d+/)</a></td>.*>(\d+)\-(\w+)\-(\d+) (\d+:\d+) *</td>')
+ paternFILE = re.compile(r'<td><a href="[\w\d\.\-]+">(dsa\-\d+\.(wml|data))</a></td>.*>(\d+)\-(\w+)\-(\d+) (\d+:\d+) *</td>')
+
for line in data.split('\n'):
- result = patern.search(line)
+ mtime = None
+ type = None
+
+ result = paternDIR.search(line)
if result:
- print result.groups()
-# type = 'dir'
-# month = self.month[result.groups()[1]]
-# day = int(result.groups()[2])
-# (hour, min) = result.groups()[3].split(':')
-# file = result.groups()[4]
-# mtime = datetime(2007, month, day, int(hour), int(min), 0)
-#
-
- print 'match'
+ type = 'dir'
+ file = result.groups()[0]
+ day = int(result.groups()[1])
+ month = self.month[result.groups()[2]]
+ year = int(result.groups()[3])
+ (hour, min) = result.groups()[4].split(':')
+ mtime = datetime(year, month, day, int(hour), int(min), 0)
else:
- print line
-
-#
-# type = result.groups()[0]
-# month = self.month[result.groups()[1]]
-# day = int(result.groups()[2])
-# (hour, min) = result.groups()[3].split(':')
-# file = result.groups()[4]
-# mtime = datetime(2007, month, day, int(hour), int(min), 0)
-# else:
-# #year != 2007
-# patern = re.compile(r'[drxr\- ]+(\d) *\d+ *\d+ *\d+ (\w+) (\d+) *(\d+) (.*)')
-# result = patern.search(line)
-# if result:
-# type = result.groups()[0]
-# month = self.month[result.groups()[1]]
-# day = int(result.groups()[2])
-# year = int(result.groups()[3])
-# file = result.groups()[4]
-# mtime = datetime(year, month, day, 12, 12, 0)
-# if mtime:
-# logging.critical ('File %s with mtime %s vs %s' % (file, mtime, self.actual))
-# if mtime > self.actual:
-# if type == '1':
-# self.result.append(self.curdir+os.sep+file)
-# else:
-# if type == '3':
-# self.dirs.append(file)
-# else:
-# logging.critical('Unknown file type: %s' % type)
-# else:
-# logging.critical('Bad line format: %s' % line)
+ result = paternFILE.search(line)
+ if result:
+ type = 'file'
+ file = result.groups()[0]
+ day = int(result.groups()[2])
+ month = self.month[result.groups()[3]]
+ year = int(result.groups()[4])
+ (hour, min) = result.groups()[5].split(':')
+ mtime = datetime(year, month, day, int(hour), int(min), 0)
+
+ if mtime:
+ logging.critical ('%s %s with mtime %s vs %s' % (type, file, mtime, self.actual))
+ if mtime > self.actual:
+ if type == 'file':
+ self.result.append(self.curdir+os.sep+file)
+ else:
+ if type == 'dir':
+ self.dirs.append(file)
+ else:
+ logging.critical('Unknown file type: %s' % type)
+ else:
+ pass
+ #logging.critical('Bad line format: %s' % line)
def __writer (self, data):
- file = open (self.curfile, 'a')
+ file = open (self.curfile, 'w')
file.write(data)
file.close()
@@ -94,14 +86,14 @@
logging.critical('LIST %s' % self.curdir)
self.conn.request('GET',self.curdir)
self.__parse(self.conn.getresponse().read())
- return 0
+
if self.dirs:
dirs = self.dirs[:]
for dir in dirs:
self.curdir += '/%s' % dir
self.cmpdir()
- self.ftp.cwd('../')
self.curdir = os.path.split(self.curdir)[0]
+ self.curdir = os.path.split(self.curdir)[0]
def syncfile (self, dir, file):
@@ -111,18 +103,22 @@
if os.access(path, os.W_OK) and os.path.isdir (path):
self.curfile = '%s/%s/%s' % (self.dsaStorage, dir, file)
logging.critical('sync %s' % (path+os.sep+file))
- self.ftp.retrbinary('RETR %s' % (self.dsaSource+os.sep+dir+os.sep+file), self.__writer)
+ self.conn.request('GET', '%s' % (self.dsaSource+os.sep+dir+os.sep+file))
+ data = self.conn.getresponse().read()
+ self.__writer(data)
else:
os.mkdir(path)
logging.critical('sync dir %s' % path)
self.syncfile(dir, file)
else:
self.curfile = self.dsaStorage+os.sep+file
- self.ftp.retrbinary('RETR %s' % (self.dsaSource + os.sep+file), self.__writer)
+ self.conn.request('GET', '%s' % (self.dsaSource+os.sep+file))
+ data = self.conn.getresponse().read()
+ self.__writer(data)
def syncdirs (self):
for path in self.result:
- file = path[len(self.dsaSource)+1:]
+ file = path[len(self.dsaSource):]
(dir, file) = os.path.split(file)
self.syncfile (dir, file)
@@ -132,8 +128,7 @@
self.cmpdir ()
if self.result:
- print result
- #self.syncdirs ()
+ self.syncdirs ()
if __name__ == '__main__':
try:
More information about the Oval-commits
mailing list