[googlecl] 05/05: Imported Debian patch 0.9.13-1.1
Luke Faraone
lfaraone at moszumanska.debian.org
Sat Dec 7 22:51:23 UTC 2013
This is an automated email from the git hooks/post-receive script.
lfaraone pushed a commit to branch master
in repository googlecl.
commit b3ce56bdf6bf13f52514d5fb8c3dc5ce6e65baf9
Author: Dmitry Shachnev <mitya57 at gmail.com>
Date: Thu Sep 6 18:40:42 2012 +0400
Imported Debian patch 0.9.13-1.1
---
debian/changelog | 8 +
debian/patches/fix_664989.patch | 328 ++++++++++++++++++++++++++++++++++++++++
debian/patches/series | 1 +
3 files changed, 337 insertions(+)
diff --git a/debian/changelog b/debian/changelog
index 184af26..bcf335b 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,11 @@
+googlecl (0.9.13-1.1) unstable; urgency=low
+
+ * Non-maintainer upload.
+ * debian/patches/fix_664989.patch: Fix docs module not working with
+ new python-gdata versions (Closes: #664989).
+
+ -- Dmitry Shachnev <mitya57 at gmail.com> Thu, 06 Sep 2012 18:40:42 +0400
+
googlecl (0.9.13-1) unstable; urgency=low
* New upstream release.
diff --git a/debian/patches/fix_664989.patch b/debian/patches/fix_664989.patch
new file mode 100644
index 0000000..56de593
--- /dev/null
+++ b/debian/patches/fix_664989.patch
@@ -0,0 +1,328 @@
+From: Dmitry Shachnev <mitya57 at gmail.com>
+Description: Fix docs module not working with new python-gdata versions
+ Most fixes in this patch have been cherry-picked from upstream SVN.
+ My only addition is forwarded upstream, see comment 22 in the linked
+ upstream bug.
+Origin: upstream
+Bug: http://code.google.com/p/googlecl/issues/detail?id=449
+Bug-Debian: http://bugs.debian.org/664989
+
+diff -uN a/src/googlecl/docs/base.py b/src/googlecl/docs/base.py
+--- a/src/googlecl/docs/base.py 2011-02-09 06:35:57.000000000 +0300
++++ b/src/googlecl/docs/base.py 2012-09-06 18:22:39.200418012 +0400
+@@ -115,7 +115,12 @@
+ base_path = path
+
+ if not new_doc:
+- self.Export(doc_entry_or_title.content.src, path)
++ # This used to be the following, passing just the URL instead of the
++ # entry object (which it's guaranteed to be since not new_doc).
++ # Both client and service seem happy with it, so it was probably
++ # unnecessary to reduce it to a URL first.
++ # self.Export(doc_entry_or_title.content.src, path)
++ self.Export(doc_entry_or_title, path)
+ file_hash = _md5_hash_file(path)
+ else:
+ file_hash = None
+@@ -232,7 +232,10 @@
+ if can_export(entry):
+ self.Export(entry, path)
+ else:
+- self.Download(entry, path)
++ if hasattr(self, 'DownloadResource'):
++ self.DownloadResource(entry, path)
++ else:
++ self.Download(entry, path)
+ except self.request_error, err:
+ LOG.error(safe_encode('Download of ' + entry_title + ' failed: ' +
+ unicode(err)))
+diff -uN a/src/googlecl/docs/client.py b/src/googlecl/docs/client.py
+--- a/src/googlecl/docs/client.py 2010-12-17 03:26:30.000000000 +0300
++++ b/src/googlecl/docs/client.py 2012-09-06 18:22:39.204418012 +0400
+@@ -32,12 +32,13 @@
+ import gdata.docs.client
+ import logging
+ import os
++import re
+ import shutil
+ import googlecl
+ import googlecl.client
+ from googlecl.docs import SECTION_HEADER
+ import googlecl.docs.base
+-
++import atom.data
+
+ LOG = logging.getLogger(googlecl.docs.LOGGER_NAME + '.client')
+
+@@ -52,22 +53,62 @@
+ app with a command line interface.
+
+ """
+- DOCLIST_FEED_URI = gdata.docs.client.DOCLIST_FEED_URI
++
++ # Versions 2.0.5-2.0.14 of python gdata included a DOCLIST_FEED_URI variable,
++ # but 2.0.15 removed it, so we hard code it here.
++ DOCLIST_FEED_URI = '/feeds/default/private/full'
++
++ # Another casualty in 2.0.15.
++ FILE_EXT_PATTERN = re.compile('.*\.([a-zA-Z]{3,}$)')
++
++ # File extension/mimetype pairs of common format.
++ # These seem to have disappeared in python-gdata 2.0.15 and 2.0.16, so here
++ # they are given explicitly.
++ MIMETYPES = {
++ 'CSV': 'text/csv',
++ 'TSV': 'text/tab-separated-values',
++ 'TAB': 'text/tab-separated-values',
++ 'DOC': 'application/msword',
++ 'DOCX': ('application/vnd.openxmlformats-officedocument.'
++ 'wordprocessingml.document'),
++ 'ODS': 'application/x-vnd.oasis.opendocument.spreadsheet',
++ 'ODT': 'application/vnd.oasis.opendocument.text',
++ 'RTF': 'application/rtf',
++ 'SXW': 'application/vnd.sun.xml.writer',
++ 'TXT': 'text/plain',
++ 'XLS': 'application/vnd.ms-excel',
++ 'XLSX': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
++ 'PDF': 'application/pdf',
++ 'PNG': 'image/png',
++ 'PPT': 'application/vnd.ms-powerpoint',
++ 'PPS': 'application/vnd.ms-powerpoint',
++ 'HTM': 'text/html',
++ 'HTML': 'text/html',
++ 'ZIP': 'application/zip',
++ 'SWF': 'application/x-shockwave-flash'
++ }
+
+ def __init__(self, config):
+ """Constructor."""
+ gdata.docs.client.DocsClient.__init__(self, source='GoogleCL')
+ googlecl.client.BaseClientCL.__init__(self, SECTION_HEADER, config)
+
++ # Python gdata 2.0.15 drastically changed the API, including renaming
++ # gdata.docs.data.DocList to ResourceFeed.
++ def _doclist_class(self):
++ if (hasattr(gdata.docs.data, 'ResourceFeed')):
++ return gdata.docs.data.ResourceFeed
++ else:
++ return gdata.docs.data.DocList
++
+ def _create_folder(self, title, folder_or_uri):
+ """Wrapper function to mesh with DocsBaseCL.upload_docs()."""
+ return self.create(gdata.docs.data.FOLDER_LABEL, title,
+ folder_or_uri)
+
+ def _determine_content_type(self, file_ext):
+- from gdata.docs.data import MIMETYPES
+ try:
+- return MIMETYPES[file_ext.upper()]
++ return DocsClientCL.MIMETYPES[file_ext.upper()]
+ except KeyError:
+ LOG.info('No supported filetype found for extension %s', file_ext)
+ return None
+@@ -89,7 +130,12 @@
+ RequestError: on error response from server.
+
+ """
+- response_string = self.get_file_content(uri, auth_token=auth_token)
++ # More undocumented changes in python gdata 2.0.15
++ if hasattr(self, 'get_file_content'):
++ response_string = self.get_file_content(uri, auth_token=auth_token)
++ else:
++ response_string = self._get_content(uri, None);
++
+ if googlecl.docs.base.can_export(uri) and\
+ self.config.lazy_get(SECTION_HEADER, 'decode_utf_8', False, bool):
+ try:
+@@ -103,7 +149,7 @@
+ download_file.write(file_string)
+ download_file.flush()
+
+- def export(self, entry_or_id_or_url, file_path, gid=None, auth_token=None,
++ def export(self, entry, file_path, gid=None, auth_token=None,
+ **kwargs):
+ """Exports a document from the Document List in a different format.
+
+@@ -111,9 +157,12 @@
+ issue
+
+ Args:
+- entry_or_id_or_url: gdata.docs.data.DocsEntry or string representing a
++ entry: An entry object specifying the document to be exported.
++ Formerly, this was entry_or_id_or_url: a
++ gdata.data.GDEntry or string representing a
+ resource id or URL to download the document from (such as the content
+- src link).
++ src link). But that wreaks havoc in python gdata >2.0.15, and it was
++ easy to ensure we only call with an actual Entry.
+ file_path: str The full path to save the file to. The export
+ format is inferred from the the file extension.
+ gid: str (optional) grid id for downloading a single grid of a
+@@ -129,7 +178,7 @@
+ """
+ extra_params = {}
+
+- match = gdata.docs.data.FILE_EXT_PATTERN.match(file_path)
++ match = DocsClientCL.FILE_EXT_PATTERN.match(file_path)
+ if match:
+ export_format = match.group(1)
+ # Hack for apps-api-issues Issue 2294
+@@ -143,8 +192,19 @@
+ if gid is not None:
+ extra_params['gid'] = gid
+
+- self.download(entry_or_id_or_url, file_path, extra_params,
+- auth_token=auth_token, **kwargs)
++ if not hasattr(entry, 'content'):
++ LOG.fatal("This shouldn't happen. Export called with invalid entry")
++
++ # Sigh, more changes in python gdata 2.0.15. Has download_resource but not
++ # download.
++ if hasattr(self, 'download'):
++ self.download(entry, file_path, extra_params,
++ auth_token=auth_token, **kwargs)
++ elif hasattr(self, 'download_resource'):
++ self.download_resource(entry, file_path, extra_params,
++ **kwargs)
++ else:
++ LOG.fatal("Something is screwed up with python gdata.")
+
+ Export = export
+
+@@ -169,11 +229,12 @@
+ # folder.content.src is the uri to query for documents in that folder.
+ entries.extend(self.GetEntries(folder.content.src,
+ titles,
+- desired_class=gdata.docs.data.DocList))
++ desired_class=self._doclist_class))
+ else:
+- entries = self.GetEntries(gdata.docs.client.DOCLIST_FEED_URI,
++ entries = self.GetEntries(DocsClientCL.DOCLIST_FEED_URI,
+ titles,
+- desired_class=gdata.docs.data.DocList)
++ desired_class=self._doclist_class())
++
+ return entries
+
+ def get_single_doc(self, title=None, folder_entry_list=None):
+@@ -192,16 +253,16 @@
+ if len(folder_entry_list) == 1:
+ return self.GetSingleEntry(folder_entry_list[0].content.src,
+ title,
+- desired_class=gdata.docs.data.DocList)
++ desired_class=self._doclist_class())
+ else:
+ entries = self.get_doclist(title, folder_entry_list)
+ # Technically don't need the desired_class for this call
+ # because we have the entries.
+ return self.GetSingleEntry(entries, title)
+ else:
+- return self.GetSingleEntry(gdata.docs.client.DOCLIST_FEED_URI,
++ return self.GetSingleEntry(DocsClientCL.DOCLIST_FEED_URI,
+ title,
+- desired_class=gdata.docs.data.DocList)
++ desired_class=self._doclist_class())
+
+ GetSingleDoc = get_single_doc
+
+@@ -216,7 +277,7 @@
+
+ """
+ if title:
+- uri = gdata.docs.client.DOCLIST_FEED_URI + '-/folder'
++ uri = DocsClientCL.DOCLIST_FEED_URI + '-/folder'
+ folder_entries = self.GetEntries(uri, title)
+ if not folder_entries:
+ LOG.warning('No folder found that matches ' + title)
+@@ -229,7 +290,7 @@
+ def is_token_valid(self, test_uri=None):
+ """Check that the token being used is valid."""
+ if not test_uri:
+- docs_uri = gdata.docs.client.DOCLIST_FEED_URI
++ docs_uri = DocsClientCL.DOCLIST_FEED_URI
+ sheets_uri = ('https://spreadsheets.google.com/feeds/spreadsheets'
+ '/private/full')
+ docs_test = googlecl.client.BaseClientCL.IsTokenValid(self, docs_uri)
+@@ -248,16 +309,15 @@
+ (e.g. 'txt', 'doc')
+
+ """
+- from gdata.docs.data import MIMETYPES
+ try:
+- content_type = MIMETYPES[file_ext.upper()]
++ content_type = DocsClientCL.MIMETYPES[file_ext.upper()]
+ except KeyError:
+ print 'Could not find mimetype for ' + file_ext
+- while file_ext not in MIMETYPES.keys():
++ while file_ext not in DocsClientCL.MIMETYPES.keys():
+ file_ext = raw_input('Please enter one of ' +
+- MIMETYPES.keys() +
++ DocsClientCL.MIMETYPES.keys() +
+ ' to determine the content type to upload as.')
+- content_type = MIMETYPES[file_ext.upper()]
++ content_type = DocsClientCL.MIMETYPES[file_ext.upper()]
+ mediasource = gdata.data.MediaSource(file_path=path_to_new_content,
+ content_type=content_type)
+ return self.Update(doc_entry, media_source=mediasource)
+@@ -292,7 +352,35 @@
+ Returns:
+ Entry representing the document uploaded.
+ """
+- return self.upload(path, entry_title, post_uri, content_type)
+
++ # GoogleCL that uses gdata-2.0.0 through 2.0.4 won't ever see this code.
++ # If it uses gdata-2.0.5 through 2.0.7, it would otherwise give an error
++ # about a resumable uploader that it doesn't have. This avoids that error.
++ # If it uses gdata-2.0.8, 2.0.9, or 2.0.11 it can't upload docs due to an SSL error.
++ # If it uses gdata-2.0.10, 2.0.12, or later, this should allow it to
++ # upload all allowable file types.
++
++ if hasattr(gdata.client,"ResumableUploader"):
++ f = open(path)
++ file_size = os.path.getsize(f.name)
++ uploader = gdata.client.ResumableUploader(
++ self, f, content_type, file_size, chunk_size=1048576,
++ desired_class=gdata.data.GDEntry)
++
++ # Set metadata for our upload.
++ entry = gdata.data.GDEntry(title=atom.data.Title(text=entry_title))
++ new_entry = uploader.UploadFile('/feeds/upload/create-session/default/private/full', entry=entry)
++ # These might be useful for a verbose debug statement:
++ # print 'Document uploaded: ' + new_entry.title.text
++ # print 'Quota used: %s' % new_entry.quota_bytes_used.text
++ f.close()
++
++ return new_entry
++
++ else:
++ # If we have reached this point, we must be in gdata-2.0.5 through 2.0.7
++ # The upload is guaranteed to fail, so the self.upload call is here to
++ # return whatever the caller wanted.
++ return self.upload(path, entry_title, post_uri, content_type)
+
+ SERVICE_CLASS = DocsClientCL
+diff -uN a/src/googlecl/docs/__init__.py b/src/googlecl/docs/__init__.py
+--- a/src/googlecl/docs/__init__.py 2010-12-17 03:24:24.000000000 +0300
++++ b/src/googlecl/docs/__init__.py 2012-09-06 18:22:39.204418012 +0400
+@@ -124,7 +124,9 @@
+ # required
+ #===============================================================================
+ def _run_get(client, options, args):
+- if not hasattr(client, 'Download'):
++ # python gdata 2.0.15 removed Download and added DownloadResource.
++ if not hasattr(client, 'Download') and \
++ not hasattr(client, 'DownloadResource'):
+ LOG.error('Downloading documents is not supported for' +
+ ' gdata-python-client < 2.0')
+ return
+@@ -167,7 +169,9 @@
+ '"docs edit" yet')
+ LOG.debug('(Ignoring ' + unicode(args) +')')
+
+- if not hasattr(client, 'Download'):
++ # python gdata 2.0.15 removed Download and added DownloadResource.
++ if not hasattr(client, 'Download') and \
++ not hasattr(client, 'DownloadResource'):
+ LOG.error('Editing documents is not supported' +
+ ' for gdata-python-client < 2.0')
+ return
diff --git a/debian/patches/series b/debian/patches/series
new file mode 100644
index 0000000..4d899cc
--- /dev/null
+++ b/debian/patches/series
@@ -0,0 +1 @@
+fix_664989.patch
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-google/googlecl.git
More information about the Pkg-google-commits
mailing list