1 #!/usr/bin/env python
2 #
3 # Copyright 2007, Google Inc.
4 # All rights reserved.
5 #
6 # Redistribution and use in source and binary forms, with or without
7 # modification, are permitted provided that the following conditions are
8 # met:
9 #
10 # * Redistributions of source code must retain the above copyright
11 # notice, this list of conditions and the following disclaimer.
12 # * Redistributions in binary form must reproduce the above
13 # copyright notice, this list of conditions and the following disclaimer
14 # in the documentation and/or other materials provided with the
15 # distribution.
16 # * Neither the name of Google Inc. nor the names of its
17 # contributors may be used to endorse or promote products derived from
18 # this software without specific prior written permission.
19 #
20 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
23 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
24 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
25 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
26 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31
32 """Tool for uploading diffs from a version control system to the codereview app.
33
34 Usage summary: upload.py [options] [-- diff_options]
35
36 Diff options are passed to the diff command of the underlying system.
37
38 Supported version control systems:
39 Git
40 Mercurial
41 Subversion
42
43 It is important for Git/Mercurial users to specify a tree/node/branch to diff
44 against by using the '--rev' option.
45 """
46 # This code is derived from appcfg.py in the App Engine SDK (open source),
47 # and from ASPN recipe #146306.
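# Example invocations (illustrative; server, revision, issue number and
# reviewer values below are placeholders, not taken from this script):
#
#   upload.py --rev=HEAD^ -m "Fix flaky death test" -r reviewer@example.com
#   upload.py -s codereview.appspot.com -i 1234 -m "Address review comments"
#
# Anything after "--" on the command line is passed straight to the diff
# command of the underlying version control system.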
48
49 import cookielib
50 import getpass
51 import logging
52 import md5
53 import mimetypes
54 import optparse
55 import os
56 import re
57 import socket
58 import subprocess
59 import sys
60 import urllib
61 import urllib2
62 import urlparse
63
64 try:
65 import readline
66 except ImportError:
67 pass
68
69 # The logging verbosity:
70 # 0: Errors only.
71 # 1: Status messages.
72 # 2: Info logs.
73 # 3: Debug logs.
74 verbosity = 1
75
76 # Max size of patch or base file.
77 MAX_UPLOAD_SIZE = 900 * 1024
78
79
80 def GetEmail(prompt):
81 """Prompts the user for their email address and returns it.
82
83 The last used email address is saved to a file and offered up as a suggestion
84 to the user. If the user presses enter without typing in anything the last
85 used email address is used. If the user enters a new address, it is saved
86 for next time we prompt.
87
88 """
89 last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
90 last_email = ""
91 if os.path.exists(last_email_file_name):
92 try:
93 last_email_file = open(last_email_file_name, "r")
94 last_email = last_email_file.readline().strip("\n")
95 last_email_file.close()
96 prompt += " [%s]" % last_email
97 except IOError, e:
98 pass
99 email = raw_input(prompt + ": ").strip()
100 if email:
101 try:
102 last_email_file = open(last_email_file_name, "w")
103 last_email_file.write(email)
104 last_email_file.close()
105 except IOError, e:
106 pass
107 else:
108 email = last_email
109 return email
110
111
112 def StatusUpdate(msg):
113 """Print a status message to stdout.
114
115 If 'verbosity' is greater than 0, print the message.
116
117 Args:
118 msg: The string to print.
119 """
120 if verbosity > 0:
121 print msg
122
123
124 def ErrorExit(msg):
125 """Print an error message to stderr and exit."""
126 print >>sys.stderr, msg
127 sys.exit(1)
128
129
130 class ClientLoginError(urllib2.HTTPError):
131 """Raised to indicate there was an error authenticating with ClientLogin."""
132
133 def __init__(self, url, code, msg, headers, args):
134 urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
135 self.args = args
136 self.reason = args["Error"]
137
138
139 class AbstractRpcServer(object):
140 """Provides a common interface for a simple RPC server."""
141
142 def __init__(self, host, auth_function, host_override=None, extra_headers={},
143 save_cookies=False):
144 """Creates a new HttpRpcServer.
145
146 Args:
147 host: The host to send requests to.
148 auth_function: A function that takes no arguments and returns an
149 (email, password) tuple when called. Will be called if authentication
150 is required.
151 host_override: The host header to send to the server (defaults to host).
152 extra_headers: A dict of extra headers to append to every request.
153 save_cookies: If True, save the authentication cookies to local disk.
154 If False, use an in-memory cookiejar instead. Subclasses must
155 implement this functionality. Defaults to False.
156 """
157 self.host = host
158 self.host_override = host_override
159 self.auth_function = auth_function
160 self.authenticated = False
161 self.extra_headers = extra_headers
162 self.save_cookies = save_cookies
163 self.opener = self._GetOpener()
164 if self.host_override:
165 logging.info("Server: %s; Host: %s", self.host, self.host_override)
166 else:
167 logging.info("Server: %s", self.host)
168
169 def _GetOpener(self):
170 """Returns an OpenerDirector for making HTTP requests.
171
172 Returns:
173 A urllib2.OpenerDirector object.
174 """
175 raise NotImplementedError()
176
177 def _CreateRequest(self, url, data=None):
178 """Creates a new urllib request."""
179 logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
180 req = urllib2.Request(url, data=data)
181 if self.host_override:
182 req.add_header("Host", self.host_override)
183 for key, value in self.extra_headers.iteritems():
184 req.add_header(key, value)
185 return req
186
187 def _GetAuthToken(self, email, password):
188 """Uses ClientLogin to authenticate the user, returning an auth token.
189
190 Args:
191 email: The user's email address
192 password: The user's password
193
194 Raises:
195 ClientLoginError: If there was an error authenticating with ClientLogin.
196 HTTPError: If there was some other form of HTTP error.
197
198 Returns:
199 The authentication token returned by ClientLogin.
200 """
201 account_type = "GOOGLE"
202 if self.host.endswith(".google.com"):
203 # Needed for use inside Google.
204 account_type = "HOSTED"
205 req = self._CreateRequest(
206 url="https://www.google.com/accounts/ClientLogin",
207 data=urllib.urlencode({
208 "Email": email,
209 "Passwd": password,
210 "service": "ah",
211 "source": "rietveld-codereview-upload",
212 "accountType": account_type,
213 }),
214 )
215 try:
216 response = self.opener.open(req)
217 response_body = response.read()
218 response_dict = dict(x.split("=")
219 for x in response_body.split("\n") if x)
220 return response_dict["Auth"]
221 except urllib2.HTTPError, e:
222 if e.code == 403:
223 body = e.read()
224 response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
225 raise ClientLoginError(req.get_full_url(), e.code, e.msg,
226 e.headers, response_dict)
227 else:
228 raise
229
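  # For reference, a successful ClientLogin response body was a short set of
  # key=value lines, roughly of the form (token values shortened here):
  #
  #   SID=DQAA...
  #   LSID=DQAA...
  #   Auth=DQAA...
  #
  # _GetAuthToken() above splits these lines on "=" and returns the "Auth"
  # value, which _GetAuthCookie() below exchanges for a session cookie.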
230 def _GetAuthCookie(self, auth_token):
231 """Fetches authentication cookies for an authentication token.
232
233 Args:
234 auth_token: The authentication token returned by ClientLogin.
235
236 Raises:
237 HTTPError: If there was an error fetching the authentication cookies.
238 """
239 # This is a dummy value to allow us to identify when we're successful.
240 continue_location = "http://localhost/"
241 args = {"continue": continue_location, "auth": auth_token}
242 req = self._CreateRequest("http://%s/_ah/login?%s" %
243 (self.host, urllib.urlencode(args)))
244 try:
245 response = self.opener.open(req)
246 except urllib2.HTTPError, e:
247 response = e
248 if (response.code != 302 or
249 response.info()["location"] != continue_location):
250 raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
251 response.headers, response.fp)
252 self.authenticated = True
253
254 def _Authenticate(self):
255 """Authenticates the user.
256
257 The authentication process works as follows:
258 1) We get a username and password from the user
259 2) We use ClientLogin to obtain an AUTH token for the user
260 (see https://developers.google.com/identity/protocols/AuthForInstalledApps).
261 3) We pass the auth token to /_ah/login on the server to obtain an
262 authentication cookie. If login was successful, it tries to redirect
263 us to the URL we provided.
264
265 If we attempt to access the upload API without first obtaining an
266 authentication cookie, it returns a 401 response and directs us to
267 authenticate ourselves with ClientLogin.
268 """
269 for i in range(3):
270 credentials = self.auth_function()
271 try:
272 auth_token = self._GetAuthToken(credentials[0], credentials[1])
273 except ClientLoginError, e:
274 if e.reason == "BadAuthentication":
275 print >>sys.stderr, "Invalid username or password."
276 continue
277 if e.reason == "CaptchaRequired":
278 print >>sys.stderr, (
279 "Please go to\n"
280 "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
281 "and verify you are a human. Then try again.")
282 break
283 if e.reason == "NotVerified":
284 print >>sys.stderr, "Account not verified."
285 break
286 if e.reason == "TermsNotAgreed":
287 print >>sys.stderr, "User has not agreed to TOS."
288 break
289 if e.reason == "AccountDeleted":
290 print >>sys.stderr, "The user account has been deleted."
291 break
292 if e.reason == "AccountDisabled":
293 print >>sys.stderr, "The user account has been disabled."
294 break
295 if e.reason == "ServiceDisabled":
296 print >>sys.stderr, ("The user's access to the service has been "
297 "disabled.")
298 break
299 if e.reason == "ServiceUnavailable":
300 print >>sys.stderr, "The service is not available; try again later."
301 break
302 raise
303 self._GetAuthCookie(auth_token)
304 return
305
306 def Send(self, request_path, payload=None,
307 content_type="application/octet-stream",
308 timeout=None,
309 **kwargs):
310 """Sends an RPC and returns the response.
311
312 Args:
313 request_path: The path to send the request to, eg /api/appversion/create.
314 payload: The body of the request, or None to send an empty request.
315 content_type: The Content-Type header to use.
316 timeout: timeout in seconds; default None i.e. no timeout.
317 (Note: for large requests on OS X, the timeout doesn't work right.)
318 kwargs: Any keyword arguments are converted into query string parameters.
319
320 Returns:
321 The response body, as a string.
322 """
323 # TODO: Don't require authentication. Let the server say
324 # whether it is necessary.
325 if not self.authenticated:
326 self._Authenticate()
327
328 old_timeout = socket.getdefaulttimeout()
329 socket.setdefaulttimeout(timeout)
330 try:
331 tries = 0
332 while True:
333 tries += 1
334 args = dict(kwargs)
335 url = "http://%s%s" % (self.host, request_path)
336 if args:
337 url += "?" + urllib.urlencode(args)
338 req = self._CreateRequest(url=url, data=payload)
339 req.add_header("Content-Type", content_type)
340 try:
341 f = self.opener.open(req)
342 response = f.read()
343 f.close()
344 return response
345 except urllib2.HTTPError, e:
346 if tries > 3:
347 raise
348 elif e.code == 401:
349 self._Authenticate()
350 ## elif e.code >= 500 and e.code < 600:
351 ## # Server Error - try again.
352 ## continue
353 else:
354 raise
355 finally:
356 socket.setdefaulttimeout(old_timeout)
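  # Illustrative call (path and parameter are hypothetical): extra keyword
  # arguments to Send() become query-string parameters, so
  #   server.Send("/1234/mail", payload="", send_mail="1")
  # issues a POST to http://<host>/1234/mail?send_mail=1.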
357
358
359 class HttpRpcServer(AbstractRpcServer):
360 """Provides a simplified RPC-style interface for HTTP requests."""
361
362 def _Authenticate(self):
363 """Save the cookie jar after authentication."""
364 super(HttpRpcServer, self)._Authenticate()
365 if self.save_cookies:
366 StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
367 self.cookie_jar.save()
368
369 def _GetOpener(self):
370 """Returns an OpenerDirector that supports cookies and ignores redirects.
371
372 Returns:
373 A urllib2.OpenerDirector object.
374 """
375 opener = urllib2.OpenerDirector()
376 opener.add_handler(urllib2.ProxyHandler())
377 opener.add_handler(urllib2.UnknownHandler())
378 opener.add_handler(urllib2.HTTPHandler())
379 opener.add_handler(urllib2.HTTPDefaultErrorHandler())
380 opener.add_handler(urllib2.HTTPSHandler())
381 opener.add_handler(urllib2.HTTPErrorProcessor())
382 if self.save_cookies:
383 self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
384 self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
385 if os.path.exists(self.cookie_file):
386 try:
387 self.cookie_jar.load()
388 self.authenticated = True
389 StatusUpdate("Loaded authentication cookies from %s" %
390 self.cookie_file)
391 except (cookielib.LoadError, IOError):
392 # Failed to load cookies - just ignore them.
393 pass
394 else:
395 # Create an empty cookie file with mode 600
396 fd = os.open(self.cookie_file, os.O_CREAT, 0600)
397 os.close(fd)
398 # Always chmod the cookie file
399 os.chmod(self.cookie_file, 0600)
400 else:
401       # Don't save cookies across runs of upload.py.
402 self.cookie_jar = cookielib.CookieJar()
403 opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
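    # Note: no HTTPRedirectHandler is installed, so redirects (e.g. the 302
    # from /_ah/login) are returned to the caller rather than followed;
    # _GetAuthCookie() relies on this to detect a successful login.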
404 return opener
405
406
407 parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
408 parser.add_option("-y", "--assume_yes", action="store_true",
409 dest="assume_yes", default=False,
410 help="Assume that the answer to yes/no questions is 'yes'.")
411 # Logging
412 group = parser.add_option_group("Logging options")
413 group.add_option("-q", "--quiet", action="store_const", const=0,
414 dest="verbose", help="Print errors only.")
415 group.add_option("-v", "--verbose", action="store_const", const=2,
416 dest="verbose", default=1,
417 help="Print info level logs (default).")
418 group.add_option("--noisy", action="store_const", const=3,
419 dest="verbose", help="Print all logs.")
420 # Review server
421 group = parser.add_option_group("Review server options")
422 group.add_option("-s", "--server", action="store", dest="server",
423 default="codereview.appspot.com",
424 metavar="SERVER",
425 help=("The server to upload to. The format is host[:port]. "
426 "Defaults to 'codereview.appspot.com'."))
427 group.add_option("-e", "--email", action="store", dest="email",
428 metavar="EMAIL", default=None,
429 help="The username to use. Will prompt if omitted.")
430 group.add_option("-H", "--host", action="store", dest="host",
431 metavar="HOST", default=None,
432 help="Overrides the Host header sent with all RPCs.")
433 group.add_option("--no_cookies", action="store_false",
434 dest="save_cookies", default=True,
435 help="Do not save authentication cookies to local disk.")
436 # Issue
437 group = parser.add_option_group("Issue options")
438 group.add_option("-d", "--description", action="store", dest="description",
439 metavar="DESCRIPTION", default=None,
440 help="Optional description when creating an issue.")
441 group.add_option("-f", "--description_file", action="store",
442 dest="description_file", metavar="DESCRIPTION_FILE",
443 default=None,
444 help="Optional path of a file that contains "
445 "the description when creating an issue.")
446 group.add_option("-r", "--reviewers", action="store", dest="reviewers",
447 metavar="REVIEWERS", default=None,
448 help="Add reviewers (comma separated email addresses).")
449 group.add_option("--cc", action="store", dest="cc",
450 metavar="CC", default=None,
451 help="Add CC (comma separated email addresses).")
452 # Upload options
453 group = parser.add_option_group("Patch options")
454 group.add_option("-m", "--message", action="store", dest="message",
455 metavar="MESSAGE", default=None,
456 help="A message to identify the patch. "
457 "Will prompt if omitted.")
458 group.add_option("-i", "--issue", type="int", action="store",
459 metavar="ISSUE", default=None,
460 help="Issue number to which to add. Defaults to new issue.")
461 group.add_option("--download_base", action="store_true",
462 dest="download_base", default=False,
463 help="Base files will be downloaded by the server "
464 "(side-by-side diffs may not work on files with CRs).")
465 group.add_option("--rev", action="store", dest="revision",
466 metavar="REV", default=None,
467 help="Branch/tree/revision to diff against (used by DVCS).")
468 group.add_option("--send_mail", action="store_true",
469 dest="send_mail", default=False,
470 help="Send notification email to reviewers.")
471
472
473 def GetRpcServer(options):
474 """Returns an instance of an AbstractRpcServer.
475
476 Returns:
477 A new AbstractRpcServer, on which RPC calls can be made.
478 """
479
480 rpc_server_class = HttpRpcServer
481
482 def GetUserCredentials():
483 """Prompts the user for a username and password."""
484 email = options.email
485 if email is None:
486 email = GetEmail("Email (login for uploading to %s)" % options.server)
487 password = getpass.getpass("Password for %s: " % email)
488 return (email, password)
489
490 # If this is the dev_appserver, use fake authentication.
491 host = (options.host or options.server).lower()
492 if host == "localhost" or host.startswith("localhost:"):
493 email = options.email
494 if email is None:
495 email = "test@example.com"
496 logging.info("Using debug user %s. Override with --email" % email)
497 server = rpc_server_class(
498 options.server,
499 lambda: (email, "password"),
500 host_override=options.host,
501 extra_headers={"Cookie":
502 'dev_appserver_login="%s:False"' % email},
503 save_cookies=options.save_cookies)
504 # Don't try to talk to ClientLogin.
505 server.authenticated = True
506 return server
507
508 return rpc_server_class(options.server, GetUserCredentials,
509 host_override=options.host,
510 save_cookies=options.save_cookies)
511
512
513 def EncodeMultipartFormData(fields, files):
514 """Encode form fields for multipart/form-data.
515
516 Args:
517 fields: A sequence of (name, value) elements for regular form fields.
518 files: A sequence of (name, filename, value) elements for data to be
519 uploaded as files.
520 Returns:
521 (content_type, body) ready for httplib.HTTP instance.
522
523 Source:
524 https://web.archive.org/web/20160116052001/code.activestate.com/recipes/146306
525 """
526 BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
527 CRLF = '\r\n'
528 lines = []
529 for (key, value) in fields:
530 lines.append('--' + BOUNDARY)
531 lines.append('Content-Disposition: form-data; name="%s"' % key)
532 lines.append('')
533 lines.append(value)
534 for (key, filename, value) in files:
535 lines.append('--' + BOUNDARY)
536 lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
537 (key, filename))
538 lines.append('Content-Type: %s' % GetContentType(filename))
539 lines.append('')
540 lines.append(value)
541 lines.append('--' + BOUNDARY + '--')
542 lines.append('')
543 body = CRLF.join(lines)
544 content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
545 return content_type, body
546
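# Illustrative use (field and file values are placeholders):
#
#   ctype, body = EncodeMultipartFormData(
#       [("subject", "Fix typo in docs")],
#       [("data", "data.diff", "Index: README\n...")])
#
# ctype is 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-' and
# body is the CRLF-joined payload expected by the upload handlers.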
547
548 def GetContentType(filename):
549 """Helper to guess the content-type from the filename."""
550 return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
551
552
553 # Use a shell for subcommands on Windows to get a PATH search.
554 use_shell = sys.platform.startswith("win")
555
556 def RunShellWithReturnCode(command, print_output=False,
557 universal_newlines=True):
558 """Executes a command and returns the output from stdout and the return code.
559
560 Args:
561 command: Command to execute.
562 print_output: If True, the output is printed to stdout.
563 If False, both stdout and stderr are ignored.
564 universal_newlines: Use universal_newlines flag (default: True).
565
566 Returns:
567 Tuple (output, return code)
568 """
569 logging.info("Running %s", command)
570 p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
571 shell=use_shell, universal_newlines=universal_newlines)
572 if print_output:
573 output_array = []
574 while True:
575 line = p.stdout.readline()
576 if not line:
577 break
578 print line.strip("\n")
579 output_array.append(line)
580 output = "".join(output_array)
581 else:
582 output = p.stdout.read()
583 p.wait()
584 errout = p.stderr.read()
585 if print_output and errout:
586 print >>sys.stderr, errout
587 p.stdout.close()
588 p.stderr.close()
589 return output, p.returncode
590
591
592 def RunShell(command, silent_ok=False, universal_newlines=True,
593 print_output=False):
594 data, retcode = RunShellWithReturnCode(command, print_output,
595 universal_newlines)
596 if retcode:
597 ErrorExit("Got error status from %s:\n%s" % (command, data))
598 if not silent_ok and not data:
599 ErrorExit("No output from %s" % command)
600 return data
601
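# Illustrative helper usage (commands mirror calls made elsewhere in this
# script):
#
#   out = RunShell(["svn", "info"])                     # exits on failure
#   out, code = RunShellWithReturnCode(["hg", "root"])  # caller checks code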
602
603 class VersionControlSystem(object):
604 """Abstract base class providing an interface to the VCS."""
605
606 def __init__(self, options):
607 """Constructor.
608
609 Args:
610 options: Command line options.
611 """
612 self.options = options
613
614 def GenerateDiff(self, args):
615 """Return the current diff as a string.
616
617 Args:
618 args: Extra arguments to pass to the diff command.
619 """
620 raise NotImplementedError(
621 "abstract method -- subclass %s must override" % self.__class__)
622
623 def GetUnknownFiles(self):
624 """Return a list of files unknown to the VCS."""
625 raise NotImplementedError(
626 "abstract method -- subclass %s must override" % self.__class__)
627
628 def CheckForUnknownFiles(self):
629 """Show an "are you sure?" prompt if there are unknown files."""
630 unknown_files = self.GetUnknownFiles()
631 if unknown_files:
632 print "The following files are not added to version control:"
633 for line in unknown_files:
634 print line
635       prompt = "Are you sure you want to continue? (y/N) "
636 answer = raw_input(prompt).strip()
637 if answer != "y":
638 ErrorExit("User aborted")
639
640 def GetBaseFile(self, filename):
641 """Get the content of the upstream version of a file.
642
643 Returns:
644 A tuple (base_content, new_content, is_binary, status)
645 base_content: The contents of the base file.
646 new_content: For text files, this is empty. For binary files, this is
647 the contents of the new file, since the diff output won't contain
648 information to reconstruct the current file.
649 is_binary: True iff the file is binary.
650 status: The status of the file.
651 """
652
653 raise NotImplementedError(
654 "abstract method -- subclass %s must override" % self.__class__)
655
656
657 def GetBaseFiles(self, diff):
658     """Helper that calls GetBaseFile for each file in the patch.
659
660 Returns:
661 A dictionary that maps from filename to GetBaseFile's tuple. Filenames
662 are retrieved based on lines that start with "Index:" or
663 "Property changes on:".
664 """
665 files = {}
666 for line in diff.splitlines(True):
667 if line.startswith('Index:') or line.startswith('Property changes on:'):
668 unused, filename = line.split(':', 1)
669 # On Windows if a file has property changes its filename uses '\'
670 # instead of '/'.
671 filename = filename.strip().replace('\\', '/')
672 files[filename] = self.GetBaseFile(filename)
673 return files
674
675
676 def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
677 files):
678 """Uploads the base files (and if necessary, the current ones as well)."""
679
680 def UploadFile(filename, file_id, content, is_binary, status, is_base):
681 """Uploads a file to the server."""
682 file_too_large = False
683 if is_base:
684 type = "base"
685 else:
686 type = "current"
687 if len(content) > MAX_UPLOAD_SIZE:
688 print ("Not uploading the %s file for %s because it's too large." %
689 (type, filename))
690 file_too_large = True
691 content = ""
692 checksum = md5.new(content).hexdigest()
693 if options.verbose > 0 and not file_too_large:
694 print "Uploading %s file for %s" % (type, filename)
695 url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
696 form_fields = [("filename", filename),
697 ("status", status),
698 ("checksum", checksum),
699 ("is_binary", str(is_binary)),
700 ("is_current", str(not is_base)),
701 ]
702 if file_too_large:
703 form_fields.append(("file_too_large", "1"))
704 if options.email:
705 form_fields.append(("user", options.email))
706 ctype, body = EncodeMultipartFormData(form_fields,
707 [("data", filename, content)])
708 response_body = rpc_server.Send(url, body,
709 content_type=ctype)
710 if not response_body.startswith("OK"):
711 StatusUpdate(" --> %s" % response_body)
712 sys.exit(1)
713
714 patches = dict()
715 [patches.setdefault(v, k) for k, v in patch_list]
716 for filename in patches.keys():
717 base_content, new_content, is_binary, status = files[filename]
718 file_id_str = patches.get(filename)
719 if file_id_str.find("nobase") != -1:
720 base_content = None
721 file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
722 file_id = int(file_id_str)
723 if base_content != None:
724 UploadFile(filename, file_id, base_content, is_binary, status, True)
725 if new_content != None:
726 UploadFile(filename, file_id, new_content, is_binary, status, False)
727
728 def IsImage(self, filename):
729 """Returns true if the filename has an image extension."""
730 mimetype = mimetypes.guess_type(filename)[0]
731 if not mimetype:
732 return False
733 return mimetype.startswith("image/")
734
735
736 class SubversionVCS(VersionControlSystem):
737 """Implementation of the VersionControlSystem interface for Subversion."""
738
739 def __init__(self, options):
740 super(SubversionVCS, self).__init__(options)
741 if self.options.revision:
742 match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
743 if not match:
744 ErrorExit("Invalid Subversion revision %s." % self.options.revision)
745 self.rev_start = match.group(1)
746 self.rev_end = match.group(3)
747 else:
748 self.rev_start = self.rev_end = None
749 # Cache output from "svn list -r REVNO dirname".
750 # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
751 self.svnls_cache = {}
752 # SVN base URL is required to fetch files deleted in an older revision.
753 # Result is cached to not guess it over and over again in GetBaseFile().
754 required = self.options.download_base or self.options.revision is not None
755 self.svn_base = self._GuessBase(required)
756
757 def GuessBase(self, required):
758 """Wrapper for _GuessBase."""
759 return self.svn_base
760
761 def _GuessBase(self, required):
762 """Returns the SVN base URL.
763
764 Args:
765 required: If true, exits if the url can't be guessed, otherwise None is
766 returned.
767 """
768 info = RunShell(["svn", "info"])
769 for line in info.splitlines():
770 words = line.split()
771 if len(words) == 2 and words[0] == "URL:":
772 url = words[1]
773 scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
774 username, netloc = urllib.splituser(netloc)
775 if username:
776 logging.info("Removed username from base URL")
777 if netloc.endswith("svn.python.org"):
778 if netloc == "svn.python.org":
779 if path.startswith("/projects/"):
780 path = path[9:]
781 elif netloc != "pythondev@svn.python.org":
782 ErrorExit("Unrecognized Python URL: %s" % url)
783 base = "http://svn.python.org/view/*checkout*%s/" % path
784 logging.info("Guessed Python base = %s", base)
785 elif netloc.endswith("svn.collab.net"):
786 if path.startswith("/repos/"):
787 path = path[6:]
788 base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
789 logging.info("Guessed CollabNet base = %s", base)
790 elif netloc.endswith(".googlecode.com"):
791 path = path + "/"
792 base = urlparse.urlunparse(("http", netloc, path, params,
793 query, fragment))
794 logging.info("Guessed Google Code base = %s", base)
795 else:
796 path = path + "/"
797 base = urlparse.urlunparse((scheme, netloc, path, params,
798 query, fragment))
799 logging.info("Guessed base = %s", base)
800 return base
801 if required:
802 ErrorExit("Can't find URL in output from svn info")
803 return None
804
805 def GenerateDiff(self, args):
806 cmd = ["svn", "diff"]
807 if self.options.revision:
808 cmd += ["-r", self.options.revision]
809 cmd.extend(args)
810 data = RunShell(cmd)
811 count = 0
812 for line in data.splitlines():
813 if line.startswith("Index:") or line.startswith("Property changes on:"):
814 count += 1
815 logging.info(line)
816 if not count:
817 ErrorExit("No valid patches found in output from svn diff")
818 return data
819
820 def _CollapseKeywords(self, content, keyword_str):
821 """Collapses SVN keywords."""
822 # svn cat translates keywords but svn diff doesn't. As a result of this
823 # behavior patching.PatchChunks() fails with a chunk mismatch error.
824 # This part was originally written by the Review Board development team
825 # who had the same problem (https://reviews.reviewboard.org/r/276/).
826 # Mapping of keywords to known aliases
827 svn_keywords = {
828 # Standard keywords
829 'Date': ['Date', 'LastChangedDate'],
830 'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
831 'Author': ['Author', 'LastChangedBy'],
832 'HeadURL': ['HeadURL', 'URL'],
833 'Id': ['Id'],
834
835 # Aliases
836 'LastChangedDate': ['LastChangedDate', 'Date'],
837 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
838 'LastChangedBy': ['LastChangedBy', 'Author'],
839 'URL': ['URL', 'HeadURL'],
840 }
841
842 def repl(m):
843 if m.group(2):
844 return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
845 return "$%s$" % m.group(1)
846 keywords = [keyword
847 for name in keyword_str.split(" ")
848 for keyword in svn_keywords.get(name, [])]
849 return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
850
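  # For reference, _CollapseKeywords() above rewrites expanded keywords so the
  # base content matches what "svn diff" produced, e.g. (values illustrative):
  #   "$Revision: 1234 $" becomes "$Revision$", and the fixed-width form
  #   "$Id:: ... $" becomes "$Id::" padded with spaces to its original width.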
851 def GetUnknownFiles(self):
852 status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
853 unknown_files = []
854 for line in status.split("\n"):
855 if line and line[0] == "?":
856 unknown_files.append(line)
857 return unknown_files
858
859 def ReadFile(self, filename):
860 """Returns the contents of a file."""
861 file = open(filename, 'rb')
862 result = ""
863 try:
864 result = file.read()
865 finally:
866 file.close()
867 return result
868
869 def GetStatus(self, filename):
870 """Returns the status of a file."""
871 if not self.options.revision:
872 status = RunShell(["svn", "status", "--ignore-externals", filename])
873 if not status:
874 ErrorExit("svn status returned no output for %s" % filename)
875 status_lines = status.splitlines()
876       # If the file is in a changelist, the output will begin with
877 # "\n--- Changelist 'cl_name':\n". See
878 # https://web.archive.org/web/20090918234815/svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
879 if (len(status_lines) == 3 and
880 not status_lines[0] and
881 status_lines[1].startswith("--- Changelist")):
882 status = status_lines[2]
883 else:
884 status = status_lines[0]
885 # If we have a revision to diff against we need to run "svn list"
886 # for the old and the new revision and compare the results to get
887 # the correct status for a file.
888 else:
889 dirname, relfilename = os.path.split(filename)
890 if dirname not in self.svnls_cache:
891 cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
892 out, returncode = RunShellWithReturnCode(cmd)
893 if returncode:
894 ErrorExit("Failed to get status for %s." % filename)
895 old_files = out.splitlines()
896 args = ["svn", "list"]
897 if self.rev_end:
898 args += ["-r", self.rev_end]
899 cmd = args + [dirname or "."]
900 out, returncode = RunShellWithReturnCode(cmd)
901 if returncode:
902 ErrorExit("Failed to run command %s" % cmd)
903 self.svnls_cache[dirname] = (old_files, out.splitlines())
904 old_files, new_files = self.svnls_cache[dirname]
905 if relfilename in old_files and relfilename not in new_files:
906 status = "D "
907 elif relfilename in old_files and relfilename in new_files:
908 status = "M "
909 else:
910 status = "A "
911 return status
912
913 def GetBaseFile(self, filename):
914 status = self.GetStatus(filename)
915 base_content = None
916 new_content = None
917
918 # If a file is copied its status will be "A +", which signifies
919 # "addition-with-history". See "svn st" for more information. We need to
920 # upload the original file or else diff parsing will fail if the file was
921 # edited.
922 if status[0] == "A" and status[3] != "+":
923 # We'll need to upload the new content if we're adding a binary file
924 # since diff's output won't contain it.
925 mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
926 silent_ok=True)
927 base_content = ""
928 is_binary = mimetype and not mimetype.startswith("text/")
929 if is_binary and self.IsImage(filename):
930 new_content = self.ReadFile(filename)
931 elif (status[0] in ("M", "D", "R") or
932 (status[0] == "A" and status[3] == "+") or # Copied file.
933 (status[0] == " " and status[1] == "M")): # Property change.
934 args = []
935 if self.options.revision:
936 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
937 else:
938 # Don't change filename, it's needed later.
939 url = filename
940 args += ["-r", "BASE"]
941 cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
942 mimetype, returncode = RunShellWithReturnCode(cmd)
943 if returncode:
944 # File does not exist in the requested revision.
945 # Reset mimetype, it contains an error message.
946 mimetype = ""
947 get_base = False
948 is_binary = mimetype and not mimetype.startswith("text/")
949 if status[0] == " ":
950 # Empty base content just to force an upload.
951 base_content = ""
952 elif is_binary:
953 if self.IsImage(filename):
954 get_base = True
955 if status[0] == "M":
956 if not self.rev_end:
957 new_content = self.ReadFile(filename)
958 else:
959 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
960 new_content = RunShell(["svn", "cat", url],
961 universal_newlines=True, silent_ok=True)
962 else:
963 base_content = ""
964 else:
965 get_base = True
966
967 if get_base:
968 if is_binary:
969 universal_newlines = False
970 else:
971 universal_newlines = True
972 if self.rev_start:
973 # "svn cat -r REV delete_file.txt" doesn't work. cat requires
974 # the full URL with "@REV" appended instead of using "-r" option.
975 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
976 base_content = RunShell(["svn", "cat", url],
977 universal_newlines=universal_newlines,
978 silent_ok=True)
979 else:
980 base_content = RunShell(["svn", "cat", filename],
981 universal_newlines=universal_newlines,
982 silent_ok=True)
983 if not is_binary:
984 args = []
985 if self.rev_start:
986 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
987 else:
988 url = filename
989 args += ["-r", "BASE"]
990 cmd = ["svn"] + args + ["propget", "svn:keywords", url]
991 keywords, returncode = RunShellWithReturnCode(cmd)
992 if keywords and not returncode:
993 base_content = self._CollapseKeywords(base_content, keywords)
994 else:
995 StatusUpdate("svn status returned unexpected output: %s" % status)
996 sys.exit(1)
997 return base_content, new_content, is_binary, status[0:5]
998
999
1000 class GitVCS(VersionControlSystem):
1001 """Implementation of the VersionControlSystem interface for Git."""
1002
1003 def __init__(self, options):
1004 super(GitVCS, self).__init__(options)
1005 # Map of filename -> hash of base file.
1006 self.base_hashes = {}
1007
1008 def GenerateDiff(self, extra_args):
1009 # This is more complicated than svn's GenerateDiff because we must convert
1010 # the diff output to include an svn-style "Index:" line as well as record
1011 # the hashes of the base files, so we can upload them along with our diff.
1012 if self.options.revision:
1013 extra_args = [self.options.revision] + extra_args
1014 gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
1015 svndiff = []
1016 filecount = 0
1017 filename = None
1018 for line in gitdiff.splitlines():
1019 match = re.match(r"diff --git a/(.*) b/.*$", line)
1020 if match:
1021 filecount += 1
1022 filename = match.group(1)
1023 svndiff.append("Index: %s\n" % filename)
1024 else:
1025 # The "index" line in a git diff looks like this (long hashes elided):
1026 # index 82c0d44..b2cee3f 100755
1027 # We want to save the left hash, as that identifies the base file.
1028 match = re.match(r"index (\w+)\.\.", line)
1029 if match:
1030 self.base_hashes[filename] = match.group(1)
1031 svndiff.append(line + "\n")
1032 if not filecount:
1033 ErrorExit("No valid patches found in output from git diff")
1034 return "".join(svndiff)
1035
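  # For reference, GenerateDiff() above rewrites a header pair such as
  #   diff --git a/src/foo.cc b/src/foo.cc
  #   index 82c0d44..b2cee3f 100755
  # into
  #   Index: src/foo.cc
  #   index 82c0d44..b2cee3f 100755
  # and records 82c0d44 as the base hash for src/foo.cc (path is illustrative).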
1036 def GetUnknownFiles(self):
1037 status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
1038 silent_ok=True)
1039 return status.splitlines()
1040
1041 def GetBaseFile(self, filename):
1042 hash = self.base_hashes[filename]
1043 base_content = None
1044 new_content = None
1045 is_binary = False
1046 if hash == "0" * 40: # All-zero hash indicates no base file.
1047 status = "A"
1048 base_content = ""
1049 else:
1050 status = "M"
1051 base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
1052 if returncode:
1053 ErrorExit("Got error status from 'git show %s'" % hash)
1054 return (base_content, new_content, is_binary, status)
1055
1056
1057 class MercurialVCS(VersionControlSystem):
1058 """Implementation of the VersionControlSystem interface for Mercurial."""
1059
1060 def __init__(self, options, repo_dir):
1061 super(MercurialVCS, self).__init__(options)
1062 # Absolute path to repository (we can be in a subdir)
1063 self.repo_dir = os.path.normpath(repo_dir)
1064 # Compute the subdir
1065 cwd = os.path.normpath(os.getcwd())
1066 assert cwd.startswith(self.repo_dir)
1067 self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
1068 if self.options.revision:
1069 self.base_rev = self.options.revision
1070 else:
1071 self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
1072
1073 def _GetRelPath(self, filename):
1074 """Get relative path of a file according to the current directory,
1075 given its logical path in the repo."""
1076 assert filename.startswith(self.subdir), filename
1077 return filename[len(self.subdir):].lstrip(r"\/")
1078
1079 def GenerateDiff(self, extra_args):
1080 # If no file specified, restrict to the current subdir
1081 extra_args = extra_args or ["."]
1082 cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
1083 data = RunShell(cmd, silent_ok=True)
1084 svndiff = []
1085 filecount = 0
1086 for line in data.splitlines():
1087 m = re.match("diff --git a/(\S+) b/(\S+)", line)
1088 if m:
1089           # Modify the line to make it look as if it came from svn diff.
1090 # With this modification no changes on the server side are required
1091 # to make upload.py work with Mercurial repos.
1092 # NOTE: for proper handling of moved/copied files, we have to use
1093 # the second filename.
1094 filename = m.group(2)
1095 svndiff.append("Index: %s" % filename)
1096 svndiff.append("=" * 67)
1097 filecount += 1
1098 logging.info(line)
1099 else:
1100 svndiff.append(line)
1101 if not filecount:
1102 ErrorExit("No valid patches found in output from hg diff")
1103 return "\n".join(svndiff) + "\n"
1104
1105 def GetUnknownFiles(self):
1106 """Return a list of files unknown to the VCS."""
1107 args = []
1108 status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
1109 silent_ok=True)
1110 unknown_files = []
1111 for line in status.splitlines():
1112 st, fn = line.split(" ", 1)
1113 if st == "?":
1114 unknown_files.append(fn)
1115 return unknown_files
1116
1117 def GetBaseFile(self, filename):
1118 # "hg status" and "hg cat" both take a path relative to the current subdir
1119 # rather than to the repo root, but "hg diff" has given us the full path
1120 # to the repo root.
1121 base_content = ""
1122 new_content = None
1123 is_binary = False
1124 oldrelpath = relpath = self._GetRelPath(filename)
1125 # "hg status -C" returns two lines for moved/copied files, one otherwise
1126 out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
1127 out = out.splitlines()
1128 # HACK: strip error message about missing file/directory if it isn't in
1129 # the working copy
1130 if out[0].startswith('%s: ' % relpath):
1131 out = out[1:]
1132 if len(out) > 1:
1133 # Moved/copied => considered as modified, use old filename to
1134 # retrieve base contents
1135 oldrelpath = out[1].strip()
1136 status = "M"
1137 else:
1138 status, _ = out[0].split(' ', 1)
1139 if status != "A":
1140 base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
1141 silent_ok=True)
1142 is_binary = "\0" in base_content # Mercurial's heuristic
1143 if status != "R":
1144 new_content = open(relpath, "rb").read()
1145 is_binary = is_binary or "\0" in new_content
1146 if is_binary and base_content:
1147 # Fetch again without converting newlines
1148 base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
1149 silent_ok=True, universal_newlines=False)
1150 if not is_binary or not self.IsImage(relpath):
1151 new_content = None
1152 return base_content, new_content, is_binary, status
1153
1154
1155 # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
1156 def SplitPatch(data):
1157 """Splits a patch into separate pieces for each file.
1158
1159 Args:
1160 data: A string containing the output of svn diff.
1161
1162 Returns:
1163 A list of 2-tuple (filename, text) where text is the svn diff output
1164 pertaining to filename.
1165 """
1166 patches = []
1167 filename = None
1168 diff = []
1169 for line in data.splitlines(True):
1170 new_filename = None
1171 if line.startswith('Index:'):
1172 unused, new_filename = line.split(':', 1)
1173 new_filename = new_filename.strip()
1174 elif line.startswith('Property changes on:'):
1175 unused, temp_filename = line.split(':', 1)
1176       # When a file is modified, paths use '/' between directories; however,
1177       # when a property is modified, '\' is used on Windows. Make them the same,
1178       # otherwise the file shows up twice.
1179 temp_filename = temp_filename.strip().replace('\\', '/')
1180 if temp_filename != filename:
1181 # File has property changes but no modifications, create a new diff.
1182 new_filename = temp_filename
1183 if new_filename:
1184 if filename and diff:
1185 patches.append((filename, ''.join(diff)))
1186 filename = new_filename
1187 diff = [line]
1188 continue
1189 if diff is not None:
1190 diff.append(line)
1191 if filename and diff:
1192 patches.append((filename, ''.join(diff)))
1193 return patches
1194
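# Illustrative SplitPatch() behavior (filenames are placeholders): a diff
# containing "Index: a.c" followed by its hunks and then "Index: b.c" with its
# hunks yields [("a.c", "<diff text for a.c>"), ("b.c", "<diff text for b.c>")].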
1195
1196 def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
1197 """Uploads a separate patch for each file in the diff output.
1198
1199 Returns a list of [patch_key, filename] for each file.
1200 """
1201 patches = SplitPatch(data)
1202 rv = []
1203 for patch in patches:
1204 if len(patch[1]) > MAX_UPLOAD_SIZE:
1205 print ("Not uploading the patch for " + patch[0] +
1206 " because the file is too large.")
1207 continue
1208 form_fields = [("filename", patch[0])]
1209 if not options.download_base:
1210 form_fields.append(("content_upload", "1"))
1211 files = [("data", "data.diff", patch[1])]
1212 ctype, body = EncodeMultipartFormData(form_fields, files)
1213 url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
1214 print "Uploading patch for " + patch[0]
1215 response_body = rpc_server.Send(url, body, content_type=ctype)
1216 lines = response_body.splitlines()
1217 if not lines or lines[0] != "OK":
1218 StatusUpdate(" --> %s" % response_body)
1219 sys.exit(1)
1220 rv.append([lines[1], patch[0]])
1221 return rv
1222
1223
1224 def GuessVCS(options):
1225 """Helper to guess the version control system.
1226
1227 This examines the current directory, guesses which VersionControlSystem
1228 we're using, and returns an instance of the appropriate class. Exit with an
1229 error if we can't figure it out.
1230
1231 Returns:
1232 A VersionControlSystem instance. Exits if the VCS can't be guessed.
1233 """
1234 # Mercurial has a command to get the base directory of a repository
1235 # Try running it, but don't die if we don't have hg installed.
1236 # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
1237 try:
1238 out, returncode = RunShellWithReturnCode(["hg", "root"])
1239 if returncode == 0:
1240 return MercurialVCS(options, out.strip())
1241 except OSError, (errno, message):
1242 if errno != 2: # ENOENT -- they don't have hg installed.
1243 raise
1244
1245 # Subversion has a .svn in all working directories.
1246 if os.path.isdir('.svn'):
1247 logging.info("Guessed VCS = Subversion")
1248 return SubversionVCS(options)
1249
1250 # Git has a command to test if you're in a git tree.
1251 # Try running it, but don't die if we don't have git installed.
1252 try:
1253 out, returncode = RunShellWithReturnCode(["git", "rev-parse",
1254 "--is-inside-work-tree"])
1255 if returncode == 0:
1256 return GitVCS(options)
1257 except OSError, (errno, message):
1258 if errno != 2: # ENOENT -- they don't have git installed.
1259 raise
1260
1261 ErrorExit(("Could not guess version control system. "
1262 "Are you in a working copy directory?"))
1263
1264
1265 def RealMain(argv, data=None):
1266 """The real main function.
1267
1268 Args:
1269 argv: Command line arguments.
1270 data: Diff contents. If None (default) the diff is generated by
1271 the VersionControlSystem implementation returned by GuessVCS().
1272
1273 Returns:
1274 A 2-tuple (issue id, patchset id).
1275 The patchset id is None if the base files are not uploaded by this
1276 script (applies only to SVN checkouts).
1277 """
1278 logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
1279 "%(lineno)s %(message)s "))
1280 os.environ['LC_ALL'] = 'C'
1281 options, args = parser.parse_args(argv[1:])
1282 global verbosity
1283 verbosity = options.verbose
1284 if verbosity >= 3:
1285 logging.getLogger().setLevel(logging.DEBUG)
1286 elif verbosity >= 2:
1287 logging.getLogger().setLevel(logging.INFO)
1288 vcs = GuessVCS(options)
1289 if isinstance(vcs, SubversionVCS):
1290 # base field is only allowed for Subversion.
1291 # Note: Fetching base files may become deprecated in future releases.
1292 base = vcs.GuessBase(options.download_base)
1293 else:
1294 base = None
1295 if not base and options.download_base:
1296 options.download_base = True
1297 logging.info("Enabled upload of base file")
1298 if not options.assume_yes:
1299 vcs.CheckForUnknownFiles()
1300 if data is None:
1301 data = vcs.GenerateDiff(args)
1302 files = vcs.GetBaseFiles(data)
1303 if verbosity >= 1:
1304 print "Upload server:", options.server, "(change with -s/--server)"
1305 if options.issue:
1306 prompt = "Message describing this patch set: "
1307 else:
1308 prompt = "New issue subject: "
1309 message = options.message or raw_input(prompt).strip()
1310 if not message:
1311 ErrorExit("A non-empty message is required")
1312 rpc_server = GetRpcServer(options)
1313 form_fields = [("subject", message)]
1314 if base:
1315 form_fields.append(("base", base))
1316 if options.issue:
1317 form_fields.append(("issue", str(options.issue)))
1318 if options.email:
1319 form_fields.append(("user", options.email))
1320 if options.reviewers:
1321 for reviewer in options.reviewers.split(','):
1322 if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
1323 ErrorExit("Invalid email address: %s" % reviewer)
1324 form_fields.append(("reviewers", options.reviewers))
1325 if options.cc:
1326 for cc in options.cc.split(','):
1327 if "@" in cc and not cc.split("@")[1].count(".") == 1:
1328 ErrorExit("Invalid email address: %s" % cc)
1329 form_fields.append(("cc", options.cc))
1330 description = options.description
1331 if options.description_file:
1332 if options.description:
1333 ErrorExit("Can't specify description and description_file")
1334 file = open(options.description_file, 'r')
1335 description = file.read()
1336 file.close()
1337 if description:
1338 form_fields.append(("description", description))
1339   # Send a hash of all the base files so the server can determine if a copy
1340 # already exists in an earlier patchset.
1341 base_hashes = ""
1342 for file, info in files.iteritems():
1343 if not info[0] is None:
1344 checksum = md5.new(info[0]).hexdigest()
1345 if base_hashes:
1346 base_hashes += "|"
1347 base_hashes += checksum + ":" + file
1348 form_fields.append(("base_hashes", base_hashes))
1349 # If we're uploading base files, don't send the email before the uploads, so
1350 # that it contains the file status.
1351 if options.send_mail and options.download_base:
1352 form_fields.append(("send_mail", "1"))
1353 if not options.download_base:
1354 form_fields.append(("content_upload", "1"))
1355 if len(data) > MAX_UPLOAD_SIZE:
1356 print "Patch is large, so uploading file patches separately."
1357 uploaded_diff_file = []
1358 form_fields.append(("separate_patches", "1"))
1359 else:
1360 uploaded_diff_file = [("data", "data.diff", data)]
1361 ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
1362 response_body = rpc_server.Send("/upload", body, content_type=ctype)
1363 patchset = None
1364 if not options.download_base or not uploaded_diff_file:
1365 lines = response_body.splitlines()
1366 if len(lines) >= 2:
1367 msg = lines[0]
1368 patchset = lines[1].strip()
1369 patches = [x.split(" ", 1) for x in lines[2:]]
1370 else:
1371 msg = response_body
1372 else:
1373 msg = response_body
1374 StatusUpdate(msg)
1375 if not response_body.startswith("Issue created.") and \
1376 not response_body.startswith("Issue updated."):
1377 sys.exit(0)
1378 issue = msg[msg.rfind("/")+1:]
1379
1380 if not uploaded_diff_file:
1381 result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
1382 if not options.download_base:
1383 patches = result
1384
1385 if not options.download_base:
1386 vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
1387 if options.send_mail:
1388 rpc_server.Send("/" + issue + "/mail", payload="")
1389 return issue, patchset
1390
1391
1392 def main():
1393 try:
1394 RealMain(sys.argv)
1395 except KeyboardInterrupt:
1396 print
1397 StatusUpdate("Interrupted.")
1398 sys.exit(1)
1399
1400
1401 if __name__ == "__main__":
1402 main()