	Made more extensive usage of context managers with open.
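The commit applies one pattern throughout: manual open()/close() pairs, with or without try/finally, are replaced by the with statement, which closes the file on both normal exit and exceptions. A minimal before/after sketch of that pattern (the file name here is illustrative, not taken from the patch below):

    # Before: the file is only closed reliably if the write is wrapped in try/finally.
    f = open('example.txt', 'w')
    try:
        f.write('Django')
    finally:
        f.close()

    # After: the context manager closes the file even if write() raises.
    with open('example.txt', 'w') as f:
        f.write('Django')

The hunks that follow make this substitution in django's management commands, cache and session backends, template loaders, test utilities, and documentation examples.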
		| @@ -22,7 +22,8 @@ def unique_messages(): | ||||
|                 cmd = 'msguniq "%s.po"' % pf | ||||
|                 stdout = os.popen(cmd) | ||||
|                 msg = stdout.read() | ||||
|                 open('%s.po' % pf, 'w').write(msg) | ||||
|                 with open('%s.po' % pf, 'w') as fp: | ||||
|                     fp.write(msg) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     unique_messages() | ||||
|   | ||||
| @@ -120,12 +120,9 @@ class SpatiaLiteCreation(DatabaseCreation): | ||||
|  | ||||
|             # Opening up the SpatiaLite SQL initialization file and executing | ||||
|             # as a script. | ||||
|             sql_fh = open(spatialite_sql, 'r') | ||||
|             try: | ||||
|             with open(spatialite_sql, 'r') as sql_fh: | ||||
|                 cur = self.connection._cursor() | ||||
|                 cur.executescript(sql_fh.read()) | ||||
|             finally: | ||||
|                 sql_fh.close() | ||||
|  | ||||
|     def spatialite_init_file(self): | ||||
|         # SPATIALITE_SQL may be placed in settings to tell GeoDjango | ||||
|   | ||||
| @@ -7,10 +7,10 @@ def fromfile(file_h): | ||||
|     """ | ||||
|     # If given a file name, get a real handle. | ||||
|     if isinstance(file_h, basestring):  | ||||
|         file_h = open(file_h, 'rb') | ||||
|  | ||||
|     # Reading in the file's contents, | ||||
|     buf = file_h.read() | ||||
|         with open(file_h, 'rb') as file_h: | ||||
|             buf = file_h.read() | ||||
|     else: | ||||
|         buf = file_h.read() | ||||
|  | ||||
|     # If we get WKB need to wrap in buffer(), so run through regexes. | ||||
|     if wkt_regex.match(buf) or hex_regex.match(buf): | ||||
|   | ||||
| @@ -47,18 +47,15 @@ class SessionStore(SessionBase): | ||||
|     def load(self): | ||||
|         session_data = {} | ||||
|         try: | ||||
|             session_file = open(self._key_to_file(), "rb") | ||||
|             try: | ||||
|             with open(self._key_to_file(), "rb") as session_file: | ||||
|                 file_data = session_file.read() | ||||
|                 # Don't fail if there is no data in the session file. | ||||
|                 # We may have opened the empty placeholder file. | ||||
|                 if file_data: | ||||
|                     try: | ||||
|                         session_data = self.decode(file_data) | ||||
|                     except (EOFError, SuspiciousOperation): | ||||
|                         self.create() | ||||
|             finally: | ||||
|                 session_file.close() | ||||
|             # Don't fail if there is no data in the session file. | ||||
|             # We may have opened the empty placeholder file. | ||||
|             if file_data: | ||||
|                 try: | ||||
|                     session_data = self.decode(file_data) | ||||
|                 except (EOFError, SuspiciousOperation): | ||||
|                     self.create() | ||||
|         except IOError: | ||||
|             self.create() | ||||
|         return session_data | ||||
|   | ||||
django/core/cache/backends/filebased.py
							| @@ -31,16 +31,13 @@ class FileBasedCache(BaseCache): | ||||
|  | ||||
|         fname = self._key_to_file(key) | ||||
|         try: | ||||
|             f = open(fname, 'rb') | ||||
|             try: | ||||
|             with open(fname, 'rb') as f: | ||||
|                 exp = pickle.load(f) | ||||
|                 now = time.time() | ||||
|                 if exp < now: | ||||
|                     self._delete(fname) | ||||
|                 else: | ||||
|                     return pickle.load(f) | ||||
|             finally: | ||||
|                 f.close() | ||||
|         except (IOError, OSError, EOFError, pickle.PickleError): | ||||
|             pass | ||||
|         return default | ||||
| @@ -61,13 +58,10 @@ class FileBasedCache(BaseCache): | ||||
|             if not os.path.exists(dirname): | ||||
|                 os.makedirs(dirname) | ||||
|  | ||||
|             f = open(fname, 'wb') | ||||
|             try: | ||||
|             with open(fname, 'wb') as f: | ||||
|                 now = time.time() | ||||
|                 pickle.dump(now + timeout, f, pickle.HIGHEST_PROTOCOL) | ||||
|                 pickle.dump(value, f, pickle.HIGHEST_PROTOCOL) | ||||
|             finally: | ||||
|                 f.close() | ||||
|         except (IOError, OSError): | ||||
|             pass | ||||
|  | ||||
| @@ -94,17 +88,14 @@ class FileBasedCache(BaseCache): | ||||
|         self.validate_key(key) | ||||
|         fname = self._key_to_file(key) | ||||
|         try: | ||||
|             f = open(fname, 'rb') | ||||
|             try: | ||||
|             with open(fname, 'rb') as f: | ||||
|                 exp = pickle.load(f) | ||||
|                 now = time.time() | ||||
|                 if exp < now: | ||||
|                     self._delete(fname) | ||||
|                     return False | ||||
|                 else: | ||||
|                     return True | ||||
|             finally: | ||||
|                 f.close() | ||||
|             now = time.time() | ||||
|             if exp < now: | ||||
|                 self._delete(fname) | ||||
|                 return False | ||||
|             else: | ||||
|                 return True | ||||
|         except (IOError, OSError, EOFError, pickle.PickleError): | ||||
|             return False | ||||
|  | ||||
|   | ||||
| @@ -9,10 +9,9 @@ Cookbook, licensed under the Python Software License. | ||||
| Example Usage:: | ||||
|  | ||||
|     >>> from django.core.files import locks | ||||
|     >>> f = open('./file', 'wb') | ||||
|     >>> locks.lock(f, locks.LOCK_EX) | ||||
|     >>> f.write('Django') | ||||
|     >>> f.close() | ||||
|     >>> with open('./file', 'wb') as f: | ||||
|     >>>     locks.lock(f, locks.LOCK_EX) | ||||
|     >>>     f.write('Django') | ||||
| """ | ||||
|  | ||||
| __all__ = ('LOCK_EX','LOCK_SH','LOCK_NB','lock','unlock') | ||||
|   | ||||
| @@ -59,8 +59,7 @@ def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_ove | ||||
|         pass | ||||
|  | ||||
|     # first open the old file, so that it won't go away | ||||
|     old_file = open(old_file_name, 'rb') | ||||
|     try: | ||||
|     with open(old_file_name, 'rb') as old_file: | ||||
|         # now open the new file, not forgetting allow_overwrite | ||||
|         fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) | | ||||
|                                     (not allow_overwrite and os.O_EXCL or 0)) | ||||
| @@ -73,8 +72,6 @@ def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_ove | ||||
|         finally: | ||||
|             locks.unlock(fd) | ||||
|             os.close(fd) | ||||
|     finally: | ||||
|         old_file.close() | ||||
|     copystat(old_file_name, new_file_name) | ||||
|  | ||||
|     try: | ||||
|   | ||||
| @@ -265,7 +265,8 @@ class EmailMessage(object): | ||||
|     def attach_file(self, path, mimetype=None): | ||||
|         """Attaches a file from the filesystem.""" | ||||
|         filename = os.path.basename(path) | ||||
|         content = open(path, 'rb').read() | ||||
|         with open(path, 'rb') as f: | ||||
|             content = f.read() | ||||
|         self.attach(filename, content, mimetype) | ||||
|  | ||||
|     def _create_message(self, msg): | ||||
|   | ||||
| @@ -5,8 +5,8 @@ from optparse import make_option | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
|  | ||||
| def has_bom(fn): | ||||
|     f = open(fn, 'r') | ||||
|     sample = f.read(4) | ||||
|     with open(fn, 'r') as f: | ||||
|         sample = f.read(4) | ||||
|     return sample[:3] == '\xef\xbb\xbf' or \ | ||||
|             sample.startswith(codecs.BOM_UTF16_LE) or \ | ||||
|             sample.startswith(codecs.BOM_UTF16_BE) | ||||
|   | ||||
| @@ -112,7 +112,8 @@ def copy_plural_forms(msgs, locale, domain, verbosity, stdout=sys.stdout): | ||||
|     for domain in domains: | ||||
|         django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain) | ||||
|         if os.path.exists(django_po): | ||||
|             m = plural_forms_re.search(open(django_po, 'rU').read()) | ||||
|             with open(django_po, 'rU') as fp: | ||||
|                 m = plural_forms_re.search(fp.read()) | ||||
|             if m: | ||||
|                 if verbosity > 1: | ||||
|                     stdout.write("copying plural forms: %s\n" % m.group('value')) | ||||
| @@ -141,11 +142,8 @@ def write_pot_file(potfile, msgs, file, work_file, is_templatized): | ||||
|         msgs = '\n'.join(dropwhile(len, msgs.split('\n'))) | ||||
|     else: | ||||
|         msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8') | ||||
|     f = open(potfile, 'ab') | ||||
|     try: | ||||
|         f.write(msgs) | ||||
|     finally: | ||||
|         f.close() | ||||
|     with open(potfile, 'ab') as fp: | ||||
|         fp.write(msgs) | ||||
|  | ||||
| def process_file(file, dirpath, potfile, domain, verbosity, | ||||
|                  extensions, wrap, location, stdout=sys.stdout): | ||||
| @@ -164,15 +162,13 @@ def process_file(file, dirpath, potfile, domain, verbosity, | ||||
|     if domain == 'djangojs' and file_ext in extensions: | ||||
|         is_templatized = True | ||||
|         orig_file = os.path.join(dirpath, file) | ||||
|         src_data = open(orig_file).read() | ||||
|         with open(orig_file) as fp: | ||||
|             src_data = fp.read() | ||||
|         src_data = prepare_js_for_gettext(src_data) | ||||
|         thefile = '%s.c' % file | ||||
|         work_file = os.path.join(dirpath, thefile) | ||||
|         f = open(work_file, "w") | ||||
|         try: | ||||
|             f.write(src_data) | ||||
|         finally: | ||||
|             f.close() | ||||
|         with open(work_file, "w") as fp: | ||||
|             fp.write(src_data) | ||||
|         cmd = ( | ||||
|             'xgettext -d %s -L C %s %s --keyword=gettext_noop ' | ||||
|             '--keyword=gettext_lazy --keyword=ngettext_lazy:1,2 ' | ||||
| @@ -184,14 +180,12 @@ def process_file(file, dirpath, potfile, domain, verbosity, | ||||
|         orig_file = os.path.join(dirpath, file) | ||||
|         is_templatized = file_ext in extensions | ||||
|         if is_templatized: | ||||
|             src_data = open(orig_file, "rU").read() | ||||
|             with open(orig_file, "rU") as fp: | ||||
|                 src_data = fp.read() | ||||
|             thefile = '%s.py' % file | ||||
|             content = templatize(src_data, orig_file[2:]) | ||||
|             f = open(os.path.join(dirpath, thefile), "w") | ||||
|             try: | ||||
|                 f.write(content) | ||||
|             finally: | ||||
|                 f.close() | ||||
|             with open(os.path.join(dirpath, thefile), "w") as fp: | ||||
|                 fp.write(content) | ||||
|         work_file = os.path.join(dirpath, thefile) | ||||
|         cmd = ( | ||||
|             'xgettext -d %s -L Python %s %s --keyword=gettext_noop ' | ||||
| @@ -232,11 +226,8 @@ def write_po_file(pofile, potfile, domain, locale, verbosity, stdout, | ||||
|         os.unlink(potfile) | ||||
|         raise CommandError("errors happened while running msguniq\n%s" % errors) | ||||
|     if os.path.exists(pofile): | ||||
|         f = open(potfile, 'w') | ||||
|         try: | ||||
|             f.write(msgs) | ||||
|         finally: | ||||
|             f.close() | ||||
|         with open(potfile, 'w') as fp: | ||||
|             fp.write(msgs) | ||||
|         msgs, errors = _popen('msgmerge %s %s -q "%s" "%s"' % | ||||
|                                 (wrap, location, pofile, potfile)) | ||||
|         if errors: | ||||
| @@ -247,11 +238,8 @@ def write_po_file(pofile, potfile, domain, locale, verbosity, stdout, | ||||
|         msgs = copy_plural_forms(msgs, locale, domain, verbosity, stdout) | ||||
|     msgs = msgs.replace( | ||||
|         "#. #-#-#-#-#  %s.pot (PACKAGE VERSION)  #-#-#-#-#\n" % domain, "") | ||||
|     f = open(pofile, 'wb') | ||||
|     try: | ||||
|         f.write(msgs) | ||||
|     finally: | ||||
|         f.close() | ||||
|     with open(pofile, 'wb') as fp: | ||||
|         fp.write(msgs) | ||||
|     os.unlink(potfile) | ||||
|     if no_obsolete: | ||||
|         msgs, errors = _popen('msgattrib %s %s -o "%s" --no-obsolete "%s"' % | ||||
|   | ||||
| @@ -157,13 +157,12 @@ def custom_sql_for_model(model, style, connection): | ||||
|                  os.path.join(app_dir, "%s.sql" % opts.object_name.lower())] | ||||
|     for sql_file in sql_files: | ||||
|         if os.path.exists(sql_file): | ||||
|             fp = open(sql_file, 'U') | ||||
|             for statement in statements.split(fp.read().decode(settings.FILE_CHARSET)): | ||||
|                 # Remove any comments from the file | ||||
|                 statement = re.sub(ur"--.*([\n\Z]|$)", "", statement) | ||||
|                 if statement.strip(): | ||||
|                     output.append(statement + u";") | ||||
|             fp.close() | ||||
|             with open(sql_file, 'U') as fp: | ||||
|                 for statement in statements.split(fp.read().decode(settings.FILE_CHARSET)): | ||||
|                     # Remove any comments from the file | ||||
|                     statement = re.sub(ur"--.*([\n\Z]|$)", "", statement) | ||||
|                     if statement.strip(): | ||||
|                         output.append(statement + u";") | ||||
|  | ||||
|     return output | ||||
|  | ||||
|   | ||||
| @@ -176,9 +176,8 @@ def runfastcgi(argset=[], **kwargs): | ||||
|         become_daemon(our_home_dir=options["workdir"], **daemon_kwargs) | ||||
|  | ||||
|     if options["pidfile"]: | ||||
|         fp = open(options["pidfile"], "w") | ||||
|         fp.write("%d\n" % os.getpid()) | ||||
|         fp.close() | ||||
|         with open(options["pidfile"], "w") as fp: | ||||
|             fp.write("%d\n" % os.getpid()) | ||||
|  | ||||
|     WSGIServer(get_internal_wsgi_application(), **wsgi_opts).run() | ||||
|  | ||||
|   | ||||
| @@ -328,9 +328,8 @@ class SsiNode(Node): | ||||
|             else: | ||||
|                 return '' # Fail silently for invalid includes. | ||||
|         try: | ||||
|             fp = open(filepath, 'r') | ||||
|             output = fp.read() | ||||
|             fp.close() | ||||
|             with open(filepath, 'r') as fp: | ||||
|                 output = fp.read() | ||||
|         except IOError: | ||||
|             output = '' | ||||
|         if self.parsed: | ||||
|   | ||||
| @@ -52,11 +52,8 @@ class Loader(BaseLoader): | ||||
|     def load_template_source(self, template_name, template_dirs=None): | ||||
|         for filepath in self.get_template_sources(template_name, template_dirs): | ||||
|             try: | ||||
|                 file = open(filepath) | ||||
|                 try: | ||||
|                     return (file.read().decode(settings.FILE_CHARSET), filepath) | ||||
|                 finally: | ||||
|                     file.close() | ||||
|                 with open(filepath) as fp: | ||||
|                     return (fp.read().decode(settings.FILE_CHARSET), filepath) | ||||
|             except IOError: | ||||
|                 pass | ||||
|         raise TemplateDoesNotExist(template_name) | ||||
|   | ||||
| @@ -34,11 +34,8 @@ class Loader(BaseLoader): | ||||
|         tried = [] | ||||
|         for filepath in self.get_template_sources(template_name, template_dirs): | ||||
|             try: | ||||
|                 file = open(filepath) | ||||
|                 try: | ||||
|                     return (file.read().decode(settings.FILE_CHARSET), filepath) | ||||
|                 finally: | ||||
|                     file.close() | ||||
|                 with open(filepath) as fp: | ||||
|                     return (fp.read().decode(settings.FILE_CHARSET), filepath) | ||||
|             except IOError: | ||||
|                 tried.append(filepath) | ||||
|         if tried: | ||||
|   | ||||
| @@ -226,7 +226,8 @@ def _load_testfile(filename, package, module_relative): | ||||
|                 # get_data() opens files as 'rb', so one must do the equivalent | ||||
|                 # conversion as universal newlines would do. | ||||
|                 return file_contents.replace(os.linesep, '\n'), filename | ||||
|     return open(filename).read(), filename | ||||
|     with open(filename) as fp: | ||||
|         return fp.read(), filename | ||||
|  | ||||
| def _indent(s, indent=4): | ||||
|     """ | ||||
| @@ -2519,9 +2520,8 @@ def debug_script(src, pm=False, globs=None): | ||||
|     # docs say, a file so created cannot be opened by name a second time | ||||
|     # on modern Windows boxes, and execfile() needs to open it. | ||||
|     srcfilename = tempfile.mktemp(".py", "doctestdebug") | ||||
|     f = open(srcfilename, 'w') | ||||
|     f.write(src) | ||||
|     f.close() | ||||
|     with open(srcfilename, 'w') as fp: | ||||
|         fp.write(src) | ||||
|  | ||||
|     try: | ||||
|         if globs: | ||||
|   | ||||
| @@ -15,9 +15,8 @@ Sample usage: | ||||
| ...     link=u"http://www.holovaty.com/test/", | ||||
| ...     description="Testing." | ||||
| ... ) | ||||
| >>> fp = open('test.rss', 'w') | ||||
| >>> feed.write(fp, 'utf-8') | ||||
| >>> fp.close() | ||||
| >>> with open('test.rss', 'w') as fp: | ||||
| >>>     feed.write(fp, 'utf-8') | ||||
|  | ||||
| For definitions of the different versions of RSS, see: | ||||
| http://diveintomark.org/archives/2004/02/04/incompatible-rss | ||||
|   | ||||
| @@ -333,11 +333,8 @@ class ExceptionReporter(object): | ||||
|                 source = source.splitlines() | ||||
|         if source is None: | ||||
|             try: | ||||
|                 f = open(filename) | ||||
|                 try: | ||||
|                     source = f.readlines() | ||||
|                 finally: | ||||
|                     f.close() | ||||
|                 with open(filename) as fp: | ||||
|                     source = fp.readlines() | ||||
|             except (OSError, IOError): | ||||
|                 pass | ||||
|         if source is None: | ||||
|   | ||||
| @@ -18,33 +18,18 @@ def process_file(fn, lines): | ||||
|     lines.insert(0, '\n') | ||||
|     lines.insert(0, '.. %s:\n' % target_name(fn)) | ||||
|     try: | ||||
|         f = open(fn, 'w') | ||||
|         with open(fn, 'w') as fp: | ||||
|             fp.writelines(lines) | ||||
|     except IOError: | ||||
|         print("Can't open %s for writing. Not touching it." % fn) | ||||
|         return | ||||
|     try: | ||||
|         f.writelines(lines) | ||||
|     except IOError: | ||||
|         print("Can't write to %s. Not touching it." % fn) | ||||
|     finally: | ||||
|         f.close() | ||||
|  | ||||
| def has_target(fn): | ||||
|     try: | ||||
|         f = open(fn, 'r') | ||||
|         with open(fn, 'r') as fp: | ||||
|             lines = fp.readlines() | ||||
|     except IOError: | ||||
|         print("Can't open %s. Not touching it." % fn) | ||||
|         print("Can't open or read %s. Not touching it." % fn) | ||||
|         return (True, None) | ||||
|     readok = True | ||||
|     try: | ||||
|         lines = f.readlines() | ||||
|     except IOError: | ||||
|         print("Can't read %s. Not touching it." % fn) | ||||
|         readok = False | ||||
|     finally: | ||||
|         f.close() | ||||
|         if not readok: | ||||
|             return (True, None) | ||||
|  | ||||
|     #print fn, len(lines) | ||||
|     if len(lines) < 1: | ||||
| @@ -82,7 +67,7 @@ def main(argv=None): | ||||
|                 print("Adding xref to %s" % fn) | ||||
|                 process_file(fn, lines) | ||||
|         else: | ||||
|             print "Skipping %s: already has a xref" % fn | ||||
|             print("Skipping %s: already has a xref" % fn) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     sys.exit(main()) | ||||
|   | ||||
| @@ -210,8 +210,7 @@ class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder): | ||||
|                         if t == "templatefilter" and l == "ref/templates/builtins"], | ||||
|         } | ||||
|         outfilename = os.path.join(self.outdir, "templatebuiltins.js") | ||||
|         f = open(outfilename, 'wb') | ||||
|         f.write('var django_template_builtins = ') | ||||
|         json.dump(templatebuiltins, f) | ||||
|         f.write(';\n') | ||||
|         f.close(); | ||||
|         with open(outfilename, 'wb') as fp: | ||||
|             fp.write('var django_template_builtins = ') | ||||
|             json.dump(templatebuiltins, fp) | ||||
|             fp.write(';\n') | ||||
|   | ||||
| @@ -38,7 +38,8 @@ ALWAYS_SKIP = [ | ||||
| ] | ||||
|  | ||||
| def fixliterals(fname): | ||||
|     data = open(fname).read() | ||||
|     with open(fname) as fp: | ||||
|         data = fp.read() | ||||
|      | ||||
|     last = 0 | ||||
|     new = [] | ||||
| @@ -101,7 +102,8 @@ def fixliterals(fname): | ||||
|         lastvalues[m.group(1)] = replace_value | ||||
|      | ||||
|     new.append(data[last:]) | ||||
|     open(fname, "w").write("".join(new)) | ||||
|     with open(fname, "w") as fp: | ||||
|         fp.write("".join(new)) | ||||
|      | ||||
|     storage["lastvalues"] = lastvalues | ||||
|     storage.close() | ||||
|   | ||||
| @@ -239,9 +239,8 @@ Sample usage:: | ||||
|     ...     link=u"http://www.holovaty.com/test/", | ||||
|     ...     description="Testing." | ||||
|     ... ) | ||||
|     >>> fp = open('test.rss', 'w') | ||||
|     >>> feed.write(fp, 'utf-8') | ||||
|     >>> fp.close() | ||||
|     >>> with open('test.rss', 'w') as fp: | ||||
|     >>>     feed.write(fp, 'utf-8') | ||||
|  | ||||
| For simplifying the selection of a generator use ``feedgenerator.DefaultFeed`` | ||||
| which is currently ``Rss201rev2Feed`` | ||||
|   | ||||
| @@ -101,10 +101,9 @@ objects; see `UploadedFile objects`_ for a complete reference. | ||||
| Putting it all together, here's a common way you might handle an uploaded file:: | ||||
|  | ||||
|     def handle_uploaded_file(f): | ||||
|         destination = open('some/file/name.txt', 'wb+') | ||||
|         for chunk in f.chunks(): | ||||
|             destination.write(chunk) | ||||
|         destination.close() | ||||
|         with open('some/file/name.txt', 'wb+') as destination: | ||||
|             for chunk in f.chunks(): | ||||
|                 destination.write(chunk) | ||||
|  | ||||
| Looping over ``UploadedFile.chunks()`` instead of using ``read()`` ensures that | ||||
| large files don't overwhelm your system's memory. | ||||
|   | ||||
| @@ -36,8 +36,8 @@ You can also use a serializer object directly:: | ||||
| This is useful if you want to serialize data directly to a file-like object | ||||
| (which includes an :class:`~django.http.HttpResponse`):: | ||||
|  | ||||
|     out = open("file.xml", "w") | ||||
|     xml_serializer.serialize(SomeModel.objects.all(), stream=out) | ||||
|     with open("file.xml", "w") as out: | ||||
|         xml_serializer.serialize(SomeModel.objects.all(), stream=out) | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|   | ||||
| @@ -770,9 +770,8 @@ arguments at time of construction: | ||||
|         wish to upload as a value. For example:: | ||||
|  | ||||
|             >>> c = Client() | ||||
|             >>> f = open('wishlist.doc') | ||||
|             >>> c.post('/customers/wishes/', {'name': 'fred', 'attachment': f}) | ||||
|             >>> f.close() | ||||
|             >>> with open('wishlist.doc') as fp: | ||||
|             >>>     c.post('/customers/wishes/', {'name': 'fred', 'attachment': fp}) | ||||
|  | ||||
|         (The name ``attachment`` here is not relevant; use whatever name your | ||||
|         file-processing code expects.) | ||||
|   | ||||
| @@ -170,14 +170,13 @@ class Template(object): | ||||
|         try: | ||||
|             return self._content | ||||
|         except AttributeError: | ||||
|             fd = open(self.absolute_filename) | ||||
|             try: | ||||
|                 content = fd.read().decode(TEMPLATE_ENCODING) | ||||
|             except UnicodeDecodeError as e: | ||||
|                 message = '%s in %s' % ( | ||||
|                     e[4], self.absolute_filename.encode('UTF-8', 'ignore')) | ||||
|                 raise UnicodeDecodeError(*(e.args[:4] + (message,))) | ||||
|             fd.close() | ||||
|             with open(self.absolute_filename) as fd: | ||||
|                 try: | ||||
|                     content = fd.read().decode(TEMPLATE_ENCODING) | ||||
|                 except UnicodeDecodeError as e: | ||||
|                     message = '%s in %s' % ( | ||||
|                         e[4], self.absolute_filename.encode('UTF-8', 'ignore')) | ||||
|                     raise UnicodeDecodeError(*(e.args[:4] + (message,))) | ||||
|             self._content = content | ||||
|             return content | ||||
|     content = property(content) | ||||
| @@ -271,9 +270,8 @@ def get_python_code(paths): | ||||
|             for f in filenames: | ||||
|                 if len([True for e in PYTHON_SOURCE_EXTENSIONS if f.endswith(e)]) > 0: | ||||
|                     fn = os.path.join(dirpath, f) | ||||
|                     fd = open(fn) | ||||
|                     content = [l.decode(PYTHON_ENCODING) for l in fd.readlines()] | ||||
|                     fd.close() | ||||
|                     with open(fn) as fd: | ||||
|                         content = [l.decode(PYTHON_ENCODING) for l in fd.readlines()] | ||||
|                     retval.append((fn, content)) | ||||
|     return retval | ||||
|  | ||||
|   | ||||
| @@ -1242,8 +1242,10 @@ class OldFormForXTests(TestCase): | ||||
|         # it comes to validation. This specifically tests that #6302 is fixed for | ||||
|         # both file fields and image fields. | ||||
|  | ||||
|         image_data = open(os.path.join(os.path.dirname(__file__), "test.png"), 'rb').read() | ||||
|         image_data2 = open(os.path.join(os.path.dirname(__file__), "test2.png"), 'rb').read() | ||||
|         with open(os.path.join(os.path.dirname(__file__), "test.png"), 'rb') as fp: | ||||
|             image_data = fp.read() | ||||
|         with open(os.path.join(os.path.dirname(__file__), "test2.png"), 'rb') as fp: | ||||
|             image_data2 = fp.read() | ||||
|  | ||||
|         f = ImageFileForm( | ||||
|                 data={'description': u'An image'}, | ||||
|   | ||||
| @@ -27,32 +27,32 @@ class AdminScriptTestCase(unittest.TestCase): | ||||
|         if is_dir: | ||||
|             settings_dir = os.path.join(test_dir, filename) | ||||
|             os.mkdir(settings_dir) | ||||
|             settings_file = open(os.path.join(settings_dir, '__init__.py'), 'w') | ||||
|             settings_file_path = os.path.join(settings_dir, '__init__.py') | ||||
|         else: | ||||
|             settings_file = open(os.path.join(test_dir, filename), 'w') | ||||
|         settings_file.write('# Settings file automatically generated by regressiontests.admin_scripts test case\n') | ||||
|         exports = [ | ||||
|             'DATABASES', | ||||
|             'ROOT_URLCONF', | ||||
|             'SECRET_KEY', | ||||
|         ] | ||||
|         for s in exports: | ||||
|             if hasattr(settings, s): | ||||
|                 o = getattr(settings, s) | ||||
|                 if not isinstance(o, dict): | ||||
|                     o = "'%s'" % o | ||||
|                 settings_file.write("%s = %s\n" % (s, o)) | ||||
|             settings_file_path = os.path.join(test_dir, filename) | ||||
|  | ||||
|         if apps is None: | ||||
|             apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'regressiontests.admin_scripts'] | ||||
|         with open(settings_file_path, 'w') as settings_file: | ||||
|             settings_file.write('# Settings file automatically generated by regressiontests.admin_scripts test case\n') | ||||
|             exports = [ | ||||
|                 'DATABASES', | ||||
|                 'ROOT_URLCONF', | ||||
|                 'SECRET_KEY', | ||||
|             ] | ||||
|             for s in exports: | ||||
|                 if hasattr(settings, s): | ||||
|                     o = getattr(settings, s) | ||||
|                     if not isinstance(o, dict): | ||||
|                         o = "'%s'" % o | ||||
|                     settings_file.write("%s = %s\n" % (s, o)) | ||||
|  | ||||
|         settings_file.write("INSTALLED_APPS = %s\n" % apps) | ||||
|             if apps is None: | ||||
|                 apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'regressiontests.admin_scripts'] | ||||
|  | ||||
|         if sdict: | ||||
|             for k, v in sdict.items(): | ||||
|                 settings_file.write("%s = %s\n" % (k, v)) | ||||
|             settings_file.write("INSTALLED_APPS = %s\n" % apps) | ||||
|  | ||||
|         settings_file.close() | ||||
|             if sdict: | ||||
|                 for k, v in sdict.items(): | ||||
|                     settings_file.write("%s = %s\n" % (k, v)) | ||||
|  | ||||
|     def remove_settings(self, filename, is_dir=False): | ||||
|         full_name = os.path.join(test_dir, filename) | ||||
| @@ -989,13 +989,12 @@ class ManageSettingsWithImportError(AdminScriptTestCase): | ||||
|         if is_dir: | ||||
|             settings_dir = os.path.join(test_dir, filename) | ||||
|             os.mkdir(settings_dir) | ||||
|             settings_file = open(os.path.join(settings_dir, '__init__.py'), 'w') | ||||
|             settings_file_path = os.path.join(settings_dir, '__init__.py') | ||||
|         else: | ||||
|             settings_file = open(os.path.join(test_dir, filename), 'w') | ||||
|         settings_file.write('# Settings file automatically generated by regressiontests.admin_scripts test case\n') | ||||
|         settings_file.write('# The next line will cause an import error:\nimport foo42bar\n') | ||||
|  | ||||
|         settings_file.close() | ||||
|             settings_file_path = os.path.join(test_dir, filename) | ||||
|         with open(settings_file_path, 'w') as settings_file: | ||||
|             settings_file.write('# Settings file automatically generated by regressiontests.admin_scripts test case\n') | ||||
|             settings_file.write('# The next line will cause an import error:\nimport foo42bar\n') | ||||
|  | ||||
|     def test_builtin_command(self): | ||||
|         "import error: manage.py builtin commands shows useful diagnostic info when settings with import errors is provided" | ||||
|   | ||||
| @@ -24,7 +24,8 @@ class Bug639Test(unittest.TestCase): | ||||
|         """ | ||||
|         # Grab an image for testing. | ||||
|         filename = os.path.join(os.path.dirname(__file__), "test.jpg") | ||||
|         img = open(filename, "rb").read() | ||||
|         with open(filename, "rb") as fp: | ||||
|             img = fp.read() | ||||
|  | ||||
|         # Fake a POST QueryDict and FILES MultiValueDict. | ||||
|         data = {'title': 'Testing'} | ||||
|   | ||||
| @@ -24,11 +24,12 @@ UNICODE_FILENAME = u'test-0123456789_中文_Orléans.jpg' | ||||
|  | ||||
| class FileUploadTests(TestCase): | ||||
|     def test_simple_upload(self): | ||||
|         post_data = { | ||||
|             'name': 'Ringo', | ||||
|             'file_field': open(__file__), | ||||
|         } | ||||
|         response = self.client.post('/file_uploads/upload/', post_data) | ||||
|         with open(__file__) as fp: | ||||
|             post_data = { | ||||
|                 'name': 'Ringo', | ||||
|                 'file_field': fp, | ||||
|             } | ||||
|             response = self.client.post('/file_uploads/upload/', post_data) | ||||
|         self.assertEqual(response.status_code, 200) | ||||
|  | ||||
|     def test_large_upload(self): | ||||
| @@ -87,17 +88,16 @@ class FileUploadTests(TestCase): | ||||
|         tdir = tempfile.gettempdir() | ||||
|  | ||||
|         # This file contains chinese symbols and an accented char in the name. | ||||
|         file1 = open(os.path.join(tdir, UNICODE_FILENAME.encode('utf-8')), 'w+b') | ||||
|         file1.write('b' * (2 ** 10)) | ||||
|         file1.seek(0) | ||||
|         with open(os.path.join(tdir, UNICODE_FILENAME.encode('utf-8')), 'w+b') as file1: | ||||
|             file1.write('b' * (2 ** 10)) | ||||
|             file1.seek(0) | ||||
|  | ||||
|         post_data = { | ||||
|             'file_unicode': file1, | ||||
|             } | ||||
|             post_data = { | ||||
|                 'file_unicode': file1, | ||||
|                 } | ||||
|  | ||||
|         response = self.client.post('/file_uploads/unicode_name/', post_data) | ||||
|             response = self.client.post('/file_uploads/unicode_name/', post_data) | ||||
|  | ||||
|         file1.close() | ||||
|         try: | ||||
|             os.unlink(file1.name) | ||||
|         except: | ||||
| @@ -294,10 +294,6 @@ class FileUploadTests(TestCase): | ||||
|                 p = request.POST | ||||
|                 return ret | ||||
|  | ||||
|         post_data = { | ||||
|             'name': 'Ringo', | ||||
|             'file_field': open(__file__), | ||||
|         } | ||||
|         # Maybe this is a little more complicated that it needs to be; but if | ||||
|         # the django.test.client.FakePayload.read() implementation changes then | ||||
|         # this test would fail.  So we need to know exactly what kind of error | ||||
| @@ -310,16 +306,21 @@ class FileUploadTests(TestCase): | ||||
|         # install the custom handler that tries to access request.POST | ||||
|         self.client.handler = POSTAccessingHandler() | ||||
|  | ||||
|         try: | ||||
|             response = self.client.post('/file_uploads/upload_errors/', post_data) | ||||
|         except reference_error.__class__ as err: | ||||
|             self.assertFalse( | ||||
|                 str(err) == str(reference_error), | ||||
|                 "Caught a repeated exception that'll cause an infinite loop in file uploads." | ||||
|             ) | ||||
|         except Exception as err: | ||||
|             # CustomUploadError is the error that should have been raised | ||||
|             self.assertEqual(err.__class__, uploadhandler.CustomUploadError) | ||||
|         with open(__file__) as fp: | ||||
|             post_data = { | ||||
|                 'name': 'Ringo', | ||||
|                 'file_field': fp, | ||||
|             } | ||||
|             try: | ||||
|                 response = self.client.post('/file_uploads/upload_errors/', post_data) | ||||
|             except reference_error.__class__ as err: | ||||
|                 self.assertFalse( | ||||
|                     str(err) == str(reference_error), | ||||
|                     "Caught a repeated exception that'll cause an infinite loop in file uploads." | ||||
|                 ) | ||||
|             except Exception as err: | ||||
|                 # CustomUploadError is the error that should have been raised | ||||
|                 self.assertEqual(err.__class__, uploadhandler.CustomUploadError) | ||||
|  | ||||
|     def test_filename_case_preservation(self): | ||||
|         """ | ||||
| @@ -382,8 +383,7 @@ class DirectoryCreationTests(unittest.TestCase): | ||||
|     def test_not_a_directory(self): | ||||
|         """The correct IOError is raised when the upload directory name exists but isn't a directory""" | ||||
|         # Create a file with the upload directory name | ||||
|         fd = open(UPLOAD_TO, 'w') | ||||
|         fd.close() | ||||
|         open(UPLOAD_TO, 'w').close() | ||||
|         try: | ||||
|             self.obj.testfile.save('foo.txt', SimpleUploadedFile('foo.txt', 'x')) | ||||
|         except IOError as err: | ||||
|   | ||||
| @@ -517,7 +517,8 @@ class FileBackendTests(BaseEmailBackendTests, TestCase): | ||||
|     def get_mailbox_content(self): | ||||
|         messages = [] | ||||
|         for filename in os.listdir(self.tmp_dir): | ||||
|             session = open(os.path.join(self.tmp_dir, filename)).read().split('\n' + ('-' * 79) + '\n') | ||||
|             with open(os.path.join(self.tmp_dir, filename)) as fp: | ||||
|                 session = fp.read().split('\n' + ('-' * 79) + '\n') | ||||
|             messages.extend(email.message_from_string(m) for m in session if m) | ||||
|         return messages | ||||
|  | ||||
| @@ -528,7 +529,8 @@ class FileBackendTests(BaseEmailBackendTests, TestCase): | ||||
|         connection.send_messages([msg]) | ||||
|  | ||||
|         self.assertEqual(len(os.listdir(self.tmp_dir)), 1) | ||||
|         message = email.message_from_file(open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0]))) | ||||
|         with open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0])) as fp: | ||||
|             message = email.message_from_file(fp) | ||||
|         self.assertEqual(message.get_content_type(), 'text/plain') | ||||
|         self.assertEqual(message.get('subject'), 'Subject') | ||||
|         self.assertEqual(message.get('from'), 'from@example.com') | ||||
|   | ||||
| @@ -29,7 +29,8 @@ class StaticTests(TestCase): | ||||
|         for filename in media_files: | ||||
|             response = self.client.get('/views/%s/%s' % (self.prefix, filename)) | ||||
|             file_path = path.join(media_dir, filename) | ||||
|             self.assertEqual(open(file_path).read(), response.content) | ||||
|             with open(file_path) as fp: | ||||
|                 self.assertEqual(fp.read(), response.content) | ||||
|             self.assertEqual(len(response.content), int(response['Content-Length'])) | ||||
|             self.assertEqual(mimetypes.guess_type(file_path)[1], response.get('Content-Encoding', None)) | ||||
|  | ||||
| @@ -40,15 +41,15 @@ class StaticTests(TestCase): | ||||
|     def test_copes_with_empty_path_component(self): | ||||
|         file_name = 'file.txt' | ||||
|         response = self.client.get('/views/%s//%s' % (self.prefix, file_name)) | ||||
|         file = open(path.join(media_dir, file_name)) | ||||
|         self.assertEqual(file.read(), response.content) | ||||
|         with open(path.join(media_dir, file_name)) as fp: | ||||
|             self.assertEqual(fp.read(), response.content) | ||||
|  | ||||
|     def test_is_modified_since(self): | ||||
|         file_name = 'file.txt' | ||||
|         response = self.client.get('/views/%s/%s' % (self.prefix, file_name), | ||||
|             HTTP_IF_MODIFIED_SINCE='Thu, 1 Jan 1970 00:00:00 GMT') | ||||
|         file = open(path.join(media_dir, file_name)) | ||||
|         self.assertEqual(file.read(), response.content) | ||||
|         with open(path.join(media_dir, file_name)) as fp: | ||||
|             self.assertEqual(fp.read(), response.content) | ||||
|  | ||||
|     def test_not_modified_since(self): | ||||
|         file_name = 'file.txt' | ||||
| @@ -70,8 +71,8 @@ class StaticTests(TestCase): | ||||
|         invalid_date = 'Mon, 28 May 999999999999 28:25:26 GMT' | ||||
|         response = self.client.get('/views/%s/%s' % (self.prefix, file_name), | ||||
|                                    HTTP_IF_MODIFIED_SINCE=invalid_date) | ||||
|         file = open(path.join(media_dir, file_name)) | ||||
|         self.assertEqual(file.read(), response.content) | ||||
|         with open(path.join(media_dir, file_name)) as fp: | ||||
|             self.assertEqual(fp.read(), response.content) | ||||
|         self.assertEqual(len(response.content), | ||||
|                           int(response['Content-Length'])) | ||||
|  | ||||
| @@ -85,8 +86,8 @@ class StaticTests(TestCase): | ||||
|         invalid_date = ': 1291108438, Wed, 20 Oct 2010 14:05:00 GMT' | ||||
|         response = self.client.get('/views/%s/%s' % (self.prefix, file_name), | ||||
|                                    HTTP_IF_MODIFIED_SINCE=invalid_date) | ||||
|         file = open(path.join(media_dir, file_name)) | ||||
|         self.assertEqual(file.read(), response.content) | ||||
|         with open(path.join(media_dir, file_name)) as fp: | ||||
|             self.assertEqual(fp.read(), response.content) | ||||
|         self.assertEqual(len(response.content), | ||||
|                           int(response['Content-Length'])) | ||||
|  | ||||
|   | ||||
| @@ -61,6 +61,7 @@ def get_test_modules(): | ||||
|         for f in os.listdir(dirpath): | ||||
|             if (f.startswith('__init__') or | ||||
|                 f.startswith('.') or | ||||
|                 f == '__pycache__' or | ||||
|                 f.startswith('sql') or | ||||
|                 os.path.basename(f) in REGRESSION_SUBDIRS_TO_SKIP): | ||||
|                 continue | ||||
|   | ||||