Ticket #2070: 2070_latest7354.diff
File 2070_latest7354.diff, 68.8 KB (added by , 17 years ago)
django/test/client.py
 BOUNDARY = 'BoUnDaRyStRiNg'
 MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY

+class FakePayload(object):
+    """
+    A wrapper around StringIO that restricts what can be read,
+    since data from the network can't be seeked and cannot
+    be read outside of its content length (or else we hang).
+    """
+    def __init__(self, content):
+        self.__content = StringIO(content)
+        self.__len = len(content)
+
+    def read(self, num_bytes=None):
+        if num_bytes is None:
+            num_bytes = self.__len or 1
+        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
+        content = self.__content.read(num_bytes)
+        self.__len -= num_bytes
+        return content
+
+
 class ClientHandler(BaseHandler):
     """
     A HTTP Handler that can be used for testing purposes.
…
         'CONTENT_TYPE':   content_type,
         'PATH_INFO':      urllib.unquote(path),
         'REQUEST_METHOD': 'POST',
-        'wsgi.input':     StringIO(post_data),
+        'wsgi.input':     FakePayload(post_data),
     }
     r.update(extra)
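Note: the test-client change above replaces the plain StringIO payload with FakePayload so that test POSTs exercise the same bounded-read behaviour as a real socket. A minimal sketch of that behaviour, not part of the patch, assuming the patched django/test/client.py is importable:

    from django.test.client import FakePayload

    payload = FakePayload('abcdef')
    assert payload.read(4) == 'abcd'
    assert payload.read(2) == 'ef'
    try:
        payload.read(1)       # asking for more than the declared length...
    except AssertionError:
        pass                  # ...fails fast instead of blocking like a real socket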
django/http/multipartparser.py
1 """ 2 MultiPart parsing for file uploads. 3 4 This object will take the file upload headers 5 and the file upload handler and chunk the upload 6 data for the handler to deal with. 7 """ 8 from django.utils.datastructures import MultiValueDict 9 from django.utils.encoding import force_unicode 10 11 __all__ = ('MultiPartParser','MultiPartParserError','InputStreamExhausted') 12 13 class MultiPartParserError(Exception): 14 pass 15 16 class InputStreamExhausted(Exception): 17 """ No more reads are allowed from this device. """ 18 pass 19 20 class MultiPartParser(object): 21 """ 22 A rfc2388 multipart/form-data parser. 23 24 parse() reads the input stream in chunk_size chunks and returns a 25 tuple of (POST MultiValueDict, FILES MultiValueDict). If 26 file_upload_dir is defined files will be streamed to temporary 27 files in the specified directory. 28 """ 29 def __init__(self, META, input_data, upload_handlers, encoding=None): 30 """ 31 Initialize the MultiPartParser object. 32 33 *META* -- The standard META dictionary in Django request objects. 34 *input_data* -- The raw post data, as a bytestring. 35 *upload_handler* -- An object of type UploadHandler 36 that performs operations on the uploaded 37 data. 38 *encoding* -- The encoding with which to treat the incoming data. 39 """ 40 # Import cgi utilities for (near) future use. 41 global parse_header, valid_boundary, settings 42 from django.conf import settings 43 from cgi import valid_boundary, parse_header 44 45 ####### 46 # Check basic headers 47 ####### 48 49 # 50 # Content-Type should containt multipart and the boundary information. 51 #### 52 53 content_type = META.get('HTTP_CONTENT_TYPE', META.get('CONTENT_TYPE', '')) 54 if not content_type.startswith('multipart/'): 55 raise MultiPartParserError('Invalid Content-Type: %s' % 56 content_type) 57 58 # Parse the header to get the boundary to split the parts. 59 ctypes, opts = parse_header(content_type) 60 boundary = opts.get('boundary') 61 if not boundary or not valid_boundary(boundary): 62 raise MultiPartParserError('Invalid boundary in multipart: %s' % 63 boundary) 64 65 66 # 67 # Content-Length should contain the length of the body we are about 68 # to receive. 69 #### 70 try: 71 content_length = int(META.get('HTTP_CONTENT_LENGTH', 72 META.get('CONTENT_LENGTH',0))) 73 except (ValueError, TypeError): 74 # For now set it to 0...we'll try again later on down. 75 content_length = 0 76 77 # If we have better knowledge of how much 78 # data is remaining in the request stream, 79 # we should use that. (modpython for instance) 80 #try: 81 # remaining = input_data.remaining 82 # if remaining is not None and \ 83 # (content_length is None or remaining < content_length): 84 # content_length = remaining 85 #except AttributeError: 86 # pass 87 88 if not content_length: 89 # This means we shouldn't continue...raise an error. 90 raise MultiPartParserError("Invalid content length: %r" % content_length) 91 92 self._boundary = boundary 93 self._input_data = input_data 94 95 # For compatibility with low-level network APIs (with 32-bit integers), 96 # the chunk size should be < 2^31: 97 self._chunk_size = min(2147483647, *[x.chunk_size for x in upload_handlers 98 if x.chunk_size]) 99 100 self._meta = META 101 self._encoding = encoding or settings.DEFAULT_CHARSET 102 self._content_length = content_length 103 self._upload_handlers = upload_handlers 104 105 def parse(self): 106 """ 107 Parse the POST data and break it into a FILES MultiValueDict 108 and a POST MultiValueDict. 
109 110 *returns* -- A tuple containing the POST and FILES dictionary, 111 respectively. 112 """ 113 from django.core.files.fileuploadhandler import StopUpload, SkipFile 114 from django.http import QueryDict 115 116 encoding = self._encoding 117 handlers = self._upload_handlers 118 119 limited_input_data = LimitBytes(self._input_data, self._content_length) 120 121 # See if the handler will want to take care of the parsing. 122 # This allows overriding everything if somebody wants it. 123 for handler in handlers: 124 result = handler.handle_raw_input(limited_input_data, 125 self._meta, 126 self._content_length, 127 self._boundary, 128 encoding) 129 if result is not None: 130 return result[0], result[1] 131 132 # Create the data structures to be used later. 133 self._post = QueryDict('', mutable=True) 134 self._files = MultiValueDict() 135 136 # Instantiate the parser and stream: 137 stream = LazyStream(ChunkIter(limited_input_data, self._chunk_size)) 138 for item_type, meta_data, stream in Parser(stream, self._boundary): 139 try: 140 disposition = meta_data['content-disposition'][1] 141 field_name = disposition['name'].strip() 142 except (KeyError, IndexError, AttributeError): 143 continue 144 145 transfer_encoding = meta_data.get('content-transfer-encoding') 146 147 field_name = force_unicode(field_name, encoding, errors='replace') 148 149 if item_type == 'FIELD': 150 # This is a post field, we can just set it in the post 151 if transfer_encoding == 'base64': 152 raw_data = stream.read() 153 try: 154 data = str(raw_data).decode('base64') 155 except: 156 data = raw_data 157 else: 158 data = stream.read() 159 160 self._post.appendlist(field_name, 161 force_unicode(data, encoding, errors='replace')) 162 elif item_type == 'FILE': 163 # This is a file, use the handler... 164 file_successful = True 165 file_name = self.IE_sanitize(disposition.get('filename')) 166 if not file_name: 167 continue 168 169 file_name = force_unicode(file_name, encoding, errors='replace') 170 171 content_type = meta_data.get('content-type', ('',))[0].strip() 172 try: 173 charset = meta_data.get('content-type', (0,{}))[1].get('charset', None) 174 except: 175 charset = None 176 177 try: 178 content_length = int(meta_data.get('content-length')[0]) 179 except (IndexError, TypeError, ValueError): 180 content_length = None 181 182 counter = 0 183 try: 184 for handler in handlers: 185 retval = handler.new_file(field_name, file_name, 186 content_type, content_length, 187 charset) 188 if retval: 189 break 190 191 for chunk in stream: 192 if transfer_encoding == 'base64': 193 # We only special-case base64 transfer encoding 194 try: 195 chunk = str(chunk).decode('base64') 196 except Exception, e: 197 # Since this is only a chunk, any error is an unfixable error. 198 raise MultiValueParseError("Could not decode base64 data: %r" % e) 199 200 chunk_length = len(chunk) 201 counter += chunk_length 202 for handler in handlers: 203 chunk = handler.receive_data_chunk(chunk, 204 counter - chunk_length, 205 counter) 206 if chunk is None: 207 break 208 209 except (StopUpload, SkipFile), e: 210 file_successful = False 211 if isinstance(e, SkipFile): 212 # Just use up the rest of this file... 213 stream.exhaust() 214 elif isinstance(e, StopUpload): 215 # Abort the parsing and break 216 parser.abort() 217 break 218 else: 219 # Only do this if the handler didn't raise an abort error 220 for handler in handlers: 221 file_obj = handler.file_complete(counter) 222 if file_obj: 223 # If it returns a file object, then set the files dict. 
224 self._files.appendlist(force_unicode(field_name, 225 encoding, 226 errors='replace'), 227 file_obj) 228 break 229 else: 230 # If this is neither a FIELD or a FILE, just exhaust the stream. 231 stream.exhuast() 232 233 # Make sure that the request data is all fed 234 limited_input_data.exhaust() 235 236 # Signal that the upload has completed. 237 for handler in handlers: 238 retval = handler.upload_complete() 239 if retval: 240 break 241 242 return self._post, self._files 243 244 def IE_sanitize(self, filename): 245 """cleanup filename from IE full paths""" 246 return filename and filename[filename.rfind("\\")+1:].strip() 247 248 249 class LazyStream(object): 250 def __init__(self, producer, length=None): 251 """ 252 Every LazyStream must have a producer when instantiated. 253 254 A producer is an iterable that returns a string each time it 255 is called. 256 """ 257 self._producer = producer 258 self._empty = False 259 self._leftover = '' 260 self.length = length 261 self.position = 0 262 self._remaining = length 263 264 def tell(self): 265 return self.position 266 267 def read(self, size=None): 268 def parts(): 269 remaining = (size is not None and [size] or [self._remaining])[0] 270 # do the whole thing in one shot if no limit was provided. 271 if remaining is None: 272 yield ''.join(self) 273 return 274 275 # otherwise do some bookkeeping to return exactly enough 276 # of the stream and stashing any extra content we get from 277 # the producer 278 while remaining != 0: 279 assert remaining > 0, 'remaining bytes to read should never go negative' 280 281 chunk = self.next() 282 283 emitting = chunk[:remaining] 284 self.unget(chunk[remaining:]) 285 remaining -= len(emitting) 286 yield emitting 287 288 out = ''.join(parts()) 289 self.position += len(out) 290 return out 291 292 def next(self): 293 """ 294 Used when the exact number of bytes to read is unimportant. 295 296 This procedure just returns whatever is chunk is conveniently 297 returned from the iterator instead. Useful to avoid 298 unnecessary bookkeeping if performance is an issue. 299 """ 300 if self._leftover: 301 output = self._leftover 302 self.position += len(output) 303 self._leftover = '' 304 return output 305 else: 306 output = self._producer.next() 307 self.position += len(output) 308 return output 309 310 def close(self): 311 """ 312 Used to invalidate/disable this lazy stream. 313 314 Replaces the producer with an empty list. Any leftover bytes 315 that have already been read will still be reported upon read() 316 and/or next(). 317 """ 318 self._producer = [] 319 320 def __iter__(self): 321 return self 322 323 def unget(self, bytes): 324 """ 325 Places bytes back onto the front of the lazy stream. 326 327 Future calls to read() will return those bytes first. The 328 stream position and thus tell() will be rewound. 329 """ 330 self.position -= len(bytes) 331 self._leftover = ''.join([bytes, self._leftover]) 332 333 def exhaust(self): 334 """ 335 Exhausts the entire underlying stream. 336 337 Useful for skipping and advancing sections. 
338 """ 339 for thing in self: 340 pass 341 342 343 class ChunkIter(object): 344 def __init__(self, flo, chunk_size=1024**2): 345 self.flo = flo 346 self.chunk_size = chunk_size 347 348 def next(self): 349 try: 350 data = self.flo.read(self.chunk_size) 351 except InputStreamExhausted: 352 raise StopIteration 353 if data: 354 return data 355 else: 356 raise StopIteration 357 358 def __iter__(self): 359 return self 360 361 362 class LimitBytes(object): 363 """ Limit bytes for a file object. """ 364 def __init__(self, fileobject, length): 365 self._file = fileobject 366 self.remaining = length 367 368 def read(self, num_bytes=None): 369 """ 370 Read data from the underlying file. 371 If you ask for too much or there isn't anything left, 372 this will raise an InputStreamExhausted error. 373 """ 374 if self.remaining <= 0: 375 raise InputStreamExhausted() 376 if num_bytes is None: 377 num_bytes = self.remaining 378 else: 379 num_bytes = min(num_bytes, self.remaining) 380 self.remaining -= num_bytes 381 return self._file.read(num_bytes) 382 383 def exhaust(self): 384 """ 385 Exhaust this file until all of the bytes it was limited by 386 have been read. 387 """ 388 while self.remaining > 0: 389 num_bytes = min(self.remaining, 16384) 390 __ = self._file.read(num_bytes) 391 self.remaining -= num_bytes 392 393 394 class InterBoundaryIter(object): 395 """ 396 A Producer that will iterate over boundaries. 397 """ 398 def __init__(self, stream, boundary): 399 self._stream = stream 400 self._boundary = boundary 401 402 def __iter__(self): 403 return self 404 405 def next(self): 406 try: 407 return LazyStream(BoundaryIter(self._stream, self._boundary)) 408 except InputStreamExhausted: 409 raise StopIteration 410 411 class BoundaryIter(object): 412 """ 413 A Producer that is sensitive to boundaries. 414 415 Will happily yield bytes until a boundary is found. Will yield the 416 bytes before the boundary, throw away the boundary bytes 417 themselves, and push the post-boundary bytes back on the stream. 418 419 The future calls to .next() after locating the boundary will raise 420 a StopIteration exception. 421 """ 422 def __init__(self, stream, boundary): 423 self._stream = stream 424 self._boundary = boundary 425 self._done = False 426 # rollback an additional six bytes because the format is like 427 # this: CRLF<boundary>[--CRLF] 428 self._rollback = len(boundary) + 6 429 430 # Try to use mx fast string search if available. Otherwise 431 # use Python find. Wrap the latter for consistency. 
432 unused_char = self._stream.read(1) 433 if not unused_char: 434 raise InputStreamExhausted 435 self._stream.unget(unused_char) 436 try: 437 from mx.TextTools import FS 438 self._fs = FS(boundary).find 439 except ImportError: 440 self._fs = lambda data: data.find(boundary) 441 442 def __iter__(self): 443 return self 444 445 def next(self): 446 if self._done: 447 raise StopIteration 448 449 stream = self._stream 450 rollback = self._rollback 451 452 bytes_read = 0 453 chunks = [] 454 for bytes in stream: 455 bytes_read += len(bytes) 456 chunks.append(bytes) 457 if bytes_read > rollback: 458 break 459 if not bytes: 460 break 461 else: 462 self._done = True 463 464 if not chunks: 465 raise StopIteration 466 467 chunk = ''.join(chunks) 468 469 boundary = self._find_boundary(chunk, len(chunk) < self._rollback) 470 471 472 if boundary: 473 end, next = boundary 474 stream.unget(chunk[next:]) 475 self._done = True 476 return chunk[:end] 477 else: 478 # make sure we dont treat a partial boundary (and 479 # its separators) as data 480 if not chunk[:-rollback]:# and len(chunk) >= (len(self._boundary) + 6): 481 # There's nothing left, we should just return and mark as done. 482 self._done = True 483 return chunk 484 else: 485 stream.unget(chunk[-rollback:]) 486 return chunk[:-rollback] 487 488 def _find_boundary(self, data, eof = False): 489 """ 490 Finds a multipart boundary in data. 491 492 Should no boundry exist in the data None is returned 493 instead. Otherwise a tuple containing 494 the indices of the following are returned: 495 496 * the end of current encapsulation 497 498 * the start of the next encapsulation 499 """ 500 index = self._fs(data) 501 if index < 0: 502 return None 503 else: 504 end = index 505 next = index + len(self._boundary) 506 data_len = len(data) - 1 507 # backup over CRLF 508 if data[max(0,end-1)] == '\n': end -= 1 509 if data[max(0,end-1)] == '\r': end -= 1 510 # skip over --CRLF 511 if data[min(data_len,next)] == '-': next += 1 512 if data[min(data_len,next)] == '-': next += 1 513 if data[min(data_len,next)] == '\r': next += 1 514 if data[min(data_len,next)] == '\n': next += 1 515 return end, next 516 517 def ParseBoundaryStream(stream, max_header_size): 518 """ 519 Parses one and exactly one stream that encapsulates a boundary. 520 """ 521 # Stream at beginning of header, look for end of header 522 # and parse it if found. The header must fit within one 523 # chunk. 524 chunk = stream.read(max_header_size) 525 # 'find' returns the top of these four bytes, so we'll 526 # need to munch them later to prevent them from polluting 527 # the payload. 528 header_end = chunk.find('\r\n\r\n') 529 530 def parse_header(line): 531 from cgi import parse_header 532 main_value_pair, params = parse_header(line) 533 try: 534 name, value = main_value_pair.split(':', 1) 535 except: 536 raise ValueError("Invalid header: %r" % line) 537 return name, (value, params) 538 539 if header_end == -1: 540 # we find no header, so we just mark this fact and pass on 541 # the stream verbatim 542 stream.unget(chunk) 543 return ('RAW', {}, stream) 544 545 header = chunk[:header_end] 546 547 # here we place any excess chunk back onto the stream, as 548 # well as throwing away the CRLFCRLF bytes from above. 549 stream.unget(chunk[header_end + 4:]) 550 551 is_file_field = False 552 outdict = {} 553 554 # eliminate blank lines 555 for line in header.split('\r\n'): 556 # This terminology ("main value" and "dictionary of 557 # parameters") is from the Python docs. 
558 name, (value, params) = parse_header(line) 559 if name == 'content-disposition' and params.get('filename'): 560 is_file_field = True 561 562 outdict[name] = value, params 563 564 if is_file_field: 565 return ('FILE', outdict, stream) 566 else: 567 return ('FIELD', outdict, stream) 568 569 570 class Parser(object): 571 def __init__(self, stream, boundary): 572 self._stream = stream 573 self._separator = '--' + boundary 574 575 def __iter__(self): 576 577 boundarystream = InterBoundaryIter(self._stream, 578 self._separator) 579 580 for sub_stream in boundarystream: 581 # Iterate over each part 582 yield ParseBoundaryStream(sub_stream, 1024) 583 584 585 -
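Note: the parser above is built around LazyStream, whose unget() lets BoundaryIter push post-boundary bytes back onto the front of the stream. A minimal sketch of that bookkeeping, not part of the patch, using only classes from the new module:

    from StringIO import StringIO
    from django.http.multipartparser import LazyStream, ChunkIter

    stream = LazyStream(ChunkIter(StringIO('abcdefghij'), 4))
    assert stream.read(6) == 'abcdef'   # spans two 4-byte chunks
    stream.unget('ef')                  # push the last two bytes back
    assert stream.read(2) == 'ef'       # the ungotten bytes come back out first...
    assert stream.read() == 'ghij'      # ...before the rest of the producer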
django/http/__init__.py
 from django.utils.datastructures import MultiValueDict, FileDict
 from django.utils.encoding import smart_str, iri_to_uri, force_unicode
 from django.http.multipartparser import MultiPartParser
 from utils import *

 RESERVED_CHARS="!*'();:@&=+$,/?%#[]"
…
         self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
         self.path = ''
         self.method = None
+        self._upload_handlers = []

     def __repr__(self):
         return '<HttpRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
…
     encoding = property(_get_encoding, _set_encoding)

-def parse_file_upload(header_dict, post_data):
-    """Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
-    import email, email.Message
-    from cgi import parse_header
-    raw_message = '\r\n'.join(['%s:%s' % pair for pair in header_dict.items()])
-    raw_message += '\r\n\r\n' + post_data
-    msg = email.message_from_string(raw_message)
-    POST = QueryDict('', mutable=True)
-    FILES = MultiValueDict()
-    for submessage in msg.get_payload():
-        if submessage and isinstance(submessage, email.Message.Message):
-            name_dict = parse_header(submessage['Content-Disposition'])[1]
-            # name_dict is something like {'name': 'file', 'filename': 'test.txt'} for file uploads
-            # or {'name': 'blah'} for POST fields
-            # We assume all uploaded files have a 'filename' set.
-            if 'filename' in name_dict:
-                assert type([]) != type(submessage.get_payload()), "Nested MIME messages are not supported"
-                if not name_dict['filename'].strip():
-                    continue
-                # IE submits the full path, so trim everything but the basename.
-                # (We can't use os.path.basename because that uses the server's
-                # directory separator, which may not be the same as the
-                # client's one.)
-                filename = name_dict['filename'][name_dict['filename'].rfind("\\")+1:]
-                FILES.appendlist(name_dict['name'], FileDict({
-                    'filename': filename,
-                    'content-type': 'Content-Type' in submessage and submessage['Content-Type'] or None,
-                    'content': submessage.get_payload(),
-                }))
-            else:
-                POST.appendlist(name_dict['name'], submessage.get_payload())
-    return POST, FILES
+    def _set_upload_handlers(self, upload_handlers):
+        """
+        Set the upload handler to the new handler given in the parameter.
+        """
+        if hasattr(self, '_files'):
+            raise AttributeError("You cannot set the upload handler after the upload has been processed.")
+        self._upload_handlers = upload_handlers
+
+    def _get_upload_handlers(self):
+        return self._upload_handlers
+
+    upload_handlers = property(_get_upload_handlers, _set_upload_handlers)
+
+    def parse_file_upload(self, META, post_data):
+        """Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
+        from django.core.files.fileuploadhandler import TemporaryFileUploadHandler, MemoryFileUploadHandler
+        if not self.upload_handlers:
+            # Order here is *very* important.
+            self.upload_handlers = (MemoryFileUploadHandler(),
+                                    TemporaryFileUploadHandler())
+        else:
+            self.upload_handlers = tuple(self.upload_handlers)
+
+        parser = MultiPartParser(META, post_data, self.upload_handlers,
+                                 self.encoding)
+        return parser.parse()


 class QueryDict(MultiValueDict):
     """
     A specialized MultiValueDict that takes a query string when initialized.
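Note: with upload_handlers exposed as a property on HttpRequest, a view can swap in its own handler list, as long as it does so before the POST data is parsed. A minimal, hypothetical sketch (the view name and the 'attachment' field name are examples only):

    from django.core.files.fileuploadhandler import TemporaryFileUploadHandler
    from django.http import HttpResponse

    def upload(request):
        # Must run before request.POST / request.FILES are first accessed,
        # otherwise the property setter raises AttributeError.
        request.upload_handlers = [TemporaryFileUploadHandler()]
        uploaded = request.FILES['attachment']   # parsing happens lazily here
        return HttpResponse('received %s' % uploaded.file_name)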
django/oldforms/__init__.py
         self.field_name, self.is_required = field_name, is_required
         self.validator_list = [self.isNonEmptyFile] + validator_list

-    def isNonEmptyFile(self, field_data, all_data):
-        try:
-            content = field_data['content']
-        except TypeError:
-            raise validators.CriticalValidationError, ugettext("No file was submitted. Check the encoding type on the form.")
-        if not content:
+    def isNonEmptyFile(self, new_data, all_data):
+        if hasattr(new_data, 'upload_errors'):
+            upload_errors = new_data.upload_errors()
+            if upload_errors:
+                raise validators.CriticalValidationError, upload_errors
+        if not new_data.file_size:
             raise validators.CriticalValidationError, ugettext("The submitted file is empty.")

     def render(self, data):
         return mark_safe(u'<input type="file" id="%s" class="v%s" name="%s" />' % \
             (self.get_id(), self.__class__.__name__, self.field_name))

+    def prepare(self, new_data):
+        if hasattr(new_data, 'upload_errors'):
+            upload_errors = new_data.upload_errors()
+            new_data[self.field_name] = { '_file_upload_error': upload_errors }
+
     def html2python(data):
         if data is None:
             raise EmptyValue
django/db/models/base.py
 from django.utils.datastructures import SortedDict
 from django.utils.functional import curry
 from django.utils.encoding import smart_str, force_unicode, smart_unicode
+from django.core.files.filemove import file_move_safe
 from django.conf import settings
 from itertools import izip
 import types
…
     def _get_FIELD_size(self, field):
         return os.path.getsize(self._get_FIELD_filename(field))

-    def _save_FIELD_file(self, field, filename, raw_contents, save=True):
+    def _save_FIELD_file(self, field, filename, raw_field, save=True):
         directory = field.get_directory_name()
         try: # Create the date-based directory if it doesn't exist.
             os.makedirs(os.path.join(settings.MEDIA_ROOT, directory))
         except OSError: # Directory probably already exists.
             pass
+
+        if filename is None:
+            filename = raw_field.file_name
+
         filename = field.get_filename(filename)

         # If the filename already exists, keep adding an underscore to the name of
…
         setattr(self, field.attname, filename)

         full_filename = self._get_FIELD_filename(field)
-        fp = open(full_filename, 'wb')
-        fp.write(raw_contents)
-        fp.close()
+        if hasattr(raw_field, 'temporary_file_path'):
+            raw_field.close()
+            file_move_safe(raw_field.temporary_file_path(), full_filename)
+        else:
+            from django.core.files import filelocks
+            fp = open(full_filename, 'wb')
+            # exclusive lock
+            filelocks.lock(fp, filelocks.LOCK_EX)
+            # Stream it into the file, from where it is.
+            for chunk in raw_field.chunk(65535):
+                fp.write(chunk)
+            fp.close()

         # Save the width and/or height, if applicable.
         if isinstance(field, ImageField) and (field.width_field or field.height_field):
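Note: the streamed save above is what the generated save_FOO_file methods call into; temporary files are moved with file_move_safe, anything else is written chunk by chunk under an exclusive lock. A hedged sketch of calling it from a view — the Document model, its 'attachment' FileField and the view name are illustrative only, not part of the patch:

    from django.http import HttpResponse
    from myapp.models import Document   # hypothetical model with a FileField named 'attachment'

    def store(request):
        uploaded = request.FILES['attachment']
        doc = Document(title=request.POST.get('title', ''))
        # Passing the UploadedFile object (not its contents) lets the model
        # move or stream the data instead of loading it all into memory.
        doc.save_attachment_file(uploaded.file_name, uploaded, save=True)
        return HttpResponse('stored')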
django/db/models/fields/__init__.py
         setattr(cls, 'get_%s_filename' % self.name, curry(cls._get_FIELD_filename, field=self))
         setattr(cls, 'get_%s_url' % self.name, curry(cls._get_FIELD_url, field=self))
         setattr(cls, 'get_%s_size' % self.name, curry(cls._get_FIELD_size, field=self))
-        setattr(cls, 'save_%s_file' % self.name, lambda instance, filename, raw_contents, save=True: instance._save_FIELD_file(self, filename, raw_contents, save))
+        setattr(cls, 'save_%s_file' % self.name, lambda instance, filename, raw_field, save=True: instance._save_FIELD_file(self, filename, raw_field, save))
+        setattr(cls, 'move_%s_file' % self.name, lambda instance, raw_field, save=True: instance._save_FIELD_file(self, None, raw_field, save))
         dispatcher.connect(self.delete_file, signal=signals.post_delete, sender=cls)

     def delete_file(self, instance):
…
         if new_data.get(upload_field_name, False):
             func = getattr(new_object, 'save_%s_file' % self.name)
             if rel:
-                func(new_data[upload_field_name][0]["filename"], new_data[upload_field_name][0]["content"], save)
+                func(new_data[upload_field_name][0].file_name, new_data[upload_field_name][0], save)
             else:
-                func(new_data[upload_field_name]["filename"], new_data[upload_field_name]["content"], save)
+                func(new_data[upload_field_name].file_name, new_data[upload_field_name], save)

     def get_directory_name(self):
         return os.path.normpath(force_unicode(datetime.datetime.now().strftime(smart_str(self.upload_to))))
…
     def save_form_data(self, instance, data):
         from django.newforms.fields import UploadedFile
         if data and isinstance(data, UploadedFile):
-            getattr(instance, "save_%s_file" % self.name)(data.filename, data.content, save=False)
+            getattr(instance, "save_%s_file" % self.name)(data.filename, data.data, save=False)

     def formfield(self, **kwargs):
         defaults = {'form_class': forms.FileField}
django/conf/global_settings.py
 # Example: "http://media.lawrence.com"
 MEDIA_URL = ''

+# Directory to upload streamed files temporarily.
+# A value of `None` means that it will use the default temporary
+# directory for the server's operating system.
+FILE_UPLOAD_TEMP_DIR = None
+
 # Default formatting for date objects. See all available format strings here:
 # http://www.djangoproject.com/documentation/templates/#now
 DATE_FORMAT = 'N j, Y'
django/core/handlers/wsgi.py
         self.path = force_unicode(environ['PATH_INFO'])
         self.META = environ
         self.method = environ['REQUEST_METHOD'].upper()
+        self._upload_handlers = []

     def __repr__(self):
         # Since this is called as part of error handling, we need to be very
…
         # Populates self._post and self._files
         if self.method == 'POST':
             if self.environ.get('CONTENT_TYPE', '').startswith('multipart'):
-                header_dict = dict([(k, v) for k, v in self.environ.items() if k.startswith('HTTP_')])
-                header_dict['Content-Type'] = self.environ.get('CONTENT_TYPE', '')
-                self._post, self._files = http.parse_file_upload(header_dict, self.raw_post_data)
+                self._raw_post_data = ''
+                self._post, self._files = self.parse_file_upload(self.META, self.environ['wsgi.input'])
             else:
                 self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()
         else:
django/core/handlers/modpython.py
     def __init__(self, req):
         self._req = req
         self.path = force_unicode(req.uri)
+        self._upload_handlers = []

     def __repr__(self):
         # Since this is called as part of error handling, we need to be very
…
     def _load_post_and_files(self):
         "Populates self._post and self._files"
         if 'content-type' in self._req.headers_in and self._req.headers_in['content-type'].startswith('multipart'):
-            self._post, self._files = http.parse_file_upload(self._req.headers_in, self.raw_post_data)
+            self._raw_post_data = ''
+            self._post, self._files = self.parse_file_upload(self.META, self._req)
         else:
             self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()
django/core/files/filelocks.py
"""
Locking portability based partially on example by
Jonathan Feignberg <jdf@pobox.com> in python cookbook.

Example Usage::

    from django.core.files import filelocks

    f = open('./file', 'wb')

    filelocks.lock(f, filelocks.LOCK_EX)
    f.write('Django')
    f.close()
"""

__all__ = ('LOCK_EX','LOCK_SH','LOCK_NB','lock','unlock')

system_type = None

try:
    import win32con
    import win32file
    import pywintypes
    LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
    LOCK_SH = 0
    LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY
    __overlapped = pywintypes.OVERLAPPED()
    system_type = 'nt'
except (ImportError, AttributeError):
    pass

try:
    import fcntl
    LOCK_EX = fcntl.LOCK_EX
    LOCK_SH = fcntl.LOCK_SH
    LOCK_NB = fcntl.LOCK_NB
    system_type = 'posix'
except (ImportError, AttributeError):
    pass


if system_type == 'nt':
    def lock(file, flags):
        hfile = win32file._get_osfhandle(file.fileno())
        win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped)

    def unlock(file):
        hfile = win32file._get_osfhandle(file.fileno())
        win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped)

elif system_type == 'posix':
    def lock(file, flags):
        fcntl.flock(file.fileno(), flags)

    def unlock(file):
        fcntl.flock(file.fileno(), fcntl.LOCK_UN)

else:
    # File locking is not supported.
    LOCK_EX = LOCK_SH = LOCK_NB = None

    # Dummy functions that don't do anything.
    def lock(file, flags):
        pass

    def unlock(file):
        pass
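Note: the module docstring shows the exclusive-lock case; a shared read lock with an explicit unlock looks much the same. A minimal sketch, not part of the patch (the path is an example):

    from django.core.files import filelocks

    f = open('/tmp/example.dat', 'rb')
    filelocks.lock(f, filelocks.LOCK_SH)   # shared lock for reading
    try:
        data = f.read()
    finally:
        filelocks.unlock(f)
        f.close()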
django/core/files/uploadedfile.py
"""
The uploaded file objects for Django.
This contains the base UploadedFile and the TemporaryUploadedFile
derived class.
"""

__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile')

class UploadedFile(object):
    """
    The UploadedFile object behaves somewhat like a file
    object and represents some data that the user submitted
    and is stored in some form.
    """
    DEFAULT_CHUNK_SIZE = 64 * 2**10

    def __init__(self):
        self.file_size = None
        self.file_name = None
        self.content_type = None
        self.charset = None
        pass

    def file_size(self):
        return self.file_size

    def chunk(self, chunk_size=None):
        """
        Read the file to generate chunks of chunk_size bytes.
        """
        if not chunk_size:
            chunk_size = UploadedFile.DEFAULT_CHUNK_SIZE

        if hasattr(self, 'seek'):
            self.seek(0)
        # Assume the pointer is at zero...
        counter = self.file_size()

        while counter > 0:
            yield self.read(chunk_size)
            counter -= chunk_size

    def multiple_chunks(self, chunk_size=None):
        """
        Return True if you can expect multiple chunks, False otherwise.
        Note: If a particular file representation is in memory, then
        override this to return False.
        """
        if not chunk_size:
            chunk_size = UploadedFile.DEFAULT_CHUNK_SIZE
        return self.file_size() < chunk_size

    def read(self, num_bytes=None):
        """
        Read from the file in whatever representation it has.
        """
        raise NotImplementedError()

    def open(self):
        """
        Open the file, if one needs to.
        """
        pass

    def close(self):
        """
        Close the file, if one needs to.
        """
        pass

    def __getitem__(self, key):
        """
        This maintains backwards compatibility.
        """
        import warnings
        warnings.warn("The dictionary access of uploaded file objects is deprecated. Use the new object interface instead.", DeprecationWarning)
        # Dictionary to translate labels
        # for backwards compatbility.
        # Should be removed at some point.
        backwards_translate = {
            'filename': 'file_name',
            'content-type': 'content_type',
            }

        if key == 'content':
            return self.read()
        else:
            return getattr(self, backwards_translate.get(key, key))

    def __repr__(self):
        """
        This representation could be anything and can be overridden.
        This is mostly done to make it look somewhat useful.
        """
        _dict = {
            'file_name': self.file_name,
            'content_type': self.content_type,
            'content': '<omitted>',
            }
        return repr(_dict)


class TemporaryUploadedFile(UploadedFile):
    """
    Upload a file to a temporary file.
    """

    def __init__(self, file, file_name, content_type, file_size, charset):
        self.file = file
        self.file_name = file_name
        self.path = file.name
        self.content_type = content_type
        self.file_size = file_size
        self.charset = charset
        self.file.seek(0)

    def temporary_file_path(self):
        """
        Return the full path of this file.
        """
        return self.path

    def read(self, *args, **kwargs):
        return self.file.read(*args, **kwargs)

    def open(self):
        """
        Assume the person meant to seek.
        """
        self.seek(0)

    def seek(self, *args, **kwargs):
        self.file.seek(*args, **kwargs)


class InMemoryUploadedFile(UploadedFile):
    """
    Upload a file into memory.
    """
    def __init__(self, file, field_name, file_name, content_type, charset):
        self.file = file
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.charset = charset
        self.file.seek(0)

    def seek(self, *args, **kwargs):
        self.file.seek(*args, **kwargs)

    def open(self):
        self.seek(0)

    def read(self, *args, **kwargs):
        return self.file.read(*args, **kwargs)

    def chunk(self, chunk_size=None):
        """
        Return the entirety of the data regardless.
        """
        self.file.seek(0)
        return self.read()

    def multiple_chunks(self, chunk_size=None):
        """
        Since it's in memory, we'll never have multiple chunks.
        """
        return False


class SimpleUploadedFile(InMemoryUploadedFile):
    """
    A simple representation of a file, which
    just has content, size, and a name.
    """
    def __init__(self, name, content, content_type='text/plain'):
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        self.file = StringIO(content or '')
        self.file_name = name
        self.field_name = None
        self.file_size = len(content or '')
        self.content_type = content_type
        self.charset = None
        self.file.seek(0)
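Note: SimpleUploadedFile is the in-memory convenience wrapper the updated tests rely on; it also exercises the backwards-compatible dictionary access kept on the base class. A minimal sketch, not part of the patch:

    from django.core.files.uploadedfile import SimpleUploadedFile

    f = SimpleUploadedFile('hello.txt', 'hello world', 'text/plain')
    assert f.file_name == 'hello.txt'
    assert f.read() == 'hello world'
    # Old-style dict access still works, but emits a DeprecationWarning:
    assert f['filename'] == 'hello.txt'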
django/core/files/__init__.py
(new empty file)
django/core/files/fileuploadhandler.py
""" A fileuploadhandler base and default subclass for handling file uploads.
"""
import os
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.utils.encoding import force_unicode
from django.utils.datastructures import MultiValueDict

from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile

__all__ = ('UploadFileException','StopUpload', 'SkipFile',
           'FileUploadHandler', 'TemporaryFileUploadHandler',
           'MemoryFileUploadHandler')


class UploadFileException(Exception):
    """ Any error having to do with Uploading Files. """
    pass

class StopUpload(UploadFileException):
    """ This exception is raised when an upload must abort. """
    pass

class SkipFile(UploadFileException):
    """ This exception is raised when a file needs to be skipped. """
    pass


class FileUploadHandler(object):
    """ FileUploadHandler will take data and handle file uploads
    in a streamed fashion.
    """
    chunk_size = 64 * 2 ** 10 #: The default chunk size is 64 KB.

    def __init__(self):
        " Initialize some local variables. "
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Handle the raw input from the client.
        Parameters:
          *input_data* -- An object that supports reading via .read().
          *content_length* -- The (integer) value of the Content-Length header from the client.
          *boundary* -- The boundary from the Content-Type header. Be sure to prepend two '--'.
        """
        pass

    def new_file(self, field_name, file_name, content_type, content_length, charset=None):
        """
        Signal that a new file has been started.

        Warning: Do not trust content_length, if you get it at all.
        """
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.content_length = content_length
        self.charset = charset

    def receive_data_chunk(self, raw_data, start, stop):
        """
        Receive data from the streamed upload parser.
        Start and stop are the positions in the file.
        This equality should always be true::
            len(raw_data) = stop - start
        """
        raise NotImplementedError()

    def file_complete(self, file_size):
        """
        Signal that a file has completed.
        File size corresponds to the actual size accumulated
        by all the chunks.

        This should return a valid UploadedFile object.
        """
        raise NotImplementedError()

    def upload_complete(self):
        """
        Signal that the upload is complete.
        Do any cleanup that is necessary for this handler.
        """
        pass


class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload the streaming data into a temporary file.
    """
    def __init__(self, *args, **kwargs):
        """ Import settings for later. """
        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
        global settings
        from django.conf import settings

    def new_file(self, file_name, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
        self.file = TemporaryFile(settings.FILE_UPLOAD_TEMP_DIR)
        self.write = self.file.write

    def receive_data_chunk(self, raw_data, start, stop):
        """
        Once we get the data, we will save it to our file.
        """
        self.write(raw_data)

    def file_complete(self, file_size):
        """
        Signal that a file has completed.
        File size corresponds to the actual size accumulated
        by all the chunks.

        This should return a valid UploadedFile object.
        """
        self.file.seek(0)
        return TemporaryUploadedFile(self.file, self.file_name,
                                     self.content_type, file_size,
                                     self.charset)


class TemporaryFile(object):
    """
    A temporary file that tries to delete itself when garbage collected.
    """
    def __init__(self, dir):
        import tempfile
        if not dir:
            dir = tempfile.gettempdir()
        try:
            (fd, name) = tempfile.mkstemp(suffix='.upload', dir=dir)
            self.file = os.fdopen(fd, 'w+b')
        except (OSError, IOError):
            raise OSError, "Could not create temporary file for uploading, have you set settings.FILE_UPLOAD_TEMP_DIR correctly?"
        self.name = name

    def __getattr__(self, name):
        a = getattr(self.__dict__['file'], name)
        if type(a) != type(0):
            setattr(self, name, a)
        return a

    def __del__(self):
        try:
            os.unlink(self.name)
        except OSError:
            pass


class MemoryFileUploadHandler(FileUploadHandler):
    """
    The MemoryFileUploadHandler will place the data directly into memory.
    """

    def __init__(self):
        pass

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Parse the input data in-memory.
        """
        if content_length > 2621440:
            # If the post is greater than 2.5 MB, do nothing.
            return

        from django.http import QueryDict
        import email, email.Message
        from cgi import parse_header

        #####
        # Get the headers from the META information.
        headers = []
        if 'HTTP_CONTENT_TYPE' not in META:
            headers.append('Content-Type: %s' % (META.get('CONTENT_TYPE', '')))

        if 'HTTP_CONTENT_LENGTH' not in META:
            headers.append('Content-Length: %s' % (META.get('CONTENT_LENGTH', '0')))

        for key, value in META.items():
            if key.startswith('HTTP_'):
                headers.append('%s: %s' % (key[5:].replace('_','-').title(), value))

        raw_message = '\r\n'.join(headers)
        raw_message += '\r\n\r\n' + input_data.read()

        msg = email.message_from_string(raw_message)
        POST = QueryDict('', mutable=True)
        FILES = MultiValueDict()
        for submessage in msg.get_payload():
            if submessage and isinstance(submessage, email.Message.Message):
                name_dict = parse_header(submessage['Content-Disposition'])[1]
                field_name = force_unicode(name_dict['name'], encoding, errors='replace')

                if 'filename' in name_dict:
                    assert not isinstance(submessage.get_payload(), list), "Nested MIME messages are not supported"
                    if not name_dict['filename'].strip():
                        continue

                    filename = force_unicode(name_dict['filename'][name_dict['filename'].rfind("\\")+1:],
                                             encoding, errors='replace')
                    content_type = 'Content-Type' in submessage and submessage['Content-Type'] or None

                    file_obj = InMemoryUploadedFile(StringIO(submessage.get_payload()),
                                                    field_name, filename, content_type, None)

                    FILES.appendlist(field_name, file_obj)
                else:
                    content = force_unicode(submessage.get_payload(), encoding, errors='replace')
                    POST.appendlist(field_name, content)

        return POST, FILES

    def new_file(self, field_name, file_name, content_type, content_length, charset):
        """
        Do Nothing.
        """
        return

    def receive_data_chunk(self, raw_data, start, stop):
        """
        Do nothing.
        """
        return raw_data

    def file_complete(self, file_size):
        """
        Do nothing.
        """
        return
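Note: the handler API above is meant to be subclassed. A hedged sketch of a custom handler that skips any file larger than a fixed quota; the class name and limit are examples, not part of the patch:

    from django.core.files.fileuploadhandler import FileUploadHandler, SkipFile

    class QuotaUploadHandler(FileUploadHandler):
        QUOTA = 5 * 2 ** 20   # 5 MB, an arbitrary example limit

        def new_file(self, *args, **kwargs):
            super(QuotaUploadHandler, self).new_file(*args, **kwargs)
            self.received = 0

        def receive_data_chunk(self, raw_data, start, stop):
            self.received += len(raw_data)
            if self.received > self.QUOTA:
                raise SkipFile()
            return raw_data   # pass the chunk along to the next handler

        def file_complete(self, file_size):
            return None       # let a later handler build the UploadedFile

    # In a view, install it in front of the defaults:
    #   request.upload_handlers = [QuotaUploadHandler(),
    #                              MemoryFileUploadHandler(),
    #                              TemporaryFileUploadHandler()]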
django/core/files/filemove.py
import os

__all__ = ('file_move_safe',)

try:
    import shutil
    file_move = shutil.move
except ImportError:
    file_move = os.rename

def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
    """
    Moves a file from one location to another in the safest way possible.

    First, it tries using shutil.move, which is OS-dependent but doesn't
    break with change of filesystems. Then it tries os.rename, which will
    break if it encounters a change in filesystems. Lastly, it streams
    it manually from one file to another in python.

    Without ``allow_overwrite``, if the destination file exists, the
    function will raise an IOError.
    """

    from django.core.files import filelocks

    if old_file_name == new_file_name:
        # No file moving takes place.
        return

    if not allow_overwrite and os.path.exists(new_file_name):
        raise IOError, "Django does not allow overwriting files."

    try:
        file_move(old_file_name, new_file_name)
        return
    except OSError: # moving to another filesystem
        pass

    new_file = open(new_file_name, 'wb')
    # exclusive lock
    filelocks.lock(new_file, filelocks.LOCK_EX)
    old_file = open(old_file_name, 'rb')
    current_chunk = None

    while current_chunk != '':
        current_chunk = old_file.read(chunk_size)
        new_file.write(current_chunk)

    new_file.close()
    old_file.close()

    os.remove(old_file_name)
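Note: a minimal usage sketch, not part of the patch (the paths are examples only); the helper falls back to a locked, chunked copy when a cross-filesystem rename is not possible:

    from django.core.files.filemove import file_move_safe

    file_move_safe('/tmp/upload_1234.upload', '/srv/media/report.pdf',
                   allow_overwrite=False)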
django/newforms/fields.py
 class UploadedFile(StrAndUnicode):
     "A wrapper for files uploaded in a FileField"
-    def __init__(self, filename, content):
+    def __init__(self, filename, data):
         self.filename = filename
-        self.content = content
+        self.data = data

     def __unicode__(self):
         """
…
         elif not data and initial:
             return initial
         try:
-            f = UploadedFile(data['filename'], data['content'])
-        except TypeError:
+            f = UploadedFile(data.file_name, data)
+        except (TypeError, AttributeError):
             raise ValidationError(self.error_messages['invalid'])
-        except KeyError:
-            raise ValidationError(self.error_messages['missing'])
-        if not f.content:
+        if not f.data.file_size:
             raise ValidationError(self.error_messages['empty'])
         return f
…
         elif not data and initial:
             return initial
         from PIL import Image
-        from cStringIO import StringIO
+
+        # We need to get the file, it either has a path
+        # or we have to read it all into memory...
+        if hasattr(data, 'temporary_file_path'):
+            file = data.temporary_file_path()
+        else:
+            try:
+                from cStringIO import StringIO
+            except ImportError:
+                from StringIO import StringIO
+            file = StringIO(data.read())
+
         try:
             # load() is the only method that can spot a truncated JPEG,
             # but it cannot be called sanely after verify()
-            trial_image = Image.open(StringIO(f.content))
+            trial_image = Image.open(file)
             trial_image.load()
             # verify() is the only method that can spot a corrupt PNG,
             # but it must be called immediately after the constructor
-            trial_image = Image.open(StringIO(f.content))
+            trial_image = Image.open(file)
             trial_image.verify()
         except Exception: # Python Imaging Library doesn't recognize it as an image
             raise ValidationError(self.error_messages['invalid_image'])
tests/modeltests/model_forms/models.py
 __test__ = {'API_TESTS': """
 >>> from django import newforms as forms
 >>> from django.newforms.models import ModelForm
+>>> from django.core.files.uploadedfile import SimpleUploadedFile

 The bare bones, absolutely nothing custom, basic case.
…
 # Upload a file and ensure it all works as expected.

->>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': {'filename': 'test1.txt', 'content': 'hello world'}})
+>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test1.txt', 'hello world')})
 >>> f.is_valid()
 True
 >>> type(f.cleaned_data['file'])
…
 # Override the file by uploading a new one.

->>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': {'filename': 'test2.txt', 'content': 'hello world'}}, instance=instance)
+>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test2.txt', 'hello world')}, instance=instance)
 >>> f.is_valid()
 True
 >>> instance = f.save()
…
 >>> instance.file
 ''

->>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': {'filename': 'test3.txt', 'content': 'hello world'}}, instance=instance)
+>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test3.txt', 'hello world')}, instance=instance)
 >>> f.is_valid()
 True
 >>> instance = f.save()
…
 >>> image_data = open(os.path.join(os.path.dirname(__file__), "test.png")).read()

->>> f = ImageFileForm(data={'description': u'An image'}, files={'image': {'filename': 'test.png', 'content': image_data}})
+>>> f = ImageFileForm(data={'description': u'An image'}, files={'image': SimpleUploadedFile('test.png', image_data)})
 >>> f.is_valid()
 True
 >>> type(f.cleaned_data['image'])
…
 # Override the file by uploading a new one.

->>> f = ImageFileForm(data={'description': u'Changed it'}, files={'image': {'filename': 'test2.png', 'content': image_data}}, instance=instance)
+>>> f = ImageFileForm(data={'description': u'Changed it'}, files={'image': SimpleUploadedFile('test2.png', image_data)}, instance=instance)
 >>> f.is_valid()
 True
 >>> instance = f.save()
…
 >>> instance.image
 ''

->>> f = ImageFileForm(data={'description': u'And a final one'}, files={'image': {'filename': 'test3.png', 'content': image_data}}, instance=instance)
+>>> f = ImageFileForm(data={'description': u'And a final one'}, files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
 >>> f.is_valid()
 True
 >>> instance = f.save()
tests/regressiontests/bug639/tests.py
 from regressiontests.bug639.models import Photo
 from django.http import QueryDict
 from django.utils.datastructures import MultiValueDict
+from django.core.files.uploadedfile import SimpleUploadedFile

 class Bug639Test(unittest.TestCase):
…
         # Fake a request query dict with the file
         qd = QueryDict("title=Testing&image=", mutable=True)
-        qd["image_file"] = {
-            "filename" : "test.jpg",
-            "content-type" : "image/jpeg",
-            "content" : img
-        }
+        qd["image_file"] = SimpleUploadedFile('test.jpg', img, 'image/jpeg')

         manip = Photo.AddManipulator()
         manip.do_html2python(qd)
…
         Make sure to delete the "uploaded" file to avoid clogging /tmp.
         """
         p = Photo.objects.get()
-        os.unlink(p.get_image_filename())
\ No newline at end of file
+        os.unlink(p.get_image_filename())
tests/regressiontests/forms/error_messages.py
 # -*- coding: utf-8 -*-
 tests = r"""
 >>> from django.newforms import *
+>>> from django.core.files.uploadedfile import SimpleUploadedFile

 # CharField ###################################################################
…
 Traceback (most recent call last):
 ...
 ValidationError: [u'INVALID']
->>> f.clean({})
+>>> f.clean(SimpleUploadedFile('name', None))
 Traceback (most recent call last):
 ...
-ValidationError: [u'MISSING']
->>> f.clean({'filename': 'name', 'content':''})
+ValidationError: [u'EMPTY FILE']
+>>> f.clean(SimpleUploadedFile('name', ''))
 Traceback (most recent call last):
 ...
 ValidationError: [u'EMPTY FILE']
tests/regressiontests/forms/fields.py
 tests = r"""
 >>> from django.newforms import *
 >>> from django.newforms.widgets import RadioFieldRenderer
+>>> from django.core.files.uploadedfile import SimpleUploadedFile
 >>> import datetime
 >>> import time
 >>> import re
…
 >>> f.clean({})
 Traceback (most recent call last):
 ...
-ValidationError: [u'No file was submitted.']
+ValidationError: [u'No file was submitted. Check the encoding type on the form.']

 >>> f.clean({}, '')
 Traceback (most recent call last):
 ...
-ValidationError: [u'No file was submitted.']
+ValidationError: [u'No file was submitted. Check the encoding type on the form.']

 >>> f.clean({}, 'files/test3.pdf')
 'files/test3.pdf'
…
 ...
 ValidationError: [u'No file was submitted. Check the encoding type on the form.']

->>> f.clean({'filename': 'name', 'content': None})
+>>> f.clean(SimpleUploadedFile('name', None))
 Traceback (most recent call last):
 ...
 ValidationError: [u'The submitted file is empty.']

->>> f.clean({'filename': 'name', 'content': ''})
+>>> f.clean(SimpleUploadedFile('name', ''))
 Traceback (most recent call last):
 ...
 ValidationError: [u'The submitted file is empty.']

->>> type(f.clean({'filename': 'name', 'content': 'Some File Content'}))
+>>> type(f.clean(SimpleUploadedFile('name', 'Some File Content')))
 <class 'django.newforms.fields.UploadedFile'>

->>> type(f.clean({'filename': 'name', 'content': 'Some File Content'}, 'files/test4.pdf'))
+>>> type(f.clean(SimpleUploadedFile('name', 'Some File Content'), 'files/test4.pdf'))
 <class 'django.newforms.fields.UploadedFile'>

 # URLField ##################################################################
tests/regressiontests/forms/forms.py
 # -*- coding: utf-8 -*-
 tests = r"""
 >>> from django.newforms import *
+>>> from django.core.files.uploadedfile import SimpleUploadedFile
 >>> import datetime
 >>> import time
 >>> import re
…
 >>> print f
 <tr><th>File1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="file" name="file1" /></td></tr>

->>> f = FileForm(data={}, files={'file1': {'filename': 'name', 'content':''}}, auto_id=False)
+>>> f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', '')}, auto_id=False)
 >>> print f
 <tr><th>File1:</th><td><ul class="errorlist"><li>The submitted file is empty.</li></ul><input type="file" name="file1" /></td></tr>
…
 >>> print f
 <tr><th>File1:</th><td><ul class="errorlist"><li>No file was submitted. Check the encoding type on the form.</li></ul><input type="file" name="file1" /></td></tr>

->>> f = FileForm(data={}, files={'file1': {'filename': 'name', 'content':'some content'}}, auto_id=False)
+>>> f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', 'some content')}, auto_id=False)
 >>> print f
 <tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>
 >>> f.is_valid()
tests/regressiontests/test_client_regress/views.py
 from django.contrib.auth.decorators import login_required
 from django.http import HttpResponse, HttpResponseRedirect, HttpResponseServerError
+import sha

 def no_template_view(request):
     "A simple view that expects a GET request, and returns a rendered template"
…
     Check that a file upload can be updated into the POST dictionary without
     going pear-shaped.
     """
+    from django.core.files.uploadedfile import UploadedFile
     form_data = request.POST.copy()
     form_data.update(request.FILES)
-    if isinstance(form_data['file_field'], dict) and isinstance(form_data['name'], unicode):
+    if isinstance(form_data['file_field'], UploadedFile) and isinstance(form_data['name'], unicode):
         return HttpResponse('')
     else:
         return HttpResponseServerError()

+def file_upload_view_verify(request):
+    """
+    Use the sha digest hash to verify the uploaded contents.
+    """
+    from django.core.files.uploadedfile import UploadedFile
+    form_data = request.POST.copy()
+    form_data.update(request.FILES)
+    for key, value in form_data.items():
+        if key.endswith('_hash'):
+            continue
+        if key + '_hash' not in form_data:
+            continue
+        submitted_hash = form_data[key + '_hash']
+        if isinstance(value, UploadedFile):
+            new_hash = sha.new(value.read()).hexdigest()
+        else:
+            new_hash = sha.new(value).hexdigest()
+        if new_hash != submitted_hash:
+            return HttpResponseServerError()
+
+    return HttpResponse('')
+
 def get_view(request):
     "A simple login protected view"
     return HttpResponse("Hello world")
…
 def login_protected_redirect_view(request):
     "A view that redirects all requests to the GET view"
     return HttpResponseRedirect('/test_client_regress/get_view/')
-login_protected_redirect_view = login_required(login_protected_redirect_view)
\ No newline at end of file
+login_protected_redirect_view = login_required(login_protected_redirect_view)
tests/regressiontests/test_client_regress/models.py
 from django.test import Client, TestCase
 from django.core.urlresolvers import reverse
 import os
+import sha

 class AssertContainsTests(TestCase):
     def test_contains(self):
…
         response = self.client.post('/test_client_regress/file_upload/', post_data)
         self.assertEqual(response.status_code, 200)

+    def test_large_upload(self):
+        import tempfile
+        dir = tempfile.gettempdir()
+
+        (fd, name1) = tempfile.mkstemp(suffix='.file1', dir=dir)
+        file1 = os.fdopen(fd, 'w+b')
+        file1.write('a' * (2 ** 21))
+        file1.seek(0)
+
+        (fd, name2) = tempfile.mkstemp(suffix='.file2', dir=dir)
+        file2 = os.fdopen(fd, 'w+b')
+        file2.write('a' * (10 * 2 ** 20))
+        file2.seek(0)
+
+        post_data = {
+            'name': 'Ringo',
+            'file_field1': file1,
+            'file_field2': file2,
+        }
+
+        for key in post_data.keys():
+            try:
+                post_data[key + '_hash'] = sha.new(post_data[key].read()).hexdigest()
+                post_data[key].seek(0)
+            except AttributeError:
+                post_data[key + '_hash'] = sha.new(post_data[key]).hexdigest()
+
+        response = self.client.post('/test_client_regress/file_upload_verify/', post_data)
+
+        for name in (name1, name2):
+            try:
+                os.unlink(name)
+            except:
+                pass
+
+        self.assertEqual(response.status_code, 200)
+
+
 class LoginTests(TestCase):
     fixtures = ['testdata']
tests/regressiontests/test_client_regress/urls.py
 urlpatterns = patterns('',
     (r'^no_template_view/$', views.no_template_view),
     (r'^file_upload/$', views.file_upload_view),
+    (r'^file_upload_verify/$', views.file_upload_view_verify),
     (r'^get_view/$', views.get_view),
     url(r'^arg_view/(?P<name>.+)/$', views.view_with_argument, name='arg_view'),
     (r'^login_protected_redirect_view/$', views.login_protected_redirect_view)
AUTHORS
     Jökull Sólberg Auðunsson <jokullsolberg@gmail.com>
     Arthur <avandorp@gmail.com>
     David Avsajanishvili <avsd05@gmail.com>
-    axiak@mit.edu
+    Mike Axiak <axiak@mit.edu>
     Niran Babalola <niran@niran.org>
     Morten Bagai <m@bagai.com>
     Mikaël Barbero <mikael.barbero nospam at nospam free.fr>
…
     Marc Fargas <telenieko@telenieko.com>
     Szilveszter Farkas <szilveszter.farkas@gmail.com>
     favo@exoweb.net
+    fdr <drfarina@gmail.com>
     Dmitri Fedortchenko <zeraien@gmail.com>
     Bill Fenner <fenner@gmail.com>
     Stefane Fermgier <sf@fermigier.com>
docs/settings.txt
 The character encoding used to decode any files read from disk. This includes
 template files and initial SQL data files.

+FILE_UPLOAD_TEMP_DIR
+--------------------
+
+**New in Django development version**
+
+Default: ``None``
+
+The directory to store data temporarily while uploading files. If ``None``,
+Django will use the standard temporary directory for the operating system.
+For example, this will default to '/tmp' on *nix-style operating systems.
+
 FIXTURE_DIRS
 -------------
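Note on the FILE_UPLOAD_TEMP_DIR entry added above: a deployment that keeps upload scratch space on its own volume would override it in settings.py, for example (the path is illustrative only)::

    FILE_UPLOAD_TEMP_DIR = '/var/tmp/django_uploads'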