* Removed the IO duping in favor of a full filehandle replacement when
   the unexpected happens to handles in naughty filters.  This also
   removes the need to maintain body keys.

* Traded ThingFish::Request#get_body_and_metadata for
  ThingFish::Request#entity_bodies as the public interface to all
  top-level bodies in a given request.
M lib/thingfish/daemon.rb +1 -1
@@ 507,8 507,8 @@ class ThingFish::Daemon < Mongrel::HttpS
 
 		handlers.each do |handler|
 			response.handlers << handler
+			request.check_body_ios
 			handler.process( request, response )
-			request.check_body_ios
 			break if response.is_handled? || client.closed?
 		end
 	end

          
M lib/thingfish/exceptions.rb +27 -3
@@ 55,7 55,31 @@ module ThingFish
 	# Error in an instance of the client
 	class ClientError < ThingFish::Error; end
 
-	# Something was wrong with a request
+	# 500: The server was unable to handle the request even though it was valid
+	class ServerError < ThingFish::Error
+		include ThingFish::Constants
+		
+		def initialize( *args )
+			super
+			@status = HTTP::SERVER_ERROR
+		end
+		
+		attr_reader :status
+	end
+	
+	# 501: We received a request that we don't quite know how to handle.
+	class NotImplementedError < ThingFish::ServerError
+		include ThingFish::Constants
+		
+		def initialize( *args )
+			super
+			@status = HTTP::NOT_IMPLEMENTED
+		end
+		
+		attr_reader :status
+	end
+	
+	# 400: Something was wrong with a request
 	class RequestError < ThingFish::Error
 		include ThingFish::Constants
 		

          
@@ 67,7 91,7 @@ module ThingFish
 		attr_reader :status
 	end
 
-	# Upload exceeded quota
+	# 413: Upload exceeded quota
 	class RequestEntityTooLargeError < ThingFish::RequestError
 		include ThingFish::Constants
 

          
@@ 77,7 101,7 @@ module ThingFish
 		end
 	end
 	
-	# Client requested a mimetype we don't know how to convert to
+	# 406: Client requested a mimetype we don't know how to convert to
 	class RequestNotAcceptableError < ThingFish::RequestError
 		include ThingFish::Constants
 

          
M lib/thingfish/handler/default.rb +7 -2
@@ 241,11 241,16 @@ class ThingFish::DefaultHandler < ThingF
 	### Handle a request to create a new resource with the request body as 
 	### data (POST to /)
 	def handle_create_request( request, response )
+		
+		if request.entity_bodies.length > 1
+			self.log.error "Can't handle multipart request (%p)" % [ request.entity_bodies ]
+			raise ThingFish::NotImplementedError, "multipart upload not implemented"
+		end
 
 		uuid = nil
-		
+
 		# Store the primary resource
-		body, metadata = request.get_body_and_metadata
+		body, metadata = request.entity_bodies.to_a.flatten
 		uuid = self.daemon.store_resource( body, metadata )
 	
 		# Store any related resources, linked to the primary

          
M lib/thingfish/mixins.rb +1 -1
@@ 347,7 347,7 @@ module ThingFish # :nodoc:
 			def virtual( *syms )
 				syms.each do |sym|
 					define_method( sym ) {
-						raise NotImplementedError,
+						raise ::NotImplementedError,
 							"%p does not provide an implementation of #%s" % [ self.class, sym ],
 							caller(1)
 					}

          
M lib/thingfish/request.rb +102 -134
@@ 84,7 84,6 @@ class ThingFish::Request
 		@profile         = false
 		@authed_user     = nil
 		
-		@body_key_mapping  = {}
 		@related_resources = Hash.new {|h,k| h[k] = {} }
 		@mongrel_request   = mongrel_request
 		@metadata          = Hash.new {|h,k| h[k] = {} }

          
@@ 261,35 260,6 @@ class ThingFish::Request
 	alias_method :is_multipart?, :has_multipart_body?
 
 
-	### Get the body IO and the merged hash of metadata
-	def get_body_and_metadata
-		raise ArgumentError, "Can't return a single body for a multipart request" if
-			self.has_multipart_body?
-		
-		default_metadata = {
-			:useragent     => self.headers[ :user_agent ],
-			:uploadaddress => self.remote_addr
-		}
-
-		# Read title out of the content-disposition
-		if self.headers[:content_disposition] &&
-			self.headers[:content_disposition] =~ /filename="(?:.*\\)?(.+?)"/i
-			default_metadata[ :title ] = $1
-		end
-		
-		extracted_metadata = self.metadata[ @mongrel_request.body ] || {}
-
-		# Content metadata is determined from http headers
-		merged = extracted_metadata.merge({
-			:format => self.content_type,
-			:extent => self.headers[ :content_length ],
-		})
-		merged.update( default_metadata )
-		
-		return @mongrel_request.body, merged
-	end
-	
-	
 	### Attach additional body and metadata information to the primary
 	### body, that will be stored with related_to metakeys.
 	### 

          
@@ 299,63 269,70 @@ class ThingFish::Request
 	###    The new resource body as an IO-like object
 	### +related_metadata+::
 	###    The metadata to attach to the new resource, as a Hash.
-	def append_related_resource( body, related_body, related_metadata={} )
-		# Convert the body to the key of the related resources hash
-		bodykey = self.make_body_key( body )
+	def append_related_resource( resource, related_resource, related_metadata={} )
+
+		unless @entity_bodies.key?( resource ) || @related_resources.key?( resource )
+			errmsg = "Cannot append %p related to %p: it is not a part of the request" % [
+				related_resource,
+				resource,
+		 	  ]
+			self.log.error( errmsg )
+			raise ThingFish::ResourceError, errmsg
+		end
+
+		related_metadata[:relation] ||= 'appended'
+		self.related_resources[ resource ][ related_resource ] = related_metadata
 		
-		unless original_body = @body_key_mapping[ bodykey ]
-			errmsg = "Cannot append a resource related to %p: %p isn't one of %p" % [
-				body,
-				bodykey,
-				@body_key_mapping.keys,
+		# Add the related_resource as a key so future checks are aware that
+		# it is part of this request
+		self.related_resources[ related_resource ] = {}
+	end
+	
+
+	### Append the specified additional +metadata+ for the given +resource+, which should be one
+	### of the entity bodies yielded by #each_body
+	def append_metadata_for( resource, metadata )
+
+		unless @entity_bodies.key?( resource ) || @related_resources.key?( resource )
+			errmsg = "Cannot append metadata related to %p: it is not a part of the request" % [
+				resource,
 		 	  ]
 			self.log.error( errmsg )
 			raise ThingFish::ResourceError, errmsg
 		end
 
-		related_bodykey = self.make_body_key( related_body )
-		@body_key_mapping[ related_bodykey ] = related_body
+		self.metadata[ resource ].merge!( metadata )
+	end
 
-		related_metadata[:relation] ||= 'appended'
-		self.related_resources[ original_body ][ related_body ] = related_metadata
-	end
-	
 	
-	### Append the specified additional +metadata+ for the given +resource+, which should be one
-	### of the entity bodies yielded by #each_body
-	###
-	### TODO: Do we need this method after bodykey removal?
-	###
-	def append_metadata_for( resource, metadata )
-		# Convert the body to the key of the related resources hash
-		bodykey = self.make_body_key( resource )
-		
-		unless original_body = @body_key_mapping[ bodykey ]
-			errmsg = "Cannot append metadata related to %p(%p): %p isn't one of %p" % [
-				body,
-				bodykey,
-				resource,
-				@body_key_mapping.keys,
-		 	  ]
-			self.log.error( errmsg )
-			raise ThingFish::ResourceError, errmsg
+	### Returns the entity bodies of the request along with any related metadata as
+	### a Hash:
+	### {
+	###    <body io> => { <body metadata> },
+	###    ...
+	### }
+	def entity_bodies
+		# Parse the request's body parts if they aren't already
+		unless @entity_bodies
+			if self.has_multipart_body?
+				self.log.debug "Parsing multiple entity bodies."
+				@entity_bodies, @form_metadata = self.parse_multipart_body
+			else
+				self.log.debug "Parsing single entity body."
+				body, metadata = self.get_body_and_metadata
+				
+				@entity_bodies = { body => metadata }
+				@form_metadata = {}
+			end
+
+			self.log.debug "Parsed %d bodies and %d form_metadata (%p)" % 
+				[@entity_bodies.length, @form_metadata.length, @form_metadata.keys]
 		end
 
-		self.metadata[ original_body ].merge!( metadata )
+		return @entity_bodies
 	end
-	
-	
-	### Generate a key based on the body object that will be the same even after duplication. This
-	### is used to work around our workaround for StringIO's behavior when #dup'ed.
-	def make_body_key( body )
-		if body.respond_to?( :string )
-			return Digest::MD5.hexdigest( body.string )
-		else
-			return "%s:%d" % [ body.path, body.object_id * 2 ]
-		end
-	end
-	
-	
+
+
 	### Call the provided block once for each entity body of the request, which may
 	### be multiple times in the case of a multipart request. If +include_appended_resources+ 
 	### is +true+, any resources which have been appended will be yielded immediately after the

          
@@ 384,19 361,29 @@ class ThingFish::Request
 	
 	### Check the body IO objects to ensure they're still open.
 	def check_body_ios
-		[ self.entity_bodies, self.related_resources ].each do |hash|
-			hash.each do |body, _|
-				if body.closed?
-					
-					# TODO asap:  :)
-					# substitute body for a fresh and clean filehandle,
-					# since filehandle closure has been such a poopy problem
-					# in the past.  this will remove the need for bodykeys, as well.
-					
-					self.log.warn "Entity body closed: %p" % [ body ]
-					body.open 
+		self.each_body do |body,_|
+			if body.closed?
+				self.log.warn "Body IO unexpectedly closed -- reopening a new handle"
+								
+				# Create a new IO based on what the original type was
+				clone = case body
+					when StringIO
+						StringIO.new( body.string )
+					else
+						File.open( body.path, 'r' )
+					end
+				
+				# Retain the original IO's metadata
+				@entity_bodies[ clone ] = @entity_bodies.delete( body ) if @entity_bodies.key?( body )
+				@related_resources[ clone ] = @related_resources.delete( body ) if @related_resources.key?( body )
+				@related_resources.each do |_,hash|
+					hash[ clone ] = hash.delete( body ) if hash.key?( body )
 				end
-				
+
+				self.log.debug "Body %p (%d) replaced with %p (%d)" % [ 
+					body, body.object_id, clone, clone.object_id
+				]
+			else
 				body.rewind
 			end
 		end

          
@@ 511,42 498,35 @@ class ThingFish::Request
 	protected
 	#########
 
-	### Returns the entity bodies of the request along with any related metadata as
-	### a Hash:
-	### {
-	###    <body io> => { <body metadata> },
-	###    ...
-	### }
-	def entity_bodies
-		# Parse the request's body parts if they aren't already
-		unless @entity_bodies
-			if self.has_multipart_body?
-				self.log.debug "Parsing multiple entity bodies."
-				@entity_bodies, @form_metadata = self.parse_multipart_body
-			else
-				self.log.debug "Parsing single entity body."
-				body, metadata = self.get_body_and_metadata
-				
-				@entity_bodies = { body => metadata }
-				@form_metadata = {}
-			end
+	### Get the body IO and the merged hash of metadata
+	def get_body_and_metadata
+		raise ArgumentError, "Can't return a single body for a multipart request" if
+			self.has_multipart_body?
+		
+		default_metadata = {
+			:useragent     => self.headers[ :user_agent ],
+			:uploadaddress => self.remote_addr
+		}
 
-			# Generate keys for each body that can be used to map IO copies given to filters
-			# back to the original body.
-			@entity_bodies.each do |body, _|
-				bodykey = self.make_body_key( body )
-				@body_key_mapping[ bodykey ] = body
-				self.log.debug "Made body key %p from body %p" % [ bodykey, body ]
-			end
+		# Read title out of the content-disposition
+		if self.headers[:content_disposition] &&
+			self.headers[:content_disposition] =~ /filename="(?:.*\\)?(.+?)"/i
+			default_metadata[ :title ] = $1
+		end
+		
+		extracted_metadata = self.metadata[ @mongrel_request.body ] || {}
 
-			self.log.debug "Parsed %d bodies and %d form_metadata (%p)" % 
-				[@entity_bodies.length, @form_metadata.length, @form_metadata.keys]
-		end
-
-		return @entity_bodies
+		# Content metadata is determined from http headers
+		merged = extracted_metadata.merge({
+			:format => self.content_type,
+			:extent => self.headers[ :content_length ],
+		})
+		merged.update( default_metadata )
+		
+		return @mongrel_request.body, merged
 	end
-
-
+	
+	
 	### For each resource => metadata pair returned by the current +iterator+, merge the
 	### toplevel metadata with the resource-specific metadata and pass both to the
 	### block.

          
@@ 555,8 535,6 @@ class ThingFish::Request
 		
 		# Call the block for every resource
 		iterator.each do |body, body_metadata|
-			self.log.debug "Prepping %s for yield with metadata: %p" %
-				[ body, body_metadata ]
 			body_metadata[ :format ] ||= DEFAULT_CONTENT_TYPE
 			extracted_metadata = self.metadata[body] || {}
 			

          
@@ 565,17 543,7 @@ class ThingFish::Request
 			merged.update( body_metadata )
 			merged.update( immutable_metadata )
 
-			# We have to explicitly case this because StringIO doesn't behave like a
-			# real IO when #dup'ed; closing the original also closes the copy.
-			clone = case body
-				when StringIO
-					StringIO.new( body.string )
-				else
-					body.dup
-				end
-
-			@body_key_mapping[ self.make_body_key(clone) ] = body
-			block.call( clone, merged )
+			block.call( body, merged )
 			
 			# Recurse if the appended resources should be included
 			if include_appended

          
M spec/thingfish/handler/default_spec.rb +14 -4
@@ 108,10 108,9 @@ describe ThingFish::DefaultHandler do
 		metadata = stub( "metadata hash from client" )
 
 		full_metadata = mock( "metadata fetched from the store", :null_object => true )
-		@metastore.should_receive( :get_properties ).
-			and_return( full_metadata )
+		@metastore.should_receive( :get_properties ).and_return( full_metadata )
 
-		@request.should_receive( :get_body_and_metadata ).and_return([ body, metadata ])
+		@request.should_receive( :entity_bodies ).twice.and_return({ body => metadata })
 
 		@response_headers.should_receive( :[]= ).
 			with( :location, %r{/#{TEST_UUID}} )

          
@@ 136,7 135,7 @@ describe ThingFish::DefaultHandler do
 		body = StringIO.new( TEST_CONTENT )
 		md = stub( "metadata hash" )
 		
-		@request.should_receive( :get_body_and_metadata ).and_return([ body, md ])
+		@request.should_receive( :entity_bodies ).twice.and_return({ body => md })
 		@daemon.should_receive( :store_resource ).
 			with( body, md ).
 			and_return { raise ThingFish::FileStoreQuotaError, "too NARROW, sucka!" }

          
@@ 175,6 174,17 @@ describe ThingFish::DefaultHandler do
 	end
 	
 
+	it "sends a NOT_IMPLEMENTED response for multipart POST to /" do
+		uri = URI.parse( "http://thingfish.laika.com:3474/" )
+		@request.should_receive( :uri ).at_least( :once ).and_return( uri )
+
+		@request.should_receive( :entity_bodies ).twice.and_return({ :body1 => :md1, :body2 => :md2 })
+
+		lambda {
+			@handler.handle_post_request( @request, @response )
+		}.should raise_error( ThingFish::NotImplementedError, /not implemented/ )
+	end
+
 	
 
 	### GET /«UUID» request tests

          
M spec/thingfish/request_spec.rb +33 -105
@@ 219,12 219,6 @@ describe ThingFish::Request do
 		}
 
 		upload = mock( "Mock Upload Tempfile" )
-		upload.should_receive( :path ).and_return( TEMPFILE_PATH )
-		duped_upload = mock( "Mock Upload Tempfile duplicate" )
-		duped_upload.should_receive( :path ).at_least( :once ).and_return( TEMPFILE_PATH )
-		
-		upload.should_receive( :dup ).and_return( duped_upload )
-		
 		@mongrel_request.stub!( :params ).and_return( params )
 		@mongrel_request.stub!( :body ).and_return( upload )
 		request = ThingFish::Request.new( @mongrel_request, @config )

          
@@ 265,6 259,7 @@ describe ThingFish::Request do
 
 		request.each_body do |body, metadata|
 			request.append_related_resource( body, generated_resource, metadata )
+			ThingFish.logger.debug "Request related resources is now: %p" % [ request.related_resources ]
 			request.append_related_resource( generated_resource, sub_generated_resource, sub_metadata )
 		end
 

          
@@ 655,13 650,9 @@ describe ThingFish::Request do
 		it "sends IO bodies as well as appended resources with merged metadata to the block " +
 		   "of the resource iterator" do
 			io1 = mock( "filehandle 1" )
-			io1_dup = mock( "duplicated filehandle 1" )
-
 			io2 = mock( "filehandle 2" )
-			io2_dup = mock( "duplicated filehandle 2" )
 
 			resource1 = mock( "extracted body 1" )
-			resource1_dup = mock( "duplicated extracted body 1" )
 
 			parser = mock( "multipart parser", :null_object => true )
 			entity_bodies = {

          
@@ 680,58 671,43 @@ describe ThingFish::Request do
 				with( :body, 'greatgoatsofgerta' ).
 				and_return([ entity_bodies, form_metadata ])
 
-			io1.should_receive( :dup ).at_least(:once).and_return( io1_dup )
-			io1.stub!( :path ).and_return( :a_path )
-			io1_dup.stub!( :path ).and_return( :a_path )
-			io2.should_receive( :dup ).at_least(:once).and_return( io2_dup )
-			io2.stub!( :path ).and_return( :another_path )
-			io2_dup.stub!( :path ).and_return( :another_path )
-			resource1.should_receive( :dup ).at_least(:once).and_return( resource1_dup )
-			resource1.stub!( :path ).and_return( :a_third_path )
-			resource1_dup.stub!( :path ).and_return( :a_third_path )
-
 			yielded_pairs = {}
 			@request.each_body( true ) do |res, parsed_metadata|
-				if res == io1_dup
+				if res == io1
 					thumb_metadata = {
 						:relation => 'thumbnail',
 						:format   => 'image/jpeg',
 						:title    => 'filename1_thumb.jpg',
 					  }
-					@request.append_related_resource( io1_dup, resource1, thumb_metadata )
+					@request.append_related_resource( io1, resource1, thumb_metadata )
 				end
 					
 				yielded_pairs[ res ] = parsed_metadata
 			end
 
 			yielded_pairs.keys.should have(3).members
-			yielded_pairs.keys.should include( io1_dup )
-			yielded_pairs.keys.should include( io2_dup )
-			yielded_pairs.keys.should include( resource1_dup )
+			yielded_pairs.keys.should include( io1, io2, resource1 )
 
-			yielded_pairs[ io1_dup ][ :title ].should == 'filename1'
-			yielded_pairs[ io1_dup ][ :format ].should == 'format1'
-			yielded_pairs[ io1_dup ][ :useragent ].should == "Hotdogs"
-			yielded_pairs[ io1_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )
+			yielded_pairs[ io1 ][ :title ].should == 'filename1'
+			yielded_pairs[ io1 ][ :format ].should == 'format1'
+			yielded_pairs[ io1 ][ :useragent ].should == "Hotdogs"
+			yielded_pairs[ io1 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )
 
-			yielded_pairs[ io2_dup ][ :title ].should == 'filename2'
-			yielded_pairs[ io2_dup ][ :format ].should == "format2"
-			yielded_pairs[ io2_dup ][ :useragent ].should == "Hotdogs"
-			yielded_pairs[ io2_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
+			yielded_pairs[ io2 ][ :title ].should == 'filename2'
+			yielded_pairs[ io2 ][ :format ].should == "format2"
+			yielded_pairs[ io2 ][ :useragent ].should == "Hotdogs"
+			yielded_pairs[ io2 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
 
-			yielded_pairs[ resource1_dup ][ :title ].should == 'filename1_thumb.jpg'
-			yielded_pairs[ resource1_dup ][ :format ].should == 'image/jpeg'
-			yielded_pairs[ resource1_dup ][ :useragent ].should == "Hotdogs"
-			yielded_pairs[ resource1_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
+			yielded_pairs[ resource1 ][ :title ].should == 'filename1_thumb.jpg'
+			yielded_pairs[ resource1 ][ :format ].should == 'image/jpeg'
+			yielded_pairs[ resource1 ][ :useragent ].should == "Hotdogs"
+			yielded_pairs[ resource1 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
 		end
 	
 		it "sends each IO body entity of the request and a copy of the merged metadata to " +
 			"the block of the body iterator" do
 			io1 = mock( "filehandle 1" )
-			io1_dup = mock( "duplicated filehandle 1" )
-			
 			io2 = mock( "filehandle 2" )
-			io2_dup = mock( "duplicated filehandle 2" )
 
 			parser = mock( "multipart parser", :null_object => true )
 			entity_bodies = {

          
@@ 750,70 726,30 @@ describe ThingFish::Request do
 				with( :body, 'greatgoatsofgerta' ).
 				and_return([ entity_bodies, form_metadata ])
 			
-			io1.should_receive( :dup ).and_return( io1_dup )
-			io1.stub!( :path ).and_return( :a_path )
-			io1_dup.stub!( :path ).and_return( :another_path )
-			io2.should_receive( :dup ).and_return( io2_dup )
-			io2.stub!( :path ).and_return( :another_path )
-			io2_dup.stub!( :path ).and_return( :another_path )
-		
 			yielded_pairs = {}
 			@request.each_body do |body, parsed_metadata|
 				yielded_pairs[ body ] = parsed_metadata
 			end
 		
 			yielded_pairs.keys.should have(2).members
-			yielded_pairs.keys.should include( io1_dup )
-			yielded_pairs.keys.should include( io2_dup )
+			yielded_pairs.keys.should include( io1, io2 )
 
-			yielded_pairs[ io1_dup ][ :title ].should == 'filename1'
-			yielded_pairs[ io1_dup ][ :format ].should == 'format1'
-			yielded_pairs[ io1_dup ][ :useragent ].should == "Hotdogs"
-			yielded_pairs[ io1_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )
+			yielded_pairs[ io1 ][ :title ].should == 'filename1'
+			yielded_pairs[ io1 ][ :format ].should == 'format1'
+			yielded_pairs[ io1 ][ :useragent ].should == "Hotdogs"
+			yielded_pairs[ io1 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )
 
-			yielded_pairs[ io2_dup ][ :title ].should == 'filename2'
-			yielded_pairs[ io2_dup ][ :format ].should == "format2"
-			yielded_pairs[ io2_dup ][ :useragent ].should == "Hotdogs"
-			yielded_pairs[ io2_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
+			yielded_pairs[ io2 ][ :title ].should == 'filename2'
+			yielded_pairs[ io2 ][ :format ].should == "format2"
+			yielded_pairs[ io2 ][ :useragent ].should == "Hotdogs"
+			yielded_pairs[ io2 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
 		end
 	
 
-		it "creates distinct duplicates for StringIO bodies" do
-			io1 = StringIO.new("foom!")
-			io2 = StringIO.new("DOOOOOM")
-			
-			parser = mock( "multipart parser", :null_object => true )
-			entity_bodies = {
-				io1 => {:title  => "filename1",:format => "format1",:extent => 100292},
-				io2 => {:title  => "filename2",:format => "format2",:extent => 100234}
-			  }
-			form_metadata = {
-				'foo' => 1,
-				:title => "a bogus filename",
-				:useragent => 'Clumpy the Clown',
-			  }
-
-			ThingFish::MultipartMimeParser.stub!( :new ).and_return( parser )
-			@mongrel_request.should_receive( :body ).once.and_return( :body )
-			parser.should_receive( :parse ).once.
-				with( :body, 'greatgoatsofgerta' ).
-				and_return([ entity_bodies, form_metadata ])
-			
-			@request.each_body do |body, parsed_metadata|
-				body.read     # modify the pointer on the duped StringIO
-			end
-		
-			io1.pos.should == 0
-			io2.pos.should == 0
-		end
-		
-		
 		it "ensures each part sent to the body has the default content-type " +
 		   "if none is explicitly provided by the request" do
 			io1 = mock( "filehandle 1" )
-			io1_dup = mock( "duplicated filehandle 1" )
 			io2 = mock( "filehandle 2" )
-			io2_dup = mock( "duplicated filehandle 2" )
 
 			parser = mock( "multipart parser", :null_object => true )
 			entity_bodies = {

          
@@ 832,29 768,21 @@ describe ThingFish::Request do
 				with( :body, 'greatgoatsofgerta' ).
 				and_return([ entity_bodies, form_metadata ])
 
-			io1.should_receive( :dup ).and_return( io1_dup )
-			io1.stub!( :path ).and_return( :a_path )
-			io1_dup.stub!( :path ).and_return( :a_path )
-			io2.should_receive( :dup ).and_return( io2_dup )
-			io2.stub!( :path ).and_return( :another_path )
-			io2_dup.stub!( :path ).and_return( :another_path )
-
 			yielded_pairs = {}
 			@request.each_body do |body, parsed_metadata|
 				yielded_pairs[ body ] = parsed_metadata
 			end
 
 			yielded_pairs.keys.should have(2).members
-			yielded_pairs.keys.should include( io1_dup )
-			yielded_pairs.keys.should include( io2_dup )
+			yielded_pairs.keys.should include( io1, io2 )
 
-			yielded_pairs[ io1_dup ][ :title ].should == 'filename1'
-			yielded_pairs[ io1_dup ][ :format ].should == DEFAULT_CONTENT_TYPE
-			yielded_pairs[ io1_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )
-			yielded_pairs[ io2_dup ][ :title ].should == 'filename2'
-			yielded_pairs[ io2_dup ][ :format ].should == DEFAULT_CONTENT_TYPE
-			yielded_pairs[ io2_dup ][ :useragent ].should == "Hotdogs"
-			yielded_pairs[ io2_dup ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
+			yielded_pairs[ io1 ][ :title ].should == 'filename1'
+			yielded_pairs[ io1 ][ :format ].should == DEFAULT_CONTENT_TYPE
+			yielded_pairs[ io1 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )
+			yielded_pairs[ io2 ][ :title ].should == 'filename2'
+			yielded_pairs[ io2 ][ :format ].should == DEFAULT_CONTENT_TYPE
+			yielded_pairs[ io2 ][ :useragent ].should == "Hotdogs"
+			yielded_pairs[ io2 ][ :uploadaddress ].should == IPAddr.new( '127.0.0.1' )	
 		end
 	
 	end