[web] consider inlined relations in has_editable_relation. Closes #3049970
In some cases where the user can only add or edit inlined relations (though
the edit permissions of the inlined relation don't actually seem to be
checked), the "modify" action doesn't appear even though it should.
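Roughly, the intent of the fix is that relations inlined into the entity's edit form should also count as "editable" when deciding whether to show the "modify" action. The self-contained sketch below only illustrates that idea; the RelDef structure and the has_editable_relation signature are hypothetical and not CubicWeb's actual API.

# Illustrative sketch only -- names and structures are hypothetical,
# not the actual CubicWeb implementation of has_editable_relation.
from collections import namedtuple

# A toy relation definition: 'inlined' relations are edited inside the
# parent entity's form instead of through their own edit view.
RelDef = namedtuple('RelDef', 'name inlined may_add may_update')

def has_editable_relation(reldefs):
    """Return True if at least one relation can be edited, counting
    inlined relations as editable when the user may add them."""
    for rdef in reldefs:
        if rdef.may_update:
            return True
        # the point of the fix: an inlined relation the user may add
        # also makes the "modify" action relevant
        if rdef.inlined and rdef.may_add:
            return True
    return False

if __name__ == '__main__':
    # only an inlined, addable relation -> the action should still show up
    rels = [RelDef('use_email', inlined=True, may_add=True, may_update=False)]
    print(has_editable_relation(rels))  # True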
# This file has been extracted from the abandoned TwistedWeb2 project
# http://twistedmatrix.com/trac/wiki/TwistedWeb2

from __future__ import generators

import types, time
from calendar import timegm
import base64
import re

def dashCapitalize(s):
    ''' Capitalize a string, making sure to treat - as a word separator '''
    return '-'.join([x.capitalize() for x in s.split('-')])

# datetime parsing and formatting
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
weekdayname_lower = [name.lower() for name in weekdayname]
monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
monthname_lower = [name and name.lower() for name in monthname]

# HTTP Header parsing API

header_case_mapping = {}

def casemappingify(d):
    global header_case_mapping
    newd = dict([(key.lower(), key) for key in d])
    header_case_mapping.update(newd)

def lowerify(d):
    return dict([(key.lower(), value) for key, value in d.items()])


class HeaderHandler(object):
    """HeaderHandler manages header generating and parsing functions.
    """
    HTTPParsers = {}
    HTTPGenerators = {}

    def __init__(self, parsers=None, generators=None):
        """
        @param parsers: A map of header names to parsing functions.
        @type parsers: L{dict}

        @param generators: A map of header names to generating functions.
        @type generators: L{dict}
        """
        if parsers:
            self.HTTPParsers.update(parsers)
        if generators:
            self.HTTPGenerators.update(generators)

    def parse(self, name, header):
        """
        Parse the given header based on its given name.

        @param name: The header name to parse.
        @type name: C{str}

        @param header: A list of unparsed headers.
        @type header: C{list} of C{str}

        @return: The return value is the parsed header representation,
            it is dependent on the header.  See the HTTP Headers document.
        """
        parser = self.HTTPParsers.get(name, None)
        if parser is None:
            raise ValueError("No header parser for header '%s', either add one or use getHeaderRaw." % (name,))

        try:
            for p in parser:
                # print "Parsing %s: %s(%s)" % (name, repr(p), repr(h))
                header = p(header)
                # if isinstance(h, types.GeneratorType):
                #     h = list(h)
        except ValueError as v:
            # print v
            header = None

        return header

    def generate(self, name, header):
        """
        Generate the given header based on its given name.

        @param name: The header name to generate.
        @type name: C{str}

        @param header: A parsed header, such as the output of
            L{HeaderHandler}.parse.

        @return: C{list} of C{str} each representing a generated HTTP header.
        """
        generator = self.HTTPGenerators.get(name, None)

        if generator is None:
            # print self.generators
            raise ValueError("No header generator for header '%s', either add one or use setHeaderRaw." % (name,))

        for g in generator:
            header = g(header)
        # self._raw_headers[name] = h
        return header

    def updateParsers(self, parsers):
        """Update en masse the parser maps.

        @param parsers: Map of header names to parser chains.
        @type parsers: C{dict}
        """
        casemappingify(parsers)
        self.HTTPParsers.update(lowerify(parsers))

    def addParser(self, name, value):
        """Add an individual parser chain for the given header.

        @param name: Name of the header to add
        @type name: C{str}

        @param value: The parser chain
        @type value: C{str}
        """
        self.updateParsers({name: value})

    def updateGenerators(self, generators):
        """Update en masse the generator maps.

        @param generators: Map of header names to generator chains.
        @type generators: C{dict}
        """
        casemappingify(generators)
        self.HTTPGenerators.update(lowerify(generators))

    def addGenerators(self, name, value):
        """Add an individual generator chain for the given header.
        @param name: Name of the header to add
        @type name: C{str}

        @param value: The generator chain
        @type value: C{str}
        """
        self.updateGenerators({name: value})

    def update(self, parsers, generators):
        """Conveniently update parsers and generators all at once.
        """
        self.updateParsers(parsers)
        self.updateGenerators(generators)


DefaultHTTPHandler = HeaderHandler()


## HTTP DateTime parser

def parseDateTime(dateString):
    """Convert an HTTP date string (one of three formats) to seconds since epoch."""
    parts = dateString.split()

    if not parts[0][0:3].lower() in weekdayname_lower:
        # Weekday is stupid. Might have been omitted.
        try:
            return parseDateTime("Sun, " + dateString)
        except ValueError:
            # Guess not.
            pass

    partlen = len(parts)
    if (partlen == 5 or partlen == 6) and parts[1].isdigit():
        # 1st date format: Sun, 06 Nov 1994 08:49:37 GMT
        # (Note: "GMT" is literal, not a variable timezone)
        # (also handles without "GMT")
        # This is the normal format
        day = parts[1]
        month = parts[2]
        year = parts[3]
        time = parts[4]
    elif (partlen == 3 or partlen == 4) and parts[1].find('-') != -1:
        # 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT
        # (Note: "GMT" is literal, not a variable timezone)
        # (also handles without "GMT")

        # Two digit year, yucko.
        day, month, year = parts[1].split('-')
        time = parts[2]
        year = int(year)
        if year < 69:
            year = year + 2000
        elif year < 100:
            year = year + 1900
    elif len(parts) == 5:
        # 3rd date format: Sun Nov  6 08:49:37 1994
        # ANSI C asctime() format.
        day = parts[2]
        month = parts[1]
        year = parts[4]
        time = parts[3]
    else:
        raise ValueError("Unknown datetime format %r" % dateString)

    day = int(day)
    month = int(monthname_lower.index(month.lower()))
    year = int(year)
    hour, min, sec = map(int, time.split(':'))
    return int(timegm((year, month, day, hour, min, sec)))


##### HTTP tokenizer

class Token(str):
    __slots__ = []
    tokens = {}

    def __new__(self, char):
        token = Token.tokens.get(char)
        if token is None:
            Token.tokens[char] = token = str.__new__(self, char)
        return token

    def __repr__(self):
        return "Token(%s)" % str.__repr__(self)


http_tokens = " \t\"()<>@,;:\\/[]?={}"
http_ctls = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f"


def tokenize(header, foldCase=True):
    """Tokenize a string according to normal HTTP header parsing rules.

    In particular:
     - Whitespace is irrelevant and eaten next to special separator
       tokens.  Its existence (but not amount) is important between
       character strings.
     - Quoted string support including embedded backslashes.
     - Case is insignificant (and thus lowercased), except in quoted
       strings. (unless foldCase=False)
     - Multiple headers are concatenated with ','

    NOTE: not all headers can be parsed with this function.

    Takes a raw header value (list of strings), and
    Returns a generator of strings and Token class instances.
"""tokens=http_tokensctls=http_ctlsstring=",".join(header)list=[]start=0cur=0quoted=Falseqpair=FalseinSpaces=-1qstring=Noneforxinstring:ifquoted:ifqpair:qpair=Falseqstring=qstring+string[start:cur-1]+xstart=cur+1elifx=='\\':qpair=Trueelifx=='"':quoted=Falseyieldqstring+string[start:cur]qstring=Nonestart=cur+1elifxintokens:ifstart!=cur:iffoldCase:yieldstring[start:cur].lower()else:yieldstring[start:cur]start=cur+1ifx=='"':quoted=Trueqstring=""inSpaces=Falseelifxin" \t":ifinSpacesisFalse:inSpaces=Trueelse:inSpaces=-1yieldToken(x)elifxinctls:raiseValueError("Invalid control character: %d in header"%ord(x))else:ifinSpacesisTrue:yieldToken(' ')inSpaces=FalseinSpaces=Falsecur=cur+1ifqpair:raiseValueError,"Missing character after '\\'"ifquoted:raiseValueError,"Missing end quote"ifstart!=cur:iffoldCase:yieldstring[start:cur].lower()else:yieldstring[start:cur]defsplit(seq,delim):"""The same as str.split but works on arbitrary sequences. Too bad it's not builtin to python!"""cur=[]foriteminseq:ifitem==delim:yieldcurcur=[]else:cur.append(item)yieldcur# def find(seq, *args):# """The same as seq.index but returns -1 if not found, instead# Too bad it's not builtin to python!"""# try:# return seq.index(value, *args)# except ValueError:# return -1deffilterTokens(seq):"""Filter out instances of Token, leaving only a list of strings. Used instead of a more specific parsing method (e.g. splitting on commas) when only strings are expected, so as to be a little lenient. Apache does it this way and has some comments about broken clients which forget commas (?), so I'm doing it the same way. It shouldn't hurt anything, in any case. """l=[]forxinseq:ifnotisinstance(x,Token):l.append(x)returnl##### parser utilities:defcheckSingleToken(tokens):iflen(tokens)!=1:raiseValueError,"Expected single token, not %s."%(tokens,)returntokens[0]defparseKeyValue(val):iflen(val)==1:returnval[0],Noneeliflen(val)==3andval[1]==Token('='):returnval[0],val[2]raiseValueError,"Expected key or key=value, but got %s."%(val,)defparseArgs(field):args=split(field,Token(';'))val=args.next()args=[parseKeyValue(arg)forarginargs]returnval,argsdeflistParser(fun):"""Return a function which applies 'fun' to every element in the comma-separated list"""deflistParserHelper(tokens):fields=split(tokens,Token(','))forfieldinfields:iflen(field)!=0:yieldfun(field)returnlistParserHelperdeflast(seq):"""Return seq[-1]"""returnseq[-1]##### Generation utilitiesdefquoteString(s):return'"%s"'%s.replace('\\','\\\\').replace('"','\\"')deflistGenerator(fun):"""Return a function which applies 'fun' to every element in the given list, then joins the result with generateList"""deflistGeneratorHelper(l):returngenerateList([fun(e)foreinl])returnlistGeneratorHelperdefgenerateList(seq):return", ".join(seq)defsingleHeader(item):return[item]defgenerateKeyValues(kvs):l=[]# print kvsfork,vinkvs:ifvisNone:l.append('%s'%k)else:l.append('%s=%s'%(k,v))return";".join(l)classMimeType(object):deffromString(klass,mimeTypeString):"""Generate a MimeType object from the given string. 
        @param mimeTypeString: The mimetype to parse

        @return: L{MimeType}
        """
        return DefaultHTTPHandler.parse('content-type', [mimeTypeString])

    fromString = classmethod(fromString)

    def __init__(self, mediaType, mediaSubtype, params={}, **kwargs):
        """
        @type mediaType: C{str}
        @type mediaSubtype: C{str}
        @type params: C{dict}
        """
        self.mediaType = mediaType
        self.mediaSubtype = mediaSubtype
        self.params = dict(params)

        if kwargs:
            self.params.update(kwargs)

    def __eq__(self, other):
        if not isinstance(other, MimeType):
            return NotImplemented
        return (self.mediaType == other.mediaType and
                self.mediaSubtype == other.mediaSubtype and
                self.params == other.params)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)

    def __hash__(self):
        return hash(self.mediaType) ^ hash(self.mediaSubtype) ^ hash(tuple(self.params.iteritems()))

##### Specific header parsers.

def parseAccept(field):
    type, args = parseArgs(field)

    if len(type) != 3 or type[1] != Token('/'):
        raise ValueError, "MIME Type " + str(type) + " invalid."

    # okay, this spec is screwy. A 'q' parameter is used as the separator
    # between MIME parameters and (as yet undefined) additional HTTP
    # parameters.
    num = 0
    for arg in args:
        if arg[0] == 'q':
            mimeparams = tuple(args[0:num])
            params = args[num:]
            break
        num = num + 1
    else:
        mimeparams = tuple(args)
        params = []

    # Default values for parameters:
    qval = 1.0

    # Parse accept parameters:
    for param in params:
        if param[0] == 'q':
            qval = float(param[1])
        else:
            # Warn? ignored parameter.
            pass

    ret = MimeType(type[0], type[2], mimeparams), qval
    return ret

def parseAcceptQvalue(field):
    type, args = parseArgs(field)

    type = checkSingleToken(type)

    qvalue = 1.0  # Default qvalue is 1
    for arg in args:
        if arg[0] == 'q':
            qvalue = float(arg[1])
    return type, qvalue


def addDefaultCharset(charsets):
    if charsets.get('*') is None and charsets.get('iso-8859-1') is None:
        charsets['iso-8859-1'] = 1.0
    return charsets

def addDefaultEncoding(encodings):
    if encodings.get('*') is None and encodings.get('identity') is None:
        # RFC doesn't specify a default value for identity, only that it
        # "is acceptable" if not mentioned.  Thus, give it a very low qvalue.
        encodings['identity'] = .0001
    return encodings


def parseContentType(header):
    # Case folding is disabled for this header, because of use of
    # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf
    # So, we need to explicitly .lower() the type/subtype and arg keys.
    type, args = parseArgs(header)

    if len(type) != 3 or type[1] != Token('/'):
        raise ValueError, "MIME Type " + str(type) + " invalid."

    args = [(kv[0].lower(), kv[1]) for kv in args]

    return MimeType(type[0].lower(), type[2].lower(), tuple(args))

def parseContentMD5(header):
    try:
        return base64.decodestring(header)
    except Exception as e:
        raise ValueError(e)

def parseContentRange(header):
    """Parse a content-range header into (kind, start, end, realLength).

    realLength might be None if real length is not known ('*').
    start and end might be None if start,end unspecified (for response code 416)
    """
    kind, other = header.strip().split()
    if kind.lower() != "bytes":
        raise ValueError("a range of type %r is not supported" % (kind,))
    startend, realLength = other.split("/")
    if startend.strip() == '*':
        start, end = None, None
    else:
        start, end = map(int, startend.split("-"))
    if realLength == "*":
        realLength = None
    else:
        realLength = int(realLength)
    return (kind, start, end, realLength)


def parseExpect(field):
    type, args = parseArgs(field)

    type = parseKeyValue(type)
    return (type[0], (lambda *args: args)(type[1], *args))

def parseExpires(header):
    # """HTTP/1.1 clients and caches MUST treat other invalid date formats,
    #    especially including the value 0, as in the past (i.e., "already expired")."""

    try:
        return parseDateTime(header)
    except ValueError:
        return 0

def parseIfModifiedSince(header):
    # Ancient versions of netscape and *current* versions of MSIE send
    #   If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123
    # which is blatantly RFC-violating and not documented anywhere
    # except bug-trackers for web frameworks.

    # So, we'll just strip off everything after a ';'.
    return parseDateTime(header.split(';', 1)[0])

def parseIfRange(headers):
    try:
        return ETag.parse(tokenize(headers))
    except ValueError:
        return parseDateTime(last(headers))

def parseRange(range):
    range = list(range)
    if len(range) < 3 or range[1] != Token('='):
        raise ValueError("Invalid range header format: %s" % (range,))

    type = range[0]
    if type != 'bytes':
        raise ValueError("Unknown range unit: %s." % (type,))
    rangeset = split(range[2:], Token(','))
    ranges = []

    for byterangespec in rangeset:
        if len(byterangespec) != 1:
            raise ValueError("Invalid range header format: %s" % (range,))
        start, end = byterangespec[0].split('-')

        if not start and not end:
            raise ValueError("Invalid range header format: %s" % (range,))

        if start:
            start = int(start)
        else:
            start = None

        if end:
            end = int(end)
        else:
            end = None

        if start and end and start > end:
            raise ValueError("Invalid range header, start > end: %s" % (range,))
        ranges.append((start, end))
    return type, ranges

def parseRetryAfter(header):
    try:
        # delta seconds
        return time.time() + int(header)
    except ValueError:
        # or datetime
        return parseDateTime(header)

# WWW-Authenticate and Authorization

def parseWWWAuthenticate(tokenized):
    headers = []

    tokenList = list(tokenized)

    while tokenList:
        scheme = tokenList.pop(0)
        challenge = {}
        last = None
        kvChallenge = False

        while tokenList:
            token = tokenList.pop(0)
            if token == Token('='):
                kvChallenge = True
                challenge[last] = tokenList.pop(0)
                last = None

            elif token == Token(','):
                if kvChallenge:
                    if len(tokenList) > 1 and tokenList[1] != Token('='):
                        break
                else:
                    break

            else:
                last = token

        if last and scheme and not challenge and not kvChallenge:
            challenge = last
            last = None

        headers.append((scheme, challenge))

    if last and last not in (Token('='), Token(',')):
        if headers[-1] == (scheme, challenge):
            scheme = last
            challenge = {}
            headers.append((scheme, challenge))

    return headers

def parseAuthorization(header):
    scheme, rest = header.split(' ', 1)
    # this header isn't tokenized because it may eat characters
    # in the unquoted base64 encoded credentials
    return scheme.lower(), rest

#### Header generators

def generateAccept(accept):
    mimeType, q = accept

    out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
    if mimeType.params:
        out += ';' + generateKeyValues(mimeType.params.iteritems())

    if q != 1.0:
        out += (';q=%.3f' % (q,)).rstrip('0').rstrip('.')

    return out

def removeDefaultEncoding(seq):
    for item in seq:
        if item[0] != 'identity' or item[1] != .0001:
            yield item

def generateAcceptQvalue(keyvalue):
    if keyvalue[1] == 1.0:
        return "%s" % keyvalue[0:1]
    else:
        return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.')

def parseCacheControl(kv):
    k, v = parseKeyValue(kv)
    if k == 'max-age' or k == 'min-fresh' or k == 's-maxage':
        # Required integer argument
        if v is None:
            v = 0
        else:
            v = int(v)
    elif k == 'max-stale':
        # Optional integer argument
        if v is not None:
            v = int(v)
    elif k == 'private' or k == 'no-cache':
        # Optional list argument
        if v is not None:
            v = [field.strip().lower() for field in v.split(',')]
    return k, v

def generateCacheControl((k, v)):
    if v is None:
        return str(k)
    else:
        if k == 'no-cache' or k == 'private':
            # quoted list of values
            v = quoteString(generateList(
                [header_case_mapping.get(name) or dashCapitalize(name) for name in v]))
        return '%s=%s' % (k, v)

def generateContentRange(tup):
    """tup is (type, start, end, len)
    len can be None.
    """
    type, start, end, len = tup
    if len == None:
        len = '*'
    else:
        len = int(len)

    if start == None and end == None:
        startend = '*'
    else:
        startend = '%d-%d' % (start, end)

    return '%s %s/%s' % (type, startend, len)

def generateDateTime(secSinceEpoch):
    """Convert seconds since epoch to HTTP datetime string."""
    # take care gmtime doesn't handle time before epoch (crash on windows at least)
    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(max(0, secSinceEpoch))
    s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        weekdayname[wd],
        day, monthname[month], year,
        hh, mm, ss)
    return s

def generateExpect(item):
    if item[1][0] is None:
        out = '%s' % (item[0],)
    else:
        out = '%s=%s' % (item[0], item[1][0])
    if len(item[1]) > 1:
        out += ';' + generateKeyValues(item[1][1:])
    return out

def generateRange(range):
    def noneOr(s):
        if s is None:
            return ''
        return s

    type, ranges = range

    if type != 'bytes':
        raise ValueError("Unknown range unit: " + type + ".")

    return (type + '=' +
            ','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1]))
                      for startend in ranges]))

def generateRetryAfter(when):
    # always generate delta seconds format
    return str(int(when - time.time()))

def generateContentType(mimeType):
    out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
    if mimeType.params:
        out += ';' + generateKeyValues(mimeType.params.iteritems())
    return out

def generateIfRange(dateOrETag):
    if isinstance(dateOrETag, ETag):
        return dateOrETag.generate()
    else:
        return generateDateTime(dateOrETag)

# WWW-Authenticate and Authorization

def generateWWWAuthenticate(headers):
    _generated = []
    for seq in headers:
        scheme, challenge = seq[0], seq[1]

        # If we're going to parse out to something other than a dict
        # we need to be able to generate from something other than a dict
        try:
            l = []
            for k, v in dict(challenge).iteritems():
                l.append("%s=%s" % (k, quoteString(v)))

            _generated.append("%s %s" % (scheme, ", ".join(l)))
        except ValueError:
            _generated.append("%s %s" % (scheme, challenge))

    return _generated

def generateAuthorization(seq):
    return [' '.join(seq)]


####

class ETag(object):
    def __init__(self, tag, weak=False):
        self.tag = str(tag)
        self.weak = weak

    def match(self, other, strongCompare):
        # Sec 13.3.
        # The strong comparison function: in order to be considered equal, both
        #   validators MUST be identical in every way, and both MUST NOT be weak.
        #
        # The weak comparison function: in order to be considered equal, both
        #   validators MUST be identical in every way, but either or both of
        #   them MAY be tagged as "weak" without affecting the result.
        if not isinstance(other, ETag) or other.tag != self.tag:
            return False

        if strongCompare and (other.weak or self.weak):
            return False
        return True

    def __eq__(self, other):
        return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "Etag(%r, weak=%r)" % (self.tag, self.weak)

    def parse(tokens):
        tokens = tuple(tokens)
        if len(tokens) == 1 and not isinstance(tokens[0], Token):
            return ETag(tokens[0])

        if (len(tokens) == 3 and tokens[0] == "w"
                and tokens[1] == Token('/')):
            return ETag(tokens[2], weak=True)

        raise ValueError("Invalid ETag.")
ETag.")parse=staticmethod(parse)defgenerate(self):ifself.weak:return'W/'+quoteString(self.tag)else:returnquoteString(self.tag)defparseStarOrETag(tokens):tokens=tuple(tokens)iftokens==('*',):return'*'else:returnETag.parse(tokens)defgenerateStarOrETag(etag):ifetag=='*':returnetagelse:returnetag.generate()#### Cookies. Blech!classCookie(object):# __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version']def__init__(self,name,value,path=None,domain=None,ports=None,expires=None,discard=False,secure=False,comment=None,commenturl=None,version=0):self.name=nameself.value=valueself.path=pathself.domain=domainself.ports=portsself.expires=expiresself.discard=discardself.secure=secureself.comment=commentself.commenturl=commenturlself.version=versiondef__repr__(self):s="Cookie(%r=%r"%(self.name,self.value)ifself.pathisnotNone:s+=", path=%r"%(self.path,)ifself.domainisnotNone:s+=", domain=%r"%(self.domain,)ifself.portsisnotNone:s+=", ports=%r"%(self.ports,)ifself.expiresisnotNone:s+=", expires=%r"%(self.expires,)ifself.secureisnotFalse:s+=", secure=%r"%(self.secure,)ifself.commentisnotNone:s+=", comment=%r"%(self.comment,)ifself.commenturlisnotNone:s+=", commenturl=%r"%(self.commenturl,)ifself.version!=0:s+=", version=%r"%(self.version,)s+=")"returnsdef__eq__(self,other):return(isinstance(other,Cookie)andother.path==self.pathandother.domain==self.domainandother.ports==self.portsandother.expires==self.expiresandother.secure==self.secureandother.comment==self.commentandother.commenturl==self.commenturlandother.version==self.version)def__ne__(self,other):returnnotself.__eq__(other)defparseCookie(headers):"""Bleargh, the cookie spec sucks. This surely needs interoperability testing. There are two specs that are supported: Version 0) http://wp.netscape.com/newsref/std/cookie_spec.html Version 1) http://www.faqs.org/rfcs/rfc2965.html """cookies=[]# There can't really be multiple cookie headers according to RFC, because# if multiple headers are allowed, they must be joinable with ",".# Neither new RFC2965 cookies nor old netscape cookies are.header=';'.join(headers)ifheader[0:8].lower()=="$version":# RFC2965 cookieh=tokenize([header],foldCase=False)r_cookies=split(h,Token(','))forr_cookieinr_cookies:last_cookie=Nonerr_cookies=split(r_cookie,Token(';'))forcookieinrr_cookies:nameval=tuple(split(cookie,Token('=')))iflen(nameval)==2:(name,),(value,)=namevalelse:(name,),=namevalvalue=Nonename=name.lower()ifname=='$version':continueifname[0]=='$':iflast_cookieisnotNone:ifname=='$path':last_cookie.path=valueelifname=='$domain':last_cookie.domain=valueelifname=='$port':ifvalueisNone:last_cookie.ports=()else:last_cookie.ports=tuple([int(s)forsinvalue.split(',')])else:last_cookie=Cookie(name,value,version=1)cookies.append(last_cookie)else:# Oldstyle cookies don't do quoted strings or anything sensible.# All characters are valid for names except ';' and '=', and all# characters are valid for values except ';'. 
        # Spaces are stripped, however.
        r_cookies = header.split(';')
        for r_cookie in r_cookies:
            name, value = r_cookie.split('=', 1)
            name = name.strip(' \t')
            value = value.strip(' \t')

            cookies.append(Cookie(name, value))

    return cookies

cookie_validname = "[^" + re.escape(http_tokens + http_ctls) + "]*$"
cookie_validname_re = re.compile(cookie_validname)
cookie_validvalue = cookie_validname + '|"([^"]|\\\\")*"$'
cookie_validvalue_re = re.compile(cookie_validvalue)

def generateCookie(cookies):
    # There's a fundamental problem with the two cookie specifications.
    # They both use the "Cookie" header, and the RFC Cookie header only allows
    # one version to be specified. Thus, when you have a collection of V0 and
    # V1 cookies, you have to either send them all as V0 or send them all as
    # V1.

    # I choose to send them all as V1.

    # You might think converting a V0 cookie to a V1 cookie would be lossless,
    # but you'd be wrong. If you do the conversion, and a V0 parser tries to
    # read the cookie, it will see a modified form of the cookie, in cases
    # where quotes must be added to conform to proper V1 syntax.
    # (as a real example: "Cookie: cartcontents=oid:94680,qty:1,auto:0,esp:y")

    # However, that is what we will do, anyways. It has a high probability of
    # breaking applications that only handle oldstyle cookies, where some other
    # application set a newstyle cookie that is applicable for the site
    # (or host), AND where the oldstyle cookie uses a value which is invalid
    # syntax in a newstyle cookie.

    # Also, the cookie name *cannot* be quoted in V1, so some cookies just
    # cannot be converted at all. (e.g. "Cookie: phpAds_capAd[32]=2"). These
    # are just discarded during conversion.

    # As this is an unsolvable problem, I will pretend I can just say
    # OH WELL, don't do that, or else upgrade your old applications to have
    # newstyle cookie parsers.

    # I will note offhandedly that there are *many* sites which send V0 cookies
    # that are not valid V1 cookie syntax. About 20% for my cookies file.
    # However, they do not generally mix them with V1 cookies, so this isn't
    # an issue, at least right now. I have not tested to see how many of those
    # webapps support RFC2965 V1 cookies. I suspect not many.

    max_version = max([cookie.version for cookie in cookies])

    if max_version == 0:
        # no quoting or anything.
        return ';'.join(["%s=%s" % (cookie.name, cookie.value) for cookie in cookies])
    else:
        str_cookies = ['$Version="1"']
        for cookie in cookies:
            if cookie.version == 0:
                # Version 0 cookie: we make sure the name and value are valid
                # V1 syntax.

                # If they are, we use them as is. This means in *most* cases,
                # the cookie will look literally the same on output as it did
                # on input.
                # If it isn't a valid name, ignore the cookie.
                # If it isn't a valid value, quote it and hope for the best on
                # the other side.

                if cookie_validname_re.match(cookie.name) is None:
                    continue

                value = cookie.value
                if cookie_validvalue_re.match(cookie.value) is None:
                    value = quoteString(value)

                str_cookies.append("%s=%s" % (cookie.name, value))
            else:
                # V1 cookie, nice and easy
                str_cookies.append("%s=%s" % (cookie.name, quoteString(cookie.value)))

            if cookie.path:
                str_cookies.append("$Path=%s" % quoteString(cookie.path))
            if cookie.domain:
                str_cookies.append("$Domain=%s" % quoteString(cookie.domain))
            if cookie.ports is not None:
                if len(cookie.ports) == 0:
                    str_cookies.append("$Port")
                else:
                    str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
        return ';'.join(str_cookies)

def parseSetCookie(headers):
    setCookies = []
    for header in headers:
        try:
            parts = header.split(';')
            l = []

            for part in parts:
                namevalue = part.split('=', 1)
                if len(namevalue) == 1:
                    name = namevalue[0]
                    value = None
                else:
                    name, value = namevalue
                    value = value.strip(' \t')

                name = name.strip(' \t')

                l.append((name, value))

            setCookies.append(makeCookieFromList(l, True))
        except ValueError:
            # If we can't parse one Set-Cookie, ignore it,
            # but not the rest of Set-Cookies.
            pass
    return setCookies

def parseSetCookie2(toks):
    outCookies = []
    for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))]
                   for y in split(toks, Token(','))]:
        try:
            outCookies.append(makeCookieFromList(cookie, False))
        except ValueError:
            # Again, if we can't handle one cookie -- ignore it.
            pass
    return outCookies

def makeCookieFromList(tup, netscapeFormat):
    name, value = tup[0]
    if name is None or value is None:
        raise ValueError("Cookie has missing name or value")
    if name.startswith("$"):
        raise ValueError("Invalid cookie name: %r, starts with '$'." % name)
    cookie = Cookie(name, value)

    hadMaxAge = False

    for name, value in tup[1:]:
        name = name.lower()

        if value is None:
            if name in ("discard", "secure"):
                # Boolean attrs
                value = True
            elif name != "port":
                # Can be either boolean or explicit
                continue

        if name in ("comment", "commenturl", "discard", "domain", "path", "secure"):
            # simple cases
            setattr(cookie, name, value)
        elif name == "expires" and not hadMaxAge:
            if netscapeFormat and value[0] == '"' and value[-1] == '"':
                value = value[1:-1]
            cookie.expires = parseDateTime(value)
        elif name == "max-age":
            hadMaxAge = True
            cookie.expires = int(value) + time.time()
        elif name == "port":
            if value is None:
                cookie.ports = ()
            else:
                if netscapeFormat and value[0] == '"' and value[-1] == '"':
                    value = value[1:-1]
                cookie.ports = tuple([int(s) for s in value.split(',')])
        elif name == "version":
            cookie.version = int(value)

    return cookie


def generateSetCookie(cookies):
    setCookies = []
    for cookie in cookies:
        out = ["%s=%s" % (cookie.name, cookie.value)]
        if cookie.expires:
            out.append("expires=%s" % generateDateTime(cookie.expires))
        if cookie.path:
            out.append("path=%s" % cookie.path)
        if cookie.domain:
            out.append("domain=%s" % cookie.domain)
        if cookie.secure:
            out.append("secure")

        setCookies.append('; '.join(out))
    return setCookies

def generateSetCookie2(cookies):
    setCookies = []
    for cookie in cookies:
        out = ["%s=%s" % (cookie.name, quoteString(cookie.value))]
        if cookie.comment:
            out.append("Comment=%s" % quoteString(cookie.comment))
        if cookie.commenturl:
            out.append("CommentURL=%s" % quoteString(cookie.commenturl))
        if cookie.discard:
            out.append("Discard")
        if cookie.domain:
            out.append("Domain=%s" % quoteString(cookie.domain))
        if cookie.expires:
            out.append("Max-Age=%s" % (cookie.expires - time.time()))
        if cookie.path:
            out.append("Path=%s" % quoteString(cookie.path))
        if cookie.ports is not None:
            if len(cookie.ports) == 0:
                out.append("Port")
            else:
                out.append("Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
        if cookie.secure:
            out.append("Secure")
        out.append('Version="1"')
        setCookies.append('; '.join(out))
    return setCookies

def parseDepth(depth):
    if depth not in ("0", "1", "infinity"):
        raise ValueError("Invalid depth header value: %s" % (depth,))
    return depth

def parseOverWrite(overwrite):
    if overwrite == "F":
        return False
    elif overwrite == "T":
        return True
    raise ValueError("Invalid overwrite header value: %s" % (overwrite,))

def generateOverWrite(overwrite):
    if overwrite:
        return "T"
    else:
        return "F"

##### Random stuff that looks useful.
# def sortMimeQuality(s):
#     def sorter(item1, item2):
#         if item1[0] == '*':
#             if item2[0] == '*':
#                 return 0

# def sortQuality(s):
#     def sorter(item1, item2):
#         if item1[1] < item2[1]:
#             return -1
#         if item1[1] < item2[1]:
#             return 1
#         if item1[0] == item2[0]:
#             return 0

# def getMimeQuality(mimeType, accepts):
#     type, args = parseArgs(mimeType)
#     type = type.split(Token('/'))
#     if len(type) != 2:
#         raise ValueError, "MIME Type "+s+" invalid."

#     for accept in accepts:
#         accept, acceptQual = accept
#         acceptType = accept[0:1]
#         acceptArgs = accept[2]

#         if ((acceptType == type or acceptType == (type[0],'*') or acceptType == ('*','*')) and
#             (args == acceptArgs or len(acceptArgs) == 0)):
#             return acceptQual

# def getQuality(type, accepts):
#     qual = accepts.get(type)
#     if qual is not None:
#         return qual

#     return accepts.get('*')

# Headers object

class __RecalcNeeded(object):
    def __repr__(self):
        return "<RecalcNeeded>"

_RecalcNeeded = __RecalcNeeded()

class Headers(object):
    """This class stores the HTTP headers as both a parsed representation and
    the raw string representation. It converts between the two on demand."""

    def __init__(self, headers=None, rawHeaders=None, handler=DefaultHTTPHandler):
        self._raw_headers = {}
        self._headers = {}
        self.handler = handler
        if headers is not None:
            for key, value in headers.iteritems():
                self.setHeader(key, value)
        if rawHeaders is not None:
            for key, value in rawHeaders.iteritems():
                self.setRawHeaders(key, value)

    def _setRawHeaders(self, headers):
        self._raw_headers = headers
        self._headers = {}

    def _toParsed(self, name):
        r = self._raw_headers.get(name, None)
        h = self.handler.parse(name, r)
        if h is not None:
            self._headers[name] = h
        return h

    def _toRaw(self, name):
        h = self._headers.get(name, None)
        r = self.handler.generate(name, h)
        if r is not None:
            self._raw_headers[name] = r
        return r

    def __contains__(self, name):
        """Does a header with the given name exist?"""
        return name.lower() in self._raw_headers

    hasHeader = __contains__

    def getRawHeaders(self, name, default=None):
        """Returns a list of headers matching the given name as the raw string given."""
        name = name.lower()
        raw_header = self._raw_headers.get(name, default)
        if raw_header is not _RecalcNeeded:
            return raw_header

        return self._toRaw(name)

    def getHeader(self, name, default=None):
        """Returns the parsed representation of the given header.
        The exact form of the return value depends on the header in question.

        If no parser for the header exists, raise ValueError.
        If the header doesn't exist, return default (or None if not specified)
        """
        name = name.lower()
        parsed = self._headers.get(name, default)
        if parsed is not _RecalcNeeded:
            return parsed
        return self._toParsed(name)

    def setRawHeaders(self, name, value):
        """Sets the raw representation of the given header.
        Value should be a list of strings, each being one header of the
        given name.
        """
        name = name.lower()
        self._raw_headers[name] = value
        self._headers[name] = _RecalcNeeded

    def setHeader(self, name, value):
        """Sets the parsed representation of the given header.
        Value should be a list of objects whose exact form depends
        on the header in question.
        """
        name = name.lower()
        self._raw_headers[name] = _RecalcNeeded
        self._headers[name] = value

    def addRawHeader(self, name, value):
        """
        Add a raw value to a header that may or may not already exist.
        If it exists, add it as a separate header to output; do not
        replace anything.
        """
        name = name.lower()
        raw_header = self._raw_headers.get(name)
        if raw_header is None:
            # No header yet
            raw_header = []
            self._raw_headers[name] = raw_header
        elif raw_header is _RecalcNeeded:
            raw_header = self._toRaw(name)

        raw_header.append(value)
        self._headers[name] = _RecalcNeeded

    def addHeader(self, name, value):
        """
        Add a parsed representation to a header that may or may not already exist.
        If it exists, add it as a separate header to output; do not
        replace anything.
        """
        name = name.lower()
        header = self._headers.get(name)
        if header is None:
            # No header yet
            header = []
            self._headers[name] = header
        elif header is _RecalcNeeded:
            header = self._toParsed(name)

        header.append(value)
        self._raw_headers[name] = _RecalcNeeded

    def removeHeader(self, name):
        """Removes the header named."""
        name = name.lower()
        if name in self._raw_headers:
            del self._raw_headers[name]
            del self._headers[name]

    def __repr__(self):
        return '<Headers: Raw: %s Parsed: %s>' % (self._raw_headers, self._headers)

    def canonicalNameCaps(self, name):
        """Return the name with the canonical capitalization, if known,
        otherwise, Caps-After-Dashes"""
        return header_case_mapping.get(name) or dashCapitalize(name)

    def getAllRawHeaders(self):
        """Return an iterator of key, value pairs of all headers
        contained in this object, as strings. The keys are capitalized
        in canonical capitalization."""
        for k, v in self._raw_headers.iteritems():
            if v is _RecalcNeeded:
                v = self._toRaw(k)
            yield self.canonicalNameCaps(k), v

    def makeImmutable(self):
        """Make this header set immutable. All mutating operations will
        raise an exception."""
        self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise

    def _mutateRaise(self, *args):
        raise AttributeError("This header object is immutable as the headers have already been sent.")


"""The following dicts are all mappings of header to list of operations
to perform. The first operation should generally be 'tokenize' if the
header can be parsed according to the normal tokenization rules. If
it cannot, generally the first thing you want to do is take only the
last instance of the header (in case it was sent multiple times, which
is strictly an error, but we're nice.).
"""iteritems=lambdax:x.iteritems()parser_general_headers={'Cache-Control':(tokenize,listParser(parseCacheControl),dict),'Connection':(tokenize,filterTokens),'Date':(last,parseDateTime),# 'Pragma':tokenize# 'Trailer':tokenize'Transfer-Encoding':(tokenize,filterTokens),# 'Upgrade':tokenize# 'Via':tokenize,stripComment# 'Warning':tokenize}generator_general_headers={'Cache-Control':(iteritems,listGenerator(generateCacheControl),singleHeader),'Connection':(generateList,singleHeader),'Date':(generateDateTime,singleHeader),# 'Pragma':# 'Trailer':'Transfer-Encoding':(generateList,singleHeader),# 'Upgrade':# 'Via':# 'Warning':}parser_request_headers={'Accept':(tokenize,listParser(parseAccept),dict),'Accept-Charset':(tokenize,listParser(parseAcceptQvalue),dict,addDefaultCharset),'Accept-Encoding':(tokenize,listParser(parseAcceptQvalue),dict,addDefaultEncoding),'Accept-Language':(tokenize,listParser(parseAcceptQvalue),dict),'Authorization':(last,parseAuthorization),'Cookie':(parseCookie,),'Expect':(tokenize,listParser(parseExpect),dict),'From':(last,),'Host':(last,),'If-Match':(tokenize,listParser(parseStarOrETag),list),'If-Modified-Since':(last,parseIfModifiedSince),'If-None-Match':(tokenize,listParser(parseStarOrETag),list),'If-Range':(parseIfRange,),'If-Unmodified-Since':(last,parseDateTime),'Max-Forwards':(last,int),# 'Proxy-Authorization':str, # what is "credentials"'Range':(tokenize,parseRange),'Referer':(last,str),# TODO: URI object?'TE':(tokenize,listParser(parseAcceptQvalue),dict),'User-Agent':(last,str),}generator_request_headers={'Accept':(iteritems,listGenerator(generateAccept),singleHeader),'Accept-Charset':(iteritems,listGenerator(generateAcceptQvalue),singleHeader),'Accept-Encoding':(iteritems,removeDefaultEncoding,listGenerator(generateAcceptQvalue),singleHeader),'Accept-Language':(iteritems,listGenerator(generateAcceptQvalue),singleHeader),'Authorization':(generateAuthorization,),# what is "credentials"'Cookie':(generateCookie,singleHeader),'Expect':(iteritems,listGenerator(generateExpect),singleHeader),'From':(str,singleHeader),'Host':(str,singleHeader),'If-Match':(listGenerator(generateStarOrETag),singleHeader),'If-Modified-Since':(generateDateTime,singleHeader),'If-None-Match':(listGenerator(generateStarOrETag),singleHeader),'If-Range':(generateIfRange,singleHeader),'If-Unmodified-Since':(generateDateTime,singleHeader),'Max-Forwards':(str,singleHeader),# 'Proxy-Authorization':str, # what is "credentials"'Range':(generateRange,singleHeader),'Referer':(str,singleHeader),'TE':(iteritems,listGenerator(generateAcceptQvalue),singleHeader),'User-Agent':(str,singleHeader),}parser_response_headers={'Accept-Ranges':(tokenize,filterTokens),'Age':(last,int),'ETag':(tokenize,ETag.parse),'Location':(last,),# TODO: URI object?# 'Proxy-Authenticate''Retry-After':(last,parseRetryAfter),'Server':(last,),'Set-Cookie':(parseSetCookie,),'Set-Cookie2':(tokenize,parseSetCookie2),'Vary':(tokenize,filterTokens),'WWW-Authenticate':(lambdah:tokenize(h,foldCase=False),parseWWWAuthenticate,)}generator_response_headers={'Accept-Ranges':(generateList,singleHeader),'Age':(str,singleHeader),'ETag':(ETag.generate,singleHeader),'Location':(str,singleHeader),# 
    'Retry-After': (generateRetryAfter, singleHeader),
    'Server': (str, singleHeader),
    'Set-Cookie': (generateSetCookie,),
    'Set-Cookie2': (generateSetCookie2,),
    'Vary': (generateList, singleHeader),
    'WWW-Authenticate': (generateWWWAuthenticate,)
}

parser_entity_headers = {
    'Allow': (lambda str: tokenize(str, foldCase=False), filterTokens),
    'Content-Encoding': (tokenize, filterTokens),
    'Content-Language': (tokenize, filterTokens),
    'Content-Length': (last, int),
    'Content-Location': (last,),  # TODO: URI object?
    'Content-MD5': (last, parseContentMD5),
    'Content-Range': (last, parseContentRange),
    'Content-Type': (lambda str: tokenize(str, foldCase=False), parseContentType),
    'Expires': (last, parseExpires),
    'Last-Modified': (last, parseDateTime),
}

generator_entity_headers = {
    'Allow': (generateList, singleHeader),
    'Content-Encoding': (generateList, singleHeader),
    'Content-Language': (generateList, singleHeader),
    'Content-Length': (str, singleHeader),
    'Content-Location': (str, singleHeader),
    'Content-MD5': (base64.encodestring, lambda x: x.strip("\n"), singleHeader),
    'Content-Range': (generateContentRange, singleHeader),
    'Content-Type': (generateContentType, singleHeader),
    'Expires': (generateDateTime, singleHeader),
    'Last-Modified': (generateDateTime, singleHeader),
}

DefaultHTTPHandler.updateParsers(parser_general_headers)
DefaultHTTPHandler.updateParsers(parser_request_headers)
DefaultHTTPHandler.updateParsers(parser_response_headers)
DefaultHTTPHandler.updateParsers(parser_entity_headers)

DefaultHTTPHandler.updateGenerators(generator_general_headers)
DefaultHTTPHandler.updateGenerators(generator_request_headers)
DefaultHTTPHandler.updateGenerators(generator_response_headers)
DefaultHTTPHandler.updateGenerators(generator_entity_headers)

# casemappingify(DefaultHTTPParsers)
# casemappingify(DefaultHTTPGenerators)
# lowerify(DefaultHTTPParsers)
# lowerify(DefaultHTTPGenerators)
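For reference, a short usage sketch of the Headers class defined above. It assumes the module is saved and importable as http_headers (the module name is only an assumption) and runs under Python 2, which the module requires.

# Usage sketch for the Headers API above (Python 2 only, module name assumed).
from http_headers import Headers

h = Headers()

# raw -> parsed: store the wire form, ask for the parsed representation
h.setRawHeaders('content-type', ['text/html; charset=UTF-8'])
mt = h.getHeader('content-type')          # a MimeType instance
print mt.mediaType, mt.mediaSubtype, mt.params

# parsed -> raw: store a parsed value, ask for the generated wire form
h.setHeader('content-length', 42)
print h.getRawHeaders('content-length')   # ['42']

# iterate all headers with canonical capitalization
for name, values in h.getAllRawHeaders():
    print name, values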