diff --git a/python/PyZ3950/CQLParser.py b/python/PyZ3950/CQLParser.py
deleted file mode 100644
index e0779ab..0000000
--- a/python/PyZ3950/CQLParser.py
+++ /dev/null
@@ -1,987 +0,0 @@
-#!/usr/bin/python
-
-# Author: Rob Sanderson (azaroth@liv.ac.uk)
-# Distributed and Usable under the GPL
-# Version: 1.7
-# Most Recent Changes: contexts, new modifier style for 1.1
-#
-# With thanks to Adam from IndexData and Mike Taylor for their valuable input
-
-from shlex import shlex
-from xml.sax.saxutils import escape
-from xml.dom.minidom import Node, parseString
-from PyZ3950.SRWDiagnostics import *
-# Don't use cStringIO as it borks Unicode (apparently)
-from StringIO import StringIO
-import types
-
-# Parsing strictness flags
-errorOnEmptyTerm = 0 # index = "" (often meaningless)
-errorOnQuotedIdentifier = 0 # "/foo/bar" = "" (unnecessary BNF restriction)
-errorOnDuplicatePrefix = 0 # >a=b >a=c "" (impossible due to BNF)
-fullResultSetNameCheck = 1 # srw.rsn=foo and srw.rsn=foo (mutant!!)
-
-# Base values for CQL
-serverChoiceRelation = "scr"
-serverChoiceIndex = "cql.serverchoice"
-
-order = ['=', '>', '>=', '<', '<=', '<>']
-modifierSeparator = "/"
-booleans = ['and', 'or', 'not', 'prox']
-
-reservedPrefixes = {"srw" : "http://www.loc.gov/zing/cql/srw-indexes/v1.0/",
- "cql" : "info:srw/cql-context-set/1/cql-v1.1"}
-
-XCQLNamespace = "http://www.loc.gov/zing/cql/xcql/"
-
-# End of 'configurable' stuff
-
-class PrefixableObject:
- "Root object for triple and searchClause"
- prefixes = {}
- parent = None
- config = None
-
- def __init__(self):
- self.prefixes = {}
- self.parent = None
- self.config = None
-
- def toXCQL(self, depth=0):
- # Just generate our prefixes
- space = " " * depth
- xml = ['%s<prefixes>\n' % (space)]
- for p in self.prefixes.keys():
- xml.append("%s  <prefix>\n%s    <name>%s</name>\n%s    <identifier>%s</identifier>\n%s  </prefix>\n" % (space, space, escape(p), space, escape(self.prefixes[p]), space))
- xml.append("%s</prefixes>\n" % (space))
- return ''.join(xml)
-
-
- def addPrefix(self, name, identifier):
- if (errorOnDuplicatePrefix and (self.prefixes.has_key(name) or reservedPrefixes.has_key(name))):
- # Maybe error
- diag = Diagnostic45()
- diag.details = name
- raise diag;
- self.prefixes[name] = identifier
-
- def resolvePrefix(self, name):
- # Climb tree
- if (reservedPrefixes.has_key(name)):
- return reservedPrefixes[name]
- elif (self.prefixes.has_key(name)):
- return self.prefixes[name]
- elif (self.parent <> None):
- return self.parent.resolvePrefix(name)
- elif (self.config <> None):
- # Config is some sort of server config which specifies defaults
- return self.config.resolvePrefix(name)
- else:
- # Top of tree, no config, no resolution->Unknown indexset
- # For client we need to allow no prefix?
-
- #diag = Diagnostic15()
- #diag.details = name
- #raise diag
- return None
-
-
-class PrefixedObject:
- "Root object for relation, relationModifier and index"
- prefix = ""
- prefixURI = ""
- value = ""
- parent = None
-
- def __init__(self, val):
- # All prefixed things are case insensitive
- val = val.lower()
- if val and val[0] == '"' and val[-1] == '"':
- if errorOnQuotedIdentifier:
- diag = Diagnostic14()
- diag.details = val
- raise diag
- else:
- val = val[1:-1]
- self.value = val
- self.splitValue()
-
- def __str__(self):
- if (self.prefix):
- return "%s.%s" % (self.prefix, self.value)
- else:
- return self.value
-
- def splitValue(self):
- f = self.value.find(".")
- if (self.value.count('.') > 1):
- diag = Diagnostic15()
- diag.details = "Multiple '.' characters: %s" % (self.value)
- raise(diag)
- elif (f == 0):
- diag = Diagnostic15()
- diag.details = "Null indexset: %s" % (irt.index)
- raise(diag)
- elif f >= 0:
- self.prefix = self.value[:f].lower()
- self.value = self.value[f+1:].lower()
-
- def resolvePrefix(self):
- if (not self.prefixURI):
- self.prefixURI = self.parent.resolvePrefix(self.prefix)
- return self.prefixURI
-
-class ModifiableObject:
- # Treat modifiers as keys on boolean/relation?
- modifiers = []
-
- def __getitem__(self, k):
- if (type(k) == types.IntType):
- try:
- return self.modifiers[k]
- except:
- return None
- for m in self.modifiers:
- if (str(m.type) == k or m.type.value == k):
- return m
- return None
-
-class Triple (PrefixableObject):
- "Object to represent a CQL triple"
- leftOperand = None
- boolean = None
- rightOperand = None
-
- def toXCQL(self, depth=0):
- "Create the XCQL representation of the object"
- space = " " * depth
- if (depth == 0):
- xml = ['<triple xmlns="%s">\n' % (XCQLNamespace)]
- else:
- xml = ['%s<triple>\n' % (space)]
-
- if self.prefixes:
- xml.append(PrefixableObject.toXCQL(self, depth+1))
-
- xml.append(self.boolean.toXCQL(depth+1))
- xml.append("%s \n" % (space))
- xml.append(self.leftOperand.toXCQL(depth+2))
- xml.append("%s \n" % (space))
- xml.append("%s \n" % (space))
- xml.append(self.rightOperand.toXCQL(depth+2))
- xml.append("%s \n" % (space))
- xml.append("%s\n" % (space))
- return ''.join(xml)
-
- def toCQL(self):
- txt = []
- if (self.prefixes):
- for p in self.prefixes.keys():
- if (p <> ''):
- txt.append('>%s="%s"' % (p, self.prefixes[p]))
- else:
- txt.append('>"%s"' % (self.prefixes[p]))
- prefs = ' '.join(txt)
- return "(%s %s %s %s)" % (prefs, self.leftOperand.toCQL(), self.boolean.toCQL(), self.rightOperand.toCQL())
- else:
- return "(%s %s %s)" % (self.leftOperand.toCQL(), self.boolean.toCQL(), self.rightOperand.toCQL())
-
-
- def getResultSetId(self, top=None):
-
- if fullResultSetNameCheck == 0 or self.boolean.value in ['not', 'prox']:
- return ""
-
- if top == None:
- topLevel = 1
- top = self;
- else:
- topLevel = 0
-
- # Iterate over operands and build a list
- rsList = []
- if isinstance(self.leftOperand, Triple):
- rsList.extend(self.leftOperand.getResultSetId(top))
- else:
- rsList.append(self.leftOperand.getResultSetId(top))
- if isinstance(self.rightOperand, Triple):
- rsList.extend(self.rightOperand.getResultSetId(top))
- else:
- rsList.append(self.rightOperand.getResultSetId(top))
-
- if topLevel == 1:
- # Check all elements are the same, if so we're a fubar form of present
- if (len(rsList) == rsList.count(rsList[0])):
- return rsList[0]
- else:
- return ""
- else:
- return rsList
-
-class SearchClause (PrefixableObject):
- "Object to represent a CQL searchClause"
- index = None
- relation = None
- term = None
-
- def __init__(self, ind, rel, t):
- PrefixableObject.__init__(self)
- self.index = ind
- self.relation = rel
- self.term = t
- ind.parent = self
- rel.parent = self
- t.parent = self
-
- def toXCQL(self, depth=0):
- "Produce XCQL version of the object"
- space = " " * depth
- if (depth == 0):
- xml = ['<searchClause xmlns="%s">\n' % (XCQLNamespace)]
- else:
- xml = ['%s<searchClause>\n' % (space)]
-
- if self.prefixes:
- xml.append(PrefixableObject.toXCQL(self, depth+1))
-
- xml.append(self.index.toXCQL(depth+1))
- xml.append(self.relation.toXCQL(depth+1))
- xml.append(self.term.toXCQL(depth+1))
- xml.append("%s\n" % (space))
- return ''.join(xml)
-
- def toCQL(self):
- text = []
- for p in self.prefixes.keys():
- if (p <> ''):
- text.append('>%s="%s"' % (p, self.prefixes[p]))
- else:
- text.append('>"%s"' % (self.prefixes[p]))
- text.append('%s %s "%s"' % (self.index, self.relation.toCQL(), self.term))
- return ' '.join(text)
-
- def getResultSetId(self, top=None):
- idx = self.index
- idx.resolvePrefix()
- if (idx.prefixURI == reservedPrefixes['cql'] and idx.value.lower() == 'resultsetid'):
- return self.term.value
- else:
- return ""
-
-class Index(PrefixedObject):
- "Object to represent a CQL index"
-
- def toXCQL(self, depth=0):
- if (depth == 0):
- ns = ' xmlns="%s"' % (XCQLNamespace)
- else:
- ns = ""
- return "%s%s\n" % (" "*depth, ns, escape(str(self)))
-
- def toCQL(self):
- return str(self)
-
-class Relation(PrefixedObject, ModifiableObject):
- "Object to represent a CQL relation"
- def __init__(self, rel, mods=[]):
- self.prefix = "cql"
- PrefixedObject.__init__(self, rel)
- self.modifiers = mods
- for m in mods:
- m.parent = self
-
- def toXCQL(self, depth=0):
- "Create XCQL representation of object"
- if (depth == 0):
- ns = ' xmlns="%s"' % (XCQLNamespace)
- else:
- ns = ""
-
- space = " " * depth
-
- xml = ["%s\n" % (space, ns)]
- xml.append("%s %s\n" % (space, escape(self.value)))
- if self.modifiers:
- xml.append("%s \n" % (space))
- for m in self.modifiers:
- xml.append(m.toXCQL(depth+2))
- xml.append("%s \n" % (space))
- xml.append("%s\n" % (space))
- return ''.join(xml)
-
- def toCQL(self):
- txt = [self.value]
- txt.extend(map(str, self.modifiers))
- return '/'.join(txt)
-
-class Term:
- value = ""
- def __init__(self, v):
- if (v <> ""):
- # Unquoted literal
- if v in ['>=', '<=', '>', '<', '<>', "/", '=']:
- diag = Diagnostic25()
- diag.details = v
- raise diag
-
- # Check existence of meaningful term
- nonanchor = 0
- for c in v:
- if c != "^":
- nonanchor = 1
- break
- if not nonanchor:
- diag = Diagnostic32()
- diag.details = "Only anchoring charater(s) in term: " + v
- raise diag
-
- # Unescape quotes
- if (v[0] == '"' and v[-1] == '"'):
- v = v[1:-1]
- v = v.replace('\\"', '"')
-
- if (not v and errorOnEmptyTerm):
- diag = Diagnostic27()
- raise diag
-
- # Check for badly placed \s
- startidx = 0
- idx = v.find("\\", startidx)
- while (idx > -1):
- startidx = idx+1
- if not v[idx+1] in ['?', '\\', '*', '^']:
- diag = Diagnostic26()
- diag.details = v
- raise diag
- idx = v.find("\\", startidx)
-
- elif (errorOnEmptyTerm):
- diag = Diagnostic27()
- raise diag
-
- self.value = v
-
- def __str__(self):
- return self.value
-
- def toXCQL(self, depth=0):
- if (depth == 0):
- ns = ' xmlns="%s"' % (XCQLNamespace)
- else:
- ns = ""
- return "%s%s\n" % (" "*depth, ns, escape(self.value))
-
-class Boolean(ModifiableObject):
- "Object to represent a CQL boolean"
- value = ""
- parent = None
- def __init__(self, bool, mods=[]):
- self.value = bool
- self.modifiers = mods
- self.parent = None
-
- def toXCQL(self, depth=0):
- "Create XCQL representation of object"
- space = " " * depth
- xml = ["%s\n" % (space)]
- xml.append("%s %s\n" % (space, escape(self.value)))
- if self.modifiers:
- xml.append("%s \n" % (space))
- for m in self.modifiers:
- xml.append(m.toXCQL(depth+2))
- xml.append("%s \n" % (space))
- xml.append("%s\n" % (space))
- return ''.join(xml)
-
- def toCQL(self):
- txt = [self.value]
- for m in self.modifiers:
- txt.append(m.toCQL())
- return '/'.join(txt)
-
- def resolvePrefix(self, name):
- return self.parent.resolvePrefix(name)
-
-class ModifierType(PrefixedObject):
- # Same as index, but we'll XCQLify in ModifierClause
- parent = None
- prefix = "cql"
-
-class ModifierClause:
- "Object to represent a relation modifier"
- parent = None
- type = None
- comparison = ""
- value = ""
-
- def __init__(self, type, comp="", val=""):
- self.type = ModifierType(type)
- self.type.parent = self
- self.comparison = comp
- self.value = val
-
- def __str__(self):
- if (self.value):
- return "%s%s%s" % (str(self.type), self.comparison, self.value)
- else:
- return "%s" % (str(self.type))
-
- def toXCQL(self, depth=0):
- if (self.value):
- return "%s\n%s%s\n%s%s\n%s%s\n%s\n" % (" " * depth, " " * (depth+1), escape(str(self.type)), " " * (depth+1), escape(self.comparison), " " * (depth+1), escape(self.value), " " * depth)
- else:
- return "%s%s\n" % (" " * depth, escape(str(self.type)))
-
- def toCQL(self):
- return str(self)
-
- def resolvePrefix(self, name):
- # Need to skip parent, which has its own resolvePrefix
- # eg boolean or relation, neither of which is prefixable
- return self.parent.parent.resolvePrefix(name)
-
-
-
-# Requires changes for: <= >= <>, and escaped \" in "
-# From shlex.py (std library for 2.2+)
-class CQLshlex(shlex):
- "shlex with additions for CQL parsing"
- quotes = '"'
- commenters = ""
- nextToken = ""
-
- def __init__(self, thing):
- shlex.__init__(self, thing)
- self.wordchars += "!@#$%^&*-+{}[];,.?|~`:\\"
- self.wordchars += ''.join(map(chr, range(128,254)))
-
- def read_token(self):
- "Read a token from the input stream (no pushback or inclusions)"
-
- while 1:
- if (self.nextToken != ""):
- self.token = self.nextToken
- self.nextToken = ""
- # Bah. SUPER ugly non portable
- if self.token == "/":
- self.state = ' '
- break
-
- nextchar = self.instream.read(1)
- if nextchar == '\n':
- self.lineno = self.lineno + 1
- if self.debug >= 3:
- print "shlex: in state ", repr(self.state), " I see character:", repr(nextchar)
-
- if self.state is None:
- self.token = '' # past end of file
- break
- elif self.state == ' ':
- if not nextchar:
- self.state = None # end of file
- break
- elif nextchar in self.whitespace:
- if self.debug >= 2:
- print "shlex: I see whitespace in whitespace state"
- if self.token:
- break # emit current token
- else:
- continue
- elif nextchar in self.commenters:
- self.instream.readline()
- self.lineno = self.lineno + 1
- elif nextchar in self.wordchars:
- self.token = nextchar
- self.state = 'a'
- elif nextchar in self.quotes:
- self.token = nextchar
- self.state = nextchar
- elif nextchar in ['<', '>']:
- self.token = nextchar
- self.state = '<'
- else:
- self.token = nextchar
- if self.token:
- break # emit current token
- else:
- continue
- elif self.state == '<':
- # Only accumulate <=, >= or <>
-
- if self.token == ">" and nextchar == "=":
- self.token = self.token + nextchar
- self.state = ' '
- break
- elif self.token == "<" and nextchar in ['>', '=']:
- self.token = self.token + nextchar
- self.state = ' '
- break
- elif not nextchar:
- self.state = None
- break
- elif nextchar == "/":
- self.state = "/"
- self.nextToken = "/"
- break
- elif nextchar in self.wordchars:
- self.state='a'
- self.nextToken = nextchar
- break
- elif nextchar in self.quotes:
- self.state=nextchar
- self.nextToken = nextchar
- break
- else:
- self.state = ' '
- break
-
- elif self.state in self.quotes:
- self.token = self.token + nextchar
- # Allow escaped quotes
- if nextchar == self.state and self.token[-2] != '\\':
- self.state = ' '
- break
- elif not nextchar: # end of file
- if self.debug >= 2:
- print "shlex: I see EOF in quotes state"
- # Override SHLEX's ValueError to throw diagnostic
- diag = Diagnostic14()
- diag.details = self.token[:-1]
- raise diag
- elif self.state == 'a':
- if not nextchar:
- self.state = None # end of file
- break
- elif nextchar in self.whitespace:
- if self.debug >= 2:
- print "shlex: I see whitespace in word state"
- self.state = ' '
- if self.token:
- break # emit current token
- else:
- continue
- elif nextchar in self.commenters:
- self.instream.readline()
- self.lineno = self.lineno + 1
- elif nextchar in self.wordchars or nextchar in self.quotes:
- self.token = self.token + nextchar
- elif nextchar in ['>', '<']:
- self.nextToken = nextchar
- self.state = '<'
- break
- else:
- self.pushback = [nextchar] + self.pushback
- if self.debug >= 2:
- print "shlex: I see punctuation in word state"
- self.state = ' '
- if self.token:
- break # emit current token
- else:
- continue
- result = self.token
- self.token = ''
- if self.debug > 1:
- if result:
- print "shlex: raw token=" + `result`
- else:
- print "shlex: raw token=EOF"
- return result
-
-class CQLParser:
- "Token parser to create object structure for CQL"
- parser = ""
- currentToken = ""
- nextToken = ""
-
- def __init__(self, p):
- """ Initialise with shlex parser """
- self.parser = p
- self.fetch_token() # Fetches to next
- self.fetch_token() # Fetches to curr
-
- def is_boolean(self, token):
- "Is the token a boolean"
- token = token.lower()
- return token in booleans
-
- def fetch_token(self):
- """ Read ahead one token """
- tok = self.parser.get_token()
- self.currentToken = self.nextToken
- self.nextToken = tok
-
- def prefixes(self):
- "Create prefixes dictionary"
- prefs = {}
- while (self.currentToken == ">"):
- # Strip off maps
- self.fetch_token()
- if self.nextToken == "=":
- # Named map
- name = self.currentToken
- self.fetch_token() # = is current
- self.fetch_token() # id is current
- identifier = self.currentToken
- self.fetch_token()
- else:
- name = ""
- identifier = self.currentToken
- self.fetch_token()
- if (errorOnDuplicatePrefix and prefs.has_key(name)):
- # Error condition
- diag = Diagnostic45()
- diag.details = name
- raise diag;
- if len(identifier) > 1 and identifier[0] == '"' and identifier[-1] == '"':
- identifier = identifier[1:-1]
- prefs[name.lower()] = identifier
-
- return prefs
-
-
- def query(self):
- """ Parse query """
- prefs = self.prefixes()
- left = self.subQuery()
- while 1:
- if not self.currentToken:
- break;
- bool = self.is_boolean(self.currentToken)
- if bool:
- boolobject = self.boolean()
- right = self.subQuery()
- # Setup Left Object
- trip = tripleType()
- trip.leftOperand = left
- trip.boolean = boolobject
- trip.rightOperand = right
- left.parent = trip
- right.parent = trip
- boolobject.parent = trip
- left = trip
- else:
- break;
-
- for p in prefs.keys():
- left.addPrefix(p, prefs[p])
- return left
-
- def subQuery(self):
- """ Find either query or clause """
- if self.currentToken == "(":
- self.fetch_token() # Skip (
- object = self.query()
- if self.currentToken == ")":
- self.fetch_token() # Skip )
- else:
- diag = Diagnostic13()
- diag.details = self.currentToken
- raise diag
- else:
- prefs = self.prefixes()
- if (prefs):
- object = self.query()
- for p in prefs.keys():
- object.addPrefix(p, prefs[p])
- else:
- object = self.clause()
- return object
-
- def clause(self):
- """ Find searchClause """
- bool = self.is_boolean(self.nextToken)
- if not bool and not (self.nextToken in [')', '(', '']):
-
- index = indexType(self.currentToken)
- self.fetch_token() # Skip Index
- rel = self.relation()
- if (self.currentToken == ''):
- diag = Diagnostic10()
- diag.details = "Expected Term, got end of query."
- raise(diag)
- term = termType(self.currentToken)
- self.fetch_token() # Skip Term
-
- irt = searchClauseType(index, rel, term)
-
- elif self.currentToken and (bool or self.nextToken in [')', '']):
-
- irt = searchClauseType(indexType(serverChoiceIndex), relationType(serverChoiceRelation), termType(self.currentToken))
- self.fetch_token()
-
- elif self.currentToken == ">":
- prefs = self.prefixes()
- # iterate to get object
- object = self.clause()
- for p in prefs.keys():
- object.addPrefix(p, prefs[p]);
- return object
-
- else:
- diag = Diagnostic10()
- diag.details = "Expected Boolean or Relation but got: " + self.currentToken
- raise diag
-
- return irt
-
- def modifiers(self):
- mods = []
- while (self.currentToken == modifierSeparator):
- self.fetch_token()
- mod = self.currentToken
- mod = mod.lower()
- if (mod == modifierSeparator):
- diag = Diagnostic20()
- diag.details = "Null modifier"
- raise diag
- self.fetch_token()
- comp = self.currentToken
- if (comp in order):
- self.fetch_token()
- value = self.currentToken
- self.fetch_token()
- else:
- comp = ""
- value = ""
- mods.append(ModifierClause(mod, comp, value))
- return mods
-
-
- def boolean(self):
- """ Find boolean """
- self.currentToken = self.currentToken.lower()
- if self.currentToken in booleans:
- bool = booleanType(self.currentToken)
- self.fetch_token()
- bool.modifiers = self.modifiers()
- for b in bool.modifiers:
- b.parent = bool
-
- else:
- diag = Diagnostic37()
- diag.details = self.currentToken
- raise diag
-
- return bool
-
- def relation(self):
- """ Find relation """
- self.currentToken = self.currentToken.lower()
- rel = relationType(self.currentToken)
- self.fetch_token()
- rel.modifiers = self.modifiers()
- for r in rel.modifiers:
- r.parent = rel
-
- return rel
-
-
-
-class XCQLParser:
- """ Parser for XCQL using some very simple DOM """
-
- def firstChildElement(self, elem):
- """ Find first child which is an Element """
- for c in elem.childNodes:
- if c.nodeType == Node.ELEMENT_NODE:
- return c
- return None
-
- def firstChildData(self,elem):
- """ Find first child which is Data """
- for c in elem.childNodes:
- if c.nodeType == Node.TEXT_NODE:
- return c
- return None
-
- def searchClause(self, elem):
- """ Process a """
- sc = searchClauseType()
- for c in elem.childNodes:
- if c.nodeType == Node.ELEMENT_NODE:
- if c.localName == "index":
- sc.index = indexType(self.firstChildData(c).data.lower())
- elif c.localName == "term":
- sc.term = termType(self.firstChildData(c).data)
- elif c.localName == "relation":
- sc.relation = self.relation(c)
- elif c.localName == "prefixes":
- sc.prefixes = self.prefixes(c)
- else:
- raise(ValueError, c.localName)
- return sc
-
- def triple(self, elem):
- """ Process a """
- trip = tripleType()
- for c in elem.childNodes:
- if c.nodeType == Node.ELEMENT_NODE:
- if c.localName == "boolean":
- trip.boolean = self.boolean(c)
- elif c.localName == "prefixes":
- trip.prefixes = self.prefixes(c)
- elif c.localName == "leftOperand":
- c2 = self.firstChildElement(c)
- if c2.localName == "searchClause":
- trip.leftOperand = self.searchClause(c2)
- else:
- trip.leftOperand = self.triple(c2)
- else:
- c2 = self.firstChildElement(c)
- if c2.localName == "searchClause":
- trip.rightOperand = self.searchClause(c2)
- else:
- trip.rightOperand = self.triple(c2)
- return trip
-
- def relation(self, elem):
- """ Process a """
- rel = relationType()
- for c in elem.childNodes:
- if c.nodeType == Node.ELEMENT_NODE:
- if c.localName == "value":
- rel.value = c.firstChild.data.lower()
- elif c.localName == "modifiers":
- mods = []
- for c2 in c.childNodes:
- if c2.nodeType == Node.ELEMENT_NODE:
- if c2.localName == "modifier":
- for c3 in c2.childNodes:
- if c3.localName == "value":
- val = self.firstChildData(c2).data.lower()
- mods.append(val)
- rel.modifiers = mods
- return rel
-
- def boolean(self, elem):
- "Process a "
- bool = booleanType()
- for c in elem.childNodes:
- if c.nodeType == Node.ELEMENT_NODE:
- if c.localName == "value":
- bool.value = self.firstChildData(c).data.lower()
- else:
- # Can be in any order, so we need to extract, then order
- mods = {}
- for c2 in c.childNodes:
- if c2.nodeType == Node.ELEMENT_NODE:
- if c2.localName == "modifier":
- type = ""
- value = ""
- for c3 in c2.childNodes:
- if c3.nodeType == Node.ELEMENT_NODE:
- if c3.localName == "value":
- value = self.firstChildData(c3).data.lower()
- elif c3.localName == "type":
- type = self.firstChildData(c3).data
- mods[type] = value
-
- modlist = []
- for t in booleanModifierTypes[1:]:
- if mods.has_key(t):
- modlist.append(mods[t])
- else:
- modlist.append('')
- bool.modifiers = modlist
- return bool
-
- def prefixes(self, elem):
- "Process "
- prefs = {}
- for c in elem.childNodes:
- if c.nodeType == Node.ELEMENT_NODE:
- # prefix
- name = ""
- identifier = ""
- for c2 in c.childNodes:
- if c2.nodeType == Node.ELEMENT_NODE:
- if c2.localName == "name":
- name = self.firstChildData(c2).data.lower()
- elif c2.localName == "identifier":
- identifier = self.firstChildData(c2).data
- prefs[name] = identifier
- return prefs
-
-
-def xmlparse(s):
- """ API. Return a seachClause/triple object from XML string """
- doc = parseString(s)
- q = xcqlparse(doc.firstChild)
- return q
-
-def xcqlparse(query):
- """ API. Return a searchClause/triple object from XML DOM objects"""
- # Requires only properties of objects so we don't care how they're generated
-
- p = XCQLParser()
- if query.localName == "searchClause":
- return p.searchClause(query)
- else:
- return p.triple(query)
-
-
-def parse(query):
- """ API. Return a searchClause/triple object from CQL string"""
-
- try:
- query = query.encode("utf-8")
- except:
- diag = Diagnostic10()
- diag.details = "Cannot parse non utf-8 characters"
- raise diag
-
- q = StringIO(query)
- lexer = CQLshlex(q)
- parser = CQLParser(lexer)
- object = parser.query()
- if parser.currentToken != '':
- diag = Diagnostic10()
- diag.details = "Unprocessed tokens remain: " + repr(parser.currentToken)
- raise diag
- else:
- del lexer
- del parser
- del q
- return object
-
-
-# Assign our objects to generate
-tripleType = Triple
-booleanType = Boolean
-relationType = Relation
-searchClauseType = SearchClause
-modifierClauseType = ModifierClause
-modifierTypeType = ModifierType
-indexType = Index
-termType = Term
-
-try:
- from CQLUtils import *
- tripleType = CTriple
- booleanType = CBoolean
- relationType = CRelation
- searchClauseType = CSearchClause
- modifierClauseType = CModifierClause
- modifierTypeType = CModifierType
- indexType = CIndex
- termType = CTerm
-except:
- # Nested scopes. Utils needs our classes to parent
- # We need its classes to build (maybe)
- pass
-
-
-if (__name__ == "__main__"):
- import sys;
- s = sys.stdin.readline()
- try:
- q = parse(s);
- except SRWDiagnostic, diag:
- # Print a full version, not just str()
- print "Diagnostic Generated."
- print " Code: " + str(diag.code)
- print " Details: " + str(diag.details)
- print " Message: " + str(diag.message)
- else:
- print q.toXCQL()[:-1];
-
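
The module removed above is normally driven through its parse() entry point; a minimal usage sketch (Python 2, matching the code above; the query string is invented for illustration):

    from PyZ3950 import CQLParser

    query = CQLParser.parse('dc.title any "fish frog" and dc.creator = sanderson')
    print query.toCQL()    # normalised CQL serialisation of the parse tree
    print query.toXCQL()   # XCQL (XML) rendering of the same tree
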
diff --git a/python/PyZ3950/CQLUtils.py b/python/PyZ3950/CQLUtils.py
deleted file mode 100644
index d5eb793..0000000
--- a/python/PyZ3950/CQLUtils.py
+++ /dev/null
@@ -1,544 +0,0 @@
-
-"""CQL utility functions and subclasses"""
-
-from CQLParser import *
-from types import ListType, IntType
-from SRWDiagnostics import *
-
-from PyZ3950 import z3950, asn1, oids
-from PyZ3950.zdefs import make_attr
-
-asn1.register_oid (oids.Z3950_QUERY_CQL, asn1.GeneralString)
-
-class ZCQLConfig:
-
- contextSets = {'dc' : 'info:srw/cql-context-set/1/dc-v1.1',
- 'cql' : 'info:srw/cql-context-set/1/cql-v1.1',
- 'bath' : 'http://zing.z3950.org/cql/bath/2.0/',
- 'zthes' : 'http://zthes.z3950.org/cql/1.0/',
- 'ccg' : 'http://srw.cheshire3.org/contextSets/ccg/1.1/ ',
- 'rec' : 'info:srw/cql-context-set/2/rec-1.0',
- 'net' : 'info:srw/cql-context-set/2/net-1.0'}
-
- dc = {'title' : 4,
- 'subject' : 21,
- 'creator' : 1003,
- 'author' : 1003,
- 'editor' : 1020,
- 'contributor' : 1018,
- 'publisher' : 1018,
- 'description' : 62,
- 'date' : 30,
- 'resourceType' : 1031,
- 'type' : 1031,
- 'format' : 1034,
- 'identifier' : 12,
- 'source' : 1019,
- 'language' : 54,
- 'relation' : 1016,
- 'coverage' : 1016,
- 'rights' : 1016
- }
-
- cql = {'anywhere' : 1016,
- 'serverChoice' : 1016}
-
- # The common bib1 points
- bib1 = {"personal_name" : 1,
- "corporate_name" : 2,
- "conference_name" : 3,
- "title" : 4,
- "title_series" : 5,
- "title_uniform" : 6,
- "isbn" : 7,
- "issn" : 8,
- "lccn" : 9,
- "local_number" : 12,
- "dewey_number" : 13,
- "lccn" : 16,
- "local_classification" : 20,
- "subject" : 21,
- "subject_lc" : 27,
- "subject_local" : 29,
- "date" : 30,
- "date_publication" : 31,
- "date_acquisition" : 32,
- "local_call_number" : 53,
- "abstract" : 62,
- "note" : 63,
- "record_type" : 1001,
- "name" : 1002,
- "author" : 1003,
- "author_personal" : 1004,
- "identifier" : 1007,
- "text_body" : 1010,
- "date_modified" : 1012,
- "date_added" : 1011,
- "concept_text" : 1014,
- "any" : 1016,
- "default" : 1017,
- "publisher" : 1018,
- "record_source" : 1019,
- "editor" : 1020,
- "docid" : 1032,
- "anywhere" : 1035,
- "sici" : 1037
- }
-
- exp1 = {"explainCategory" :1,
- "humanStringLanguage" : 2,
- "databaseName" : 3,
- "serverName" : 4,
- "attributeSetOID" : 5,
- "recordSyntaxOID" : 6,
- "tagSetOID" : 7,
- "extendedServiceOID" : 8,
- "dateAdded" : 9,
- "dateChanged" : 10,
- "dateExpires" : 11,
- "elementSetName" : 12,
- "processingContext" : 13,
- "processingName" : 14,
- "termListName" : 15,
- "schemaOID" : 16,
- "producer" : 17,
- "supplier" : 18,
- "availability" : 19,
- "proprietary" : 20,
- "userFee" : 21,
- "variantSetOID" : 22,
- "unitSystem" : 23,
- "keyword" : 24,
- "explainDatabase" : 25,
- "processingOID" : 26
- }
-
- xd1 = {"title" : 1,
- "subject" : 2,
- "name" : 3,
- "description" : 4,
- "date" : 5,
- "type" : 6,
- "format" : 7,
- "identifier" : 8,
- "source" : 9,
- "langauge" : 10,
- "relation" : 11,
- "coverage" : 12,
- "rights" : 13}
-
- util = {"record_date" : 1,
- "record_agent" : 2,
- "record_language" : 3,
- "control_number" : 4,
- "cost" : 5,
- "record_syntax" : 6,
- "database_schema" : 7,
- "score" : 8,
- "rank" : 9,
- "result_set_position" : 10,
- "all" : 11,
- "anywhere" : 12,
- "server_choice" : 13,
- "wildcard" : 14,
- "wildpath" : 15}
-
- defaultAttrSet = z3950.Z3950_ATTRS_BIB1_ov
-
- def __init__(self):
- self.util1 = self.util
- self.xd = self.xd1
-
- def attrsToCql(self, attrs):
- hash = {}
- for c in attrs:
- if (not c[0]):
- c[0] = self.defaultAttrSet
- hash[(c[0], c[1])] = c[2]
- bib1 = z3950.Z3950_ATTRS_BIB1_ov
- use = hash.get((bib1, 1), 4)
- rel = hash.get((bib1, 2), 3)
- posn = hash.get((bib1, 3), None)
- struct = hash.get((bib1, 4), None)
- trunc = hash.get((bib1, 5), None)
- comp = hash.get((bib1, 6), None)
-
- index = None
- if (not isinstance(use, int)):
- index = indexType(use)
- else:
- for v in self.dc.items():
- if use == v[1]:
- index = indexType("dc.%s" % (v[0]))
- break
- if not index:
- for v in self.bib1.items():
- if (use == v[1]):
- index = indexType("bib1.%s" % (v[0]))
- break
- if not index:
- index = indexType("bib1.%i" % (use))
-
- relations = ['', '<', '<=', '=', '>=', '>', '<>']
- if (comp == 3):
- relation = relationType("exact")
- elif (rel > 6):
- if struct in [2, 6]:
- relation = relationType('any')
- else:
- relation = relationType('=')
- else:
- relation = relationType(relations[rel])
-
- if (rel == 100):
- relation.modifiers.append(modifierClauseType('phonetic'))
- elif (rel == 101):
- relation.modifiers.append(modifierClauseType('stem'))
- elif (rel == 102):
- relation.modifiers.append(modifierClauseType('relevant'))
-
- if (struct in [2, 6]):
- relation.modifiers.append(modifierClauseType('word'))
- elif (struct in [4, 5, 100]):
- relation.modifiers.append(modifierClauseType('date'))
- elif (struct == 109):
- relation.modifiers.append(modifierClauseType('number'))
- elif (struct in [1, 108]):
- relation.modifiers.append(modifierClauseType('string'))
- elif (struct == 104):
- relation.modifiers.append(modifierClauseType('uri'))
-
- return (index, relation)
-
-zConfig = ZCQLConfig()
-
-def rpn2cql(rpn, config=zConfig, attrSet=None):
- if rpn[0] == 'op':
- # single search clause
- op = rpn[1]
- type = op[0]
- if type == 'attrTerm':
- attrs = op[1].attributes
- term = op[1].term
- combs = []
- for comb in attrs:
- if hasattr(comb, 'attributeSet'):
- attrSet = comb.attributeSet
- if hasattr(comb, 'attributeType'):
- aType = comb.attributeType
- else:
- # Broken!
- aType = 1
- vstruct = comb.attributeValue
- if (vstruct[0] == 'numeric'):
- aValue = vstruct[1]
- else:
- # Complex attr value
- vstruct = vstruct[1]
- if (hasattr(vstruct, 'list')):
- aValue = vstruct.list[0][1]
- else:
- # semanticAction?
- aValue = vstruct.semanticAction[0][1]
- combs.append([attrSet, aType, aValue])
- # Now let config do its thing
- (index, relation) = config.attrsToCql(combs)
- return searchClauseType(index, relation, termType(term[1]))
-
- elif type == 'resultSet':
- return searchClauseType(indexType('cql.resultSetId'), relationType('='), termType(op[1]))
-
- elif rpn[0] == 'rpnRpnOp':
- triple = rpn[1]
- bool = triple.op
- lhs = triple.rpn1
- rhs = triple.rpn2
- ctrip = tripleType()
- ctrip.leftOperand = rpn2cql(lhs, config)
- ctrip.rightOperand = rpn2cql(rhs, config)
- ctrip.boolean = booleanType(bool[0])
- if bool[0] == 'prox':
- distance = bool[1].distance
- order = bool[1].ordered
- if order:
- order = "ordered"
- else:
- order = "unordered"
- relation = bool[1].relationType
- rels = ["", "<", "<=", "=", ">=", ">", "<>"]
- relation = rels[relation]
- unit = bool[1].proximityUnitCode
- units = ["", "character", "word", "sentence", "paragraph", "section", "chapter", "document", "element", "subelement", "elementType", "byte"]
- if unit[0] == "known":
- unit = units[unit[1]]
- mods = [modifierClauseType('distance', relation, str(distance)), modifierClauseType('unit', '=', unit), modifierClauseType(order)]
- ctrip.boolean.modifiers = mods
- return ctrip
-
- elif rpn[0] == 'type_1':
- q = rpn[1]
- return rpn2cql(q.rpn, config, q.attributeSet)
-
-
-
-
-class CSearchClause(SearchClause):
-
- def convertMetachars(self, t):
- "Convert SRW meta characters in to Cheshire's meta characters"
- # Fail on ?, ^ or * not at the end.
- if (count(t, "?") != count(t, "\\?")):
- diag = Diagnostic28()
- diag.details = "? Unsupported"
- raise diag
- elif (count(t, "^") != count(t, "\\^")):
- diag = Diagnostic31()
- diag.details = "^ Unsupported"
- raise diag
- elif (count(t, "*") != count(t, "\\*")):
- if t[-1] != "*" or t[-2] == "\\":
- diag = Diagnostic28()
- diag.details = "Non trailing * unsupported"
- raise diag
- else:
- t[-1] = "#"
- t = replace(t, "\\^", "^")
- t = replace(t, "\\?", "?")
- t = replace(t, "\\*", "*")
- return t
-
- def toRPN(self, top=None):
- if not top:
- top = self
-
- if (self.relation.value in ['any', 'all']):
- # Need to split this into and/or tree
- if (self.relation.value == 'any'):
- bool = " or "
- else:
- bool = " and "
- words = self.term.value.split()
- self.relation.value = '='
- # Add 'word' relationModifier
- self.relation.modifiers.append(CModifierClause('cql.word'))
-
- # Create CQL, parse it, walk new tree
- idxrel = "%s %s" % (self.index.toCQL(), self.relation.toCQL())
- text = []
- for w in words:
- text.append('%s "%s"' % (idxrel, w))
- cql = bool.join(text)
- tree = parse(cql)
- tree.prefixes = self.prefixes
- tree.parent = self.parent
- tree.config = self.config
- return tree.toRPN(top)
- else:
- # attributes, term
- # AttributeElement: attributeType, attributeValue
- # attributeValue ('numeric', n) or ('complex', struct)
- if (self.index.value == 'resultsetid'):
- return ('op', ('resultSet', self.term.value))
-
- clause = z3950.AttributesPlusTerm()
- attrs = self.index.toRPN(top)
- if (self.term.value.isdigit()):
- self.relation.modifiers.append(CModifierClause('cql.number'))
- relattrs = self.relation.toRPN(top)
- attrs.update(relattrs)
- butes =[]
- for e in attrs.iteritems():
- butes.append((e[0][0], e[0][1], e[1]))
-
- clause.attributes = [make_attr(*e) for e in butes]
- clause.term = self.term.toRPN(top)
-
- return ('op', ('attrTerm', clause))
-
-
-class CBoolean(Boolean):
-
- def toRPN(self, top):
- op = self.value
- if (self.value == 'not'):
- op = 'and-not'
- elif (self.value == 'prox'):
- # Create ProximityOperator
- prox = z3950.ProximityOperator()
- # distance, ordered, proximityUnitCode, relationType
- u = self['unit']
- try:
- units = ["", "character", "word", "sentence", "paragraph", "section", "chapter", "document", "element", "subelement", "elementType", "byte"]
- if (u.value in units):
- prox.unit = ('known', units.index(u.value))
- else:
- # Uhhhh.....
- prox.unit = ('private', int(u.value))
- except:
- prox.unit = ('known', 2)
-
- d = self['distance']
- try:
- prox.distance = int(d.value)
- except:
- if (prox.unit == ('known', 2)):
- prox.distance = 1
- else:
- prox.distance = 0
- try:
- rels = ["", "<", "<=", "=", ">=", ">", "<>"]
- prox.relationType = rels.index(d.comparison)
- except:
- prox.relationType = 2
-
- prox.ordered = bool(self['ordered'])
- return ('op', ('prox', prox))
-
- return (op, None)
-
-class CTriple(Triple):
-
- def toRPN(self, top=None):
- """rpnRpnOp"""
- if not top:
- top = self
-
- op = z3950.RpnRpnOp()
- op.rpn1 = self.leftOperand.toRPN(top)
- op.rpn2 = self.rightOperand.toRPN(top)
- op.op = self.boolean.toRPN(top)
- return ('rpnRpnOp', op)
-
-
-class CIndex(Index):
- def toRPN(self, top):
- self.resolvePrefix()
- pf = self.prefix
- if (not pf and self.prefixURI):
- # We have a default
- for k in zConfig.contextSets:
- if zConfig.contextSets[k] == self.prefixURI:
- pf = k
- break
-
- # Default BIB1
- set = oids.oids['Z3950']['ATTRS']['BIB1']['oid']
-
- if (hasattr(top, 'config') and top.config):
- config = top.config
- # Check SRW Configuration
- cql = config.contextSetNamespaces['cql']
- index = self.value
- if self.prefixURI == cql and self.value == "serverchoice":
- # Have to resolve our prefixes etc, so create an index object to do it
- index = config.defaultIndex
- cidx = CIndex(index)
- cidx.config = config
- cidx.parent = config
- cidx.resolvePrefix()
- pf = cidx.prefix
- index = cidx.value
-
- if config.indexHash.has_key(pf):
- if config.indexHash[pf].has_key(index):
- idx = config.indexHash[pf][index]
- # Need to map from this list to RPN list
- attrs = {}
- for i in idx:
- set = asn1.OidVal(map(int, i[0].split('.')))
- type = int(i[1])
- if (i[2].isdigit()):
- val = int(i[2])
- else:
- val = i[2]
- attrs[(set, type)] = val
- return attrs
- else:
- diag = Diagnostic16()
- diag.details = index
- diag.message = "Unknown index"
- raise diag
- else:
- diag = Diagnostic15()
- diag.details = pf
- diag.message = "Unknown context set"
- raise diag
- elif (hasattr(zConfig, pf)):
- mp = getattr(zConfig, pf)
- if (mp.has_key(self.value)):
- val = mp[self.value]
- else:
- val = self.value
- elif (oids.oids['Z3950']['ATTRS'].has_key(pf.upper())):
- set = oids.oids['Z3950']['ATTRS'][pf.upper()]['oid']
- if (self.value.isdigit()):
- # bib1.1018
- val = int(self.value)
- else:
- # complex attribute for bib1
- val = self.value
- else:
- print "Can't resolve %s" % pf
- raise(ValueError)
-
- return {(set, 1) : val}
-
-
-class CRelation(Relation):
- def toRPN(self, top):
- rels = ['', '<', '<=', '=', '>=', '>', '<>']
- set = z3950.Z3950_ATTRS_BIB1_ov
- vals = [None, None, None, None, None, None, None]
-
- if self.value in rels:
- vals[2] = rels.index(self.value)
- elif self.value in ['exact', 'scr']:
- vals[2] = 3
- elif (self.value == 'within'):
- vals[2] = 104
-
- if self['relevant']:
- vals[2] = 102
- elif self['stem']:
- vals[2] = 101
- elif self['phonetic']:
- vals[2] = 100
-
- if self['number']:
- vals[4] = 109
- vals[5] = 100
- elif self['date']:
- vals[4] = 5
- elif self['word']:
- vals[4] = 2
-
- if self.value == 'exact':
- vals[3] = 1
- vals[5] = 100
- # vals[6] = 3
- else:
- vals[3] = 3
- # vals[6] = 1
-
- attrs = {}
- for x in range(1,7):
- if vals[x]:
- attrs[(z3950.Z3950_ATTRS_BIB1_ov, x)] = vals[x]
-
- return attrs
-
-
-class CTerm(Term):
- def toRPN(self, top):
- return ('general', self.value)
-
-class CModifierClause(ModifierClause):
- pass
-
-class CModifierType(ModifierType):
- pass
-
-
-
-
-
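
The C-prefixed subclasses removed above add the CQL-to-Z39.50-RPN direction on top of CQLParser; a hedged sketch of that path (Python 2; the query is invented, and real use is normally driven from the ZOOM layer):

    from PyZ3950 import CQLParser

    # parse() hands back the C* subclasses when CQLUtils imports successfully
    clause = CQLParser.parse('dc.title = fish')
    rpn = clause.toRPN()   # ('op', ('attrTerm', <z3950.AttributesPlusTerm>)) against bib-1
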
diff --git a/python/PyZ3950/PyZ3950_parsetab.py b/python/PyZ3950/PyZ3950_parsetab.py
deleted file mode 100644
index 804de29..0000000
--- a/python/PyZ3950/PyZ3950_parsetab.py
+++ /dev/null
@@ -1,40 +0,0 @@
-
-# PyZ3950_parsetab.py
-# This file is automatically generated. Do not edit.
-
-_lr_method = 'SLR'
-
-_lr_signature = '\xfc\xb2\xa8\xb7\xd9\xe7\xad\xba"\xb2Ss\'\xcd\x08\x16'
-
-_lr_action_items = {'QUOTEDVALUE':([5,26,0,19,16,],[1,1,1,1,1,]),'LOGOP':([3,25,4,14,9,6,27,23,13,20,22,1,],[-5,-9,-14,-13,16,-8,16,-7,16,-6,-4,-12,]),'SET':([0,16,5,26,],[11,11,11,11,]),'RPAREN':([27,23,3,22,1,25,13,4,20,6,14,],[28,-7,-5,-4,-12,-9,20,-14,-6,-8,-13,]),'$':([8,14,2,23,3,20,28,25,9,1,4,6,22,],[0,-13,-1,-7,-5,-6,-3,-9,-2,-12,-14,-8,-4,]),'SLASH':([21,],[26,]),'ATTRSET':([0,],[7,]),'QUAL':([0,26,16,18,5,],[10,10,10,24,10,]),'COMMA':([10,12,24,],[-10,18,-11,]),'LPAREN':([26,0,16,7,5,],[5,5,5,15,5,]),'WORD':([19,17,14,0,5,26,6,16,15,1,4,25,],[4,23,-13,4,4,4,14,4,21,-12,-14,14,]),'RELOP':([11,24,10,12,],[17,-11,-10,19,]),}
-
-_lr_action = { }
-for _k, _v in _lr_action_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- _lr_action[(_x,_k)] = _y
-del _lr_action_items
-
-_lr_goto_items = {'cclfind_or_attrset':([0,],[2,]),'elements':([5,26,16,0,],[3,3,22,3,]),'quallist':([5,26,0,16,],[12,12,12,12,]),'val':([5,16,26,19,0,],[6,6,6,25,6,]),'top':([0,],[8,]),'cclfind':([5,0,26,],[13,9,27,]),}
-
-_lr_goto = { }
-for _k, _v in _lr_goto_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- _lr_goto[(_x,_k)] = _y
-del _lr_goto_items
-_lr_productions = [
- ("S'",1,None,None,None),
- ('top',1,'p_top','./ccl.py',154),
- ('cclfind_or_attrset',1,'p_cclfind_or_attrset_1','./ccl.py',158),
- ('cclfind_or_attrset',6,'p_cclfind_or_attrset_2','./ccl.py',162),
- ('cclfind',3,'p_ccl_find_1','./ccl.py',166),
- ('cclfind',1,'p_ccl_find_2','./ccl.py',170),
- ('elements',3,'p_elements_1','./ccl.py',174),
- ('elements',3,'p_elements_2','./ccl.py',196),
- ('elements',1,'p_elements_3','./ccl.py',202),
- ('elements',3,'p_elements_4','./ccl.py',206),
- ('quallist',1,'p_quallist_1','./ccl.py',213),
- ('quallist',3,'p_quallist_2','./ccl.py',217),
- ('val',1,'p_val_1','./ccl.py',221),
- ('val',2,'p_val_2','./ccl.py',225),
- ('val',1,'p_val_3','./ccl.py',229),
-]
diff --git a/python/PyZ3950/SRWDiagnostics.py b/python/PyZ3950/SRWDiagnostics.py
deleted file mode 100644
index 2829bba..0000000
--- a/python/PyZ3950/SRWDiagnostics.py
+++ /dev/null
@@ -1,451 +0,0 @@
-
-# Base Class
-
-class SRWDiagnostic (Exception):
- """ Base Diagnostic Class"""
- code = 0
- uri = "info:srw/diagnostic/1/"
- details = ""
- message = ""
-
- surrogate = 0
- fatal = 1
-
- def __str__(self):
- return "%s [%s]: %s" % (self.uri, self.message, self.details)
-
- # NB 'Need' name for serialization in SRW
- def __init__(self, name=None):
- if (self.code):
- self.uri = "%s%d" % (self.uri, self.code)
- Exception.__init__(self)
-
-# Diagnostic Types
-
-class GeneralDiagnostic (SRWDiagnostic):
- pass
-
-class CQLDiagnostic (SRWDiagnostic):
- pass
-
-class RecordDiagnostic (SRWDiagnostic):
- pass
-
-class ResultSetDiagnostic (SRWDiagnostic):
- pass
-
-class SortDiagnostic (SRWDiagnostic):
- pass
-
-class StyleDiagnostic (SRWDiagnostic):
- pass
-
-class ScanDiagnostic (SRWDiagnostic):
- pass
-
-class DeprecatedDiagnostic(SRWDiagnostic):
- def __init__(self, name=None):
- print "WARNING: Use of deprecated diagnostic %s" % (self)
- SRWDiagnostic.__init__(self)
-
-class ExplainDiagnostic (DeprecatedDiagnostic):
- pass
-
-
-# Rob's (empty) diagnostic set
-class RobDiagnostic (SRWDiagnostic):
- uri = "info:srw/diagnostic/2/"
-
-
-# Individual Diagnostics
-
-class Diagnostic1 (GeneralDiagnostic):
- code = 1
- message = "General system error"
-
-class Diagnostic2 (GeneralDiagnostic):
- code = 2
- message = "System temporarily unavailable"
-
-class Diagnostic3 (GeneralDiagnostic):
- code = 3
- message = "Authentication error"
-
-class Diagnostic4 (GeneralDiagnostic):
- code = 4
- message = "Unsupported operation"
-
-class Diagnostic5 (GeneralDiagnostic):
- code = 5
- message = "Unsupported version"
-
-class Diagnostic6 (GeneralDiagnostic):
- code = 6
- message = "Unsupported parameter value"
-
-class Diagnostic7 (GeneralDiagnostic):
- code = 7
- message = "Mandatory parameter not supplied"
-
-class Diagnostic8 (GeneralDiagnostic):
- code = 8
- message = "Unknown parameter"
-
-
-
-class Diagnostic10 (CQLDiagnostic):
- code = 10
- message = "Malformed query"
-
-class Diagnostic13 (CQLDiagnostic):
- code = 13
- message = "Unsupported use of parentheses"
-
-class Diagnostic14 (CQLDiagnostic):
- code = 14
- message = "Unsupported use of quotes"
-
-class Diagnostic15 (CQLDiagnostic):
- code = 15
- message = "Unsupported context set"
-
-class Diagnostic16 (CQLDiagnostic):
- code = 16
- message = "Unsupported index"
-
-class Diagnostic18 (CQLDiagnostic):
- code = 18
- message = "Unsupported combination of indexes"
-
-class Diagnostic19 (CQLDiagnostic):
- code = 19
- message = "Unsupported relation"
-
-class Diagnostic20 (CQLDiagnostic):
- code = 20
- message = "Unsupported relation modifier"
-
-class Diagnostic21 (CQLDiagnostic):
- code = 21
- message = "Unsupported combination of relation modifiers"
-
-class Diagnostic22 (CQLDiagnostic):
- code = 22
- message = "Unsupported combination of relation and index"
-
-class Diagnostic23 (CQLDiagnostic):
- code = 23
- message = "Too many characters in term"
-
-class Diagnostic24 (CQLDiagnostic):
- code = 24
- message = "Unsupported combination of relation and term"
-
-class Diagnostic26 (CQLDiagnostic):
- code = 26
- message = "Non special character escaped in term"
-
-class Diagnostic27 (CQLDiagnostic):
- code = 27
- message = "Empty term unsupported"
-
-class Diagnostic28 (CQLDiagnostic):
- code = 28
- message = "Masking character not supported"
-
-class Diagnostic29 (CQLDiagnostic):
- code = 29
- message = "Masked words too short"
-
-class Diagnostic30 (CQLDiagnostic):
- code = 30
- message = "Too many masking characters in term"
-
-class Diagnostic31 (CQLDiagnostic):
- code = 31
- message = "Anchoring character not supported"
-
-class Diagnostic32 (CQLDiagnostic):
- code = 32
- message = "Anchoring character in unsupported position."
-
-class Diagnostic33 (CQLDiagnostic):
- code = 33
- message = "Combination of proximity/adjacency and masking characters not supported"
-
-class Diagnostic34 (CQLDiagnostic):
- code = 34
- message = "Combination of proximity/adjacency and anchoring characters not supported"
-
-class Diagnostic35 (CQLDiagnostic):
- code = 35
- message = "Term only stopwords"
-
-class Diagnostic36 (CQLDiagnostic):
- code = 36
- message = "Term in invalid format for index or relation"
-
-class Diagnostic37 (CQLDiagnostic):
- code = 37
- message = "Unsupported boolean operator"
-
-class Diagnostic38 (CQLDiagnostic):
- code = 38
- message = "Too many boolean operators"
-
-class Diagnostic39 (CQLDiagnostic):
- code = 39
- message = "Proximity not supported"
-
-class Diagnostic40 (CQLDiagnostic):
- code = 40
- message = "Unsupported proximity relation"
-
-class Diagnostic41 (CQLDiagnostic):
- code = 41
- message = "Unsupported proximity distance"
-
-class Diagnostic42 (CQLDiagnostic):
- code = 42
- message = "Unsupported proximity unit"
-
-class Diagnostic43 (CQLDiagnostic):
- code = 43
- message = "Unsupported proximity ordering"
-
-class Diagnostic44 (CQLDiagnostic):
- code = 44
- message = "Unsupported combination of proximity modifiers"
-
-
-
-class Diagnostic50 (ResultSetDiagnostic):
- code = 50
- message = "Result sets not supported"
-
-class Diagnostic51 (ResultSetDiagnostic):
- code = 51
- message = "Result set does not exist"
-
-class Diagnostic52 (ResultSetDiagnostic):
- code = 52
- message = "Result set temporarily unavailable"
-
-class Diagnostic53 (ResultSetDiagnostic):
- code = 53
- message = "Result sets only supported for retrieval"
-
-class Diagnostic55 (ResultSetDiagnostic):
- code = 55
- message = "Combination of result sets with search terms not supported"
-
-class Diagnostic58 (ResultSetDiagnostic):
- code = 58
- message = "Result set created with unpredictable partial results available"
-
-class Diagnostic59 (ResultSetDiagnostic):
- code = 59
- message = "Result set created with valid partial results available"
-
-
-class Diagnostic60 (RecordDiagnostic):
- code = 60
- message = "Too many records retrieved"
-
-class Diagnostic61 (RecordDiagnostic):
- code = 61
- message = "First record position out of range"
-
-class Diagnostic64 (RecordDiagnostic):
- code = 64
- message = "Record temporarily unavailable"
- surrogate = 1
-
-class Diagnostic65 (RecordDiagnostic):
- code = 65
- message = "Record does not exist"
- surrogate = 1
-
-class Diagnostic66 (RecordDiagnostic):
- code = 66
- message = "Unknown schema for retrieval"
-
-class Diagnostic67 (RecordDiagnostic):
- code = 67
- message = "Record not available in this schema"
- surrogate = 1
-
-class Diagnostic68 (RecordDiagnostic):
- code = 68
- message = "Not authorised to send record"
- surrogate = 1
-
-class Diagnostic69 (RecordDiagnostic):
- code = 69
- message = "Not authorised to send record in this schema"
- surrogate = 1
-
-class Diagnostic70 (RecordDiagnostic):
- code = 70
- message = "Record too large to send"
- surrogate = 1
-
-class Diagnostic71 (RecordDiagnostic):
- code = 71
- message = "Unsupported record packing"
-
-class Diagnostic72 (RecordDiagnostic):
- code = 72
- message = "XPath retrieval unsupported"
-
-class Diagnostic73 (RecordDiagnostic):
- code = 73
- message = "XPath expression contains unsupported feature"
-
-class Diagnostic74 (RecordDiagnostic):
- code = 74
- message = "Unable to evaluate XPath expression"
-
-
-
-class Diagnostic80 (SortDiagnostic):
- code = 80
- message = "Sort not supported"
-
-class Diagnostic82 (SortDiagnostic):
- code = 82
- message = "Unsupported sort sequence"
-
-class Diagnostic83 (SortDiagnostic):
- code = 83
- message = "Too many records to sort"
-
-class Diagnostic84 (SortDiagnostic):
- code = 84
- message = "Too many sort keys"
-
-class Diagnostic86 (SortDiagnostic):
- code = 86
- message = "Incompatible record formats"
-
-class Diagnostic87 (SortDiagnostic):
- code = 87
- message = "Unsupported schema for sort"
-
-class Diagnostic88 (SortDiagnostic):
- code = 88
- message = "Unsupported tag path for sort"
-
-class Diagnostic89 (SortDiagnostic):
- code = 89
- message = "Tag path unsupported for schema"
-
-class Diagnostic90 (SortDiagnostic):
- code = 90
- message = "Unsupported direction value"
-
-class Diagnostic91 (SortDiagnostic):
- code = 91
- message = "Unsupported case value"
-
-class Diagnostic92 (SortDiagnostic):
- code = 92
- message = "Unsupported missing value action"
-
-
-class Diagnostic110 (StyleDiagnostic):
- code = 110
- message = "Stylesheets not supported"
-
-class Diagnostic111 (StyleDiagnostic):
- code = 111
- message = "Unsupported stylesheet"
-
-class Diagnostic120 (ScanDiagnostic):
- code = 120
- message = "Response position out of range"
-
-class Diagnostic121 (ScanDiagnostic):
- code = 121
- message = "Too many terms requested"
-
-
-
-
-
-# Deprecated diagnostics
-
-class Diagnostic11 (DeprecatedDiagnostic):
- code = 11
- message = "Unsupported query type"
-
-class Diagnostic12 (DeprecatedDiagnostic):
- code = 12
- message = "Too many characters in query"
-
-class Diagnostic17 (DeprecatedDiagnostic):
- code = 17
- message = "Illegal or unsupported combination of index and index set."
-
-class Diagnostic25 (DeprecatedDiagnostic):
- code = 25
- message = "Special characters not quoted in term"
-
-class Diagnostic45 (DeprecatedDiagnostic):
- code = 45
- message = "Index set name (prefix) assigned to multiple identifiers"
-
-class Diagnostic54 (DeprecatedDiagnostic):
- code = 54
- message = "Retrieval may only occur from an existing result set"
-
-class Diagnostic56 (DeprecatedDiagnostic):
- code = 56
- message = "Only combination of single result set with search terms supported"
-
-class Diagnostic57 (DeprecatedDiagnostic):
- code = 57
- message = "Result set created but no records available"
-
-class Diagnostic62 (DeprecatedDiagnostic):
- code = 62
- message = "Negative number of records requested"
-
-class Diagnostic63 (DeprecatedDiagnostic):
- code = 63
- message = "System error in retrieving records"
-
-class Diagnostic81 (DeprecatedDiagnostic):
- code = 81
- message = "Unsupported sort type"
-
-class Diagnostic85 (DeprecatedDiagnostic):
- code = 85
- message = "Duplicate sort keys"
-
-class Diagnostic100 (ExplainDiagnostic):
- code = 100
- message = "Explain not supported"
-
-class Diagnostic101 (ExplainDiagnostic):
- code = 101
- message = "Explain request type not supported"
-
-class Diagnostic102 (ExplainDiagnostic):
- code = 102
- message = "Explain record temporarily unavailable"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
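
The diagnostics removed above are plain exceptions carrying an SRW code, message and URI; a small sketch of how the parser modules raise and report them (Python 2; the details string is taken from CQLParser above):

    from PyZ3950.SRWDiagnostics import SRWDiagnostic, Diagnostic10

    try:
        diag = Diagnostic10()
        diag.details = "Expected Term, got end of query."
        raise diag
    except SRWDiagnostic, d:
        print d.code      # 10
        print d.message   # "Malformed query"
        print str(d)      # info:srw/diagnostic/1/10 [Malformed query]: ...
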
diff --git a/python/PyZ3950/__init__.py b/python/PyZ3950/__init__.py
deleted file mode 100644
index 08d3b71..0000000
--- a/python/PyZ3950/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Python Z3950/MARC/ASN.1 package, supporting ZOOM API.
-"""
-
-__all__ = ['zoom', 'zmarc']
-# only pieces most users need: if you need asn1, import it explicitly
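
As the removed __init__.py notes, most users go through the zoom module; a sketch of that documented ZOOM usage (Python 2; the host, port, database and option names follow the ZOOM convention and are illustrative, not taken from this diff):

    from PyZ3950 import zoom

    conn = zoom.Connection('z3950.loc.gov', 7090)
    conn.databaseName = 'VOYAGER'
    conn.preferredRecordSyntax = 'USMARC'
    result = conn.search(zoom.Query('CCL', 'ti="1066 and all that"'))
    for record in result:
        print str(record)
    conn.close()
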
diff --git a/python/PyZ3950/asn1.py b/python/PyZ3950/asn1.py
deleted file mode 100644
index 2a01d88..0000000
--- a/python/PyZ3950/asn1.py
+++ /dev/null
@@ -1,2036 +0,0 @@
-#!/usr/bin/env python
-# This file should be available from
-# http://www.pobox.com/~asl2/software/PyZ3950/
-# and is licensed under the X Consortium license:
-# Copyright (c) 2001, Aaron S. Lav, asl2@pobox.com
-# All rights reserved.
-
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, and/or sell copies of the Software, and to permit persons
-# to whom the Software is furnished to do so, provided that the above
-# copyright notice(s) and this permission notice appear in all copies of
-# the Software and that both the above copyright notice(s) and this
-# permission notice appear in supporting documentation.
-
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
-# OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
-# HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL
-# INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-# Except as contained in this notice, the name of a copyright holder
-# shall not be used in advertising or otherwise to promote the sale, use
-# or other dealings in this Software without prior written authorization
-# of the copyright holder.
-# Change history:
-# 2001/9/22 - fix test code to be slightly more elegant and fix test
-# comments to be correct. Due to Roy Smith, roy.smith@micromuse.com
-
-# 2002/05/23 - fix handling of ANY. Needed for handling of SUTRS records
-# by Z3950.
-
-
-
-
-"""
-asn1 is a relatively general-purpose ASN.1 BER encoder and decoder.
-Encoding and
-decoding functions (asn1.encode and asn1.decode) take an ASN.1 spec, and
-transform back and forth between a byte stream and what I consider a natural
-Python representation of the data.
-
-Separating the ASN.1 specification from the code would allow
-compilation of the specification to inline Python or C code, or to a
-specification for a C-based engine.
-
-This module supports the following ASN.1 types:
-
-
-BOOLEAN - encoder takes any Python value, decoder produces 0 or 1
-
-INTEGER - as in Python
-
-BITSTRING - asn1.BitStringVal
-
-OCTSTRING - Python string
-
-NULL - ignore value on encoding, return None on decoding
-
-REAL - asn1.REAL (I hope to make this look more like python floats someday: read the source if you need this)
-
-OID - OidVal
-
-EXTERNAL - as SEQUENCE, see below (or read the source)
-
-all string types - Python string (no validity checking is done)
-
-
-For all the above types, the ASN.1 spec is just the name of the type.
-Inherently constructed types:
-
-
-SEQUENCE_OF (ASN1.spec) - Python representation is a list of values.
-
-SEQUENCE, CHOICE - the ASN.1 spec is a list of tuples, either (name, tag, type)
-or, for SEQUENCE, (name, tag, type, optionalflag), where:
-tag can be None, asn1.EXPLICIT(tag), asn1.IMPLICIT(tag), or an integer
-interpreted as either an explicit or an implicit tag according to the setting
-of asn1.implicit_default; type is recursively an ASN.1 spec.
-
-
-For CHOICE, the Python representation is (name, val). For SEQUENCE, on
-decoding,
-the Python representation is an instance of an asn1-synthesized class.
-On encoding, any class with the appropriate attributes is acceptable,
-but you
-can obtain a fresh instance of the synthesized class by calling the SEQUENCE
-specification: this class overrides setattr to provide attribute-name error
-checking. (The rationale for the seemingly unPythonic errorchecking is that
-misspelled optional attributes would otherwise be hard to detect. If you don't
-like it, it should be easy to turn off.)
-
-The definition of enumerated values for INTEGER and BITSTRING types is
-supported via the compiler (or see their definitions). For
-BITSTRING types, __getitem__ (value_name) and __setitem__
-(value_name) are implemented as BitStringVal methods, allowing,
-e.g., bitstringval['version_1'] = 1, or
-if bitstringval['version_1']. An appropriate BitStringVal for
-encoding can be constructed by calling the specification.
-For INTEGER types, call get_num_from_name (name) or
-get_name_from_num(num) methods of the type specification.
-(Note that code like if val == defn.get_num_from_name ('failure'):
-is less prone to typo-induced errors than if
-'failure' == defn.get_name_from_num (val):)
-
-In order to obtain definitions nested inside other definitions
-(e.g. INTEGER and BITSTRING, above), __getitem__ methods are provided
-for SEQUENCE, CHOICE, and SEQUENCE_OF. For SEQUENCE and CHOICE, the
-key is the name of the sequence element or arm (respectively).
-For SEQUENCE_OF, the key is 0 (arbitrarily chosen).
-
-APPLICATION and PRIVATE class tags are supported via the compiler,
-or pass the cls= keyword to IMPLICIT or EXPLICIT.
-
-For examples, see the test code at the end of this file, or the
-Z39.50 code that should be distributed with this file as part of PyZ3950.
-
-"""
-
-from __future__ import nested_scopes
-vers = "0.83"
-
-import array
-import string
-import copy
-import math
-
-
-# - elements should expose a list of possible tags, instead of just one tag,
-# bringing CHOICE into line with other elements
-# - make test cases more comprehensive
-# - write test cases for INTEGER and BITSTRING enum. values
-# - write test cases for asst. character set encodings
-
-
-# Parameters you can tweak for BER encoding
-
-implicit_default = 1
-# Treat bare numeric tags as IMPLICIT if 1, EXPLICIT if 0. Set at
-# definition time. This requires that all ASN.1 definitions be done
-# single-threadedly - no big deal. It'd be somewhat more modular to
-# look this up in the caller's module dictionary. The compiler produces
-# explicit indications of whether a tag is EXPLICIT or IMPLICIT, so is
-# unaffected by this setting.
-
-indef_len_encodings = 0
-# Generate indefinite-length encodings whenever we'd otherwise have to
-# go back and fix up the length bytes if length octets were 1-byte long.
-# Set at encoding time. Might make encoding run a little faster.
-
-cons_encoding = 0
-# Generate constructed encodings for string types. Useful only for
-# testing the decoding of such encodings, I think.
-
-# In theory, indef_len_encodings and cons_encoding are designed for
-# cases where the length of the data isn't known ahead of time, and
-# one doesn't want to have to buffer the entire thing. It is possible
-# to pass lazy sequences into the appropriate functions, but ...
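-
-# A sketch of adjusting these knobs (they are plain module globals):
-#
-#   from PyZ3950 import asn1
-#   asn1.implicit_default = 0      # bare numeric tags now mean EXPLICIT;
-#                                  # set this before defining any specs
-#   asn1.indef_len_encodings = 1   # may be toggled any time before encode ()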
-
-# For debugging the asn.1 code only
-trace_seq = 0
-trace_choice = 0
-trace_tag = 0
-trace_seq_of = 0
-trace_int = 0
-trace_external = 0
-trace_byte = 0
-trace_bitstring = 0
-trace_string = 0
-trace_codec = 0
-
-# Note: BERError is only for decoding errors (either input data is illegal
-# BER, or input data is legal BER but we don't support it.)
-
-# call encode_per and decode_per for PER? Or check Ctx?
-
-class BERError (Exception): pass
-
-class EncodingError (Exception): pass
-
-
-# Neither of these provides support for character set codecs. Instantiate
-# the appropriate context and call set_codec for the set of string types
-# you need with appropriate codecs.
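-#
-# A sketch (assuming the standard codecs module; GeneralString is one of
-# the string types defined further down in this file):
-#
-#   import codecs
-#   ctx = Ctx ()
-#   ctx.set_codec (GeneralString, codecs.lookup ('latin-1'))
-#   GeneralString.encode (ctx, u'caf\xe9')   # now encoded via latin-1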
-
-def encode (spec, data):
- ctx = Ctx ()
- spec.encode (ctx, data)
- return ctx.get_data ()
-
-def decode (spec, buf, readproc = None):
- ctx = IncrementalDecodeCtx(spec)
- ctx.feed (buf)
- while ctx.val_count () == 0:
- ctx.feed (readproc ())
- rv = ctx.get_first_decoded ()
- # decode also ignores the possibility of leftover bytes.
- # use IncrementalDecodeCtx.get_bytes_inprocess_count if you
- # need to know about leftovers
-
- return rv
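-
-# A sketch of driving the incremental decoder directly (useful when data
-# arrives in chunks, e.g. from a socket):
-#
-#   ctx = IncrementalDecodeCtx (INTEGER)
-#   ctx.feed (encode (INTEGER, 42))      # feed () accepts arbitrary chunks
-#   if ctx.val_count () > 0:
-#       print ctx.get_first_decoded ()   # prints 42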
-
-
-UNIVERSAL_FLAG = 0
-APPLICATION_FLAG = 0x40
-CONTEXT_FLAG = 0x80
-PRIVATE_FLAG = 0xC0
-
-CONS_FLAG = 0x20
-
-
-ANY_TAG = -1 # pseudotag
-
-END_INDEF_CONS_TAG = 0x0
-
-BOOL_TAG = 0x1
-INT_TAG = 0x2
-BITSTRING_TAG = 0x3
-OCTSTRING_TAG = 0x4
-NULL_TAG = 0x5
-OID_TAG = 0x6
-OBJECTDESCRIPTOR_TAG = 0x7
-EXTERNAL_TAG = 0x8
-REAL_TAG = 0x9
-SEQUENCE_TAG = 0x10
-UTF8STRING_TAG = 0xC
-NUMERICSTRING_TAG = 0x12
-PRINTABLESTRING_TAG = 0x13
-T61STRING_TAG = 0x14
-VIDEOTEXSTRING_TAG = 0x15
-IA5STRING_TAG = 0x16
-GENERALIZEDTIME_TAG = 0x18
-GRAPHICSTRING_TAG = 0x19
-VISIBLESTRING_TAG = 0x1A
-GENERALSTRING_TAG = 0x1B
-UNIVERSALSTRING_TAG = 0x1C
-BMPSTRING_TAG = 0x1E
-
-class StructBase:
- # replace _allowed_attrib_list with __slots__ mechanism
- # once we no longer need to support Python 2.1
- _allowed_attrib_list = []
- def __init__ (self, **kw):
- self.set_allowed_attribs (self._allowed_attrib_list)
- # we don't just self.__dict__.update (...) b/c
- # we want error-checking in setattr below
- for k, v in kw.items ():
- setattr (self, k, v)
- def __repr__ (self):
- s = 'Struct: %s [\n' % (self.__class__)
- i = self.__dict__.items ()
- i.sort ()
- i = filter (lambda it: it[0][0] <> '_', i)
- s = s + string.join (map (lambda it: repr (it[0]) +
- ' ' + repr (it[1]), i), '\n')
- s = s + ']\n'
- return s
- def __cmp__ (self, other):
- keys = self.__dict__.keys ()
- keys.sort () # to ensure reproduciblity
- for k in keys:
- s = getattr (self, k, None)
- o = getattr (other, k, None)
- def is_seq (val):
- return (isinstance (val, type ((0,))) or
- isinstance (val, type ([])))
- if is_seq (s) and is_seq (o):
- if len (s) <> len (o):
- return -1
- for selt, oelt in zip (s, o):
- c = cmp (selt, oelt)
- if c <> 0:
- return c
- else:
- c = cmp (s, o)
- if c <> 0:
- return c
- okeys = other.__dict__.keys ()
- okeys.sort ()
- if okeys <> keys:
- return 1
- return 0
- def set_allowed_attribs (self, l):
- self._allowed_attribs = {}
- for e in l:
- self._allowed_attribs [e] = 1
- def is_allowed (self, k):
- if self._allowed_attrib_list == []: return 1
- if k == '_allowed_attribs': return 1
- return self._allowed_attribs.has_key (k)
-# I implemented setattr b/c otherwise it can be difficult to tell when one
-# has mistyped an OPTIONAL element of a SEQUENCE. This is probably a matter
-# of taste, and derived classes should feel welcome to override this.
- def __setattr__ (self, key, val):
- if not self.is_allowed (key):
- raise AttributeError (key)
- self.__dict__ [key] = val
-
-# tags can match if only constructedness of encoding is different. Not
-# quite right, since, e.g., SEQUENCE must always be a constructed type,
-# and INTEGER never is, but this is OK because once we've matched,
-# trying to decode an obligate cons. type with a noncons tag will fail
-# because there's no decode_val attribute, and the other mismatch
-# will fail because there's no start_cons attribute.
-# This isn't a problem in CHOICEs because the uniqueness requirements
-# are expressed in terms of numeric tags, regardless of cons flag.
-
-def match_tag (a, b):
- if trace_tag: print "Match_tag", a, b
- cons_match = (a[0] & ~CONS_FLAG == b[0] & ~CONS_FLAG)
- if (a[1] == ANY_TAG or b[1] == ANY_TAG):
- return cons_match
- return a[1] == b[1] and cons_match
-
-def encode_base128 (val):
- if val == 0:
- return [0x00]
- l = []
- while val:
- l.append ((val % 128) | 0x80)
- val = val / 128
- if len (l) > 0:
- l[0] = l[0] & 0x7F
- l.reverse ()
- return l
-
-def read_base128 (buf, start):
- val = 0
- while 1:
- b = buf [start]
- start += 1
- val = val * 128 + (b & 0x7F)
- if b & 0x80 == 0:
- break
- return (start, val)
-
-class CtxBase:
- """Charset codec functionality, shared among all contexts."""
- def __init__ (self):
- self.charset_switch_oids = {}
- self.codec_dict_stack = [{}]
- # We may need different codecs for differing string types, e.g.
- # VisibleString vs. UTF8String, thus the dict
- def register_charset_switcher (self, oid, fn):
- self.charset_switch_oids [oid] = fn
-
- def set_codec (self, defn_inst, codec, strip_bom = 0):
- if trace_codec:
- print "setting codec", defn_inst, codec, strip_bom
- print self.codec_dict_stack
- # Note: really only need [0] and [1] elements of codec, encoder and decoder
- self.codec_dict_stack[-1][defn_inst.base_tag] = (codec, strip_bom)
- def get_codec (self, base_tag):
- def default_enc (x):
- if isinstance (x, type (u"")):
- return (x.encode ('ascii'), 0)
- return (x, 0)
- identity = ((default_enc, lambda x:(x,0)), 0)
- # we ignore lengths consumed. I don't think this means
- # we miss out on any error checking, b/c if there were
- # a stray pad byte or something, the codec would complain
- # about truncation.
- return self.codec_dict_stack[-1].get (base_tag, identity)
- def get_enc (self, base_tag):
- t = self.get_codec (base_tag)
- return (t[0][0], t[1])
- def get_dec (self, base_tag):
- return self.get_codec (base_tag)[0][1]
- def push_codec (self):
- if trace_codec:
- print "pushing codec"
- self.codec_dict_stack.append ({})
- def pop_codec (self):
- if trace_codec:
- print "popping codec"
- self.codec_dict_stack.pop ()
-
-class IncrementalDecodeCtx(CtxBase):
- states = ["tag_first", "tag_rest", "len_first", "len_rest", "data",
- "indef_end"]
- class StackElt:
- def __init__ (self, start_offset, cur_len, parent_typ, tag,
- parent_ctx, cname = None):
- self.start_offset = start_offset
- self.len = cur_len
- self.cname = cname
- self.cons = parent_typ.start_cons (tag, cur_len, parent_ctx)
- # methods: get_cur_def (), handle_val (val), finish ()
- def __init__ (self, asn1_def):
- CtxBase.__init__ (self)
- self.offset = 0
- self.last_begin_offset = 0
- self.state = "tag_first"
- self.asn1_def = asn1_def
- self.decoded_vals = []
- self.stack = []
- self.state_fns = {}
- for state in self.states:
- self.state_fns [state] = getattr (self, 'feed_' + state)
- def get_bytes_inprocess_count (self):
- return self.offset - self.last_begin_offset
- def val_count (self):
- l = len (self.decoded_vals)
- return l
- def get_first_decoded (self):
- rv = self.decoded_vals [0]
- self.decoded_vals = self.decoded_vals [1:]
- return rv
-
- def get_cur_def (self):
- if len (self.stack) == 0:
- return self.asn1_def
- else:
- tos = self.stack [-1]
- return tos.cons.get_cur_def (self.decoded_tag)
-
- def match_tag (self, seen):
- typ = self.get_cur_def ()
- # Note: use 'is' instead of '=' to avoid problem w/
- # OCTSTRING_class wanting __eq__ and failing b/c of getattr
- if typ is None: # falling off end of SEQUENCE with optional bits
- return 0
- return typ.check_tag (seen)
-
- # XXX calling check_tag is inefficient for SEQUENCE and CHOICE,
- # b/c we have to iterate to check_tag, then use the tag again
- # to figure out what type to decode.
-
- def set_state (self, new_state):
- self.state = new_state
- def push (self, decoded_len):
- new_typ = self.get_cur_def ()
- cname = None
- if isinstance (new_typ, CHOICE):
- (cname, new_typ) = new_typ.check_tag (self.decoded_tag)
- self.stack.append (self.StackElt (self.offset, decoded_len, new_typ,
- self.decoded_tag, self,
- cname = cname))
-
- def pop (self):
- if len (self.stack) == 0:
- self.raise_error ("bad end of cons type")
- tos = self.stack.pop ()
- if tos.len <> None:
- if tos.len <> (self.offset - tos.start_offset):
- self.raise_error ("constructed len mismatch (%d %d %d)" %
- (tos.len, self.offset, tos.start_offset))
- val = tos.cons.finish ()
- if tos.cname <> None:
- val = (tos.cname, val)
- self.handle_decoded (val)
-
- def raise_error (self, descr):
- raise BERError (descr + " offset %d" % (self.offset,))
- def feed (self, data):
- for char in data:
- if trace_byte:
- print hex (char), self.state, "offset:", self.offset
- self.state_fns [self.state] (char)
- self.offset += 1
-
- def feed_tag_first (self, char):
- if char == 0x00:
- stacklen = len (self.stack)
- if stacklen == 0 or self.stack[-1].len <> None:
- if stacklen == 0:
- tos_len_str = "irrelevant"
- else:
- tos_len_str = str (self.stack[-1].len)
-
- self.raise_error ("0x00 tag found, stacklen %d tos len %s" %
- (stacklen, tos_len_str))
- self.set_state ("indef_end")
- return
-
- flags = char & 0xE0
- val = char & 0x1f
- self.decoded_tag = (flags, val)
- if val == 0x1f:
- self.set_state ("tag_rest")
- self.tag_accum = 0
- else:
- self.finish_tag ()
-
- def feed_tag_rest (self, char):
- self.tag_accum = self.tag_accum * 128 + (char & 0x7f)
- if char & 0x80 == 0:
- self.decoded_tag = (self.decoded_tag[0], self.tag_accum)
- self.tag_accum = None
- self.finish_tag ()
-
-
- def finish_tag (self):
- if not self.match_tag (self.decoded_tag):
- self.raise_error ("Saw tag %s expecting %s" %
- (str(self.decoded_tag),
- self.get_cur_def().str_tag ()))
- self.set_state ("len_first")
-
- def feed_len_first (self, char):
- if char >= 128:
- rest_len = char & 0x7f
- if rest_len == 0:
- self.decoded_len = None
- self.finish_len ()
- else:
- self.rest_len = rest_len
- self.decoded_len = 0
- self.set_state ("len_rest")
- else:
- self.decoded_len = char
- self.finish_len ()
- def feed_len_rest (self, char):
- self.decoded_len = self.decoded_len * 256 + char
- self.rest_len -= 1
- if self.rest_len == 0:
- self.finish_len ()
-
- def finish_len (self):
- if self.decoded_tag == (0,0):
- if self.decoded_len <> 0:
- self.raise_error ("Bad len %d for tag 0" % (self.decoded_len,))
- self.pop ()
- return
- self.data_buf = []
- cons_flag = self.decoded_tag [0] & CONS_FLAG
- if cons_flag:
- self.push (self.decoded_len)
- new_state = "tag_first"
- else:
- new_state = "data"
- if self.decoded_len == 0:
- if cons_flag:
- assert (self.check_pop ())
- self.set_state ("tag_first")
- else:
- self.finish_data ()
- else:
- self.set_state (new_state)
-
- def feed_data (self, char):
- self.data_buf.append (char)
- self.decoded_len -= 1
- if self.decoded_len == 0:
- self.finish_data ()
- def finish_data (self):
- cur_def = self.get_cur_def ()
- if isinstance (cur_def, CHOICE):
- (cname, ctyp) = cur_def.check_tag (self.decoded_tag)
- cur_def = ctyp
- else:
- cname = None
- if not (cur_def is None): # we haven't fallen off end of a SEQ
- rv = cur_def.decode_val (self, self.data_buf)
- if cname <> None:
- rv = (cname, rv)
- self.handle_decoded (rv)
- else:
- assert (self.check_pop ())
- self.set_state ("tag_first")
- def handle_decoded (self, val):
- if len (self.stack) == 0:
- self.decoded_vals.append (val)
- self.last_begin_offset = self.offset + 1
- # +1 because self.offset will be incremented on return
- else:
- self.stack[-1].cons.handle_val (val)
- self.check_pop ()
- def check_pop (self):
- if self.stack[-1].len == self.offset - self.stack[-1].start_offset:
- self.pop ()
- return 1
- return 0
-
- def feed_indef_end (self, char):
- if char <> 0x00:
- self.raise_error ("end cons indef-len encoding %x" % (char,))
- self.pop ()
- self.set_state ("tag_first")
-
-def tag_to_buf (tag, orig_flags = None):
- (flags, val) = tag
- # Constructed encoding is property of original tag, not of
- # implicit tag override
- if orig_flags <> None:
- flags = flags | (orig_flags & CONS_FLAG)
- extra = 0
- if val >=0x1F:
- extra = val
- val = 0x1F
- l = [flags | val]
- if extra:
- l2 = encode_base128 (extra)
- l.extend (l2)
- return l
-
-def len_to_buf (mylen):
- if mylen < 128:
- return [mylen]
- else:
- l = []
- while mylen:
- l.append (mylen % 256)
- mylen = mylen / 256
- assert (len (l) < 0x80)
- l.append (len (l) | 0x80)
- l.reverse ()
- return l
-
-class WriteCtx (CtxBase):
- def __init__ (self):
- CtxBase.__init__ (self)
- self.clear ()
- def clear (self):
- self.buf = array.array ('B')
- def encode (self, spec, data):
- self.clear ()
- spec.encode (self, data)
- return self.get_data ()
- def get_data (self):
- return self.buf
- def bytes_write (self, data):
- # type-checking is icky but required by array i/f
- if isinstance (data, type ([])):
- self.buf.fromlist (data)
- elif isinstance (data, type ('')):
- self.buf.fromstring (data)
- else:
- raise EncodingError, "Bad type to bytes_write"
-
-BYTE_BITS = 8
-
-def extract_bits (val, lo_bit, hi_bit):
- tmp = (val & (~0L << (lo_bit))) >> lo_bit
- tmp = tmp & ((1L << (hi_bit - lo_bit + 1)) - 1)
- return tmp
-
-log_of_2 = math.log (2)
-
-def log2 (x):
- return int(math.log (x) / log_of_2)
-
-class PERWriteCtx(WriteCtx):
- def __init__ (self, aligned = 0, canonical = 0):
- self.aligned = aligned
- self.canonical = canonical
- self.bit_offset = 0
-
- WriteCtx.__init__ (self)
- def write_bits_unaligned (self, val, bit_len):
- # write starting at bit_offset, no matter what
- byte_count = (bit_len + self.bit_offset) / BYTE_BITS
- if (bit_len + self.bit_offset) % BYTE_BITS <> 0:
- byte_count += 1
- my_range = range (byte_count - 1, -1, -1)
- lo_bits = map (lambda x: x * BYTE_BITS, my_range)
- def extract_val (lo_bit):
- return extract_bits (val, lo_bit, lo_bit + BYTE_BITS - 1)
- bytes = map (extract_val, lo_bits)
-
- new_bit_offset = (bit_len + self.bit_offset) % BYTE_BITS
- if new_bit_offset <> 0:
- bytes [-1] = bytes [-1] << (BYTE_BITS - new_bit_offset)
- if self.bit_offset <> 0:
- self.buf[-1] = self.buf[-1] | bytes [0]
- self.bytes_write (bytes[1:])
- else:
- self.bytes_write (bytes)
- self.bit_offset = new_bit_offset
-
- def write_bits (self, val, bit_len):
- if self.aligned and self.bit_offset <> 0:
- self.write_bits_unaligned (0, BYTE_BITS - self.bit_offset)
- self.bit_offset = 0
- self.write_bits_unaligned (val, bit_len)
-
- # for {read,write}_*_int, see Dubuisson 20.4
- def write_constrained_int (self, val, lo, hi):
- assert (hi >= lo)
- # XXX what if hi = lo + 1
- rng = hi - lo + 1
- print rng, val, log2(rng)
- if not self.aligned:
- self.write_bits (val, log2(rng))
- return
-
- if rng == 1:
- return # known value, don't encode
- if rng < 256:
- return # calc minimum # of bits
- if rng == 256:
- self.write_bits (val - lo, 8)
- return
- if rng <= 65536:
- self.write_bits (val - lo, 16)
- return
- assert (0)
-
- def write_semiconstrained_int (self, val, lo):
- # write len field, then len, then min octets log_256(val-lo)
- assert (0)
- pass
- def write_unconstrained_int (self, val): # might have upper bd, but not used
- assert (0)
- pass
- def write_usually_small_int (self, val):
- assert (val >= 0)
- if val < 64:
- self.write_bits_unaligned (0, 1)
- self.write_bits_unaligned (val, 6)
- else:
- self.write_bits_unaligned (1,1)
- self.write_semiconstrained_int (val, 0)
-
-
-
-class BERWriteCtx(WriteCtx):
- def __init__ (self):
- WriteCtx.__init__ (self)
- def clear (self):
- self.cur_tag = None
- WriteCtx.clear (self)
- def set_implicit_tag (self, tag):
- if self.cur_tag == None:
- self.cur_tag = tag
- def tag_write (self, tag):
- if trace_tag: print "Writing tag", tag
- (orig_flags, _) = tag
- if self.cur_tag <> None:
- tag = self.cur_tag
- self.cur_tag = None
- l = tag_to_buf (tag, orig_flags)
- self.bytes_write (l)
- def get_pos (self):
- return len (self.buf)
- class LenPlaceHolder:
- def __init__ (self, ctx, estlen = 127):
- if not indef_len_encodings:
- self.ctx = ctx
- self.oldpos = ctx.get_pos ()
- self.estlen = estlen
- self.lenlen = ctx.est_len_write (estlen)
- else:
- self.ctx = ctx
- ctx.bytes_write ([0x80])
- def finish (self):
- if not indef_len_encodings:
- real_len = self.ctx.get_pos() - self.oldpos - 1
- self.ctx._len_write_at (self.ctx.get_pos () - self.oldpos - 1,
- self.oldpos, self.lenlen)
- else:
- self.ctx.bytes_write ([0,0])
- def len_write (self, mylen = 0):
- return Ctx.LenPlaceHolder (self, mylen)
- def len_write_known (self, mylen):
- return self.est_len_write (mylen)
- def est_len_write (self, mylen):
- l = len_to_buf (mylen)
- self.buf.fromlist (l)
- return len (l)
- def _len_write_at (self, mylen, pos, lenlen):
- l = len_to_buf (mylen)
- assert (len(l) >= lenlen)
- # array.pop not available in Python 1.5.2. We could just use a
- # less efficient length encoding (long form w/leading 0 bytes
- # where necessary), but ...
-
- # XXX fix to use more efficient code, now that we don't support 1.5.2!
-
- for i in range (len(l) - lenlen):
- self.buf.insert (pos, 0)
- for i in range(len(l)):
- self.buf[pos + i] = l [i]
-
-
- def raise_error (self, descr):
- offset = len (self.buf)
- raise BERError, (descr, offset)
-
-Ctx = BERWriteCtx # Old synonym for historical reasons
-
-
-# EXPLICIT, IMPLICIT, CHOICE can't derive from eltbase b/c they need to do
-# tag manipulation
-class ELTBASE:
- # known_len is 1 if len can easily be calculated w/o encoding
- # val (e.g. OCTET STRING),
- # 0 if otherwise and we have to go back and fix up (e.g. SEQUENCE).
- def encode (self, ctx, val):
- ctx.tag_write (self.tag)
- if not self.known_len: lph = ctx.len_write ()
- self.encode_val (ctx, val)
- if not self.known_len: lph.finish ()
-
- def check_tag (self, seen_tag):
- return match_tag (seen_tag, self.tag)
- def str_tag (self):
- if hasattr (self, 'tag'):
- return str (self.tag)
- else:
- return self.__class__.__name__
- def fulfill_promises (self, promises):
- return
-
-class TAG: # base class for IMPLICIT and EXPLICIT
- def __init__ (self, tag, cls=CONTEXT_FLAG):
- if type (tag) == type (0):
-            tag = (cls, tag)
- self.tag = (tag[0] | self.flags, tag[1])
- def set_typ (self, typ):
- self.typ = typ
- def __call__ (self):
- return self.typ ()
- def __getitem__ (self, *args):
- return self.typ.__getitem__ (*args)
- def __setitem__ (self, *args):
- return self.typ.__setitem__ (*args)
-
- def get_num_from_name (self, *args):
- return self.typ.get_num_from_name (*args)
- def get_name_from_num (self, *args):
- return self.typ.get_name_from_num (*args)
-
- def decode_val (self, ctx, buf):
- return self.typ.decode_val (ctx, buf)
- def str_tag (self):
- return str (self.tag)
- def check_tag (self, seen_tag):
- return match_tag (seen_tag, self.tag)
-
- def fulfill_promises (self, promises):
- if isinstance (self.typ, Promise):
- self.typ = self.typ.get_promised (promises)
- else:
- self.typ.fulfill_promises (promises)
-
-# Note: IMPLICIT and EXPLICIT have dual use: they can be instantiated by
-# users of this module to indicate tagging, but when TAG.set_typ is
-# called, they become asn.1 type descriptors themselves. Maybe these
-# two uses should have separate classes, making four classes overall.
-
-class IMPLICIT(TAG):
- flags = 0
- def __repr__ (self):
- return "IMPLICIT: " + repr (self.tag) + " " + repr (self.typ)
- def __cmp__ (self, other):
- if not isinstance (other, IMPLICIT):
- return -1
- return cmp (self.tag, other.tag)
- def start_cons (self, tag, cur_len, ctx):
- return self.typ.start_cons (tag, cur_len, ctx)
- def encode (self, ctx, val):
- ctx.set_implicit_tag (self.tag)
- self.typ.encode (ctx, val)
- def encode_per (self, ctx, val):
- self.typ.encode_per (ctx, val)
-
-
-class EXPLICIT (TAG):
- flags = CONS_FLAG # Explicit tag is always a constructed encoding
- def __repr__ (self):
- return "EXPLICIT: " + repr (self.tag) + " " + repr (self.typ)
- def __cmp__ (self, other):
- if not isinstance (other, EXPLICIT):
- return -1
- return cmp (self.tag, other.tag)
-
- class ConsElt:
- def __init__ (self, typ):
- self.typ = typ
- self.ind = 0
- def get_cur_def (self, seen_tag):
- return self.typ
- def handle_val (self, val):
- self.tmp = val
- self.ind += 1
- def finish (self):
- if self.ind <> 1:
- raise BERError ("wrong number of elts %d for EXPLICIT %s" %
- (self.ind, self.typ))
- return self.tmp
-
- def start_cons (self, tag, cur_len, ctx):
- return self.ConsElt (self.typ)
-
- def encode (self, ctx, val):
- ctx.cur_tag = None
- ctx.tag_write (self.tag)
- lph = ctx.len_write ()
- self.typ.encode (ctx, val)
- lph.finish ()
-
-
-def make_tag (tag):
- if implicit_default:
- return IMPLICIT (tag)
- else:
- return EXPLICIT (tag)
-
-def TYPE (tag, typ):
- if tag == None:
- return typ
- if not isinstance (tag, TAG):
- tag = make_tag (tag)
- tag.set_typ (typ)
- return tag
-
-class OidVal:
- def __init__ (self, lst):
- self.lst = tuple (lst)
- self.encoded = self.encode (lst)
- def __hash__ (self):
- return hash (self.lst)
- def __repr__ (self):
- s = 'OID:'
- for i in self.lst:
- s = s + ' %d' % i
- return s
- def __cmp__ (self, other):
- if not hasattr (other, 'lst'):
- return -1
- return cmp (self.lst, other.lst)
- def encode (self, lst):
- encoded = [40 * lst [0] + lst [1]]
- for val in lst [2:]:
- encoded = encoded + encode_base128 (val)
- return encoded
-
-class OID_class (ELTBASE):
- tag = (0, OID_TAG)
- known_len = 1
- def encode_val (self, ctx, val):
- ctx.len_write_known (len (val.encoded))
- ctx.bytes_write (val.encoded)
- def decode_val (self, ctx, buf):
- b1 = buf [0]
- oid = [b1 / 40, b1 % 40]
- start = 1
- mylen = len (buf)
- while start < mylen:
- (start, val) = read_base128 (buf, start)
- oid.append (val)
- return OidVal (oid)
-
-OID = OID_class ()
-
-# XXX need to translate into offset in list for PER encoding
-class NamedBase:
- def __init__ (self, names_list = [], lo = None, hi = None):
- self.lo = lo
- self.hi = hi
- if names_list == None:
- names_list = []
- self.name_to_num = {}
- self.num_to_name = {}
- self.names_list = names_list
- for (name, num) in names_list:
- self.num_to_name [num] = name
- self.name_to_num [name] = num
- num_keys = self.num_to_name.keys ()
- if len (num_keys) > 0:
- self.max = max (self.num_to_name.keys ())
- else:
- self.max = 0
- def get_num_from_name (self, *args):
- return self.name_to_num.get (*args)
- def get_name_from_num (self, *args):
- return self.num_to_name.get (*args)
-
-
-class INTEGER_class (ELTBASE, NamedBase):
- tag = (0, INT_TAG)
- known_len = 1
- def __init__ (self, *args):
- NamedBase.__init__ (self, *args)
- if self.max <> 0:
- self.hi = self.max # XXX reorganize!
- self.extensible = 0 # XXX
- def encode_val (self, ctx, val):
- # based on ber.py in pysnmp
- l = []
- if val == 0:
- l = [0]
- elif val == -1:
- l = [0xFF]
- else:
- if sgn (val) == -1:
- term_cond = -1
- last_hi = 1
- else:
- term_cond = 0
- last_hi = 0
- while val <> term_cond:
- val, res = val >> 8, (val & 0xFF)
- l.append (res)
- if (l[-1] & 0x80 <> 0) ^ last_hi:
- l.append (last_hi * 0xFF)
- ctx.len_write_known (len(l))
- l.reverse ()
- ctx.bytes_write (l)
- def encode_per (self, ctx, val):
- assert (not self.extensible)
- assert (self.lo <> None)
- print "encoding", val, self.lo, self.hi
- if self.hi == None:
- ctx.write_semiconstrained_int (val, self.lo)
- else:
- ctx.write_constrained_int (val, self.lo, self.hi)
-
-
- def decode_val (self, ctx, buf):
- val = 0
- if buf[0] >= 128: sgn = -1
- else: sgn = 1
- for b in buf:
- if trace_int: print "Reading INTEGER byte", b
- val = 256 * val + sgn * b
- if sgn == -1:
- val = - (val + pow (2, 8 * len (buf)))
- # XXX should be much more efficient decoder here
- return val
-
-INTEGER = INTEGER_class ()
-
-class ConditionalConstr:
- def __getattr__ (self, attr): # XXX replace with properties when can require 2.2.
- if attr == 'tag':
- base_tag = self.__dict__ ['base_tag']
- if cons_encoding:
- return (CONS_FLAG, base_tag)
- else:
- return (0, base_tag)
- elif attr == 'known_len' and self.override_known_len:
- return not cons_encoding
- else:
- return self.__dict__ [attr]
-
-class OCTSTRING_class (ConditionalConstr, ELTBASE):
- def __init__ (self, tag = None, lo = None, hi = None):
- if tag <> None:
- self.base_tag = tag
- else:
- self.base_tag = OCTSTRING_TAG
- self.override_known_len = 1
- self.extensible = 0 # XXX
- self.lo = lo
- self.hi = hi
- def __repr__ (self):
- return 'OCTSTRING: ' + repr (self.tag)
- class ConsElt:
- def __init__ (self):
- self.lst = []
- def get_cur_def (self, seen_tag):
- return OCTSTRING
- def handle_val (self, val):
- self.lst.append (val)
- def finish (self):
- return "".join (self.lst)
- def start_cons (self, tag, cur_len, ctx):
- return self.ConsElt ()
- def handle_charset (self, ctx, val):
- encoder, strip_bom = ctx.get_enc (self.base_tag)
- if trace_string:
- print "encoding", type (val), encoder, self.base_tag, strip_bom,
- (val, l) = encoder (val)
- if strip_bom:
- val = val[2:]
- if trace_string:
- print "encoded", val
- return val
- def encode_val (self, ctx, val):
- val = self.handle_charset (ctx, val)
- if cons_encoding:
- # Dubuisson, _ASN.1 ..._, 18.2.10 says that string
- # types are encoded like OCTETSTRING, so no worries
- # about preserving character boundaries in constructed
- # encodings.
- tag = (0, OCTSTRING_TAG)
- for i in range (len (val)):
- ctx.tag_write (tag)
- ctx.len_write_known (1)
- ctx.bytes_write ([ord(val[i])])
- else:
- ctx.len_write_known (len (val))
- ctx.bytes_write (val)
- def encode_per (self, ctx, val):
-        val = self.handle_charset (ctx, val)
- assert (not self.extensible)
- l = len (val)
- if self.lo <> None and self.lo == self.hi:
- if l <= 2:
- ctx.write_bits_unaligned (val, l * BYTE_BITS)
- elif l <= 8192:
- ctx.write_bits (val, l * BYTE_BITS)
- else:
- assert (0) # XXX need to fragment!
-
-        assert (l < 65536)
- if self.hi == None:
- ctx.write_semiconstrained_int (l, self.lo)
- else:
- ctx.write_constrained_int (l, self.lo, self.hi)
- ctx.write_bits (val, l * BYTE_BITS)
-
- def decode_val (self, ctx, buf):
- tmp_str = ''.join (map (chr, buf))
- decoder = ctx.get_dec (self.base_tag)
- if trace_string:
- print "decoding", repr(tmp_str), decoder, self.base_tag
- rv = decoder (tmp_str)
- if trace_string:
- print repr (rv)
- return rv [0]
-
-
-
-_STRING_TAGS = (UTF8STRING_TAG, NUMERICSTRING_TAG, PRINTABLESTRING_TAG,
- T61STRING_TAG, VIDEOTEXSTRING_TAG, IA5STRING_TAG,
- GRAPHICSTRING_TAG, VISIBLESTRING_TAG, GENERALSTRING_TAG,
- UNIVERSALSTRING_TAG, BMPSTRING_TAG, GENERALIZEDTIME_TAG,
- OBJECTDESCRIPTOR_TAG)
-
-OCTSTRING = OCTSTRING_class ()
-(UTF8String, NumericString, PrintableString, T61String, VideotexString,
- IA5String, GraphicString, VisibleString, GeneralString, UniversalString,
- BMPString, GeneralizedTime, ObjectDescriptor) = \
- map (OCTSTRING_class, _STRING_TAGS)
-
-class CHOICE:
- choice_type = 1
- # No class.tag, tag derives from chosen arm of CHOICE
- def __init__ (self, c):
- self.promises_fulfilled = 0
- # XXX self.promises_fulfilled is only needed for CHOICE,
- # but could speed up by adding checking to SEQUENCE, SEQUENCE_OF, etc.
-
- self.choice = []
- # XXX rework this to use dict by arm name, dict by tag?
- # but CHOICE of CHOICE constructs mean that a typ can have
- # multiple possible tags, so a little more difficult
- for arm in c:
- self.choice.append (self.mung (arm))
- def __getitem__ (self, key):
- for (cname, ctyp) in self.choice:
- if key == cname:
- return ctyp
- raise KeyError (key)
- def __setitem__ (self, key, val): # warning: may raise KeyError!
- for i in range (len (self.choice)):
- (cname, ctyp) = self.choice [i]
- if cname == key:
- self.set_arm (i, val)
- return
- raise KeyError (key)
- def fulfill_promises (self, promises):
- if self.promises_fulfilled:
- return
- self.promises_fulfilled = 1
- for i in range (len (self.choice)):
- if isinstance (self.choice [i][1], Promise):
- self.choice [i][1] = self.choice[i][1].get_promised (promises)
- else:
- self.choice[i][1].fulfill_promises (promises)
-
- def set_arm (self, i, new_arm):
- self.choice[i] = self.mung (new_arm)
- def mung (self, arm):
- (cname, ctag, ctyp) = arm
- ctyp = TYPE (ctag, ctyp)
- return [cname, ctyp]
- def str_tag (self):
- return repr (self)
- def check_tag (self, seen_tag):
- for (cname, ctyp) in self.choice:
- if ctyp.check_tag (seen_tag):
- return (cname, ctyp)
- return 0
- def __repr__ (self):
- return 'CHOICE: ' + string.join (map (lambda x:x[0],self.choice), '\n')
- # Note: we don't include types in the repr, because that can induce
- # infinite recursion.
- def encode (self, ctx, val):
- if trace_choice: print val
- (name, val) = val
- for (cname, ctyp) in self.choice:
- if cname == name:
- if trace_choice: print "Encoding arm", cname, "Val", val
- ctyp.encode (ctx, val)
- return
- err = ("Bogus, no arm for " + repr (name) + " val " +
- repr(val))
- raise EncodingError,err
-
-
-# Note: ANY can be any type, not just OCTSTRING. The ASN.1 spec
-# is supposed to indicate an OID (or something) which we can use to
-# figure out how to encode/decode the ANY type. For EXTERNAL,
-# this is well-defined and implemented via register_oid (although we
-# don't support indirect_reference).
-# On decoding, if the incremental decoder is used and no type
-# is specified, the result looks like:
-# (tag, , indef-len-flag)
-# or a list instead of a string for constructed types.
-# (The non-incremental decoder can't handle ANY types with indef-len
-# encoding, and just returns a byte string for def-len encoding. I
-# recommend the incremental decoder.)
-# The encoder expects the output of the incremental decoder.
-# Note that indef-len-flag can usually be ignored, and is only
-# valuable for applications which decode and then encode and expect
-# the results to be byte-identical, which is basically test apps.
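-#
-# For example (a sketch mirroring the test code at the end of this file):
-#
-#   register_oid ([1, 2, 840, 10003, 5, 101], GeneralString)   # SUTRS
-#
-# after which an EXTERNAL whose direct_reference is that OID has its
-# 'single-ASN1-type' encoding decoded as (and encoded from) a GeneralString.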
-
-class ANY_class(OCTSTRING_class): # inherit decode_val
- tag = (CONS_FLAG, ANY_TAG)
- class ConsElt:
- def __init__ (self, tag, cur_len):
- self.tmp = []
- self.tag = tag
- self.indef_flag = cur_len == None
- def get_cur_def (self, seen_tag):
- return ANY
- def handle_val (self, val):
- self.tmp.append (val)
- def finish (self):
- return (self.tag, self.tmp, self.indef_flag)
-
- def start_cons (self, tag, cur_len, ctx):
- return self.ConsElt (tag, cur_len)
- def encode_aux (self, val):
- (tag, val, indef_flag) = val
- if isinstance (val, type ([])):
- buf = "".join (map (self.encode_aux, val))
- elif isinstance (val, type (())):
- buf = self.encode_aux (val)
- else:
- buf = val
- def tostr (lst):
- return "".join (map (chr, lst))
- if indef_flag:
- return tostr (tag_to_buf (tag)) + "\x80" + buf + "\0\0"
- else:
- buf_len = len (buf)
- return tostr (tag_to_buf (tag)) + tostr (len_to_buf (buf_len)) +buf
- def encode (self, ctx, val):
- ctx.bytes_write (self.encode_aux(val))
- def check_tag (self, seen_tag):
- return 1
- def decode_val (self, ctx, buf):
- v = OCTSTRING_class.decode_val (self, ctx, buf)
- return (ctx.decoded_tag, v, 0) # only called for primitive def-len encoding, thus "0"
-
-ANY = ANY_class ()
-
-class BitStringVal:
- def __init__ (self, top, bits = 0, defn = None):
- self.top_ind = top # 0-based, -1 is valid, indicating no sig. bits
- self.bits = bits
- self.defn = defn
- def __repr__ (self):
- names = []
- for i in range (self.top_ind + 1):
- if self.is_set (i):
- def mk_unk ():
- return "Unknown(%d)" % (i,)
- if (not hasattr (self.defn, 'num_to_name') or
- self.defn.num_to_name == None):
- names.append (mk_unk ())
- else:
- names.append (self.defn.num_to_name.get (i, mk_unk ()))
- return "Top: %s Bits %s Names %s" % (repr(self.top_ind),
- repr(self.bits),
- ",".join (names))
- def __cmp__ (self, other):
- return cmp ((self.top_ind, self.bits), (other.top_ind, other.bits))
-
- def check_extend (self, bit):
- if bit > self.top_ind:
- self.bits = self.bits << (bit - self.top_ind)
- self.top_ind = bit
-
- def set (self, bit):
- self.check_extend (bit)
- self.bits = self.bits | (1L << (self.top_ind - bit))
- def clear (self, bit):
- self.check_extend (bit)
- self.bits = self.bits & ~(1L << (self.top_ind - bit))
-
- def set_bits (self, bitseq):
- for bit in bitseq:
- self.set (bit)
- def is_set (self, bit):
- if self.top_ind - bit < 0:
- return 0
- return self.bits & (1L << ( self.top_ind - bit))
- def __getitem__ (self, bit_name):
- bit_ind = self.defn.get_num_from_name (bit_name)
- return self.is_set (bit_ind)
- def __setitem__ (self, key, val):
- ind = self.defn.get_num_from_name (key)
- if val:
- self.set (ind)
- else:
- self.clear (ind)
-
-class BITSTRING_class (ConditionalConstr, ELTBASE, NamedBase):
- known_len = 1
- def __init__ (self, *args):
- self.base_tag = BITSTRING_TAG
- self.override_known_len = 0
- NamedBase.__init__ (self, *args)
- def __call__ (self):
- return BitStringVal (self.max, 0, self)
- class ConsElt:
- def __init__ (self, parent):
- self.lst = []
- self.parent = parent
- def get_cur_def (self, seen_tag):
- return BITSTRING
- def handle_val (self, val):
- self.lst.append (val)
- def finish (self):
- bits = 0L
- for v in self.lst[:-1]:
- bits *= 256L
- assert (v.top_ind == 7)
- bits += v.bits
- v = self.lst [-1]
- bits *= 256L
-
- pad_count = 7 - v.top_ind
- bits = bits >> pad_count
- bits += v.bits # v.bits have already been right-shifted by decoder
- return BitStringVal (8 * len (self.lst) - pad_count - 1, bits, self.parent)
-
- def start_cons (self, tag, cur_len, ctx):
- return self.ConsElt (self)
- def encode_val (self, ctx, val):
- def top_ind_to_pad_bits (top_ind):
- bit_count = (top_ind + 1) % 8 # top_ind is 0-based
- if bit_count == 0: return 0
- return (8 - bit_count)
- assert (top_ind_to_pad_bits (0) == 7)
- assert (top_ind_to_pad_bits (7) == 0)
- assert (top_ind_to_pad_bits (8) == 7)
- assert (top_ind_to_pad_bits (10) == 5)
- assert (top_ind_to_pad_bits (15) == 0)
-
- pad_bits_count = top_ind_to_pad_bits (val.top_ind)
-
- val_len = ((val.top_ind + 1) / 8) + 1
- # + 1 for count of padding bits, count always 1 byte
- if pad_bits_count <> 0:
- val_len += 1
- l = []
- to_write = (1L * val.bits) << pad_bits_count
- for i in range (val_len - 1):
- l.append (to_write % 256)
- to_write = to_write / 256
-
- assert (to_write >= 0)
- if not cons_encoding:
- ctx.len_write_known (val_len)
- l.append (pad_bits_count)
- l.reverse ()
- ctx.bytes_write (l)
- else:
- ctx.bytes_write ([0x80]) # Dubuisson p. 403 says indef-len req'd
- l.reverse ()
- for i in range (len (l) - 1):
- v = [0x3, 0x2, 0x0, l[i]]
- if trace_bitstring: print "encoding", v
- ctx.bytes_write (v)
- v = [0x3, 0x2, pad_bits_count, l[-1]]
- if trace_bitstring: print "encoding last", v
- ctx.bytes_write (v)
- ctx.bytes_write ([0x00,0x00])
- def decode_val (self, ctx, buf):
- if trace_bitstring:
- print "bitstring", buf
- pad_bits = buf [0]
- bits = 0
- for b in buf [1:]:
- bits = 256L * bits + b
- bits = bits >> pad_bits
- return BitStringVal ((len(buf) - 1) * 8 - pad_bits - 1 , bits,
- self)
-
-BITSTRING = BITSTRING_class ()
-
-class SeqConsElt:
- def __init__ (self, seq):
- self.index = 0
- self.seq = seq
- self.tmp = seq.klass ()
- def get_cur_def (self, seen_tag):
- r = range (self.index, len (self.seq.seq))
-
- for i in r:
- (name, typ, optional) = self.seq.seq [i]
- if typ.check_tag (seen_tag):
- self.index = i
- return typ
- if not optional:
- raise BERError ("SEQUENCE tag %s not found in %s (%d/%d)" %
- (str (seen_tag), str (self.seq),
- self.index, i))
-
- # OK, we fell off the end. Must just be absent OPTIONAL types.
- return None
-
- def handle_val (self,val):
- setattr (self.tmp, self.seq.seq[self.index][0], val)
- self.index += 1
- def finish (self):
- for i in range (self.index, len (self.seq.seq)):
- (name, typ, optional) = self.seq.seq[i]
- if not optional:
- raise BERError (
- "non-opt data missing from seq %s at %d (so far %s)" %
- (str (self.seq), self.index, str (self.tmp)))
- return self.tmp
-
-class SEQUENCE_BASE (ELTBASE):
- tag = (CONS_FLAG, SEQUENCE_TAG)
- known_len = 0
- def __init__ (self, klass, seq):
- self.klass = klass
- self.seq = []
- for e in seq:
- self.seq.append (self.mung (e))
- self.extensible = 0
- def __call__ (self, **kw):
- return apply (self.klass, (), kw)
- def mung (self, e):
- if len (e) == 3:
- (name, tag, typ) = e
- optional = 0
- elif len (e) == 4:
- (name, tag, typ, optional) = e
- else: assert (len(e) == 3 or len(e) == 4)
- typ = TYPE (tag, typ)
- return (name, typ, optional)
- def __repr__ (self):
- return ('SEQUENCE: ' + repr (self.klass) +
- '\n' + string.join (map (repr, self.seq), '\n'))
- def __getitem__ (self, key):
- for e in self.seq:
- if e[0] == key:
- return e[1]
- raise KeyError (key)
- def __setitem__ (self, key, val):
- for i in range (len (self.seq)):
- if self.seq[i][0] == key:
- self.seq[i] = self.mung (val)
- return
-        raise KeyError (key)
- def fulfill_promises (self, promises):
- for i in range (len(self.seq)):
- (name, typ, optional) = self.seq[i]
- if isinstance (typ, Promise):
- self.seq[i] = (name, typ.get_promised (promises), optional)
- else:
- typ.fulfill_promises (promises)
-
- def get_attribs (self):
- return map (lambda e: e[0], self.seq)
-
- def start_cons (self, tag, cur_len, ctx):
- return SeqConsElt (self)
-
- def encode_per (self, ctx, val):
- any_optional = 0 # XXX replace w/ every
- for (attrname, typ, optional) in self.seq:
- any_optional = any_optional or optional
- if any_optional:
- for (attrname, typ, optional) in self.seq:
- ctx.write_bits_unaligned (hasattr (val, attrname), 1)
- for (attrname, typ, optional) in self.seq:
- try:
- v = getattr (val, attrname)
- # XXX need to handle DEFAULT,not encode
- except AttributeError:
- if optional: continue
- else: raise EncodingError, ("Val " + repr(val) +
- " missing attribute: " +
- str(attrname))
- if trace_seq: print "Encoding", attrname, v
- typ.encode_per (ctx, v)
-
-
- def encode_val (self, ctx, val):
- for (attrname, typ, optional) in self.seq:
- try:
- v = getattr (val, attrname)
- except AttributeError:
- if optional: continue
- else: raise EncodingError, ("Val " + repr(val) +
- " missing attribute: " +
- str(attrname))
- if trace_seq: print "Encoding", attrname, v
- typ.encode (ctx, v)
-
-import new
-
-# SEQUENCE returns an object which is both an asn.1 spec and a callable
-# which generates a struct template to fill in.
-
-# I used to have SEQUENCE taking a classname and, using ~8 lines of
-# black (OK, grayish) magic (throw an exn, catch it, and futz with the
-# caller's locals dicts), bind the klass below in the caller's namespace.
-# This meant I could provide bindings for SEQUENCEs nested inside other
-# definitions (making my specs look more like the original ASN.1), and
-# that I got the correct name for debugging purposes instead of using
-# mk_klass_name (). I took it out b/c I didn't like the magic or the
-# funny syntax it used (a mere function call caused an alteration to the
-# caller's ns)
-
-# Now, the compiler takes care of generating the correct names for
-# top-level SEQUENCE definitions, and should be extended to handle
-# SEQUENCEs nested inside others.
-
-class Ctr:
- def __init__ (self):
- self.count = 0
- def __call__ (self):
- self.count = self.count + 1
- return self.count
-
-class_count = Ctr ()
-
-# This name only appears in debugging displays, so no big deal.
-def mk_seq_class_name ():
- return "seq_class_%d" % class_count ()
-
-
-class EXTERNAL_class (SEQUENCE_BASE):
- tag = (CONS_FLAG, EXTERNAL_TAG)
- def __repr__ (self):
- return ('EXTERNAL: ' + repr (self.klass) +
- '\n' + string.join (map (repr, self.seq), '\n'))
- class ConsElt(SeqConsElt):
- def __init__ (self, seq, ctx):
- self.ctx = ctx
- self.codec_pushed = 0
- SeqConsElt.__init__ (self, seq)
- def get_cur_def (self, seen_tag):
- self.found_ext_ANY = 0
- r = range (self.index, len (self.seq.seq))
- for i in r:
- (name, typ, optional) = self.seq.seq [i]
- if typ.check_tag (seen_tag):
- self.index = i
- if name == 'encoding' and seen_tag [1] == 0:
- asn = check_EXTERNAL_ASN (self.tmp)
- if asn <> None:
- self.found_ext_ANY = 1
- typ = asn
- new_codec_fn = self.ctx.charset_switch_oids.get (
- getattr (self.tmp, 'direct_reference',
- None), None)
- if new_codec_fn <> None:
- self.ctx.push_codec ()
- new_codec_fn ()
- self.codec_pushed = 1
- return typ
- if not optional:
- raise BERError ("EXTERNAL tag %s not found in %s (%d/%d)" %
- (str (seen_tag), str (self.seq),
- self.index, i))
- # This is, in fact, an error, because the last bit of
- # external isn't optional
- raise BERError ("EXTERNAL tag %s not found" % (str (seen_tag),))
- def handle_val (self,val):
- if self.found_ext_ANY:
- val = ('single-ASN1-type', val)
- if self.codec_pushed:
- self.ctx.pop_codec ()
- SeqConsElt.handle_val (self, val)
-
- def start_cons (self, tag, cur_len, ctx):
- return self.ConsElt (self, ctx)
-
- def encode_val (self, ctx, val):
- new_codec_fn = None
- for (attrname, typ, optional) in self.seq:
- try:
- v = getattr (val, attrname)
- except AttributeError:
- if optional: continue
- else: raise EncodingError, ("Val " + repr(val) +
- " missing attribute: " +
- str(attrname))
- if attrname == 'encoding' and v[0] == 'single-ASN1-type':
- asn = check_EXTERNAL_ASN (val)
- if asn <> None:
- typ = asn
- v = v[1]
- new_codec_fn = ctx.charset_switch_oids.get (
- getattr (val, 'direct_reference', None), None)
- if new_codec_fn <> None:
- ctx.push_codec ()
- new_codec_fn ()
- if trace_seq: print "Encoding", attrname, v
- typ.encode (ctx, v)
- if new_codec_fn <> None:
- ctx.pop_codec ()
-
-# XXX rename all these
-def SEQUENCE (spec, base_typ = SEQUENCE_BASE, seq_name = None,
- extra_bases = None):
- if seq_name == None:
- seq_name = mk_seq_class_name ()
- bases = [StructBase]
- if extra_bases <> None:
- bases = extra_bases + bases
- klass = new.classobj (seq_name, tuple (bases), {})
- seq = base_typ (klass, spec)
- klass._allowed_attrib_list = seq.get_attribs ()
- seq.klass = klass
- return seq
-
-# This is the pre-1994 def'n. Note that post-1994 removes the ANY
-# and BITSTRING options
-EXTERNAL = SEQUENCE ([('direct_reference', None, OID, 1),
- ('indirect_reference', None, INTEGER, 1),
- ('data_value_descriptor', None, ObjectDescriptor, 1),
- ('encoding', None,
- CHOICE([('single-ASN1-type', EXPLICIT(0), ANY),
- ('octet-aligned', 1, OCTSTRING),
- ('arbitrary', 2, BITSTRING)]))],
- EXTERNAL_class,
- seq_name = 'EXTERNAL')
-
-
-import math
-
-class REAL_class (SEQUENCE_BASE):
- tag = (CONS_FLAG, REAL_TAG)
-
-
-
-# note clients are allowed to treat equal numbers in different bases as
-# different, so keep mantissa/base/exponent
-
-
-class REAL_val:
- _mantissa_bits = 20 # XXX is there no way to auto-determine correct val?
- def __repr__ (self):
- return 'REAL %f' % (self.get_val ())
-
- def set_val (self, val):
- m, e = math.frexp (val)
- self.mantissa = int (m * pow (2, self._mantissa_bits))
- self.base = 2
- self.exponent = e - self._mantissa_bits
- return self
-
- def get_val (self):
- return self.mantissa * pow (self.base, self.exponent)
-
-
-REAL = SEQUENCE([('mantissa', None, INTEGER),
- ('base', None, INTEGER),
- ('exponent', None, INTEGER)],
- REAL_class,
- seq_name='REAL',
- extra_bases = [REAL_val])
-
-REAL.get_val = lambda self: (self.mantissa * 1.0 / self.base) * pow (self.base, self.exponent)
-REAL.__str__ = lambda self: "REAL %f" % (self.get_val (),)
-
-_oid_to_asn1_dict = {}
-
-
-
-def register_oid (oid, asn):
- tmp = EXPLICIT(0) # b/c ANY is EXPLICIT 0 arm of EXTERNAL CHOICE
- tmp.set_typ (asn)
- _oid_to_asn1_dict [OidVal (oid)] = tmp
-
-
-def check_EXTERNAL_ASN (so_far):
- if trace_external:
- print "in check", so_far, EXTERNAL.klass
- print "check 2", so_far.__class__
- assert (so_far.__class__ == EXTERNAL.klass) # only called from w/in EXTERNAL
- dir_ref = getattr (so_far, 'direct_reference', None)
- if dir_ref == None:
- return
- # in theory, should provide support for indirect_reference
- # indicating encoding type, but callers can receive asn1.ANY
- # decoded data, reencode it, and then redecode it with a proper
- # spec as a workaround. Let me know if you actually use
- # indirect_reference.
- if trace_external:
- print "so_far", so_far, dir_ref
- rv = _oid_to_asn1_dict.get (dir_ref, None)
- if trace_external:
- print rv, _oid_to_asn1_dict
- return rv
-
-
-class SEQUENCE_OF(ELTBASE):
- tag = (CONS_FLAG, SEQUENCE_TAG)
- known_len = 0
- def __init__ (self, typ):
- self.typ = typ
-
- def __getitem__ (self, key):
- if key == 0:
- return self.typ
- raise KeyError (key)
- def fulfill_promises (self, promises):
- if isinstance (self.typ, Promise):
- self.typ = self.typ.get_promised (promises)
- else:
- self.typ.fulfill_promises (promises)
-
-
- class ConsElt:
- def __init__ (self, typ):
- self.typ = typ
- self.lst = []
- def get_cur_def (self, seen_tag):
- return self.typ
- def handle_val (self, val):
- self.lst.append (val)
- def finish (self):
- return self.lst
- def start_cons (self, tag, cur_len, ctx):
- return self.ConsElt (self.typ)
-
- def encode_val (self, ctx, val):
- for e in val:
- self.typ.encode (ctx, e)
-
-class SET_OF(SEQUENCE_OF): # XXX SET_OF needs more implementation
- pass
-
-
-def sgn(val):
- if val < 0: return -1
- if val == 0: return 0
- return 1
-
-class BOOLEAN_class (ELTBASE):
- tag = (0, BOOL_TAG)
- known_len = 1
- def encode_val (self, ctx, val):
- ctx.len_write_known (1)
- ctx.bytes_write ([val <> 0])
- # if val is multiple of 256, Python would treat as true, but
- # just writing val would truncate. Thus, write val <> 0
- def encode_per (self, ctx, val):
- ctx.write_bits_unaligned (val <> 0, 1)
- def decode_val (self, ctx,buf):
- mylen = len (buf)
- if mylen <> 1: ctx.raise_error ("Bogus length for bool " +
- repr (mylen))
- # "not not" to canonicalize. Really only needed for round-trip
- # decode - reencode - redecode testing
- return not not buf [0]
-
-
-BOOLEAN = BOOLEAN_class ()
-
-class NULL_class (ELTBASE):
- tag = (0, NULL_TAG)
- known_len = 1
- def encode_val (self, ctx, val):
- ctx.len_write_known (0)
- def encode_per (self, ctx, val):
- pass
- def decode_val (self, ctx, buf):
- if len (buf) > 0: ctx.raise_error ("Bad length for NULL" + str (buf))
- return None
-
-NULL = NULL_class ()
-
-
-class ENUM (INTEGER_class):
- def __init__ (self, **kw):
- self.__dict__.update (kw)
-
-OBJECT_IDENTIFIER = OID # for convenience of compiler
-
-class Promise(ELTBASE):
- """Placeholder for generating recursive data structures.
- Replaced by calling fulfill_promises method."""
- def __init__ (self, type_name):
- self.type_name = type_name
- def get_promised (self, promises_dict):
- return promises_dict[self.type_name]
- def __str__ (self):
- return 'Promise: ' + self.type_name
-
-class Tester:
- def __init__ (self, print_test):
- self.idc1 = IncrementalDecodeCtx (NULL)
- self.idc2 = IncrementalDecodeCtx (ANY)
- self.print_test = print_test
-
- def test (self, spec, val, assertflag = 1):
- # XXX add an optional correct encoding to check against, and cmpfn
- buf = encode (spec, val)
- if self.print_test:
- for byte in buf:
- print hex (byte)[2:],
- print
-
- self.idc1.asn1_def = spec
- self.idc1.feed (buf)
- self.idc2.feed (buf)
- print self.idc1.get_bytes_inprocess_count ()
- print self.idc2.get_bytes_inprocess_count ()
-
-
- assert (self.idc1.get_bytes_inprocess_count () == 0)
- assert (self.idc2.get_bytes_inprocess_count () == 0)
-
- assert (self.idc1.val_count () == 1)
- assert (self.idc2.val_count () == 1)
- idec = self.idc1.get_first_decoded ()
- idec2 = self.idc2.get_first_decoded ()
- buf2 = encode (ANY, idec2)
- if self.print_test:
- for byte in buf2:
- print hex (byte)[2:],
- print
-
- if self.print_test:
- print "Val",repr(val), "idec", repr (idec), "any", idec2
-
- if assertflag:
- if buf2 <> buf:
- print "buf1, buf2 differ"
- assert (idec == val)
-
-
- def run (self):
-
- int_spec = TYPE (EXPLICIT(3), INTEGER)
- string_spec = TYPE (5, GeneralString)
- bitstring_spec = TYPE (5, BITSTRING)
- octstring_spec = TYPE (5, OCTSTRING)
- bool_spec = TYPE(100, BOOLEAN)
-
-
- self.test (bool_spec, 0)
- self.test (bool_spec, 1)
- self.test (bool_spec, -1, 0)
- self.test (bool_spec, 1024, 0)
- self.test (int_spec, 4)
- self.test (int_spec, 256)
- self.test (int_spec, -128)
- self.test (int_spec, -129) # should be 83 02 FF 7F
- self.test (int_spec, -1)
- self.test (int_spec, 0)
- self.test (int_spec, -27066) # should be 83 02 96 46
- self.test (string_spec, '')
- self.test (string_spec, 'Lemon curry?')
- self.test (octstring_spec, '\xFF\x00\x99 Foo')
-
- oid_spec = TYPE (4, OID)
- oid = OidVal ([1, 2, 840, 10003, 0, 1])
- self.test (oid_spec, oid)
- null_spec = TYPE (65536, NULL)
- self.test (null_spec, None)
-
- real_spec = TYPE(3,REAL)
- real_spec2 = REAL
- rval = REAL ()
- rval.set_val (4.0)
- assert 4.0 == rval.get_val ()
- self.test (real_spec, rval)
- self.test (real_spec2, rval)
-
-
- bs_test = BitStringVal (17, 0x1B977L) # 011011100101110111
- print "bs_test", bs_test
- for i in range (10):
- print "bitstring", i, bs_test
- self.test (bitstring_spec, bs_test)
- bs_test.top_ind = bs_test.top_ind + 1
-
- seq_of_spec = SEQUENCE_OF (int_spec)
- self.test (seq_of_spec, [1,44,131072])
- seq_of_spec = SEQUENCE_OF (TYPE(1, INTEGER))
- self.test (seq_of_spec, [1,44,131072])
- seq_of_spec = SEQUENCE_OF (INTEGER)
- self.test (seq_of_spec, [1,44,131072])
-
- seq_of_spec2 = TYPE (18, SEQUENCE_OF (TYPE(105,GeneralString)))
- self.test (seq_of_spec2, ['db'])
- self.test (seq_of_spec2, ['db1', 'db2', 'db3'])
- self.test (seq_of_spec2, [])
-
- seq_of3 = SEQUENCE_OF(Promise('s'))
- seq_of3.fulfill_promises ({'s': seq_of3})
- self.test (seq_of3, [[[],[],[[[[]]]]]])
- # stupendously useless without a CHOICE in the SEQUENCE_OF
- # to introduce ground terms, but hey.
-
- choice_spec = CHOICE ([('foo', 1, INTEGER),
- ('bar', None, INTEGER),
- ('baz', None, string_spec),
- ('foobar', None, seq_of_spec2)])
- self.test (choice_spec, ('foo', 2))
- self.test (choice_spec, ('bar', 3))
- self.test (choice_spec, ('baz', 'choose wisely'))
- self.test (choice_spec, ('foobar', ['choose wisely', 'choose stupidly']))
-
-
- choice2_spec = CHOICE ([('a', 1, INTEGER),
- ('b', EXPLICIT(2), Promise('choice2')),
- ('c', 3, SEQUENCE_OF(Promise('choice2')))])
- # EXPLICIT is necessary to avoid CHOICE of CHOICE without tag
- # to figure out which arm to take
- choice2_spec.fulfill_promises ({'choice2' : choice2_spec})
- c2 = ('c', [('a', 4),
- ('b', ('c', [('a', 5), ('b', ('a', 6))]))])
- self.test (choice2_spec, c2)
-
- seq_spec = SEQUENCE (
- [('a',5, INTEGER),
- ('c', 51, INTEGER, 1),
- ('b',6, INTEGER)])
-
- class Foo (seq_spec.klass):
- def __init__ (self, a = 0,b = 0):
- StructBase.__init__ (self)
- self.a = a
- self.b = b
-
- seq_test = Foo (4,5)
- self.test (seq_spec, seq_test)
-
- seq_test = Foo (4,5)
- seq_test.c = 9
- self.test (seq_spec, seq_test)
-
- v = EXTERNAL ()
- v.direct_reference = oid
- v.data_value_descriptor = "infrequently used field"
- v.encoding = ('octet-aligned', 'foo bar')
- self.test (EXTERNAL, v)
- v.direct_reference = OidVal(SUTRS)
- v.encoding = ('single-ASN1-type', 'This is a SUTRS string')
- self.test (EXTERNAL, v)
-
- big_spec_test = SEQUENCE ([('a', 5, INTEGER),
- ('b', 4096, GeneralString)])
- sq = big_spec_test ()
- sq.a = 1
- sq.b = '34' * 8192
- self.test (big_spec_test, sq)
- sq.b = '35' * (65536 * 2)
- self.test (big_spec_test, sq)
- try:
- sq.c = 'bogus'
- except AttributeError, exn:
- assert (exn.args == ('c',))
- else: assert (0)
- bitstringval = BitStringVal (12, 0x16eb)
- encoded_val = encode (BITSTRING, bitstringval)
- Dubuisson_prim_val = [0x3, 0x3, 0x3, 0xB7, 0x58]
- Dubuisson_cons_val = [0x23, 0x80, # see pp. 402-404
- 0x3, 0x2, 0x0, 0xB7,
- 0x3, 0x2, 0x3, 0x58,
- 0x0, 0x0]
- if cons_encoding:
- Dubuisson_val = Dubuisson_cons_val
- else:
- Dubuisson_val = Dubuisson_prim_val
- print encoded_val, Dubuisson_val
- assert (len (encoded_val) == len (Dubuisson_val))
- for v1, v2 in zip (encoded_val, Dubuisson_val):
- assert (v1 == v2)
-
- self.idc1.asn1_def = BITSTRING
- self.idc1.feed (Dubuisson_val)
- assert (self.idc1.val_count () == 1)
- idec = self.idc1.get_first_decoded ()
- print idec
- assert (idec.top_ind == 12)
- assert (idec.bits == 0x16eb)
-
-
-SUTRS = [1,2,840,10003,5,101]
-
-def run (print_flag):
- t = Tester (print_flag)
- global cons_encoding, indef_len_encodings # XXX why is global needed?
-
- register_oid (SUTRS, GeneralString)
- for indef_len_encodings in [0,1]:
- for cons_encoding in [0,1]:
- print "Starting", indef_len_encodings, cons_encoding
- t.run ()
- print "byte offset", t.idc1.offset, t.idc2.offset
-
-
-
-
-
-import profile
-
-if __name__ == '__main__':
- pwc = PERWriteCtx (aligned = 0)
- inner_seq_def = SEQUENCE ([
- ('d1', 0, BOOLEAN),
- ('d2', 0, BOOLEAN)])
-
- test_def = SEQUENCE ([
- ('a', 0, INTEGER_class (None, 0,7)),
- ('b', 0, BOOLEAN),
- ('c', 0, INTEGER_class (None, 0,3)),
- ('d', 0, inner_seq_def)])
- test = test_def ()
- test.a = 5
- test.b = 1
- test.c = 1
- test.d = inner_seq_def ()
- test.d.d1 = 1
- test.d.d2 = 1
- test_def.encode_per (pwc, test)
- print "bit offset", pwc.bit_offset
- print map (hex, pwc.get_data ())
- if 0:
- profile.run ("run (0)")
- else:
- run (1)
-
diff --git a/python/PyZ3950/bib1msg.py b/python/PyZ3950/bib1msg.py
deleted file mode 100644
index 35be57f..0000000
--- a/python/PyZ3950/bib1msg.py
+++ /dev/null
@@ -1,191 +0,0 @@
-"""Translate bib-1 error numbers to messages."""
-
-from PyZ3950 import asn1
-from PyZ3950 import z3950
-from PyZ3950 import oids
-
-msg_dict = {
-1: 'permanent system error', # (unspecified),
-2: 'temporary system error', # (unspecified),
-3: 'unsupported search', # (unspecified),
-4: 'Terms only exclusion (stop) words', # (unspecified),
-5: 'Too many argument words', # (unspecified),
-6: 'Too many boolean operators', # (unspecified),
-7: 'Too many truncated words', # (unspecified),
-8: 'Too many incomplete subfields', # (unspecified),
-9: 'Truncated words too short', # (unspecified),
-10: 'Invalid format for record number (search term)', # (unspecified),
-11: 'Too many characters in search statement', # (unspecified),
-12: 'Too many records retrieved', # (unspecified),
-13: 'Present request out-of-range', # (unspecified),
-14: 'System error in presenting records', # (unspecified),
-15: 'Record not authorized to be sent intersystem', # (unspecified),
-16: 'Record exceeds Preferred-message-size', # (unspecified),
-17: 'Record exceeds Exceptional-record-size', # (unspecified),
-18: 'Result set not supported as a search term', # (unspecified),
-19: 'Only single result set as search term supported', # (unspecified),
-20: 'Only ANDing of a single result set as search term', # (unspecified),
-21: 'Result set exists and replace indicator off', # (unspecified),
-22: 'Result set naming not supported', # (unspecified),
-23: 'Specified combination of databases not supported', # (unspecified),
-24: 'Element set names not supported', # (unspecified),
-25: 'Specified element set name not valid for specified database', # (unspecified),
-26: 'Only generic form of element set name supported', # (unspecified),
-27: 'Result set no longer exists - unilaterally deleted by target', # (unspecified),
-28: 'Result set is in use', # (unspecified),
-29: 'One of the specified databases is locked', # (unspecified),
-30: 'Specified result set does not exist', # (unspecified),
-31: 'Resources exhausted - no results available', # (unspecified),
-32: 'Resources exhausted - unpredictable partial results available', # (unspecified),
-33: 'Resources exhausted - valid subset of results available', # (unspecified),
-100: '(unspecified) error', # (unspecified),
-101: 'Access-control failure', # (unspecified),
-102: 'Challenge required, could not be issued - operation terminated', # (unspecified),
-103: 'Challenge required, could not be issued - record not included', # (unspecified),
-104: 'Challenge failed - record not included', # (unspecified),
-105: 'Terminated at origin request', # (unspecified),
-106: 'No abstract syntaxes agreed to for this record', # (unspecified),
-107: 'Query type not supported', # (unspecified),
-108: 'Malformed query', # (unspecified),
-109: 'Database unavailable', # database name,
-110: 'Operator unsupported', # operator,
-111: 'Too many databases specified', # maximum,
-112: 'Too many result sets created', # maximum,
-113: 'Unsupported attribute type', # type,
-114: 'Unsupported Use attribute', # value,
-115: 'Unsupported term value for Use attribute', # term,
-116: 'Use attribute required but not supplied', # (unspecified),
-117: 'Unsupported Relation attribute', # value,
-118: 'Unsupported Structure attribute', # value,
-119: 'Unsupported Position attribute', # value,
-120: 'Unsupported Truncation attribute', # value,
-121: 'Unsupported Attribute Set', # oid,
-122: 'Unsupported Completeness attribute', # value,
-123: 'Unsupported attribute combination', # (unspecified),
-124: 'Unsupported coded value for term', # value,
-125: 'Malformed search term', # (unspecified),
-126: 'Illegal term value for attribute', # term,
-127: 'Unparsable format for un-normalized value', # value,
-128: 'Illegal result set name', # name,
-129: 'Proximity search of sets not supported', # (unspecified),
-130: 'Illegal result set in proximity search', # result set name,
-131: 'Unsupported proximity relation', # value,
-132: 'Unsupported proximity unit code', # value,
-201: 'Proximity not supported with this attribute combination attribute', # list,
-202: 'Unsupported distance for proximity', # distance,
-203: 'Ordered flag not supported for proximity', # (unspecified),
-205: 'Only zero step size supported for Scan', # (unspecified),
-206: 'Specified step size not supported for Scan step', # size,
-207: 'Cannot sort according to sequence', # sequence,
-208: 'No result set name supplied on Sort', # (unspecified),
-209: 'Generic sort not supported (database-specific sort only supported)', # (unspecified),
-210: 'Database specific sort not supported', # (unspecified),
-211: 'Too many sort keys', # number,
-212: 'Duplicate sort keys', # key,
-213: 'Unsupported missing data action', # value,
-214: 'Illegal sort relation', # relation,
-215: 'Illegal case value', # value,
-216: 'Illegal missing data action', # value,
-217: 'Segmentation: Cannot guarantee records will fit in specified segments', # (unspecified),
-218: 'ES: Package name already in use', # name,
-219: 'ES: no such package, on modify/delete', # name,
-220: 'ES: quota exceeded', # (unspecified),
-221: 'ES: extended service type not supported', # type,
-222: 'ES: permission denied on ES - id not authorized', # (unspecified),
-223: 'ES: permission denied on ES - cannot modify or delete', # (unspecified),
-224: 'ES: immediate execution failed', # (unspecified),
-225: 'ES: immediate execution not supported for this service', # (unspecified),
-226: 'ES: immediate execution not supported for these parameters', # (unspecified),
-227: 'No data available in requested record syntax', # (unspecified),
-228: 'Scan: malformed scan', # (unspecified),
-229: 'Term type not supported', # type,
-230: 'Sort: too many input results', # max,
-231: 'Sort: incompatible record formats', # (unspecified),
-232: 'Scan: term list not supported', # alternative term list,
-233: 'Scan: unsupported value of position-in-response', # value,
-234: 'Too many index terms processed', # number of terms,
-235: 'Database does not exist', # database name,
-236: 'Access to specified database denied', # database name,
-237: 'Sort: illegal sort', # (unspecified),
-238: 'Record not available in requested syntax', # alternative suggested syntax(es),
-239: 'Record syntax not supported', # syntax,
-240: 'Scan: Resources exhausted looking for satisfying terms', # (unspecified),
-241: 'Scan: Beginning or end of term list', # (unspecified),
-242: 'Segmentation: max-segment-size too small to segment record', # smallest acceptable size,
-243: 'Present: additional-ranges parameter not supported', # (unspecified),
-244: 'Present: comp-spec parameter not supported', # (unspecified),
-245: "Type-1 query: restriction ('resultAttr') operand not supported:", # (unspecified),
-246: "Type-1 query: 'complex' attributeValue not supported", # (unspecified),
-247: "Type-1 query: 'attributeSet' as part of AttributeElement not supported", # (unspecified),
-1001: 'Malformed APDU',
-1002: 'ES: EXTERNAL form of Item Order request not supported.', # ,
-1003: 'ES: Result set item form of Item Order request not supported.', # ,
-1004: 'ES: Extended services not supported unless access control is in effect.', # ,
-1005: 'Response records in Search response not supported.', # ,
-1006: 'Response records in Search response not possible for specified database (or database combination). See note 1.', # ,
-1007: 'No Explain server. See note 2.', # pointers to servers that have a surrogate Explain database for this server.,
-1008: 'ES: missing mandatory parameter for specified function', # parameter,
-1009: 'ES: Item Order, unsupported OID in itemRequest.', # OID,
-1010: 'Init/AC: Bad Userid', # ,
-1011: 'Init/AC: Bad Userid and/or Password', # ,
-1012: 'Init/AC: No searches remaining (pre-purchased searches exhausted)', # ,
-1013: 'Init/AC: Incorrect interface type (specified id valid only when used with a particular access method or client)', # ,
-1014: 'Init/AC: Authentication System error', # ,
-1015: 'Init/AC: Maximum number of simultaneous sessions for Userid', # ,
-1016: 'Init/AC: Blocked network address', # ,
-1017: 'Init/AC: No databases available for specified userId', # ,
-1018: 'Init/AC: System temporarily out of resources', # ,
-1019: 'Init/AC: System not available due to maintenance', # when it's expected back up,
-1020: 'Init/AC: System temporarily unavailable', # when it's expected back up,
-1021: 'Init/AC: Account has expired', # ,
-1022: 'Init/AC: Password has expired so a new one must be supplied', # ,
-1023: 'Init/AC: Password has been changed by an administrator so a new one must be supplied', # ,
-1024: 'Unsupported Attribute. See note 3.', # an unstructured string indicating the object identifier of the attribute set id, the numeric value of the attribute type, and the numeric value of the attribute.,
-1025: 'Service not supported for this database', # ,
-1026: 'Record cannot be opened because it is locked', # ,
-1027: 'SQL error', # ,
-1028: 'Record deleted', # ,
-1029: 'Scan: too many terms requested.', # Addinfo: max terms supported,
-1040: 'ES: Invalid function', # function,
-1041: 'ES: Error in retention time', # (unspecified),
-1042: 'ES: Permissions data not understood', # permissions,
-1043: 'ES: Invalid OID for task specific parameters', # oid,
-1044: 'ES: Invalid action', # action,
-1045: 'ES: Unknown schema', # schema,
-1046: 'ES: Too many records in package', # maximum number allowed,
-1047: 'ES: Invalid wait action', # wait action,
-1048: 'ES: Cannot create task package -- exceeds maximum permissible size (see note 4)', # maximum task package size,
-1049: 'ES: Cannot return task package -- exceeds maximum permissible size for ES response (see note 5)', # maximum task package size for ES response,
-1050: 'ES: Extended services request too large (see note 6)', # maximum size of extended services request,
-1051: 'Scan: Attribute set id required -- not supplied', # ,
-1052: 'ES: Cannot process task package record -- exceeds maximum permissible record size for ES (see note 7)', # maximum record size for ES,
-1053: 'ES: Cannot return task package record -- exceeds maximum permissible record size for ES response (see note 8)', # maximum record size for ES response,
-1054: 'Init: Required negotiation record not included', # oid(s) of required negotiation record(s),
-1055: 'Init: negotiation option required', # ,
-1056: 'Attribute not supported for database', # attribute (oid, type, and value), and database name,
-1057: 'ES: Unsupported value of task package parameter (See Note 9)', # parameter and value,
-1058: 'Duplicate Detection: Cannot dedup on requested record portion', # ,
-1059: 'Duplicate Detection: Requested detection criterion not supported', # detection criterion,
-1060: 'Duplicate Detection: Requested level of match not supported', # ,
-1061: 'Duplicate Detection: Requested regular expression not supported', # ,
-1062: 'Duplicate Detection: Cannot do clustering', # ,
-1063: 'Duplicate Detection: Retention criterion not supported', # retention criterion,
-1064: 'Duplicate Detection: Requested number (or percentage) of entries for retention too large', # ,
-1065: 'Duplicate Detection: Requested sort criterion not supported', # sort criterion,
-1066: 'CompSpec: Unknown schema, or schema not supported.', # ,
-1067: 'Encapsulation: Encapsulated sequence of PDUs not supported.', # specific unsupported sequence,
-1068: 'Encapsulation: Base operation (and encapsulated PDUs) not executed based on pre-screening analysis.', # ,
-1069: 'No syntaxes available for this request. See note 10.', # ,
-1070: 'user not authorized to receive record(s) in requested syntax', # ,
-1071: 'preferredRecordSyntax not supplied', # ,
-1072: 'Query term includes characters that do not translate into the target character set.', # Characters that do not translate
-}
-
-
-def lookup_errmsg (condition, oid):
- if oid <> oids.Z3950_DIAG_BIB1_ov:
- return "Unknown oid: %s condition %d" % (str (oid), condition)
- if msg_dict.has_key (condition):
- return msg_dict[condition]
- else:
- return "Unknown BIB-1 error condition %d" % (condition,)
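For reference, lookup_errmsg above simply keys msg_dict on the numeric condition once the diagnostic's OID has been checked against BIB-1. A minimal sketch of how a caller might have used it; only lookup_errmsg and oids.Z3950_DIAG_BIB1_ov come from the modules themselves, the wrapper function is hypothetical:

    from PyZ3950 import bib1msg, oids

    def describe_diag (condition):
        # e.g. condition 109 -> 'Database unavailable'
        return "%d: %s" % (condition,
                           bib1msg.lookup_errmsg (condition, oids.Z3950_DIAG_BIB1_ov))

    print describe_diag (109)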
diff --git a/python/PyZ3950/c2query.py b/python/PyZ3950/c2query.py
deleted file mode 100644
index f9c4a10..0000000
--- a/python/PyZ3950/c2query.py
+++ /dev/null
@@ -1,406 +0,0 @@
-
-#!/usr/local/bin/python2.3
-
-try:
- from cStringIO import StringIO
-except:
- from StringIO import StringIO
-from PyZ3950 import asn1, z3950, oids  # asn1 is needed for OidVal in C2Parser.clause
-from types import IntType, StringType, ListType
-# We need "\"\"" to be one token
-from PyZ3950.CQLParser import CQLshlex
-from PyZ3950.CQLUtils import ZCQLConfig
-from PyZ3950.zdefs import make_attr
-zconfig = ZCQLConfig()
-
-"""
-http://cheshire.berkeley.edu/cheshire2.html#zfind
-
-top ::= query ['resultsetid' name]
-query ::= query boolean clause | clause
-clause ::= '(' query ')'
- | attributes [relation] term
- | resultset
-attributes ::= '[' { [set] type '=' value } ']' | name
-boolean ::= 'and' | 'or' | 'not' | (synonyms)
-prox ::= ('!PROX' | (synonyms)) {'/' name}
-relation ::= '>' | '<' | ...
-
-[bib1 1=5, bib1 3=6] > term and title @ fish
-"""
-
-booleans = {'AND' : 'and',
- '.AND.' : 'and',
- '&&' : 'and',
- 'OR' : 'or',
- '.OR.' : 'or',
- '||' : 'or',
- 'NOT' : 'and-not',
- '.NOT.' : 'and-not',
- 'ANDNOT' : 'and-not',
- '.ANDNOT.' : 'and-not',
- '!!' : 'and-not'
- }
-
-relations = {'<' : 1,
- 'LT' : 1,
- '.LT.' : 1,
- '<=' : 2,
- 'LE' : 2,
- '.LE.' : 2,
- '=' : 3,
- '>=' : 4,
- 'GE' : 4,
- '.GE.' : 4,
- '>' : 5,
- 'GT' : 5,
- '.GT.' : 5,
- '<>' : 6,
- '!=' : 6,
- 'NE' : 6,
- '.NE.' : 6,
- '?' : 100,
- 'PHON' : 100,
- '.PHON.' : 100,
- '%' : 101,
- 'STEM' : 101,
- '.STEM.' : 101,
- '@' : 102,
- 'REL' : 102,
- '.REL.' : 102,
- '<=>' : 104,
- 'WITHIN' : 104,
- '.WITHIN.' : 104}
-
-geoRelations = {'>=<' : 7,
- '.OVERLAPS.' : 7,
- '>#<' : 8,
- '.FULLY_ENCLOSED_WITHIN.' : 8,
- '<#>' : 9,
- '.ENCLOSES.' : 9,
- '<>#' : 10,
- '.OUTSIDE_OF.' : 10,
- '+-+' : 11,
- '.NEAR.' : 11,
- '.#.' : 12,
- '.MEMBERS_CONTAIN.' : 12,
- '!.#.' : 13,
- '.MEMBERS_NOT_CONTAIN.' : 13,
- ':<:' : 14,
- '.BEFORE.' : 14,
- ':<=:' : 15,
- '.BEFORE_OR_DURING.' : 15,
- ':=:' : 16,
- '.DURING.' : 16,
- ':>=:' : 17,
- '.DURING_OR_AFTER.' : 17,
- ':>:' : 18,
- '.AFTER.' : 18}
-
-proxBooleans = {'!PROX' : (2, 0, 2),
- '!ADJ' : (2, 0, 2),
- '!NEAR' : (20, 0, 2),
- '!FAR' : (20, 0, 4),
- '!OPROX' : (2, 1, 2),
- '!OADJ' : (2, 1, 2),
- '!ONEAR' : (20, 1, 2),
- '!OFAR' : (20, 1, 4)}
-
-proxUnits = {'C' : 1,
- 'CHAR' : 1,
- 'W' : 2,
- 'WORD' : 2,
- 'S' : 3,
- 'SENT' : 3,
- 'SENTENCE' : 3,
- 'P' : 4,
- 'PARA' : 4,
- 'PARAGRAPH' : 4,
- 'SECTION' : 5,
- 'CHAPTER' : 6,
- 'DOCUMENT' : 7,
- 'ELEMENT' : 8,
- 'SUBELEMENT' : 9,
- 'ELEMENTTYPE' : 10,
- 'BYTE' : 11}
-
-privateBooleans = {'!FUZZY_AND' : 1,
- '!FUZZY_OR' : 2,
- '!FUZZY_NOT' : 3,
- '!RESTRICT_FROM' : 4,
- '!RESTRICT_TO' : 5,
- '!MERGE_SUM' : 6,
- '!MERGE_MEAN' : 7,
- '!MERGE_NORM' : 8}
-
-xzconfig = ZCQLConfig()
-
-class C2Parser:
- lexer = None
- currentToken = None
- nextToken = None
-
- def __init__(self, l):
- self.lexer = l
- self.fetch_token()
-
-
- def fetch_token(self):
- tok = self.lexer.get_token()
- self.currentToken = self.nextToken
- self.nextToken = tok
-
- def is_boolean(self, tok=None):
- if (tok == None):
- tok = self.currentToken
- if (privateBooleans.has_key(tok.upper())):
- return 1
- elif (booleans.has_key(tok.upper())):
- return 2
- elif (proxBooleans.has_key(tok.upper())):
- return 3
- else:
- return 0
-
-
- def top(self):
-
- rpn = self.query()
- # Check for resultsetid
- if (self.currentToken.lower() == 'resultsetid'):
- self.fetch_token()
- resultset = self.currentToken
- else:
- resultset = None
-
- rpnq = z3950.RPNQuery()
- rpnq.attributeSet = oids.Z3950_ATTRS_BIB1_ov
- rpnq.rpn = rpn
- q = ('type_1', rpnq)
- return (q, resultset)
-
- def query(self):
- self.fetch_token()
- left = self.subquery()
- while 1:
- if not self.currentToken:
- break
- bool = self.is_boolean()
- if bool:
- bool = self.boolean()
- right = self.subquery()
- # Put left into triple, make triple new left
- op = z3950.RpnRpnOp()
- op.rpn1 = left
- op.rpn2 = right
- op.op = bool
- wrap = ('rpnRpnOp', op)
- left = wrap
- else:
- break
- return left
-
-
- def subquery(self):
- if self.currentToken == "(":
- object = self.query()
- if (self.currentToken <> ")"):
- raise ValueError
- else:
- self.fetch_token()
- else:
- object = self.clause()
- return object
-
- def boolean(self):
- tok = self.currentToken.upper()
- self.fetch_token()
- if (booleans.has_key(tok)):
- return (booleans[tok], None)
- elif (privateBooleans.has_key(tok)):
- # Generate cutesie prox trick
- type = privateBooleans[tok]
- prox = z3950.ProximityOperator()
- prox.proximityUnitCode = ('private', type)
- prox.distance = 0
- prox.ordered = 0
- prox.relationType = 3
- return ('op', ('prox', prox))
-
- elif (proxBooleans.has_key(tok)):
- # Generate prox
- prox = z3950.ProximityOperator()
- stuff = proxBooleans[tok]
- prox.distance = stuff[0]
- prox.ordered = stuff[1]
- prox.relationType = stuff[2]
- prox.proximityUnitCode = ('known', 2)
-
- # Now look for /
- while (self.currentToken == "/"):
- self.fetch_token()
- if (self.currentToken.isdigit()):
- prox.distance = int(self.currentToken)
- elif (proxUnits.has_key(self.currentToken.upper())):
- prox.proximityUnitCode = ('known', proxUnits[self.currentToken.upper()])
- else:
- raise ValueError
- self.fetch_token()
- return ('op', ('prox', prox))
- else:
- # Argh!
- raise ValueError
-
- def clause(self):
-
- if (self.is_boolean(self.nextToken) or not self.nextToken or self.nextToken.lower() == 'resultsetid' or self.nextToken == ")"):
- # Must be a resultset
- tok = self.currentToken
- self.fetch_token()
- return ('op', ('resultSet', tok))
-
- elif (self.currentToken == '['):
- # List of attributes
- attrs = []
- oidHash = oids.oids['Z3950']['ATTRS']
- while (1):
- self.fetch_token()
-
- if (self.currentToken == ']'):
- break
-
- if (oidHash.has_key(self.currentToken)):
- attrSet = oidHash[self.currentToken]['ov']
- self.fetch_token()
- elif (self.currentToken[:8] == '1.2.840.'):
- attrSet = asn1.OidVal(map(int, self.currentToken.split('.')))
- self.fetch_token()
- else:
- attrSet = None
-
- if (self.currentToken[-1] == ','):
- tok = self.currentToken[:-1]
- else:
- tok = self.currentToken
-
- if (tok.isdigit()):
- # 1 = foo
- atype = int(tok)
- self.fetch_token()
- if (self.currentToken == '='):
- # = foo
- self.fetch_token()
-
- if (self.currentToken[0] == '='):
- # =foo
- tok = self.currentToken[1:]
- else:
- tok = self.currentToken
-
- if (tok[-1] == ','):
- tok = tok[:-1]
-
- if (tok.isdigit()):
- val = int(tok)
- else:
- val = tok
- if (val[0] == "'" and val[-1] == "'"):
- val = val[1:-1]
- elif (tok[-1] == '='):
- #1= foo
- tok = tok[:-1]
- if (tok.isdigit()):
- atype = int(tok)
- self.fetch_token()
- if (self.currentToken[-1] == ","):
- tok = self.currentToken[:-1]
- else:
- tok = self.currentToken
- if (tok.isdigit()):
- val = int(self.currentToken)
- else:
- val = tok
- if (val[0] == "'" and val[-1] == "'"):
- val = val[1:-1]
-
- elif (tok.find('=') > -1):
- # 1=foo
- (atype, val) = self.currentToken.split('=')
- atype = int(atype)
- if (val[-1] == ","):
- val = val[:-1]
- if (val.isdigit()):
- val = int(val)
- elif (val[0] == "'" and val[-1] == "'"):
- val = val[1:-1]
- else:
- # ???
- raise ValueError
- attrs.append([attrSet, atype, val])
-
- else:
- # Check for named index
- if (zconfig.BIB1.has_key(self.currentToken.lower())):
- attrs = [[oids.Z3950_ATTRS_BIB1_ov, 1, zconfig.BIB1[self.currentToken.lower()]]]
- else:
- # Just pass through the name
- attrs = [[oids.Z3950_ATTRS_BIB1_ov, 1, self.currentToken]]
-
- self.fetch_token()
- # Check for relation
- tok = self.currentToken.upper()
- if (relations.has_key(tok)):
- val = relations[tok]
- found = 0
- for a in attrs:
- if (a[0] in [oids.Z3950_ATTRS_BIB1, None] and a[1] == 2):
- found =1
- a[2] = val
- break
- if (not found):
- attrs.append([None, 2, val])
- self.fetch_token()
- elif (geoRelations.has_key(tok)):
- val = geoRelations[tok]
- found = 0
- for a in attrs:
- if (a[0] in [oids.Z3950_ATTRS_BIB1, oids.Z3950_ATTRS_GEO, None] and a[1] == 2):
- found = 1
- a[2] = val
- break
- if (not found):
- attrs.append([oids.Z3950_ATTRS_GEO, 2, val])
- self.fetch_token()
-
-        if (self.currentToken.find(' ') > -1):
-            # Already quoted
-            term = self.currentToken
-            self.fetch_token()
-        else:
-            # Accumulate tokens until a boolean, 'resultsetid' or end of query
-            term = []
-            while (self.currentToken and not self.is_boolean(self.currentToken) and self.currentToken.lower() != 'resultsetid'):
-                term.append(self.currentToken)
-                self.fetch_token()
-            term = ' '.join(term)
-
- # Phew. Now build AttributesPlusTerm
- clause = z3950.AttributesPlusTerm()
- clause.attributes = [make_attr(*e) for e in attrs]
- clause.term = ('general', term)
- return ('op', ('attrTerm', clause))
-
-
-def parse(q):
-
- query = StringIO(q)
- lexer = CQLshlex(query)
- # Override CQL's wordchars list to include /=><
- lexer.wordchars += "!@#$%^&*-+;,.?|~`:\\><='"
- lexer.wordchars = lexer.wordchars.replace('[', '')
- lexer.wordchars = lexer.wordchars.replace(']', '')
-
-
- parser = C2Parser(lexer)
- return parser.top()
-
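For orientation, parse above tokenizes the query with CQLshlex, hands the token stream to C2Parser.top, and returns a (('type_1', RPNQuery), resultSetId) pair following the zfind grammar quoted in the module docstring. A minimal sketch of the intended use, reusing the sample query from that docstring:

    from PyZ3950 import c2query

    query, rsid = c2query.parse ('[bib1 1=5, bib1 3=6] > term and title @ fish')
    # query is ('type_1', <z3950.RPNQuery>), ready for a Z39.50 searchRequest;
    # rsid is None unless the string ended with "resultsetid <name>".
    print query[0], rsid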
diff --git a/python/PyZ3950/ccl.py b/python/PyZ3950/ccl.py
deleted file mode 100644
index 77c3068..0000000
--- a/python/PyZ3950/ccl.py
+++ /dev/null
@@ -1,365 +0,0 @@
-#!/usr/bin/env python
-
-"""Implements part of CCL, the Common Command Language, ISO 8777. I'm
-working from the description in the YAZ toolkit
-(http://www.indexdata.dk/yaz/doc/tools.php), rather than the ISO
-spec. Two extensions:
-- qualifiers can be literal "(attrtyp, attrval)" pairs, so, e.g., the
-following is a legitimate query for ISBN: "(1,7)=0312033095"
-- an optional ATTRSET (attrset/query) clause, which must appear at the beginning
-of the string.
-Allowed values are:
-BIB1 (default)
-XD1
-UTIL
-ZTHES1
-EXP1
-or an oid expressed as a dotted string. (A leading dot implies a
-prefix of 1.2.840.10003.3, so, e.g., .1 is the same as BIB1.)
-
-Eventually I will support v3-style mixing attribute sets within
-a single query, but for now I don't.
-"""
-
-from __future__ import nested_scopes
-import string
-
-in_setup = 0
-
-try:
- from PyZ3950 import z3950
- from PyZ3950 import oids
- from PyZ3950 import asn1
-
- _attrdict = {
- 'bib1' : oids.Z3950_ATTRS_BIB1_ov,
- 'zthes1': oids.Z3950_ATTRS_ZTHES_ov,
- 'xd1': oids.Z3950_ATTRS_XD1_ov,
- 'utility': oids.Z3950_ATTRS_UTIL_ov,
- 'exp1': oids.Z3950_ATTRS_EXP1_ov
- }
-
-except ImportError, err:
- print "Error importing (OK during setup)", err
- in_setup = 1
-
-class QuerySyntaxError(Exception): pass
-class ParseError(QuerySyntaxError): pass
-class LexError(QuerySyntaxError): pass
-class UnimplError(QuerySyntaxError): pass
-
-tokens = ('LPAREN', 'RPAREN', 'COMMA',
- 'SET', 'ATTRSET','QUAL', 'QUOTEDVALUE', 'RELOP', 'WORD',
- 'LOGOP', 'SLASH')
-
-t_LPAREN= r'\('
-t_RPAREN= r'\)'
-t_COMMA = r','
-t_SLASH = r'/'
-def t_ATTRSET(t):
- r'(?i)ATTRSET'
- return t
-
-def t_SET (t): # need to def as function to override parsing as WORD, gr XXX
- r'(SET)'
- return t
-
-relop_to_attrib = {
- '<': 1,
- '<=': 2,
- '=': 3,
- '>=': 4,
- '>': 5,
- '<>': 6}
-
-t_RELOP = "|".join (["(%s)" % r for r in relop_to_attrib.keys()])
-# XXX Index Data docs say 'doesn't follow ... ISO8777'?
-
-# XXX expand to rd. addt'l defns from file?
-
-qual_dict = { # These are bib-1 attribute values, see
-# http://www.loc.gov/z3950/agency/defns/bib1.html and ftp://ftp.loc.gov/pub/z3950/defs/bib1.txt
- 'TI': (1,4),
- 'AU': (1,1003), # use 1003 to work w/ both NLC-BNC and LC
- 'ISBN': (1,7),
- 'LCCN': (1,9),
- 'ANY': (1,1016),
- 'FIF': (3, 1), # first-in-field
- 'AIF': (3,3), # any-in-field (default)
- 'RTRUNC': (5,1),
- 'NOTRUNC': (5,100) # (default)
- }
-default_quals = ['ANY'] # XXX should be per-attr-set
-default_relop = '='
-
-def t_QUAL(t):
- return t
-
-def mk_quals ():
- quals = ("|".join (map (lambda x: '(' + x + ')', qual_dict.keys())))
- t_QUAL.__doc__ = "(?i)" + quals + r"|(\([0-9]+,[0-9]+\))"
-
-def t_QUOTEDVALUE(t):
- r"(\".*?\")"
- if t.value[0] == '"':
- t.value = t.value[1:-1]
- return t
-
-word_init = "[a-z]|[A-Z]|[0-9]|&|:"
-word_non_init = ",|\.|\'"
-
-t_WORD = "(%s)(%s|%s)*" % (word_init, word_init, word_non_init)
-
-def t_LOGOP(t):
- r'(?i)(AND)|(OR)|(NOT)'
- return t
-
-
-t_ignore = " \t"
-
-def t_error(t):
- raise LexError ('t_error: ' + str (t))
-
-
-from ply import lex
-
-
-
-def relex ():
- global lexer
- mk_quals ()
- lexer = lex.lex()
-
-relex ()
-
-def add_qual (qual_name, val):
- """Add a qualifier definition, and regenerate the lexer."""
- qual_dict[qual_name] = val
- relex ()
-
-from ply import yacc
-
-#if in_setup:
-# import yacc
-#else:
-# from PyZ3950 import yacc
-
-class Node:
- def __init__(self,type,children=None,leaf=None):
- self.type = type
- if children:
- self.children = children
- else:
- self.children = [ ]
- self.leaf = leaf
- def str_child (self, child, depth):
- if isinstance (child, Node): # ugh
- return child.str_depth (depth)
- indent = " " * (4 * depth)
- return indent + str (child) + "\n"
- def str_depth (self, depth): # ugh
- indent = " " * (4 * depth)
- l = ["%s%s %s" % (indent, self.type, self.leaf)]
- l.append ("".join (map (lambda s: self.str_child (s, depth + 1),
- self.children)))
- return "\n".join (l)
- def __str__(self):
- return "\n" + self.str_depth (0)
-
-def p_top (t):
- 'top : cclfind_or_attrset'
- t[0] = t[1]
-
-def p_cclfind_or_attrset_1 (t):
- 'cclfind_or_attrset : cclfind'
- t[0] = t[1]
-
-def p_cclfind_or_attrset_2 (t):
- 'cclfind_or_attrset : ATTRSET LPAREN WORD SLASH cclfind RPAREN'
- t[0] = Node ('attrset', [t[5]], t[3])
-
-def p_ccl_find_1(t):
- 'cclfind : cclfind LOGOP elements'
- t[0] = Node ('op', [t[1],t[3]], t[2])
-
-def p_ccl_find_2(t):
- 'cclfind : elements'
- t[0] = t[1]
-
-def p_elements_1(t):
- 'elements : LPAREN cclfind RPAREN'
- t[0] = t[2]
-
-class QuallistVal:
- def __init__ (self, quallist, val):
- self.quallist = quallist
- self.val = val
- def __str__ (self):
- return "QV: %s %s" % (str(self.quallist),str (self.val))
- def __getitem__ (self, i):
- if i == 0: return self.quallist
- if i == 1: return self.val
- raise IndexError ('QuallistVal err ' + str (i))
-
-def xlate_qualifier (x):
- if x[0] == '(' and x[-1] == ')':
- t = x[1:-1].split (',') # t must be of len 2 b/c of lexer
- return (string.atoi (t[0]), string.atoi (t[1]))
- return qual_dict[(x.upper ())]
-
-
-def p_elements_2 (t):
- 'elements : SET RELOP WORD'
- if t[2] <> '=':
-        raise QuerySyntaxError ("%s %s %s" % (t[1], t[2], t[3]))
- t[0] = Node ('set', leaf = t[3])
-
-def p_elements_3(t):
- 'elements : val'
- t[0] = Node ('relop', QuallistVal (map (xlate_qualifier, default_quals), t[1]), default_relop)
-
-def p_elements_4(t):
- 'elements : quallist RELOP val'
- t[0] = Node ('relop', QuallistVal(map (xlate_qualifier, t[1]),t[3]), t[2])
-
-# XXX p_elements_5 would be quals followed by recursive def'n, not yet implemented
-# XXX p_elements_6 would be quals followed by range, not yet implemented.
-
-def p_quallist_1 (t):
- 'quallist : QUAL'
- t[0] = [t[1]]
-
-def p_quallist_2 (t):
- 'quallist : quallist COMMA QUAL'
- t[0] = t[1] + [t[3]]
-
-def p_val_1(t):
- 'val : QUOTEDVALUE'
- t[0] = t[1]
-
-def p_val_2(t):
- 'val : val WORD'
- t[0] = t[1] + " " + t[2]
-
-def p_val_3(t):
- 'val : WORD'
- t[0] = t[1]
-
-
-# XXX also don't yet handle proximity operator
-
-def p_error(t):
- raise ParseError ('Parse p_error ' + str (t))
-
-precedence = (
- ('left', 'LOGOP'),
- )
-
-yacc.yacc (debug=0, tabmodule = 'PyZ3950_parsetab')
-#yacc.yacc (debug=0, tabpackage = 'PyZ3950', tabmodule='PyZ3950_parsetab')
-
-
-def attrset_to_oid (attrset):
- l = attrset.lower ()
- if _attrdict.has_key (l):
- return _attrdict [l]
- split_l = l.split ('.')
- if split_l[0] == '':
- split_l = oids.Z3950_ATTRS + split_l[1:]
- try:
- intlist = map (string.atoi, split_l)
- except ValueError:
- raise ParseError ('Bad OID: ' + l)
- return asn1.OidVal (intlist)
-
-
-def tree_to_q (ast):
- if ast.type == 'op':
- myrpnRpnOp = z3950.RpnRpnOp ()
- myrpnRpnOp.rpn1 = tree_to_q(ast.children[0])
- myrpnRpnOp.rpn2 = tree_to_q(ast.children[1])
- op = ast.leaf.lower ()
- if op == 'not': op = 'and-not' # CCL spec of 'not' vs. Z39.50 spec of 'and-not'
- myrpnRpnOp.op = (op, None)
- return ('rpnRpnOp', myrpnRpnOp)
- elif ast.type == 'relop':
- # XXX but e.g. LC (http://lcweb.loc.gov/z3950/lcserver.html)
- # doesn't support other relation attributes, either.
- try:
- relattr = relop_to_attrib [ast.leaf]
- except KeyError: # should never happen, how could we have lexed it?
- raise UnimplError (ast.leaf)
- def make_aelt (qual):
- val = ('numeric', qual [1])
- return z3950.AttributeElement (attributeType = qual[0],
- attributeValue = val)
- apt = z3950.AttributesPlusTerm ()
- quallist = ast.children.quallist
- if ast.leaf <> '=':
- quallist.append ((2,relattr)) # 2 is relation attribute
- # see http://www.loc.gov/z3950/agency/markup/13.html ATR.1.1
- apt.attributes = map (make_aelt, quallist)
- apt.term = ('general', ast.children.val) # XXX update for V3?
- return ('op', ('attrTerm', apt))
- elif ast.type == 'set':
- return ('op', ('resultSet', ast.leaf))
-
- raise UnimplError("Bad ast type " + str(ast.type))
-
-def mk_rpn_query (query):
- """Transform a CCL query into an RPN query."""
- # need to copy or create a new lexer because it contains globals
- # PLY 1.0 lacks __copy__
- # PLY 1.3.1-1.5 have __copy__, but it's broken and returns None
- # I sent David Beazley a patch, so future PLY releases will
- # presumably work correctly.
- # Recreating the lexer each time is noticeably slower, so this solution
- # is suboptimal for PLY <= 1.5, but better than being thread-unsafe.
- # Perhaps I should have per-thread lexer instead XXX
- # with example/twisted/test.py set to parse_only, I get 277 parses/sec
- # with fixed PLY, vs. 63 parses/sec with broken PLY, on my 500 MHz PIII
- # laptop.
-
- copiedlexer = None
- if hasattr (lexer, '__copy__'):
- copiedlexer = lexer.__copy__ ()
- if copiedlexer == None:
- copiedlexer = lex.lex ()
- ast = yacc.parse (query, copiedlexer)
- return ast_to_rpn (ast)
-
-def ast_to_rpn (ast):
- if ast.type == 'attrset':
- attrset = attrset_to_oid (ast.leaf)
- ast = ast.children [0]
- else:
- attrset = oids.Z3950_ATTRS_BIB1_ov
- rpnq = z3950.RPNQuery (attributeSet = attrset)
- rpnq.rpn = tree_to_q (ast)
- return ('type_1', rpnq)
-
-def testlex (s):
- lexer.input (s)
- while 1:
- token = lexer.token ()
- if not token:
- break
- print token
-
-def testyacc (s):
- copylex = lexer.__copy__ ()
- ast = yacc.parse (s, lexer = copylex)
- print "AST:", ast
- print "RPN Query:", ast_to_rpn (ast)
-
-if __name__ == '__main__':
- testfn = testyacc
- # testfn = testlex
- testfn ('attrset (BIB1/ au="Gaiman, Neil" or ti=Sandman)')
- while 1:
- s = raw_input ('Query: ')
- if len (s) == 0:
- break
- testfn (s)
-# testyacc ()
-# testlex ()
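The main entry point of the module above was mk_rpn_query, which lexes and parses a CCL string and returns a ('type_1', RPNQuery) tuple with the BIB-1 attribute set unless an ATTRSET clause says otherwise. A minimal sketch, reusing part of the query from the module's own __main__ test:

    from PyZ3950 import ccl

    try:
        rpn_query = ccl.mk_rpn_query ('au="Gaiman, Neil" or ti=Sandman')
        # rpn_query is ('type_1', <z3950.RPNQuery>), suitable for the query
        # field of a Z39.50 searchRequest
    except ccl.QuerySyntaxError, err:
        print "bad CCL query:", err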
diff --git a/python/PyZ3950/charneg.py b/python/PyZ3950/charneg.py
deleted file mode 100644
index 390f45d..0000000
--- a/python/PyZ3950/charneg.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-
-assert (0)
-# XXX shouldn't use, absorbed into z3950_2001.py
-
-#from PyZ3950 import asn1
-import asn1
-
-InitialSet=asn1.SEQUENCE ([('g0',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),1),
- ('g1',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),1),
- ('g2',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),1),
- ('g3',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),1),
- ('c0',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),0),
- ('c1',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),1)])
-
-PrivateCharacterSet=asn1.CHOICE ([('viaOid',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER))),
- ('externallySpecified',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('previouslyAgreedUpon',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-
-LeftAndRight=asn1.SEQUENCE ([('gLeft',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),0),
- ('gRight',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER),1)])
-
-Iso10646=asn1.SEQUENCE ([('collections',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('encodingLevel',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OID),0)])
-
-LanguageCode=asn1.GeneralString
-
-Environment=asn1.CHOICE ([('sevenBit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('eightBit',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-
-Iso2022=asn1.CHOICE ([('originProposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('proposedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment),1),
- ('proposedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER)),0),
- ('proposedInitialSets',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InitialSet)),0),
- ('proposedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight),0)]))),
- ('targetResponse',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('selectedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment),0),
- ('selectedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER)),0),
- ('selectedinitialSet',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InitialSet),0),
- ('selectedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight),0)])))])
-
-TargetResponse=asn1.SEQUENCE ([('selectedCharSets',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet)),
- ('none',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),1),
- ('selectedLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),LanguageCode),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)])
-
-OriginProposal=asn1.SEQUENCE ([('proposedCharSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet))]))),1),
- ('proposedlanguages',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (LanguageCode)),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)])
-CharSetandLanguageNegotiation=asn1.CHOICE ([('proposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),OriginProposal)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),TargetResponse))])
diff --git a/python/PyZ3950/compile_oids.py b/python/PyZ3950/compile_oids.py
deleted file mode 100644
index 5721265..0000000
--- a/python/PyZ3950/compile_oids.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-# Original by Robert Sanderson, modifications by Aaron Lav
-
-import sys
-from PyZ3950 import asn1
-
-inh = file("oids.txt")
-outh = file("oids.py", "w")
-outh.write('from PyZ3950 import asn1\n')
-# from ... to get same globals as others importing asn1
-outh.write('oids = {}\n')
-
-oids = {}
-vars = {}
-
-for line in inh:
- if (not line.isspace()):
- flds = line.split(None)
- name = flds[0]
- number = flds[1]
- if (len(flds) > 2):
- aliasList = flds[2:]
- else:
- aliasList = []
-
- if (number[0] == "."):
-
- # add to previous
- splitname = name.split("_")
- cur = oids
- for n in splitname[:-1]:
- cur = cur[n]
-
- val = cur['val'] + [int(number[1:])]
- oid = asn1.OidVal(val)
-
- cur [splitname[-1]] = {'oid': oid, 'val' : val}
-
- vars[name] = val
- tree = "oids['%s']" % "']['".join (splitname)
- outh.write(tree + " = " + "{'oid': asn1.OidVal(" + str(val) + "), 'val': " + str(val) + "}\n")
-
- else:
- # base
- splitnums = number.split('.')
- numlist = map(int, splitnums)
-
- oids[name] = {}
- oids[name]['oid'] = asn1.OidVal(numlist)
- oids[name]['val'] = numlist
- vars[name] = numlist
-
- outh.write("oids['" + name + "'] = {'oid': asn1.OidVal(" + str(numlist) + "), 'val': " + str(numlist) + "}\n")
-
-
-inh.close()
-
-items = vars.items()
-items.sort()
-for k,v in items:
- outh.write(k + " = " + str(v) + "\n")
- outh.write(k + "_ov = asn1.OidVal(" + str (v) + ")\n")
-
-outh.close()
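As a hedged illustration of what the loop above expects, each non-blank line of oids.txt carries a name and either an absolute dotted OID or a leading-dot value that extends the entry named by everything before the final underscore; both forms end up as a nested dict entry plus a pair of module-level constants in the generated oids.py. The fragment below is hypothetical but follows that parsing logic:

    # oids.txt line                  generated in oids.py
    # Z3950        1.2.840.10003  -> Z3950 = [1, 2, 840, 10003]
    #                                Z3950_ov = asn1.OidVal([1, 2, 840, 10003])
    # Z3950_ATTRS  .3             -> appended to the parent entry ("Z3950"),
    #                                giving Z3950_ATTRS = [1, 2, 840, 10003, 3]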
diff --git a/python/PyZ3950/grs1.py b/python/PyZ3950/grs1.py
deleted file mode 100644
index 326f44b..0000000
--- a/python/PyZ3950/grs1.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-
-"""Utility functions for GRS-1 data"""
-
-from __future__ import nested_scopes
-# XXX still need to tag non-leaf nodes w/ (tagType, tagValue)
-# XXX tagType can be omitted. If so, default either supplied
-# dynamically by tagSet-M or statically spec'd by schema
-
-# from TAG (Z39.50-1995 App 12): tagType 1 is tagSet-M, 2 tagSet-G,
-# 3 locally defined.
-
-class Node:
- """Defined members are:
- tag - tag (always present, except for top node)
- metadata - metadata (opt, seriesOrder only for nonleaf - v. RET.3.2.3 )
- children - list of Node
- leaf - leaf data (children and leaf are mutually exclusive)
- """
- def __init__ (self, **kw):
- self.__dict__.update (kw)
- self.tab_size = 3 # controls str() indentation width
- def str_depth (self, depth):
- l = []
- children = getattr (self, 'children', [])
- leaf = getattr (self, 'leaf', None)
- tag = getattr (self, 'tag', None)
- indent = " " * (self.tab_size * depth)
- if leaf <> None:
- l.append ("%s%s %s" % (
- indent, str (tag), leaf.content))
- else:
- if tag <> None:
- l.append (indent + str (tag))
- meta = getattr (self, 'metadata', None)
- if meta <> None:
- l.append (indent + 'metadata: ' + str (meta))
- l.append ("".join (map (
- lambda n: n.str_depth (depth + 1), children)))
- return "\n".join (l)
- def __str__ (self):
- return "\n" + self.str_depth (-1)
-
-
-def preproc (raw):
- """Transform the raw output of the asn.1 decoder into something
- a bit more programmer-friendly. (This is automatically called
- by the ZOOM API, so you don't need to worry about it unless you're
- using the raw z3950 API.)
- """
- if isinstance (raw, type ([])):
- return Node (children = map (preproc, raw))
- else: # TaggedElement
- kw = {}
- tag = (raw.tagType, raw.tagValue [1])
- # Value [0] is str vs. num indicator
- kw ['tag'] = tag
- meta = getattr (raw, 'metaData', None)
- if meta <> None:
- kw ['metadata'] = meta
- if raw.content[0] == 'subtree':
- return Node (children = map (preproc, raw.content [1]), **kw)
- else:
- # tag and metadata are here redundantly encoded as
- # both attributes of leaf and of Node. Use the Node
- # attribs, I'll try to clean this up sometime.
- return Node (leaf = raw, **kw)
-
-
-
-
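Because preproc above normalizes the decoder output into Node objects, a consumer only needs the members listed in the Node docstring (tag, metadata, children, leaf). A minimal sketch of walking such a tree; rec is assumed to be the raw GRS-1 record taken from a present response:

    from PyZ3950 import grs1

    def walk (node, depth = 0):
        # leaf data and children are mutually exclusive, per the Node docstring
        leaf = getattr (node, 'leaf', None)
        if leaf is not None:
            print "   " * depth, getattr (node, 'tag', None), leaf.content
        else:
            for child in getattr (node, 'children', []):
                walk (child, depth + 1)

    walk (grs1.preproc (rec))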
diff --git a/python/PyZ3950/marc_to_unicode.py b/python/PyZ3950/marc_to_unicode.py
deleted file mode 100644
index b211257..0000000
--- a/python/PyZ3950/marc_to_unicode.py
+++ /dev/null
@@ -1,16434 +0,0 @@
-# auto-generated by parse_marc_codetable.py
-charset_34 = { # Extended Arabic
- 0xa1: (0x6fd, 0),# DOUBLE ALEF WITH HAMZA ABOVE / ARABIC SIGN SINDHI AMPERSAND
- 0xa2: (0x672, 0),# ARABIC LETTER ALEF WITH WAVY HAMZA ABOVE
- 0xa3: (0x673, 0),# ARABIC LETTER ALEF WITH WAVY HAMZA BELOW
- 0xa4: (0x679, 0),# ARABIC LETTER TTEH
- 0xa5: (0x67a, 0),# ARABIC LETTER TTEHEH
- 0xa6: (0x67b, 0),# ARABIC LETTER BBEH
- 0xa7: (0x67c, 0),# ARABIC LETTER TEH WITH RING
- 0xa8: (0x67d, 0),# ARABIC LETTER TEH WITH THREE DOTS ABOVE DOWNWARDS
- 0xa9: (0x67e, 0),# ARABIC LETTER PEH
- 0xaa: (0x67f, 0),# ARABIC LETTER TEHEH
- 0xab: (0x680, 0),# ARABIC LETTER BEHEH
- 0xac: (0x681, 0),# ARABIC LETTER HAH WITH HAMZA ABOVE
- 0xad: (0x682, 0),# ARABIC LETTER HAH WITH TWO ABOVE DOTS VERTICAL ABOVE
- 0xae: (0x683, 0),# ARABIC LETTER NYEH
- 0xaf: (0x684, 0),# ARABIC LETTER DYEH
- 0xb0: (0x685, 0),# ARABIC LETTER HAH WITH THREE DOTS ABOVE
- 0xb1: (0x686, 0),# ARABIC LETTER TCHEH
- 0xb2: (0x6bf, 0),# ARABIC LETTER TCHEH WITH DOT ABOVE
- 0xb3: (0x687, 0),# ARABIC LETTER TCHEHEH
- 0xb4: (0x688, 0),# ARABIC LETTER DDAL
- 0xb5: (0x689, 0),# ARABIC LETTER DAL WITH RING
- 0xb6: (0x68a, 0),# ARABIC LETTER DAL WITH DOT BELOW
- 0xb7: (0x68b, 0),# ARABIC LETTER DAL WITH DOT BELOW AND SMALL TAH
- 0xb8: (0x68c, 0),# ARABIC LETTER DAHAL
- 0xb9: (0x68d, 0),# ARABIC LETTER DDAHAL
- 0xba: (0x68e, 0),# ARABIC LETTER DUL
- 0xbb: (0x68f, 0),# ARABIC LETTER DAL WITH THREE DOTS ABOVE DOWNWARDS
- 0xbc: (0x690, 0),# ARABIC LETTER DAL WITH FOUR DOTS ABOVE
- 0xbd: (0x691, 0),# ARABIC LETTER RREH
- 0xbe: (0x692, 0),# ARABIC LETTER REH WITH SMALL V
- 0xbf: (0x693, 0),# ARABIC LETTER REH WITH RING
- 0xc0: (0x694, 0),# ARABIC LETTER REH WITH DOT BELOW
- 0xc1: (0x695, 0),# ARABIC LETTER REH WITH SMALL V BELOW
- 0xc2: (0x696, 0),# ARABIC LETTER REH WITH DOT BELOW AND DOT ABOVE
- 0xc3: (0x697, 0),# ARABIC LETTER REH WITH TWO DOTS ABOVE
- 0xc4: (0x698, 0),# ARABIC LETTER JEH
- 0xc5: (0x699, 0),# ARABIC LETTER REH WITH FOUR DOTS ABOVE
- 0xc6: (0x69a, 0),# ARABIC LETTER SEEN WITH DOT BELOW AND DOT ABOVE
- 0xc7: (0x69b, 0),# ARABIC LETTER SEEN WITH THREE DOTS BELOW
- 0xc8: (0x69c, 0),# ARABIC LETTER SEEN WITH THREE DOTS BELOW AND THREE DOTS ABOVE
- 0xc9: (0x6fa, 0),# ARABIC LETTER SHEEN WITH DOT BELOW
- 0xca: (0x69d, 0),# ARABIC LETTER SAD WITH TWO DOTS BELOW
- 0xcb: (0x69e, 0),# ARABIC LETTER SAD WITH THREE DOTS ABOVE
- 0xcc: (0x6fb, 0),# ARABIC LETTER DAD WITH DOT BELOW
- 0xcd: (0x69f, 0),# ARABIC LETTER TAH WITH THREE DOTS ABOVE
- 0xce: (0x6a0, 0),# ARABIC LETTER AIN WITH THREE DOTS ABOVE
- 0xcf: (0x6fc, 0),# ARABIC LETTER GHAIN WITH DOT BELOW
- 0xd0: (0x6a1, 0),# ARABIC LETTER DOTLESS FEH
- 0xd1: (0x6a2, 0),# ARABIC LETTER FEH WITH DOT MOVED BELOW
- 0xd2: (0x6a3, 0),# ARABIC LETTER FEH WITH DOT BELOW
- 0xd3: (0x6a4, 0),# ARABIC LETTER VEH
- 0xd4: (0x6a5, 0),# ARABIC LETTER FEH WITH THREE DOTS BELOW
- 0xd5: (0x6a6, 0),# ARABIC LETTER PEHEH
- 0xd6: (0x6a7, 0),# ARABIC LETTER QAF WITH DOT ABOVE
- 0xd7: (0x6a8, 0),# ARABIC LETTER QAF WITH THREE DOTS ABOVE
- 0xd8: (0x6a9, 0),# ARABIC LETTER KEHEH
- 0xd9: (0x6aa, 0),# ARABIC LETTER SWASH KAF
- 0xda: (0x6ab, 0),# ARABIC LETTER KAF WITH RING
- 0xdb: (0x6ac, 0),# ARABIC LETTER KAF WITH DOT ABOVE
- 0xdc: (0x6ad, 0),# ARABIC LETTER NG
- 0xdd: (0x6ae, 0),# ARABIC LETTER KAF WITH THREE DOTS BELOW
- 0xde: (0x6af, 0),# ARABIC LETTER GAF
- 0xdf: (0x6b0, 0),# ARABIC LETTER GAF WITH RING
- 0xe0: (0x6b1, 0),# ARABIC LETTER NGOEH
- 0xe1: (0x6b2, 0),# ARABIC LETTER GAF WITH TWO DOTS BELOW
- 0xe2: (0x6b3, 0),# ARABIC LETTER GUEH
- 0xe3: (0x6b4, 0),# ARABIC LETTER GAF WITH THREE DOTS ABOVE
- 0xe4: (0x6b5, 0),# ARABIC LETTER LAM WITH SMALL V
- 0xe5: (0x6b6, 0),# ARABIC LETTER LAM WITH DOT ABOVE
- 0xe6: (0x6b7, 0),# ARABIC LETTER LAM WITH THREE DOTS ABOVE
- 0xe7: (0x6b8, 0),# ARABIC LETTER LAM WITH THREE DOTS BELOW
- 0xe8: (0x6ba, 0),# ARABIC LETTER NOON GHUNNA
- 0xe9: (0x6bb, 0),# ARABIC LETTER RNOON
- 0xea: (0x6bc, 0),# ARABIC LETTER NOON WITH RING
- 0xeb: (0x6bd, 0),# ARABIC LETTER NOON WITH THREE DOTS ABOVE
- 0xec: (0x6b9, 0),# ARABIC LETTER NOON WITH DOT BELOW
- 0xed: (0x6be, 0),# ARABIC LETTER HEH DOACHASHMEE
- 0xee: (0x6c0, 0),# HEH WITH HAMZA ABOVE / ARABIC LETTER HEH WITH YEH ABOVE
- 0xef: (0x6c4, 0),# ARABIC LETTER WAW WITH RING
- 0xf0: (0x6c5, 0),# KYRGHYZ OE / ARABIC LETTER KIRGHIZ OE
- 0xf1: (0x6c6, 0),# ARABIC LETTER OE
- 0xf2: (0x6ca, 0),# ARABIC LETTER WAW WITH TWO DOTS ABOVE
- 0xf3: (0x6cb, 0),# ARABIC LETTER VE
- 0xf4: (0x6cd, 0),# ARABIC LETTER YEH WITH TAIL
- 0xf5: (0x6ce, 0),# ARABIC LETTER YEH WITH SMALL V
- 0xf6: (0x6d0, 0),# ARABIC LETTER E
- 0xf7: (0x6d2, 0),# ARABIC LETTER YEH BARREE
- 0xf8: (0x6d3, 0),# ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
- 0xfd: (0x306, 1),# SHORT E / COMBINING BREVE
- 0xfe: (0x30c, 1)# SHORT U / COMBINING CARON
-}
-charset_45 = { # Extended Latin (ANSEL)
- 0x88: (0x98, 0),# NON-SORT BEGIN / START OF STRING
- 0x89: (0x9c, 0),# NON-SORT END / STRING TERMINATOR
- 0x8d: (0x200d, 0),# JOINER / ZERO WIDTH JOINER
- 0x8e: (0x200c, 0),# NON-JOINER / ZERO WIDTH NON-JOINER
- 0xa1: (0x141, 0),# UPPERCASE POLISH L / LATIN CAPITAL LETTER L WITH STROKE
- 0xa2: (0xd8, 0),# UPPERCASE SCANDINAVIAN O / LATIN CAPITAL LETTER O WITH STROKE
- 0xa3: (0x110, 0),# UPPERCASE D WITH CROSSBAR / LATIN CAPITAL LETTER D WITH STROKE
- 0xa4: (0xde, 0),# UPPERCASE ICELANDIC THORN / LATIN CAPITAL LETTER THORN (Icelandic)
- 0xa5: (0xc6, 0),# UPPERCASE DIGRAPH AE / LATIN CAPITAL LIGATURE AE
- 0xa6: (0x152, 0),# UPPERCASE DIGRAPH OE / LATIN CAPITAL LIGATURE OE
- 0xa7: (0x2b9, 0),# SOFT SIGN, PRIME / MODIFIER LETTER PRIME
- 0xa8: (0xb7, 0),# MIDDLE DOT
- 0xa9: (0x266d, 0),# MUSIC FLAT SIGN
- 0xaa: (0xae, 0),# PATENT MARK / REGISTERED SIGN
- 0xab: (0xb1, 0),# PLUS OR MINUS / PLUS-MINUS SIGN
- 0xac: (0x1a0, 0),# UPPERCASE O-HOOK / LATIN CAPITAL LETTER O WITH HORN
- 0xad: (0x1af, 0),# UPPERCASE U-HOOK / LATIN CAPITAL LETTER U WITH HORN
- 0xae: (0x2be, 0),# ALIF / MODIFIER LETTER RIGHT HALF RING
- 0xb0: (0x2bb, 0),# AYN / MODIFIER LETTER TURNED COMMA
- 0xb1: (0x142, 0),# LOWERCASE POLISH L / LATIN SMALL LETTER L WITH STROKE
- 0xb2: (0xf8, 0),# LOWERCASE SCANDINAVIAN O / LATIN SMALL LETTER O WITH STROKE
- 0xb3: (0x111, 0),# LOWERCASE D WITH CROSSBAR / LATIN SMALL LETTER D WITH STROKE
- 0xb4: (0xfe, 0),# LOWERCASE ICELANDIC THORN / LATIN SMALL LETTER THORN (Icelandic)
- 0xb5: (0xe6, 0),# LOWERCASE DIGRAPH AE / LATIN SMALL LIGATURE AE
- 0xb6: (0x153, 0),# LOWERCASE DIGRAPH OE / LATIN SMALL LIGATURE OE
- 0xb7: (0x2ba, 0),# HARD SIGN, DOUBLE PRIME / MODIFIER LETTER DOUBLE PRIME
- 0xb8: (0x131, 0),# LOWERCASE TURKISH I / LATIN SMALL LETTER DOTLESS I
- 0xb9: (0xa3, 0),# BRITISH POUND / POUND SIGN
- 0xba: (0xf0, 0),# LOWERCASE ETH / LATIN SMALL LETTER ETH (Icelandic)
- 0xbc: (0x1a1, 0),# LOWERCASE O-HOOK / LATIN SMALL LETTER O WITH HORN
- 0xbd: (0x1b0, 0),# LOWERCASE U-HOOK / LATIN SMALL LETTER U WITH HORN
- 0xc0: (0xb0, 0),# DEGREE SIGN
- 0xc1: (0x2113, 0),# SCRIPT SMALL L
- 0xc2: (0x2117, 0),# SOUND RECORDING COPYRIGHT
- 0xc3: (0xa9, 0),# COPYRIGHT SIGN
- 0xc4: (0x266f, 0),# MUSIC SHARP SIGN
- 0xc5: (0xbf, 0),# INVERTED QUESTION MARK
- 0xc6: (0xa1, 0),# INVERTED EXCLAMATION MARK
- 0xe0: (0x309, 1),# PSEUDO QUESTION MARK / COMBINING HOOK ABOVE
- 0xe1: (0x300, 1),# GRAVE / COMBINING GRAVE ACCENT (Varia)
- 0xe2: (0x301, 1),# ACUTE / COMBINING ACUTE ACCENT (Oxia)
- 0xe3: (0x302, 1),# CIRCUMFLEX / COMBINING CIRCUMFLEX ACCENT
- 0xe4: (0x303, 1),# TILDE / COMBINING TILDE
- 0xe5: (0x304, 1),# MACRON / COMBINING MACRON
- 0xe6: (0x306, 1),# BREVE / COMBINING BREVE (Vrachy)
- 0xe7: (0x307, 1),# SUPERIOR DOT / COMBINING DOT ABOVE
- 0xe8: (0x308, 1),# UMLAUT, DIAERESIS / COMBINING DIAERESIS (Dialytika)
- 0xe9: (0x30c, 1),# HACEK / COMBINING CARON
- 0xea: (0x30a, 1),# CIRCLE ABOVE, ANGSTROM / COMBINING RING ABOVE
- 0xeb: (0xfe20, 1),# LIGATURE, FIRST HALF / COMBINING LIGATURE LEFT HALF
- 0xec: (0xfe21, 1),# LIGATURE, SECOND HALF / COMBINING LIGATURE RIGHT HALF
- 0xed: (0x315, 1),# HIGH COMMA, OFF CENTER / COMBINING COMMA ABOVE RIGHT
- 0xee: (0x30b, 1),# DOUBLE ACUTE / COMBINING DOUBLE ACUTE ACCENT
- 0xef: (0x310, 1),# CANDRABINDU / COMBINING CANDRABINDU
- 0xf0: (0x327, 1),# CEDILLA / COMBINING CEDILLA
- 0xf1: (0x328, 1),# RIGHT HOOK, OGONEK / COMBINING OGONEK
- 0xf2: (0x323, 1),# DOT BELOW / COMBINING DOT BELOW
- 0xf3: (0x324, 1),# DOUBLE DOT BELOW / COMBINING DIAERESIS BELOW
- 0xf4: (0x325, 1),# CIRCLE BELOW / COMBINING RING BELOW
- 0xf5: (0x333, 1),# DOUBLE UNDERSCORE / COMBINING DOUBLE LOW LINE
- 0xf6: (0x332, 1),# UNDERSCORE / COMBINING LOW LINE
- 0xf7: (0x326, 1),# LEFT HOOK (COMMA BELOW) / COMBINING COMMA BELOW
- 0xf8: (0x31c, 1),# RIGHT CEDILLA / COMBINING LEFT HALF RING BELOW
- 0xf9: (0x32e, 1),# UPADHMANIYA / COMBINING BREVE BELOW
- 0xfa: (0xfe22, 1),# DOUBLE TILDE, FIRST HALF / COMBINING DOUBLE TILDE LEFT HALF
- 0xfb: (0xfe23, 1),# DOUBLE TILDE, SECOND HALF / COMBINING DOUBLE TILDE RIGHT HALF
- 0xfe: (0x313, 1)# HIGH COMMA, CENTERED / COMBINING COMMA ABOVE (Psili)
-}
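The tables in this module map a MARC-8 byte to a (unicode_codepoint, is_combining) pair. In MARC-8/ANSEL the combining characters precede the letter they modify, while in Unicode they follow it, so a converter has to buffer them. A minimal sketch for plain single-byte ANSEL input, ignoring the G0/G1 escape handling a real converter would need; only charset_45 comes from this module:

    from PyZ3950 import marc_to_unicode

    def ansel_to_unicode (s):
        out, pending = [], []
        for ch in s:
            code, combining = marc_to_unicode.charset_45.get (ord (ch), (ord (ch), 0))
            if combining:
                pending.append (unichr (code))  # hold until the base letter arrives
            else:
                out.append (unichr (code))
                out.extend (pending)            # emit buffered diacritics after the base
                pending = []
        return u''.join (out + pending)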
-charset_33 = { # Basic Arabic
- 0x21: (0x21, 0),# EXCLAMATION MARK
- 0x22: (0x22, 0),# QUOTATION MARK
- 0x23: (0x23, 0),# NUMBER SIGN
- 0x24: (0x24, 0),# DOLLAR SIGN
- 0x25: (0x66a, 0),# PERCENT SIGN / ARABIC PERCENT SIGN
- 0x26: (0x26, 0),# AMPERSAND
- 0x27: (0x27, 0),# APOSTROPHE
- 0x28: (0x28, 0),# OPENING PARENTHESIS / LEFT PARENTHESIS
- 0x29: (0x29, 0),# CLOSING PARENTHESIS / RIGHT PARENTHESIS
- 0x2a: (0x66d, 0),# ASTERISK / ARABIC FIVE POINTED STAR
- 0x2b: (0x2b, 0),# PLUS SIGN
- 0x2c: (0x60c, 0),# ARABIC COMMA
- 0x2d: (0x2d, 0),# HYPHEN-MINUS
- 0x2e: (0x2e, 0),# PERIOD, DECIMAL POINT / FULL STOP
- 0x2f: (0x2f, 0),# SLASH / SOLIDUS
- 0x30: (0x660, 0),# ARABIC-INDIC DIGIT ZERO
- 0x31: (0x661, 0),# ARABIC-INDIC DIGIT ONE
- 0x32: (0x662, 0),# ARABIC-INDIC DIGIT TWO
- 0x33: (0x663, 0),# ARABIC-INDIC DIGIT THREE
- 0x34: (0x664, 0),# ARABIC-INDIC DIGIT FOUR
- 0x35: (0x665, 0),# ARABIC-INDIC DIGIT FIVE
- 0x36: (0x666, 0),# ARABIC-INDIC DIGIT SIX
- 0x37: (0x667, 0),# ARABIC-INDIC DIGIT SEVEN
- 0x38: (0x668, 0),# ARABIC-INDIC DIGIT EIGHT
- 0x39: (0x669, 0),# ARABIC-INDIC DIGIT NINE
- 0x3a: (0x3a, 0),# COLON
- 0x3b: (0x61b, 0),# ARABIC SEMICOLON
- 0x3c: (0x3c, 0),# LESS-THAN SIGN
- 0x3d: (0x3d, 0),# EQUALS SIGN
- 0x3e: (0x3e, 0),# GREATER-THAN SIGN
- 0x3f: (0x61f, 0),# ARABIC QUESTION MARK
- 0x41: (0x621, 0),# HAMZAH / ARABIC LETTER HAMZA
- 0x42: (0x622, 0),# ARABIC LETTER ALEF WITH MADDA ABOVE
- 0x43: (0x623, 0),# ARABIC LETTER ALEF WITH HAMZA ABOVE
- 0x44: (0x624, 0),# ARABIC LETTER WAW WITH HAMZA ABOVE
- 0x45: (0x625, 0),# ARABIC LETTER ALEF WITH HAMZA BELOW
- 0x46: (0x626, 0),# ARABIC LETTER YEH WITH HAMZA ABOVE
- 0x47: (0x627, 0),# ARABIC LETTER ALEF
- 0x48: (0x628, 0),# ARABIC LETTER BEH
- 0x49: (0x629, 0),# ARABIC LETTER TEH MARBUTA
- 0x4a: (0x62a, 0),# ARABIC LETTER TEH
- 0x4b: (0x62b, 0),# ARABIC LETTER THEH
- 0x4c: (0x62c, 0),# ARABIC LETTER JEEM
- 0x4d: (0x62d, 0),# ARABIC LETTER HAH
- 0x4e: (0x62e, 0),# ARABIC LETTER KHAH
- 0x4f: (0x62f, 0),# ARABIC LETTER DAL
- 0x50: (0x630, 0),# ARABIC LETTER THAL
- 0x51: (0x631, 0),# ARABIC LETTER REH
- 0x52: (0x632, 0),# ARABIC LETTER ZAIN
- 0x53: (0x633, 0),# ARABIC LETTER SEEN
- 0x54: (0x634, 0),# ARABIC LETTER SHEEN
- 0x55: (0x635, 0),# ARABIC LETTER SAD
- 0x56: (0x636, 0),# ARABIC LETTER DAD
- 0x57: (0x637, 0),# ARABIC LETTER TAH
- 0x58: (0x638, 0),# ARABIC LETTER ZAH
- 0x59: (0x639, 0),# ARABIC LETTER AIN
- 0x5a: (0x63a, 0),# ARABIC LETTER GHAIN
- 0x5b: (0x5b, 0),# OPENING SQUARE BRACKET / LEFT SQUARE BRACKET
- 0x5d: (0x5d, 0),# CLOSING SQUARE BRACKET / RIGHT SQUARE BRACKET
- 0x60: (0x640, 0),# ARABIC TATWEEL
- 0x61: (0x641, 0),# ARABIC LETTER FEH
- 0x62: (0x642, 0),# ARABIC LETTER QAF
- 0x63: (0x643, 0),# ARABIC LETTER KAF
- 0x64: (0x644, 0),# ARABIC LETTER LAM
- 0x65: (0x645, 0),# ARABIC LETTER MEEM
- 0x66: (0x646, 0),# ARABIC LETTER NOON
- 0x67: (0x647, 0),# ARABIC LETTER HEH
- 0x68: (0x648, 0),# ARABIC LETTER WAW
- 0x69: (0x649, 0),# ARABIC LETTER ALEF MAKSURA
- 0x6a: (0x64a, 0),# ARABIC LETTER YEH
- 0x6b: (0x64b, 1),# ARABIC FATHATAN
- 0x6c: (0x64c, 1),# ARABIC DAMMATAN
- 0x6d: (0x64d, 1),# ARABIC KASRATAN
- 0x6e: (0x64e, 1),# ARABIC FATHA
- 0x6f: (0x64f, 1),# ARABIC DAMMA
- 0x70: (0x650, 1),# ARABIC KASRA
- 0x71: (0x651, 1),# ARABIC SHADDA
- 0x72: (0x652, 1),# ARABIC SUKUN
- 0x73: (0x671, 0),# ARABIC LETTER ALEF WASLA
- 0x74: (0x670, 0),# ARABIC LETTER SUPERSCRIPT ALEF
- 0x78: (0x66c, 0),# ARABIC THOUSANDS SEPARATOR
- 0x79: (0x201d, 0),# RIGHT DOUBLE QUOTATION MARK
- 0x7a: (0x201c, 0)# LEFT DOUBLE QUOTATION MARK
-}
-charset_32 = { # Basic Hebrew
- 0x21: (0x21, 0),# EXCLAMATION MARK
- 0x22: (0x5f4, 0),# QUOTATION MARK, GERSHAYIM / HEBREW PUNCTUATION GERSHAYIM
- 0x23: (0x23, 0),# NUMBER SIGN
- 0x24: (0x24, 0),# DOLLAR SIGN
- 0x25: (0x25, 0),# PERCENT SIGN
- 0x26: (0x26, 0),# AMPERSAND
- 0x27: (0x5f3, 0),# APOSTROPHE, GERESH / HEBREW PUNCTUATION GERESH
- 0x28: (0x28, 0),# OPENING PARENTHESIS / LEFT PARENTHESIS
- 0x29: (0x29, 0),# CLOSING PARENTHESIS / RIGHT PARENTHESIS
- 0x2a: (0x2a, 0),# ASTERISK
- 0x2b: (0x2b, 0),# PLUS SIGN
- 0x2c: (0x2c, 0),# COMMA
- 0x2d: (0x5be, 0),# HYPHEN-MINUS, MAKEF / HEBREW PUNCTUATION MAQAF
- 0x2e: (0x2e, 0),# PERIOD, DECIMAL POINT / FULL STOP
- 0x2f: (0x2f, 0),# SLASH / SOLIDUS
- 0x30: (0x30, 0),# DIGIT ZERO
- 0x31: (0x31, 0),# DIGIT ONE
- 0x32: (0x32, 0),# DIGIT TWO
- 0x33: (0x33, 0),# DIGIT THREE
- 0x34: (0x34, 0),# DIGIT FOUR
- 0x35: (0x35, 0),# DIGIT FIVE
- 0x36: (0x36, 0),# DIGIT SIX
- 0x37: (0x37, 0),# DIGIT SEVEN
- 0x38: (0x38, 0),# DIGIT EIGHT
- 0x39: (0x39, 0),# DIGIT NINE
- 0x3a: (0x3a, 0),# COLON
- 0x3b: (0x3b, 0),# SEMICOLON
- 0x3c: (0x3c, 0),# LESS-THAN SIGN
- 0x3d: (0x3d, 0),# EQUALS SIGN
- 0x3e: (0x3e, 0),# GREATER-THAN SIGN
- 0x3f: (0x3f, 0),# QUESTION MARK
- 0x40: (0x5b7, 1),# HEBREW POINT PATAH
- 0x41: (0x5b8, 1),# KAMATS / HEBREW POINT QAMATS
- 0x42: (0x5b6, 1),# HEBREW POINT SEGOL
- 0x43: (0x5b5, 1),# TSEREH / HEBREW POINT TSERE
- 0x44: (0x5b4, 1),# HIRIK / HEBREW POINT HIRIQ
- 0x45: (0x5b9, 1),# HOLAM, LEFT SIN DOT / HEBREW POINT HOLAM
- 0x46: (0x5bb, 1),# KUBUTS / HEBREW POINT QUBUTS
- 0x47: (0x5b0, 1),# HEBREW POINT SHEVA
- 0x48: (0x5b2, 1),# HEBREW POINT HATAF PATAH
- 0x49: (0x5b3, 1),# HATAF KAMATS / HEBREW POINT HATAF QAMATS
- 0x4a: (0x5b1, 1),# HEBREW POINT HATAF SEGOL
- 0x4b: (0x5bc, 1),# HEBREW POINT DAGESH OR MAPIQ
- 0x4c: (0x5bf, 1),# RAFEH / HEBREW POINT RAFE
- 0x4d: (0x5c1, 1),# RIGHT SHIN DOT / HEBREW POINT SHIN DOT
- 0x4e: (0xfb1e, 1),# VARIKA / HEBREW POINT JUDEO-SPANISH VARIKA
- 0x5b: (0x5b, 0),# OPENING SQUARE BRACKET / LEFT SQUARE BRACKET
- 0x5d: (0x5d, 0),# CLOSING SQUARE BRACKET / RIGHT SQUARE BRACKET
- 0x60: (0x5d0, 0),# HEBREW LETTER ALEF
- 0x61: (0x5d1, 0),# HEBREW LETTER BET
- 0x62: (0x5d2, 0),# HEBREW LETTER GIMEL
- 0x63: (0x5d3, 0),# HEBREW LETTER DALET
- 0x64: (0x5d4, 0),# HEBREW LETTER HE
- 0x65: (0x5d5, 0),# HEBREW LETTER VAV
- 0x66: (0x5d6, 0),# HEBREW LETTER ZAYIN
- 0x67: (0x5d7, 0),# HEBREW LETTER HET
- 0x68: (0x5d8, 0),# HEBREW LETTER TET
- 0x69: (0x5d9, 0),# HEBREW LETTER YOD
- 0x6a: (0x5da, 0),# HEBREW LETTER FINAL KAF
- 0x6b: (0x5db, 0),# HEBREW LETTER KAF
- 0x6c: (0x5dc, 0),# HEBREW LETTER LAMED
- 0x6d: (0x5dd, 0),# HEBREW LETTER FINAL MEM
- 0x6e: (0x5de, 0),# HEBREW LETTER MEM
- 0x6f: (0x5df, 0),# HEBREW LETTER FINAL NUN
- 0x70: (0x5e0, 0),# HEBREW LETTER NUN
- 0x71: (0x5e1, 0),# HEBREW LETTER SAMEKH
- 0x72: (0x5e2, 0),# HEBREW LETTER AYIN
- 0x73: (0x5e3, 0),# HEBREW LETTER FINAL PE
- 0x74: (0x5e4, 0),# HEBREW LETTER PE
- 0x75: (0x5e5, 0),# HEBREW LETTER FINAL TSADI
- 0x76: (0x5e6, 0),# HEBREW LETTER TSADI
- 0x77: (0x5e7, 0),# HEBREW LETTER QOF / KOF
- 0x78: (0x5e8, 0),# HEBREW LETTER RESH
- 0x79: (0x5e9, 0),# HEBREW LETTER SHIN
- 0x7a: (0x5ea, 0),# HEBREW LETTER TAV
- 0x7b: (0x5f0, 0),# HEBREW LIGATURE YIDDISH DOUBLE VAV / TSVEY VOVN
- 0x7c: (0x5f1, 0),# HEBREW LIGATURE YIDDISH VAV YOD / VOV YUD
- 0x7d: (0x5f2, 0)# HEBREW LIGATURE YIDDISH DOUBLE YOD / TSVEY YUDN
-}
-charset_31 = { # Chinese, Japanese, Korean (EACC)
- 0x215556: (0x8461, 0),# East Asian ideograph
- 0x6f5557: (0xc5d1, 0),# Korean hangul
- 0x456324: (0x9f61, 0),# East Asian ideograph
- 0x6f5140: (0xbccf, 0),# Korean hangul
- 0x6f5558: (0xc5d4, 0),# Korean hangul
- 0x213536: (0x53ec, 0),# East Asian ideograph
- 0x6f5d3c: (0xd64b, 0),# Korean hangul
- 0x215559: (0x8438, 0),# East Asian ideograph
- 0x2d555a: (0x8386, 0),# East Asian ideograph
- 0x6f5c7c: (0xd5d0, 0),# Korean hangul
- 0x295b60: (0x9e55, 0),# East Asian ideograph
- 0x2d555b: (0x8385, 0),# East Asian ideograph
- 0x6f555c: (0xc5e3, 0),# Korean hangul
- 0x6f5141: (0xbcd0, 0),# Korean hangul
- 0x27555d: (0x5e2d, 0),# East Asian ideograph
- 0x23555e: (0x9b1f, 0),# East Asian ideograph
- 0x333f24: (0x7718, 0),# East Asian ideograph
- 0x6f555f: (0xc5ed, 0),# Korean hangul
- 0x6f4f5c: (0xb9ac, 0),# Korean hangul
- 0x6f5560: (0xc5ee, 0),# Korean hangul
- 0x6f4e21: (0xb540, 0),# Korean hangul
- 0x4b4146: (0x6362, 0),# East Asian ideograph
- 0x235031: (0x9874, 0),# East Asian ideograph
- 0x225561: (0x7273, 0),# East Asian ideograph
- 0x274257: (0x6bd9, 0),# East Asian ideograph
- 0x295c28: (0x9e58, 0),# East Asian ideograph
- 0x6f5142: (0xbcd1, 0),# Korean hangul
- 0x6f5562: (0xc5f4, 0),# Korean hangul
- 0x213727: (0x5616, 0),# East Asian ideograph
- 0x215563: (0x84c0, 0),# East Asian ideograph
- 0x215564: (0x8499, 0),# East Asian ideograph
- 0x6f562e: (0xc679, 0),# Korean hangul
- 0x2d4674: (0x51b2, 0),# East Asian ideograph
- 0x6f5565: (0xc5fc, 0),# Korean hangul
- 0x4b4147: (0x633f, 0),# East Asian ideograph
- 0x215566: (0x8490, 0),# East Asian ideograph
- 0x6f5143: (0xbcd2, 0),# Korean hangul
- 0x275567: (0x82cd, 0),# East Asian ideograph
- 0x215568: (0x853d, 0),# East Asian ideograph
- 0x6f5569: (0xc600, 0),# Korean hangul
- 0x27314c: (0x6765, 0),# East Asian ideograph
- 0x276071: (0x517b, 0),# East Asian ideograph
- 0x6f556a: (0xc601, 0),# Korean hangul
- 0x33325d: (0x4fa1, 0),# East Asian ideograph
- 0x6f5839: (0xc9dd, 0),# Korean hangul
- 0x2d6b5f: (0x5273, 0),# East Asian ideograph
- 0x21556b: (0x851a, 0),# East Asian ideograph
- 0x6f5144: (0xbcd5, 0),# Korean hangul
- 0x27556c: (0x83b2, 0),# East Asian ideograph
- 0x22556d: (0x727c, 0),# East Asian ideograph
- 0x21556e: (0x852d, 0),# East Asian ideograph
- 0x6f556f: (0xc610, 0),# Korean hangul
- 0x295721: (0x9c86, 0),# East Asian ideograph
- 0x466074: (0x76b2, 0),# East Asian ideograph
- 0x333529: (0x53dc, 0),# East Asian ideograph
- 0x6f5145: (0xbcf4, 0),# Korean hangul
- 0x225571: (0x727f, 0),# East Asian ideograph
- 0x225d42: (0x7521, 0),# East Asian ideograph
- 0x275949: (0x8a89, 0),# East Asian ideograph
- 0x6f5037: (0xba84, 0),# Korean hangul
- 0x215573: (0x8514, 0),# East Asian ideograph
- 0x215574: (0x84ec, 0),# East Asian ideograph
- 0x4c2330: (0x5c53, 0),# East Asian ideograph
- 0x69656e: (0x7dd5, 0),# East Asian ideograph
- 0x6f5146: (0xbcf5, 0),# Korean hangul
- 0x215576: (0x8569, 0),# East Asian ideograph
- 0x282441: (0x5c98, 0),# East Asian ideograph
- 0x234021: (0x9132, 0),# East Asian ideograph
- 0x4d4176: (0x91db, 0),# East Asian ideograph
- 0x335577: (0x8602, 0),# East Asian ideograph
- 0x394022: (0x6443, 0),# East Asian ideograph
- 0x6f5578: (0xc634, 0),# Korean hangul
- 0x6f4f5d: (0xb9ad, 0),# Korean hangul
- 0x2d3749: (0x5650, 0),# East Asian ideograph
- 0x287139: (0x7ee8, 0),# East Asian ideograph
- 0x234024: (0x9126, 0),# East Asian ideograph
- 0x6f557a: (0xc637, 0),# Korean hangul
- 0x213c35: (0x5dde, 0),# East Asian ideograph
- 0x6f5147: (0xbcf6, 0),# Korean hangul
- 0x6f557b: (0xc639, 0),# Korean hangul
- 0x215945: (0x8b66, 0),# East Asian ideograph
- 0x21372c: (0x5606, 0),# East Asian ideograph (variant of 4B372C which maps to 5606)
- 0x27557c: (0x829c, 0),# East Asian ideograph
- 0x224027: (0x69bf, 0),# East Asian ideograph
- 0x23557d: (0x9b34, 0),# East Asian ideograph
- 0x6f557e: (0xc640, 0),# Korean hangul
- 0x2d4029: (0x5214, 0),# East Asian ideograph
- 0x6f5148: (0xbcf8, 0),# Korean hangul
- 0x23402b: (0x9134, 0),# East Asian ideograph
- 0x21372d: (0x5609, 0),# East Asian ideograph
- 0x23402c: (0x9136, 0),# East Asian ideograph
- 0x6f5876: (0xcc14, 0),# Korean hangul
- 0x22402d: (0x69a3, 0),# East Asian ideograph
- 0x22507c: (0x70dc, 0),# East Asian ideograph
- 0x22402e: (0x69a4, 0),# East Asian ideograph
- 0x6f5149: (0xbcfc, 0),# Korean hangul
- 0x6f575f: (0xc8b0, 0),# Korean hangul
- 0x295a75: (0x9e41, 0),# East Asian ideograph
- 0x4b525a: (0x7ffa, 0),# East Asian ideograph
- 0x234031: (0x913a, 0),# East Asian ideograph
- 0x2d383f: (0x575a, 0),# East Asian ideograph
- 0x294371: (0x94fd, 0),# East Asian ideograph
- 0x234032: (0x913b, 0),# East Asian ideograph
- 0x6f5c27: (0xd38c, 0),# Korean hangul
- 0x224034: (0x69d4, 0),# East Asian ideograph
- 0x6f514a: (0xbd04, 0),# Korean hangul
- 0x335f73: (0x9759, 0),# East Asian ideograph
- 0x6f4c33: (0xb17c, 0),# Korean hangul
- 0x4b525b: (0x66dc, 0),# East Asian ideograph (variant of 39525B which maps to 66DC)
- 0x2e7c2e: (0x831c, 0),# East Asian ideograph
- 0x224038: (0x69c3, 0),# East Asian ideograph
- 0x6f5b4d: (0xd280, 0),# Korean hangul
- 0x2d4039: (0x67c6, 0),# East Asian ideograph
- 0x6f514b: (0xbd05, 0),# Korean hangul
- 0x276036: (0x54cd, 0),# East Asian ideograph
- 0x395477: (0x85a6, 0),# East Asian ideograph
- 0x213730: (0x5617, 0),# East Asian ideograph
- 0x4b525c: (0x8002, 0),# East Asian ideograph
- 0x23403b: (0x9143, 0),# East Asian ideograph
- 0x295b6b: (0x9e57, 0),# East Asian ideograph
- 0x2d5963: (0x8c98, 0),# East Asian ideograph
- 0x224c3c: (0x6f3b, 0),# East Asian ideograph
- 0x22403e: (0x6a11, 0),# East Asian ideograph
- 0x6f5a73: (0xd0c8, 0),# Korean hangul
- 0x6f514c: (0xbd07, 0),# Korean hangul
- 0x213b74: (0x5cfd, 0),# East Asian ideograph
- 0x23403f: (0x9145, 0),# East Asian ideograph
- 0x21594a: (0x8b80, 0),# East Asian ideograph
- 0x213731: (0x560d, 0),# East Asian ideograph
- 0x225d49: (0x752f, 0),# East Asian ideograph
- 0x234040: (0x9148, 0),# East Asian ideograph
- 0x224041: (0x6a00, 0),# East Asian ideograph
- 0x295b6c: (0x9e4b, 0),# East Asian ideograph
- 0x234042: (0x9150, 0),# East Asian ideograph
- 0x234043: (0x914e, 0),# East Asian ideograph
- 0x6f514d: (0xbd09, 0),# Korean hangul
- 0x213b75: (0x5ced, 0),# East Asian ideograph
- 0x394a60: (0x9ae6, 0),# East Asian ideograph
- 0x213732: (0x562e, 0),# East Asian ideograph
- 0x334045: (0x629b, 0),# East Asian ideograph
- 0x292a34: (0x86ac, 0),# East Asian ideograph
- 0x224046: (0x69e6, 0),# East Asian ideograph
- 0x2f2d79: (0x88b5, 0),# East Asian ideograph
- 0x234048: (0x9159, 0),# East Asian ideograph
- 0x6f514e: (0xbd10, 0),# Korean hangul
- 0x276039: (0x9877, 0),# East Asian ideograph
- 0x234049: (0x915c, 0),# East Asian ideograph
- 0x33494a: (0x70d6, 0),# East Asian ideograph
- 0x294372: (0x9513, 0),# East Asian ideograph
- 0x22404b: (0x6a0b, 0),# East Asian ideograph
- 0x22404c: (0x69e5, 0),# East Asian ideograph
- 0x2e2f7a: (0x6738, 0),# East Asian ideograph
- 0x22404d: (0x69e9, 0),# East Asian ideograph
- 0x6f514f: (0xbd14, 0),# Korean hangul
- 0x27603a: (0x9879, 0),# East Asian ideograph
- 0x2d4f29: (0x9f9d, 0),# East Asian ideograph
- 0x213734: (0x564e, 0),# East Asian ideograph
- 0x2d404f: (0x6294, 0),# East Asian ideograph
- 0x6f5039: (0xba87, 0),# Korean hangul
- 0x224050: (0x69fc, 0),# East Asian ideograph
- 0x6f5b4e: (0xd284, 0),# Korean hangul
- 0x234052: (0x915a, 0),# East Asian ideograph
- 0x6f5150: (0xbd24, 0),# Korean hangul
- 0x213b78: (0x5cf0, 0),# East Asian ideograph
- 0x234053: (0x9161, 0),# East Asian ideograph
- 0x6f596b: (0xce6b, 0),# Korean hangul
- 0x225d4d: (0x753a, 0),# East Asian ideograph
- 0x224054: (0x6a17, 0),# East Asian ideograph
- 0x4b4c3c: (0x7573, 0),# East Asian ideograph
- 0x224056: (0x69e7, 0),# East Asian ideograph
- 0x224057: (0x69eb, 0),# East Asian ideograph
- 0x6f5151: (0xbd48, 0),# Korean hangul
- 0x213b79: (0x5cf6, 0),# East Asian ideograph
- 0x294621: (0x9553, 0),# East Asian ideograph
- 0x4b6266: (0x9ed2, 0),# East Asian ideograph
- 0x6f5631: (0xc688, 0),# Korean hangul
- 0x22405b: (0x69f1, 0),# East Asian ideograph
- 0x6f5152: (0xbd49, 0),# Korean hangul
- 0x27603d: (0x9884, 0),# East Asian ideograph
- 0x22405e: (0x6a2b, 0),# East Asian ideograph
- 0x29444d: (0x952b, 0),# East Asian ideograph
- 0x22405f: (0x69ff, 0),# East Asian ideograph
- 0x224060: (0x6a20, 0),# East Asian ideograph
- 0x234061: (0x916f, 0),# East Asian ideograph
- 0x6f5153: (0xbd4c, 0),# Korean hangul
- 0x27603e: (0x987c, 0),# East Asian ideograph
- 0x234062: (0x916e, 0),# East Asian ideograph
- 0x275d60: (0x94e8, 0),# East Asian ideograph
- 0x6f5a71: (0xd0c1, 0),# Korean hangul
- 0x4b4e21: (0x7b36, 0),# East Asian ideograph
- 0x224064: (0x69ed, 0),# East Asian ideograph
- 0x28355b: (0x6484, 0),# East Asian ideograph
- 0x6f547d: (0xc545, 0),# Korean hangul
- 0x234066: (0x917a, 0),# East Asian ideograph
- 0x6f582e: (0xc9ca, 0),# Korean hangul
- 0x6f5154: (0xbd50, 0),# Korean hangul
- 0x27603f: (0x987d, 0),# East Asian ideograph
- 0x224067: (0x6a1b, 0),# East Asian ideograph
- 0x213739: (0x5657, 0),# East Asian ideograph
- 0x2d7143: (0x55e2, 0),# East Asian ideograph
- 0x234068: (0x9172, 0),# East Asian ideograph
- 0x2d5a63: (0x8de5, 0),# East Asian ideograph
- 0x2d384a: (0x5872, 0),# East Asian ideograph
- 0x234069: (0x9179, 0),# East Asian ideograph
- 0x27632c: (0x9f9a, 0),# East Asian ideograph
- 0x23406a: (0x9176, 0),# East Asian ideograph
- 0x4c233f: (0x5c76, 0),# East Asian ideograph
- 0x23406b: (0x9174, 0),# East Asian ideograph
- 0x6f5155: (0xbd58, 0),# Korean hangul
- 0x213b7d: (0x5d1b, 0),# East Asian ideograph
- 0x276040: (0x987f, 0),# East Asian ideograph
- 0x23406c: (0x9173, 0),# East Asian ideograph
- 0x23406d: (0x9185, 0),# East Asian ideograph
- 0x22406e: (0x6a18, 0),# East Asian ideograph
- 0x23406f: (0x9182, 0),# East Asian ideograph
- 0x4b5f30: (0x9686, 0),# East Asian ideograph (variant of 215F30 which maps to 9686)
- 0x234070: (0x918a, 0),# East Asian ideograph
- 0x6f5a75: (0xd0d0, 0),# Korean hangul
- 0x213c38: (0x5de8, 0),# East Asian ideograph
- 0x6f5156: (0xbd59, 0),# Korean hangul
- 0x276041: (0x9881, 0),# East Asian ideograph
- 0x234071: (0x9186, 0),# East Asian ideograph
- 0x21373b: (0x5653, 0),# East Asian ideograph
- 0x234072: (0x918c, 0),# East Asian ideograph
- 0x234073: (0x9181, 0),# East Asian ideograph
- 0x224075: (0x6a0c, 0),# East Asian ideograph
- 0x6f5157: (0xbd64, 0),# Korean hangul
- 0x224076: (0x6a0f, 0),# East Asian ideograph
- 0x21373c: (0x563f, 0),# East Asian ideograph
- 0x4b3f74: (0x623b, 0),# East Asian ideograph
- 0x282f43: (0x6206, 0),# East Asian ideograph
- 0x454738: (0x6cfa, 0),# East Asian ideograph
- 0x275e6a: (0x9610, 0),# East Asian ideograph
- 0x274f36: (0x5e0c, 0),# East Asian ideograph
- 0x6f5e21: (0xd79d, 0),# Korean hangul
- 0x232b24: (0x876a, 0),# East Asian ideograph
- 0x212b25: (0x300c, 0),# Ideographic left corner bracket
- 0x2f5158: (0x7cc7, 0),# East Asian ideograph
- 0x23407b: (0x9191, 0),# East Asian ideograph
- 0x225d55: (0x754a, 0),# East Asian ideograph
- 0x294628: (0x9552, 0),# East Asian ideograph
- 0x4b3050: (0x4e8a, 0),# East Asian ideograph
- 0x22407c: (0x69ee, 0),# East Asian ideograph
- 0x232b27: (0x874e, 0),# East Asian ideograph
- 0x695b37: (0x6737, 0),# East Asian ideograph
- 0x23407d: (0x9190, 0),# East Asian ideograph
- 0x23407e: (0x918e, 0),# East Asian ideograph
- 0x4c6775: (0x7962, 0),# Unrelated variant of EACC 293032 which maps to 7962
- 0x6f5159: (0xbd81, 0),# Korean hangul
- 0x21373e: (0x5637, 0),# East Asian ideograph
- 0x294629: (0x84e5, 0),# East Asian ideograph
- 0x4b3051: (0x5f10, 0),# East Asian ideograph
- 0x6f503b: (0xbaa9, 0),# Korean hangul
- 0x222b2d: (0x602b, 0),# East Asian ideograph
- 0x6f5b50: (0xd290, 0),# Korean hangul
- 0x455847: (0x8a25, 0),# East Asian ideograph (variant of 215847 which maps to 8A25)
- 0x396b2f: (0x521f, 0),# East Asian ideograph
- 0x6f515a: (0xbd84, 0),# Korean hangul
- 0x6f5861: (0xcad9, 0),# Korean hangul
- 0x222b30: (0x6019, 0),# East Asian ideograph
- 0x225d57: (0x754e, 0),# East Asian ideograph
- 0x4b3052: (0x6275, 0),# East Asian ideograph
- 0x212b31: (0xff3b, 0),# Ideographic left square bracket
- 0x4b3749: (0x5668, 0),# East Asian ideograph (variant of 213749 which maps to 5668)
- 0x3a3b7d: (0x67b1, 0),# East Asian ideograph
- 0x216b33: (0x5231, 0),# East Asian ideograph
- 0x23504a: (0x98bf, 0),# East Asian ideograph
- 0x4b5f35: (0x6b92, 0),# East Asian ideograph
- 0x474270: (0x94bc, 0),# East Asian ideograph
- 0x6f515b: (0xbd87, 0),# Korean hangul
- 0x212b35: (0x3001, 0),# Ideographic comma
- 0x216b36: (0x5235, 0),# East Asian ideograph
- 0x4b6268: (0x9ed9, 0),# East Asian ideograph
- 0x3f404f: (0x638a, 0),# East Asian ideograph
- 0x695b7b: (0x6926, 0),# East Asian ideograph
- 0x222b38: (0x601b, 0),# East Asian ideograph
- 0x216b39: (0x5233, 0),# East Asian ideograph
- 0x6f485f: (0xac00, 0),# Korean hangul
- 0x276047: (0x988a, 0),# East Asian ideograph
- 0x212b3a: (0xff1a, 0),# Ideographic colon
- 0x225d59: (0x754b, 0),# East Asian ideograph
- 0x333f3f: (0x51f4, 0),# East Asian ideograph
- 0x212b3b: (0xff1f, 0),# Ideographic question mark
- 0x2d3852: (0x51a2, 0),# East Asian ideograph
- 0x295739: (0x9c9e, 0),# East Asian ideograph
- 0x222b3d: (0x6033, 0),# East Asian ideograph
- 0x276b3e: (0x522d, 0),# East Asian ideograph
- 0x6f515d: (0xbd89, 0),# Korean hangul
- 0x276048: (0x9888, 0),# East Asian ideograph
- 0x275a28: (0x8d42, 0),# East Asian ideograph
- 0x225d5a: (0x7548, 0),# East Asian ideograph
- 0x29462d: (0x9549, 0),# East Asian ideograph
- 0x6f4b45: (0xb07d, 0),# Korean hangul
- 0x274f3c: (0x79f0, 0),# East Asian ideograph
- 0x706b42: (0x80bc, 0),# East Asian ideograph
- 0x6f515e: (0xbd90, 0),# Korean hangul
- 0x276049: (0x9891, 0),# East Asian ideograph
- 0x217b75: (0x5aa0, 0),# East Asian ideograph
- 0x706b44: (0x80bd, 0),# East Asian ideograph
- 0x33615a: (0x8eb0, 0),# East Asian ideograph
- 0x222b45: (0x600d, 0),# East Asian ideograph
- 0x2d3854: (0x5896, 0),# East Asian ideograph
- 0x274f3d: (0x79cd, 0),# East Asian ideograph
- 0x28533c: (0x709d, 0),# East Asian ideograph
- 0x69573b: (0x5f41, 0),# East Asian ideograph
- 0x216b47: (0x5260, 0),# East Asian ideograph
- 0x6f5b51: (0xd291, 0),# Korean hangul
- 0x6f515f: (0xbd91, 0),# Korean hangul
- 0x27604a: (0x9893, 0),# East Asian ideograph
- 0x213744: (0x5678, 0),# East Asian ideograph
- 0x4b3057: (0x4e99, 0),# East Asian ideograph
- 0x6f2477: (0x3154, 0),# Korean hangul
- 0x2f4053: (0x914f, 0),# East Asian ideograph
- 0x226b4b: (0x7b37, 0),# East Asian ideograph
- 0x29573c: (0x9c91, 0),# East Asian ideograph
- 0x706b4c: (0x80e9, 0),# East Asian ideograph
- 0x4b5f3a: (0x967a, 0),# East Asian ideograph
- 0x216b4d: (0x525e, 0),# East Asian ideograph
- 0x6f5160: (0xbd93, 0),# Korean hangul
- 0x6f5940: (0xccad, 0),# Korean hangul
- 0x29573d: (0x9c92, 0),# East Asian ideograph
- 0x6f5161: (0xbd95, 0),# Korean hangul
- 0x216b53: (0x5255, 0),# East Asian ideograph
- 0x705f50: (0x549d, 0),# East Asian ideograph
- 0x2e6b54: (0x7b04, 0),# East Asian ideograph
- 0x292b55: (0x86f3, 0),# East Asian ideograph
- 0x6f555a: (0xc5e0, 0),# Korean hangul
- 0x70622a: (0x7339, 0),# East Asian ideograph
- 0x6f5162: (0xbd99, 0),# Korean hangul
- 0x212b59: (0xff0f, 0),# Ideographic solidus
- 0x2d562e: (0x8024, 0),# East Asian ideograph
- 0x213563: (0x5439, 0),# East Asian ideograph
- 0x216b5b: (0x526e, 0),# East Asian ideograph
- 0x6f4b67: (0xb0c8, 0),# Korean hangul
- 0x4b3d24: (0x53a6, 0),# East Asian ideograph
- 0x6f5163: (0xbd9c, 0),# Korean hangul
- 0x225d60: (0x755b, 0),# East Asian ideograph
- 0x276b5f: (0x672d, 0),# East Asian ideograph
- 0x2d433e: (0x667b, 0),# East Asian ideograph
- 0x235053: (0x98c6, 0),# East Asian ideograph
- 0x345452: (0x7118, 0),# East Asian ideograph
- 0x6f5b40: (0xd1d8, 0),# Korean hangul
- 0x213569: (0x5462, 0),# East Asian ideograph
- 0x4c6b62: (0x7b4c, 0),# East Asian ideograph (variant of 226B62 which maps to 7B4C)
- 0x394634: (0x6b96, 0),# East Asian ideograph (variant of 214634 which maps to 6B96)
- 0x274171: (0x629a, 0),# East Asian ideograph
- 0x6f5165: (0xbdf0, 0),# Korean hangul
- 0x28356d: (0x6512, 0),# East Asian ideograph
- 0x274f44: (0x79ef, 0),# East Asian ideograph
- 0x295742: (0x9c95, 0),# East Asian ideograph
- 0x4b3d27: (0x5ec3, 0),# East Asian ideograph
- 0x6f5166: (0xbe0c, 0),# Korean hangul
- 0x6f4d23: (0xb310, 0),# Korean hangul
- 0x213b61: (0x5c64, 0),# East Asian ideograph (variant of 4B3B61 which maps to 5C64)
- 0x4b5277: (0x8068, 0),# East Asian ideograph
- 0x336162: (0x9a23, 0),# East Asian ideograph
- 0x705f51: (0x54d0, 0),# East Asian ideograph
- 0x6f4a46: (0xae50, 0),# Korean hangul
- 0x292b6e: (0x86f0, 0),# East Asian ideograph
- 0x2d4a60: (0x6c02, 0),# East Asian ideograph
- 0x222b6f: (0x604c, 0),# East Asian ideograph
- 0x6f5167: (0xbe0d, 0),# Korean hangul
- 0x276052: (0x989b, 0),# East Asian ideograph
- 0x6f4d24: (0xb311, 0),# Korean hangul
- 0x232b72: (0x87ac, 0),# East Asian ideograph
- 0x6f496c: (0xad49, 0),# Korean hangul
- 0x216b74: (0x5282, 0),# East Asian ideograph
- 0x2d5f2e: (0x661c, 0),# East Asian ideograph
- 0x216b75: (0x5281, 0),# East Asian ideograph
- 0x6f5168: (0xbe10, 0),# Korean hangul
- 0x215966: (0x8c8c, 0),# East Asian ideograph
- 0x6f5621: (0xc641, 0),# Korean hangul
- 0x33515c: (0x7dab, 0),# East Asian ideograph
- 0x275622: (0x8427, 0),# East Asian ideograph
- 0x215623: (0x859b, 0),# East Asian ideograph
- 0x226b79: (0x7b72, 0),# East Asian ideograph
- 0x215624: (0x8591, 0),# East Asian ideograph
- 0x2d496b: (0x70df, 0),# East Asian ideograph
- 0x226b7a: (0x7b78, 0),# East Asian ideograph
- 0x6f5169: (0xbe14, 0),# Korean hangul
- 0x275626: (0x8537, 0),# East Asian ideograph
- 0x23487c: (0x9481, 0),# East Asian ideograph
- 0x226b7c: (0x7b67, 0),# East Asian ideograph
- 0x6f5627: (0xc654, 0),# Korean hangul
- 0x2d5635: (0x846f, 0),# East Asian ideograph
- 0x215628: (0x8587, 0),# East Asian ideograph
- 0x29352d: (0x8c30, 0),# East Asian ideograph
- 0x275629: (0x84dd, 0),# East Asian ideograph
- 0x21562a: (0x85a9, 0),# East Asian ideograph
- 0x276055: (0x613f, 0),# East Asian ideograph
- 0x6f4d27: (0xb315, 0),# Korean hangul
- 0x225d67: (0x7563, 0),# East Asian ideograph
- 0x273d2f: (0x5385, 0),# East Asian ideograph
- 0x335d23: (0x8a76, 0),# East Asian ideograph
- 0x6f562d: (0xc678, 0),# Korean hangul
- 0x2e2968: (0x5f51, 0),# East Asian ideograph
- 0x21562e: (0x85c9, 0),# East Asian ideograph
- 0x4b3d2c: (0x53b0, 0),# East Asian ideograph
- 0x21562f: (0x85b0, 0),# East Asian ideograph
- 0x4b527c: (0x8080, 0),# East Asian ideograph
- 0x233f4e: (0x9100, 0),# East Asian ideograph
- 0x4b4e39: (0x5cfa, 0),# East Asian ideograph
- 0x275631: (0x827a, 0),# East Asian ideograph
- 0x215632: (0x85ea, 0),# East Asian ideograph
- 0x695633: (0x5cbe, 0),# East Asian ideograph
- 0x6f516c: (0xbe1f, 0),# Korean hangul
- 0x21596a: (0x8ca0, 0),# East Asian ideograph
- 0x213751: (0x5687, 0),# East Asian ideograph
- 0x275635: (0x836f, 0),# East Asian ideograph
- 0x6f5529: (0xc558, 0),# Korean hangul
- 0x235636: (0x9b43, 0),# East Asian ideograph
- 0x275637: (0x853c, 0),# East Asian ideograph
- 0x6f5c2e: (0xd39c, 0),# Korean hangul
- 0x4c5638: (0x729f, 0),# East Asian ideograph
- 0x275639: (0x853a, 0),# East Asian ideograph
- 0x21563a: (0x8606, 0),# East Asian ideograph
- 0x233f50: (0x9107, 0),# East Asian ideograph
- 0x225927: (0x73ea, 0),# East Asian ideograph
- 0x21563b: (0x860b, 0),# East Asian ideograph
- 0x6f5a22: (0xceac, 0),# Korean hangul
- 0x21563c: (0x8607, 0),# East Asian ideograph
- 0x224c5e: (0x6f36, 0),# East Asian ideograph
- 0x21563d: (0x860a, 0),# East Asian ideograph
- 0x4b3d2f: (0x5ef0, 0),# East Asian ideograph
- 0x696576: (0x7e90, 0),# East Asian ideograph
- 0x21563e: (0x862d, 0),# East Asian ideograph
- 0x276059: (0x9885, 0),# East Asian ideograph
- 0x21596c: (0x8ca1, 0),# East Asian ideograph
- 0x2d563f: (0x6a97, 0),# East Asian ideograph
- 0x276023: (0x5de9, 0),# East Asian ideograph
- 0x275640: (0x85d3, 0),# East Asian ideograph
- 0x346126: (0x6900, 0),# East Asian ideograph
- 0x2d3421: (0x5294, 0),# East Asian ideograph
- 0x235641: (0x9b4b, 0),# East Asian ideograph
- 0x215642: (0x863f, 0),# East Asian ideograph
- 0x4b5f49: (0x51cb, 0),# East Asian ideograph
- 0x2d4971: (0x70a4, 0),# East Asian ideograph
- 0x395643: (0x4e55, 0),# East Asian ideograph
- 0x6f4d2c: (0xb35c, 0),# Korean hangul
- 0x213754: (0x5695, 0),# East Asian ideograph
- 0x275644: (0x4e47, 0),# East Asian ideograph
- 0x70602d: (0x55b9, 0),# East Asian ideograph
- 0x692426: (0x3046, 0),# Hiragana letter U
- 0x2d5a7e: (0x8e7b, 0),# East Asian ideograph
- 0x6f5645: (0xc6cc, 0),# Korean hangul
- 0x6f5646: (0xc6cd, 0),# Korean hangul
- 0x2d572b: (0x8797, 0),# East Asian ideograph
- 0x275647: (0x5904, 0),# East Asian ideograph
- 0x215648: (0x865c, 0),# East Asian ideograph
- 0x27605b: (0x98ce, 0),# East Asian ideograph
- 0x28645a: (0x7817, 0),# East Asian ideograph
- 0x6f4d2d: (0xb35f, 0),# Korean hangul
- 0x225d6d: (0x7579, 0),# East Asian ideograph
- 0x21564a: (0x865f, 0),# East Asian ideograph
- 0x2d563c: (0x8613, 0),# East Asian ideograph
- 0x45564b: (0x865e, 0),# East Asian ideograph (variant of 21564B which maps to 865E)
- 0x21564c: (0x8667, 0),# East Asian ideograph
- 0x6f5171: (0xbe4c, 0),# Korean hangul
- 0x27605c: (0x98d2, 0),# East Asian ideograph
- 0x69564e: (0x5d76, 0),# East Asian ideograph
- 0x29302d: (0x88e3, 0),# East Asian ideograph
- 0x22564f: (0x72b4, 0),# East Asian ideograph
- 0x6f496e: (0xad6c, 0),# Korean hangul
- 0x277169: (0x5522, 0),# East Asian ideograph
- 0x6f5650: (0xc6ec, 0),# Korean hangul
- 0x6f5c2f: (0xd3a0, 0),# Korean hangul
- 0x235061: (0x98e4, 0),# East Asian ideograph
- 0x4b5f4c: (0x9d8f, 0),# East Asian ideograph
- 0x225652: (0x72b5, 0),# East Asian ideograph
- 0x27605d: (0x53f0, 0),# East Asian ideograph (duplicate simplified)
- 0x6f4d2f: (0xb365, 0),# Korean hangul
- 0x6f5653: (0xc704, 0),# Korean hangul
- 0x294642: (0x94e0, 0),# East Asian ideograph
- 0x692429: (0x3049, 0),# Hiragana letter small O
- 0x333f55: (0x5b3e, 0),# East Asian ideograph
- 0x6f5654: (0xc705, 0),# Korean hangul
- 0x6f5655: (0xc708, 0),# Korean hangul
- 0x225656: (0x72bc, 0),# East Asian ideograph
- 0x695657: (0x5d90, 0),# East Asian ideograph
- 0x27605e: (0x522e, 0),# East Asian ideograph
- 0x6f4d30: (0xb367, 0),# Korean hangul
- 0x225658: (0x72c3, 0),# East Asian ideograph
- 0x217747: (0x5853, 0),# East Asian ideograph
- 0x6f5659: (0xc719, 0),# Korean hangul
- 0x21565a: (0x86cb, 0),# East Asian ideograph
- 0x293537: (0x8c20, 0),# East Asian ideograph
- 0x235063: (0x98e5, 0),# East Asian ideograph
- 0x6f5174: (0xbe55, 0),# Korean hangul
- 0x27605f: (0x98d3, 0),# East Asian ideograph
- 0x6f4d31: (0xb368, 0),# Korean hangul
- 0x23565d: (0x9b74, 0),# East Asian ideograph
- 0x6f4b23: (0xafb9, 0),# Korean hangul
- 0x4b306c: (0x96e0, 0),# East Asian ideograph
- 0x6f565e: (0xc730, 0),# Korean hangul
- 0x6f5638: (0xc6a7, 0),# Korean hangul
- 0x6f565f: (0xc735, 0),# Korean hangul
- 0x4c6074: (0x76b9, 0),# East Asian ideograph
- 0x224c65: (0x6f2d, 0),# East Asian ideograph
- 0x215660: (0x86df, 0),# East Asian ideograph
- 0x4b5052: (0x7c56, 0),# East Asian ideograph
- 0x6f5175: (0xbe57, 0),# Korean hangul
- 0x6f4d32: (0xb369, 0),# Korean hangul
- 0x213b64: (0x5c6f, 0),# East Asian ideograph
- 0x6f5662: (0xc73d, 0),# Korean hangul
- 0x396223: (0x9bfd, 0),# East Asian ideograph (variant of 216223)
- 0x333f58: (0x61f4, 0),# East Asian ideograph
- 0x235663: (0x9b68, 0),# East Asian ideograph
- 0x2d3428: (0x5226, 0),# East Asian ideograph
- 0x2e5a40: (0x73b3, 0),# East Asian ideograph
- 0x215664: (0x86db, 0),# East Asian ideograph
- 0x6f583b: (0xc9e2, 0),# Korean hangul
- 0x293539: (0x8c33, 0),# East Asian ideograph
- 0x215665: (0x86e4, 0),# East Asian ideograph
- 0x4b5f50: (0x96e3, 0),# East Asian ideograph
- 0x4b3b37: (0x51a9, 0),# East Asian ideograph
- 0x6f5176: (0xbe59, 0),# Korean hangul
- 0x2f2f5d: (0x7e48, 0),# East Asian ideograph
- 0x276061: (0x98d5, 0),# East Asian ideograph
- 0x286460: (0x7856, 0),# East Asian ideograph
- 0x21375b: (0x56b6, 0),# East Asian ideograph
- 0x215667: (0x86f9, 0),# East Asian ideograph
- 0x225930: (0x73db, 0),# East Asian ideograph
- 0x6f5668: (0xc751, 0),# Korean hangul
- 0x22403d: (0x6a12, 0),# East Asian ideograph
- 0x6f5669: (0xc758, 0),# Korean hangul
- 0x224c67: (0x6f34, 0),# East Asian ideograph
- 0x4b566a: (0x8708, 0),# East Asian ideograph (variant of 21566A which maps to 8708)
- 0x21566b: (0x8700, 0),# East Asian ideograph
- 0x276062: (0x98d8, 0),# East Asian ideograph
- 0x285e7a: (0x75d6, 0),# East Asian ideograph
- 0x6f4d34: (0xb36b, 0),# Korean hangul
- 0x22566c: (0x72cc, 0),# East Asian ideograph
- 0x294647: (0x954f, 0),# East Asian ideograph
- 0x6f566d: (0xc77c, 0),# Korean hangul
- 0x334730: (0x6e5f, 0),# East Asian ideograph
- 0x6f5041: (0xbabb, 0),# Korean hangul
- 0x22566e: (0x72db, 0),# East Asian ideograph
- 0x22566f: (0x72cd, 0),# East Asian ideograph
- 0x21356d: (0x5496, 0),# East Asian ideograph
- 0x276063: (0x98de, 0),# East Asian ideograph
- 0x6f4d35: (0xb36e, 0),# Korean hangul
- 0x21375d: (0x56c1, 0),# East Asian ideograph
- 0x225d75: (0x7571, 0),# East Asian ideograph
- 0x333f5b: (0x6133, 0),# East Asian ideograph
- 0x235672: (0x9b80, 0),# East Asian ideograph
- 0x235673: (0x9b8c, 0),# East Asian ideograph
- 0x2f5e42: (0x9ec9, 0),# East Asian ideograph
- 0x6f5674: (0xc789, 0),# Korean hangul
- 0x6f543c: (0xc318, 0),# Korean hangul
- 0x2d5675: (0x9f05, 0),# East Asian ideograph
- 0x6f4d36: (0xb370, 0),# Korean hangul
- 0x21375e: (0x56c2, 0),# East Asian ideograph
- 0x275676: (0x8680, 0),# East Asian ideograph
- 0x692430: (0x3050, 0),# Hiragana letter GU
- 0x4d386f: (0x544b, 0),# East Asian ideograph
- 0x2d4122: (0x6485, 0),# East Asian ideograph
- 0x224123: (0x69f0, 0),# East Asian ideograph
- 0x295756: (0x9ca9, 0),# East Asian ideograph
- 0x215679: (0x8774, 0),# East Asian ideograph
- 0x224124: (0x69f2, 0),# East Asian ideograph
- 0x21567a: (0x8766, 0),# East Asian ideograph
- 0x234125: (0x9193, 0),# East Asian ideograph
- 0x6f4d37: (0xb371, 0),# Korean hangul
- 0x23567b: (0x9b7d, 0),# East Asian ideograph
- 0x21774e: (0x5856, 0),# East Asian ideograph
- 0x4b3072: (0x4eed, 0),# East Asian ideograph
- 0x6f4a4a: (0xae60, 0),# Korean hangul
- 0x33567c: (0x8671, 0),# East Asian ideograph
- 0x6f567d: (0xc798, 0),# Korean hangul
- 0x224128: (0x6a14, 0),# East Asian ideograph
- 0x21567e: (0x8757, 0),# East Asian ideograph
- 0x224129: (0x6a63, 0),# East Asian ideograph
- 0x295030: (0x989e, 0),# East Asian ideograph
- 0x6f517b: (0xbe60, 0),# Korean hangul
- 0x4b412a: (0x6323, 0),# East Asian ideograph
- 0x6f4d38: (0xb374, 0),# Korean hangul
- 0x225742: (0x731e, 0),# East Asian ideograph
- 0x23412b: (0x919d, 0),# East Asian ideograph
- 0x212b33: (0x3002, 0),# Ideographic full stop
- 0x23412c: (0x919a, 0),# East Asian ideograph
- 0x2d342e: (0x8274, 0),# East Asian ideograph
- 0x6f5538: (0xc580, 0),# Korean hangul
- 0x276067: (0x9968, 0),# East Asian ideograph
- 0x6f4d39: (0xb378, 0),# Korean hangul
- 0x234130: (0x91a2, 0),# East Asian ideograph
- 0x3f476f: (0x51c8, 0),# East Asian ideograph
- 0x334131: (0x6425, 0),# East Asian ideograph
- 0x6f5042: (0xbabd, 0),# Korean hangul
- 0x2d4132: (0x642f, 0),# East Asian ideograph
- 0x287130: (0x7edb, 0),# East Asian ideograph
- 0x234c29: (0x9708, 0),# East Asian ideograph
- 0x6f517d: (0xbe64, 0),# Korean hangul
- 0x234134: (0x919b, 0),# East Asian ideograph (variant of 4D4134 which maps to 919B)
- 0x6f4d3a: (0xb380, 0),# Korean hangul
- 0x213762: (0x56c8, 0),# East Asian ideograph
- 0x336179: (0x9c7b, 0),# East Asian ideograph
- 0x4c3474: (0x631d, 0),# East Asian ideograph
- 0x235d36: (0x9e15, 0),# East Asian ideograph
- 0x274136: (0x62a1, 0),# East Asian ideograph
- 0x274f5c: (0x6d3c, 0),# East Asian ideograph
- 0x6f4f68: (0xb9cc, 0),# Korean hangul
- 0x224137: (0x6a67, 0),# East Asian ideograph
- 0x344138: (0x8022, 0),# East Asian ideograph
- 0x6f517e: (0xbe68, 0),# Korean hangul
- 0x224139: (0x6a43, 0),# East Asian ideograph
- 0x6f4d3b: (0xb383, 0),# Korean hangul
- 0x22413a: (0x6a33, 0),# East Asian ideograph
- 0x225938: (0x73e3, 0),# East Asian ideograph
- 0x22413b: (0x6a32, 0),# East Asian ideograph
- 0x274f5d: (0x7a9d, 0),# East Asian ideograph
- 0x27413c: (0x62e3, 0),# East Asian ideograph
- 0x706247: (0x9987, 0),# East Asian ideograph
- 0x23413d: (0x91aa, 0),# East Asian ideograph
- 0x27606a: (0x996e, 0),# East Asian ideograph
- 0x6f4d3c: (0xb385, 0),# Korean hangul
- 0x213b66: (0x5c79, 0),# East Asian ideograph
- 0x213764: (0x56d1, 0),# East Asian ideograph
- 0x22413f: (0x6a28, 0),# East Asian ideograph
- 0x213321: (0x5167, 0),# East Asian ideograph
- 0x4c3f68: (0x69c7, 0),# East Asian ideograph
- 0x224140: (0x6a48, 0),# East Asian ideograph
- 0x224141: (0x6a50, 0),# East Asian ideograph
- 0x224142: (0x6a52, 0),# East Asian ideograph
- 0x334c2c: (0x754d, 0),# East Asian ideograph
- 0x336058: (0x9855, 0),# East Asian ideograph
- 0x224143: (0x6a72, 0),# East Asian ideograph
- 0x6f4d3d: (0xb38c, 0),# Korean hangul
- 0x274570: (0x6743, 0),# East Asian ideograph
- 0x285836: (0x7315, 0),# East Asian ideograph
- 0x224145: (0x6a3e, 0),# East Asian ideograph
- 0x224146: (0x6a77, 0),# East Asian ideograph
- 0x287134: (0x7ed7, 0),# East Asian ideograph
- 0x224147: (0x6a5b, 0),# East Asian ideograph
- 0x214148: (0x63ea, 0),# East Asian ideograph
- 0x6f4d3e: (0xb3c4, 0),# Korean hangul
- 0x225d7e: (0x757f, 0),# East Asian ideograph
- 0x217755: (0x589a, 0),# East Asian ideograph
- 0x2d3877: (0x5900, 0),# East Asian ideograph
- 0x22414a: (0x6a5e, 0),# East Asian ideograph
- 0x21414b: (0x643e, 0),# East Asian ideograph
- 0x21414c: (0x6413, 0),# East Asian ideograph
- 0x23414d: (0x91b5, 0),# East Asian ideograph
- 0x3f4a60: (0x7266, 0),# East Asian ideograph
- 0x6f4d3f: (0xb3c5, 0),# Korean hangul
- 0x6f4c78: (0xb2f7, 0),# Korean hangul
- 0x335d3b: (0x57dc, 0),# East Asian ideograph
- 0x22414f: (0x6a51, 0),# East Asian ideograph
- 0x2d4150: (0x6428, 0),# East Asian ideograph
- 0x29575f: (0x9ca0, 0),# East Asian ideograph
- 0x224151: (0x6a56, 0),# East Asian ideograph
- 0x2d4152: (0x6447, 0),# East Asian ideograph
- 0x6f4d40: (0xb3c8, 0),# Korean hangul
- 0x224153: (0x6a36, 0),# East Asian ideograph
- 0x2d4154: (0x635c, 0),# East Asian ideograph
- 0x2d3436: (0x52f3, 0),# East Asian ideograph
- 0x274155: (0x62a2, 0),# East Asian ideograph
- 0x217c30: (0x5a93, 0),# East Asian ideograph
- 0x224156: (0x6a7a, 0),# East Asian ideograph
- 0x234157: (0x91bd, 0),# East Asian ideograph
- 0x6f4d41: (0xb3cb, 0),# Korean hangul
- 0x213769: (0x56e4, 0),# East Asian ideograph
- 0x224158: (0x6a3f, 0),# East Asian ideograph
- 0x4b4f7b: (0x7b7a, 0),# East Asian ideograph
- 0x21623b: (0x9d12, 0),# East Asian ideograph (variant of 4B623B which maps to 9D12)
- 0x4b523e: (0x7f9a, 0),# East Asian ideograph (variant of 21523E which maps to 7F9A)
- 0x23415a: (0x91c2, 0),# East Asian ideograph
- 0x293651: (0x8d36, 0),# East Asian ideograph
- 0x23415b: (0x91c4, 0),# East Asian ideograph
- 0x33347d: (0x53c1, 0),# East Asian ideograph
- 0x23415c: (0x91c3, 0),# East Asian ideograph
- 0x275a30: (0x8d24, 0),# East Asian ideograph
- 0x29415d: (0x917d, 0),# East Asian ideograph
- 0x29586a: (0x9ccb, 0),# East Asian ideograph
- 0x274f64: (0x7a83, 0),# East Asian ideograph
- 0x27415f: (0x6402, 0),# East Asian ideograph
- 0x70624e: (0x9995, 0),# East Asian ideograph
- 0x224c76: (0x6efa, 0),# East Asian ideograph
- 0x224833: (0x6cb4, 0),# East Asian ideograph
- 0x4b5164: (0x770c, 0),# East Asian ideograph
- 0x4c4146: (0x8538, 0),# East Asian ideograph
- 0x234161: (0x91d4, 0),# East Asian ideograph
- 0x284257: (0x68bc, 0),# East Asian ideograph
- 0x294656: (0x955b, 0),# East Asian ideograph
- 0x234162: (0x91d3, 0),# East Asian ideograph
- 0x234163: (0x91d5, 0),# East Asian ideograph
- 0x217639: (0x580e, 0),# East Asian ideograph
- 0x234164: (0x91d9, 0),# East Asian ideograph
- 0x274b22: (0x72ef, 0),# East Asian ideograph
- 0x6f5b59: (0xd2b8, 0),# Korean hangul
- 0x274165: (0x635e, 0),# East Asian ideograph
- 0x286272: (0x770d, 0),# East Asian ideograph
- 0x274166: (0x62e8, 0),# East Asian ideograph
- 0x6f4d44: (0xb3d4, 0),# Korean hangul
- 0x234168: (0x91e2, 0),# East Asian ideograph
- 0x6f534a: (0xc1a9, 0),# Korean hangul
- 0x234169: (0x91ed, 0),# East Asian ideograph
- 0x23416a: (0x91f7, 0),# East Asian ideograph
- 0x333d2f: (0x5e81, 0),# East Asian ideograph
- 0x23416b: (0x91fa, 0),# East Asian ideograph
- 0x6f4d45: (0xb3d5, 0),# Korean hangul
- 0x21775c: (0x5889, 0),# East Asian ideograph
- 0x22416c: (0x69f9, 0),# East Asian ideograph
- 0x215543: (0x83cc, 0),# East Asian ideograph
- 0x4b4e56: (0x78fa, 0),# East Asian ideograph
- 0x22416d: (0x6a64, 0),# East Asian ideograph
- 0x295427: (0x9a85, 0),# East Asian ideograph
- 0x27416e: (0x6251, 0),# East Asian ideograph
- 0x217c31: (0x5aac, 0),# East Asian ideograph
- 0x23416f: (0x91f2, 0),# East Asian ideograph
- 0x234171: (0x91e8, 0),# East Asian ideograph
- 0x294458: (0x952c, 0),# East Asian ideograph
- 0x4b434d: (0x663f, 0),# East Asian ideograph
- 0x234172: (0x91f6, 0),# East Asian ideograph
- 0x2d343c: (0x52a2, 0),# East Asian ideograph
- 0x334c3e: (0x8e08, 0),# East Asian ideograph
- 0x234173: (0x91ee, 0),# East Asian ideograph
- 0x274174: (0x62e5, 0),# East Asian ideograph
- 0x4b3d4b: (0x5f3e, 0),# East Asian ideograph
- 0x334c36: (0x753b, 0),# East Asian ideograph (variant of 274C36 which maps to 753B)
- 0x4b4c67: (0x761f, 0),# East Asian ideograph (variant of 214C67 which maps to 761F)
- 0x224175: (0x6aa8, 0),# East Asian ideograph
- 0x29465a: (0x955f, 0),# East Asian ideograph
- 0x274176: (0x51fb, 0),# East Asian ideograph
- 0x692441: (0x3061, 0),# Hiragana letter TI
- 0x21332c: (0x5178, 0),# East Asian ideograph
- 0x235d43: (0x9e7b, 0),# East Asian ideograph
- 0x224177: (0x6aa5, 0),# East Asian ideograph
- 0x2d343d: (0x52e7, 0),# East Asian ideograph
- 0x224179: (0x6a96, 0),# East Asian ideograph
- 0x4b3d4c: (0x5f25, 0),# East Asian ideograph (variant of 273D4C)
- 0x222c24: (0x608a, 0),# East Asian ideograph
- 0x27417a: (0x6321, 0),# East Asian ideograph
- 0x6f4d48: (0xb3db, 0),# Korean hangul
- 0x707360: (0x7b7b, 0),# East Asian ideograph
- 0x286032: (0x75ac, 0),# East Asian ideograph
- 0x21332d: (0x517c, 0),# East Asian ideograph
- 0x27417c: (0x636e, 0),# East Asian ideograph
- 0x216c27: (0x5296, 0),# East Asian ideograph
- 0x27417d: (0x63b3, 0),# East Asian ideograph
- 0x284f26: (0x6cf7, 0),# East Asian ideograph
- 0x224a44: (0x6e12, 0),# East Asian ideograph
- 0x22417e: (0x6a7d, 0),# East Asian ideograph
- 0x6f5d6a: (0xd750, 0),# Korean hangul
- 0x226c29: (0x7b73, 0),# East Asian ideograph
- 0x2e2b74: (0x609b, 0),# East Asian ideograph
- 0x276077: (0x997f, 0),# East Asian ideograph
- 0x6f4d49: (0xb3fc, 0),# Korean hangul
- 0x217760: (0x589b, 0),# East Asian ideograph
- 0x223378: (0x647d, 0),# East Asian ideograph
- 0x232c2c: (0x87ee, 0),# East Asian ideograph
- 0x274b28: (0x72de, 0),# East Asian ideograph
- 0x222c2f: (0x609e, 0),# East Asian ideograph
- 0x6f4b28: (0xafc9, 0),# Korean hangul
- 0x217761: (0x587c, 0),# East Asian ideograph
- 0x222c30: (0x6083, 0),# East Asian ideograph
- 0x4b4e5b: (0x783f, 0),# East Asian ideograph
- 0x22483b: (0x6d28, 0),# East Asian ideograph
- 0x216c33: (0x52ae, 0),# East Asian ideograph
- 0x4c4345: (0x67a6, 0),# East Asian ideograph
- 0x222c34: (0x60a7, 0),# East Asian ideograph
- 0x6f4d4b: (0xb410, 0),# Korean hangul
- 0x213773: (0x5718, 0),# East Asian ideograph
- 0x233b2e: (0x8ec9, 0),# East Asian ideograph
- 0x216c38: (0x52bc, 0),# East Asian ideograph
- 0x2d454e: (0x697d, 0),# East Asian ideograph
- 0x217763: (0x5888, 0),# East Asian ideograph
- 0x692446: (0x3066, 0),# Hiragana letter TE
- 0x232c3a: (0x87d6, 0),# East Asian ideograph
- 0x235d48: (0x9e83, 0),# East Asian ideograph
- 0x39302d: (0x534b, 0),# East Asian ideograph
- 0x6f4d4d: (0xb41c, 0),# Korean hangul
- 0x213775: (0x571f, 0),# East Asian ideograph
- 0x286037: (0x763f, 0),# East Asian ideograph
- 0x692447: (0x3067, 0),# Hiragana letter DE
- 0x223731: (0x65c6, 0),# East Asian ideograph
- 0x294478: (0x9522, 0),# East Asian ideograph
- 0x274b2c: (0x730e, 0),# East Asian ideograph
- 0x287144: (0x7ee0, 0),# East Asian ideograph
- 0x234c3d: (0x971d, 0),# East Asian ideograph
- 0x706c43: (0x70c0, 0),# East Asian ideograph
- 0x213333: (0x5191, 0),# East Asian ideograph
- 0x333066: (0x5fc8, 0),# East Asian ideograph
- 0x69613a: (0x7549, 0),# East Asian ideograph
- 0x6f4f6c: (0xb9d1, 0),# Korean hangul
- 0x216c46: (0x52d4, 0),# East Asian ideograph
- 0x234c3e: (0x9719, 0),# East Asian ideograph
- 0x453666: (0x5ad0, 0),# East Asian ideograph
- 0x232c48: (0x87d3, 0),# East Asian ideograph
- 0x6f4d4f: (0xb428, 0),# Korean hangul
- 0x6f4b29: (0xafcb, 0),# Korean hangul
- 0x294662: (0x956a, 0),# East Asian ideograph
- 0x692449: (0x3069, 0),# Hiragana letter DO
- 0x275f50: (0x96be, 0),# East Asian ideograph
- 0x235d4b: (0x9e88, 0),# East Asian ideograph
- 0x274b2e: (0x732e, 0),# East Asian ideograph
- 0x235a6b: (0x9d3d, 0),# East Asian ideograph
- 0x292c4c: (0x866e, 0),# East Asian ideograph
- 0x4b346b: (0x5df5, 0),# East Asian ideograph
- 0x69595e: (0x63b5, 0),# East Asian ideograph
- 0x472c4d: (0x8801, 0),# East Asian ideograph (variant of 232C4D which maps to 8801)
- 0x28603a: (0x75c8, 0),# East Asian ideograph
- 0x454774: (0x6e15, 0),# East Asian ideograph
- 0x6f5564: (0xc5f7, 0),# Korean hangul
- 0x694664: (0x51ea, 0),# East Asian ideograph
- 0x294221: (0x9495, 0),# East Asian ideograph
- 0x6f4975: (0xad7c, 0),# Korean hangul
- 0x292c55: (0x86cf, 0),# East Asian ideograph
- 0x6f5762: (0xc8c8, 0),# Korean hangul
- 0x4b5f6f: (0x970a, 0),# East Asian ideograph
- 0x2d4f37: (0x7980, 0),# East Asian ideograph
- 0x216c58: (0x52f0, 0),# East Asian ideograph
- 0x294222: (0x9490, 0),# East Asian ideograph
- 0x6f5a48: (0xcf78, 0),# Korean hangul
- 0x216c5a: (0x52f1, 0),# East Asian ideograph
- 0x2d632b: (0x5c28, 0),# East Asian ideograph
- 0x4b4135: (0x6368, 0),# East Asian ideograph
- 0x275c3e: (0x8fc7, 0),# East Asian ideograph
- 0x223b7a: (0x67f6, 0),# East Asian ideograph
- 0x69244d: (0x306d, 0),# Hiragana letter NE
- 0x222c5d: (0x60c4, 0),# East Asian ideograph
- 0x3a284c: (0x53a9, 0),# East Asian ideograph (variant of 4C284C)
- 0x294223: (0x94ad, 0),# East Asian ideograph
- 0x235d4f: (0x9e87, 0),# East Asian ideograph
- 0x3f5e60: (0x9586, 0),# East Asian ideograph
- 0x395773: (0x7daf, 0),# East Asian ideograph
- 0x4b5f71: (0x9756, 0),# East Asian ideograph
- 0x224844: (0x6d39, 0),# East Asian ideograph
- 0x292c61: (0x86f4, 0),# East Asian ideograph
- 0x6f4d54: (0xb451, 0),# Korean hangul
- 0x6f4b2a: (0xafcd, 0),# Korean hangul
- 0x69544b: (0x5870, 0),# East Asian ideograph
- 0x213339: (0x51a5, 0),# East Asian ideograph
- 0x294224: (0x94aa, 0),# East Asian ideograph
- 0x6f563f: (0xc6b9, 0),# Korean hangul
- 0x292c64: (0x877e, 0),# East Asian ideograph
- 0x4b5f72: (0x975b, 0),# East Asian ideograph
- 0x4b5365: (0x8133, 0),# East Asian ideograph
- 0x222c66: (0x60e2, 0),# East Asian ideograph
- 0x6f4a50: (0xae6c, 0),# Korean hangul
- 0x294225: (0x94ab, 0),# East Asian ideograph
- 0x2d5664: (0x9f04, 0),# East Asian ideograph
- 0x6f542a: (0xc2ed, 0),# Korean hangul
- 0x4b5f73: (0x975c, 0),# East Asian ideograph (variant of 215F73 which maps to 975C)
- 0x335228: (0x94b5, 0),# East Asian ideograph
- 0x336c6b: (0x6031, 0),# East Asian ideograph
- 0x6f4d56: (0xb458, 0),# Korean hangul
- 0x692450: (0x3070, 0),# Hiragana letter BA
- 0x4b4e67: (0x79d8, 0),# East Asian ideograph
- 0x6f5c37: (0xd3bc, 0),# Korean hangul
- 0x28714d: (0x7ee1, 0),# East Asian ideograph
- 0x216c6f: (0x530b, 0),# East Asian ideograph
- 0x6f4d57: (0xb460, 0),# Korean hangul
- 0x222c73: (0x6103, 0),# East Asian ideograph
- 0x6f5a21: (0xcea5, 0),# Korean hangul
- 0x4b3d5c: (0x5f83, 0),# East Asian ideograph
- 0x6f4d58: (0xb461, 0),# Korean hangul
- 0x6f5425: (0xc2e0, 0),# Korean hangul
- 0x692452: (0x3072, 0),# Hiragana letter HI
- 0x21333d: (0x51b6, 0),# East Asian ideograph
- 0x215721: (0x8759, 0),# East Asian ideograph
- 0x6f4f6e: (0xb9d9, 0),# Korean hangul
- 0x6f5723: (0xc7a4, 0),# Korean hangul
- 0x225724: (0x72f4, 0),# East Asian ideograph
- 0x6f5d74: (0xd769, 0),# Korean hangul
- 0x6f4d59: (0xb463, 0),# Korean hangul
- 0x215725: (0x879e, 0),# East Asian ideograph
- 0x695438: (0x57b3, 0),# East Asian ideograph
- 0x692453: (0x3073, 0),# Hiragana letter BI
- 0x6f5726: (0xc7a7, 0),# Korean hangul
- 0x6f5727: (0xc7ac, 0),# Korean hangul
- 0x2f5e66: (0x9b12, 0),# East Asian ideograph
- 0x4b4b71: (0x7f3e, 0),# East Asian ideograph (variant of 2D4B71 which maps to 7F3E)
- 0x6f5728: (0xc7ad, 0),# Korean hangul
- 0x225729: (0x7302, 0),# East Asian ideograph
- 0x697323: (0x9d64, 0),# East Asian ideograph
- 0x6f4d5a: (0xb465, 0),# Korean hangul
- 0x6f572a: (0xc7b4, 0),# Korean hangul
- 0x2e4873: (0x6fa3, 0),# East Asian ideograph
- 0x21572b: (0x87b3, 0),# East Asian ideograph
- 0x333330: (0x518a, 0),# East Asian ideograph
- 0x21572c: (0x87bb, 0),# East Asian ideograph
- 0x29577a: (0x9cad, 0),# East Asian ideograph
- 0x21572d: (0x87c8, 0),# East Asian ideograph
- 0x39563c: (0x56cc, 0),# East Asian ideograph
- 0x222632: (0x5db8, 0),# East Asian ideograph
- 0x21572e: (0x87d2, 0),# East Asian ideograph
- 0x4b5d58: (0x9234, 0),# East Asian ideograph
- 0x6f4d5b: (0xb46c, 0),# Korean hangul
- 0x21572f: (0x87ba, 0),# East Asian ideograph
- 0x2d5730: (0x87c7, 0),# East Asian ideograph
- 0x235731: (0x9b92, 0),# East Asian ideograph
- 0x6f5c38: (0xd3c4, 0),# Korean hangul
- 0x275732: (0x86f2, 0),# East Asian ideograph
- 0x6f4e75: (0xb7ac, 0),# Korean hangul
- 0x275733: (0x866b, 0),# East Asian ideograph
- 0x6f4d5c: (0xb480, 0),# Korean hangul
- 0x275734: (0x8749, 0),# East Asian ideograph
- 0x215735: (0x87fb, 0),# East Asian ideograph
- 0x215736: (0x8805, 0),# East Asian ideograph
- 0x2d5228: (0x9262, 0),# East Asian ideograph
- 0x29577c: (0x9cb0, 0),# East Asian ideograph
- 0x695737: (0x5f16, 0),# East Asian ideograph
- 0x222634: (0x5dbf, 0),# East Asian ideograph
- 0x213d21: (0x5ebe, 0),# East Asian ideograph
- 0x6f4d5d: (0xb488, 0),# Korean hangul
- 0x235739: (0x9b9d, 0),# East Asian ideograph
- 0x6f573a: (0xc811, 0),# Korean hangul
- 0x2d3453: (0x758b, 0),# East Asian ideograph
- 0x21573b: (0x8822, 0),# East Asian ideograph
- 0x213132: (0x4f5b, 0),# East Asian ideograph
- 0x21573c: (0x8823, 0),# East Asian ideograph
- 0x21573d: (0x8821, 0),# East Asian ideograph
- 0x23417a: (0x91f8, 0),# East Asian ideograph
- 0x6f4d5e: (0xb4a4, 0),# Korean hangul
- 0x21573e: (0x881f, 0),# East Asian ideograph
- 0x275e69: (0x5173, 0),# East Asian ideograph
- 0x6f5b7a: (0xd330, 0),# Korean hangul
- 0x692458: (0x3078, 0),# Hiragana letter HE
- 0x21573f: (0x8831, 0),# East Asian ideograph
- 0x235d5a: (0x9e95, 0),# East Asian ideograph
- 0x4b5740: (0x8827, 0),# East Asian ideograph
- 0x2d572d: (0x8748, 0),# East Asian ideograph
- 0x215741: (0x8836, 0),# East Asian ideograph
- 0x69533b: (0x555d, 0),# East Asian ideograph
- 0x275742: (0x86ee, 0),# East Asian ideograph
- 0x27614f: (0x9a74, 0),# East Asian ideograph
- 0x6f4d5f: (0xb4b7, 0),# Korean hangul
- 0x215743: (0x8840, 0),# East Asian ideograph
- 0x4c5c3a: (0x73f1, 0),# East Asian ideograph
- 0x6f5744: (0xc82d, 0),# Korean hangul
- 0x694823: (0x7872, 0),# East Asian ideograph
- 0x6f5745: (0xc82f, 0),# Korean hangul
- 0x2d522b: (0x9475, 0),# East Asian ideograph
- 0x215746: (0x8853, 0),# East Asian ideograph (variant of 4B5746 which maps to 8853)
- 0x275747: (0x4e8d, 0),# East Asian ideograph
- 0x2d4562: (0x681d, 0),# East Asian ideograph
- 0x275a36: (0x8d56, 0),# East Asian ideograph
- 0x6f4d60: (0xb4c0, 0),# Korean hangul
- 0x69245a: (0x307a, 0),# Hiragana letter PE
- 0x213345: (0x51dd, 0),# East Asian ideograph
- 0x215749: (0x885b, 0),# East Asian ideograph
- 0x235d5c: (0x9e91, 0),# East Asian ideograph
- 0x2d3164: (0x7ae2, 0),# East Asian ideograph
- 0x4b4a2e: (0x55b6, 0),# East Asian ideograph
- 0x21574a: (0x885d, 0),# East Asian ideograph
- 0x474236: (0x949a, 0),# East Asian ideograph
- 0x2d4425: (0x686e, 0),# East Asian ideograph
- 0x29533d: (0x9a90, 0),# East Asian ideograph
- 0x21574c: (0x8862, 0),# East Asian ideograph
- 0x3b3922: (0x8db5, 0),# East Asian ideograph
- 0x21574d: (0x8863, 0),# East Asian ideograph
- 0x23574e: (0x9ba0, 0),# East Asian ideograph
- 0x2d3457: (0x62fe, 0),# East Asian ideograph
- 0x21574f: (0x8868, 0),# East Asian ideograph
- 0x6f5750: (0xc885, 0),# Korean hangul
- 0x213d22: (0x5eca, 0),# East Asian ideograph
- 0x2d4564: (0x68b9, 0),# East Asian ideograph
- 0x215752: (0x8881, 0),# East Asian ideograph
- 0x27602e: (0x97e9, 0),# East Asian ideograph
- 0x6f5753: (0xc88b, 0),# Korean hangul
- 0x69613e: (0x7569, 0),# East Asian ideograph
- 0x6f5754: (0xc88c, 0),# Korean hangul
- 0x215755: (0x8888, 0),# East Asian ideograph
- 0x4b3d67: (0x5f84, 0),# East Asian ideograph (variant of 273D67)
- 0x215756: (0x88ab, 0),# East Asian ideograph
- 0x6f4d63: (0xb4dd, 0),# Korean hangul
- 0x29367e: (0x8d59, 0),# East Asian ideograph
- 0x217337: (0x568a, 0),# East Asian ideograph
- 0x215759: (0x888d, 0),# East Asian ideograph
- 0x6f5967: (0xce60, 0),# Korean hangul
- 0x21575a: (0x888b, 0),# East Asian ideograph
- 0x21575b: (0x889e, 0),# East Asian ideograph
- 0x4d3c6c: (0x8fb6, 0),# East Asian ideograph
- 0x21575c: (0x88c1, 0),# East Asian ideograph
- 0x273a36: (0x5988, 0),# East Asian ideograph
- 0x6f4921: (0xac70, 0),# Korean hangul
- 0x23575d: (0x9bc6, 0),# East Asian ideograph
- 0x23575e: (0x9bbf, 0),# East Asian ideograph
- 0x22575f: (0x733b, 0),# East Asian ideograph
- 0x2d5760: (0x5e2c, 0),# East Asian ideograph
- 0x6f4d65: (0xb4e3, 0),# Korean hangul
- 0x4c7265: (0x7dfc, 0),# East Asian ideograph
- 0x69245f: (0x307f, 0),# Hiragana letter MI
- 0x6f4922: (0xac71, 0),# Korean hangul
- 0x225762: (0x733a, 0),# East Asian ideograph
- 0x6f5125: (0xbc45, 0),# Korean hangul
- 0x2e4670: (0x6cd0, 0),# East Asian ideograph
- 0x284539: (0x6b9a, 0),# East Asian ideograph
- 0x2d345b: (0x6607, 0),# East Asian ideograph
- 0x275763: (0x91cc, 0),# East Asian ideograph
- 0x393054: (0x4f0d, 0),# East Asian ideograph
- 0x226260: (0x777a, 0),# East Asian ideograph
- 0x6f5764: (0xc8d4, 0),# Korean hangul
- 0x232739: (0x85da, 0),# East Asian ideograph
- 0x215765: (0x88dd, 0),# East Asian ideograph
- 0x395564: (0x6726, 0),# East Asian ideograph
- 0x235766: (0x9bb9, 0),# East Asian ideograph
- 0x6f4e2e: (0xb560, 0),# Korean hangul
- 0x6f5767: (0xc8e0, 0),# Korean hangul
- 0x6f504b: (0xbb3b, 0),# Korean hangul
- 0x6f2469: (0x3138, 0),# Korean hangul
- 0x215768: (0x88f3, 0),# East Asian ideograph
- 0x2d5232: (0x8fa0, 0),# East Asian ideograph
- 0x6f5769: (0xc8f0, 0),# Korean hangul
- 0x6f5b60: (0xd2cb, 0),# Korean hangul
- 0x69576a: (0x603a, 0),# East Asian ideograph
- 0x22576b: (0x7352, 0),# East Asian ideograph
- 0x21334c: (0x51f9, 0),# East Asian ideograph
- 0x27576c: (0x5236, 0),# East Asian ideograph
- 0x225521: (0x721d, 0),# East Asian ideograph
- 0x2d345d: (0x5349, 0),# East Asian ideograph
- 0x6f576d: (0xc8fd, 0),# Korean hangul
- 0x2d5233: (0x7f78, 0),# East Asian ideograph
- 0x23576e: (0x9bc0, 0),# East Asian ideograph
- 0x29312b: (0x89d1, 0),# East Asian ideograph
- 0x2d5361: (0x811a, 0),# East Asian ideograph
- 0x4b576f: (0x8910, 0),# East Asian ideograph (variant of 21576F which maps to 8910)
- 0x6f4d68: (0xb4ed, 0),# Korean hangul
- 0x6f4b2e: (0xafe9, 0),# Korean hangul
- 0x6f4925: (0xac77, 0),# Korean hangul
- 0x275771: (0x8934, 0),# East Asian ideograph
- 0x215772: (0x8912, 0),# East Asian ideograph
- 0x294164: (0x948b, 0),# East Asian ideograph
- 0x275773: (0x88e4, 0),# East Asian ideograph
- 0x215774: (0x892a, 0),# East Asian ideograph
- 0x6f4d69: (0xb4ef, 0),# Korean hangul
- 0x29467c: (0x9546, 0),# East Asian ideograph
- 0x3f4926: (0x6e08, 0),# East Asian ideograph
- 0x21334e: (0x51fd, 0),# East Asian ideograph
- 0x6f5776: (0xc92c, 0),# Korean hangul
- 0x234221: (0x91f9, 0),# East Asian ideograph
- 0x215777: (0x893b, 0),# East Asian ideograph
- 0x224222: (0x6a7f, 0),# East Asian ideograph
- 0x6f5a33: (0xcf11, 0),# Korean hangul
- 0x234223: (0x9204, 0),# East Asian ideograph
- 0x216231: (0x9cf6, 0),# East Asian ideograph
- 0x215779: (0x8938, 0),# East Asian ideograph
- 0x224224: (0x6a91, 0),# East Asian ideograph
- 0x21577a: (0x8944, 0),# East Asian ideograph
- 0x214225: (0x64e0, 0),# East Asian ideograph
- 0x6f4927: (0xac79, 0),# Korean hangul
- 0x22577b: (0x7358, 0),# East Asian ideograph
- 0x224226: (0x6a9f, 0),# East Asian ideograph
- 0x4b4a38: (0x71d7, 0),# East Asian ideograph
- 0x21577c: (0x8960, 0),# East Asian ideograph
- 0x234227: (0x920a, 0),# East Asian ideograph
- 0x27577d: (0x8884, 0),# East Asian ideograph
- 0x234228: (0x9225, 0),# East Asian ideograph
- 0x295347: (0x9a93, 0),# East Asian ideograph
- 0x216232: (0x9cf4, 0),# East Asian ideograph
- 0x21577e: (0x8964, 0),# East Asian ideograph
- 0x274229: (0x6401, 0),# East Asian ideograph
- 0x22422a: (0x6a92, 0),# East Asian ideograph
- 0x692465: (0x3085, 0),# Hiragana letter small YU
- 0x22422b: (0x6aa3, 0),# East Asian ideograph
- 0x6f504c: (0xbb3c, 0),# Korean hangul
- 0x6f545b: (0xc42c, 0),# Korean hangul
- 0x23422c: (0x9228, 0),# East Asian ideograph
- 0x6f5a35: (0xcf15, 0),# Korean hangul
- 0x6f5b61: (0xd2d4, 0),# Korean hangul
- 0x29556c: (0x960b, 0),# East Asian ideograph
- 0x4b5b46: (0x8f0c, 0),# East Asian ideograph
- 0x21422e: (0x64fe, 0),# East Asian ideograph
- 0x456260: (0x5e7a, 0),# East Asian ideograph
- 0x275c57: (0x8fd8, 0),# East Asian ideograph
- 0x23422f: (0x9203, 0),# East Asian ideograph
- 0x276030: (0x827d, 0),# East Asian ideograph
- 0x692466: (0x3086, 0),# Hiragana letter YU
- 0x274230: (0x6446, 0),# East Asian ideograph
- 0x2d567b: (0x8717, 0),# East Asian ideograph
- 0x234231: (0x9200, 0),# East Asian ideograph
- 0x393573: (0x5611, 0),# East Asian ideograph
- 0x234232: (0x9218, 0),# East Asian ideograph
- 0x295c3e: (0x9e37, 0),# East Asian ideograph
- 0x274233: (0x62e6, 0),# East Asian ideograph
- 0x6f4d6d: (0xb518, 0),# Korean hangul
- 0x6f4b2f: (0xaff0, 0),# Korean hangul
- 0x274234: (0x6400, 0),# East Asian ideograph
- 0x274235: (0x6444, 0),# East Asian ideograph
- 0x234236: (0x9208, 0),# East Asian ideograph
- 0x6f5a37: (0xcf20, 0),# Korean hangul
- 0x224237: (0x6a9b, 0),# East Asian ideograph
- 0x234238: (0x921c, 0),# East Asian ideograph
- 0x2d6079: (0x8218, 0),# East Asian ideograph
- 0x6f492b: (0xac83, 0),# Korean hangul
- 0x213353: (0x5206, 0),# East Asian ideograph
- 0x27423a: (0x6405, 0),# East Asian ideograph
- 0x287349: (0x7f30, 0),# East Asian ideograph
- 0x2d3464: (0x613d, 0),# East Asian ideograph
- 0x23423b: (0x9224, 0),# East Asian ideograph
- 0x2d3021: (0x5f0c, 0),# East Asian ideograph
- 0x6f5a38: (0xcf24, 0),# Korean hangul
- 0x335772: (0x8943, 0),# East Asian ideograph
- 0x293132: (0x89cc, 0),# East Asian ideograph
- 0x226635: (0x7911, 0),# East Asian ideograph
- 0x33423d: (0x53ce, 0),# East Asian ideograph
- 0x284d2b: (0x6d54, 0),# East Asian ideograph
- 0x2d486b: (0x6f82, 0),# East Asian ideograph
- 0x692469: (0x3089, 0),# Hiragana letter RA
- 0x6f576b: (0xc8f5, 0),# Korean hangul
- 0x2d3c7c: (0x83f4, 0),# East Asian ideograph
- 0x4b516d: (0x7dcf, 0),# East Asian ideograph
- 0x216237: (0x9d23, 0),# East Asian ideograph
- 0x224242: (0x6aa0, 0),# East Asian ideograph
- 0x6f4d70: (0xb524, 0),# Korean hangul
- 0x234243: (0x9212, 0),# East Asian ideograph
- 0x69246a: (0x308a, 0),# Hiragana letter RI
- 0x334244: (0x6559, 0),# East Asian ideograph
- 0x4b4a3e: (0x7235, 0),# East Asian ideograph
- 0x2f5e7d: (0x6641, 0),# East Asian ideograph
- 0x393577: (0x9fa2, 0),# East Asian ideograph
- 0x6f5b62: (0xd1f8, 0),# Korean hangul
- 0x214247: (0x6557, 0),# East Asian ideograph
- 0x6f4d71: (0xb525, 0),# Korean hangul
- 0x6f4c7a: (0xb2fa, 0),# Korean hangul
- 0x234248: (0x91ff, 0),# East Asian ideograph
- 0x285323: (0x8367, 0),# East Asian ideograph
- 0x69246b: (0x308b, 0),# Hiragana letter RU
- 0x217345: (0x5699, 0),# East Asian ideograph
- 0x224249: (0x6a9e, 0),# East Asian ideograph
- 0x22424a: (0x6a87, 0),# East Asian ideograph
- 0x6f5a3b: (0xcf2f, 0),# Korean hangul
- 0x22424b: (0x6a8e, 0),# East Asian ideograph
- 0x23424e: (0x9206, 0),# East Asian ideograph
- 0x27424f: (0x542f, 0),# East Asian ideograph
- 0x6f5a3c: (0xcf30, 0),# Korean hangul
- 0x21623a: (0x9d1b, 0),# East Asian ideograph
- 0x224251: (0x6aab, 0),# East Asian ideograph
- 0x6f4d73: (0xb528, 0),# Korean hangul
- 0x234252: (0x9249, 0),# East Asian ideograph
- 0x6f4930: (0xac89, 0),# Korean hangul
- 0x394243: (0x4ff2, 0),# East Asian ideograph
- 0x705f61: (0x54da, 0),# East Asian ideograph
- 0x234254: (0x924d, 0),# East Asian ideograph
- 0x6f556b: (0xc606, 0),# Korean hangul
- 0x224255: (0x6ac8, 0),# East Asian ideograph
- 0x224864: (0x6d19, 0),# East Asian ideograph
- 0x4c4177: (0x8223, 0),# East Asian ideograph
- 0x274256: (0x655b, 0),# East Asian ideograph
- 0x6f4d74: (0xb529, 0),# Korean hangul
- 0x224257: (0x6aae, 0),# East Asian ideograph
- 0x6f4931: (0xac8a, 0),# Korean hangul
- 0x6f497c: (0xad90, 0),# Korean hangul
- 0x234258: (0x923a, 0),# East Asian ideograph
- 0x2d346a: (0x5918, 0),# East Asian ideograph
- 0x6f4879: (0xac30, 0),# Korean hangul
- 0x2d3c7d: (0x53a2, 0),# East Asian ideograph
- 0x2d4f3e: (0x7a3e, 0),# East Asian ideograph
- 0x4d4134: (0x919b, 0),# East Asian ideograph
- 0x23425c: (0x922e, 0),# East Asian ideograph
- 0x6f4932: (0xac8b, 0),# Korean hangul
- 0x22425d: (0x6abf, 0),# East Asian ideograph
- 0x2d5241: (0x7fa3, 0),# East Asian ideograph
- 0x6f5a3f: (0xcf58, 0),# Korean hangul
- 0x23425f: (0x9233, 0),# East Asian ideograph
- 0x294260: (0x94b7, 0),# East Asian ideograph
- 0x234261: (0x9266, 0),# East Asian ideograph
- 0x6f4933: (0xac8c, 0),# Korean hangul
- 0x214263: (0x65ac, 0),# East Asian ideograph
- 0x29446d: (0x951b, 0),# East Asian ideograph
- 0x6f5a40: (0xcf5c, 0),# Korean hangul
- 0x224264: (0x6aca, 0),# East Asian ideograph
- 0x224867: (0x6d0e, 0),# East Asian ideograph
- 0x4b3938: (0x5942, 0),# East Asian ideograph
- 0x214266: (0x65b7, 0),# East Asian ideograph
- 0x6f4934: (0xac90, 0),# Korean hangul
- 0x22375b: (0x65fb, 0),# East Asian ideograph
- 0x4b4a45: (0x5c13, 0),# East Asian ideograph
- 0x234268: (0x9235, 0),# East Asian ideograph
- 0x4b4b77: (0x4ec0, 0),# East Asian ideograph
- 0x6f5a41: (0xcf64, 0),# Korean hangul
- 0x4b5b52: (0x8f42, 0),# East Asian ideograph
- 0x6f4d78: (0xb530, 0),# Korean hangul
- 0x23426b: (0x9250, 0),# East Asian ideograph
- 0x692472: (0x3092, 0),# Hiragana letter WO
- 0x21335d: (0x5228, 0),# East Asian ideograph
- 0x22375c: (0x65fc, 0),# East Asian ideograph
- 0x23426c: (0x926b, 0),# East Asian ideograph
- 0x23426d: (0x9239, 0),# East Asian ideograph
- 0x6f556c: (0xc607, 0),# Korean hangul
- 0x6f5a42: (0xcf65, 0),# Korean hangul
- 0x6f4b6c: (0xb0e5, 0),# Korean hangul
- 0x23426f: (0x926d, 0),# East Asian ideograph
- 0x234270: (0x926c, 0),# East Asian ideograph
- 0x6f4936: (0xac9c, 0),# Korean hangul
- 0x234271: (0x924f, 0),# East Asian ideograph
- 0x2d4272: (0x65e3, 0),# East Asian ideograph
- 0x6f5c3e: (0xd3ed, 0),# Korean hangul
- 0x2d3c7e: (0x53a0, 0),# East Asian ideograph
- 0x6f5a43: (0xcf67, 0),# Korean hangul
- 0x294274: (0x94bf, 0),# East Asian ideograph
- 0x6f4d7a: (0xb532, 0),# Korean hangul
- 0x6f4937: (0xac9f, 0),# Korean hangul
- 0x234277: (0x9260, 0),# East Asian ideograph
- 0x2d302d: (0x4e17, 0),# East Asian ideograph
- 0x6f4d62: (0xb4dc, 0),# Korean hangul
- 0x6f5a44: (0xcf69, 0),# Korean hangul
- 0x234c6a: (0x9741, 0),# East Asian ideograph
- 0x4b5b55: (0x8ee2, 0),# East Asian ideograph
- 0x224279: (0x6ae6, 0),# East Asian ideograph
- 0x216d24: (0x531c, 0),# East Asian ideograph
- 0x6f4d7b: (0xb534, 0),# Korean hangul
- 0x69562e: (0x5cbb, 0),# East Asian ideograph
- 0x6f4938: (0xaca0, 0),# Korean hangul
- 0x6f5b3a: (0xd1a4, 0),# Korean hangul
- 0x275823: (0x88ad, 0),# East Asian ideograph
- 0x29424b: (0x94a3, 0),# East Asian ideograph
- 0x235d77: (0x9ead, 0),# East Asian ideograph
- 0x6f4f75: (0xb9e5, 0),# Korean hangul
- 0x213d6a: (0x5f97, 0),# East Asian ideograph
- 0x2e3870: (0x714a, 0),# East Asian ideograph
- 0x6f5a45: (0xcf70, 0),# Korean hangul
- 0x22486c: (0x6d00, 0),# East Asian ideograph
- 0x234c6b: (0x9747, 0),# East Asian ideograph
- 0x23427e: (0x9236, 0),# East Asian ideograph
- 0x6f4d7c: (0xb537, 0),# Korean hangul
- 0x6f4b32: (0xb00c, 0),# Korean hangul
- 0x222d2a: (0x610a, 0),# East Asian ideograph
- 0x22442a: (0x6b35, 0),# East Asian ideograph
- 0x216d2e: (0x532d, 0),# East Asian ideograph
- 0x6f4d7d: (0xb538, 0),# Korean hangul
- 0x6f493a: (0xaca8, 0),# Korean hangul
- 0x213362: (0x5230, 0),# East Asian ideograph
- 0x276822: (0x507b, 0),# East Asian ideograph
- 0x2d3473: (0x5374, 0),# East Asian ideograph
- 0x6f5a47: (0xcf74, 0),# Korean hangul
- 0x4c3f7a: (0x6922, 0),# East Asian ideograph
- 0x29535a: (0x9a9f, 0),# East Asian ideograph
- 0x222d32: (0x6112, 0),# East Asian ideograph
- 0x4b5b58: (0x8ee3, 0),# East Asian ideograph
- 0x4b393f: (0x5333, 0),# East Asian ideograph
- 0x216d33: (0x5330, 0),# East Asian ideograph
- 0x2d486e: (0x6f97, 0),# East Asian ideograph (not in Unicode)
- 0x282d34: (0x607d, 0),# East Asian ideograph
- 0x6f497e: (0xada4, 0),# Korean hangul
- 0x224e32: (0x6fca, 0),# East Asian ideograph
- 0x6f5c3f: (0xd3f0, 0),# Korean hangul
- 0x2f5a48: (0x9d44, 0),# East Asian ideograph
- 0x22486f: (0x6d33, 0),# East Asian ideograph
- 0x4c5f58: (0x7640, 0),# East Asian ideograph
- 0x6f4e2f: (0xb561, 0),# Korean hangul
- 0x6f493c: (0xacaa, 0),# Korean hangul
- 0x39424f: (0x5554, 0),# East Asian ideograph
- 0x2d3032: (0x7add, 0),# East Asian ideograph
- 0x33392f: (0x9029, 0),# East Asian ideograph
- 0x284f5d: (0x6ca3, 0),# East Asian ideograph
- 0x22442d: (0x6b3b, 0),# East Asian ideograph
- 0x216d3e: (0x533d, 0),# East Asian ideograph
- 0x51513b: (0x7e9f, 0),# East Asian ideograph
- 0x222d3f: (0x6121, 0),# East Asian ideograph
- 0x6f4f76: (0xb9e8, 0),# Korean hangul
- 0x292768: (0x8572, 0),# East Asian ideograph
- 0x274b5f: (0x7410, 0),# East Asian ideograph
- 0x6f5a4a: (0xcf85, 0),# Korean hangul
- 0x232d41: (0x8841, 0),# East Asian ideograph
- 0x2d4277: (0x65ee, 0),# East Asian ideograph
- 0x293a6b: (0x8e7f, 0),# East Asian ideograph
- 0x232a57: (0x873e, 0),# East Asian ideograph
- 0x6f4b33: (0xb00d, 0),# Korean hangul
- 0x222d43: (0x6106, 0),# East Asian ideograph
- 0x294251: (0x94c8, 0),# East Asian ideograph
- 0x3b2d44: (0x8842, 0),# East Asian ideograph
- 0x295053: (0x98d9, 0),# East Asian ideograph
- 0x287178: (0x7ef6, 0),# East Asian ideograph
- 0x226d47: (0x7c00, 0),# East Asian ideograph
- 0x2d4141: (0x63b2, 0),# East Asian ideograph
- 0x6f493f: (0xacb0, 0),# Korean hangul
- 0x6f547e: (0xc548, 0),# Korean hangul
- 0x294252: (0x94c9, 0),# East Asian ideograph
- 0x296028: (0x9f86, 0),# East Asian ideograph
- 0x2f3833: (0x8d91, 0),# East Asian ideograph
- 0x216d4b: (0x535d, 0),# East Asian ideograph
- 0x6f4940: (0xacb8, 0),# Korean hangul
- 0x284f61: (0x6ee0, 0),# East Asian ideograph
- 0x6f5c40: (0xd3f4, 0),# Korean hangul
- 0x295360: (0x9a98, 0),# East Asian ideograph
- 0x4b5b5e: (0x5f01, 0),# East Asian ideograph
- 0x232d51: (0x884a, 0),# East Asian ideograph
- 0x6f4941: (0xacb9, 0),# Korean hangul
- 0x294254: (0x94cb, 0),# East Asian ideograph
- 0x286d54: (0x7b5a, 0),# East Asian ideograph
- 0x222d56: (0x53af, 0),# East Asian ideograph
- 0x232d57: (0x8850, 0),# East Asian ideograph
- 0x21336a: (0x524c, 0),# East Asian ideograph
- 0x294255: (0x94ca, 0),# East Asian ideograph
- 0x29602b: (0x9f85, 0),# East Asian ideograph
- 0x6f4f77: (0xb9ec, 0),# Korean hangul
- 0x21313a: (0x4f38, 0),# East Asian ideograph
- 0x3f462b: (0x5e30, 0),# East Asian ideograph
- 0x274b64: (0x7391, 0),# East Asian ideograph
- 0x6f5a4f: (0xcfb0, 0),# Korean hangul
- 0x696d5a: (0x8f4c, 0),# East Asian ideograph
- 0x29327e: (0x8c07, 0),# East Asian ideograph
- 0x6f4b34: (0xb010, 0),# Korean hangul
- 0x6f4943: (0xacbc, 0),# Korean hangul
- 0x21336b: (0x524b, 0),# East Asian ideograph
- 0x4d4862: (0x9229, 0),# East Asian ideograph
- 0x222d5e: (0x6137, 0),# East Asian ideograph
- 0x28717d: (0x7efa, 0),# East Asian ideograph
- 0x6f5a50: (0xcfc4, 0),# Korean hangul
- 0x6f4b75: (0xb113, 0),# Korean hangul
- 0x6f4a5a: (0xaebd, 0),# Korean hangul
- 0x6f4944: (0xacbd, 0),# Korean hangul
- 0x21735b: (0x56c3, 0),# East Asian ideograph
- 0x225541: (0x723f, 0),# East Asian ideograph
- 0x226d63: (0x7c20, 0),# East Asian ideograph
- 0x2d4147: (0x6271, 0),# East Asian ideograph
- 0x216d66: (0x5393, 0),# East Asian ideograph
- 0x6f4945: (0xacc1, 0),# Korean hangul
- 0x21336d: (0x5247, 0),# East Asian ideograph
- 0x294258: (0x94b0, 0),# East Asian ideograph
- 0x6f4b22: (0xafb8, 0),# Korean hangul
- 0x335941: (0x54d7, 0),# East Asian ideograph
- 0x274b67: (0x73af, 0),# East Asian ideograph
- 0x234c78: (0x975d, 0),# East Asian ideograph
- 0x234835: (0x942b, 0),# East Asian ideograph
- 0x6f5052: (0xbb4f, 0),# Korean hangul
- 0x276d6d: (0x538d, 0),# East Asian ideograph
- 0x274b68: (0x7477, 0),# East Asian ideograph
- 0x6f5a53: (0xcfe4, 0),# Korean hangul
- 0x225235: (0x712f, 0),# East Asian ideograph
- 0x294f23: (0x9880, 0),# East Asian ideograph
- 0x4b546d: (0x82d3, 0),# East Asian ideograph (variant of 21546D which maps to 82D3)
- 0x6f4c7b: (0xb2fb, 0),# Korean hangul
- 0x6f4947: (0xacc4, 0),# Korean hangul
- 0x21336f: (0x5256, 0),# East Asian ideograph
- 0x225544: (0x7242, 0),# East Asian ideograph
- 0x6f5724: (0xc7a5, 0),# Korean hangul
- 0x274932: (0x6cfb, 0),# East Asian ideograph
- 0x4b682e: (0x4ec2, 0),# East Asian ideograph
- 0x274b69: (0x73ba, 0),# East Asian ideograph
- 0x6f5a54: (0xcfe8, 0),# Korean hangul
- 0x234837: (0x9441, 0),# East Asian ideograph
- 0x213d3f: (0x5f17, 0),# East Asian ideograph
- 0x4d2d75: (0x8872, 0),# East Asian ideograph (variant of 232D75 which maps to 8872)
- 0x213370: (0x525b, 0),# East Asian ideograph
- 0x69252c: (0x30ac, 0),# Katakana letter GA
- 0x225821: (0x734b, 0),# East Asian ideograph
- 0x282d77: (0x60ad, 0),# East Asian ideograph
- 0x215822: (0x896f, 0),# East Asian ideograph
- 0x6f5a55: (0xcff0, 0),# Korean hangul
- 0x6f557d: (0xc63b, 0),# Korean hangul
- 0x234c7b: (0x975f, 0),# East Asian ideograph
- 0x222d79: (0x6164, 0),# East Asian ideograph
- 0x4b5824: (0x897e, 0),# East Asian ideograph
- 0x696d7a: (0x9027, 0),# East Asian ideograph
- 0x6f4949: (0xacd7, 0),# Korean hangul
- 0x215825: (0x8981, 0),# East Asian ideograph
- 0x29425c: (0x94cc, 0),# East Asian ideograph
- 0x695c29: (0x6925, 0),# East Asian ideograph
- 0x4b5826: (0x8983, 0),# East Asian ideograph (variant of 215826 which maps to 8983)
- 0x232d7c: (0x8879, 0),# East Asian ideograph
- 0x295827: (0x9cb2, 0),# East Asian ideograph
- 0x6f5a56: (0xcff3, 0),# Korean hangul
- 0x295369: (0x9a7a, 0),# East Asian ideograph
- 0x215828: (0x898b, 0),# East Asian ideograph
- 0x225829: (0x736c, 0),# East Asian ideograph
- 0x6f494a: (0xace0, 0),# Korean hangul
- 0x21582a: (0x8993, 0),# East Asian ideograph
- 0x21582b: (0x8996, 0),# East Asian ideograph
- 0x2d5259: (0x98dc, 0),# East Asian ideograph
- 0x21582c: (0x89aa, 0),# East Asian ideograph
- 0x284f6b: (0x6f13, 0),# East Asian ideograph
- 0x29536a: (0x9a9d, 0),# East Asian ideograph
- 0x21582d: (0x89a6, 0),# East Asian ideograph
- 0x27582e: (0x89ca, 0),# East Asian ideograph
- 0x6f494b: (0xace1, 0),# Korean hangul
- 0x22582f: (0x736f, 0),# East Asian ideograph
- 0x275830: (0x89c9, 0),# East Asian ideograph
- 0x215831: (0x89bd, 0),# East Asian ideograph
- 0x6f5a58: (0xcffc, 0),# Korean hangul
- 0x275832: (0x89c2, 0),# East Asian ideograph
- 0x222871: (0x5ee8, 0),# East Asian ideograph
- 0x22443c: (0x6b43, 0),# East Asian ideograph
- 0x33483b: (0x6cdd, 0),# East Asian ideograph
- 0x2d5833: (0x752a, 0),# East Asian ideograph
- 0x276037: (0x9875, 0),# East Asian ideograph
- 0x6f494c: (0xace4, 0),# Korean hangul
- 0x215834: (0x89e3, 0),# East Asian ideograph
- 0x29425f: (0x94b6, 0),# East Asian ideograph
- 0x275835: (0x89de, 0),# East Asian ideograph
- 0x274933: (0x6e0e, 0),# East Asian ideograph
- 0x235b52: (0x9d6f, 0),# East Asian ideograph
- 0x215836: (0x89f8, 0),# East Asian ideograph
- 0x455837: (0x8ba0, 0),# East Asian ideograph
- 0x4c5f69: (0x75eb, 0),# East Asian ideograph
- 0x275838: (0x8ba1, 0),# East Asian ideograph
- 0x6f4b36: (0xb01c, 0),# Korean hangul
- 0x6f494d: (0xace7, 0),# Korean hangul
- 0x275839: (0x8ba2, 0),# East Asian ideograph
- 0x4d2962: (0x86c9, 0),# East Asian ideograph (variant of 232962 which maps to 86C9)
- 0x223331: (0x640c, 0),# East Asian ideograph
- 0x22583b: (0x7381, 0),# East Asian ideograph
- 0x6f5a5a: (0xd018, 0),# Korean hangul
- 0x2e7431: (0x7f48, 0),# East Asian ideograph
- 0x4b625c: (0x9eb8, 0),# East Asian ideograph (variant of 27625C which maps to 9EB8)
- 0x27583c: (0x8bb0, 0),# East Asian ideograph
- 0x22443e: (0x6b48, 0),# East Asian ideograph
- 0x23483d: (0x9467, 0),# East Asian ideograph
- 0x27583d: (0x8ba8, 0),# East Asian ideograph
- 0x273a63: (0x5b6a, 0),# East Asian ideograph
- 0x6f494e: (0xace8, 0),# Korean hangul
- 0x27583e: (0x8ba7, 0),# East Asian ideograph
- 0x294261: (0x94b2, 0),# East Asian ideograph
- 0x22583f: (0x7388, 0),# East Asian ideograph
- 0x2d525d: (0x6537, 0),# East Asian ideograph
- 0x224c3f: (0x6f26, 0),# East Asian ideograph
- 0x6f5a5b: (0xd02d, 0),# Korean hangul
- 0x215841: (0x8a16, 0),# East Asian ideograph
- 0x215842: (0x8a17, 0),# East Asian ideograph
- 0x6f494f: (0xacea, 0),# Korean hangul
- 0x275843: (0x8bad, 0),# East Asian ideograph
- 0x275844: (0x8bbf, 0),# East Asian ideograph
- 0x233732: (0x8d0c, 0),# East Asian ideograph
- 0x2d3045: (0x4e57, 0),# East Asian ideograph
- 0x275845: (0x8bc0, 0),# East Asian ideograph
- 0x6f5a5c: (0xd034, 0),# Korean hangul
- 0x225846: (0x7395, 0),# East Asian ideograph
- 0x294f2c: (0x988f, 0),# East Asian ideograph
- 0x215847: (0x8a25, 0),# East Asian ideograph
- 0x6f4950: (0xacec, 0),# Korean hangul
- 0x225848: (0x7397, 0),# East Asian ideograph
- 0x6f5739: (0xc810, 0),# Korean hangul
- 0x285c3a: (0x748e, 0),# East Asian ideograph
- 0x6f5054: (0xbb54, 0),# Korean hangul
- 0x215849: (0x8a2d, 0),# East Asian ideograph
- 0x21584a: (0x8a1b, 0),# East Asian ideograph
- 0x6f5a5d: (0xd035, 0),# Korean hangul
- 0x295370: (0x9a9c, 0),# East Asian ideograph
- 0x286d47: (0x7ba6, 0),# East Asian ideograph
- 0x21584b: (0x8a1f, 0),# East Asian ideograph
- 0x21584c: (0x8a3b, 0),# East Asian ideograph
- 0x2d4153: (0x64e3, 0),# East Asian ideograph
- 0x276038: (0x9876, 0),# East Asian ideograph
- 0x6f4951: (0xacef, 0),# Korean hangul
- 0x22584d: (0x7394, 0),# East Asian ideograph
- 0x294264: (0x94ba, 0),# East Asian ideograph
- 0x21584e: (0x8a55, 0),# East Asian ideograph
- 0x21584f: (0x8a5e, 0),# East Asian ideograph
- 0x4b576c: (0x523e, 0),# East Asian ideograph
- 0x27486d: (0x6da6, 0),# East Asian ideograph
- 0x275851: (0x8bc2, 0),# East Asian ideograph
- 0x6f4b37: (0xb01d, 0),# Korean hangul
- 0x6f4952: (0xacf0, 0),# Korean hangul
- 0x225852: (0x73a6, 0),# East Asian ideograph
- 0x227768: (0x80b8, 0),# East Asian ideograph
- 0x223336: (0x6415, 0),# East Asian ideograph
- 0x275854: (0x8bc8, 0),# East Asian ideograph
- 0x6f5a5f: (0xd050, 0),# Korean hangul
- 0x275855: (0x8bcb, 0),# East Asian ideograph
- 0x275856: (0x8bc9, 0),# East Asian ideograph
- 0x6f4a5d: (0xaec4, 0),# Korean hangul
- 0x215857: (0x8a3a, 0),# East Asian ideograph
- 0x21736a: (0x56d4, 0),# East Asian ideograph
- 0x33333c: (0x6c37, 0),# East Asian ideograph
- 0x215858: (0x8a6b, 0),# East Asian ideograph
- 0x4b4621: (0x6b53, 0),# East Asian ideograph
- 0x235859: (0x9c12, 0),# East Asian ideograph
- 0x6f5a60: (0xd06c, 0),# Korean hangul
- 0x27585a: (0x8be6, 0),# East Asian ideograph
- 0x27585b: (0x8bd5, 0),# East Asian ideograph
- 0x45456d: (0x6a10, 0),# East Asian ideograph
- 0x2d5f2c: (0x5826, 0),# East Asian ideograph
- 0x6f4954: (0xacf3, 0),# Korean hangul
- 0x21585c: (0x8a69, 0),# East Asian ideograph
- 0x6f4b4a: (0xb08f, 0),# Korean hangul
- 0x225551: (0x724f, 0),# East Asian ideograph
- 0x21585d: (0x8a70, 0),# East Asian ideograph
- 0x29443e: (0x94e4, 0),# East Asian ideograph
- 0x21585e: (0x8a63, 0),# East Asian ideograph
- 0x6f5a61: (0xd070, 0),# Korean hangul
- 0x21625f: (0x9ebb, 0),# East Asian ideograph
- 0x21585f: (0x8a7c, 0),# East Asian ideograph
- 0x215860: (0x8aa0, 0),# East Asian ideograph
- 0x6f4e30: (0xb5a0, 0),# Korean hangul
- 0x2d5f2d: (0x964f, 0),# East Asian ideograph
- 0x275861: (0x5938, 0),# East Asian ideograph
- 0x275840: (0x8baf, 0),# East Asian ideograph
- 0x6f5055: (0xbb58, 0),# Korean hangul
- 0x215862: (0x8a85, 0),# East Asian ideograph
- 0x6f5441: (0xc329, 0),# Korean hangul
- 0x225863: (0x73a0, 0),# East Asian ideograph
- 0x6f5a62: (0xd074, 0),# Korean hangul
- 0x695375: (0x56ce, 0),# East Asian ideograph
- 0x6f5864: (0xcb18, 0),# Korean hangul
- 0x215865: (0x8a62, 0),# East Asian ideograph
- 0x2d575b: (0x886e, 0),# East Asian ideograph
- 0x3f4956: (0x7832, 0),# East Asian ideograph
- 0x215866: (0x8a71, 0),# East Asian ideograph
- 0x285c40: (0x74d2, 0),# East Asian ideograph
- 0x215867: (0x8a6e, 0),# East Asian ideograph
- 0x295132: (0x997d, 0),# East Asian ideograph
- 0x233739: (0x8d11, 0),# East Asian ideograph
- 0x2d5265: (0x79d0, 0),# East Asian ideograph
- 0x225868: (0x73cf, 0),# East Asian ideograph
- 0x6f5a63: (0xd07c, 0),# Korean hangul
- 0x282d5e: (0x607a, 0),# East Asian ideograph
- 0x6f4a25: (0xadd0, 0),# Korean hangul
- 0x275869: (0x8bf4, 0),# East Asian ideograph
- 0x21586a: (0x8aa6, 0),# East Asian ideograph
- 0x6f4b38: (0xb028, 0),# Korean hangul
- 0x21586b: (0x8aa1, 0),# East Asian ideograph
- 0x215155: (0x7dd6, 0),# East Asian ideograph
- 0x22333b: (0x6422, 0),# East Asian ideograph
- 0x27586d: (0x5fd7, 0),# East Asian ideograph
- 0x22456f: (0x6be7, 0),# East Asian ideograph
- 0x23586e: (0x9c23, 0),# East Asian ideograph
- 0x27586f: (0x8bec, 0),# East Asian ideograph
- 0x6f4f36: (0xb839, 0),# Korean hangul
- 0x6f4a5e: (0xaecc, 0),# Korean hangul
- 0x215870: (0x8a8d, 0),# East Asian ideograph
- 0x21736f: (0x56e1, 0),# East Asian ideograph
- 0x294469: (0x94fc, 0),# East Asian ideograph
- 0x215871: (0x8aa4, 0),# East Asian ideograph (variant of 4B5871 which maps to 8AA4)
- 0x23373b: (0x8d12, 0),# East Asian ideograph
- 0x2d5267: (0x79cf, 0),# East Asian ideograph
- 0x215872: (0x8aa8, 0),# East Asian ideograph
- 0x2d4e24: (0x6998, 0),# East Asian ideograph
- 0x215873: (0x8aa5, 0),# East Asian ideograph
- 0x217e60: (0x5bc9, 0),# East Asian ideograph
- 0x215874: (0x8a98, 0),# East Asian ideograph
- 0x4b5d65: (0x8217, 0),# East Asian ideograph
- 0x6f4959: (0xacfd, 0),# Korean hangul
- 0x215875: (0x8a91, 0),# East Asian ideograph
- 0x6f5130: (0xbc9a, 0),# Korean hangul
- 0x275876: (0x8c0a, 0),# East Asian ideograph
- 0x6f4b68: (0xb0c9, 0),# Korean hangul
- 0x215877: (0x8ac4, 0),# East Asian ideograph
- 0x6f5a66: (0xd0a4, 0),# Korean hangul
- 0x4b5176: (0x7e04, 0),# East Asian ideograph
- 0x295379: (0x9a96, 0),# East Asian ideograph
- 0x235878: (0x9c21, 0),# East Asian ideograph
- 0x234323: (0x924e, 0),# East Asian ideograph
- 0x275879: (0x8c08, 0),# East Asian ideograph
- 0x6f495a: (0xad00, 0),# Korean hangul
- 0x27587a: (0x8bf7, 0),# East Asian ideograph
- 0x224325: (0x6acc, 0),# East Asian ideograph
- 0x21587b: (0x8af8, 0),# East Asian ideograph
- 0x23373d: (0x8d14, 0),# East Asian ideograph
- 0x21587c: (0x8ab2, 0),# East Asian ideograph
- 0x234327: (0x9256, 0),# East Asian ideograph
- 0x29537a: (0x9aa2, 0),# East Asian ideograph
- 0x21587d: (0x8abf, 0),# East Asian ideograph
- 0x224328: (0x6ad1, 0),# East Asian ideograph
- 0x21587e: (0x8ac9, 0),# East Asian ideograph
- 0x4c7d4d: (0x8343, 0),# East Asian ideograph
- 0x2d4329: (0x668e, 0),# East Asian ideograph
- 0x6f495b: (0xad04, 0),# Korean hangul
- 0x6f5d31: (0xd611, 0),# Korean hangul
- 0x6f4f7c: (0xb9f9, 0),# Korean hangul
- 0x23432b: (0x925a, 0),# East Asian ideograph
- 0x2d3051: (0x5f0d, 0),# East Asian ideograph
- 0x6f5a68: (0xd0a8, 0),# Korean hangul
- 0x216266: (0x9ed1, 0),# East Asian ideograph
- 0x27432d: (0x65f6, 0),# East Asian ideograph
- 0x4b5736: (0x877f, 0),# East Asian ideograph
- 0x23432e: (0x9241, 0),# East Asian ideograph
- 0x6f495c: (0xad0c, 0),# Korean hangul
- 0x285252: (0x709c, 0),# East Asian ideograph
- 0x275847: (0x8bb7, 0),# East Asian ideograph
- 0x23432f: (0x9283, 0),# East Asian ideograph
- 0x335958: (0x8c4a, 0),# East Asian ideograph
- 0x394330: (0x6644, 0),# East Asian ideograph
- 0x2d526b: (0x7085, 0),# East Asian ideograph
- 0x234331: (0x92a5, 0),# East Asian ideograph
- 0x213065: (0x4eca, 0),# East Asian ideograph
- 0x6f5626: (0xc653, 0),# Korean hangul
- 0x274332: (0x663c, 0),# East Asian ideograph
- 0x234333: (0x9282, 0),# East Asian ideograph
- 0x2d5f35: (0x78d2, 0),# East Asian ideograph
- 0x6f495d: (0xad0d, 0),# Korean hangul
- 0x275848: (0x8bb8, 0),# East Asian ideograph
- 0x224334: (0x6acd, 0),# East Asian ideograph
- 0x234335: (0x92a8, 0),# East Asian ideograph
- 0x2d3053: (0x4e3c, 0),# East Asian ideograph
- 0x29572b: (0x9c90, 0),# East Asian ideograph
- 0x6f5a6a: (0xd0b4, 0),# Korean hangul
- 0x224337: (0x6aec, 0),# East Asian ideograph
- 0x215e25: (0x938a, 0),# East Asian ideograph
- 0x234338: (0x92a4, 0),# East Asian ideograph
- 0x6f495e: (0xad0f, 0),# Korean hangul
- 0x224339: (0x6af3, 0),# East Asian ideograph
- 0x22433a: (0x6ae7, 0),# East Asian ideograph
- 0x2d433b: (0x6662, 0),# East Asian ideograph
- 0x226225: (0x7735, 0),# East Asian ideograph
- 0x4b5b29: (0x8e8d, 0),# East Asian ideograph
- 0x6f495f: (0xad11, 0),# Korean hangul
- 0x23433e: (0x9276, 0),# East Asian ideograph
- 0x227775: (0x6711, 0),# East Asian ideograph
- 0x22433f: (0x6aeb, 0),# East Asian ideograph
- 0x224340: (0x6aea, 0),# East Asian ideograph
- 0x21626a: (0x9ede, 0),# East Asian ideograph
- 0x274341: (0x6655, 0),# East Asian ideograph
- 0x6f5428: (0xc2eb, 0),# Korean hangul
- 0x234342: (0x9288, 0),# East Asian ideograph
- 0x27603b: (0x987a, 0),# East Asian ideograph
- 0x6f4960: (0xad18, 0),# Korean hangul
- 0x274343: (0x7545, 0),# East Asian ideograph
- 0x6f4f7d: (0xb9fa, 0),# Korean hangul
- 0x223344: (0x6430, 0),# East Asian ideograph
- 0x224344: (0x6af1, 0),# East Asian ideograph
- 0x4c6c46: (0x7b9f, 0),# East Asian ideograph
- 0x234345: (0x928e, 0),# East Asian ideograph
- 0x6f562a: (0xc65d, 0),# Korean hangul
- 0x234346: (0x92a0, 0),# East Asian ideograph
- 0x4b7954: (0x5968, 0),# East Asian ideograph
- 0x6f4b3a: (0xb045, 0),# Korean hangul
- 0x234347: (0x9277, 0),# East Asian ideograph
- 0x6f4961: (0xad19, 0),# Korean hangul
- 0x274348: (0x6653, 0),# East Asian ideograph
- 0x274349: (0x5386, 0),# East Asian ideograph (duplicate simplified)
- 0x6f564f: (0xc6e9, 0),# Korean hangul
- 0x224571: (0x6be8, 0),# East Asian ideograph
- 0x213066: (0x4ec1, 0),# East Asian ideograph
- 0x6f562b: (0xc660, 0),# Korean hangul
- 0x27434b: (0x66a7, 0),# East Asian ideograph
- 0x6f4962: (0xad1c, 0),# Korean hangul
- 0x27434d: (0x65f7, 0),# East Asian ideograph
- 0x6f4a60: (0xaecf, 0),# Korean hangul
- 0x4b335b: (0x522b, 0),# East Asian ideograph
- 0x23595e: (0x9c6e, 0),# East Asian ideograph
- 0x22434e: (0x6afd, 0),# East Asian ideograph
- 0x2d3058: (0x4e9c, 0),# East Asian ideograph
- 0x29434f: (0x94de, 0),# East Asian ideograph
- 0x6f562c: (0xc671, 0),# Korean hangul
- 0x224350: (0x6afa, 0),# East Asian ideograph
- 0x347d24: (0x83c7, 0),# East Asian ideograph
- 0x6f552e: (0xc561, 0),# Korean hangul
- 0x2d5f3b: (0x96a0, 0),# East Asian ideograph
- 0x6f4963: (0xad20, 0),# Korean hangul
- 0x6f5132: (0xbca1, 0),# Korean hangul
- 0x224352: (0x6b01, 0),# East Asian ideograph
- 0x4b4a74: (0x731c, 0),# East Asian ideograph (variant of 214A74 which maps to 731C)
- 0x234354: (0x927e, 0),# East Asian ideograph
- 0x6f5c47: (0xd45c, 0),# Korean hangul
- 0x274355: (0x4e66, 0),# East Asian ideograph
- 0x6f4964: (0xad28, 0),# Korean hangul
- 0x334357: (0x6702, 0),# East Asian ideograph
- 0x223348: (0x6435, 0),# East Asian ideograph
- 0x224358: (0x6b03, 0),# East Asian ideograph
- 0x224359: (0x6af8, 0),# East Asian ideograph
- 0x21605f: (0x98b6, 0),# East Asian ideograph
- 0x4d6047: (0x816d, 0),# East Asian ideograph
- 0x27435a: (0x4f1a, 0),# East Asian ideograph
- 0x23435b: (0x9291, 0),# East Asian ideograph
- 0x27603c: (0x987b, 0),# East Asian ideograph
- 0x6f4965: (0xad29, 0),# Korean hangul
- 0x6f4f7e: (0xba00, 0),# Korean hangul
- 0x23435d: (0x929b, 0),# East Asian ideograph
- 0x233748: (0x8d6c, 0),# East Asian ideograph
- 0x2d305b: (0x4ebe, 0),# East Asian ideograph
- 0x217573: (0x57d5, 0),# East Asian ideograph
- 0x6f562f: (0xc67c, 0),# Korean hangul
- 0x284b43: (0x8365, 0),# East Asian ideograph
- 0x22435f: (0x6b0d, 0),# East Asian ideograph
- 0x6f4b3b: (0xb048, 0),# Korean hangul
- 0x224360: (0x6b09, 0),# East Asian ideograph
- 0x355053: (0x98c8, 0),# East Asian ideograph
- 0x6f4966: (0xad2d, 0),# Korean hangul
- 0x224361: (0x6b0e, 0),# East Asian ideograph
- 0x225563: (0x726e, 0),# East Asian ideograph
- 0x234362: (0x927f, 0),# East Asian ideograph
- 0x69243c: (0x305c, 0),# Hiragana letter ZE
- 0x6f5630: (0xc680, 0),# Korean hangul
- 0x234364: (0x92a3, 0),# East Asian ideograph
- 0x6f4967: (0xad34, 0),# Korean hangul
- 0x275852: (0x8bcf, 0),# East Asian ideograph
- 0x214366: (0x6727, 0),# East Asian ideograph
- 0x224367: (0x6b11, 0),# East Asian ideograph
- 0x2d4e33: (0x78aa, 0),# East Asian ideograph
- 0x3f5631: (0x517f, 0),# East Asian ideograph
- 0x334369: (0x5932, 0),# East Asian ideograph
- 0x234857: (0x9464, 0),# East Asian ideograph
- 0x21436a: (0x672b, 0),# East Asian ideograph
- 0x293032: (0x7962, 0),# East Asian ideograph
- 0x6f4968: (0xad38, 0),# Korean hangul
- 0x275853: (0x8bc5, 0),# East Asian ideograph
- 0x33496a: (0x934a, 0),# East Asian ideograph
- 0x22436d: (0x6b19, 0),# East Asian ideograph
- 0x4b5179: (0x7f0b, 0),# East Asian ideograph
- 0x6f5632: (0xc68b, 0),# Korean hangul
- 0x23436f: (0x92d0, 0),# East Asian ideograph
- 0x6f4969: (0xad3c, 0),# Korean hangul
- 0x2d4370: (0x6736, 0),# East Asian ideograph
- 0x215167: (0x7e46, 0),# East Asian ideograph
- 0x234371: (0x92f1, 0),# East Asian ideograph
- 0x234372: (0x92df, 0),# East Asian ideograph
- 0x275528: (0x835a, 0),# East Asian ideograph
- 0x215e31: (0x93c8, 0),# East Asian ideograph
- 0x6f496a: (0xad44, 0),# Korean hangul
- 0x214375: (0x6750, 0),# East Asian ideograph
- 0x215168: (0x7e37, 0),# East Asian ideograph
- 0x6f572b: (0xc7bc, 0),# Korean hangul
- 0x234376: (0x92b6, 0),# East Asian ideograph
- 0x4b4638: (0x6bb1, 0),# East Asian ideograph
- 0x234377: (0x92c0, 0),# East Asian ideograph
- 0x6f5634: (0xc694, 0),# Korean hangul
- 0x6f4d2b: (0xb35b, 0),# Korean hangul
- 0x6f4b3c: (0xb04a, 0),# Korean hangul
- 0x234379: (0x92be, 0),# East Asian ideograph
- 0x6f572e: (0xc7c0, 0),# Korean hangul
- 0x2d3061: (0x4eb0, 0),# East Asian ideograph
- 0x6f5a78: (0xd0d4, 0),# Korean hangul
- 0x6f5635: (0xc695, 0),# Korean hangul
- 0x29437d: (0x94d8, 0),# East Asian ideograph
- 0x696e28: (0x9056, 0),# East Asian ideograph
- 0x4b5746: (0x8853, 0),# East Asian ideograph
- 0x23437e: (0x92d5, 0),# East Asian ideograph
- 0x2d3d2b: (0x5ebf, 0),# East Asian ideograph
- 0x6f4a62: (0xaed1, 0),# Korean hangul
- 0x276e2a: (0x53a3, 0),# East Asian ideograph
- 0x2d527b: (0x8074, 0),# East Asian ideograph
- 0x6f5a79: (0xd0d5, 0),# Korean hangul
- 0x282d74: (0x6004, 0),# East Asian ideograph
- 0x6f5636: (0xc698, 0),# Korean hangul
- 0x23485c: (0x9465, 0),# East Asian ideograph
- 0x226233: (0x774a, 0),# East Asian ideograph
- 0x6f496d: (0xad50, 0),# Korean hangul
- 0x6f5c49: (0xd478, 0),# Korean hangul
- 0x6f5840: (0xc9ed, 0),# Korean hangul
- 0x222e2f: (0x615c, 0),# East Asian ideograph
- 0x223351: (0x640a, 0),# East Asian ideograph
- 0x21317d: (0x501a, 0),# East Asian ideograph
- 0x6f5a7a: (0xd0dc, 0),# Korean hangul
- 0x2e7451: (0x7f58, 0),# East Asian ideograph
- 0x6f5637: (0xc6a5, 0),# Korean hangul
- 0x215e35: (0x93dd, 0),# East Asian ideograph
- 0x23485d: (0x9455, 0),# East Asian ideograph
- 0x6f4e31: (0xb5a1, 0),# Korean hangul
- 0x225e2c: (0x7590, 0),# East Asian ideograph
- 0x2d3d2d: (0x5396, 0),# East Asian ideograph
- 0x275859: (0x8be5, 0),# East Asian ideograph
- 0x21516c: (0x7e2b, 0),# East Asian ideograph
- 0x225128: (0x70e0, 0),# East Asian ideograph
- 0x6f5a7b: (0xd0dd, 0),# Korean hangul
- 0x275529: (0x830e, 0),# East Asian ideograph
- 0x22445f: (0x6b6e, 0),# East Asian ideograph
- 0x33485e: (0x67d2, 0),# East Asian ideograph
- 0x226235: (0x7743, 0),# East Asian ideograph
- 0x2d4171: (0x62ca, 0),# East Asian ideograph
- 0x6f496f: (0xad6d, 0),# Korean hangul
- 0x6f572c: (0xc7bd, 0),# Korean hangul
- 0x27493a: (0x6fd1, 0),# East Asian ideograph
- 0x23596b: (0x9c7a, 0),# East Asian ideograph
- 0x235b59: (0x9da9, 0),# East Asian ideograph
- 0x27474e: (0x6cfe, 0),# East Asian ideograph
- 0x6f5639: (0xc6a9, 0),# Korean hangul
- 0x215e37: (0x93d8, 0),# East Asian ideograph
- 0x222e3d: (0x61a2, 0),# East Asian ideograph
- 0x2d3d2f: (0x539b, 0),# East Asian ideograph
- 0x6f5946: (0xccd0, 0),# Korean hangul
- 0x345d6b: (0x756d, 0),# East Asian ideograph
- 0x285d6b: (0x7572, 0),# East Asian ideograph
- 0x222e40: (0x61a8, 0),# East Asian ideograph
- 0x6f563a: (0xc6b0, 0),# Korean hangul
- 0x4b3c2b: (0x67c3, 0),# East Asian ideograph (Version J extension)
- 0x6f4f37: (0xb840, 0),# Korean hangul
- 0x6f4971: (0xad73, 0),# Korean hangul
- 0x6f4a63: (0xaed8, 0),# Korean hangul
- 0x22512b: (0x70d4, 0),# East Asian ideograph
- 0x23552a: (0x9aef, 0),# East Asian ideograph
- 0x6f5a7e: (0xd0ec, 0),# Korean hangul
- 0x222e45: (0x6196, 0),# East Asian ideograph
- 0x6f563b: (0xc6b1, 0),# Korean hangul
- 0x6f4972: (0xad74, 0),# Korean hangul
- 0x6f563c: (0xc6b4, 0),# Korean hangul
- 0x234862: (0x946a, 0),# East Asian ideograph
- 0x282e4c: (0x6126, 0),# East Asian ideograph
- 0x2d5f4b: (0x96d1, 0),# East Asian ideograph
- 0x6f4973: (0xad75, 0),# Korean hangul
- 0x215171: (0x7e54, 0),# East Asian ideograph
- 0x6f563d: (0xc6b7, 0),# Korean hangul
- 0x215e3b: (0x93fd, 0),# East Asian ideograph
- 0x2d4176: (0x6483, 0),# East Asian ideograph
- 0x2d5f4c: (0x9dc4, 0),# East Asian ideograph
- 0x6f4974: (0xad76, 0),# Korean hangul
- 0x6f5d32: (0xd613, 0),# Korean hangul
- 0x282e52: (0x6003, 0),# East Asian ideograph
- 0x27493b: (0x6ca5, 0),# East Asian ideograph
- 0x233941: (0x8dfd, 0),# East Asian ideograph
- 0x6f563e: (0xc6b8, 0),# Korean hangul
- 0x4b5773: (0x7ed4, 0),# East Asian ideograph
- 0x213c23: (0x5d22, 0),# East Asian ideograph
- 0x286e56: (0x7ba8, 0),# East Asian ideograph
- 0x2d3d34: (0x5efe, 0),# East Asian ideograph
- 0x215173: (0x7e5e, 0),# East Asian ideograph
- 0x223359: (0x6407, 0),# East Asian ideograph
- 0x216e58: (0x535f, 0),# East Asian ideograph
- 0x707523: (0x9170, 0),# East Asian ideograph
- 0x6f4b77: (0xb119, 0),# Korean hangul
- 0x222e5a: (0x61cb, 0),# East Asian ideograph
- 0x213c24: (0x5d29, 0),# East Asian ideograph
- 0x33355c: (0x5449, 0),# East Asian ideograph
- 0x273648: (0x95ee, 0),# East Asian ideograph
- 0x282e5c: (0x603f, 0),# East Asian ideograph
- 0x27514c: (0x7eff, 0),# East Asian ideograph
- 0x6f5579: (0xc635, 0),# Korean hangul
- 0x6f5640: (0xc6ba, 0),# Korean hangul
- 0x6f5951: (0xcd5c, 0),# Korean hangul
- 0x4b397b: (0x5a2f, 0),# East Asian ideograph
- 0x29402c: (0x90d0, 0),# East Asian ideograph
- 0x22623d: (0x7760, 0),# East Asian ideograph
- 0x6f4977: (0xad7f, 0),# Korean hangul
- 0x6f5136: (0xbcb0, 0),# Korean hangul
- 0x216e61: (0x5414, 0),# East Asian ideograph
- 0x22335b: (0x643b, 0),# East Asian ideograph
- 0x6f4a2e: (0xadfc, 0),# Korean hangul
- 0x6f5641: (0xc6c0, 0),# Korean hangul
- 0x213c26: (0x5d19, 0),# East Asian ideograph
- 0x275863: (0x8be1, 0),# East Asian ideograph
- 0x695a7e: (0x66bc, 0),# East Asian ideograph
- 0x287e61: (0x82cc, 0),# East Asian ideograph
- 0x232e68: (0x88d2, 0),# East Asian ideograph
- 0x6f5642: (0xc6c1, 0),# Korean hangul
- 0x234868: (0x946b, 0),# East Asian ideograph
- 0x6f5429: (0xc2ec, 0),# Korean hangul
- 0x334425: (0x76c3, 0),# East Asian ideograph
- 0x6f4979: (0xad82, 0),# Korean hangul
- 0x296062: (0x9f9b, 0),# East Asian ideograph
- 0x22335d: (0x643f, 0),# East Asian ideograph
- 0x23375c: (0x8d7a, 0),# East Asian ideograph
- 0x6f5643: (0xc6c3, 0),# Korean hangul
- 0x213c28: (0x5d50, 0),# East Asian ideograph
- 0x216e6f: (0x541a, 0),# East Asian ideograph
- 0x6f497a: (0xad88, 0),# Korean hangul
- 0x275422: (0x810f, 0),# East Asian ideograph (duplicate simplified)
- 0x222e71: (0x61e0, 0),# East Asian ideograph
- 0x2d3e40: (0x6052, 0),# East Asian ideograph
- 0x6f5644: (0xc6c5, 0),# Korean hangul
- 0x28702e: (0x56e2, 0),# East Asian ideograph (duplicate simplified)
- 0x6f497b: (0xad8c, 0),# Korean hangul
- 0x6f4a65: (0xaef4, 0),# Korean hangul
- 0x455164: (0x53bf, 0),# East Asian ideograph
- 0x215921: (0x8ac2, 0),# East Asian ideograph
- 0x222e77: (0x61e5, 0),# East Asian ideograph
- 0x275922: (0x8c01, 0),# East Asian ideograph
- 0x275923: (0x8bde, 0),# East Asian ideograph
- 0x232652: (0x85a2, 0),# East Asian ideograph
- 0x6f552f: (0xc564, 0),# Korean hangul
- 0x216e79: (0x5454, 0),# East Asian ideograph
- 0x275924: (0x8bba, 0),# East Asian ideograph
- 0x235925: (0x9c32, 0),# East Asian ideograph
- 0x215926: (0x8afa, 0),# East Asian ideograph
- 0x275927: (0x8c0f, 0),# East Asian ideograph
- 0x216e7d: (0x543d, 0),# East Asian ideograph
- 0x235928: (0x9c48, 0),# East Asian ideograph
- 0x282e7e: (0x603c, 0),# East Asian ideograph
- 0x215929: (0x8ae7, 0),# East Asian ideograph
- 0x275868: (0x8bdf, 0),# East Asian ideograph
- 0x6f505d: (0xbbc8, 0),# Korean hangul
- 0x23592a: (0x9c33, 0),# East Asian ideograph
- 0x21592b: (0x8b00, 0),# East Asian ideograph
- 0x6f5b72: (0xd31d, 0),# Korean hangul
- 0x27592c: (0x8c12, 0),# East Asian ideograph
- 0x6f5647: (0xc6d0, 0),# Korean hangul
- 0x29416b: (0x948e, 0),# East Asian ideograph
- 0x213e47: (0x6064, 0),# East Asian ideograph
- 0x23486d: (0x9471, 0),# East Asian ideograph
- 0x27592e: (0x8bfa, 0),# East Asian ideograph
- 0x22592f: (0x73d4, 0),# East Asian ideograph
- 0x27493d: (0x6f47, 0),# East Asian ideograph
- 0x233761: (0x8d84, 0),# East Asian ideograph
- 0x215930: (0x8aed, 0),# East Asian ideograph
- 0x335a7b: (0x8e28, 0),# East Asian ideograph
- 0x275931: (0x8c24, 0),# East Asian ideograph
- 0x6f5648: (0xc6d4, 0),# Korean hangul
- 0x697174: (0x9ade, 0),# East Asian ideograph
- 0x235932: (0x9c35, 0),# East Asian ideograph
- 0x275933: (0x8c1c, 0),# East Asian ideograph
- 0x27586a: (0x8bf5, 0),# East Asian ideograph
- 0x215934: (0x8b1b, 0),# East Asian ideograph
- 0x215935: (0x8b0a, 0),# East Asian ideograph
- 0x225936: (0x73e7, 0),# East Asian ideograph
- 0x6f5649: (0xc6dc, 0),# Korean hangul
- 0x275937: (0x8a8a, 0),# East Asian ideograph
- 0x215938: (0x8b1d, 0),# East Asian ideograph
- 0x27586b: (0x8beb, 0),# East Asian ideograph
- 0x6f4a66: (0xaf0d, 0),# Korean hangul
- 0x282f66: (0x6217, 0),# East Asian ideograph
- 0x215939: (0x8b39, 0),# East Asian ideograph
- 0x22513a: (0x70d0, 0),# East Asian ideograph
- 0x21593a: (0x8b2c, 0),# East Asian ideograph
- 0x21593b: (0x8b28, 0),# East Asian ideograph
- 0x6f564a: (0xc6dd, 0),# Korean hangul
- 0x224471: (0x6b82, 0),# East Asian ideograph
- 0x21593c: (0x8b58, 0),# East Asian ideograph
- 0x27593d: (0x8c31, 0),# East Asian ideograph
- 0x6f5138: (0xbcb3, 0),# Korean hangul
- 0x27586c: (0x8bed, 0),# East Asian ideograph
- 0x27593e: (0x8c32, 0),# East Asian ideograph
- 0x22513b: (0x70c7, 0),# East Asian ideograph
- 0x22593f: (0x73e9, 0),# East Asian ideograph
- 0x274e5b: (0x77ff, 0),# East Asian ideograph
- 0x215940: (0x8b5a, 0),# East Asian ideograph
- 0x6f564b: (0xc6df, 0),# Korean hangul
- 0x395577: (0x854b, 0),# East Asian ideograph
- 0x213c30: (0x5dcd, 0),# East Asian ideograph
- 0x215942: (0x8b4f, 0),# East Asian ideograph
- 0x6f573b: (0xc813, 0),# Korean hangul
- 0x275943: (0x8bae, 0),# East Asian ideograph
- 0x22472c: (0x6c54, 0),# East Asian ideograph
- 0x22513c: (0x70da, 0),# East Asian ideograph
- 0x6f5944: (0xccbc, 0),# Korean hangul
- 0x2d435f: (0x6716, 0),# East Asian ideograph
- 0x6f5b73: (0xd31f, 0),# Korean hangul
- 0x235945: (0x9c51, 0),# East Asian ideograph
- 0x6f564c: (0xc6e0, 0),# Korean hangul
- 0x69255a: (0x30da, 0),# Katakana letter PE
- 0x213c31: (0x5dd2, 0),# East Asian ideograph
- 0x215947: (0x8b74, 0),# East Asian ideograph
- 0x215948: (0x8b77, 0),# East Asian ideograph
- 0x2e4c7b: (0x6e86, 0),# East Asian ideograph
- 0x22513d: (0x70c6, 0),# East Asian ideograph
- 0x235949: (0x9c63, 0),# East Asian ideograph
- 0x333323: (0x4e21, 0),# East Asian ideograph
- 0x22594a: (0x73f8, 0),# East Asian ideograph
- 0x6f564d: (0xc6e1, 0),# Korean hangul
- 0x6f5275: (0xc0db, 0),# Korean hangul
- 0x27594b: (0x53d8, 0),# East Asian ideograph
- 0x21594c: (0x8b93, 0),# East Asian ideograph
- 0x27594d: (0x8c36, 0),# East Asian ideograph
- 0x28582b: (0x7303, 0),# East Asian ideograph
- 0x27594e: (0x8c17, 0),# East Asian ideograph
- 0x21594f: (0x8b9a, 0),# East Asian ideograph
- 0x6f564e: (0xc6e8, 0),# Korean hangul
- 0x4b3c2f: (0x5dba, 0),# East Asian ideograph
- 0x287030: (0x7c9d, 0),# East Asian ideograph
- 0x213c33: (0x5dd6, 0),# East Asian ideograph
- 0x6f5a2c: (0xcef9, 0),# Korean hangul
- 0x2d3253: (0x50e3, 0),# East Asian ideograph
- 0x6f4a67: (0xaf2c, 0),# Korean hangul
- 0x6f5952: (0xcd78, 0),# Korean hangul
- 0x333768: (0x8ff4, 0),# East Asian ideograph
- 0x21373f: (0x5659, 0),# East Asian ideograph
- 0x393439: (0x61c3, 0),# East Asian ideograph
- 0x215954: (0x8c48, 0),# East Asian ideograph
- 0x395652: (0x87a1, 0),# East Asian ideograph
- 0x215955: (0x8c49, 0),# East Asian ideograph
- 0x215956: (0x8c4c, 0),# East Asian ideograph
- 0x396167: (0x9b2a, 0),# East Asian ideograph
- 0x706b5b: (0x810e, 0),# East Asian ideograph
- 0x275957: (0x7ad6, 0),# East Asian ideograph
- 0x215958: (0x8c50, 0),# East Asian ideograph
- 0x474931: (0x95f6, 0),# East Asian ideograph
- 0x2d5959: (0x8277, 0),# East Asian ideograph
- 0x213665: (0x558a, 0),# East Asian ideograph
- 0x22595a: (0x73fd, 0),# East Asian ideograph
- 0x6f4e32: (0xb5a4, 0),# Korean hangul
- 0x217c24: (0x5aa7, 0),# East Asian ideograph
- 0x2f5973: (0x9cec, 0),# East Asian ideograph
- 0x6f595b: (0xcdb0, 0),# Korean hangul
- 0x21595c: (0x8c62, 0),# East Asian ideograph
- 0x4b4655: (0x6c17, 0),# East Asian ideograph
- 0x6f595d: (0xcdcc, 0),# Korean hangul
- 0x455d3e: (0x9485, 0),# East Asian ideograph
- 0x6f5b74: (0xd320, 0),# Korean hangul
- 0x21595e: (0x8c6b, 0),# East Asian ideograph
- 0x6f5651: (0xc6f0, 0),# Korean hangul
- 0x69717d: (0x9af1, 0),# East Asian ideograph
- 0x2d595f: (0x732a, 0),# East Asian ideograph
- 0x2d5960: (0x72b2, 0),# East Asian ideograph
- 0x6f5731: (0xc7c9, 0),# Korean hangul
- 0x513a47: (0x8885, 0),# East Asian ideograph
- 0x6f5962: (0xce30, 0),# Korean hangul
- 0x39505b: (0x9b3b, 0),# East Asian ideograph
- 0x215963: (0x8c8a, 0),# East Asian ideograph
- 0x6f5652: (0xc6f8, 0),# Korean hangul
- 0x224479: (0x6b8d, 0),# East Asian ideograph
- 0x4b5964: (0x72e2, 0),# East Asian ideograph
- 0x234435: (0x92dd, 0),# East Asian ideograph
- 0x2d5965: (0x72f8, 0),# East Asian ideograph
- 0x2d3d48: (0x5f4a, 0),# East Asian ideograph
- 0x275966: (0x7683, 0),# East Asian ideograph
- 0x213f79: (0x6249, 0),# East Asian ideograph
- 0x215967: (0x8c93, 0),# East Asian ideograph
- 0x215968: (0x8c9d, 0),# East Asian ideograph
- 0x295b77: (0x9e63, 0),# East Asian ideograph
- 0x215969: (0x8c9e, 0),# East Asian ideograph
- 0x217c27: (0x5a9c, 0),# East Asian ideograph
- 0x6f5c2d: (0xd399, 0),# Korean hangul
- 0x27596a: (0x8d1f, 0),# East Asian ideograph
- 0x6f4a68: (0xaf2d, 0),# Korean hangul
- 0x706b5f: (0x8112, 0),# East Asian ideograph
- 0x21596b: (0x8ca2, 0),# East Asian ideograph
- 0x52735d: (0x7e8a, 0),# East Asian ideograph (variant of 22735D which maps to 7E8A)
- 0x695c30: (0x6923, 0),# East Asian ideograph
- 0x225144: (0x7104, 0),# East Asian ideograph
- 0x22596c: (0x7430, 0),# East Asian ideograph
- 0x33332a: (0x4e93, 0),# East Asian ideograph
- 0x21596d: (0x8cac, 0),# East Asian ideograph
- 0x21596e: (0x8cab, 0),# East Asian ideograph
- 0x217c28: (0x5a7c, 0),# East Asian ideograph
- 0x697260: (0x9c30, 0),# East Asian ideograph
- 0x27596f: (0x8d27, 0),# East Asian ideograph
- 0x215970: (0x8caa, 0),# East Asian ideograph
- 0x695c31: (0x6921, 0),# East Asian ideograph
- 0x215971: (0x8ca7, 0),# East Asian ideograph
- 0x6f5c4f: (0xd48b, 0),# Korean hangul
- 0x275e46: (0x9576, 0),# East Asian ideograph
- 0x215972: (0x8ca9, 0),# East Asian ideograph
- 0x215973: (0x8caf, 0),# East Asian ideograph
- 0x217c29: (0x5a96, 0),# East Asian ideograph
- 0x6f5974: (0xce85, 0),# Korean hangul
- 0x275975: (0x8d39, 0),# East Asian ideograph
- 0x6f5060: (0xbbf9, 0),# Korean hangul
- 0x4b465a: (0x6c32, 0),# East Asian ideograph
- 0x275976: (0x8d32, 0),# East Asian ideograph
- 0x234421: (0x92c6, 0),# East Asian ideograph
- 0x215977: (0x8cc0, 0),# East Asian ideograph
- 0x6f5656: (0xc70c, 0),# Korean hangul
- 0x277748: (0x57b2, 0),# East Asian ideograph
- 0x215978: (0x8cb4, 0),# East Asian ideograph
- 0x275979: (0x8d34, 0),# East Asian ideograph
- 0x295b2a: (0x9e46, 0),# East Asian ideograph
- 0x6f503e: (0xbab0, 0),# Korean hangul
- 0x21597a: (0x8cb7, 0),# East Asian ideograph
- 0x234425: (0x92f4, 0),# East Asian ideograph
- 0x27597b: (0x8d2c, 0),# East Asian ideograph
- 0x274426: (0x4e1c, 0),# East Asian ideograph
- 0x27597c: (0x8d3b, 0),# East Asian ideograph
- 0x6f5657: (0xc714, 0),# Korean hangul
- 0x234427: (0x92cf, 0),# East Asian ideograph
- 0x292d51: (0x8511, 0),# East Asian ideograph
- 0x21597d: (0x8cb8, 0),# East Asian ideograph
- 0x23443a: (0x92ca, 0),# East Asian ideograph
- 0x27597e: (0x8d38, 0),# East Asian ideograph
- 0x23442a: (0x92b2, 0),# East Asian ideograph
- 0x225148: (0x70f3, 0),# East Asian ideograph
- 0x29442b: (0x9503, 0),# East Asian ideograph
- 0x6f5324: (0xc120, 0),# Korean hangul
- 0x6f5658: (0xc717, 0),# Korean hangul
- 0x23442c: (0x92e7, 0),# East Asian ideograph
- 0x294f6b: (0x98a1, 0),# East Asian ideograph
- 0x696d41: (0x8ec8, 0),# East Asian ideograph
- 0x23442d: (0x92c7, 0),# East Asian ideograph
- 0x277d40: (0x5af1, 0),# East Asian ideograph
- 0x2d3d4e: (0x7bf2, 0),# East Asian ideograph
- 0x23442e: (0x92f0, 0),# East Asian ideograph
- 0x6f4a69: (0xaf30, 0),# Korean hangul
- 0x23442f: (0x92db, 0),# East Asian ideograph
- 0x6f4e5e: (0xb768, 0),# Korean hangul
- 0x234430: (0x92dc, 0),# East Asian ideograph
- 0x2d4e5b: (0x945b, 0),# East Asian ideograph
- 0x234431: (0x92d8, 0),# East Asian ideograph
- 0x224432: (0x6b39, 0),# East Asian ideograph
- 0x234433: (0x92e9, 0),# East Asian ideograph
- 0x224435: (0x6b3f, 0),# East Asian ideograph
- 0x274e5e: (0x783e, 0),# East Asian ideograph
- 0x6f565a: (0xc720, 0),# Korean hangul
- 0x27375a: (0x4e25, 0),# East Asian ideograph
- 0x224437: (0x6b46, 0),# East Asian ideograph
- 0x2d3d50: (0x5f5c, 0),# East Asian ideograph
- 0x224438: (0x6b41, 0),# East Asian ideograph
- 0x234439: (0x92d1, 0),# East Asian ideograph
- 0x6f5061: (0xbbfc, 0),# Korean hangul
- 0x283561: (0x64ba, 0),# East Asian ideograph
- 0x22443a: (0x6b40, 0),# East Asian ideograph
- 0x6f565b: (0xc721, 0),# Korean hangul
- 0x22443b: (0x6b42, 0),# East Asian ideograph
- 0x6f5973: (0xce84, 0),# Korean hangul
- 0x6f4c7e: (0xb301, 0),# Korean hangul
- 0x23443c: (0x92c2, 0),# East Asian ideograph
- 0x6f4f29: (0xb810, 0),# Korean hangul
- 0x454c3c: (0x7589, 0),# East Asian ideograph
- 0x6f5733: (0xc7d8, 0),# Korean hangul
- 0x23443e: (0x92cc, 0),# East Asian ideograph
- 0x22443f: (0x6b4a, 0),# East Asian ideograph
- 0x235b60: (0x9d98, 0),# East Asian ideograph
- 0x2e525d: (0x715b, 0),# East Asian ideograph
- 0x6f565c: (0xc724, 0),# Korean hangul
- 0x234440: (0x92ef, 0),# East Asian ideograph
- 0x213c41: (0x5df7, 0),# East Asian ideograph
- 0x234441: (0x92e8, 0),# East Asian ideograph
- 0x6f5d35: (0xd61c, 0),# Korean hangul
- 0x287739: (0x8069, 0),# East Asian ideograph
- 0x27587e: (0x8bff, 0),# East Asian ideograph
- 0x234443: (0x92eb, 0),# East Asian ideograph
- 0x695c39: (0x697e, 0),# East Asian ideograph
- 0x295d36: (0x9e2c, 0),# East Asian ideograph
- 0x2d4444: (0x69c5, 0),# East Asian ideograph
- 0x6f565d: (0xc728, 0),# Korean hangul
- 0x234445: (0x92f5, 0),# East Asian ideograph
- 0x224446: (0x6b4e, 0),# East Asian ideograph (variant of 4C4446 which maps to 6B4E)
- 0x6f4a6a: (0xaf34, 0),# Korean hangul
- 0x234448: (0x92f2, 0),# East Asian ideograph
- 0x28422b: (0x6a2f, 0),# East Asian ideograph
- 0x334449: (0x6144, 0),# East Asian ideograph
- 0x22444a: (0x6b57, 0),# East Asian ideograph
- 0x2d444b: (0x6852, 0),# East Asian ideograph
- 0x6f5530: (0xc568, 0),# Korean hangul
- 0x6f513c: (0xbcc0, 0),# Korean hangul
- 0x22444c: (0x6b54, 0),# East Asian ideograph
- 0x23444d: (0x9307, 0),# East Asian ideograph
- 0x22444e: (0x6b55, 0),# East Asian ideograph
- 0x6f5c51: (0xd4cc, 0),# Korean hangul
- 0x515e5d: (0x9616, 0),# East Asian ideograph
- 0x2d4450: (0x8308, 0),# East Asian ideograph
- 0x224451: (0x6b5c, 0),# East Asian ideograph
- 0x287a56: (0x8114, 0),# East Asian ideograph
- 0x6f5062: (0xbbff, 0),# Korean hangul
- 0x212b38: (0xff0c, 0),# Ideographic variant comma
- 0x225150: (0x70f4, 0),# East Asian ideograph
- 0x23554f: (0x9b10, 0),# East Asian ideograph
- 0x224453: (0x6b5e, 0),# East Asian ideograph
- 0x6f5b77: (0xd328, 0),# Korean hangul
- 0x224454: (0x6b60, 0),# East Asian ideograph
- 0x22625d: (0x777e, 0),# East Asian ideograph
- 0x217c34: (0x5aae, 0),# East Asian ideograph
- 0x4b4456: (0x6813, 0),# East Asian ideograph
- 0x6f5734: (0xc800, 0),# Korean hangul
- 0x294457: (0x9529, 0),# East Asian ideograph
- 0x212b39: (0xff1b, 0),# Ideographic semicolon
- 0x234458: (0x931f, 0),# East Asian ideograph
- 0x6f5661: (0xc73c, 0),# Korean hangul
- 0x23445a: (0x9331, 0),# East Asian ideograph
- 0x4d5f70: (0x9f44, 0),# East Asian ideograph
- 0x22445b: (0x6b6b, 0),# East Asian ideograph
- 0x22736b: (0x7e95, 0),# East Asian ideograph
- 0x22445d: (0x6b6c, 0),# East Asian ideograph
- 0x4c284c: (0x53a9, 0),# East Asian ideograph
- 0x4b3c33: (0x5dcc, 0),# East Asian ideograph
- 0x215e60: (0x95bb, 0),# East Asian ideograph
- 0x23445f: (0x930f, 0),# East Asian ideograph
- 0x6f4a6b: (0xaf3c, 0),# Korean hangul
- 0x224461: (0x6b71, 0),# East Asian ideograph
- 0x6f5d2c: (0xd600, 0),# Korean hangul
- 0x234462: (0x9302, 0),# East Asian ideograph
- 0x6f5170: (0xbe4b, 0),# Korean hangul
- 0x274463: (0x6761, 0),# East Asian ideograph
- 0x234464: (0x9324, 0),# East Asian ideograph
- 0x2d5b2f: (0x8eb1, 0),# East Asian ideograph
- 0x214466: (0x6885, 0),# East Asian ideograph
- 0x274468: (0x67ad, 0),# East Asian ideograph
- 0x294f77: (0x989f, 0),# East Asian ideograph
- 0x6f5221: (0xbe70, 0),# Korean hangul
- 0x6f4b7a: (0xb11d, 0),# Korean hangul
- 0x274469: (0x6800, 0),# East Asian ideograph
- 0x2d5f73: (0x975a, 0),# Unrelated variant of EACC 234C76 which maps to 975A
- 0x23446a: (0x9323, 0),# East Asian ideograph
- 0x22446b: (0x6b7e, 0),# East Asian ideograph
- 0x212b3d: (0xff01, 0),# Ideographic exclamation point
- 0x225155: (0x7111, 0),# East Asian ideograph
- 0x23446c: (0x9321, 0),# East Asian ideograph
- 0x4c683e: (0x79eb, 0),# East Asian ideograph
- 0x27446d: (0x5f03, 0),# East Asian ideograph
- 0x6f5222: (0xbe71, 0),# Korean hangul
- 0x27446e: (0x6816, 0),# East Asian ideograph
- 0x2d4e79: (0x5fa1, 0),# East Asian ideograph
- 0x6f5735: (0xc801, 0),# Korean hangul
- 0x2d4b43: (0x746f, 0),# East Asian ideograph
- 0x274471: (0x680b, 0),# East Asian ideograph
- 0x4b5a68: (0x8df5, 0),# East Asian ideograph (variant of 275A68 which maps to 8DF5)
- 0x234472: (0x9301, 0),# East Asian ideograph
- 0x6f5223: (0xbe73, 0),# Korean hangul
- 0x6f5326: (0xc124, 0),# Korean hangul
- 0x224473: (0x6b84, 0),# East Asian ideograph
- 0x2d3332: (0x5190, 0),# East Asian ideograph
- 0x234474: (0x9315, 0),# East Asian ideograph
- 0x47366f: (0x8d4d, 0),# East Asian ideograph
- 0x294475: (0x9494, 0),# East Asian ideograph
- 0x235556: (0x9b1d, 0),# East Asian ideograph
- 0x234476: (0x9329, 0),# East Asian ideograph
- 0x23386f: (0x8dba, 0),# East Asian ideograph
- 0x232f21: (0x88fc, 0),# East Asian ideograph
- 0x2d4a26: (0x713c, 0),# East Asian ideograph
- 0x287035: (0x7c74, 0),# East Asian ideograph
- 0x6f5224: (0xbe74, 0),# Korean hangul
- 0x234478: (0x932e, 0),# East Asian ideograph
- 0x234479: (0x932a, 0),# East Asian ideograph
- 0x6f4a6c: (0xaf3d, 0),# Korean hangul
- 0x27447a: (0x67a3, 0),# East Asian ideograph
- 0x6f5d2d: (0xd601, 0),# Korean hangul
- 0x22447b: (0x6b95, 0),# East Asian ideograph
- 0x21447c: (0x6912, 0),# East Asian ideograph
- 0x216f27: (0x5423, 0),# East Asian ideograph
- 0x213c4d: (0x5e11, 0),# East Asian ideograph
- 0x2d447d: (0x684c, 0),# East Asian ideograph
- 0x2d3d5e: (0x9af4, 0),# East Asian ideograph
- 0x23447e: (0x9335, 0),# East Asian ideograph
- 0x706058: (0x562d, 0),# East Asian ideograph
- 0x273671: (0x54df, 0),# East Asian ideograph
- 0x6f5226: (0xbe7b, 0),# Korean hangul
- 0x232f2d: (0x8909, 0),# East Asian ideograph
- 0x23444c: (0x9303, 0),# East Asian ideograph
- 0x4d5b35: (0x9dab, 0),# East Asian ideograph
- 0x232f2f: (0x8918, 0),# East Asian ideograph
- 0x6f5b79: (0xd32c, 0),# Korean hangul
- 0x213d3c: (0x5f15, 0),# East Asian ideograph
- 0x6f566a: (0xc774, 0),# Korean hangul
- 0x6f5227: (0xbe7c, 0),# Korean hangul
- 0x213c4f: (0x5e25, 0),# East Asian ideograph
- 0x2d3632: (0x8a7b, 0),# East Asian ideograph
- 0x346622: (0x589d, 0),# East Asian ideograph
- 0x295c47: (0x9e68, 0),# East Asian ideograph
- 0x293a2e: (0x8dc4, 0),# East Asian ideograph
- 0x2e6f35: (0x6cd4, 0),# East Asian ideograph
- 0x6f566b: (0xc775, 0),# Korean hangul
- 0x6f5228: (0xbe7d, 0),# Korean hangul
- 0x22516d: (0x7134, 0),# East Asian ideograph
- 0x23444e: (0x931e, 0),# East Asian ideograph
- 0x2f5d49: (0x9ea4, 0),# East Asian ideograph
- 0x2e313a: (0x6332, 0),# East Asian ideograph
- 0x216f3a: (0x546d, 0),# East Asian ideograph
- 0x6f566c: (0xc778, 0),# Korean hangul
- 0x282458: (0x5d03, 0),# East Asian ideograph
- 0x216f3b: (0x5491, 0),# East Asian ideograph
- 0x6f5229: (0xbe80, 0),# Korean hangul
- 0x4d5f7b: (0x97f2, 0),# East Asian ideograph
- 0x222f3d: (0x6201, 0),# East Asian ideograph
- 0x295c49: (0x9e47, 0),# East Asian ideograph
- 0x4b577e: (0x7e7f, 0),# East Asian ideograph
- 0x217c41: (0x5ac4, 0),# East Asian ideograph
- 0x215a28: (0x8cc2, 0),# East Asian ideograph
- 0x697265: (0x9c5a, 0),# East Asian ideograph
- 0x216f42: (0x5494, 0),# East Asian ideograph
- 0x232f43: (0x8915, 0),# East Asian ideograph
- 0x333344: (0x51db, 0),# East Asian ideograph
- 0x6f566e: (0xc77d, 0),# Korean hangul
- 0x274340: (0x6656, 0),# East Asian ideograph
- 0x6f522b: (0xbe8c, 0),# Korean hangul
- 0x213c53: (0x5e36, 0),# East Asian ideograph
- 0x4c5175: (0x8315, 0),# East Asian ideograph
- 0x225e37: (0x75a2, 0),# East Asian ideograph
- 0x222f47: (0x6214, 0),# East Asian ideograph
- 0x2d3921: (0x591f, 0),# East Asian ideograph
- 0x233345: (0x8ae2, 0),# East Asian ideograph
- 0x216f49: (0x548d, 0),# East Asian ideograph
- 0x355d5c: (0x8c8e, 0),# East Asian ideograph
- 0x6f566f: (0xc783, 0),# Korean hangul
- 0x216f4a: (0x5463, 0),# East Asian ideograph
- 0x6f522c: (0xbe8f, 0),# Korean hangul
- 0x6f5737: (0xc808, 0),# Korean hangul
- 0x225160: (0x70f6, 0),# East Asian ideograph
- 0x6f5670: (0xc784, 0),# Korean hangul
- 0x234453: (0x931d, 0),# East Asian ideograph
- 0x216f7b: (0x551a, 0),# East Asian ideograph
- 0x6f5d68: (0xd744, 0),# Korean hangul
- 0x6f5671: (0xc785, 0),# Korean hangul
- 0x6f522e: (0xbe91, 0),# Korean hangul
- 0x294427: (0x94d7, 0),# East Asian ideograph
- 0x234454: (0x92fa, 0),# East Asian ideograph
- 0x2d3d67: (0x9015, 0),# East Asian ideograph
- 0x6f4a6e: (0xaf41, 0),# Korean hangul
- 0x222f56: (0x6223, 0),# East Asian ideograph
- 0x6f4f43: (0xb8e8, 0),# Korean hangul
- 0x6f5d2f: (0xd608, 0),# Korean hangul
- 0x335561: (0x8462, 0),# East Asian ideograph
- 0x216f58: (0x54a1, 0),# East Asian ideograph
- 0x6f5672: (0xc787, 0),# Korean hangul
- 0x274344: (0x6682, 0),# East Asian ideograph
- 0x6f522f: (0xbe98, 0),# Korean hangul
- 0x213c57: (0x5e3d, 0),# East Asian ideograph
- 0x4b356a: (0x55ec, 0),# East Asian ideograph
- 0x23223c: (0x83f3, 0),# East Asian ideograph
- 0x696f5b: (0x958a, 0),# East Asian ideograph
- 0x273238: (0x4fa6, 0),# East Asian ideograph
- 0x695c4f: (0x69dd, 0),# East Asian ideograph
- 0x222f5d: (0x6224, 0),# East Asian ideograph
- 0x6f5673: (0xc788, 0),# Korean hangul
- 0x216f5e: (0x54be, 0),# East Asian ideograph
- 0x6f5230: (0xbea8, 0),# Korean hangul
- 0x213c58: (0x5e40, 0),# East Asian ideograph
- 0x692433: (0x3053, 0),# Hiragana letter KO
- 0x292f60: (0x88e2, 0),# East Asian ideograph
- 0x6f5066: (0xbc0b, 0),# Korean hangul
- 0x4c2f61: (0x622c, 0),# East Asian ideograph
- 0x4b4235: (0x6442, 0),# East Asian ideograph
- 0x6f5b7b: (0xd338, 0),# Korean hangul
- 0x6f5c32: (0xd3ab, 0),# Korean hangul
- 0x6f5231: (0xbed0, 0),# Korean hangul
- 0x213c59: (0x5e4c, 0),# East Asian ideograph
- 0x216f64: (0x54b5, 0),# East Asian ideograph
- 0x225e2e: (0x7594, 0),# East Asian ideograph
- 0x6f5738: (0xc80a, 0),# Korean hangul
- 0x2d3f24: (0x661a, 0),# East Asian ideograph
- 0x226f66: (0x7cce, 0),# East Asian ideograph
- 0x4b4236: (0x643a, 0),# East Asian ideograph
- 0x6f5a30: (0xcf04, 0),# Korean hangul
- 0x2d4a34: (0x718f, 0),# East Asian ideograph
- 0x226f68: (0x7cc8, 0),# East Asian ideograph
- 0x6f5a28: (0xcef4, 0),# Korean hangul
- 0x6f5232: (0xbed1, 0),# Korean hangul
- 0x222f69: (0x97ef, 0),# East Asian ideograph
- 0x333428: (0x523c, 0),# East Asian ideograph
- 0x6f5676: (0xc78e, 0),# Korean hangul
- 0x216f6d: (0x54ae, 0),# East Asian ideograph
- 0x6f5233: (0xbed4, 0),# Korean hangul
- 0x2d3d6c: (0x5f93, 0),# East Asian ideograph
- 0x6f4a6f: (0xaf42, 0),# Korean hangul
- 0x232f6f: (0x894f, 0),# East Asian ideograph
- 0x2d5b42: (0x8f19, 0),# East Asian ideograph
- 0x393e7d: (0x7609, 0),# East Asian ideograph
- 0x695c53: (0x6a2e, 0),# East Asian ideograph
- 0x225167: (0x70ef, 0),# East Asian ideograph
- 0x235566: (0x9b23, 0),# East Asian ideograph
- 0x216f71: (0x54bf, 0),# East Asian ideograph
- 0x6f5677: (0xc655, 0),# Korean hangul
- 0x292f72: (0x88e5, 0),# East Asian ideograph
- 0x6f5234: (0xbed7, 0),# Korean hangul
- 0x213c5c: (0x5e57, 0),# East Asian ideograph
- 0x4d2f73: (0x7e5d, 0),# East Asian ideograph
- 0x6f4f4c: (0xb93c, 0),# Korean hangul
- 0x2d5b43: (0x8efd, 0),# East Asian ideograph
- 0x226f75: (0x7cd7, 0),# East Asian ideograph
- 0x225168: (0x7100, 0),# East Asian ideograph
- 0x33334e: (0x51fe, 0),# East Asian ideograph
- 0x225a21: (0x7428, 0),# East Asian ideograph
- 0x215a22: (0x8cc7, 0),# East Asian ideograph
- 0x23445b: (0x9306, 0),# East Asian ideograph
- 0x215a23: (0x8cca, 0),# East Asian ideograph
- 0x6f536c: (0xc274, 0),# Korean hangul
- 0x4b312d: (0x4f2b, 0),# East Asian ideograph
- 0x235a24: (0x9d04, 0),# East Asian ideograph
- 0x21322a: (0x5003, 0),# East Asian ideograph
- 0x222f7a: (0x6250, 0),# East Asian ideograph
- 0x215a25: (0x8cc4, 0),# East Asian ideograph
- 0x6f5d60: (0xd71c, 0),# Korean hangul
- 0x335568: (0x8406, 0),# East Asian ideograph
- 0x226f7b: (0x7ce8, 0),# East Asian ideograph (variant of 4C6F7B which maps to 7CE8)
- 0x6f5b7c: (0xd339, 0),# Korean hangul
- 0x275a26: (0x8d40, 0),# East Asian ideograph
- 0x6f5679: (0xc790, 0),# Korean hangul
- 0x2e2f7c: (0x634d, 0),# East Asian ideograph
- 0x215a27: (0x8cc3, 0),# East Asian ideograph
- 0x213c5e: (0x5e63, 0),# East Asian ideograph
- 0x276121: (0x998a, 0),# East Asian ideograph
- 0x2d6f7d: (0x8123, 0),# East Asian ideograph
- 0x235a28: (0x9d07, 0),# East Asian ideograph
- 0x2d4472: (0x68ca, 0),# East Asian ideograph
- 0x215a29: (0x8cd3, 0),# East Asian ideograph
- 0x215a2a: (0x8cd1, 0),# East Asian ideograph
- 0x217d2e: (0x5b0d, 0),# East Asian ideograph
- 0x215a2b: (0x8cd2, 0),# East Asian ideograph
- 0x395063: (0x9939, 0),# East Asian ideograph
- 0x6f567a: (0xc791, 0),# Korean hangul
- 0x275a2c: (0x8d54, 0),# East Asian ideograph
- 0x23445d: (0x92f9, 0),# East Asian ideograph
- 0x215a2d: (0x8ce6, 0),# East Asian ideograph
- 0x295c57: (0x9e6b, 0),# East Asian ideograph
- 0x215a2f: (0x8ce3, 0),# East Asian ideograph
- 0x213076: (0x4ed9, 0),# East Asian ideograph
- 0x215a30: (0x8ce2, 0),# East Asian ideograph
- 0x4b3c38: (0x949c, 0),# East Asian ideograph
- 0x275a31: (0x8d31, 0),# East Asian ideograph
- 0x276123: (0x9992, 0),# East Asian ideograph
- 0x225a32: (0x743b, 0),# East Asian ideograph
- 0x4b3130: (0x4fab, 0),# East Asian ideograph
- 0x6f4a70: (0xaf43, 0),# Korean hangul
- 0x215a33: (0x8cdc, 0),# East Asian ideograph
- 0x275a34: (0x8d28, 0),# East Asian ideograph
- 0x215a35: (0x8ced, 0),# East Asian ideograph
- 0x2d4a3b: (0x4e89, 0),# East Asian ideograph
- 0x225a36: (0x7444, 0),# East Asian ideograph
- 0x275a37: (0x8d5b, 0),# East Asian ideograph
- 0x215a38: (0x8cfa, 0),# East Asian ideograph
- 0x215a39: (0x8d05, 0),# East Asian ideograph
- 0x293a40: (0x8df8, 0),# East Asian ideograph
- 0x23556c: (0x9b29, 0),# East Asian ideograph
- 0x215a3a: (0x8cfc, 0),# East Asian ideograph
- 0x215a3b: (0x8d08, 0),# East Asian ideograph (variant of 4B5A3B which maps to 8D08)
- 0x6f5352: (0xc1f0, 0),# Korean hangul
- 0x215a3c: (0x8d0b, 0),# East Asian ideograph
- 0x215a3d: (0x8d0a, 0),# East Asian ideograph
- 0x215a3e: (0x8d0f, 0),# East Asian ideograph
- 0x6f5b7d: (0xd33b, 0),# Korean hangul
- 0x215a3f: (0x8d0d, 0),# East Asian ideograph
- 0x6f567e: (0xc7a0, 0),# Korean hangul
- 0x213d40: (0x5f1b, 0),# East Asian ideograph
- 0x215a40: (0x8d13, 0),# East Asian ideograph
- 0x276126: (0x990d, 0),# East Asian ideograph
- 0x215a41: (0x8d16, 0),# East Asian ideograph
- 0x215a42: (0x8d1b, 0),# East Asian ideograph
- 0x295c5b: (0x9e6c, 0),# East Asian ideograph
- 0x225a43: (0x7458, 0),# East Asian ideograph
- 0x235a44: (0x9d1d, 0),# East Asian ideograph
- 0x225a45: (0x7442, 0),# East Asian ideograph
- 0x6f5a46: (0xcf71, 0),# Korean hangul
- 0x2d3d75: (0x60ea, 0),# East Asian ideograph
- 0x2e4174: (0x6aa9, 0),# East Asian ideograph
- 0x225a47: (0x744b, 0),# East Asian ideograph
- 0x215a48: (0x8d70, 0),# East Asian ideograph
- 0x6f5660: (0xc737, 0),# Korean hangul
- 0x337345: (0x9f67, 0),# East Asian ideograph
- 0x6f5a49: (0xcf80, 0),# Korean hangul
- 0x225a4a: (0x744a, 0),# East Asian ideograph
- 0x213c65: (0x5e74, 0),# East Asian ideograph
- 0x6f5a4b: (0xcf8c, 0),# Korean hangul
- 0x6f5938: (0xcc71, 0),# Korean hangul
- 0x2d3d76: (0x5fb4, 0),# East Asian ideograph
- 0x2d5476: (0x8318, 0),# East Asian ideograph
- 0x6f5a4c: (0xcf8d, 0),# Korean hangul
- 0x6f5573: (0xc628, 0),# Korean hangul
- 0x6f5a4d: (0xcfa1, 0),# Korean hangul
- 0x2d5a4e: (0x8d82, 0),# East Asian ideograph
- 0x215a4f: (0x8d99, 0),# East Asian ideograph
- 0x4b3864: (0x58c7, 0),# East Asian ideograph
- 0x275a50: (0x8d76, 0),# East Asian ideograph
- 0x2d392f: (0x7287, 0),# East Asian ideograph
- 0x6f5a51: (0xcfe0, 0),# Korean hangul
- 0x704c2a: (0x915e, 0),# East Asian ideograph
- 0x224e37: (0x6faf, 0),# East Asian ideograph
- 0x6f5a52: (0xcfe1, 0),# Korean hangul
- 0x6f5c58: (0xd515, 0),# Korean hangul
- 0x215a53: (0x8da8, 0),# East Asian ideograph
- 0x6f492e: (0xac86, 0),# Korean hangul
- 0x6f537d: (0xc2b4, 0),# Korean hangul
- 0x6f523f: (0xbf40, 0),# Korean hangul
- 0x225a55: (0x7457, 0),# East Asian ideograph
- 0x225a56: (0x7451, 0),# East Asian ideograph
- 0x6f5069: (0xbc0f, 0),# Korean hangul
- 0x6f5a57: (0xcff5, 0),# Korean hangul
- 0x293a46: (0x8e70, 0),# East Asian ideograph
- 0x23512f: (0x9916, 0),# East Asian ideograph
- 0x4b3642: (0x8bf6, 0),# East Asian ideograph
- 0x2e4e41: (0x7032, 0),# East Asian ideograph
- 0x215a59: (0x8ddb, 0),# East Asian ideograph
- 0x6f5240: (0xbf41, 0),# Korean hangul
- 0x706131: (0x5c9c, 0),# East Asian ideograph
- 0x4b357b: (0x54cc, 0),# East Asian ideograph
- 0x225a5a: (0x745d, 0),# East Asian ideograph
- 0x225a5b: (0x7454, 0),# East Asian ideograph
- 0x225174: (0x7131, 0),# East Asian ideograph
- 0x235573: (0x9b2d, 0),# East Asian ideograph
- 0x235130: (0x9914, 0),# East Asian ideograph
- 0x33386e: (0x576f, 0),# East Asian ideograph
- 0x6f5a5e: (0xd038, 0),# Korean hangul
- 0x6f5241: (0xbf44, 0),# Korean hangul
- 0x2d5a5f: (0x8e5f, 0),# East Asian ideograph
- 0x6f5949: (0xcd09, 0),# Korean hangul
- 0x225a60: (0x746d, 0),# East Asian ideograph
- 0x277239: (0x54d4, 0),# East Asian ideograph
- 0x225a61: (0x7462, 0),# East Asian ideograph
- 0x235574: (0x9b2e, 0),# East Asian ideograph (not in Unicode)
- 0x29506c: (0x996b, 0),# East Asian ideograph
- 0x6f532f: (0xc131, 0),# Korean hangul
- 0x215a62: (0x8df3, 0),# East Asian ideograph
- 0x215a63: (0x8dfa, 0),# East Asian ideograph
- 0x6f5242: (0xbf48, 0),# Korean hangul
- 0x27612d: (0x51af, 0),# East Asian ideograph
- 0x6f5a64: (0xd07d, 0),# Korean hangul
- 0x4c796b: (0x815f, 0),# East Asian ideograph
- 0x235a65: (0x9d30, 0),# East Asian ideograph
- 0x275021: (0x7b0b, 0),# East Asian ideograph
- 0x235132: (0x9911, 0),# East Asian ideograph
- 0x2f3b63: (0x5e32, 0),# East Asian ideograph (not in Unicode)
- 0x2d4a45: (0x5c12, 0),# East Asian ideograph
- 0x275a68: (0x8df5, 0),# East Asian ideograph
- 0x6f5243: (0xbf50, 0),# Korean hangul
- 0x213c6b: (0x5e7e, 0),# East Asian ideograph
- 0x215a69: (0x8e22, 0),# East Asian ideograph
- 0x225a6a: (0x7471, 0),# East Asian ideograph
- 0x225a6b: (0x7468, 0),# East Asian ideograph
- 0x4b5936: (0x8b20, 0),# East Asian ideograph
- 0x4d5a6c: (0x9d46, 0),# East Asian ideograph
- 0x216035: (0x97fb, 0),# East Asian ideograph
- 0x2d4a46: (0x58bb, 0),# East Asian ideograph
- 0x6f5a6d: (0xd0b9, 0),# Korean hangul
- 0x4b4857: (0x6f22, 0),# East Asian ideograph
- 0x235a70: (0x9d5c, 0),# East Asian ideograph
- 0x224d35: (0x6f90, 0),# East Asian ideograph
- 0x282951: (0x5f2a, 0),# East Asian ideograph
- 0x275a71: (0x8e0a, 0),# East Asian ideograph
- 0x6f5a72: (0xd0c4, 0),# Korean hangul
- 0x6f5245: (0xbf55, 0),# Korean hangul
- 0x276130: (0x9a6e, 0),# East Asian ideograph
- 0x6f4c27: (0xb141, 0),# Korean hangul
- 0x695a73: (0x6683, 0),# East Asian ideograph
- 0x454b7a: (0x7523, 0),# East Asian ideograph
- 0x6f5a74: (0xd0c9, 0),# Korean hangul
- 0x2d377c: (0x962c, 0),# East Asian ideograph
- 0x215a75: (0x8e4b, 0),# East Asian ideograph
- 0x295822: (0x9cae, 0),# East Asian ideograph
- 0x6f5a76: (0xd0d1, 0),# Korean hangul
- 0x6f5a77: (0xd0d3, 0),# Korean hangul
- 0x6f5246: (0xbfb0, 0),# Korean hangul
- 0x234522: (0x9314, 0),# East Asian ideograph
- 0x225a78: (0x7460, 0),# East Asian ideograph
- 0x225a79: (0x7472, 0),# East Asian ideograph
- 0x225a7a: (0x7484, 0),# East Asian ideograph
- 0x224525: (0x6b99, 0),# East Asian ideograph
- 0x224d37: (0x6f8d, 0),# East Asian ideograph
- 0x225a7b: (0x7487, 0),# East Asian ideograph
- 0x274526: (0x6781, 0),# East Asian ideograph
- 0x6f5a7c: (0xd0e0, 0),# Korean hangul
- 0x6f5247: (0xbfc0, 0),# Korean hangul
- 0x276132: (0x9a73, 0),# East Asian ideograph
- 0x6f5a7d: (0xd0e4, 0),# Korean hangul
- 0x234528: (0x92fe, 0),# East Asian ideograph
- 0x215a7e: (0x8e7a, 0),# East Asian ideograph
- 0x224529: (0x6b9b, 0),# East Asian ideograph
- 0x27452a: (0x6768, 0),# East Asian ideograph
- 0x3f4c3c: (0x7582, 0),# East Asian ideograph
- 0x27452b: (0x6862, 0),# East Asian ideograph
- 0x2e7062: (0x7d4f, 0),# East Asian ideograph
- 0x6f5248: (0xbfc5, 0),# Korean hangul
- 0x276133: (0x9a7b, 0),# East Asian ideograph
- 0x4b3b61: (0x5c64, 0),# East Asian ideograph
- 0x69242b: (0x304b, 0),# Hiragana letter KA
- 0x27452d: (0x4e1a, 0),# East Asian ideograph
- 0x2d3931: (0x67f0, 0),# East Asian ideograph
- 0x2d7e6a: (0x51a4, 0),# East Asian ideograph
- 0x27452f: (0x67ab, 0),# East Asian ideograph
- 0x6f5c5a: (0xd53d, 0),# Korean hangul
- 0x224d39: (0x6f92, 0),# East Asian ideograph
- 0x692434: (0x3054, 0),# Hiragana letter GO
- 0x6f5249: (0xbfcc, 0),# Korean hangul
- 0x234531: (0x9341, 0),# East Asian ideograph
- 0x217c60: (0x5adc, 0),# East Asian ideograph
- 0x273764: (0x5631, 0),# East Asian ideograph
- 0x234532: (0x9319, 0),# East Asian ideograph
- 0x6f506b: (0xbbb4, 0),# Korean hangul
- 0x4b4534: (0x6994, 0),# East Asian ideograph (variant of 214534)
- 0x224d3a: (0x6f89, 0),# East Asian ideograph
- 0x234535: (0x934c, 0),# East Asian ideograph
- 0x6f524a: (0xbfcd, 0),# Korean hangul
- 0x224536: (0x6ba2, 0),# East Asian ideograph
- 0x6f4c28: (0xb144, 0),# Korean hangul
- 0x21382f: (0x577c, 0),# East Asian ideograph
- 0x274537: (0x8363, 0),# East Asian ideograph
- 0x224538: (0x6baa, 0),# East Asian ideograph
- 0x274539: (0x6784, 0),# East Asian ideograph
- 0x235b6a: (0x9da1, 0),# East Asian ideograph
- 0x2d453a: (0x6760, 0),# East Asian ideograph
- 0x22453b: (0x6bad, 0),# East Asian ideograph
- 0x234471: (0x9340, 0),# East Asian ideograph
- 0x692531: (0x30b1, 0),# Katakana letter KE
- 0x22453d: (0x6bb0, 0),# East Asian ideograph
- 0x6f4f66: (0xb9c8, 0),# Korean hangul
- 0x6f5663: (0xc740, 0),# Korean hangul
- 0x274871: (0x6d47, 0),# East Asian ideograph
- 0x224d3c: (0x6f8c, 0),# East Asian ideograph
- 0x22453f: (0x6bb3, 0),# East Asian ideograph
- 0x274540: (0x67aa, 0),# East Asian ideograph
- 0x234541: (0x9379, 0),# East Asian ideograph
- 0x4b3144: (0x4f36, 0),# East Asian ideograph (variant of 213144 which maps to 4F36)
- 0x295c6c: (0x9e6a, 0),# East Asian ideograph
- 0x2d4543: (0x6901, 0),# East Asian ideograph
- 0x224d3d: (0x6f62, 0),# East Asian ideograph (variant of 4C4D3D which maps to 6F62)
- 0x33513c: (0x7d4c, 0),# East Asian ideograph
- 0x214544: (0x6a23, 0),# East Asian ideograph
- 0x2d5036: (0x84d1, 0),# East Asian ideograph
- 0x6f524d: (0xbfdc, 0),# Korean hangul
- 0x234a5e: (0x967f, 0),# East Asian ideograph
- 0x223553: (0x6509, 0),# East Asian ideograph
- 0x4b7577: (0x57d3, 0),# East Asian ideograph
- 0x225e4a: (0x75c2, 0),# East Asian ideograph
- 0x214546: (0x6a01, 0),# East Asian ideograph
- 0x2d3932: (0x7ad2, 0),# East Asian ideograph
- 0x274547: (0x6807, 0),# East Asian ideograph
- 0x234548: (0x935c, 0),# East Asian ideograph
- 0x6f5c5b: (0xd540, 0),# Korean hangul
- 0x216037: (0x9801, 0),# East Asian ideograph
- 0x274549: (0x67a2, 0),# East Asian ideograph
- 0x6f524e: (0xbfdd, 0),# Korean hangul
- 0x27454a: (0x697c, 0),# East Asian ideograph
- 0x213833: (0x57ae, 0),# East Asian ideograph
- 0x4b602d: (0x9771, 0),# East Asian ideograph
- 0x2d5b5d: (0x8fa2, 0),# East Asian ideograph
- 0x2d3944: (0x511e, 0),# East Asian ideograph
- 0x27454c: (0x6868, 0),# East Asian ideograph
- 0x23454d: (0x9347, 0),# East Asian ideograph
- 0x293373: (0x8c21, 0),# East Asian ideograph
- 0x27454e: (0x4e50, 0),# East Asian ideograph
- 0x6f524f: (0xbfe1, 0),# Korean hangul
- 0x27454f: (0x679e, 0),# East Asian ideograph
- 0x2d3f2a: (0x5abf, 0),# East Asian ideograph
- 0x2d4550: (0x58ab, 0),# East Asian ideograph
- 0x2d5b5e: (0x8fa7, 0),# East Asian ideograph
- 0x234551: (0x937a, 0),# East Asian ideograph
- 0x29582c: (0x9cb1, 0),# East Asian ideograph
- 0x274553: (0x692d, 0),# East Asian ideograph
- 0x27767a: (0x57da, 0),# East Asian ideograph
- 0x224554: (0x6bc8, 0),# East Asian ideograph
- 0x274555: (0x6811, 0),# East Asian ideograph
- 0x234556: (0x937c, 0),# East Asian ideograph
- 0x293a57: (0x8dfb, 0),# East Asian ideograph
- 0x274557: (0x6866, 0),# East Asian ideograph
- 0x29582d: (0x9cb7, 0),# East Asian ideograph
- 0x235140: (0x991c, 0),# East Asian ideograph
- 0x274558: (0x6734, 0),# East Asian ideograph
- 0x474e5c: (0x97de, 0),# East Asian ideograph (variant of 234E5C which maps to 97DE)
- 0x274366: (0x80e7, 0),# East Asian ideograph
- 0x6f5251: (0xc059, 0),# Korean hangul
- 0x27613c: (0x9a86, 0),# East Asian ideograph
- 0x2d3261: (0x5039, 0),# East Asian ideograph
- 0x27455b: (0x6865, 0),# East Asian ideograph
- 0x275030: (0x8303, 0),# East Asian ideograph
- 0x23455c: (0x9377, 0),# East Asian ideograph
- 0x6f5172: (0xbe4e, 0),# Korean hangul
- 0x27455d: (0x673a, 0),# East Asian ideograph
- 0x213f50: (0x61ca, 0),# East Asian ideograph
- 0x6f5252: (0xc05c, 0),# Korean hangul
- 0x23455e: (0x9358, 0),# East Asian ideograph
- 0x2d5f63: (0x873a, 0),# East Asian ideograph
- 0x27455f: (0x6863, 0),# East Asian ideograph
- 0x224560: (0x6bda, 0),# East Asian ideograph
- 0x33327a: (0x5150, 0),# East Asian ideograph
- 0x274561: (0x68c0, 0),# East Asian ideograph
- 0x29582f: (0x9cb5, 0),# East Asian ideograph
- 0x274562: (0x6867, 0),# East Asian ideograph
- 0x3f347d: (0x84e1, 0),# East Asian ideograph
- 0x6f5253: (0xc060, 0),# Korean hangul
- 0x274563: (0x67e0, 0),# East Asian ideograph
- 0x235131: (0x9917, 0),# East Asian ideograph
- 0x234564: (0x9376, 0),# East Asian ideograph
- 0x274565: (0x67dc, 0),# East Asian ideograph
- 0x213230: (0x5065, 0),# East Asian ideograph
- 0x224a4c: (0x6e1f, 0),# East Asian ideograph
- 0x274566: (0x69db, 0),# East Asian ideograph
- 0x294567: (0x9537, 0),# East Asian ideograph
- 0x6f5254: (0xc068, 0),# Korean hangul
- 0x27613f: (0x9a88, 0),# East Asian ideograph
- 0x6f4c2a: (0xb151, 0),# Korean hangul
- 0x2d4569: (0x6ac9, 0),# East Asian ideograph
- 0x4b314c: (0x5f95, 0),# East Asian ideograph
- 0x6f573f: (0xc81c, 0),# Korean hangul
- 0x23456a: (0x9348, 0),# East Asian ideograph
- 0x27325d: (0x4ef7, 0),# East Asian ideograph
- 0x27456b: (0x691f, 0),# East Asian ideograph
- 0x213f3b: (0x616b, 0),# East Asian ideograph
- 0x295831: (0x9cb6, 0),# East Asian ideograph
- 0x27456c: (0x6809, 0),# East Asian ideograph
- 0x2e4e56: (0x9800, 0),# East Asian ideograph
- 0x6f5255: (0xc069, 0),# Korean hangul
- 0x27456d: (0x6a79, 0),# East Asian ideograph
- 0x276140: (0x9a91, 0),# East Asian ideograph
- 0x23447b: (0x933f, 0),# East Asian ideograph
- 0x27456e: (0x680f, 0),# East Asian ideograph
- 0x27456f: (0x6a31, 0),# East Asian ideograph
- 0x224570: (0x6bea, 0),# East Asian ideograph
- 0x3f456d: (0x8263, 0),# East Asian ideograph
- 0x235145: (0x9927, 0),# East Asian ideograph
- 0x274571: (0x6984, 0),# East Asian ideograph
- 0x2d4a58: (0x7f9d, 0),# East Asian ideograph
- 0x6f5256: (0xc090, 0),# Korean hangul
- 0x217c6d: (0x5ae5, 0),# East Asian ideograph
- 0x286540: (0x7800, 0),# East Asian ideograph
- 0x23447c: (0x933a, 0),# East Asian ideograph
- 0x234573: (0x9360, 0),# East Asian ideograph
- 0x2d4574: (0x5ffb, 0),# East Asian ideograph
- 0x6f5d37: (0xd639, 0),# Korean hangul
- 0x233021: (0x894d, 0),# East Asian ideograph
- 0x6f5257: (0xc091, 0),# Korean hangul
- 0x234577: (0x936e, 0),# East Asian ideograph
- 0x287022: (0x7cc1, 0),# East Asian ideograph
- 0x274578: (0x94a6, 0),# East Asian ideograph
- 0x6f5844: (0xc9f8, 0),# Korean hangul
- 0x213023: (0x4e03, 0),# East Asian ideograph
- 0x225b2d: (0x7486, 0),# East Asian ideograph
- 0x234579: (0x938f, 0),# East Asian ideograph
- 0x233024: (0x895a, 0),# East Asian ideograph
- 0x293a5e: (0x8df9, 0),# East Asian ideograph
- 0x23457a: (0x93ac, 0),# East Asian ideograph
- 0x6f5c5d: (0xd54c, 0),# Korean hangul
- 0x233025: (0x895e, 0),# East Asian ideograph
- 0x335147: (0x7eee, 0),# East Asian ideograph
- 0x23457b: (0x9395, 0),# East Asian ideograph
- 0x213026: (0x4e0a, 0),# East Asian ideograph
- 0x4b4e7b: (0x7985, 0),# East Asian ideograph (variant of 274E7B which maps to 7985)
- 0x6f5258: (0xc094, 0),# Korean hangul
- 0x27457c: (0x6b27, 0),# East Asian ideograph
- 0x295175: (0x9993, 0),# East Asian ideograph
- 0x21383d: (0x57df, 0),# East Asian ideograph
- 0x2e3028: (0x640b, 0),# East Asian ideograph
- 0x27457e: (0x6b24, 0),# East Asian ideograph
- 0x213029: (0x4e10, 0),# East Asian ideograph
- 0x293a5f: (0x8dde, 0),# East Asian ideograph
- 0x2d4a5b: (0x7282, 0),# East Asian ideograph
- 0x22302b: (0x625a, 0),# East Asian ideograph
- 0x6f5259: (0xc098, 0),# Korean hangul
- 0x276144: (0x817e, 0),# East Asian ideograph
- 0x21302c: (0x4e19, 0),# East Asian ideograph
- 0x21383e: (0x580a, 0),# East Asian ideograph
- 0x22302d: (0x6266, 0),# East Asian ideograph
- 0x22702e: (0x7cf0, 0),# East Asian ideograph
- 0x6f5664: (0xc744, 0),# Korean hangul
- 0x293a60: (0x8e2c, 0),# East Asian ideograph
- 0x21302f: (0x4e18, 0),# East Asian ideograph
- 0x217030: (0x5504, 0),# East Asian ideograph
- 0x6f525a: (0xc0a0, 0),# Korean hangul
- 0x234469: (0x9338, 0),# East Asian ideograph
- 0x692126: (0x30fb, 0),# Ideographic centered point
- 0x223031: (0x6286, 0),# East Asian ideograph
- 0x21383f: (0x5805, 0),# East Asian ideograph
- 0x273032: (0x5e76, 0),# East Asian ideograph
- 0x6f594a: (0xcd0c, 0),# Korean hangul
- 0x2d5b69: (0x5ef5, 0),# East Asian ideograph
- 0x6f4c55: (0xb284, 0),# Korean hangul
- 0x275039: (0x7b03, 0),# East Asian ideograph
- 0x6f5666: (0xc74c, 0),# Korean hangul
- 0x692139: (0x3005, 0),# Ideographic iteration mark
- 0x69213c: (0x30fc, 0),# Vowel elongation mark for kana
- 0x213035: (0x4e32, 0),# East Asian ideograph
- 0x695130: (0x5116, 0),# East Asian ideograph
- 0x6f525b: (0xc0a3, 0),# Korean hangul
- 0x276146: (0x9aa0, 0),# East Asian ideograph
- 0x217c72: (0x5aea, 0),# East Asian ideograph
- 0x6f4a77: (0xaf64, 0),# Korean hangul
- 0x23403e: (0x9146, 0),# East Asian ideograph
- 0x2d3263: (0x4fad, 0),# East Asian ideograph
- 0x4b4f29: (0x7a50, 0),# East Asian ideograph
- 0x692152: (0x3008, 0),# Ideographic less than sign
- 0x27503a: (0x7b51, 0),# East Asian ideograph
- 0x692154: (0x300a, 0),# Ideographic left double angle bracket
- 0x692155: (0x300b, 0),# Ideographic right double angle bracket
- 0x217039: (0x54f7, 0),# East Asian ideograph
- 0x21303a: (0x4e43, 0),# East Asian ideograph
- 0x2e4e5d: (0x6de0, 0),# East Asian ideograph
- 0x6f525c: (0xc0a5, 0),# Korean hangul
- 0x21303b: (0x4e45, 0),# East Asian ideograph
- 0x213841: (0x5806, 0),# East Asian ideograph
- 0x217830: (0x58a3, 0),# East Asian ideograph
- 0x6f576a: (0xc8f1, 0),# Korean hangul
- 0x233376: (0x8b1f, 0),# East Asian ideograph
- 0x33514c: (0x7dd1, 0),# East Asian ideograph
- 0x213e21: (0x5fcd, 0),# East Asian ideograph
- 0x276148: (0x84e6, 0),# East Asian ideograph
- 0x2d4b71: (0x7f3e, 0),# East Asian ideograph
- 0x223041: (0x62a3, 0),# East Asian ideograph
- 0x213042: (0x4e52, 0),# East Asian ideograph
- 0x277255: (0x54d3, 0),# East Asian ideograph
- 0x213043: (0x4e53, 0),# East Asian ideograph
- 0x227044: (0x7d03, 0),# East Asian ideograph
- 0x234544: (0x9386, 0),# East Asian ideograph
- 0x287045: (0x7ea8, 0),# East Asian ideograph
- 0x227c31: (0x82af, 0),# East Asian ideograph
- 0x234041: (0x9147, 0),# East Asian ideograph
- 0x2d3954: (0x7385, 0),# East Asian ideograph
- 0x213047: (0x4e5d, 0),# East Asian ideograph
- 0x213049: (0x4e5e, 0),# East Asian ideograph
- 0x6f525f: (0xc0ac, 0),# Korean hangul
- 0x28255a: (0x5d5d, 0),# East Asian ideograph
- 0x273f31: (0x60ed, 0),# East Asian ideograph
- 0x225e5c: (0x75cf, 0),# East Asian ideograph
- 0x29472f: (0x94e9, 0),# East Asian ideograph
- 0x21304b: (0x4e73, 0),# East Asian ideograph
- 0x21304c: (0x4e7e, 0),# East Asian ideograph
- 0x27503e: (0x7bd3, 0),# East Asian ideograph
- 0x6f5667: (0xc74d, 0),# Korean hangul
- 0x27304d: (0x4e71, 0),# East Asian ideograph
- 0x23514f: (0x992e, 0),# East Asian ideograph
- 0x6f5870: (0xcbd4, 0),# Korean hangul
- 0x275b36: (0x8f69, 0),# East Asian ideograph
- 0x6f5260: (0xc0ad, 0),# Korean hangul
- 0x27614b: (0x60ca, 0),# East Asian ideograph
- 0x6f4a78: (0xaf65, 0),# Korean hangul
- 0x227050: (0x7d18, 0),# East Asian ideograph
- 0x227051: (0x7d1e, 0),# East Asian ideograph
- 0x277258: (0x6076, 0),# East Asian ideograph (duplicate simplified)
- 0x393052: (0x65bc, 0),# East Asian ideograph
- 0x235150: (0x992c, 0),# East Asian ideograph
- 0x213053: (0x4e95, 0),# East Asian ideograph
- 0x6f5261: (0xc0ae, 0),# Korean hangul
- 0x294040: (0x90e6, 0),# East Asian ideograph
- 0x213055: (0x4e92, 0),# East Asian ideograph
- 0x2e5f6f: (0x75b8, 0),# East Asian ideograph
- 0x27326a: (0x4fea, 0),# East Asian ideograph
- 0x223057: (0x62d1, 0),# East Asian ideograph
- 0x235151: (0x992a, 0),# East Asian ideograph
- 0x213058: (0x4e9e, 0),# East Asian ideograph
- 0x2d4621: (0x61fd, 0),# East Asian ideograph
- 0x213059: (0x4e9b, 0),# East Asian ideograph
- 0x284333: (0x680e, 0),# East Asian ideograph
- 0x294732: (0x94f4, 0),# East Asian ideograph
- 0x33625e: (0x9eaa, 0),# East Asian ideograph
- 0x22705a: (0x7d3d, 0),# East Asian ideograph
- 0x6f5070: (0xbc18, 0),# Korean hangul
- 0x232223: (0x83fd, 0),# East Asian ideograph
- 0x222224: (0x5bf0, 0),# East Asian ideograph
- 0x232225: (0x841e, 0),# East Asian ideograph
- 0x693c36: (0x96eb, 0),# East Asian ideograph
- 0x232229: (0x83c9, 0),# East Asian ideograph
- 0x23222a: (0x83df, 0),# East Asian ideograph
- 0x23222c: (0x841f, 0),# East Asian ideograph
- 0x23222e: (0x840f, 0),# East Asian ideograph
- 0x69705d: (0x9786, 0),# East Asian ideograph
- 0x232230: (0x8411, 0),# East Asian ideograph
- 0x222233: (0x5c00, 0),# East Asian ideograph
- 0x222235: (0x5c57, 0),# East Asian ideograph
- 0x232236: (0x839a, 0),# East Asian ideograph
- 0x707438: (0x823e, 0),# East Asian ideograph
- 0x33625f: (0x8534, 0),# East Asian ideograph
- 0x21305f: (0x4ea8, 0),# East Asian ideograph
- 0x22223c: (0x5c15, 0),# East Asian ideograph
- 0x333060: (0x4eaf, 0),# East Asian ideograph
- 0x6f5742: (0xc824, 0),# Korean hangul
- 0x232243: (0x83d1, 0),# East Asian ideograph
- 0x275042: (0x7bab, 0),# East Asian ideograph
- 0x222246: (0x5c22, 0),# East Asian ideograph
- 0x223061: (0x62e4, 0),# East Asian ideograph
- 0x222248: (0x5c25, 0),# East Asian ideograph
- 0x23224a: (0x848e, 0),# East Asian ideograph
- 0x22224b: (0x5c2a, 0),# East Asian ideograph
- 0x23224c: (0x8439, 0),# East Asian ideograph
- 0x23224d: (0x8476, 0),# East Asian ideograph
- 0x23224e: (0x8479, 0),# East Asian ideograph
- 0x213c6e: (0x5e8a, 0),# East Asian ideograph
- 0x222252: (0x5c2f, 0),# East Asian ideograph
- 0x217c7b: (0x5ada, 0),# East Asian ideograph
- 0x284335: (0x6a7c, 0),# East Asian ideograph
- 0x4c3b22: (0x6860, 0),# East Asian ideograph
- 0x294734: (0x9566, 0),# East Asian ideograph
- 0x213064: (0x4eba, 0),# East Asian ideograph
- 0x22225b: (0x5c32, 0),# East Asian ideograph
- 0x23225c: (0x8451, 0),# East Asian ideograph
- 0x23225f: (0x847d, 0),# East Asian ideograph
- 0x232262: (0x845a, 0),# East Asian ideograph
- 0x222263: (0x5c3b, 0),# East Asian ideograph
- 0x222265: (0x5c44, 0),# East Asian ideograph
- 0x232266: (0x8459, 0),# East Asian ideograph
- 0x222267: (0x5c49, 0),# East Asian ideograph
- 0x232269: (0x8473, 0),# East Asian ideograph
- 0x23226e: (0x843e, 0),# East Asian ideograph
- 0x6f5265: (0xc0b4, 0),# Korean hangul
- 0x276150: (0x9aa5, 0),# East Asian ideograph
- 0x232271: (0x846d, 0),# East Asian ideograph
- 0x394735: (0x6c3e, 0),# East Asian ideograph
- 0x223069: (0x62b6, 0),# East Asian ideograph
- 0x232278: (0x847a, 0),# East Asian ideograph
- 0x222279: (0x5c59, 0),# East Asian ideograph
- 0x22227b: (0x5c5d, 0),# East Asian ideograph
- 0x22227c: (0x5c5f, 0),# East Asian ideograph
- 0x22706a: (0x7d3f, 0),# East Asian ideograph
- 0x21306b: (0x4ecd, 0),# East Asian ideograph
- 0x2d306c: (0x8b8e, 0),# East Asian ideograph
- 0x6f5266: (0xc0b5, 0),# Korean hangul
- 0x276151: (0x9a8a, 0),# East Asian ideograph
- 0x284337: (0x6987, 0),# East Asian ideograph
- 0x225e63: (0x75e7, 0),# East Asian ideograph
- 0x33512e: (0x7e8d, 0),# East Asian ideograph
- 0x4b306e: (0x4ee4, 0),# East Asian ideograph (variant of 21306E which maps to 4EE4)
- 0x21306f: (0x4ed8, 0),# East Asian ideograph
- 0x235156: (0x992b, 0),# East Asian ideograph
- 0x454146: (0x63db, 0),# East Asian ideograph
- 0x213071: (0x4ed6, 0),# East Asian ideograph
- 0x6f5267: (0xc0b6, 0),# Korean hangul
- 0x213072: (0x4ede, 0),# East Asian ideograph
- 0x6f4e24: (0xb544, 0),# Korean hangul
- 0x6f5a24: (0xcee4, 0),# Korean hangul
- 0x225254: (0x714f, 0),# East Asian ideograph
- 0x215b21: (0x8e76, 0),# East Asian ideograph
- 0x6f5268: (0xc0bc, 0),# Korean hangul
- 0x276153: (0x80ae, 0),# East Asian ideograph
- 0x213077: (0x4ee5, 0),# East Asian ideograph
- 0x215b22: (0x8e7c, 0),# East Asian ideograph
- 0x21384d: (0x5857, 0),# East Asian ideograph
- 0x333078: (0x5f77, 0),# East Asian ideograph
- 0x225d69: (0x756f, 0),# East Asian ideograph
- 0x335e21: (0x9221, 0),# East Asian ideograph
- 0x213079: (0x4f09, 0),# East Asian ideograph
- 0x6f5b24: (0xd0f1, 0),# Korean hangul
- 0x6f5b25: (0xd130, 0),# Korean hangul
- 0x235158: (0x9931, 0),# East Asian ideograph
- 0x4b3e2a: (0x6035, 0),# East Asian ideograph
- 0x275b26: (0x8db8, 0),# East Asian ideograph
- 0x6f5269: (0xc0bd, 0),# Korean hangul
- 0x6f4b2c: (0xafd4, 0),# Korean hangul
- 0x225b27: (0x7482, 0),# East Asian ideograph
- 0x21384e: (0x5858, 0),# East Asian ideograph
- 0x21307d: (0x4f0a, 0),# East Asian ideograph
- 0x215b28: (0x8e8a, 0),# East Asian ideograph
- 0x215b29: (0x8e8d, 0),# East Asian ideograph (variant of 4B5B29 which maps to 8E8D)
- 0x275048: (0x5e18, 0),# East Asian ideograph
- 0x275b2a: (0x8e2f, 0),# East Asian ideograph
- 0x4b6044: (0x9818, 0),# East Asian ideograph
- 0x4d4a6c: (0x9667, 0),# East Asian ideograph
- 0x275b2b: (0x8e51, 0),# East Asian ideograph
- 0x6f526a: (0xc0bf, 0),# Korean hangul
- 0x293b7a: (0x8f8f, 0),# East Asian ideograph
- 0x215a68: (0x8e10, 0),# East Asian ideograph
- 0x23404d: (0x9156, 0),# East Asian ideograph
- 0x215b2d: (0x8eab, 0),# East Asian ideograph
- 0x235b2e: (0x9d8a, 0),# East Asian ideograph
- 0x3f5f34: (0x9699, 0),# East Asian ideograph (not in Unicode)
- 0x274224: (0x6361, 0),# East Asian ideograph
- 0x212320: (0x3000, 0),# Ideographic space in some implementations
- 0x215b30: (0x8eba, 0),# East Asian ideograph
- 0x222323: (0x5c63, 0),# East Asian ideograph
- 0x232324: (0x8432, 0),# East Asian ideograph
- 0x225772: (0x735e, 0),# East Asian ideograph
- 0x212328: (0xff08, 0),# Ideographic left parenthesis
- 0x222329: (0x5c67, 0),# East Asian ideograph
- 0x22232b: (0x5c68, 0),# East Asian ideograph
- 0x23232d: (0x842a, 0),# East Asian ideograph
- 0x23232e: (0x8429, 0),# East Asian ideograph
- 0x222330: (0x5c6d, 0),# East Asian ideograph
- 0x222331: (0x5c6e, 0),# East Asian ideograph
- 0x232332: (0x8471, 0),# East Asian ideograph
- 0x215b33: (0x8ecb, 0),# East Asian ideograph
- 0x232335: (0x845f, 0),# East Asian ideograph
- 0x232336: (0x8460, 0),# East Asian ideograph
- 0x222337: (0x5c74, 0),# East Asian ideograph
- 0x222339: (0x5c73, 0),# East Asian ideograph
- 0x23233a: (0x8446, 0),# East Asian ideograph
- 0x22233b: (0x5c77, 0),# East Asian ideograph
- 0x22233c: (0x5c7a, 0),# East Asian ideograph
- 0x29233d: (0x836e, 0),# East Asian ideograph
- 0x235b35: (0x9d87, 0),# East Asian ideograph
- 0x222340: (0x5c7c, 0),# East Asian ideograph
- 0x6f526c: (0xc0c1, 0),# Korean hangul
- 0x232345: (0x844e, 0),# East Asian ideograph
- 0x222346: (0x5c8f, 0),# East Asian ideograph
- 0x29473c: (0x9568, 0),# East Asian ideograph
- 0x222349: (0x5c88, 0),# East Asian ideograph
- 0x275b37: (0x8f6b, 0),# East Asian ideograph
- 0x22234d: (0x5c99, 0),# East Asian ideograph
- 0x232350: (0x84a1, 0),# East Asian ideograph
- 0x225b38: (0x7480, 0),# East Asian ideograph
- 0x232353: (0x849f, 0),# East Asian ideograph
- 0x222355: (0x5ca6, 0),# East Asian ideograph
- 0x232356: (0x84ba, 0),# East Asian ideograph
- 0x222357: (0x5ca0, 0),# East Asian ideograph
- 0x23515c: (0x993b, 0),# East Asian ideograph
- 0x69255e: (0x30de, 0),# Katakana letter MA
- 0x22235c: (0x5ca2, 0),# East Asian ideograph
- 0x275b3a: (0x8f72, 0),# East Asian ideograph
- 0x23235e: (0x84c1, 0),# East Asian ideograph
- 0x23235f: (0x84bb, 0),# East Asian ideograph
- 0x222360: (0x5cb5, 0),# East Asian ideograph
- 0x222361: (0x5ca7, 0),# East Asian ideograph
- 0x215b3b: (0x8ef8, 0),# East Asian ideograph
- 0x222366: (0x5ca8, 0),# East Asian ideograph
- 0x222367: (0x5cac, 0),# East Asian ideograph
- 0x234050: (0x9158, 0),# East Asian ideograph
- 0x225b3c: (0x7481, 0),# East Asian ideograph
- 0x22236b: (0x5ca3, 0),# East Asian ideograph
- 0x22236c: (0x5cb6, 0),# East Asian ideograph
- 0x22236d: (0x5cc1, 0),# East Asian ideograph
- 0x23456e: (0x9351, 0),# East Asian ideograph
- 0x22236f: (0x5cad, 0),# East Asian ideograph
- 0x232370: (0x84b1, 0),# East Asian ideograph
- 0x232371: (0x849d, 0),# East Asian ideograph
- 0x232372: (0x84d0, 0),# East Asian ideograph
- 0x224666: (0x6c2d, 0),# East Asian ideograph
- 0x232375: (0x8494, 0),# East Asian ideograph
- 0x222378: (0x5cd3, 0),# East Asian ideograph
- 0x232379: (0x84c7, 0),# East Asian ideograph
- 0x23237a: (0x84bd, 0),# East Asian ideograph
- 0x215b3f: (0x8f09, 0),# East Asian ideograph
- 0x23237c: (0x84c2, 0),# East Asian ideograph
- 0x6f526e: (0xc0c8, 0),# Korean hangul
- 0x282569: (0x5d02, 0),# East Asian ideograph
- 0x225b40: (0x7497, 0),# East Asian ideograph
- 0x29473e: (0x94f9, 0),# East Asian ideograph
- 0x23572e: (0x9b93, 0),# East Asian ideograph
- 0x275b41: (0x8f85, 0),# East Asian ideograph
- 0x215b42: (0x8f12, 0),# East Asian ideograph
- 0x27504d: (0x7b79, 0),# East Asian ideograph
- 0x224d5f: (0x6f72, 0),# East Asian ideograph
- 0x225b43: (0x7498, 0),# East Asian ideograph
- 0x6f5b44: (0xd22d, 0),# Korean hangul
- 0x6f526f: (0xc0c9, 0),# Korean hangul
- 0x2e6c46: (0x7be6, 0),# East Asian ideograph
- 0x225b45: (0x749a, 0),# East Asian ideograph
- 0x284340: (0x67a5, 0),# East Asian ideograph
- 0x692526: (0x30a6, 0),# Katakana letter U
- 0x275b46: (0x8f86, 0),# East Asian ideograph
- 0x2d573b: (0x60f7, 0),# East Asian ideograph
- 0x215b47: (0x8f1f, 0),# East Asian ideograph
- 0x275b48: (0x8f89, 0),# East Asian ideograph
- 0x275b49: (0x8f88, 0),# East Asian ideograph
- 0x6f5270: (0xc0cc, 0),# Korean hangul
- 0x27615b: (0x810f, 0),# East Asian ideograph
- 0x275b4a: (0x8f6e, 0),# East Asian ideograph
- 0x234a65: (0x9688, 0),# East Asian ideograph
- 0x6f5845: (0xc9f9, 0),# Korean hangul
- 0x215b4b: (0x8f1c, 0),# East Asian ideograph
- 0x33422a: (0x62e1, 0),# East Asian ideograph
- 0x275b4c: (0x8f90, 0),# East Asian ideograph
- 0x225b4d: (0x74a4, 0),# East Asian ideograph
- 0x235160: (0x993a, 0),# East Asian ideograph
- 0x275b4e: (0x8f93, 0),# East Asian ideograph
- 0x6f516b: (0xbe1d, 0),# Korean hangul
- 0x276131: (0x9a6f, 0),# East Asian ideograph
- 0x215b4f: (0x8f44, 0),# East Asian ideograph
- 0x225a2b: (0x7424, 0),# East Asian ideograph
- 0x2d3b40: (0x5c06, 0),# East Asian ideograph (variant of 273B40 which maps to 5C06)
- 0x275b51: (0x8f95, 0),# East Asian ideograph
- 0x28544f: (0x70ec, 0),# East Asian ideograph
- 0x224d62: (0x6f57, 0),# East Asian ideograph
- 0x29337a: (0x8bcc, 0),# East Asian ideograph
- 0x235161: (0x9941, 0),# East Asian ideograph
- 0x275b53: (0x8206, 0),# East Asian ideograph
- 0x215b54: (0x8f4d, 0),# East Asian ideograph
- 0x692533: (0x30b3, 0),# Katakana letter KO
- 0x4b316a: (0x723c, 0),# East Asian ideograph
- 0x215b55: (0x8f49, 0),# East Asian ideograph
- 0x2d3f31: (0x6159, 0),# East Asian ideograph
- 0x6f5665: (0xc74a, 0),# Korean hangul
- 0x225b56: (0x748d, 0),# East Asian ideograph
- 0x224667: (0x6c30, 0),# East Asian ideograph
- 0x215b57: (0x8f4e, 0),# East Asian ideograph
- 0x275b58: (0x8f70, 0),# East Asian ideograph
- 0x213c71: (0x5e96, 0),# East Asian ideograph
- 0x275b59: (0x8f94, 0),# East Asian ideograph
- 0x234056: (0x9164, 0),# East Asian ideograph
- 0x225a2d: (0x742d, 0),# East Asian ideograph
- 0x6f4c56: (0xb289, 0),# Korean hangul
- 0x232421: (0x8495, 0),# East Asian ideograph
- 0x692422: (0x3042, 0),# Hiragana letter A
- 0x692423: (0x3043, 0),# Hiragana letter small I
- 0x692424: (0x3044, 0),# Hiragana letter I
- 0x692425: (0x3045, 0),# Hiragana letter small U
- 0x222426: (0x5ce0, 0),# East Asian ideograph
- 0x232427: (0x84af, 0),# East Asian ideograph
- 0x222428: (0x5cd2, 0),# East Asian ideograph
- 0x232429: (0x84ad, 0),# East Asian ideograph
- 0x69242a: (0x304a, 0),# Hiragana letter O
- 0x22242b: (0x5ccb, 0),# East Asian ideograph
- 0x69242c: (0x304c, 0),# Hiragana letter GA
- 0x69242d: (0x304d, 0),# Hiragana letter KI
- 0x69242e: (0x304e, 0),# Hiragana letter GI
- 0x215b5d: (0x8fa3, 0),# East Asian ideograph
- 0x222430: (0x5cc7, 0),# East Asian ideograph
- 0x222431: (0x5cdc, 0),# East Asian ideograph
- 0x232432: (0x84a8, 0),# East Asian ideograph
- 0x232433: (0x84d6, 0),# East Asian ideograph
- 0x222434: (0x5d00, 0),# East Asian ideograph
- 0x215b5e: (0x8fa8, 0),# East Asian ideograph
- 0x213859: (0x5875, 0),# East Asian ideograph
- 0x692437: (0x3057, 0),# Hiragana letter SI
- 0x692438: (0x3058, 0),# Hiragana letter ZI
- 0x692439: (0x3059, 0),# Hiragana letter SU
- 0x23243a: (0x8493, 0),# East Asian ideograph
- 0x22243b: (0x5cff, 0),# East Asian ideograph
- 0x22243c: (0x5ceb, 0),# East Asian ideograph
- 0x69243d: (0x305d, 0),# Hiragana letter SO
- 0x69243e: (0x305e, 0),# Hiragana letter ZO
- 0x23243f: (0x84cf, 0),# East Asian ideograph
- 0x692440: (0x3060, 0),# Hiragana letter DA
- 0x232441: (0x84ca, 0),# East Asian ideograph
- 0x692442: (0x3062, 0),# Hiragana letter DI
- 0x692443: (0x3063, 0),# Hiragana letter small TU
- 0x692444: (0x3064, 0),# Hiragana letter TU
- 0x692445: (0x3065, 0),# Hiragana letter DU
- 0x232446: (0x8506, 0),# East Asian ideograph
- 0x232447: (0x850b, 0),# East Asian ideograph
- 0x692448: (0x3068, 0),# Hiragana letter TO
- 0x222449: (0x5d1e, 0),# East Asian ideograph
- 0x22244a: (0x5d12, 0),# East Asian ideograph
- 0x69244b: (0x306b, 0),# Hiragana letter NI
- 0x69244c: (0x306c, 0),# Hiragana letter NU
- 0x23244d: (0x8500, 0),# East Asian ideograph
- 0x69244e: (0x306e, 0),# Hiragana letter NO
- 0x69244f: (0x306f, 0),# Hiragana letter HA
- 0x222450: (0x5d1a, 0),# East Asian ideograph
- 0x692451: (0x3071, 0),# Hiragana letter PA
- 0x222452: (0x5d0c, 0),# East Asian ideograph
- 0x222453: (0x5d20, 0),# East Asian ideograph
- 0x222454: (0x5d21, 0),# East Asian ideograph
- 0x692455: (0x3075, 0),# Hiragana letter HU
- 0x692456: (0x3076, 0),# Hiragana letter BU
- 0x222457: (0x5d27, 0),# East Asian ideograph
- 0x222458: (0x5d0d, 0),# East Asian ideograph
- 0x232459: (0x851f, 0),# East Asian ideograph
- 0x22245a: (0x5d26, 0),# East Asian ideograph
- 0x69245b: (0x307b, 0),# Hiragana letter HO
- 0x23245c: (0x853b, 0),# East Asian ideograph
- 0x22245d: (0x5d2e, 0),# East Asian ideograph
- 0x69245e: (0x307e, 0),# Hiragana letter MA
- 0x23245f: (0x84ea, 0),# East Asian ideograph
- 0x692460: (0x3080, 0),# Hiragana letter MU
- 0x692461: (0x3081, 0),# Hiragana letter ME
- 0x692462: (0x3082, 0),# Hiragana letter MO
- 0x692463: (0x3083, 0),# Hiragana letter small YA
- 0x692464: (0x3084, 0),# Hiragana letter YA
- 0x235b66: (0x9da4, 0),# East Asian ideograph
- 0x232466: (0x84f4, 0),# East Asian ideograph
- 0x692467: (0x3087, 0),# Hiragana letter small YO
- 0x692468: (0x3088, 0),# Hiragana letter YO
- 0x222469: (0x5d24, 0),# East Asian ideograph
- 0x23246a: (0x850c, 0),# East Asian ideograph
- 0x215b67: (0x8fc5, 0),# East Asian ideograph
- 0x69246c: (0x308c, 0),# Hiragana letter RE
- 0x69246d: (0x308d, 0),# Hiragana letter RO
- 0x69246e: (0x308e, 0),# Hiragana letter small WA
- 0x69246f: (0x308f, 0),# Hiragana letter WA
- 0x692470: (0x3090, 0),# Hiragana letter WI
- 0x222471: (0x5d36, 0),# East Asian ideograph
- 0x222472: (0x5d3e, 0),# East Asian ideograph
- 0x692473: (0x3093, 0),# Hiragana letter N
- 0x222474: (0x5d4b, 0),# East Asian ideograph
- 0x232475: (0x8515, 0),# East Asian ideograph
- 0x222476: (0x5d57, 0),# East Asian ideograph
- 0x222477: (0x5d34, 0),# East Asian ideograph
- 0x6f2478: (0x3155, 0),# Korean hangul
- 0x335e2f: (0x5257, 0),# East Asian ideograph
- 0x23247a: (0x84fc, 0),# East Asian ideograph
- 0x6f247b: (0x3158, 0),# Korean hangul
- 0x23247c: (0x84eb, 0),# East Asian ideograph
- 0x23247d: (0x84fd, 0),# East Asian ideograph
- 0x6f247e: (0x315b, 0),# Korean hangul
- 0x235b6b: (0x9d9a, 0),# East Asian ideograph
- 0x235166: (0x993c, 0),# East Asian ideograph
- 0x225b6c: (0x74a5, 0),# East Asian ideograph
- 0x6f5277: (0xc0dd, 0),# Korean hangul
- 0x6f4c31: (0xb179, 0),# Korean hangul
- 0x215b6d: (0x8ff0, 0),# East Asian ideograph (variant of 275B6D which maps to 8FF0)
- 0x21385c: (0x5885, 0),# East Asian ideograph
- 0x69252e: (0x30ae, 0),# Katakana letter GI
- 0x225b6e: (0x74a8, 0),# East Asian ideograph
- 0x235e30: (0x9ec1, 0),# East Asian ideograph
- 0x295921: (0x9cd9, 0),# East Asian ideograph
- 0x6f5b6f: (0xd310, 0),# Korean hangul
- 0x295854: (0x9cc4, 0),# East Asian ideograph
- 0x215b70: (0x8fea, 0),# East Asian ideograph
- 0x705b71: (0x57b4, 0),# East Asian ideograph
- 0x6f5278: (0xc0e4, 0),# Korean hangul
- 0x276163: (0x677e, 0),# East Asian ideograph (duplicate simplified)
- 0x6f4e35: (0xb5b0, 0),# Korean hangul
- 0x69252f: (0x30af, 0),# Katakana letter KU
- 0x234b66: (0x96d8, 0),# East Asian ideograph
- 0x235b74: (0x9db1, 0),# East Asian ideograph
- 0x6f4b7c: (0xb123, 0),# Korean hangul
- 0x4b6053: (0x985e, 0),# East Asian ideograph
- 0x224926: (0x6d6f, 0),# East Asian ideograph
- 0x235b76: (0x9db6, 0),# East Asian ideograph
- 0x234621: (0x93b5, 0),# East Asian ideograph
- 0x276164: (0x80e1, 0),# East Asian ideograph (duplicate simplified)
- 0x235b77: (0x9dbc, 0),# East Asian ideograph
- 0x336275: (0x76bc, 0),# East Asian ideograph
- 0x234623: (0x9388, 0),# East Asian ideograph
- 0x295b79: (0x9e5a, 0),# East Asian ideograph
- 0x51496b: (0x852b, 0),# East Asian ideograph
- 0x215b7a: (0x9003, 0),# East Asian ideograph
- 0x234625: (0x93b9, 0),# East Asian ideograph
- 0x215b7b: (0x8ffd, 0),# East Asian ideograph
- 0x6f5173: (0xbe54, 0),# Korean hangul
- 0x6f527a: (0xc0e8, 0),# Korean hangul
- 0x276165: (0x987b, 0),# East Asian ideograph (duplicate simplified)
- 0x235b7c: (0x9dba, 0),# East Asian ideograph
- 0x21385f: (0x5880, 0),# East Asian ideograph
- 0x234627: (0x93a1, 0),# East Asian ideograph
- 0x215635: (0x85e5, 0),# East Asian ideograph
- 0x275b7d: (0x8fd9, 0),# East Asian ideograph
- 0x234628: (0x93b0, 0),# East Asian ideograph
- 0x235b7e: (0x9dcf, 0),# East Asian ideograph
- 0x234629: (0x93a3, 0),# East Asian ideograph
- 0x21462a: (0x6b77, 0),# East Asian ideograph
- 0x224928: (0x6d61, 0),# East Asian ideograph
- 0x23462b: (0x939b, 0),# East Asian ideograph
- 0x276166: (0x9b13, 0),# East Asian ideograph
- 0x4b3f50: (0x61ca, 0),# East Asian ideograph (variant of 213F50)
- 0x22462c: (0x6bf3, 0),# East Asian ideograph
- 0x692532: (0x30b2, 0),# Katakana letter GE
- 0x23462d: (0x9398, 0),# East Asian ideograph
- 0x213238: (0x5075, 0),# East Asian ideograph
- 0x282626: (0x5cc4, 0),# East Asian ideograph
- 0x4b462e: (0x6b81, 0),# East Asian ideograph
- 0x2f5f45: (0x86a1, 0),# East Asian ideograph
- 0x2d4b22: (0x736a, 0),# East Asian ideograph
- 0x29576e: (0x9ca7, 0),# East Asian ideograph
- 0x692521: (0x30a1, 0),# Katakana letter small A
- 0x692522: (0x30a2, 0),# Katakana letter A
- 0x276167: (0x6597, 0),# East Asian ideograph
- 0x232524: (0x851e, 0),# East Asian ideograph
- 0x222525: (0x5d3f, 0),# East Asian ideograph
- 0x222526: (0x5d52, 0),# East Asian ideograph
- 0x222527: (0x5d3d, 0),# East Asian ideograph
- 0x222528: (0x5d4e, 0),# East Asian ideograph
- 0x692529: (0x30a9, 0),# Katakana letter small O
- 0x23252a: (0x8518, 0),# East Asian ideograph
- 0x69252b: (0x30ab, 0),# Katakana letter KA
- 0x22252c: (0x5d59, 0),# East Asian ideograph
- 0x23252d: (0x8526, 0),# East Asian ideograph
- 0x23252e: (0x8507, 0),# East Asian ideograph (variant of 2F252E which maps to 8507)
- 0x22252f: (0x5d32, 0),# East Asian ideograph
- 0x692530: (0x30b0, 0),# Katakana letter GU
- 0x222531: (0x5d42, 0),# East Asian ideograph
- 0x4c2532: (0x5d5b, 0),# East Asian ideograph
- 0x224633: (0x6bf8, 0),# East Asian ideograph
- 0x232534: (0x84f0, 0),# East Asian ideograph
- 0x232535: (0x84ef, 0),# East Asian ideograph
- 0x232536: (0x8556, 0),# East Asian ideograph
- 0x692537: (0x30b7, 0),# Katakana letter SI
- 0x692538: (0x30b8, 0),# Katakana letter ZI
- 0x222539: (0x5d6f, 0),# East Asian ideograph
- 0x22253a: (0x5d6b, 0),# East Asian ideograph
- 0x696136: (0x753c, 0),# East Asian ideograph
- 0x69253c: (0x30bc, 0),# Katakana letter ZE
- 0x69253d: (0x30bd, 0),# Katakana letter SO
- 0x69253e: (0x30be, 0),# Katakana letter ZO
- 0x274635: (0x6b87, 0),# East Asian ideograph
- 0x692540: (0x30c0, 0),# Katakana letter DA
- 0x276168: (0x95f9, 0),# East Asian ideograph
- 0x692542: (0x30c2, 0),# Katakana letter DI
- 0x692543: (0x30c3, 0),# Katakana letter small TU
- 0x222544: (0x5d4a, 0),# East Asian ideograph
- 0x225e7a: (0x7602, 0),# East Asian ideograph
- 0x232546: (0x8541, 0),# East Asian ideograph
- 0x692534: (0x30b4, 0),# Katakana letter GO
- 0x692548: (0x30c8, 0),# Katakana letter TO
- 0x222549: (0x5d6c, 0),# East Asian ideograph
- 0x22254a: (0x5d62, 0),# East Asian ideograph
- 0x23254b: (0x8558, 0),# East Asian ideograph
- 0x69254c: (0x30cc, 0),# Katakana letter NU
- 0x22254d: (0x5d82, 0),# East Asian ideograph
- 0x23254e: (0x8561, 0),# East Asian ideograph
- 0x23254f: (0x8540, 0),# East Asian ideograph
- 0x222550: (0x5d79, 0),# East Asian ideograph
- 0x224638: (0x6bf9, 0),# East Asian ideograph
- 0x692552: (0x30d2, 0),# Katakana letter HI
- 0x692553: (0x30d3, 0),# Katakana letter BI
- 0x692554: (0x30d4, 0),# Katakana letter PI
- 0x287231: (0x7f03, 0),# East Asian ideograph
- 0x692556: (0x30d6, 0),# Katakana letter BU
- 0x33516d: (0x6374, 0),# East Asian ideograph
- 0x4d222a: (0x83b5, 0),# East Asian ideograph
- 0x692559: (0x30d9, 0),# Katakana letter BE
- 0x22255a: (0x5d81, 0),# East Asian ideograph
- 0x69255b: (0x30db, 0),# Katakana letter HO
- 0x23255c: (0x8564, 0),# East Asian ideograph
- 0x23255d: (0x855e, 0),# East Asian ideograph
- 0x23255e: (0x8573, 0),# East Asian ideograph
- 0x23255f: (0x8551, 0),# East Asian ideograph
- 0x222560: (0x5d7e, 0),# East Asian ideograph
- 0x692561: (0x30e1, 0),# Katakana letter ME
- 0x692562: (0x30e2, 0),# Katakana letter MO
- 0x27463b: (0x6740, 0),# East Asian ideograph
- 0x232564: (0x8562, 0),# East Asian ideograph
- 0x292535: (0x82c1, 0),# East Asian ideograph
- 0x222566: (0x5d92, 0),# East Asian ideograph
- 0x292567: (0x836c, 0),# East Asian ideograph
- 0x222568: (0x5d99, 0),# East Asian ideograph
- 0x27463c: (0x58f3, 0),# East Asian ideograph
- 0x22256a: (0x5da2, 0),# East Asian ideograph
- 0x23256b: (0x8563, 0),# East Asian ideograph
- 0x23256c: (0x848d, 0),# East Asian ideograph
- 0x23256d: (0x8542, 0),# East Asian ideograph
- 0x69256e: (0x30ee, 0),# Katakana letter small WA
- 0x23463d: (0x93a4, 0),# East Asian ideograph
- 0x692570: (0x30f0, 0),# Katakana letter WI
- 0x232571: (0x854e, 0),# East Asian ideograph
- 0x692572: (0x30f2, 0),# Katakana letter WO
- 0x222573: (0x5da1, 0),# East Asian ideograph
- 0x232574: (0x8555, 0),# East Asian ideograph
- 0x222575: (0x5d93, 0),# East Asian ideograph
- 0x232576: (0x855d, 0),# East Asian ideograph
- 0x222577: (0x5da0, 0),# East Asian ideograph
- 0x4b3e40: (0x6046, 0),# East Asian ideograph
- 0x22257b: (0x5d94, 0),# East Asian ideograph
- 0x27616a: (0x90c1, 0),# East Asian ideograph
- 0x22257e: (0x5dac, 0),# East Asian ideograph
- 0x6f4e3c: (0xb5c0, 0),# Korean hangul
- 0x284350: (0x68c2, 0),# East Asian ideograph
- 0x234640: (0x93bc, 0),# East Asian ideograph
- 0x692536: (0x30b6, 0),# Katakana letter ZA
- 0x513421: (0x91d6, 0),# East Asian ideograph
- 0x224642: (0x6bff, 0),# East Asian ideograph
- 0x3f5f49: (0x7431, 0),# East Asian ideograph
- 0x29585c: (0x9cc7, 0),# East Asian ideograph
- 0x6f5c65: (0xd55c, 0),# Korean hangul
- 0x224644: (0x6c06, 0),# East Asian ideograph
- 0x276134: (0x9a7c, 0),# East Asian ideograph
- 0x28656a: (0x789b, 0),# East Asian ideograph
- 0x213865: (0x58c5, 0),# East Asian ideograph
- 0x294750: (0x950e, 0),# East Asian ideograph
- 0x4b3178: (0x5029, 0),# East Asian ideograph (variant of 213178 which maps to 5029)
- 0x234647: (0x93a6, 0),# East Asian ideograph
- 0x29337d: (0x8c27, 0),# East Asian ideograph
- 0x224648: (0x6c04, 0),# East Asian ideograph
- 0x22492e: (0x6d8a, 0),# East Asian ideograph
- 0x453755: (0x56ae, 0),# East Asian ideograph
- 0x2d516a: (0x7db3, 0),# East Asian ideograph
- 0x6f4e3e: (0xb5cc, 0),# Korean hangul
- 0x23464a: (0x93aa, 0),# East Asian ideograph
- 0x294751: (0x950f, 0),# East Asian ideograph
- 0x33627d: (0x6589, 0),# East Asian ideograph
- 0x213423: (0x5291, 0),# East Asian ideograph
- 0x235060: (0x98e3, 0),# East Asian ideograph
- 0x333c21: (0x7895, 0),# East Asian ideograph
- 0x6f5748: (0xc84c, 0),# Korean hangul
- 0x295153: (0x9967, 0),# East Asian ideograph
- 0x22464c: (0x6c08, 0),# East Asian ideograph
- 0x23464d: (0x939e, 0),# East Asian ideograph
- 0x213c74: (0x5ea0, 0),# East Asian ideograph
- 0x6f4e3f: (0xb5cf, 0),# Korean hangul
- 0x23464f: (0x9397, 0),# East Asian ideograph
- 0x692539: (0x30b9, 0),# Katakana letter SU
- 0x27727a: (0x54d5, 0),# East Asian ideograph
- 0x234651: (0x93bb, 0),# East Asian ideograph
- 0x295825: (0x9cba, 0),# East Asian ideograph
- 0x224d73: (0x6fb6, 0),# East Asian ideograph
- 0x224652: (0x6c0d, 0),# East Asian ideograph
- 0x234653: (0x93f1, 0),# East Asian ideograph
- 0x225851: (0x739e, 0),# East Asian ideograph
- 0x6f4e40: (0xb5d1, 0),# Korean hangul
- 0x213868: (0x58d5, 0),# East Asian ideograph
- 0x69253a: (0x30ba, 0),# Katakana letter ZU
- 0x274655: (0x6c14, 0),# East Asian ideograph
- 0x234656: (0x93de, 0),# East Asian ideograph
- 0x234657: (0x93ee, 0),# East Asian ideograph
- 0x234d30: (0x976e, 0),# East Asian ideograph
- 0x274658: (0x6c22, 0),# East Asian ideograph
- 0x27384a: (0x573a, 0),# East Asian ideograph
- 0x223244: (0x63e5, 0),# East Asian ideograph
- 0x224659: (0x6c15, 0),# East Asian ideograph
- 0x51563f: (0x8616, 0),# East Asian ideograph
- 0x23465a: (0x93c7, 0),# East Asian ideograph
- 0x23465b: (0x93f2, 0),# East Asian ideograph
- 0x232625: (0x8580, 0),# East Asian ideograph
- 0x222626: (0x5da7, 0),# East Asian ideograph
- 0x232628: (0x858f, 0),# East Asian ideograph
- 0x22465c: (0x6c1a, 0),# East Asian ideograph
- 0x22262a: (0x5db0, 0),# East Asian ideograph
- 0x23262d: (0x8579, 0),# East Asian ideograph
- 0x22262e: (0x5db4, 0),# East Asian ideograph
- 0x23465d: (0x93d4, 0),# East Asian ideograph
- 0x222630: (0x5db6, 0),# East Asian ideograph
- 0x232632: (0x857f, 0),# East Asian ideograph
- 0x232633: (0x8577, 0),# East Asian ideograph
- 0x232634: (0x8578, 0),# East Asian ideograph
- 0x22465e: (0x6c1d, 0),# East Asian ideograph
- 0x222636: (0x5db7, 0),# East Asian ideograph
- 0x6f4c37: (0xb18b, 0),# Korean hangul
- 0x2d6132: (0x99ee, 0),# East Asian ideograph
- 0x23263d: (0x85a4, 0),# East Asian ideograph
- 0x22263e: (0x5dc3, 0),# East Asian ideograph
- 0x224660: (0x6c20, 0),# East Asian ideograph
- 0x232642: (0x857a, 0),# East Asian ideograph
- 0x222644: (0x5dc7, 0),# East Asian ideograph
- 0x232645: (0x8557, 0),# East Asian ideograph
- 0x222646: (0x5dc9, 0),# East Asian ideograph
- 0x222647: (0x5dcb, 0),# East Asian ideograph
- 0x232649: (0x85a8, 0),# East Asian ideograph
- 0x213d4f: (0x5f59, 0),# East Asian ideograph
- 0x234d32: (0x9778, 0),# East Asian ideograph
- 0x224662: (0x6c21, 0),# East Asian ideograph
- 0x22264e: (0x5dd8, 0),# East Asian ideograph
- 0x232650: (0x8599, 0),# East Asian ideograph
- 0x232651: (0x858a, 0),# East Asian ideograph
- 0x222652: (0x5ddc, 0),# East Asian ideograph
- 0x234663: (0x93ca, 0),# East Asian ideograph
- 0x232654: (0x8590, 0),# East Asian ideograph
- 0x232656: (0x8585, 0),# East Asian ideograph
- 0x232657: (0x8588, 0),# East Asian ideograph
- 0x225a40: (0x7447, 0),# East Asian ideograph
- 0x224664: (0x6c2a, 0),# East Asian ideograph
- 0x23265a: (0x85b8, 0),# East Asian ideograph
- 0x6f5749: (0xc870, 0),# Korean hangul
- 0x23265d: (0x85c1, 0),# East Asian ideograph
- 0x334665: (0x6c61, 0),# East Asian ideograph
- 0x232661: (0x85ba, 0),# East Asian ideograph
- 0x222662: (0x5e00, 0),# East Asian ideograph
- 0x222664: (0x51e7, 0),# East Asian ideograph
- 0x234666: (0x93e8, 0),# East Asian ideograph
- 0x224934: (0x6d79, 0),# East Asian ideograph
- 0x232668: (0x85ce, 0),# East Asian ideograph
- 0x23266a: (0x85c2, 0),# East Asian ideograph
- 0x23266b: (0x85b7, 0),# East Asian ideograph
- 0x23266c: (0x85b9, 0),# East Asian ideograph
- 0x23266e: (0x85b3, 0),# East Asian ideograph
- 0x23266f: (0x85bd, 0),# East Asian ideograph
- 0x232670: (0x85c4, 0),# East Asian ideograph
- 0x224668: (0x6c2c, 0),# East Asian ideograph
- 0x222672: (0x5e14, 0),# East Asian ideograph
- 0x222673: (0x5e17, 0),# East Asian ideograph
- 0x232675: (0x85be, 0),# East Asian ideograph
- 0x222676: (0x5e19, 0),# East Asian ideograph
- 0x224669: (0x6c31, 0),# East Asian ideograph (not in Unicode)
- 0x222678: (0x5e1f, 0),# East Asian ideograph
- 0x22267a: (0x5e23, 0),# East Asian ideograph
- 0x22267b: (0x5e21, 0),# East Asian ideograph
- 0x23267e: (0x85b6, 0),# East Asian ideograph
- 0x295421: (0x9aa3, 0),# East Asian ideograph
- 0x2d4647: (0x6bd8, 0),# East Asian ideograph
- 0x284359: (0x6989, 0),# East Asian ideograph
- 0x2d466d: (0x51b3, 0),# East Asian ideograph
- 0x294758: (0x9561, 0),# East Asian ideograph
- 0x69253f: (0x30bf, 0),# Katakana letter TA
- 0x227c5b: (0x82d0, 0),# East Asian ideograph
- 0x28723c: (0x7f08, 0),# East Asian ideograph
- 0x224670: (0x6c3b, 0),# East Asian ideograph
- 0x295422: (0x9a81, 0),# East Asian ideograph
- 0x234d35: (0x9773, 0),# East Asian ideograph
- 0x276174: (0x9c7c, 0),# East Asian ideograph
- 0x234672: (0x93da, 0),# East Asian ideograph
- 0x234673: (0x93d0, 0),# East Asian ideograph
- 0x335e42: (0x9452, 0),# East Asian ideograph
- 0x2d353c: (0x6b62, 0),# East Asian ideograph
- 0x234674: (0x93ef, 0),# East Asian ideograph
- 0x6f4e37: (0xb5b3, 0),# Korean hangul
- 0x4b4676: (0x6c89, 0),# East Asian ideograph
- 0x213121: (0x4f11, 0),# East Asian ideograph
- 0x276136: (0x9a77, 0),# East Asian ideograph
- 0x21386f: (0x58e2, 0),# East Asian ideograph
- 0x223c6e: (0x68b2, 0),# East Asian ideograph
- 0x6f2472: (0x314f, 0),# Korean hangul
- 0x224678: (0x6c46, 0),# East Asian ideograph
- 0x6f5078: (0xbc29, 0),# Korean hangul
- 0x28723e: (0x7f0c, 0),# East Asian ideograph
- 0x29364e: (0x8d33, 0),# East Asian ideograph
- 0x22467a: (0x6c52, 0),# East Asian ideograph
- 0x213125: (0x4f01, 0),# East Asian ideograph
- 0x234d37: (0x9783, 0),# East Asian ideograph
- 0x215f69: (0x9739, 0),# East Asian ideograph
- 0x276176: (0x9c81, 0),# East Asian ideograph
- 0x6f4e48: (0xb69d, 0),# Korean hangul
- 0x23467c: (0x93cc, 0),# East Asian ideograph
- 0x6f574a: (0xc871, 0),# Korean hangul
- 0x224d7c: (0x6fc6, 0),# East Asian ideograph
- 0x23517b: (0x9954, 0),# East Asian ideograph
- 0x21312a: (0x4f4f, 0),# East Asian ideograph
- 0x234d38: (0x977a, 0),# East Asian ideograph
- 0x213c76: (0x5eab, 0),# East Asian ideograph
- 0x21312b: (0x4f4d, 0),# East Asian ideograph
- 0x6f4e49: (0xb6a4, 0),# Korean hangul
- 0x213871: (0x58e9, 0),# East Asian ideograph
- 0x21312c: (0x4f34, 0),# East Asian ideograph
- 0x6f594c: (0xcd18, 0),# Korean hangul
- 0x21342e: (0x52c3, 0),# East Asian ideograph
- 0x21312d: (0x4f47, 0),# East Asian ideograph
- 0x2d5758: (0x890e, 0),# East Asian ideograph
- 0x21312f: (0x4f3a, 0),# East Asian ideograph
- 0x275b3f: (0x8f7d, 0),# East Asian ideograph
- 0x6f4f3d: (0xb86d, 0),# Korean hangul
- 0x28704a: (0x7ebe, 0),# East Asian ideograph
- 0x222722: (0x5e22, 0),# East Asian ideograph
- 0x286577: (0x789c, 0),# East Asian ideograph
- 0x222724: (0x5e28, 0),# East Asian ideograph
- 0x213872: (0x58eb, 0),# East Asian ideograph
- 0x232728: (0x85f7, 0),# East Asian ideograph
- 0x6f5424: (0xc2dd, 0),# Korean hangul
- 0x23272c: (0x85e6, 0),# East Asian ideograph
- 0x223132: (0x6360, 0),# East Asian ideograph
- 0x23272e: (0x85d4, 0),# East Asian ideograph
- 0x232731: (0x85ed, 0),# East Asian ideograph
- 0x6f5d42: (0xd65c, 0),# Korean hangul
- 0x222735: (0x5e44, 0),# East Asian ideograph
- 0x222736: (0x5e43, 0),# East Asian ideograph
- 0x222739: (0x5e42, 0),# East Asian ideograph
- 0x22273f: (0x5e4e, 0),# East Asian ideograph
- 0x6f4e4b: (0xb6ac, 0),# Korean hangul
- 0x232743: (0x85df, 0),# East Asian ideograph
- 0x232745: (0x85d8, 0),# East Asian ideograph
- 0x692545: (0x30c5, 0),# Katakana letter DU
- 0x222747: (0x5e58, 0),# East Asian ideograph
- 0x222748: (0x5e48, 0),# East Asian ideograph
- 0x513b52: (0x6c3d, 0),# East Asian ideograph
- 0x213137: (0x4f3d, 0),# East Asian ideograph
- 0x23274c: (0x85dc, 0),# East Asian ideograph
- 0x23274e: (0x85f5, 0),# East Asian ideograph
- 0x273138: (0x5e03, 0),# East Asian ideograph
- 0x232752: (0x8622, 0),# East Asian ideograph
- 0x232754: (0x8610, 0),# East Asian ideograph
- 0x285029: (0x6edf, 0),# East Asian ideograph
- 0x232757: (0x85fc, 0),# East Asian ideograph
- 0x222758: (0x5e61, 0),# East Asian ideograph
- 0x23275b: (0x85ff, 0),# East Asian ideograph
- 0x23313a: (0x89d6, 0),# East Asian ideograph
- 0x23275e: (0x85fe, 0),# East Asian ideograph
- 0x22275f: (0x5e6c, 0),# East Asian ideograph
- 0x222760: (0x5e6a, 0),# East Asian ideograph
- 0x222763: (0x5e6e, 0),# East Asian ideograph
- 0x222764: (0x5e6d, 0),# East Asian ideograph
- 0x222765: (0x5e70, 0),# East Asian ideograph
- 0x232768: (0x8604, 0),# East Asian ideograph
- 0x27313c: (0x5360, 0),# East Asian ideograph
- 0x227c6e: (0x8314, 0),# East Asian ideograph
- 0x22276d: (0x5e75, 0),# East Asian ideograph
- 0x232771: (0x8605, 0),# East Asian ideograph
- 0x216757: (0x50a3, 0),# East Asian ideograph
- 0x232775: (0x862b, 0),# East Asian ideograph
- 0x213d51: (0x5f62, 0),# East Asian ideograph
- 0x222777: (0x5e80, 0),# East Asian ideograph
- 0x21313f: (0x4f5c, 0),# East Asian ideograph
- 0x22277e: (0x5e8b, 0),# East Asian ideograph
- 0x275d38: (0x91ca, 0),# East Asian ideograph
- 0x294760: (0x9563, 0),# East Asian ideograph
- 0x213432: (0x52d2, 0),# East Asian ideograph
- 0x33572e: (0x880e, 0),# East Asian ideograph
- 0x2d3543: (0x4edd, 0),# East Asian ideograph
- 0x27506f: (0x7ea0, 0),# East Asian ideograph
- 0x215b27: (0x8e85, 0),# East Asian ideograph
- 0x213142: (0x4f4e, 0),# East Asian ideograph
- 0x217d40: (0x5b19, 0),# East Asian ideograph
- 0x213143: (0x4f5d, 0),# East Asian ideograph
- 0x213c77: (0x5ea7, 0),# East Asian ideograph
- 0x213144: (0x4f36, 0),# East Asian ideograph
- 0x6f4b5c: (0xb0af, 0),# Korean hangul
- 0x6f4e4e: (0xb6f4, 0),# Korean hangul
- 0x213876: (0x58fa, 0),# East Asian ideograph
- 0x223145: (0x6335, 0),# East Asian ideograph
- 0x706067: (0x567b, 0),# East Asian ideograph
- 0x223832: (0x665f, 0),# East Asian ideograph
- 0x295828: (0x9cb4, 0),# East Asian ideograph
- 0x233147: (0x89e1, 0),# East Asian ideograph
- 0x29586e: (0x9ca5, 0),# East Asian ideograph
- 0x213148: (0x4f8d, 0),# East Asian ideograph
- 0x275b40: (0x8f7e, 0),# East Asian ideograph
- 0x6f4e4f: (0xb6f8, 0),# Korean hangul
- 0x275736: (0x8747, 0),# East Asian ideograph
- 0x21314a: (0x4f7f, 0),# East Asian ideograph
- 0x692549: (0x30c9, 0),# Katakana letter DO
- 0x2f5476: (0x9ae1, 0),# East Asian ideograph
- 0x21314b: (0x4f9b, 0),# East Asian ideograph
- 0x275071: (0x7ea3, 0),# East Asian ideograph
- 0x21314c: (0x4f86, 0),# East Asian ideograph
- 0x2d3730: (0x751e, 0),# East Asian ideograph
- 0x21314d: (0x4f6c, 0),# East Asian ideograph
- 0x6f4e50: (0xb700, 0),# Korean hangul
- 0x21314f: (0x4f96, 0),# East Asian ideograph
- 0x213435: (0x52de, 0),# East Asian ideograph
- 0x233c33: (0x8f47, 0),# East Asian ideograph
- 0x213151: (0x4f83, 0),# East Asian ideograph
- 0x287247: (0x7f11, 0),# East Asian ideograph
- 0x697152: (0x99f2, 0),# East Asian ideograph
- 0x453768: (0x5efb, 0),# East Asian ideograph
- 0x213153: (0x4f88, 0),# East Asian ideograph
- 0x276138: (0x9a79, 0),# East Asian ideograph
- 0x4b3f51: (0x61d1, 0),# East Asian ideograph
- 0x6f4e51: (0xb701, 0),# Korean hangul
- 0x213154: (0x4f69, 0),# East Asian ideograph
- 0x69254b: (0x30cb, 0),# Katakana letter NI
- 0x213436: (0x52db, 0),# East Asian ideograph
- 0x6f5826: (0xc998, 0),# Korean hangul
- 0x275073: (0x7eab, 0),# East Asian ideograph
- 0x354156: (0x91be, 0),# East Asian ideograph
- 0x295871: (0x9cce, 0),# East Asian ideograph
- 0x287248: (0x7f0f, 0),# East Asian ideograph
- 0x4b606f: (0x991d, 0),# East Asian ideograph
- 0x233158: (0x89f1, 0),# East Asian ideograph
- 0x294531: (0x9528, 0),# East Asian ideograph
- 0x6f4e52: (0xb728, 0),# Korean hangul
- 0x284366: (0x6924, 0),# East Asian ideograph
- 0x217159: (0x55d0, 0),# East Asian ideograph
- 0x225a4f: (0x7452, 0),# East Asian ideograph
- 0x21315a: (0x4faf, 0),# East Asian ideograph
- 0x232822: (0x8627, 0),# East Asian ideograph
- 0x21715b: (0x55cd, 0),# East Asian ideograph
- 0x274c31: (0x7544, 0),# East Asian ideograph
- 0x232826: (0x8629, 0),# East Asian ideograph
- 0x23315c: (0x89f3, 0),# East Asian ideograph
- 0x224943: (0x6d94, 0),# East Asian ideograph
- 0x213a61: (0x5b7a, 0),# East Asian ideograph
- 0x21315d: (0x4fe0, 0),# East Asian ideograph
- 0x6f4b5d: (0xb0b1, 0),# Korean hangul
- 0x232832: (0x8637, 0),# East Asian ideograph
- 0x395230: (0x5bd8, 0),# East Asian ideograph
- 0x222835: (0x5ea5, 0),# East Asian ideograph
- 0x222836: (0x5eaf, 0),# East Asian ideograph
- 0x213438: (0x52e2, 0),# East Asian ideograph
- 0x232838: (0x8636, 0),# East Asian ideograph
- 0x21315f: (0x4fb6, 0),# East Asian ideograph
- 0x23283e: (0x863c, 0),# East Asian ideograph
- 0x23283f: (0x8640, 0),# East Asian ideograph
- 0x232840: (0x863a, 0),# East Asian ideograph
- 0x233160: (0x89f6, 0),# East Asian ideograph
- 0x222842: (0x5eb9, 0),# East Asian ideograph
- 0x39365a: (0x8ae0, 0),# East Asian ideograph
- 0x227161: (0x7da3, 0),# East Asian ideograph
- 0x22284b: (0x5eb3, 0),# East Asian ideograph
- 0x22284c: (0x5ec4, 0),# East Asian ideograph
- 0x217162: (0x55dd, 0),# East Asian ideograph
- 0x6f4e54: (0xb72c, 0),# Korean hangul
- 0x275d3f: (0x9488, 0),# East Asian ideograph
- 0x294767: (0x94e7, 0),# East Asian ideograph
- 0x69254e: (0x30ce, 0),# Katakana letter NO
- 0x222855: (0x5ecb, 0),# East Asian ideograph
- 0x222857: (0x5ecd, 0),# East Asian ideograph
- 0x213164: (0x4fdf, 0),# East Asian ideograph
- 0x22285a: (0x5ed2, 0),# East Asian ideograph
- 0x22285b: (0x5ed1, 0),# East Asian ideograph
- 0x22285c: (0x5ed5, 0),# East Asian ideograph
- 0x23285e: (0x8659, 0),# East Asian ideograph
- 0x22285f: (0x5ed4, 0),# East Asian ideograph
- 0x222860: (0x5ed9, 0),# East Asian ideograph
- 0x222861: (0x5ece, 0),# East Asian ideograph
- 0x21232d: (0xff0d, 0),# Ideographic hyphen minus
- 0x232866: (0x8661, 0),# East Asian ideograph
- 0x4c2867: (0x5edb, 0),# East Asian ideograph
- 0x222868: (0x5ee1, 0),# East Asian ideograph
- 0x232869: (0x8662, 0),# East Asian ideograph
- 0x23286a: (0x8663, 0),# East Asian ideograph
- 0x287167: (0x7eef, 0),# East Asian ideograph
- 0x22286d: (0x5ee7, 0),# East Asian ideograph
- 0x232871: (0x8669, 0),# East Asian ideograph
- 0x69254f: (0x30cf, 0),# Katakana letter HA
- 0x2d5b7a: (0x8fef, 0),# East Asian ideograph
- 0x217169: (0x55e9, 0),# East Asian ideograph
- 0x232878: (0x866c, 0),# East Asian ideograph
- 0x23287b: (0x8672, 0),# East Asian ideograph
- 0x22287c: (0x5eed, 0),# East Asian ideograph
- 0x21316a: (0x4fce, 0),# East Asian ideograph
- 0x23287e: (0x867b, 0),# East Asian ideograph
- 0x274c34: (0x5f02, 0),# East Asian ideograph
- 0x23462a: (0x93b7, 0),# East Asian ideograph
- 0x21316b: (0x4fd7, 0),# East Asian ideograph
- 0x23316c: (0x8a06, 0),# East Asian ideograph
- 0x276139: (0x9a78, 0),# East Asian ideograph
- 0x6f4e56: (0xb730, 0),# Korean hangul
- 0x294769: (0x9564, 0),# East Asian ideograph
- 0x6f507b: (0xbc31, 0),# Korean hangul
- 0x21316e: (0x500d, 0),# East Asian ideograph
- 0x4b5861: (0x4f89, 0),# East Asian ideograph
- 0x21716f: (0x55cf, 0),# East Asian ideograph
- 0x213170: (0x5026, 0),# East Asian ideograph
- 0x4b3e5b: (0x60c5, 0),# East Asian ideograph
- 0x213171: (0x500c, 0),# East Asian ideograph
- 0x6f4e57: (0xb738, 0),# Korean hangul
- 0x223172: (0x639e, 0),# East Asian ideograph
- 0x692551: (0x30d1, 0),# Katakana letter PA
- 0x21343c: (0x52f5, 0),# East Asian ideograph
- 0x273173: (0x4eec, 0),# East Asian ideograph
- 0x4c4f24: (0x6f46, 0),# East Asian ideograph
- 0x235b7a: (0x9dc1, 0),# East Asian ideograph
- 0x287174: (0x7ef2, 0),# East Asian ideograph
- 0x274c36: (0x753b, 0),# East Asian ideograph
- 0x39365e: (0x559e, 0),# East Asian ideograph
- 0x6f5b21: (0xd0ed, 0),# Korean hangul
- 0x4b5c32: (0x9038, 0),# East Asian ideograph
- 0x6f4b5e: (0xb0b3, 0),# Korean hangul
- 0x6f4e58: (0xb739, 0),# Korean hangul
- 0x2d3177: (0x5e78, 0),# East Asian ideograph
- 0x21343d: (0x52f8, 0),# East Asian ideograph
- 0x217178: (0x55c1, 0),# East Asian ideograph
- 0x6f5c23: (0xd37c, 0),# Korean hangul
- 0x273179: (0x4fe9, 0),# East Asian ideograph
- 0x6f5c24: (0xd37d, 0),# Korean hangul
- 0x29365f: (0x8d47, 0),# East Asian ideograph
- 0x6f5b22: (0xd0ef, 0),# Korean hangul
- 0x6f5c25: (0xd380, 0),# Korean hangul
- 0x21317b: (0x5012, 0),# East Asian ideograph
- 0x6f5c26: (0xd384, 0),# Korean hangul
- 0x6f4e59: (0xb73b, 0),# Korean hangul
- 0x4b317c: (0x5024, 0),# East Asian ideograph
- 0x235c27: (0x9dc3, 0),# East Asian ideograph
- 0x22317d: (0x63ab, 0),# East Asian ideograph
- 0x215c28: (0x901a, 0),# East Asian ideograph
- 0x4c4f26: (0x6edd, 0),# East Asian ideograph
- 0x69717e: (0x9af7, 0),# East Asian ideograph
- 0x215c29: (0x9020, 0),# East Asian ideograph
- 0x216764: (0x5095, 0),# East Asian ideograph
- 0x6f5c2a: (0xd390, 0),# Korean hangul
- 0x6f5c2b: (0xd391, 0),# Korean hangul
- 0x6f4e5a: (0xb744, 0),# Korean hangul
- 0x6f5c2c: (0xd398, 0),# Korean hangul
- 0x695c2d: (0x6928, 0),# East Asian ideograph
- 0x4b372f: (0x5c1c, 0),# East Asian ideograph
- 0x274c39: (0x5f53, 0),# East Asian ideograph
- 0x287251: (0x7f1f, 0),# East Asian ideograph
- 0x6f5c6b: (0xd56c, 0),# Korean hangul
- 0x215c2f: (0x902e, 0),# East Asian ideograph
- 0x27613a: (0x9a7d, 0),# East Asian ideograph
- 0x225c30: (0x74c8, 0),# East Asian ideograph
- 0x6f4e5b: (0xb748, 0),# Korean hangul
- 0x222923: (0x5ef4, 0),# East Asian ideograph
- 0x232925: (0x867a, 0),# East Asian ideograph
- 0x232926: (0x8673, 0),# East Asian ideograph
- 0x225c31: (0x74c5, 0),# East Asian ideograph
- 0x29432b: (0x94c6, 0),# East Asian ideograph
- 0x6f5828: (0xc99d, 0),# Korean hangul
- 0x215c32: (0xfa25, 0),# East Asian ideograph
- 0x23292e: (0x8696, 0),# East Asian ideograph
- 0x27507d: (0x7eaf, 0),# East Asian ideograph
- 0x217671: (0x5827, 0),# East Asian ideograph
- 0x29333b: (0x8c00, 0),# East Asian ideograph
- 0x215c33: (0x9032, 0),# East Asian ideograph
- 0x222935: (0x5f07, 0),# East Asian ideograph
- 0x232936: (0x8691, 0),# East Asian ideograph
- 0x232937: (0x869c, 0),# East Asian ideograph
- 0x235c34: (0x9dac, 0),# East Asian ideograph
- 0x22293a: (0x5f0b, 0),# East Asian ideograph
- 0x23293c: (0x868d, 0),# East Asian ideograph
- 0x23293d: (0x868b, 0),# East Asian ideograph
- 0x6f5c35: (0xd3b5, 0),# Korean hangul
- 0x232940: (0x86a6, 0),# East Asian ideograph
- 0x232942: (0x869d, 0),# East Asian ideograph
- 0x39476f: (0x51c0, 0),# East Asian ideograph
- 0x235c36: (0x9db2, 0),# East Asian ideograph
- 0x232946: (0x86a0, 0),# East Asian ideograph
- 0x2d3f3a: (0x6185, 0),# East Asian ideograph
- 0x232948: (0x86a7, 0),# East Asian ideograph
- 0x22294a: (0x5f28, 0),# East Asian ideograph
- 0x22294b: (0x5f22, 0),# East Asian ideograph
- 0x22294c: (0x5f23, 0),# East Asian ideograph
- 0x22294d: (0x5f24, 0),# East Asian ideograph
- 0x235b7b: (0x9db8, 0),# East Asian ideograph
- 0x225c38: (0x74d6, 0),# East Asian ideograph
- 0x222952: (0x5f30, 0),# East Asian ideograph
- 0x6f5a27: (0xceec, 0),# Korean hangul
- 0x215c39: (0x9054, 0),# East Asian ideograph
- 0x232958: (0x86ba, 0),# East Asian ideograph
- 0x232959: (0x86b0, 0),# East Asian ideograph
- 0x22295c: (0x5f40, 0),# East Asian ideograph
- 0x215c3a: (0x9055, 0),# East Asian ideograph
- 0x6f4e5d: (0xb764, 0),# Korean hangul
- 0x22295f: (0x5f44, 0),# East Asian ideograph
- 0x232960: (0x86b3, 0),# East Asian ideograph
- 0x232962: (0x86c9, 0),# East Asian ideograph
- 0x215c3b: (0x903c, 0),# East Asian ideograph
- 0x232967: (0x86d8, 0),# East Asian ideograph
- 0x222968: (0x5f50, 0),# East Asian ideograph
- 0x215c3c: (0x9047, 0),# East Asian ideograph
- 0x22296a: (0x5f56, 0),# East Asian ideograph
- 0x22296c: (0x5f58, 0),# East Asian ideograph
- 0x2d3e60: (0x6075, 0),# East Asian ideograph
- 0x23296e: (0x86e3, 0),# East Asian ideograph
- 0x225c3d: (0x74d8, 0),# East Asian ideograph
- 0x222970: (0x5f60, 0),# East Asian ideograph
- 0x232971: (0x86ec, 0),# East Asian ideograph
- 0x222972: (0x5f63, 0),# East Asian ideograph
- 0x222973: (0x809c, 0),# East Asian ideograph
- 0x222974: (0x5f67, 0),# East Asian ideograph
- 0x215c3e: (0x904e, 0),# East Asian ideograph
- 0x232977: (0x86d0, 0),# East Asian ideograph
- 0x222978: (0x5f72, 0),# East Asian ideograph
- 0x222979: (0x5f73, 0),# East Asian ideograph
- 0x23297a: (0x86d1, 0),# East Asian ideograph
- 0x2d5c3f: (0x5fa7, 0),# East Asian ideograph
- 0x22297c: (0x5f74, 0),# East Asian ideograph
- 0x23297e: (0x86de, 0),# East Asian ideograph
- 0x225c40: (0x74da, 0),# East Asian ideograph
- 0x213443: (0x5308, 0),# East Asian ideograph
- 0x215c41: (0x9041, 0),# East Asian ideograph
- 0x4c4f2b: (0x701e, 0),# East Asian ideograph
- 0x6f5c42: (0xd3fd, 0),# Korean hangul
- 0x333251: (0x5fba, 0),# East Asian ideograph
- 0x6f5b28: (0xd138, 0),# Korean hangul
- 0x22494f: (0x6d96, 0),# East Asian ideograph
- 0x695c43: (0x6981, 0),# East Asian ideograph
- 0x4b5c39: (0x9039, 0),# East Asian ideograph
- 0x215c44: (0x9060, 0),# East Asian ideograph
- 0x6f4e5f: (0xb770, 0),# Korean hangul
- 0x273b31: (0x5b9e, 0),# East Asian ideograph
- 0x215c45: (0x905c, 0),# East Asian ideograph
- 0x29432f: (0x94f3, 0),# East Asian ideograph
- 0x696325: (0x7907, 0),# East Asian ideograph
- 0x235c46: (0x9dde, 0),# East Asian ideograph
- 0x215c47: (0x9065, 0),# East Asian ideograph
- 0x6f5b29: (0xd140, 0),# Korean hangul
- 0x215c48: (0x905e, 0),# East Asian ideograph
- 0x6f4e38: (0xb5b4, 0),# Korean hangul
- 0x27613b: (0x9a87, 0),# East Asian ideograph
- 0x275c49: (0x9002, 0),# East Asian ideograph
- 0x6f4e60: (0xb771, 0),# Korean hangul
- 0x273b32: (0x5b81, 0),# East Asian ideograph
- 0x29255a: (0x8487, 0),# East Asian ideograph
- 0x6f5c4a: (0xd479, 0),# Korean hangul
- 0x225a5d: (0x7440, 0),# East Asian ideograph
- 0x235e5c: (0x9ee7, 0),# East Asian ideograph
- 0x6f5c4b: (0xd47c, 0),# Korean hangul
- 0x2d3556: (0x7343, 0),# East Asian ideograph
- 0x2d532c: (0x6bd3, 0),# East Asian ideograph
- 0x6f5c4c: (0xd480, 0),# Korean hangul
- 0x336321: (0x6b6f, 0),# East Asian ideograph
- 0x6f5b2a: (0xd141, 0),# Korean hangul
- 0x215c4d: (0x9075, 0),# East Asian ideograph
- 0x6f4c3a: (0xb193, 0),# Korean hangul
- 0x6f5c4e: (0xd489, 0),# Korean hangul
- 0x6f4e61: (0xb775, 0),# Korean hangul
- 0x294774: (0x9571, 0),# East Asian ideograph
- 0x215c4f: (0x9078, 0),# East Asian ideograph
- 0x217435: (0x571a, 0),# East Asian ideograph
- 0x215c50: (0x9072, 0),# East Asian ideograph
- 0x4b3231: (0x4eee, 0),# East Asian ideograph
- 0x275c51: (0x8fc1, 0),# East Asian ideograph
- 0x6f5b2b: (0xd143, 0),# Korean hangul
- 0x275c52: (0x8fbd, 0),# East Asian ideograph
- 0x215c53: (0x907a, 0),# East Asian ideograph
- 0x4d503a: (0x98d1, 0),# East Asian ideograph
- 0x69255c: (0x30dc, 0),# Katakana letter BO
- 0x225c54: (0x74e9, 0),# East Asian ideograph
- 0x217436: (0x571b, 0),# East Asian ideograph
- 0x6f5c55: (0xd508, 0),# Korean hangul
- 0x213a36: (0x5abd, 0),# East Asian ideograph
- 0x215c56: (0x9081, 0),# East Asian ideograph
- 0x6f5b2c: (0xd144, 0),# Korean hangul
- 0x455f35: (0x9668, 0),# East Asian ideograph (Version J extension)
- 0x215c57: (0x9084, 0),# East Asian ideograph
- 0x6f4f3e: (0xb86f, 0),# Korean hangul
- 0x6f5c33: (0xd3ad, 0),# Korean hangul
- 0x225c58: (0x74f1, 0),# East Asian ideograph
- 0x69255d: (0x30dd, 0),# Katakana letter PO
- 0x6f5c59: (0xd53c, 0),# Korean hangul
- 0x215c5a: (0x9087, 0),# East Asian ideograph
- 0x212a21: (0xe8d0, 0),# EACC component character
- 0x212a22: (0xe8d1, 0),# EACC component character
- 0x215c5b: (0x908a, 0),# East Asian ideograph
- 0x212a24: (0xe8d3, 0),# EACC component character
- 0x232a25: (0x870b, 0),# East Asian ideograph
- 0x212a26: (0xe8d5, 0),# EACC component character
- 0x222a27: (0x5f89, 0),# East Asian ideograph
- 0x212a28: (0xe8d6, 0),# EACC component character
- 0x215c5c: (0x9090, 0),# East Asian ideograph
- 0x212a2a: (0xe8d8, 0),# EACC component character
- 0x222a2b: (0x5f94, 0),# East Asian ideograph
- 0x212a2c: (0xe8da, 0),# EACC component character
- 0x212a2d: (0xe8db, 0),# EACC component character
- 0x69727e: (0x9d48, 0),# East Asian ideograph
- 0x215c5d: (0x908f, 0),# East Asian ideograph
- 0x2e284c: (0x5ecf, 0),# East Asian ideograph
- 0x6f4e64: (0xb77c, 0),# Korean hangul
- 0x275d4f: (0x94b4, 0),# East Asian ideograph
- 0x232a33: (0x86f8, 0),# East Asian ideograph
- 0x232a34: (0x8706, 0),# East Asian ideograph
- 0x4b5c5e: (0x961d, 0),# East Asian ideograph
- 0x232a36: (0x870e, 0),# East Asian ideograph
- 0x212a37: (0xe8e4, 0),# EACC component character
- 0x232a38: (0x8709, 0),# East Asian ideograph
- 0x222a39: (0x5f9c, 0),# East Asian ideograph
- 0x232a3a: (0x870a, 0),# East Asian ideograph
- 0x235c5f: (0x9deb, 0),# East Asian ideograph
- 0x212a3c: (0xe8e9, 0),# EACC component character
- 0x222a3d: (0x5f9a, 0),# East Asian ideograph
- 0x232a3e: (0x870d, 0),# East Asian ideograph
- 0x212a3f: (0xe8ec, 0),# EACC component character
- 0x212a40: (0xe8ed, 0),# EACC component character
- 0x6f5c60: (0xd551, 0),# Korean hangul
- 0x232a42: (0x874a, 0),# East Asian ideograph
- 0x232a43: (0x8723, 0),# East Asian ideograph
- 0x232a44: (0x8737, 0),# East Asian ideograph
- 0x232a45: (0x8728, 0),# East Asian ideograph
- 0x222a46: (0x5faf, 0),# East Asian ideograph
- 0x225c61: (0x74f4, 0),# East Asian ideograph
- 0x232a49: (0x8740, 0),# East Asian ideograph
- 0x232a4b: (0x872e, 0),# East Asian ideograph
- 0x232a4c: (0x873d, 0),# East Asian ideograph
- 0x232a4e: (0x871e, 0),# East Asian ideograph
- 0x6f4e65: (0xb77d, 0),# Korean hangul
- 0x222a50: (0x5fbc, 0),# East Asian ideograph
- 0x69255f: (0x30df, 0),# Katakana letter MI
- 0x232a53: (0x8743, 0),# East Asian ideograph
- 0x232a55: (0x8744, 0),# East Asian ideograph
- 0x6f507e: (0xbc38, 0),# Korean hangul
- 0x222a57: (0x5fc9, 0),# East Asian ideograph
- 0x232a59: (0x8729, 0),# East Asian ideograph
- 0x232a5a: (0x8739, 0),# East Asian ideograph
- 0x213241: (0x5091, 0),# East Asian ideograph
- 0x232a5f: (0x871a, 0),# East Asian ideograph
- 0x222a61: (0x5fd2, 0),# East Asian ideograph
- 0x222a63: (0x5fd0, 0),# East Asian ideograph
- 0x232a64: (0x8731, 0),# East Asian ideograph
- 0x232a65: (0x8711, 0),# East Asian ideograph
- 0x232a66: (0x8712, 0),# East Asian ideograph
- 0x222a67: (0x5fce, 0),# East Asian ideograph
- 0x222a68: (0x5fed, 0),# East Asian ideograph
- 0x6f4c3b: (0xb194, 0),# Korean hangul
- 0x232a6b: (0x874f, 0),# East Asian ideograph
- 0x232a6c: (0x8771, 0),# East Asian ideograph
- 0x232a6d: (0x8763, 0),# East Asian ideograph
- 0x275d51: (0x94b8, 0),# East Asian ideograph
- 0x232a71: (0x8764, 0),# East Asian ideograph
- 0x222a72: (0x5fee, 0),# East Asian ideograph
- 0x232a73: (0x8765, 0),# East Asian ideograph
- 0x232a74: (0x877d, 0),# East Asian ideograph
- 0x6f5c69: (0xd569, 0),# Korean hangul
- 0x222a78: (0x5fe1, 0),# East Asian ideograph
- 0x232a79: (0x8758, 0),# East Asian ideograph
- 0x222a7b: (0x5fe4, 0),# East Asian ideograph
- 0x215c6a: (0x90e1, 0),# East Asian ideograph
- 0x28725d: (0x7f1c, 0),# East Asian ideograph
- 0x6f5b30: (0xd150, 0),# Korean hangul
- 0x275c6b: (0x5369, 0),# East Asian ideograph
- 0x3f516d: (0x6403, 0),# East Asian ideograph
- 0x235c6c: (0x9de6, 0),# East Asian ideograph
- 0x6f4e67: (0xb784, 0),# Korean hangul
- 0x275d52: (0x94c0, 0),# East Asian ideograph
- 0x215c6d: (0x90f5, 0),# East Asian ideograph
- 0x33303a: (0x8ffa, 0),# East Asian ideograph
- 0x6f5c6e: (0xd574, 0),# Korean hangul
- 0x6f5c6f: (0xd575, 0),# Korean hangul
- 0x28725e: (0x7f19, 0),# East Asian ideograph
- 0x6f5873: (0xcc0c, 0),# Korean hangul
- 0x705f30: (0x7519, 0),# East Asian ideograph
- 0x215c70: (0x9109, 0),# East Asian ideograph
- 0x215c71: (0x9112, 0),# East Asian ideograph
- 0x6f4e68: (0xb78c, 0),# Korean hangul
- 0x275e68: (0x9617, 0),# East Asian ideograph
- 0x4b5c72: (0x9119, 0),# East Asian ideograph (variant of 215C72 which maps to 9119)
- 0x275153: (0x7eac, 0),# East Asian ideograph
- 0x215c73: (0x912d, 0),# East Asian ideograph
- 0x235a21: (0x9d02, 0),# East Asian ideograph
- 0x215c74: (0x9130, 0),# East Asian ideograph
- 0x28725f: (0x7f1b, 0),# East Asian ideograph
- 0x6f5b32: (0xd15c, 0),# Korean hangul
- 0x224959: (0x6dab, 0),# East Asian ideograph
- 0x215c75: (0x9127, 0),# East Asian ideograph
- 0x6f5c76: (0xd589, 0),# Korean hangul
- 0x2d4228: (0x5117, 0),# East Asian ideograph
- 0x215c77: (0x9139, 0),# East Asian ideograph (variant of 4B5C77 which maps to 9139)
- 0x455e60: (0x95eb, 0),# East Asian ideograph (Version J extension)
- 0x6f5c78: (0xd5a5, 0),# Korean hangul
- 0x235a22: (0x9d03, 0),# East Asian ideograph
- 0x2f3d5d: (0x900e, 0),# East Asian ideograph
- 0x6f5c79: (0xd5c8, 0),# Korean hangul
- 0x224724: (0x6c5c, 0),# East Asian ideograph
- 0x6f5c7a: (0xd5c9, 0),# Korean hangul
- 0x27613d: (0x9a8b, 0),# East Asian ideograph
- 0x2e4a6b: (0x6ea6, 0),# East Asian ideograph
- 0x224726: (0x6c5b, 0),# East Asian ideograph
- 0x275d55: (0x94c5, 0),# East Asian ideograph
- 0x292564: (0x8489, 0),# East Asian ideograph
- 0x224727: (0x6c4d, 0),# East Asian ideograph
- 0x225c7d: (0x7507, 0),# East Asian ideograph
- 0x22474d: (0x6c93, 0),# East Asian ideograph
- 0x235a23: (0x9cf7, 0),# East Asian ideograph
- 0x235c7e: (0x9dfd, 0),# East Asian ideograph
- 0x2d4729: (0x6d29, 0),# East Asian ideograph
- 0x213d57: (0x5f6d, 0),# East Asian ideograph
- 0x234d5a: (0x979f, 0),# East Asian ideograph
- 0x6f4c3c: (0xb1a8, 0),# Korean hangul
- 0x294532: (0x9531, 0),# East Asian ideograph
- 0x22472b: (0x6c4b, 0),# East Asian ideograph
- 0x3f4a28: (0x9df0, 0),# East Asian ideograph
- 0x225a68: (0x7474, 0),# East Asian ideograph
- 0x6f7649: (0xe8bb, 0),# Korean hangul
- 0x6f5751: (0xc886, 0),# Korean hangul
- 0x22472d: (0x6c63, 0),# East Asian ideograph
- 0x6f5b35: (0xd160, 0),# Korean hangul
- 0x4b3d2a: (0x5ee3, 0),# East Asian ideograph
- 0x23472f: (0x93a9, 0),# East Asian ideograph
- 0x28773f: (0x804d, 0),# East Asian ideograph
- 0x232b21: (0x8761, 0),# East Asian ideograph
- 0x692535: (0x30b5, 0),# Katakana letter SA
- 0x222b24: (0x5fea, 0),# East Asian ideograph
- 0x692566: (0x30e6, 0),# Katakana letter YU
- 0x212b26: (0x300d, 0),# Ideographic right corner bracket
- 0x225a69: (0x746e, 0),# East Asian ideograph
- 0x232b28: (0x875f, 0),# East Asian ideograph
- 0x222b2a: (0x6026, 0),# East Asian ideograph
- 0x222b2c: (0x6029, 0),# East Asian ideograph
- 0x232b2d: (0x876f, 0),# East Asian ideograph
- 0x232b2e: (0x875d, 0),# East Asian ideograph
- 0x232b30: (0x876e, 0),# East Asian ideograph
- 0x222b31: (0x6008, 0),# East Asian ideograph
- 0x212b32: (0xff3d, 0),# Ideographic right square bracket
- 0x224733: (0x6c76, 0),# East Asian ideograph
- 0x212b34: (0xff0e, 0),# Ideographic variant full stop
- 0x232b35: (0x8753, 0),# East Asian ideograph
- 0x222b36: (0x600a, 0),# East Asian ideograph
- 0x222b37: (0x600c, 0),# East Asian ideograph
- 0x234d5c: (0x979a, 0),# East Asian ideograph
- 0x214734: (0x6cbb, 0),# East Asian ideograph
- 0x232b3a: (0x87a3, 0),# East Asian ideograph
- 0x4b5e69: (0x95a2, 0),# East Asian ideograph
- 0x222b3c: (0x6017, 0),# East Asian ideograph
- 0x232b3d: (0x8793, 0),# East Asian ideograph
- 0x6f245f: (0x3148, 0),# Korean hangul
- 0x2d4735: (0x6c4e, 0),# East Asian ideograph
- 0x275d58: (0x94c3, 0),# East Asian ideograph
- 0x273b3f: (0x4e13, 0),# East Asian ideograph
- 0x692567: (0x30e7, 0),# Katakana letter small YO
- 0x213452: (0x5331, 0),# East Asian ideograph
- 0x232b45: (0x8799, 0),# East Asian ideograph
- 0x222b46: (0x6010, 0),# East Asian ideograph
- 0x232b48: (0x8788, 0),# East Asian ideograph
- 0x222b4b: (0x6039, 0),# East Asian ideograph
- 0x232b4c: (0x8798, 0),# East Asian ideograph
- 0x222b50: (0x6013, 0),# East Asian ideograph
- 0x224738: (0x6c6c, 0),# East Asian ideograph
- 0x222b53: (0x6054, 0),# East Asian ideograph
- 0x232b54: (0x878b, 0),# East Asian ideograph
- 0x232b55: (0x8784, 0),# East Asian ideograph
- 0x222b57: (0x605d, 0),# East Asian ideograph
- 0x232b58: (0x87a9, 0),# East Asian ideograph
- 0x336b33: (0x524f, 0),# East Asian ideograph
- 0x222b5a: (0x6047, 0),# East Asian ideograph
- 0x2e2b5b: (0x605a, 0),# East Asian ideograph
- 0x232b5d: (0x8789, 0),# East Asian ideograph
- 0x222b5e: (0x6049, 0),# East Asian ideograph
- 0x222b5f: (0x6053, 0),# East Asian ideograph
- 0x232b60: (0x87ad, 0),# East Asian ideograph
- 0x4b4e37: (0x7814, 0),# East Asian ideograph
- 0x23473b: (0x940f, 0),# East Asian ideograph
- 0x232b66: (0x87be, 0),# East Asian ideograph
- 0x222b68: (0x6067, 0),# East Asian ideograph
- 0x23473c: (0x9420, 0),# East Asian ideograph (not in Unicode)
- 0x232b6e: (0x87c4, 0),# East Asian ideograph
- 0x232b6f: (0x87af, 0),# East Asian ideograph
- 0x222b71: (0x6041, 0),# East Asian ideograph
- 0x222b72: (0x6077, 0),# East Asian ideograph
- 0x222b74: (0x6042, 0),# East Asian ideograph
- 0x22473e: (0x6c94, 0),# East Asian ideograph
- 0x222b76: (0x605f, 0),# East Asian ideograph
- 0x232b78: (0x87ae, 0),# East Asian ideograph
- 0x2e6c27: (0x7b2e, 0),# East Asian ideograph
- 0x222b7a: (0x6061, 0),# East Asian ideograph
- 0x6f4e6f: (0xb799, 0),# Korean hangul
- 0x232b7e: (0x87bf, 0),# East Asian ideograph
- 0x692569: (0x30e9, 0),# Katakana letter RA
- 0x224740: (0x6c8f, 0),# East Asian ideograph
- 0x333c52: (0x8cec, 0),# East Asian ideograph
- 0x4b4741: (0x51bd, 0),# East Asian ideograph
- 0x23233c: (0x8452, 0),# East Asian ideograph
- 0x224742: (0x6c65, 0),# East Asian ideograph
- 0x213d58: (0x5f70, 0),# East Asian ideograph
- 0x2d5e61: (0x6ff6, 0),# East Asian ideograph
- 0x6f4c3d: (0xb1cc, 0),# Korean hangul
- 0x69256a: (0x30ea, 0),# Katakana letter RI
- 0x294340: (0x94d6, 0),# East Asian ideograph
- 0x6f5752: (0xc887, 0),# Korean hangul
- 0x4b4b3e: (0xf9ad, 0),# East Asian ideograph
- 0x2d4746: (0x6c79, 0),# East Asian ideograph
- 0x2d6147: (0x99c8, 0),# East Asian ideograph
- 0x224747: (0x6c6f, 0),# East Asian ideograph
- 0x705f39: (0x5416, 0),# East Asian ideograph
- 0x224749: (0x6c9d, 0),# East Asian ideograph
- 0x275d5c: (0x94dc, 0),# East Asian ideograph
- 0x295433: (0x9aa7, 0),# East Asian ideograph
- 0x2f4a2e: (0x90b4, 0),# East Asian ideograph
- 0x22474a: (0x6c69, 0),# East Asian ideograph
- 0x22474b: (0x6c9a, 0),# East Asian ideograph
- 0x21383b: (0x57f7, 0),# East Asian ideograph
- 0x22474c: (0x6c6d, 0),# East Asian ideograph
- 0x23474d: (0x9419, 0),# East Asian ideograph
- 0x275b47: (0x8f8d, 0),# East Asian ideograph
- 0x23474e: (0x940d, 0),# East Asian ideograph
- 0x275d5d: (0x94ed, 0),# East Asian ideograph
- 0x6f4a2f: (0xadff, 0),# Korean hangul
- 0x234750: (0x9426, 0),# East Asian ideograph
- 0x6f5d4a: (0xd68c, 0),# Korean hangul
- 0x224751: (0x6c87, 0),# East Asian ideograph
- 0x39345b: (0x965e, 0),# East Asian ideograph
- 0x224752: (0x6c6e, 0),# East Asian ideograph
- 0x6f4e73: (0xb7a9, 0),# Korean hangul
- 0x69256d: (0x30ed, 0),# Katakana letter RO
- 0x4d4754: (0x9544, 0),# East Asian ideograph
- 0x294343: (0x94d2, 0),# East Asian ideograph
- 0x276d2e: (0x5326, 0),# East Asian ideograph
- 0x235a2c: (0x9cf8, 0),# East Asian ideograph
- 0x224756: (0x6c95, 0),# East Asian ideograph
- 0x234758: (0x9414, 0),# East Asian ideograph
- 0x275d5f: (0x94ec, 0),# East Asian ideograph
- 0x6f4a31: (0xae01, 0),# Korean hangul
- 0x274759: (0x6cea, 0),# East Asian ideograph
- 0x6f582d: (0xc9c8, 0),# Korean hangul
- 0x4c715a: (0x7ee6, 0),# East Asian ideograph
- 0x22475a: (0x6c82, 0),# East Asian ideograph
- 0x2d5340: (0x812c, 0),# East Asian ideograph
- 0x2d3b6e: (0x5d17, 0),# East Asian ideograph
- 0x232c24: (0x87bd, 0),# East Asian ideograph
- 0x23475c: (0x9422, 0),# East Asian ideograph
- 0x222c2b: (0x6092, 0),# East Asian ideograph
- 0x222c2c: (0x609d, 0),# East Asian ideograph
- 0x222c2d: (0x6081, 0),# East Asian ideograph
- 0x23475d: (0x9406, 0),# East Asian ideograph
- 0x232c30: (0x87f3, 0),# East Asian ideograph
- 0x232c31: (0x87f0, 0),# East Asian ideograph
- 0x222c32: (0x6097, 0),# East Asian ideograph
- 0x215673: (0x8725, 0),# East Asian ideograph
- 0x232c34: (0x87ea, 0),# East Asian ideograph
- 0x29475e: (0x9562, 0),# East Asian ideograph
- 0x232c36: (0x87db, 0),# East Asian ideograph
- 0x232c37: (0x87e2, 0),# East Asian ideograph
- 0x232c39: (0x87eb, 0),# East Asian ideograph
- 0x222c3a: (0x6095, 0),# East Asian ideograph
- 0x2d475f: (0x51c4, 0),# East Asian ideograph
- 0x4d2c3c: (0x87e5, 0),# East Asian ideograph
- 0x222c3e: (0x60c7, 0),# East Asian ideograph
- 0x232c3f: (0x87f5, 0),# East Asian ideograph
- 0x217d48: (0x5b21, 0),# East Asian ideograph
- 0x234760: (0x9410, 0),# East Asian ideograph
- 0x222c42: (0x60b0, 0),# East Asian ideograph
- 0x6f4d33: (0xb36a, 0),# Korean hangul
- 0x222c46: (0x60be, 0),# East Asian ideograph
- 0x232c47: (0x87e0, 0),# East Asian ideograph
- 0x222c48: (0x60d4, 0),# East Asian ideograph
- 0x232c49: (0x87dc, 0),# East Asian ideograph
- 0x232c4c: (0x87e3, 0),# East Asian ideograph
- 0x232c4d: (0x8801, 0),# East Asian ideograph
- 0x222c4e: (0x60ce, 0),# East Asian ideograph
- 0x232c4f: (0x8803, 0),# East Asian ideograph
- 0x232c50: (0x880a, 0),# East Asian ideograph
- 0x222c51: (0x60cf, 0),# East Asian ideograph
- 0x222c53: (0x60d9, 0),# East Asian ideograph
- 0x222c54: (0x60b3, 0),# East Asian ideograph
- 0x232c55: (0x87f6, 0),# East Asian ideograph
- 0x222c56: (0x60dd, 0),# East Asian ideograph
- 0x232c57: (0x87f7, 0),# East Asian ideograph
- 0x235a2f: (0x9d2a, 0),# East Asian ideograph
- 0x232c5c: (0x880b, 0),# East Asian ideograph
- 0x232c5d: (0x8806, 0),# East Asian ideograph
- 0x232c5f: (0x87fe, 0),# East Asian ideograph
- 0x222c60: (0x60b1, 0),# East Asian ideograph
- 0x232c61: (0x8810, 0),# East Asian ideograph
- 0x222c62: (0x60e3, 0),# East Asian ideograph
- 0x232c63: (0x8819, 0),# East Asian ideograph
- 0x232c64: (0x8811, 0),# East Asian ideograph
- 0x224766: (0x6cef, 0),# East Asian ideograph
- 0x232c66: (0x8818, 0),# East Asian ideograph
- 0x222c67: (0x60e5, 0),# East Asian ideograph
- 0x222c69: (0x60db, 0),# East Asian ideograph
- 0x232c6a: (0x8813, 0),# East Asian ideograph
- 0x232c6b: (0x8816, 0),# East Asian ideograph
- 0x6f5236: (0xbee0, 0),# Korean hangul
- 0x275d62: (0x950c, 0),# East Asian ideograph
- 0x222c6e: (0x60e9, 0),# East Asian ideograph
- 0x692571: (0x30f1, 0),# Katakana letter WE
- 0x222c70: (0x6114, 0),# East Asian ideograph
- 0x274768: (0x6d45, 0),# East Asian ideograph
- 0x232c72: (0x8834, 0),# East Asian ideograph
- 0x232c73: (0x881c, 0),# East Asian ideograph
- 0x222c75: (0x6119, 0),# East Asian ideograph
- 0x234769: (0x93f7, 0),# East Asian ideograph
- 0x232c7a: (0x881b, 0),# East Asian ideograph
- 0x222c7c: (0x60fd, 0),# East Asian ideograph
- 0x222c7d: (0x610d, 0),# East Asian ideograph
- 0x6f5b41: (0xd1f4, 0),# Korean hangul
- 0x29323b: (0x8bce, 0),# East Asian ideograph
- 0x287042: (0x7ea1, 0),# East Asian ideograph
- 0x4b476c: (0x51c5, 0),# East Asian ideograph
- 0x275d63: (0x9511, 0),# East Asian ideograph
- 0x6f4a35: (0xae0d, 0),# Korean hangul
- 0x223e61: (0x6971, 0),# East Asian ideograph
- 0x22476e: (0x6cad, 0),# East Asian ideograph (variant of 4C476E which maps to 6CAD)
- 0x2d5344: (0x8107, 0),# East Asian ideograph
- 0x23476f: (0x940e, 0),# East Asian ideograph
- 0x6f5b2e: (0xd14c, 0),# Korean hangul
- 0x29323c: (0x8bd2, 0),# East Asian ideograph
- 0x6f4e39: (0xb5b5, 0),# Korean hangul
- 0x224770: (0x6caf, 0),# East Asian ideograph
- 0x295925: (0x9ccc, 0),# East Asian ideograph
- 0x234771: (0x9411, 0),# East Asian ideograph
- 0x692573: (0x30f3, 0),# Katakana letter N
- 0x275921: (0x8c04, 0),# East Asian ideograph
- 0x294349: (0x94d5, 0),# East Asian ideograph
- 0x2d386e: (0x58ca, 0),# East Asian ideograph
- 0x217e23: (0x5b62, 0),# East Asian ideograph
- 0x274774: (0x6e0a, 0),# East Asian ideograph
- 0x6f5b43: (0xd22c, 0),# Korean hangul
- 0x275551: (0x80e1, 0),# East Asian ideograph (duplicate simplified)
- 0x22496a: (0x6dac, 0),# East Asian ideograph
- 0x6f5b3e: (0xd1b3, 0),# Korean hangul
- 0x225265: (0x7168, 0),# East Asian ideograph
- 0x2d467c: (0x6cb2, 0),# East Asian ideograph
- 0x29464a: (0x953c, 0),# East Asian ideograph
- 0x3f3e47: (0x5379, 0),# East Asian ideograph
- 0x21345f: (0x5352, 0),# East Asian ideograph
- 0x274777: (0x6ca6, 0),# East Asian ideograph
- 0x2d6159: (0x9ac4, 0),# East Asian ideograph
- 0x213223: (0x5021, 0),# East Asian ideograph
- 0x234779: (0x9429, 0),# East Asian ideograph
- 0x213224: (0x500b, 0),# East Asian ideograph
- 0x695457: (0x58b8, 0),# East Asian ideograph
- 0x22477a: (0x6cba, 0),# East Asian ideograph
- 0x223225: (0x6387, 0),# East Asian ideograph
- 0x22477b: (0x7553, 0),# East Asian ideograph
- 0x223226: (0x637a, 0),# East Asian ideograph
- 0x6f4b65: (0xb0c5, 0),# Korean hangul
- 0x6f4a38: (0xae34, 0),# Korean hangul
- 0x213460: (0x5354, 0),# East Asian ideograph
- 0x233227: (0x8a51, 0),# East Asian ideograph
- 0x27477d: (0x6d8c, 0),# East Asian ideograph
- 0x213228: (0x4ff3, 0),# East Asian ideograph
- 0x6f5330: (0xc136, 0),# Korean hangul
- 0x3f3d6f: (0x8986, 0),# East Asian ideograph
- 0x287272: (0x7f21, 0),# East Asian ideograph
- 0x6f4c44: (0xb205, 0),# Korean hangul
- 0x213229: (0x502d, 0),# East Asian ideograph
- 0x28742e: (0x7f42, 0),# East Asian ideograph
- 0x6f4f3f: (0xb871, 0),# Korean hangul
- 0x22322a: (0x6386, 0),# East Asian ideograph
- 0x4c5c61: (0x74f4, 0),# East Asian ideograph (variant of 225C61 which maps to 74F4)
- 0x6f4e7c: (0xb7f0, 0),# Korean hangul
- 0x6f5237: (0xbee3, 0),# Korean hangul
- 0x21722b: (0x55f9, 0),# East Asian ideograph
- 0x692576: (0x30f6, 0),# Katakana letter small KE
- 0x223860: (0x6673, 0),# East Asian ideograph
- 0x21322d: (0x502b, 0),# East Asian ideograph
- 0x6f5d4c: (0xd69f, 0),# Korean hangul
- 0x6f5b46: (0xd234, 0),# Korean hangul
- 0x21322e: (0x505c, 0),# East Asian ideograph
- 0x22496d: (0x6dd5, 0),# East Asian ideograph
- 0x232b53: (0x8785, 0),# East Asian ideograph
- 0x21322f: (0x504f, 0),# East Asian ideograph
- 0x225f2f: (0x760f, 0),# East Asian ideograph
- 0x292657: (0x835f, 0),# East Asian ideograph
- 0x233230: (0x8a56, 0),# East Asian ideograph
- 0x232d23: (0x8828, 0),# East Asian ideograph
- 0x217231: (0x560c, 0),# East Asian ideograph
- 0x232d2a: (0x8832, 0),# East Asian ideograph
- 0x222d2c: (0x6110, 0),# East Asian ideograph
- 0x232d2e: (0x882e, 0),# East Asian ideograph
- 0x213e35: (0x600e, 0),# East Asian ideograph
- 0x232d32: (0x882d, 0),# East Asian ideograph
- 0x213233: (0x5049, 0),# East Asian ideograph
- 0x222d34: (0x60f2, 0),# East Asian ideograph
- 0x222d37: (0x6125, 0),# East Asian ideograph
- 0x277234: (0x551b, 0),# East Asian ideograph
- 0x222d3b: (0x60f8, 0),# East Asian ideograph
- 0x232d3c: (0x883c, 0),# East Asian ideograph
- 0x6f4e7e: (0xb7fc, 0),# Korean hangul
- 0x273235: (0x4fa7, 0),# East Asian ideograph
- 0x222d41: (0x60fc, 0),# East Asian ideograph
- 0x232d42: (0x4610, 0),# East Asian ideograph (not in Unicode)
- 0x275926: (0x8c1a, 0),# East Asian ideograph
- 0x232d44: (0x8844, 0),# East Asian ideograph
- 0x227236: (0x7df9, 0),# East Asian ideograph
- 0x222d48: (0x6149, 0),# East Asian ideograph
- 0x222d4a: (0x614a, 0),# East Asian ideograph
- 0x232d4b: (0x8847, 0),# East Asian ideograph
- 0x222d4e: (0x612b, 0),# East Asian ideograph
- 0x287275: (0x7d77, 0),# East Asian ideograph
- 0x222d50: (0x6129, 0),# East Asian ideograph
- 0x222d51: (0x6150, 0),# East Asian ideograph
- 0x232d53: (0x884e, 0),# East Asian ideograph
- 0x232d56: (0x8852, 0),# East Asian ideograph
- 0x233239: (0x8a48, 0),# East Asian ideograph
- 0x222d58: (0x6130, 0),# East Asian ideograph
- 0x232d59: (0x8856, 0),# East Asian ideograph
- 0x232d5a: (0x8855, 0),# East Asian ideograph
- 0x222d5b: (0x6141, 0),# East Asian ideograph
- 0x21323a: (0x5055, 0),# East Asian ideograph
- 0x232d5e: (0x885c, 0),# East Asian ideograph
- 0x232d5f: (0x885a, 0),# East Asian ideograph
- 0x222d61: (0x6146, 0),# East Asian ideograph
- 0x22323b: (0x6390, 0),# East Asian ideograph
- 0x222d66: (0x615e, 0),# East Asian ideograph
- 0x222d67: (0x6175, 0),# East Asian ideograph
- 0x222d68: (0x6174, 0),# East Asian ideograph
- 0x232d69: (0x8869, 0),# East Asian ideograph
- 0x21316c: (0x5009, 0),# East Asian ideograph
- 0x222d6b: (0x6183, 0),# East Asian ideograph
- 0x232d6d: (0x886d, 0),# East Asian ideograph
- 0x232d6e: (0x887a, 0),# East Asian ideograph
- 0x21323d: (0x508d, 0),# East Asian ideograph
- 0x222d70: (0x6171, 0),# East Asian ideograph
- 0x232d71: (0x8875, 0),# East Asian ideograph
- 0x222757: (0x5e5e, 0),# East Asian ideograph
- 0x222d74: (0x616a, 0),# East Asian ideograph
- 0x232d75: (0x8872, 0),# East Asian ideograph
- 0x6f5045: (0xbb0f, 0),# Korean hangul
- 0x222d77: (0x6173, 0),# East Asian ideograph
- 0x232d79: (0x887d, 0),# East Asian ideograph
- 0x455564: (0x6fdb, 0),# East Asian ideograph (Version J extension)
- 0x222d7b: (0x6153, 0),# East Asian ideograph
- 0x224234: (0x6a99, 0),# East Asian ideograph
- 0x232d7d: (0x887f, 0),# East Asian ideograph
- 0x232d7e: (0x887e, 0),# East Asian ideograph
- 0x275928: (0x8bb3, 0),# East Asian ideograph
- 0x294350: (0x94df, 0),# East Asian ideograph
- 0x233240: (0x8a3d, 0),# East Asian ideograph
- 0x696126: (0x74f2, 0),# East Asian ideograph
- 0x6f567b: (0xc794, 0),# Korean hangul
- 0x273241: (0x6770, 0),# East Asian ideograph
- 0x6f5874: (0xcc0d, 0),# Korean hangul
- 0x6f5b4a: (0xd241, 0),# Korean hangul
- 0x213242: (0x5080, 0),# East Asian ideograph
- 0x4b5c5b: (0x8fba, 0),# East Asian ideograph
- 0x223243: (0x63de, 0),# East Asian ideograph
- 0x6f5238: (0xbee4, 0),# Korean hangul
- 0x213244: (0x5098, 0),# East Asian ideograph
- 0x6f4a3e: (0xae44, 0),# Korean hangul
- 0x275929: (0x8c10, 0),# East Asian ideograph
- 0x2d4539: (0x6406, 0),# East Asian ideograph
- 0x213246: (0x50b3, 0),# East Asian ideograph
- 0x274c60: (0x75a1, 0),# East Asian ideograph
- 0x6f5b4b: (0xd264, 0),# Korean hangul
- 0x273247: (0x503a, 0),# East Asian ideograph
- 0x227248: (0x7df6, 0),# East Asian ideograph
- 0x23492e: (0x9585, 0),# East Asian ideograph
- 0x213249: (0x50c5, 0),# East Asian ideograph
- 0x6f4a3f: (0xae45, 0),# Korean hangul
- 0x27592a: (0x8c0d, 0),# East Asian ideograph
- 0x223866: (0x666d, 0),# East Asian ideograph
- 0x21724b: (0x564b, 0),# East Asian ideograph
- 0x274c61: (0x759f, 0),# East Asian ideograph
- 0x6f5b4c: (0xd277, 0),# Korean hangul
- 0x21324c: (0x50b7, 0),# East Asian ideograph
- 0x393246: (0x4f1d, 0),# East Asian ideograph
- 0x21324d: (0x50af, 0),# East Asian ideograph
- 0x275d6e: (0x9530, 0),# East Asian ideograph
- 0x6f4a40: (0xae4a, 0),# Korean hangul
- 0x27592b: (0x8c0b, 0),# East Asian ideograph
- 0x6f5830: (0xc9d1, 0),# Korean hangul
- 0x21324f: (0x50ee, 0),# East Asian ideograph
- 0x213250: (0x50f1, 0),# East Asian ideograph
- 0x274c62: (0x75ea, 0),# East Asian ideograph
- 0x333963: (0x59c9, 0),# East Asian ideograph
- 0x213251: (0x50e5, 0),# East Asian ideograph
- 0x217252: (0x5640, 0),# East Asian ideograph
- 0x292433: (0x8298, 0),# East Asian ideograph (duplicate simplified)
- 0x69515e: (0x51e9, 0),# East Asian ideograph
- 0x287253: (0x7f12, 0),# East Asian ideograph
- 0x6f575a: (0xc89f, 0),# Korean hangul
- 0x223b63: (0x67d8, 0),# East Asian ideograph
- 0x213255: (0x50d5, 0),# East Asian ideograph
- 0x274c63: (0x75af, 0),# East Asian ideograph
- 0x4c523a: (0x717a, 0),# East Asian ideograph
- 0x213256: (0x507d, 0),# East Asian ideograph
- 0x234d74: (0x97ab, 0),# East Asian ideograph
- 0x22674b: (0x798a, 0),# East Asian ideograph
- 0x213257: (0x50cf, 0),# East Asian ideograph
- 0x234931: (0x958c, 0),# East Asian ideograph
- 0x213258: (0x50d1, 0),# East Asian ideograph
- 0x224235: (0x6a9d, 0),# East Asian ideograph
- 0x27592d: (0x8c13, 0),# East Asian ideograph
- 0x294355: (0x94eb, 0),# East Asian ideograph
- 0x273259: (0x4eea, 0),# East Asian ideograph
- 0x6f567c: (0xc796, 0),# Korean hangul
- 0x21325a: (0x5104, 0),# East Asian ideograph
- 0x6f5b4f: (0xd288, 0),# Korean hangul
- 0x222e23: (0x618b, 0),# East Asian ideograph
- 0x2d5129: (0x7d25, 0),# East Asian ideograph
- 0x232e28: (0x88a2, 0),# East Asian ideograph
- 0x21325c: (0x50f5, 0),# East Asian ideograph
- 0x232e2a: (0x88a4, 0),# East Asian ideograph
- 0x222e2c: (0x616f, 0),# East Asian ideograph
- 0x222e2d: (0x6165, 0),# East Asian ideograph
- 0x6f5239: (0xbee5, 0),# Korean hangul
- 0x232e2f: (0x88aa, 0),# East Asian ideograph
- 0x276135: (0x9a7e, 0),# East Asian ideograph
- 0x222e32: (0x619d, 0),# East Asian ideograph
- 0x222e33: (0x61a6, 0),# East Asian ideograph
- 0x232e34: (0x889a, 0),# East Asian ideograph
- 0x27325e: (0x4fac, 0),# East Asian ideograph
- 0x235a3f: (0x9d1e, 0),# East Asian ideograph
- 0x232e3a: (0x8890, 0),# East Asian ideograph
- 0x232e3b: (0x888c, 0),# East Asian ideograph
- 0x232e3d: (0x88a0, 0),# East Asian ideograph
- 0x232e40: (0x8899, 0),# East Asian ideograph
- 0x213260: (0x5108, 0),# East Asian ideograph
- 0x222e42: (0x619c, 0),# East Asian ideograph
- 0x222e43: (0x61af, 0),# East Asian ideograph
- 0x232e45: (0x8897, 0),# East Asian ideograph
- 0x222e46: (0x6197, 0),# East Asian ideograph
- 0x222e47: (0x61ad, 0),# East Asian ideograph
- 0x232e48: (0x88c9, 0),# East Asian ideograph
- 0x232e49: (0x88bf, 0),# East Asian ideograph
- 0x232e4a: (0x88ba, 0),# East Asian ideograph
- 0x222e4c: (0x6192, 0),# East Asian ideograph
- 0x213262: (0x5110, 0),# East Asian ideograph
- 0x232e4f: (0x88c0, 0),# East Asian ideograph
- 0x6f4a44: (0xae4d, 0),# Korean hangul
- 0x232e51: (0x88b2, 0),# East Asian ideograph
- 0x222e52: (0x61ae, 0),# East Asian ideograph
- 0x213263: (0x5118, 0),# East Asian ideograph
- 0x232e54: (0x88bc, 0),# East Asian ideograph
- 0x222e55: (0x618d, 0),# East Asian ideograph
- 0x232e57: (0x88b7, 0),# East Asian ideograph
- 0x232e59: (0x88bd, 0),# East Asian ideograph
- 0x232e5a: (0x88c4, 0),# East Asian ideograph
- 0x2d313a: (0x62bb, 0),# East Asian ideograph
- 0x222e5c: (0x61cc, 0),# East Asian ideograph
- 0x222e5d: (0x61c6, 0),# East Asian ideograph
- 0x232e5e: (0x88cb, 0),# East Asian ideograph
- 0x213265: (0x5114, 0),# East Asian ideograph
- 0x232e60: (0x88cc, 0),# East Asian ideograph
- 0x232e62: (0x88db, 0),# East Asian ideograph
- 0x232e64: (0x88ce, 0),# East Asian ideograph
- 0x224535: (0x6ba3, 0),# East Asian ideograph
- 0x234934: (0x9597, 0),# East Asian ideograph
- 0x222e68: (0x61ba, 0),# East Asian ideograph
- 0x222e6a: (0x61b8, 0),# East Asian ideograph
- 0x273267: (0x507f, 0),# East Asian ideograph
- 0x6f4a45: (0xae4e, 0),# Korean hangul
- 0x275930: (0x8c15, 0),# East Asian ideograph
- 0x294358: (0x94ef, 0),# East Asian ideograph
- 0x232e71: (0x88f1, 0),# East Asian ideograph
- 0x232e72: (0x88fe, 0),# East Asian ideograph
- 0x6f5d66: (0xd734, 0),# Korean hangul
- 0x232e75: (0x88f2, 0),# East Asian ideograph
- 0x233269: (0x8a8f, 0),# East Asian ideograph
- 0x232e78: (0x8900, 0),# East Asian ideograph
- 0x213f2e: (0x6176, 0),# East Asian ideograph
- 0x232e7a: (0x88f0, 0),# East Asian ideograph
- 0x6f5b52: (0xd293, 0),# Korean hangul
- 0x222e7d: (0x61dc, 0),# East Asian ideograph
- 0x222e7e: (0x61df, 0),# East Asian ideograph
- 0x69723b: (0x9b96, 0),# East Asian ideograph
- 0x21326b: (0x513c, 0),# East Asian ideograph
- 0x276069: (0x996a, 0),# East Asian ideograph
- 0x294359: (0x94e5, 0),# East Asian ideograph
- 0x4b5434: (0x6319, 0),# East Asian ideograph
- 0x6f5b53: (0xd295, 0),# Korean hangul
- 0x21326f: (0x5145, 0),# East Asian ideograph
- 0x223270: (0x63be, 0),# East Asian ideograph
- 0x234936: (0x958e, 0),# East Asian ideograph
- 0x2d4249: (0x53d9, 0),# East Asian ideograph
- 0x213271: (0x5146, 0),# East Asian ideograph
- 0x224236: (0x6a7e, 0),# East Asian ideograph
- 0x6f4a47: (0xae54, 0),# Korean hangul
- 0x275932: (0x8c26, 0),# East Asian ideograph
- 0x217272: (0x5660, 0),# East Asian ideograph
- 0x295834: (0x9cbb, 0),# East Asian ideograph
- 0x223273: (0x63dd, 0),# East Asian ideograph
- 0x6f5b54: (0xd29c, 0),# Korean hangul
- 0x22497b: (0x6dbf, 0),# East Asian ideograph
- 0x213275: (0x514c, 0),# East Asian ideograph
- 0x6f523a: (0xbeec, 0),# Korean hangul
- 0x2d3a26: (0x5a3f, 0),# East Asian ideograph
- 0x6f5d21: (0xd5d9, 0),# Korean hangul
- 0x283f5c: (0x6769, 0),# East Asian ideograph
- 0x29435b: (0x94e3, 0),# East Asian ideograph
- 0x213277: (0x514d, 0),# East Asian ideograph
- 0x6f5d22: (0xd5db, 0),# Korean hangul
- 0x2d5d23: (0x9167, 0),# East Asian ideograph
- 0x274c6a: (0x75ae, 0),# East Asian ideograph
- 0x6f4c2e: (0xb158, 0),# Korean hangul
- 0x213279: (0x5154, 0),# East Asian ideograph
- 0x705f54: (0x54b4, 0),# East Asian ideograph
- 0x6f5d24: (0xd5e4, 0),# Korean hangul
- 0x29324f: (0x8bd6, 0),# East Asian ideograph
- 0x393460: (0x604a, 0),# East Asian ideograph
- 0x273859: (0x5c18, 0),# East Asian ideograph
- 0x225d25: (0x750e, 0),# East Asian ideograph
- 0x21327b: (0x5157, 0),# East Asian ideograph
- 0x6f5d26: (0xd5e8, 0),# Korean hangul
- 0x275934: (0x8bb2, 0),# East Asian ideograph
- 0x223870: (0x6684, 0),# East Asian ideograph
- 0x235d27: (0x9e0e, 0),# East Asian ideograph
- 0x6f577c: (0xc961, 0),# Korean hangul
- 0x21327d: (0x5162, 0),# East Asian ideograph
- 0x225d28: (0x750d, 0),# East Asian ideograph
- 0x213e38: (0x6059, 0),# East Asian ideograph
- 0x23327e: (0x8ab6, 0),# East Asian ideograph
- 0x295d29: (0x9e71, 0),# East Asian ideograph
- 0x293250: (0x8bd3, 0),# East Asian ideograph
- 0x275d2a: (0x9154, 0),# East Asian ideograph
- 0x235d2b: (0x9e11, 0),# East Asian ideograph
- 0x2f4a4a: (0x5f8f, 0),# East Asian ideograph
- 0x275935: (0x8c0e, 0),# East Asian ideograph
- 0x225d2c: (0x7511, 0),# East Asian ideograph
- 0x225d2d: (0x750f, 0),# East Asian ideograph
- 0x2d3140: (0x4f32, 0),# East Asian ideograph
- 0x6f5b57: (0xd2ac, 0),# Korean hangul
- 0x6f5d2e: (0xd604, 0),# Korean hangul
- 0x215d2f: (0x919e, 0),# East Asian ideograph
- 0x275d79: (0x952e, 0),# East Asian ideograph
- 0x275d30: (0x4e11, 0),# East Asian ideograph
- 0x6f4a4b: (0xae61, 0),# Korean hangul
- 0x232f23: (0x88ef, 0),# East Asian ideograph
- 0x232f24: (0x8903, 0),# East Asian ideograph
- 0x39456d: (0x826b, 0),# East Asian ideograph
- 0x6f5758: (0xc89c, 0),# Korean hangul
- 0x215d31: (0x91ab, 0),# East Asian ideograph
- 0x225648: (0x72a8, 0),# East Asian ideograph
- 0x222f29: (0x61f3, 0),# East Asian ideograph
- 0x275d32: (0x9171, 0),# East Asian ideograph
- 0x274c6d: (0x75e8, 0),# East Asian ideograph
- 0x212f30: (0x3007, 0),# East Asian ideograph (number zero)
- 0x225d33: (0x7513, 0),# East Asian ideograph
- 0x232f35: (0x8906, 0),# East Asian ideograph
- 0x232f36: (0x890c, 0),# East Asian ideograph
- 0x232f37: (0x8919, 0),# East Asian ideograph
- 0x215d34: (0x91c0, 0),# East Asian ideograph
- 0x232f3d: (0x890a, 0),# East Asian ideograph
- 0x6f4b69: (0xb0d0, 0),# Korean hangul
- 0x215d35: (0x91c1, 0),# East Asian ideograph
- 0x6f4a4c: (0xae62, 0),# Korean hangul
- 0x222f41: (0x6204, 0),# East Asian ideograph
- 0x235742: (0x9b9e, 0),# East Asian ideograph
- 0x222f43: (0x6207, 0),# East Asian ideograph
- 0x222f44: (0x6209, 0),# East Asian ideograph
- 0x232f45: (0x892f, 0),# East Asian ideograph
- 0x232f47: (0x8930, 0),# East Asian ideograph
- 0x345e47: (0x75fe, 0),# East Asian ideograph
- 0x235d37: (0x9e18, 0),# East Asian ideograph
- 0x274c6e: (0x7597, 0),# East Asian ideograph
- 0x232f4e: (0x8921, 0),# East Asian ideograph
- 0x232f4f: (0x8927, 0),# East Asian ideograph
- 0x232f51: (0x891f, 0),# East Asian ideograph
- 0x232f53: (0x8931, 0),# East Asian ideograph
- 0x232f54: (0x891e, 0),# East Asian ideograph
- 0x295029: (0x98a5, 0),# East Asian ideograph
- 0x232f56: (0x8926, 0),# East Asian ideograph
- 0x232f57: (0x8922, 0),# East Asian ideograph
- 0x232f5a: (0x8935, 0),# East Asian ideograph
- 0x222f5b: (0x6225, 0),# East Asian ideograph
- 0x232f5d: (0x8941, 0),# East Asian ideograph
- 0x275938: (0x8c22, 0),# East Asian ideograph
- 0x232f60: (0x8933, 0),# East Asian ideograph
- 0x222f61: (0x6229, 0),# East Asian ideograph
- 0x235d3b: (0x9e1d, 0),# East Asian ideograph
- 0x232f66: (0x8954, 0),# East Asian ideograph
- 0x222f67: (0x622d, 0),# East Asian ideograph
- 0x6f5d50: (0xd6c5, 0),# Korean hangul
- 0x215d3c: (0x91cf, 0),# East Asian ideograph
- 0x6f5b5a: (0xd2b9, 0),# Korean hangul
- 0x222f6e: (0x6239, 0),# East Asian ideograph
- 0x222f6f: (0x623a, 0),# East Asian ideograph
- 0x222f70: (0x623d, 0),# East Asian ideograph
- 0x232f72: (0x8947, 0),# East Asian ideograph
- 0x27385a: (0x57ab, 0),# East Asian ideograph
- 0x222f75: (0x6243, 0),# East Asian ideograph
- 0x222f77: (0x6246, 0),# East Asian ideograph
- 0x222f78: (0x6245, 0),# East Asian ideograph
- 0x222f79: (0x624a, 0),# East Asian ideograph
- 0x232f7a: (0x894c, 0),# East Asian ideograph
- 0x232f7b: (0x8946, 0),# East Asian ideograph
- 0x222f7c: (0x625e, 0),# East Asian ideograph
- 0x295859: (0x9cc6, 0),# East Asian ideograph
- 0x275d40: (0x9489, 0),# East Asian ideograph
- 0x275d41: (0x948a, 0),# East Asian ideograph
- 0x6f5b5b: (0xd2bc, 0),# Korean hangul
- 0x215d42: (0x91dc, 0),# East Asian ideograph
- 0x6f4e3a: (0xb5bb, 0),# Korean hangul
- 0x275d43: (0x9497, 0),# East Asian ideograph
- 0x45604e: (0x984f, 0),# East Asian ideograph
- 0x215d44: (0x91e6, 0),# East Asian ideograph
- 0x6f4a4f: (0xae69, 0),# Korean hangul
- 0x27593a: (0x8c2c, 0),# East Asian ideograph
- 0x273721: (0x545c, 0),# East Asian ideograph
- 0x275d45: (0x9493, 0),# East Asian ideograph
- 0x2d535e: (0x8193, 0),# East Asian ideograph
- 0x275d46: (0x948f, 0),# East Asian ideograph
- 0x33632b: (0x7adc, 0),# East Asian ideograph
- 0x6f5b5c: (0xd2bf, 0),# Korean hangul
- 0x705f5b: (0x54a3, 0),# East Asian ideograph
- 0x215d47: (0x9223, 0),# East Asian ideograph
- 0x293256: (0x8be9, 0),# East Asian ideograph
- 0x2e493b: (0x6e7c, 0),# East Asian ideograph
- 0x275d48: (0x949d, 0),# East Asian ideograph
- 0x4b4921: (0x6ca2, 0),# East Asian ideograph
- 0x235d49: (0x9e84, 0),# East Asian ideograph
- 0x27606b: (0x996d, 0),# East Asian ideograph
- 0x27593b: (0x8c1f, 0),# East Asian ideograph
- 0x6f5759: (0xc89d, 0),# Korean hangul
- 0x275d4a: (0x94a0, 0),# East Asian ideograph
- 0x215d4b: (0x9214, 0),# East Asian ideograph
- 0x6f5b5d: (0xd2c0, 0),# Korean hangul
- 0x275d4c: (0x94a7, 0),# East Asian ideograph
- 0x284c2e: (0x6d52, 0),# East Asian ideograph
- 0x697246: (0x9bd1, 0),# East Asian ideograph
- 0x275d4d: (0x94a4, 0),# East Asian ideograph
- 0x2d3356: (0x5211, 0),# East Asian ideograph (not in Unicode)
- 0x6f4b6a: (0xb0d1, 0),# Korean hangul
- 0x6f5d4e: (0xd6a8, 0),# Korean hangul
- 0x6f4a51: (0xae70, 0),# Korean hangul
- 0x27593c: (0x8bc6, 0),# East Asian ideograph
- 0x294364: (0x94f7, 0),# East Asian ideograph
- 0x233c77: (0x8fcb, 0),# East Asian ideograph
- 0x213a38: (0x5ac2, 0),# East Asian ideograph
- 0x275d50: (0x94b9, 0),# East Asian ideograph
- 0x2d3147: (0x5002, 0),# East Asian ideograph
- 0x6f5b5e: (0xd2c8, 0),# Korean hangul
- 0x215d51: (0x923d, 0),# East Asian ideograph
- 0x396074: (0x55b0, 0),# East Asian ideograph
- 0x215d52: (0x923e, 0),# East Asian ideograph
- 0x6f523c: (0xbf09, 0),# Korean hangul
- 0x275d53: (0x94be, 0),# East Asian ideograph
- 0x21347a: (0x53ad, 0),# East Asian ideograph
- 0x213037: (0x4e38, 0),# East Asian ideograph
- 0x4b4b63: (0x749c, 0),# East Asian ideograph
- 0x215d55: (0x925b, 0),# East Asian ideograph
- 0x6f5b5f: (0xd2c9, 0),# Korean hangul
- 0x275d56: (0x94a9, 0),# East Asian ideograph
- 0x22675c: (0x799a, 0),# East Asian ideograph
- 0x275d57: (0x94c2, 0),# East Asian ideograph
- 0x234942: (0x95ac, 0),# East Asian ideograph
- 0x225d58: (0x7547, 0),# East Asian ideograph
- 0x6f4a53: (0xae79, 0),# Korean hangul
- 0x224830: (0x6cd1, 0),# East Asian ideograph
- 0x275d59: (0x94f0, 0),# East Asian ideograph
- 0x235a4f: (0x9d41, 0),# East Asian ideograph
- 0x275d5a: (0x94f6, 0),# East Asian ideograph
- 0x274c75: (0x765e, 0),# East Asian ideograph
- 0x213021: (0x4e00, 0),# East Asian ideograph
- 0x213022: (0x4e01, 0),# East Asian ideograph
- 0x215d5b: (0x92ac, 0),# East Asian ideograph
- 0x213024: (0x4e09, 0),# East Asian ideograph
- 0x213025: (0x4e0b, 0),# East Asian ideograph
- 0x223026: (0x6268, 0),# East Asian ideograph
- 0x213027: (0x4e08, 0),# East Asian ideograph
- 0x223028: (0x6260, 0),# East Asian ideograph
- 0x233029: (0x895b, 0),# East Asian ideograph
- 0x21302a: (0x4e0d, 0),# East Asian ideograph
- 0x21302b: (0x4e14, 0),# East Asian ideograph
- 0x22302c: (0x6262, 0),# East Asian ideograph
- 0x21302d: (0x4e16, 0),# East Asian ideograph
- 0x21302e: (0x4e15, 0),# East Asian ideograph
- 0x215d5d: (0x9298, 0),# East Asian ideograph
- 0x213030: (0x4e22, 0),# East Asian ideograph
- 0x233031: (0x8966, 0),# East Asian ideograph
- 0x223032: (0x628e, 0),# East Asian ideograph
- 0x213034: (0x4e2d, 0),# East Asian ideograph
- 0x275d5e: (0x94e2, 0),# East Asian ideograph
- 0x213036: (0x51e1, 0),# East Asian ideograph
- 0x233037: (0x896d, 0),# East Asian ideograph
- 0x213038: (0x4e39, 0),# East Asian ideograph
- 0x213039: (0x4e3b, 0),# East Asian ideograph
- 0x23303a: (0x896b, 0),# East Asian ideograph
- 0x23303b: (0x896e, 0),# East Asian ideograph
- 0x23303c: (0x896c, 0),# East Asian ideograph
- 0x21303d: (0x4e4b, 0),# East Asian ideograph
- 0x21303e: (0x5c39, 0),# East Asian ideograph
- 0x21303f: (0x4e4f, 0),# East Asian ideograph
- 0x213040: (0x4e4e, 0),# East Asian ideograph
- 0x233041: (0x8976, 0),# East Asian ideograph
- 0x233042: (0x8974, 0),# East Asian ideograph
- 0x223043: (0x6282, 0),# East Asian ideograph
- 0x213044: (0x4e56, 0),# East Asian ideograph
- 0x213045: (0x4e58, 0),# East Asian ideograph
- 0x213046: (0x4e59, 0),# East Asian ideograph
- 0x215d61: (0x929c, 0),# East Asian ideograph
- 0x213048: (0x4e5f, 0),# East Asian ideograph
- 0x233049: (0x897b, 0),# East Asian ideograph
- 0x23304a: (0x897c, 0),# East Asian ideograph
- 0x22304b: (0x629d, 0),# East Asian ideograph
- 0x27304c: (0x5e72, 0),# East Asian ideograph
- 0x225d62: (0x7564, 0),# East Asian ideograph
- 0x6f4a55: (0xae7c, 0),# Korean hangul
- 0x213050: (0x4e8b, 0),# East Asian ideograph
- 0x213051: (0x4e8c, 0),# East Asian ideograph
- 0x213052: (0x4e8e, 0),# East Asian ideograph
- 0x233053: (0x8984, 0),# East Asian ideograph
- 0x213054: (0x4e94, 0),# East Asian ideograph
- 0x233055: (0x8985, 0),# East Asian ideograph
- 0x223056: (0x62a6, 0),# East Asian ideograph
- 0x213057: (0x4e99, 0),# East Asian ideograph (variant of 4B3057 which maps to 4E99)
- 0x273058: (0x4e9a, 0),# East Asian ideograph
- 0x215d64: (0x92b3, 0),# East Asian ideograph
- 0x21305a: (0x4e9f, 0),# East Asian ideograph
- 0x274c77: (0x7663, 0),# East Asian ideograph
- 0x21305c: (0x4ea6, 0),# East Asian ideograph
- 0x21305d: (0x4ea5, 0),# East Asian ideograph
- 0x21305e: (0x4ea4, 0),# East Asian ideograph
- 0x215d65: (0x92ea, 0),# East Asian ideograph
- 0x213060: (0x4eab, 0),# East Asian ideograph
- 0x213061: (0x4eac, 0),# East Asian ideograph
- 0x233062: (0x8991, 0),# East Asian ideograph
- 0x213063: (0x4eae, 0),# East Asian ideograph
- 0x233064: (0x8997, 0),# East Asian ideograph
- 0x215d66: (0x92b7, 0),# East Asian ideograph
- 0x233066: (0x8998, 0),# East Asian ideograph
- 0x4b5830: (0x899a, 0),# East Asian ideograph
- 0x213068: (0x4ec3, 0),# East Asian ideograph
- 0x213069: (0x4ec4, 0),# East Asian ideograph
- 0x22306a: (0x62c3, 0),# East Asian ideograph
- 0x23306b: (0x899c, 0),# East Asian ideograph
- 0x21306c: (0x4ec7, 0),# East Asian ideograph
- 0x21306d: (0x4ecb, 0),# East Asian ideograph
- 0x21306e: (0x4ee4, 0),# East Asian ideograph
- 0x23306f: (0x89a1, 0),# East Asian ideograph
- 0x213070: (0x4ed5, 0),# East Asian ideograph
- 0x275d68: (0x9504, 0),# East Asian ideograph
- 0x223072: (0x630d, 0),# East Asian ideograph
- 0x213073: (0x4ee3, 0),# East Asian ideograph
- 0x213074: (0x4ed4, 0),# East Asian ideograph
- 0x213075: (0x4ed7, 0),# East Asian ideograph
- 0x233076: (0x89a5, 0),# East Asian ideograph
- 0x275d69: (0x9509, 0),# East Asian ideograph
- 0x213078: (0x4eff, 0),# East Asian ideograph
- 0x233079: (0x89a9, 0),# East Asian ideograph
- 0x6f5b63: (0xd2f0, 0),# Korean hangul
- 0x21307c: (0x4efb, 0),# East Asian ideograph
- 0x275d6a: (0x950b, 0),# East Asian ideograph
- 0x21307e: (0x4f15, 0),# East Asian ideograph
- 0x224547: (0x6bbd, 0),# East Asian ideograph
- 0x215d6b: (0x9320, 0),# East Asian ideograph
- 0x292a2f: (0x86f1, 0),# East Asian ideograph
- 0x6f5d6c: (0xd754, 0),# Korean hangul
- 0x29436a: (0x9512, 0),# East Asian ideograph
- 0x215d6d: (0x92f8, 0),# East Asian ideograph
- 0x235a53: (0x9d36, 0),# East Asian ideograph
- 0x225d6e: (0x757a, 0),# East Asian ideograph
- 0x6f535b: (0xc218, 0),# Korean hangul
- 0x274c79: (0x766b, 0),# East Asian ideograph
- 0x6f5b64: (0xd2f1, 0),# Korean hangul
- 0x275d6f: (0x9519, 0),# East Asian ideograph
- 0x29325e: (0x8bdc, 0),# East Asian ideograph
- 0x6f5721: (0xc7a1, 0),# Korean hangul
- 0x2f575f: (0x9abe, 0),# East Asian ideograph
- 0x275d70: (0x94b1, 0),# East Asian ideograph
- 0x4b5832: (0x89b3, 0),# East Asian ideograph
- 0x225d71: (0x7577, 0),# East Asian ideograph
- 0x6f4a58: (0xae85, 0),# Korean hangul
- 0x275d72: (0x9521, 0),# East Asian ideograph
- 0x22343c: (0x649d, 0),# East Asian ideograph
- 0x275d73: (0x94ee, 0),# East Asian ideograph
- 0x6f5b65: (0xd2f4, 0),# Korean hangul
- 0x275d74: (0x5f55, 0),# East Asian ideograph
- 0x6f5722: (0xc7a3, 0),# Korean hangul
- 0x69724e: (0x9bf2, 0),# East Asian ideograph
- 0x215d75: (0x9310, 0),# East Asian ideograph
- 0x234948: (0x95bc, 0),# East Asian ideograph
- 0x4b325f: (0x50bb, 0),# East Asian ideograph
- 0x215d76: (0x9326, 0),# East Asian ideograph
- 0x6f5835: (0xc9da, 0),# Korean hangul
- 0x692153: (0x3009, 0),# Ideographic greater than sign
- 0x215d77: (0x934d, 0),# East Asian ideograph
- 0x2d632d: (0x4e80, 0),# East Asian ideograph
- 0x215d78: (0x9382, 0),# East Asian ideograph
- 0x274c7b: (0x53d1, 0),# East Asian ideograph
- 0x6f5b66: (0xd2f8, 0),# Korean hangul
- 0x225d79: (0x757d, 0),# East Asian ideograph
- 0x6f5432: (0xc2fc, 0),# Korean hangul
- 0x224824: (0x6cd8, 0),# East Asian ideograph
- 0x4b5c77: (0x9139, 0),# East Asian ideograph
- 0x235d7a: (0x9eb0, 0),# East Asian ideograph
- 0x6f5d7b: (0xd790, 0),# Korean hangul
- 0x27606d: (0x9974, 0),# East Asian ideograph
- 0x224826: (0x6cc6, 0),# East Asian ideograph
- 0x6f575b: (0xc8a0, 0),# Korean hangul
- 0x275d7c: (0x9505, 0),# East Asian ideograph
- 0x2e5452: (0x71fe, 0),# East Asian ideograph
- 0x234827: (0x93f4, 0),# East Asian ideograph
- 0x275d7d: (0x951a, 0),# East Asian ideograph
- 0x234828: (0x9436, 0),# East Asian ideograph
- 0x6f5b67: (0xd300, 0),# Korean hangul
- 0x275d7e: (0x953e, 0),# East Asian ideograph
- 0x224829: (0x6ce9, 0),# East Asian ideograph
- 0x226764: (0x799d, 0),# East Asian ideograph
- 0x23494a: (0x95cd, 0),# East Asian ideograph
- 0x395e3d: (0x9295, 0),# East Asian ideograph
- 0x6f4a5b: (0xaebe, 0),# Korean hangul
- 0x23482b: (0x943b, 0),# East Asian ideograph
- 0x275946: (0x8bd1, 0),# East Asian ideograph
- 0x22527c: (0x717b, 0),# East Asian ideograph
- 0x23482d: (0x9424, 0),# East Asian ideograph
- 0x6f5b68: (0xd301, 0),# Korean hangul
- 0x6f5725: (0xc7a6, 0),# Korean hangul
- 0x284e42: (0x6d4d, 0),# East Asian ideograph
- 0x21482f: (0x6e34, 0),# East Asian ideograph
- 0x6f523e: (0xbf1d, 0),# Korean hangul
- 0x6f4a5c: (0xaec0, 0),# Korean hangul
- 0x234830: (0x9437, 0),# East Asian ideograph
- 0x213122: (0x4f10, 0),# East Asian ideograph
- 0x213123: (0x4f0f, 0),# East Asian ideograph
- 0x213124: (0x4ef2, 0),# East Asian ideograph
- 0x223125: (0x62f5, 0),# East Asian ideograph
- 0x213126: (0x4ef3, 0),# East Asian ideograph
- 0x213127: (0x4ef6, 0),# East Asian ideograph
- 0x213128: (0x4ef0, 0),# East Asian ideograph
- 0x23312a: (0x89b8, 0),# East Asian ideograph
- 0x23312b: (0x89b7, 0),# East Asian ideograph
- 0x23312c: (0x89b6, 0),# East Asian ideograph
- 0x234832: (0x9440, 0),# East Asian ideograph
- 0x21312e: (0x4f57, 0),# East Asian ideograph
- 0x23312f: (0x89bc, 0),# East Asian ideograph
- 0x213130: (0x4f5e, 0),# East Asian ideograph
- 0x223131: (0x630c, 0),# East Asian ideograph
- 0x233132: (0x89bf, 0),# East Asian ideograph
- 0x213133: (0x4f55, 0),# East Asian ideograph
- 0x213134: (0x4f30, 0),# East Asian ideograph
- 0x213135: (0x4f50, 0),# East Asian ideograph
- 0x213136: (0x4f51, 0),# East Asian ideograph
- 0x223137: (0x62f6, 0),# East Asian ideograph
- 0x213138: (0x4f48, 0),# East Asian ideograph
- 0x213139: (0x4f46, 0),# East Asian ideograph
- 0x22313a: (0x6331, 0),# East Asian ideograph
- 0x23313b: (0x89d5, 0),# East Asian ideograph
- 0x21313c: (0x4f54, 0),# East Asian ideograph
- 0x21313d: (0x4f3c, 0),# East Asian ideograph
- 0x21313e: (0x4f63, 0),# East Asian ideograph
- 0x23313f: (0x89da, 0),# East Asian ideograph
- 0x213140: (0x4f60, 0),# East Asian ideograph
- 0x213141: (0x4f2f, 0),# East Asian ideograph
- 0x223142: (0x6345, 0),# East Asian ideograph
- 0x233143: (0x89e5, 0),# East Asian ideograph
- 0x223144: (0x6343, 0),# East Asian ideograph
- 0x234836: (0x942d, 0),# East Asian ideograph
- 0x213146: (0x4f6f, 0),# East Asian ideograph
- 0x223147: (0x6353, 0),# East Asian ideograph
- 0x223148: (0x6364, 0),# East Asian ideograph
- 0x223149: (0x6336, 0),# East Asian ideograph
- 0x22314a: (0x6344, 0),# East Asian ideograph
- 0x224837: (0x6d1d, 0),# East Asian ideograph
- 0x23314c: (0x89e9, 0),# East Asian ideograph
- 0x23314d: (0x89eb, 0),# East Asian ideograph
- 0x21314e: (0x4f8b, 0),# East Asian ideograph
- 0x27314f: (0x4ed1, 0),# East Asian ideograph
- 0x234838: (0x9431, 0),# East Asian ideograph
- 0x213152: (0x4f7b, 0),# East Asian ideograph
- 0x233153: (0x89ed, 0),# East Asian ideograph
- 0x223154: (0x6339, 0),# East Asian ideograph
- 0x213155: (0x4f8f, 0),# East Asian ideograph
- 0x213156: (0x4f7e, 0),# East Asian ideograph
- 0x213157: (0x4fe1, 0),# East Asian ideograph
- 0x223158: (0x6357, 0),# East Asian ideograph
- 0x213159: (0x4fb5, 0),# East Asian ideograph
- 0x22315a: (0x633c, 0),# East Asian ideograph
- 0x22315b: (0x6358, 0),# East Asian ideograph
- 0x21315c: (0x4fde, 0),# East Asian ideograph
- 0x27315d: (0x4fa0, 0),# East Asian ideograph
- 0x21315e: (0x4fcf, 0),# East Asian ideograph
- 0x22315f: (0x6354, 0),# East Asian ideograph
- 0x213160: (0x4fda, 0),# East Asian ideograph
- 0x213161: (0x4fdd, 0),# East Asian ideograph
- 0x213162: (0x4fc3, 0),# East Asian ideograph
- 0x213163: (0x4fd8, 0),# East Asian ideograph
- 0x233164: (0x89f7, 0),# East Asian ideograph
- 0x213165: (0x4fca, 0),# East Asian ideograph
- 0x213166: (0x4fae, 0),# East Asian ideograph
- 0x213167: (0x4fd0, 0),# East Asian ideograph
- 0x223168: (0x637d, 0),# East Asian ideograph
- 0x273169: (0x7cfb, 0),# East Asian ideograph (duplicate simplified)
- 0x22316a: (0x63b6, 0),# East Asian ideograph
- 0x22316b: (0x6382, 0),# East Asian ideograph
- 0x27316c: (0x4ed3, 0),# East Asian ideograph
- 0x23316d: (0x8a07, 0),# East Asian ideograph
- 0x22316e: (0x639f, 0),# East Asian ideograph
- 0x21483d: (0x6e9d, 0),# East Asian ideograph
- 0x233170: (0x8a0f, 0),# East Asian ideograph
- 0x233171: (0x8a11, 0),# East Asian ideograph
- 0x233172: (0x8a12, 0),# East Asian ideograph
- 0x233173: (0x8a0d, 0),# East Asian ideograph
- 0x213174: (0x4ff8, 0),# East Asian ideograph
- 0x213175: (0x5028, 0),# East Asian ideograph
- 0x213176: (0x5014, 0),# East Asian ideograph
- 0x213177: (0x5016, 0),# East Asian ideograph
- 0x213178: (0x5029, 0),# East Asian ideograph
- 0x223179: (0x6381, 0),# East Asian ideograph
- 0x23317a: (0x8a27, 0),# East Asian ideograph
- 0x22317b: (0x6397, 0),# East Asian ideograph
- 0x21317c: (0x503c, 0),# East Asian ideograph
- 0x23317d: (0x8a29, 0),# East Asian ideograph
- 0x21317e: (0x4ffa, 0),# East Asian ideograph
- 0x234840: (0x9445, 0),# East Asian ideograph
- 0x274841: (0x6c85, 0),# East Asian ideograph
- 0x6f5b6c: (0xd30c, 0),# Korean hangul
- 0x234842: (0x9450, 0),# East Asian ideograph
- 0x273266: (0x4f18, 0),# East Asian ideograph
- 0x4b513b: (0x7cf8, 0),# East Asian ideograph
- 0x6f4b6d: (0xb0ec, 0),# Korean hangul
- 0x274844: (0x6e7f, 0),# East Asian ideograph
- 0x2d4845: (0x6e29, 0),# East Asian ideograph
- 0x4b4846: (0x78c6, 0),# East Asian ideograph
- 0x226f69: (0x7cc5, 0),# East Asian ideograph
- 0x274848: (0x6ca7, 0),# East Asian ideograph
- 0x4b3622: (0x8c18, 0),# East Asian ideograph
- 0x6f4a61: (0xaed0, 0),# Korean hangul
- 0x273733: (0x5578, 0),# East Asian ideograph
- 0x23484a: (0x944a, 0),# East Asian ideograph
- 0x27484b: (0x51c6, 0),# East Asian ideograph
- 0x6f5b6e: (0xd30e, 0),# Korean hangul
- 0x2f3639: (0x8c7c, 0),# East Asian ideograph
- 0x4b484c: (0x6f91, 0),# East Asian ideograph
- 0x22484d: (0x6d26, 0),# East Asian ideograph
- 0x22484e: (0x6d27, 0),# East Asian ideograph
- 0x294375: (0x9514, 0),# East Asian ideograph
- 0x22484f: (0x6d0f, 0),# East Asian ideograph
- 0x224850: (0x6d0a, 0),# East Asian ideograph
- 0x2d4466: (0x6973, 0),# East Asian ideograph
- 0x224851: (0x6d3f, 0),# East Asian ideograph
- 0x226329: (0x77be, 0),# East Asian ideograph
- 0x234853: (0x9466, 0),# East Asian ideograph
- 0x47594e: (0x9c3a, 0),# East Asian ideograph
- 0x274854: (0x6e0d, 0),# East Asian ideograph
- 0x514e5b: (0x9271, 0),# East Asian ideograph
- 0x274855: (0x6da8, 0),# East Asian ideograph
- 0x6f5b70: (0xd314, 0),# Korean hangul
- 0x274842: (0x706d, 0),# East Asian ideograph
- 0x6f572d: (0xc7bf, 0),# Korean hangul
- 0x284c41: (0x6ca4, 0),# East Asian ideograph
- 0x234560: (0x93be, 0),# East Asian ideograph (not in Unicode)
- 0x29243a: (0x83bc, 0),# East Asian ideograph
- 0x274857: (0x6c49, 0),# East Asian ideograph
- 0x273b79: (0x5c9b, 0),# East Asian ideograph
- 0x234858: (0x9462, 0),# East Asian ideograph
- 0x2f252d: (0x6a22, 0),# East Asian ideograph
- 0x6f575d: (0xc8a8, 0),# Korean hangul
- 0x3f4621: (0x9a69, 0),# East Asian ideograph
- 0x274859: (0x6d9f, 0),# East Asian ideograph
- 0x286622: (0x7857, 0),# East Asian ideograph
- 0x22485a: (0x6d07, 0),# East Asian ideograph
- 0x4b4d7b: (0x77d7, 0),# East Asian ideograph (variant of 214D7B which maps to 77D7)
- 0x6f5b71: (0xd31c, 0),# Korean hangul
- 0x213221: (0x5018, 0),# East Asian ideograph
- 0x213222: (0x4ff1, 0),# East Asian ideograph
- 0x22485b: (0x6d04, 0),# East Asian ideograph
- 0x273224: (0x4e2a, 0),# East Asian ideograph
- 0x213225: (0x5019, 0),# East Asian ideograph
- 0x273226: (0x4f25, 0),# East Asian ideograph
- 0x223227: (0x638e, 0),# East Asian ideograph
- 0x233228: (0x8a4a, 0),# East Asian ideograph
- 0x22485c: (0x6cda, 0),# East Asian ideograph
- 0x23322a: (0x8a4e, 0),# East Asian ideograph
- 0x21322b: (0x4ffe, 0),# East Asian ideograph
- 0x21322c: (0x502a, 0),# East Asian ideograph
- 0x27322d: (0x4f26, 0),# East Asian ideograph
- 0x27322e: (0x4ec3, 0),# East Asian ideograph (duplicate simplified)
- 0x22322f: (0x6375, 0),# East Asian ideograph
- 0x223230: (0x63af, 0),# East Asian ideograph
- 0x213231: (0x5047, 0),# East Asian ideograph
- 0x213232: (0x505a, 0),# East Asian ideograph
- 0x273233: (0x4f1f, 0),# East Asian ideograph
- 0x213234: (0x5043, 0),# East Asian ideograph
- 0x23485e: (0x945e, 0),# East Asian ideograph
- 0x213236: (0x5076, 0),# East Asian ideograph
- 0x213237: (0x504e, 0),# East Asian ideograph
- 0x223238: (0x63b0, 0),# East Asian ideograph
- 0x223239: (0x63ae, 0),# East Asian ideograph
- 0x22323a: (0x637c, 0),# East Asian ideograph
- 0x27485f: (0x6ede, 0),# East Asian ideograph
- 0x21323c: (0x5077, 0),# East Asian ideograph
- 0x22323d: (0x63ad, 0),# East Asian ideograph
- 0x27323e: (0x5bb6, 0),# East Asian ideograph
- 0x21323f: (0x5085, 0),# East Asian ideograph
- 0x273240: (0x5907, 0),# East Asian ideograph
- 0x224860: (0x6d2e, 0),# East Asian ideograph
- 0x233242: (0x8a45, 0),# East Asian ideograph
- 0x273243: (0x4f27, 0),# East Asian ideograph
- 0x273244: (0x4f1e, 0),# East Asian ideograph
- 0x213245: (0x50ad, 0),# East Asian ideograph
- 0x273246: (0x4f20, 0),# East Asian ideograph
- 0x224861: (0x6d35, 0),# East Asian ideograph
- 0x213248: (0x50b2, 0),# East Asian ideograph
- 0x273249: (0x4ec5, 0),# East Asian ideograph
- 0x27324a: (0x503e, 0),# East Asian ideograph
- 0x21324b: (0x50ac, 0),# East Asian ideograph
- 0x27324c: (0x4f24, 0),# East Asian ideograph
- 0x224862: (0x6d3a, 0),# East Asian ideograph
- 0x21324e: (0x50e7, 0),# East Asian ideograph
- 0x22324f: (0x63bd, 0),# East Asian ideograph
- 0x223250: (0x63c3, 0),# East Asian ideograph
- 0x273251: (0x4fa5, 0),# East Asian ideograph
- 0x223252: (0x63f5, 0),# East Asian ideograph
- 0x213253: (0x50ed, 0),# East Asian ideograph
- 0x213254: (0x50da, 0),# East Asian ideograph
- 0x273255: (0x4ec6, 0),# East Asian ideograph
- 0x273256: (0x4f2a, 0),# East Asian ideograph
- 0x273257: (0x8c61, 0),# East Asian ideograph
- 0x273258: (0x4fa8, 0),# East Asian ideograph
- 0x233259: (0x8a82, 0),# East Asian ideograph
- 0x27325a: (0x4ebf, 0),# East Asian ideograph
- 0x22325b: (0x63e0, 0),# East Asian ideograph
- 0x22325c: (0x63d5, 0),# East Asian ideograph
- 0x23325d: (0x8a84, 0),# East Asian ideograph
- 0x23325e: (0x8a75, 0),# East Asian ideograph
- 0x274865: (0x6e14, 0),# East Asian ideograph
- 0x273260: (0x4fa9, 0),# East Asian ideograph
- 0x273261: (0x4fed, 0),# East Asian ideograph
- 0x273262: (0x50a7, 0),# East Asian ideograph
- 0x273263: (0x5c3d, 0),# East Asian ideograph (duplicate simplified)
- 0x213264: (0x5112, 0),# East Asian ideograph
- 0x273265: (0x4fe6, 0),# East Asian ideograph
- 0x223266: (0x63c5, 0),# East Asian ideograph (not in Unicode)
- 0x213267: (0x511f, 0),# East Asian ideograph
- 0x213268: (0x5121, 0),# East Asian ideograph
- 0x273269: (0x50a8, 0),# East Asian ideograph
- 0x21326a: (0x5137, 0),# East Asian ideograph
- 0x27326b: (0x4fe8, 0),# East Asian ideograph
- 0x21326c: (0x5140, 0),# East Asian ideograph
- 0x21326d: (0x5143, 0),# East Asian ideograph
- 0x21326e: (0x5141, 0),# East Asian ideograph
- 0x23326f: (0x8a96, 0),# East Asian ideograph
- 0x213270: (0x5144, 0),# East Asian ideograph
- 0x233271: (0x8a9a, 0),# East Asian ideograph
- 0x213272: (0x5149, 0),# East Asian ideograph
- 0x273273: (0x51f6, 0),# East Asian ideograph
- 0x213274: (0x5148, 0),# East Asian ideograph
- 0x274c3c: (0x8fed, 0),# East Asian ideograph
- 0x223276: (0x63d1, 0),# East Asian ideograph (not in Unicode)
- 0x234869: (0x946d, 0),# East Asian ideograph
- 0x213278: (0x5155, 0),# East Asian ideograph
- 0x223279: (0x63c4, 0),# East Asian ideograph
- 0x27327a: (0x513f, 0),# East Asian ideograph
- 0x27327b: (0x5156, 0),# East Asian ideograph
- 0x21327c: (0x515c, 0),# East Asian ideograph
- 0x22486a: (0x6d2b, 0),# East Asian ideograph
- 0x22327e: (0x6412, 0),# East Asian ideograph
- 0x2d4f7c: (0x7b5e, 0),# East Asian ideograph
- 0x22486b: (0x6d11, 0),# East Asian ideograph
- 0x27486c: (0x6cfc, 0),# East Asian ideograph
- 0x6f5838: (0xc9dc, 0),# Korean hangul
- 0x21304d: (0x4e82, 0),# East Asian ideograph
- 0x22486d: (0x6d24, 0),# East Asian ideograph
- 0x6f4e5c: (0xb760, 0),# Korean hangul
- 0x235c65: (0x9def, 0),# East Asian ideograph
- 0x293b3f: (0x8f7a, 0),# East Asian ideograph
- 0x27486e: (0x6da7, 0),# East Asian ideograph
- 0x6f5b75: (0xd321, 0),# Korean hangul
- 0x27486f: (0x6d01, 0),# East Asian ideograph
- 0x6f4870: (0xac1b, 0),# Korean hangul
- 0x6f4c49: (0xb214, 0),# Korean hangul
- 0x234871: (0x9477, 0),# East Asian ideograph
- 0x275954: (0x5c82, 0),# East Asian ideograph
- 0x2f252e: (0x8507, 0),# East Asian ideograph
- 0x6f4872: (0xac1d, 0),# Korean hangul
- 0x2d315f: (0x4fa3, 0),# East Asian ideograph
- 0x235622: (0x9b35, 0),# East Asian ideograph
- 0x6f5b76: (0xd325, 0),# Korean hangul
- 0x2d4874: (0x6f5c, 0),# East Asian ideograph
- 0x6f4875: (0xac24, 0),# Korean hangul
- 0x29457a: (0x9550, 0),# East Asian ideograph
- 0x6f4876: (0xac2c, 0),# Korean hangul
- 0x227321: (0x7e35, 0),# East Asian ideograph
- 0x224877: (0x6da5, 0),# East Asian ideograph
- 0x213322: (0x5168, 0),# East Asian ideograph
- 0x4b5361: (0x89d2, 0),# East Asian ideograph (duplicate simplified)
- 0x274878: (0x6e83, 0),# East Asian ideograph
- 0x213323: (0x5169, 0),# East Asian ideograph
- 0x455e21: (0x953a, 0),# East Asian ideograph
- 0x293271: (0x8bee, 0),# East Asian ideograph
- 0x213324: (0x516b, 0),# East Asian ideograph
- 0x22455b: (0x6bd6, 0),# East Asian ideograph
- 0x6f487a: (0xac31, 0),# Korean hangul
- 0x213325: (0x516d, 0),# East Asian ideograph
- 0x23487b: (0x9482, 0),# East Asian ideograph
- 0x27373d: (0x5480, 0),# East Asian ideograph
- 0x27487c: (0x6d53, 0),# East Asian ideograph
- 0x213327: (0x516c, 0),# East Asian ideograph
- 0x6f5d56: (0xd6e0, 0),# Korean hangul
- 0x22487d: (0x6d92, 0),# East Asian ideograph
- 0x217328: (0x568c, 0),# East Asian ideograph
- 0x6f487e: (0xac54, 0),# Korean hangul
- 0x213329: (0x5175, 0),# East Asian ideograph
- 0x215f33: (0x9694, 0),# East Asian ideograph
- 0x276225: (0x9ccf, 0),# East Asian ideograph
- 0x2d332a: (0x4e0c, 0),# East Asian ideograph
- 0x2d3e2b: (0x6060, 0),# East Asian ideograph
- 0x21332b: (0x5177, 0),# East Asian ideograph
- 0x4b5f62: (0x7668, 0),# East Asian ideograph
- 0x3f4629: (0x4e97, 0),# East Asian ideograph
- 0x513051: (0x8cae, 0),# East Asian ideograph
- 0x22332c: (0x6424, 0),# East Asian ideograph
- 0x274c3b: (0x7574, 0),# East Asian ideograph
- 0x23463c: (0x9389, 0),# East Asian ideograph
- 0x22732d: (0x7e52, 0),# East Asian ideograph
- 0x22534a: (0x719b, 0),# East Asian ideograph
- 0x6f5736: (0xc804, 0),# Korean hangul
- 0x215f34: (0x9699, 0),# East Asian ideograph
- 0x21332f: (0x5189, 0),# East Asian ideograph
- 0x6f4a6d: (0xaf3f, 0),# Korean hangul
- 0x275958: (0x4e30, 0),# East Asian ideograph
- 0x233321: (0x8abe, 0),# East Asian ideograph
- 0x223322: (0x6410, 0),# East Asian ideograph
- 0x273323: (0x4e24, 0),# East Asian ideograph
- 0x223324: (0x6434, 0),# East Asian ideograph
- 0x233325: (0x8acf, 0),# East Asian ideograph
- 0x213326: (0x516e, 0),# East Asian ideograph
- 0x233327: (0x8ac6, 0),# East Asian ideograph
- 0x213328: (0x5171, 0),# East Asian ideograph
- 0x223329: (0x641b, 0),# East Asian ideograph
- 0x21332a: (0x5176, 0),# East Asian ideograph
- 0x22332b: (0x6420, 0),# East Asian ideograph
- 0x23332c: (0x8ad1, 0),# East Asian ideograph
- 0x23332d: (0x8ad3, 0),# East Asian ideograph
- 0x21332e: (0x5180, 0),# East Asian ideograph
- 0x22332f: (0x6426, 0),# East Asian ideograph
- 0x213330: (0x518c, 0),# East Asian ideograph
- 0x233331: (0x8aaf, 0),# East Asian ideograph
- 0x213332: (0x5192, 0),# East Asian ideograph
- 0x233333: (0x8ad4, 0),# East Asian ideograph
- 0x213334: (0x5195, 0),# East Asian ideograph
- 0x213335: (0x6700, 0),# East Asian ideograph
- 0x213336: (0x5197, 0),# East Asian ideograph
- 0x213337: (0x51a0, 0),# East Asian ideograph
- 0x233338: (0x8ab9, 0),# East Asian ideograph
- 0x223339: (0x3013, 0),# East Asian ideograph (not found in unified han)
- 0x23333b: (0x8adb, 0),# East Asian ideograph
- 0x21333c: (0x51b0, 0),# East Asian ideograph
- 0x22333d: (0x6421, 0),# East Asian ideograph
- 0x21333e: (0x51b7, 0),# East Asian ideograph
- 0x23333f: (0x8ad0, 0),# East Asian ideograph
- 0x233340: (0x8ad7, 0),# East Asian ideograph
- 0x213341: (0x51cc, 0),# East Asian ideograph
- 0x233344: (0x8af3, 0),# East Asian ideograph
- 0x233336: (0x8acd, 0),# East Asian ideograph
- 0x213347: (0x51f0, 0),# East Asian ideograph
- 0x273348: (0x51ef, 0),# East Asian ideograph
- 0x233349: (0x8b4c, 0),# East Asian ideograph
- 0x223337: (0x6418, 0),# East Asian ideograph
- 0x22334c: (0x6409, 0),# East Asian ideograph
- 0x21334d: (0x51f8, 0),# East Asian ideograph
- 0x23334e: (0x8af6, 0),# East Asian ideograph
- 0x21334f: (0x5200, 0),# East Asian ideograph
- 0x213350: (0x5201, 0),# East Asian ideograph
- 0x223338: (0x640e, 0),# East Asian ideograph
- 0x213352: (0x5207, 0),# East Asian ideograph
- 0x223353: (0x6440, 0),# East Asian ideograph
- 0x213354: (0x5208, 0),# East Asian ideograph
- 0x213355: (0x520a, 0),# East Asian ideograph
- 0x233356: (0x8b03, 0),# East Asian ideograph
- 0x233357: (0x8ae4, 0),# East Asian ideograph
- 0x233359: (0x8b14, 0),# East Asian ideograph
- 0x21335a: (0x5224, 0),# East Asian ideograph
- 0x21335b: (0x5225, 0),# East Asian ideograph
- 0x235749: (0x9b86, 0),# East Asian ideograph
- 0x23335d: (0x8afc, 0),# East Asian ideograph
- 0x21335e: (0x5229, 0),# East Asian ideograph
- 0x21335f: (0x5238, 0),# East Asian ideograph
- 0x213360: (0x523b, 0),# East Asian ideograph
- 0x213361: (0x5237, 0),# East Asian ideograph
- 0x233362: (0x8ade, 0),# East Asian ideograph
- 0x233363: (0x8ae1, 0),# East Asian ideograph
- 0x233364: (0x8b07, 0),# East Asian ideograph
- 0x2d537e: (0x81c8, 0),# East Asian ideograph
- 0x213366: (0x5241, 0),# East Asian ideograph
- 0x213367: (0x5239, 0),# East Asian ideograph
- 0x223368: (0x645b, 0),# East Asian ideograph
- 0x213369: (0x524d, 0),# East Asian ideograph
- 0x22336a: (0x644f, 0),# East Asian ideograph
- 0x27336b: (0x514b, 0),# East Asian ideograph
- 0x21336c: (0x524a, 0),# East Asian ideograph
- 0x27336d: (0x5219, 0),# East Asian ideograph
- 0x21336e: (0x525c, 0),# East Asian ideograph
- 0x22336f: (0x6476, 0),# East Asian ideograph
- 0x273370: (0x521a, 0),# East Asian ideograph
- 0x215f37: (0x969b, 0),# East Asian ideograph
- 0x213372: (0x525d, 0),# East Asian ideograph
- 0x233373: (0x8b16, 0),# East Asian ideograph
- 0x213374: (0x526f, 0),# East Asian ideograph
- 0x213375: (0x5272, 0),# East Asian ideograph
- 0x223376: (0x6474, 0),# East Asian ideograph
- 0x213377: (0x5269, 0),# East Asian ideograph
- 0x273378: (0x521b, 0),# East Asian ideograph
- 0x233379: (0x8b06, 0),# East Asian ideograph
- 0x23337a: (0x8b05, 0),# East Asian ideograph
- 0x21337b: (0x527f, 0),# East Asian ideograph
- 0x27337c: (0x5212, 0),# East Asian ideograph
- 0x21337d: (0x5288, 0),# East Asian ideograph
- 0x27337e: (0x5267, 0),# East Asian ideograph
- 0x213340: (0x51cd, 0),# East Asian ideograph
- 0x217341: (0x569c, 0),# East Asian ideograph
- 0x27484f: (0x6caa, 0),# East Asian ideograph
- 0x276226: (0x9ca2, 0),# East Asian ideograph
- 0x234960: (0x95d5, 0),# East Asian ideograph
- 0x6f773b: (0xc6fd, 0),# Korean hangul
- 0x213344: (0x51dc, 0),# East Asian ideograph
- 0x23337c: (0x8b0f, 0),# East Asian ideograph
- 0x223345: (0x6441, 0),# East Asian ideograph
- 0x6f5b7e: (0xd33c, 0),# Korean hangul
- 0x282e79: (0x6079, 0),# East Asian ideograph
- 0x213e40: (0x6046, 0),# East Asian ideograph (variant of 4B3E40 which maps to 6046)
- 0x286e68: (0x7b3e, 0),# East Asian ideograph
- 0x22677b: (0x79b4, 0),# East Asian ideograph
- 0x224562: (0x6bdc, 0),# East Asian ideograph
- 0x213348: (0x51f1, 0),# East Asian ideograph
- 0x695626: (0x4e62, 0),# East Asian ideograph
- 0x6f4a72: (0xaf49, 0),# Korean hangul
- 0x213349: (0x51f3, 0),# East Asian ideograph
- 0x513057: (0x4e98, 0),# East Asian ideograph
- 0x21334b: (0x51fa, 0),# East Asian ideograph
- 0x6f573c: (0xc814, 0),# Korean hangul
- 0x23334c: (0x8add, 0),# East Asian ideograph
- 0x224563: (0x6bdd, 0),# East Asian ideograph
- 0x234962: (0x95d2, 0),# East Asian ideograph
- 0x6f2525: (0x3160, 0),# Korean hangul
- 0x4c3a33: (0x80ad, 0),# East Asian ideograph (variant of 2E3A33 which maps to 80AD)
- 0x276072: (0x9975, 0),# East Asian ideograph
- 0x27595e: (0x4e88, 0),# East Asian ideograph
- 0x21734e: (0x56ac, 0),# East Asian ideograph
- 0x273745: (0x55b7, 0),# East Asian ideograph
- 0x23334f: (0x8af4, 0),# East Asian ideograph
- 0x233350: (0x8af5, 0),# East Asian ideograph
- 0x6f573d: (0xc815, 0),# Korean hangul
- 0x213351: (0x5203, 0),# East Asian ideograph
- 0x395050: (0x7bed, 0),# East Asian ideograph
- 0x22633a: (0x77d1, 0),# East Asian ideograph
- 0x287352: (0x7f34, 0),# East Asian ideograph
- 0x6f4b71: (0xb10b, 0),# Korean hangul
- 0x6f4a74: (0xaf58, 0),# Korean hangul
- 0x233353: (0x8adf, 0),# East Asian ideograph
- 0x4b3354: (0x82c5, 0),# East Asian ideograph
- 0x4b3355: (0x520b, 0),# East Asian ideograph
- 0x6f573e: (0xc816, 0),# Korean hangul
- 0x213356: (0x5211, 0),# East Asian ideograph
- 0x224565: (0x6bdf, 0),# East Asian ideograph
- 0x213357: (0x5217, 0),# East Asian ideograph
- 0x2d5c48: (0x9013, 0),# East Asian ideograph
- 0x273747: (0x54dd, 0),# East Asian ideograph
- 0x213359: (0x520e, 0),# East Asian ideograph
- 0x6f5d58: (0xd6e8, 0),# Korean hangul
- 0x27735a: (0x55be, 0),# East Asian ideograph
- 0x2d4f41: (0x4e69, 0),# East Asian ideograph
- 0x4b5d2b: (0x9162, 0),# East Asian ideograph
- 0x273421: (0x5251, 0),# East Asian ideograph
- 0x213422: (0x5289, 0),# East Asian ideograph
- 0x273423: (0x5242, 0),# East Asian ideograph
- 0x223424: (0x6464, 0),# East Asian ideograph
- 0x213425: (0x529f, 0),# East Asian ideograph
- 0x213426: (0x52a0, 0),# East Asian ideograph
- 0x223427: (0x6482, 0),# East Asian ideograph
- 0x223428: (0x645e, 0),# East Asian ideograph
- 0x21335c: (0x5220, 0),# East Asian ideograph
- 0x21342a: (0x52ac, 0),# East Asian ideograph
- 0x21342b: (0x52aa, 0),# East Asian ideograph
- 0x22342c: (0x647b, 0),# East Asian ideograph
- 0x23342d: (0x8b26, 0),# East Asian ideograph
- 0x22342e: (0x645c, 0),# East Asian ideograph
- 0x27342f: (0x52b2, 0),# East Asian ideograph
- 0x233430: (0x8b33, 0),# East Asian ideograph
- 0x213431: (0x52d8, 0),# East Asian ideograph
- 0x21305b: (0x4ea1, 0),# East Asian ideograph
- 0x273433: (0x52a1, 0),# East Asian ideograph
- 0x273434: (0x52a8, 0),# East Asian ideograph
- 0x273435: (0x52b3, 0),# East Asian ideograph
- 0x273436: (0x52cb, 0),# East Asian ideograph
- 0x213437: (0x52dd, 0),# East Asian ideograph
- 0x273438: (0x52bf, 0),# East Asian ideograph
- 0x213439: (0x52e4, 0),# East Asian ideograph
- 0x23343a: (0x8b29, 0),# East Asian ideograph
- 0x2d335f: (0x52b5, 0),# East Asian ideograph
- 0x27343c: (0x52b1, 0),# East Asian ideograph
- 0x27343d: (0x529d, 0),# East Asian ideograph
- 0x21343e: (0x52fb, 0),# East Asian ideograph
- 0x22343f: (0x6499, 0),# East Asian ideograph
- 0x213440: (0x52ff, 0),# East Asian ideograph
- 0x217360: (0x56c5, 0),# East Asian ideograph
- 0x233442: (0x8b48, 0),# East Asian ideograph
- 0x215f3e: (0x96bb, 0),# East Asian ideograph
- 0x213444: (0x530d, 0),# East Asian ideograph
- 0x234966: (0x95da, 0),# East Asian ideograph
- 0x213446: (0x530f, 0),# East Asian ideograph
- 0x213447: (0x5315, 0),# East Asian ideograph
- 0x213448: (0x5316, 0),# East Asian ideograph
- 0x213449: (0x5317, 0),# East Asian ideograph
- 0x23344a: (0x8b46, 0),# East Asian ideograph
- 0x21344b: (0x53f5, 0),# East Asian ideograph
- 0x21344c: (0x531d, 0),# East Asian ideograph
- 0x22344d: (0x6496, 0),# East Asian ideograph
- 0x21344e: (0x5320, 0),# East Asian ideograph
- 0x21344f: (0x5323, 0),# East Asian ideograph
- 0x213450: (0x532a, 0),# East Asian ideograph
- 0x273451: (0x6c47, 0),# East Asian ideograph
- 0x273452: (0x532e, 0),# East Asian ideograph
- 0x213363: (0x523a, 0),# East Asian ideograph
- 0x213454: (0x533e, 0),# East Asian ideograph
- 0x273455: (0x533a, 0),# East Asian ideograph
- 0x213456: (0x533f, 0),# East Asian ideograph
- 0x213457: (0x5341, 0),# East Asian ideograph
- 0x213458: (0x5343, 0),# East Asian ideograph
- 0x213459: (0x5345, 0),# East Asian ideograph
- 0x21345a: (0x5348, 0),# East Asian ideograph
- 0x22345b: (0x64b6, 0),# East Asian ideograph
- 0x21345c: (0x534a, 0),# East Asian ideograph
- 0x21345d: (0x5349, 0),# East Asian ideograph (variant of 2D345D which maps to 5349)
- 0x6f5741: (0xc820, 0),# Korean hangul
- 0x27345f: (0x5346, 0),# East Asian ideograph
- 0x273460: (0x534f, 0),# East Asian ideograph
- 0x213461: (0x5353, 0),# East Asian ideograph
- 0x223462: (0x649f, 0),# East Asian ideograph
- 0x213463: (0x5357, 0),# East Asian ideograph
- 0x213464: (0x535a, 0),# East Asian ideograph
- 0x223465: (0x64a7, 0),# East Asian ideograph (not in Unicode)
- 0x213466: (0x535e, 0),# East Asian ideograph
- 0x213467: (0x5361, 0),# East Asian ideograph
- 0x233468: (0x8b6b, 0),# East Asian ideograph
- 0x213469: (0x5366, 0),# East Asian ideograph
- 0x22346a: (0x64d7, 0),# East Asian ideograph
- 0x21346b: (0x536e, 0),# East Asian ideograph
- 0x21346c: (0x5370, 0),# East Asian ideograph
- 0x21346d: (0x5371, 0),# East Asian ideograph
- 0x21346e: (0x537d, 0),# East Asian ideograph
- 0x21346f: (0x5375, 0),# East Asian ideograph
- 0x233470: (0x8b78, 0),# East Asian ideograph
- 0x213368: (0x5243, 0),# East Asian ideograph
- 0x213473: (0x537b, 0),# East Asian ideograph
- 0x223474: (0x64be, 0),# East Asian ideograph
- 0x223475: (0x64d0, 0),# East Asian ideograph
- 0x213476: (0x539a, 0),# East Asian ideograph
- 0x213477: (0x539d, 0),# East Asian ideograph
- 0x213478: (0x539f, 0),# East Asian ideograph
- 0x233479: (0x8b81, 0),# East Asian ideograph
- 0x27347a: (0x538c, 0),# East Asian ideograph
- 0x27347b: (0x5389, 0),# East Asian ideograph
- 0x21347c: (0x53bb, 0),# East Asian ideograph
- 0x27347d: (0x53c2, 0),# East Asian ideograph
- 0x21347e: (0x53c8, 0),# East Asian ideograph
- 0x334968: (0x7133, 0),# East Asian ideograph
- 0x23336b: (0x8b0c, 0),# East Asian ideograph
- 0x6f4b72: (0xb10c, 0),# Korean hangul
- 0x6f4a79: (0xaf79, 0),# Korean hangul
- 0x22336c: (0x646b, 0),# East Asian ideograph
- 0x225676: (0x72eb, 0),# East Asian ideograph
- 0x29583e: (0x9cca, 0),# East Asian ideograph
- 0x21736d: (0x56dd, 0),# East Asian ideograph
- 0x2d4f45: (0x9834, 0),# East Asian ideograph
- 0x274858: (0x6ee1, 0),# East Asian ideograph
- 0x6f5743: (0xc82c, 0),# Korean hangul
- 0x23336f: (0x8b1c, 0),# East Asian ideograph
- 0x234969: (0x95de, 0),# East Asian ideograph
- 0x694677: (0x5302, 0),# East Asian ideograph
- 0x217370: (0x56df, 0),# East Asian ideograph
- 0x6f4a7a: (0xaf80, 0),# Korean hangul
- 0x213371: (0x5254, 0),# East Asian ideograph
- 0x333377: (0x5270, 0),# East Asian ideograph
- 0x2d3372: (0x5265, 0),# East Asian ideograph
- 0x6f5d59: (0xd6f0, 0),# Korean hangul
- 0x6f502a: (0xba53, 0),# Korean hangul
- 0x696f27: (0x933b, 0),# East Asian ideograph
- 0x295c65: (0x9e69, 0),# East Asian ideograph
- 0x213373: (0x526a, 0),# East Asian ideograph
- 0x395e2f: (0x5277, 0),# East Asian ideograph
- 0x287374: (0x7f35, 0),# East Asian ideograph
- 0x225f3c: (0x760a, 0),# East Asian ideograph
- 0x23496a: (0x95e0, 0),# East Asian ideograph
- 0x217375: (0x56eb, 0),# East Asian ideograph
- 0x334527: (0x6918, 0),# East Asian ideograph
- 0x273376: (0x5240, 0),# East Asian ideograph
- 0x6f516a: (0xbe1c, 0),# Korean hangul
- 0x275e21: (0x949f, 0),# East Asian ideograph
- 0x2d3377: (0x8cf8, 0),# East Asian ideograph
- 0x215e22: (0x9318, 0),# East Asian ideograph
- 0x27615f: (0x53d1, 0),# East Asian ideograph (duplicate simplified)
- 0x233378: (0x8b0b, 0),# East Asian ideograph
- 0x215e23: (0x936c, 0),# East Asian ideograph
- 0x27485a: (0x6e10, 0),# East Asian ideograph
- 0x275e24: (0x953b, 0),# East Asian ideograph
- 0x21337a: (0x527d, 0),# East Asian ideograph
- 0x225e25: (0x7583, 0),# East Asian ideograph
- 0x6f583c: (0xc9e4, 0),# Korean hangul
- 0x22337b: (0x6473, 0),# East Asian ideograph
- 0x27374e: (0x549b, 0),# East Asian ideograph
- 0x2d5e26: (0x7194, 0),# East Asian ideograph
- 0x293f4c: (0x90d3, 0),# East Asian ideograph
- 0x21337c: (0x5283, 0),# East Asian ideograph
- 0x215e27: (0x93ae, 0),# East Asian ideograph
- 0x335635: (0x85ac, 0),# East Asian ideograph
- 0x3f3f24: (0x614e, 0),# East Asian ideograph
- 0x23337d: (0x8b10, 0),# East Asian ideograph
- 0x215e28: (0x9396, 0),# East Asian ideograph
- 0x6f5746: (0xc838, 0),# Korean hangul
- 0x21337e: (0x5287, 0),# East Asian ideograph
- 0x275e29: (0x94a8, 0),# East Asian ideograph
- 0x6f4c4d: (0xb233, 0),# Korean hangul
- 0x215e2a: (0x93b3, 0),# East Asian ideograph
- 0x215e2b: (0x93e1, 0),# East Asian ideograph
- 0x213062: (0x4ead, 0),# East Asian ideograph
- 0x692564: (0x30e4, 0),# Katakana letter YA
- 0x223461: (0x6498, 0),# East Asian ideograph
- 0x29516d: (0x9991, 0),# East Asian ideograph
- 0x215e2c: (0x93d1, 0),# East Asian ideograph
- 0x225e2d: (0x7592, 0),# East Asian ideograph
- 0x4c4d63: (0x6f99, 0),# East Asian ideograph
- 0x6f5747: (0xc83c, 0),# Korean hangul
- 0x215e2e: (0x93c3, 0),# East Asian ideograph
- 0x213d2c: (0x5ee0, 0),# East Asian ideograph
- 0x275e2f: (0x94f2, 0),# East Asian ideograph
- 0x2d6056: (0x980b, 0),# East Asian ideograph
- 0x6f4a7e: (0xaf95, 0),# Korean hangul
- 0x275969: (0x8d1e, 0),# East Asian ideograph
- 0x215e30: (0x93d7, 0),# East Asian ideograph
- 0x213522: (0x53cb, 0),# East Asian ideograph
- 0x233523: (0x8b8b, 0),# East Asian ideograph
- 0x213524: (0x53cd, 0),# East Asian ideograph
- 0x213525: (0x53d6, 0),# East Asian ideograph
- 0x233526: (0x8b87, 0),# East Asian ideograph
- 0x225e31: (0x7595, 0),# East Asian ideograph
- 0x213528: (0x53db, 0),# East Asian ideograph
- 0x213529: (0x53df, 0),# East Asian ideograph
- 0x22352a: (0x64ef, 0),# East Asian ideograph
- 0x27352b: (0x4e1b, 0),# East Asian ideograph
- 0x21352c: (0x53e3, 0),# East Asian ideograph
- 0x22352d: (0x64e1, 0),# East Asian ideograph
- 0x22352e: (0x64e5, 0),# East Asian ideograph
- 0x224873: (0x6d63, 0),# East Asian ideograph
- 0x213530: (0x53ef, 0),# East Asian ideograph
- 0x213531: (0x53e9, 0),# East Asian ideograph
- 0x213532: (0x53f3, 0),# East Asian ideograph
- 0x223533: (0x64e2, 0),# East Asian ideograph
- 0x213534: (0x53e8, 0),# East Asian ideograph
- 0x213535: (0x53e6, 0),# East Asian ideograph
- 0x223536: (0x64ed, 0),# East Asian ideograph
- 0x233537: (0x8b9c, 0),# East Asian ideograph
- 0x223538: (0x64e4, 0),# East Asian ideograph
- 0x275e34: (0x9542, 0),# East Asian ideograph
- 0x21353a: (0x53f1, 0),# East Asian ideograph
- 0x21353b: (0x53ed, 0),# East Asian ideograph
- 0x21353c: (0x53ea, 0),# East Asian ideograph
- 0x23353d: (0x8c3a, 0),# East Asian ideograph
- 0x235e35: (0x9ec6, 0),# East Asian ideograph
- 0x213540: (0x5409, 0),# East Asian ideograph
- 0x213541: (0x5410, 0),# East Asian ideograph
- 0x213542: (0x540f, 0),# East Asian ideograph
- 0x235a7b: (0x9d59, 0),# East Asian ideograph
- 0x233544: (0x8c40, 0),# East Asian ideograph
- 0x233545: (0x8c42, 0),# East Asian ideograph
- 0x213546: (0x5404, 0),# East Asian ideograph
- 0x213547: (0x5403, 0),# East Asian ideograph
- 0x213548: (0x5412, 0),# East Asian ideograph
- 0x2d7164: (0x55d4, 0),# East Asian ideograph (variant of 217164 which maps to 55D4)
- 0x21354a: (0x5406, 0),# East Asian ideograph (not in Unicode)
- 0x23354b: (0x8c47, 0),# East Asian ideograph
- 0x21354d: (0x542d, 0),# East Asian ideograph
- 0x21354e: (0x541d, 0),# East Asian ideograph
- 0x21354f: (0x541e, 0),# East Asian ideograph
- 0x213550: (0x541b, 0),# East Asian ideograph
- 0x213551: (0x544e, 0),# East Asian ideograph
- 0x233552: (0x8c55, 0),# East Asian ideograph
- 0x23496f: (0x95e5, 0),# East Asian ideograph
- 0x233554: (0x8c57, 0),# East Asian ideograph
- 0x213555: (0x5431, 0),# East Asian ideograph
- 0x233556: (0x8c5d, 0),# East Asian ideograph
- 0x213557: (0x543c, 0),# East Asian ideograph
- 0x213558: (0x5443, 0),# East Asian ideograph
- 0x213559: (0x5426, 0),# East Asian ideograph
- 0x21355a: (0x5420, 0),# East Asian ideograph
- 0x22355b: (0x6516, 0),# East Asian ideograph
- 0x23355c: (0x86c3, 0),# East Asian ideograph
- 0x21355d: (0x5435, 0),# East Asian ideograph
- 0x234e26: (0x97b5, 0),# East Asian ideograph
- 0x21355f: (0x544a, 0),# East Asian ideograph
- 0x213560: (0x5448, 0),# East Asian ideograph
- 0x223561: (0x651b, 0),# East Asian ideograph
- 0x213562: (0x5438, 0),# East Asian ideograph
- 0x233563: (0x8c68, 0),# East Asian ideograph
- 0x213564: (0x5442, 0),# East Asian ideograph
- 0x233565: (0x8c6d, 0),# East Asian ideograph
- 0x213566: (0x541f, 0),# East Asian ideograph
- 0x213567: (0x5429, 0),# East Asian ideograph
- 0x213568: (0x5473, 0),# East Asian ideograph
- 0x223569: (0x6527, 0),# East Asian ideograph
- 0x21356a: (0x5475, 0),# East Asian ideograph
- 0x21356b: (0x5495, 0),# East Asian ideograph
- 0x21356c: (0x5478, 0),# East Asian ideograph
- 0x22356d: (0x6522, 0),# East Asian ideograph
- 0x21356e: (0x5477, 0),# East Asian ideograph
- 0x22356f: (0x6529, 0),# East Asian ideograph
- 0x213571: (0x5492, 0),# East Asian ideograph
- 0x223572: (0x6525, 0),# East Asian ideograph
- 0x213573: (0x547c, 0),# East Asian ideograph
- 0x233574: (0x8c76, 0),# East Asian ideograph
- 0x225e3e: (0x75ba, 0),# East Asian ideograph
- 0x213576: (0x548b, 0),# East Asian ideograph
- 0x213577: (0x548c, 0),# East Asian ideograph
- 0x213578: (0x5490, 0),# East Asian ideograph
- 0x213579: (0x547d, 0),# East Asian ideograph
- 0x21357a: (0x5476, 0),# East Asian ideograph
- 0x23357b: (0x8c78, 0),# East Asian ideograph
- 0x22357c: (0x6541, 0),# East Asian ideograph
- 0x23357d: (0x8c7b, 0),# East Asian ideograph
- 0x21357e: (0x54a9, 0),# East Asian ideograph
- 0x275e40: (0x956f, 0),# East Asian ideograph
- 0x23563a: (0x9b48, 0),# East Asian ideograph
- 0x333421: (0x91fc, 0),# East Asian ideograph
- 0x6f574b: (0xc874, 0),# Korean hangul
- 0x234566: (0x9355, 0),# East Asian ideograph
- 0x235e42: (0x9ecc, 0),# East Asian ideograph
- 0x213d30: (0x5ef7, 0),# East Asian ideograph
- 0x6f4c4e: (0xb234, 0),# Korean hangul
- 0x225e43: (0x75b0, 0),# East Asian ideograph
- 0x225e44: (0x75c3, 0),# East Asian ideograph
- 0x27552a: (0x82cb, 0),# East Asian ideograph
- 0x6f5763: (0xc8cc, 0),# Korean hangul
- 0x275e45: (0x94c4, 0),# East Asian ideograph
- 0x225e46: (0x75bf, 0),# East Asian ideograph
- 0x2d5927: (0x8acc, 0),# East Asian ideograph
- 0x234c38: (0x971b, 0),# East Asian ideograph
- 0x6f574c: (0xc878, 0),# Korean hangul
- 0x225e47: (0x75b4, 0),# East Asian ideograph
- 0x6f5d29: (0xd5f5, 0),# Korean hangul
- 0x6f4b74: (0xb110, 0),# Korean hangul
- 0x23452f: (0x9342, 0),# East Asian ideograph
- 0x27596e: (0x8d2f, 0),# East Asian ideograph
- 0x273755: (0x5411, 0),# East Asian ideograph
- 0x275e49: (0x9523, 0),# East Asian ideograph
- 0x215e4a: (0x947d, 0),# East Asian ideograph
- 0x23563c: (0x9b4e, 0),# East Asian ideograph
- 0x333423: (0x5264, 0),# East Asian ideograph
- 0x275e4b: (0x51ff, 0),# East Asian ideograph
- 0x6f574d: (0xc87a, 0),# Korean hangul
- 0x235e4c: (0x9ed3, 0),# East Asian ideograph
- 0x275e4d: (0x95e8, 0),# East Asian ideograph
- 0x34492f: (0x6d34, 0),# East Asian ideograph
- 0x225e4e: (0x75c1, 0),# East Asian ideograph
- 0x277745: (0x57d9, 0),# East Asian ideograph
- 0x275e4f: (0x95ea, 0),# East Asian ideograph
- 0x4c6f43: (0x7ccd, 0),# East Asian ideograph
- 0x225e50: (0x75b1, 0),# East Asian ideograph
- 0x274863: (0x6d46, 0),# East Asian ideograph
- 0x6f574e: (0xc880, 0),# Korean hangul
- 0x284c62: (0x988d, 0),# East Asian ideograph
- 0x225e51: (0x75c4, 0),# East Asian ideograph
- 0x213d33: (0x5efa, 0),# East Asian ideograph
- 0x275e52: (0x95f0, 0),# East Asian ideograph
- 0x275970: (0x8d2a, 0),# East Asian ideograph
- 0x215e53: (0x958b, 0),# East Asian ideograph
- 0x275e54: (0x95f2, 0),# East Asian ideograph
- 0x23563e: (0x9b4d, 0),# East Asian ideograph
- 0x215e55: (0x9593, 0),# East Asian ideograph
- 0x274864: (0x6e17, 0),# East Asian ideograph
- 0x6f574f: (0xc881, 0),# Korean hangul
- 0x6f4d29: (0xb355, 0),# Korean hangul
- 0x235e57: (0x9ee3, 0),# East Asian ideograph
- 0x275971: (0x8d2b, 0),# East Asian ideograph
- 0x225e58: (0x75cd, 0),# East Asian ideograph
- 0x215e59: (0x95a8, 0),# East Asian ideograph
- 0x275e5a: (0x95fd, 0),# East Asian ideograph
- 0x6f7727: (0xadd5, 0),# Korean hangul
- 0x213621: (0x54aa, 0),# East Asian ideograph
- 0x213622: (0x54a8, 0),# East Asian ideograph
- 0x213623: (0x54ac, 0),# East Asian ideograph
- 0x213624: (0x54c0, 0),# East Asian ideograph
- 0x213625: (0x54b3, 0),# East Asian ideograph
- 0x213626: (0x54a6, 0),# East Asian ideograph
- 0x213627: (0x54ab, 0),# East Asian ideograph
- 0x213628: (0x54c7, 0),# East Asian ideograph
- 0x213629: (0x54c9, 0),# East Asian ideograph
- 0x21362a: (0x54c4, 0),# East Asian ideograph
- 0x21362b: (0x54c2, 0),# East Asian ideograph
- 0x22362c: (0x6538, 0),# East Asian ideograph
- 0x21362d: (0x54c1, 0),# East Asian ideograph
- 0x23362e: (0x8c88, 0),# East Asian ideograph
- 0x21362f: (0x54ce, 0),# East Asian ideograph
- 0x213630: (0x54b1, 0),# East Asian ideograph
- 0x213631: (0x54bb, 0),# East Asian ideograph
- 0x213632: (0x54af, 0),# East Asian ideograph
- 0x213633: (0x54c8, 0),# East Asian ideograph
- 0x223634: (0x6542, 0),# East Asian ideograph
- 0x225e5e: (0x75cc, 0),# East Asian ideograph
- 0x213636: (0x5510, 0),# East Asian ideograph
- 0x213637: (0x54ea, 0),# East Asian ideograph
- 0x213638: (0x5514, 0),# East Asian ideograph
- 0x233639: (0x8c94, 0),# East Asian ideograph
- 0x21363a: (0x54e5, 0),# East Asian ideograph
- 0x225e5f: (0x75d0, 0),# East Asian ideograph
- 0x21363c: (0x54f2, 0),# East Asian ideograph
- 0x21363d: (0x54e8, 0),# East Asian ideograph
- 0x21363e: (0x54e1, 0),# East Asian ideograph
- 0x22363f: (0x6555, 0),# East Asian ideograph
- 0x213640: (0x54ed, 0),# East Asian ideograph
- 0x233641: (0x8c9b, 0),# East Asian ideograph
- 0x213642: (0x5509, 0),# East Asian ideograph
- 0x213643: (0x54e6, 0),# East Asian ideograph
- 0x233644: (0x8ca4, 0),# East Asian ideograph
- 0x223645: (0x6567, 0),# East Asian ideograph
- 0x213646: (0x5546, 0),# East Asian ideograph
- 0x223647: (0x6561, 0),# East Asian ideograph
- 0x213648: (0x554f, 0),# East Asian ideograph
- 0x273649: (0x54d1, 0),# East Asian ideograph
- 0x21364a: (0x5566, 0),# East Asian ideograph
- 0x21364b: (0x556a, 0),# East Asian ideograph
- 0x21364c: (0x554a, 0),# East Asian ideograph
- 0x21364d: (0x5544, 0),# East Asian ideograph
- 0x21364e: (0x555c, 0),# East Asian ideograph
- 0x22364f: (0x656d, 0),# East Asian ideograph
- 0x213650: (0x5543, 0),# East Asian ideograph
- 0x213651: (0x552c, 0),# East Asian ideograph
- 0x213652: (0x5561, 0),# East Asian ideograph
- 0x233653: (0x8cb9, 0),# East Asian ideograph
- 0x223654: (0x657a, 0),# East Asian ideograph
- 0x213655: (0x5555, 0),# East Asian ideograph
- 0x213656: (0x552f, 0),# East Asian ideograph
- 0x233657: (0x8ccd, 0),# East Asian ideograph
- 0x213658: (0x5564, 0),# East Asian ideograph
- 0x213659: (0x5538, 0),# East Asian ideograph
- 0x21365a: (0x55a7, 0),# East Asian ideograph
- 0x21365b: (0x5580, 0),# East Asian ideograph
- 0x21365c: (0x557b, 0),# East Asian ideograph
- 0x21365d: (0x557c, 0),# East Asian ideograph
- 0x21365e: (0x5527, 0),# East Asian ideograph
- 0x21365f: (0x5594, 0),# East Asian ideograph
- 0x213660: (0x5587, 0),# East Asian ideograph
- 0x213661: (0x559c, 0),# East Asian ideograph
- 0x213662: (0x558b, 0),# East Asian ideograph
- 0x273663: (0x4e27, 0),# East Asian ideograph
- 0x213664: (0x55b3, 0),# East Asian ideograph
- 0x225e66: (0x75e1, 0),# East Asian ideograph
- 0x213666: (0x5583, 0),# East Asian ideograph
- 0x213667: (0x55b1, 0),# East Asian ideograph
- 0x273668: (0x5355, 0),# East Asian ideograph
- 0x213669: (0x5582, 0),# East Asian ideograph
- 0x21366a: (0x559f, 0),# East Asian ideograph
- 0x225e67: (0x75e6, 0),# East Asian ideograph
- 0x21366c: (0x5598, 0),# East Asian ideograph
- 0x21366d: (0x559a, 0),# East Asian ideograph
- 0x22366e: (0x658c, 0),# East Asian ideograph
- 0x27366f: (0x4e54, 0),# East Asian ideograph
- 0x223670: (0x6592, 0),# East Asian ideograph
- 0x213671: (0x55b2, 0),# East Asian ideograph
- 0x233672: (0x8cdd, 0),# East Asian ideograph
- 0x213673: (0x55e8, 0),# East Asian ideograph
- 0x233674: (0x8cd9, 0),# East Asian ideograph
- 0x223675: (0x659b, 0),# East Asian ideograph
- 0x213676: (0x55dc, 0),# East Asian ideograph
- 0x223677: (0x659d, 0),# East Asian ideograph
- 0x213678: (0x55c7, 0),# East Asian ideograph
- 0x213679: (0x55d3, 0),# East Asian ideograph
- 0x21367a: (0x55ce, 0),# East Asian ideograph
- 0x21367b: (0x55e3, 0),# East Asian ideograph
- 0x23367c: (0x8cf5, 0),# East Asian ideograph
- 0x21367d: (0x55e4, 0),# East Asian ideograph
- 0x23367e: (0x8cfb, 0),# East Asian ideograph
- 0x232760: (0x8600, 0),# East Asian ideograph
- 0x275e6b: (0x8f9f, 0),# East Asian ideograph (duplicate simplified)
- 0x285424: (0x70e8, 0),# East Asian ideograph
- 0x27375c: (0x556d, 0),# East Asian ideograph
- 0x4b5e6c: (0x961d, 0),# East Asian ideograph (duplicate simplified)
- 0x293f5a: (0x90e7, 0),# East Asian ideograph
- 0x235e6f: (0x9ef6, 0),# East Asian ideograph
- 0x4b5422: (0x81d3, 0),# East Asian ideograph
- 0x6f583f: (0xc9ec, 0),# Korean hangul
- 0x27375d: (0x55eb, 0),# East Asian ideograph
- 0x225e71: (0x75e4, 0),# East Asian ideograph
- 0x225e72: (0x75e0, 0),# East Asian ideograph
- 0x4b4759: (0x6d99, 0),# East Asian ideograph
- 0x6f4b66: (0xb0c7, 0),# Korean hangul
- 0x225e73: (0x75d7, 0),# East Asian ideograph
- 0x6f5755: (0xc88d, 0),# Korean hangul
- 0x235e74: (0x9ef9, 0),# East Asian ideograph
- 0x275977: (0x8d3a, 0),# East Asian ideograph
- 0x27375e: (0x56a3, 0),# East Asian ideograph
- 0x235e76: (0x9efb, 0),# East Asian ideograph
- 0x274921: (0x6cfd, 0),# East Asian ideograph
- 0x293f5c: (0x90ac, 0),# East Asian ideograph
- 0x2d616a: (0x6b1d, 0),# East Asian ideograph
- 0x215e77: (0x964c, 0),# East Asian ideograph
- 0x274922: (0x6d4a, 0),# East Asian ideograph
- 0x6f5756: (0xc890, 0),# Korean hangul
- 0x6f4924: (0xac74, 0),# Korean hangul
- 0x6f4b76: (0xb118, 0),# Korean hangul
- 0x215e7a: (0x9662, 0),# East Asian ideograph
- 0x224925: (0x6d6d, 0),# East Asian ideograph
- 0x275978: (0x8d35, 0),# East Asian ideograph
- 0x235e7b: (0x9efe, 0),# East Asian ideograph (not in Unicode)
- 0x274926: (0x6d4e, 0),# East Asian ideograph
- 0x215e7c: (0x965b, 0),# East Asian ideograph
- 0x274927: (0x6cde, 0),# East Asian ideograph
- 0x235e7d: (0x9f02, 0),# East Asian ideograph
- 0x274928: (0x6ee8, 0),# East Asian ideograph
- 0x6f5757: (0xc894, 0),# Korean hangul
- 0x215e7e: (0x965d, 0),# East Asian ideograph
- 0x224929: (0x6d91, 0),# East Asian ideograph
- 0x287065: (0x7ec2, 0),# East Asian ideograph
- 0x2f4231: (0x8019, 0),# Unrelated variant of EACC 215266 which maps to 8019
- 0x6f492a: (0xac81, 0),# Korean hangul
- 0x33492e: (0x6f81, 0),# East Asian ideograph
- 0x27492b: (0x6ee5, 0),# East Asian ideograph
- 0x33337b: (0x52e6, 0),# East Asian ideograph
- 0x233871: (0x8dc2, 0),# East Asian ideograph
- 0x22492c: (0x6d81, 0),# East Asian ideograph
- 0x235647: (0x9b51, 0),# East Asian ideograph
- 0x33463c: (0x6bbb, 0),# East Asian ideograph
- 0x27492d: (0x6d9b, 0),# East Asian ideograph
- 0x6f553a: (0xc587, 0),# Korean hangul
- 0x27492e: (0x6da9, 0),# East Asian ideograph
- 0x22413c: (0x6a5a, 0),# East Asian ideograph
- 0x2e492f: (0x6cd9, 0),# East Asian ideograph
- 0x27597a: (0x4e70, 0),# East Asian ideograph
- 0x213331: (0x518d, 0),# East Asian ideograph
- 0x273761: (0x7f57, 0),# East Asian ideograph (duplicate simplified)
- 0x213721: (0x55da, 0),# East Asian ideograph
- 0x223722: (0x65a8, 0),# East Asian ideograph
- 0x223723: (0x65a6, 0),# East Asian ideograph
- 0x213724: (0x5600, 0),# East Asian ideograph
- 0x233725: (0x8d04, 0),# East Asian ideograph
- 0x213726: (0x55fe, 0),# East Asian ideograph
- 0x273727: (0x5567, 0),# East Asian ideograph
- 0x213728: (0x55f7, 0),# East Asian ideograph
- 0x213729: (0x5608, 0),# East Asian ideograph
- 0x22372a: (0x65b6, 0),# East Asian ideograph
- 0x21372b: (0x55fd, 0),# East Asian ideograph
- 0x22372c: (0x65b8, 0),# East Asian ideograph
- 0x23372d: (0x8d09, 0),# East Asian ideograph
- 0x21372e: (0x5614, 0),# East Asian ideograph
- 0x22372f: (0x65bf, 0),# East Asian ideograph
- 0x273730: (0x5c1d, 0),# East Asian ideograph
- 0x273731: (0x55bd, 0),# East Asian ideograph
- 0x273732: (0x5520, 0),# East Asian ideograph
- 0x213733: (0x562f, 0),# East Asian ideograph
- 0x223734: (0x65c2, 0),# East Asian ideograph
- 0x213735: (0x5636, 0),# East Asian ideograph
- 0x213736: (0x5632, 0),# East Asian ideograph
- 0x213737: (0x563b, 0),# East Asian ideograph
- 0x213738: (0x5639, 0),# East Asian ideograph
- 0x274934: (0x6e85, 0),# East Asian ideograph
- 0x23373a: (0x8d10, 0),# East Asian ideograph
- 0x22373b: (0x65d0, 0),# East Asian ideograph
- 0x22373c: (0x65d2, 0),# East Asian ideograph
- 0x21373d: (0x5634, 0),# East Asian ideograph
- 0x23373e: (0x8d18, 0),# East Asian ideograph
- 0x224935: (0x6def, 0),# East Asian ideograph
- 0x213740: (0x5630, 0),# East Asian ideograph
- 0x213741: (0x566b, 0),# East Asian ideograph
- 0x213742: (0x5664, 0),# East Asian ideograph
- 0x213743: (0x5669, 0),# East Asian ideograph
- 0x223744: (0x65db, 0),# East Asian ideograph
- 0x213745: (0x5674, 0),# East Asian ideograph
- 0x273746: (0x5f53, 0),# East Asian ideograph (duplicate simplified)
- 0x213747: (0x5665, 0),# East Asian ideograph
- 0x213748: (0x566a, 0),# East Asian ideograph
- 0x213749: (0x5668, 0),# East Asian ideograph
- 0x22374a: (0x65e1, 0),# East Asian ideograph
- 0x27374b: (0x55f3, 0),# East Asian ideograph
- 0x225a23: (0x7429, 0),# East Asian ideograph
- 0x21374d: (0x566c, 0),# East Asian ideograph
- 0x21374e: (0x5680, 0),# East Asian ideograph
- 0x21374f: (0x568e, 0),# East Asian ideograph
- 0x213750: (0x5685, 0),# East Asian ideograph
- 0x214938: (0x701b, 0),# East Asian ideograph
- 0x233752: (0x8d78, 0),# East Asian ideograph
- 0x213753: (0x568f, 0),# East Asian ideograph
- 0x223754: (0x65f4, 0),# East Asian ideograph
- 0x213755: (0x56ae, 0),# East Asian ideograph (variant of 453755 which maps to 56AE)
- 0x273756: (0x5499, 0),# East Asian ideograph
- 0x224939: (0x6d7f, 0),# East Asian ideograph
- 0x213758: (0x56a5, 0),# East Asian ideograph
- 0x213759: (0x56b7, 0),# East Asian ideograph
- 0x22375a: (0x6609, 0),# East Asian ideograph
- 0x27375b: (0x5624, 0),# East Asian ideograph
- 0x21375c: (0x56c0, 0),# East Asian ideograph
- 0x21493a: (0x7028, 0),# East Asian ideograph
- 0x22375e: (0x660a, 0),# East Asian ideograph
- 0x21375f: (0x56bc, 0),# East Asian ideograph
- 0x213760: (0x56ca, 0),# East Asian ideograph
- 0x213761: (0x56c9, 0),# East Asian ideograph
- 0x273762: (0x5453, 0),# East Asian ideograph
- 0x22493b: (0x6d85, 0),# East Asian ideograph
- 0x223764: (0x6603, 0),# East Asian ideograph
- 0x213765: (0x56db, 0),# East Asian ideograph
- 0x213766: (0x56da, 0),# East Asian ideograph
- 0x213767: (0x56e0, 0),# East Asian ideograph
- 0x213768: (0x56de, 0),# East Asian ideograph
- 0x21493c: (0x7015, 0),# East Asian ideograph
- 0x22376a: (0x6611, 0),# East Asian ideograph
- 0x22376b: (0x6615, 0),# East Asian ideograph
- 0x21376c: (0x56fa, 0),# East Asian ideograph
- 0x22376d: (0x6604, 0),# East Asian ideograph
- 0x22376e: (0x6631, 0),# East Asian ideograph
- 0x21376f: (0x570b, 0),# East Asian ideograph
- 0x213770: (0x570d, 0),# East Asian ideograph
- 0x233771: (0x8d94, 0),# East Asian ideograph
- 0x223772: (0x6621, 0),# East Asian ideograph
- 0x273773: (0x56e2, 0),# East Asian ideograph
- 0x273774: (0x56fe, 0),# East Asian ideograph
- 0x223775: (0x662c, 0),# East Asian ideograph
- 0x223777: (0x6635, 0),# East Asian ideograph
- 0x213778: (0x572f, 0),# East Asian ideograph
- 0x213779: (0x5730, 0),# East Asian ideograph
- 0x21377a: (0x5728, 0),# East Asian ideograph
- 0x21377b: (0x5733, 0),# East Asian ideograph
- 0x22377c: (0x661e, 0),# East Asian ideograph
- 0x22377d: (0x663a, 0),# East Asian ideograph
- 0x224940: (0x6d67, 0),# East Asian ideograph
- 0x274941: (0x6d12, 0),# East Asian ideograph
- 0x2e3144: (0x651f, 0),# East Asian ideograph
- 0x274942: (0x6ee9, 0),# East Asian ideograph
- 0x212a34: (0xe8e1, 0),# EACC component character
- 0x214943: (0x7063, 0),# East Asian ideograph
- 0x274944: (0x6ee6, 0),# East Asian ideograph
- 0x4b5a3b: (0x8d08, 0),# East Asian ideograph
- 0x4b4761: (0x6e05, 0),# East Asian ideograph
- 0x213f21: (0x6148, 0),# East Asian ideograph
- 0x224946: (0x6d60, 0),# East Asian ideograph
- 0x2d4b35: (0x73c9, 0),# East Asian ideograph
- 0x2d4947: (0x7ac8, 0),# East Asian ideograph
- 0x394c2d: (0x7546, 0),# East Asian ideograph
- 0x224948: (0x6d98, 0),# East Asian ideograph
- 0x6f516f: (0xbe48, 0),# Korean hangul
- 0x234949: (0x95be, 0),# East Asian ideograph
- 0x217068: (0x5530, 0),# East Asian ideograph
- 0x295d3a: (0x9e73, 0),# East Asian ideograph
- 0x27494a: (0x707e, 0),# East Asian ideograph
- 0x225352: (0x71a0, 0),# East Asian ideograph
- 0x234644: (0x93bd, 0),# East Asian ideograph
- 0x22494b: (0x6d7c, 0),# East Asian ideograph
- 0x6f575e: (0xc8ac, 0),# Korean hangul
- 0x22494c: (0x6d70, 0),# East Asian ideograph
- 0x21494d: (0x7092, 0),# East Asian ideograph
- 0x23494e: (0x95ba, 0),# East Asian ideograph
- 0x295d3b: (0x9e42, 0),# East Asian ideograph
- 0x23494f: (0x95b6, 0),# East Asian ideograph
- 0x2e3e3f: (0x7ba0, 0),# East Asian ideograph
- 0x234950: (0x95bf, 0),# East Asian ideograph
- 0x225278: (0x7178, 0),# East Asian ideograph
- 0x274951: (0x4e3a, 0),# East Asian ideograph
- 0x213d44: (0x5f29, 0),# East Asian ideograph
- 0x334674: (0x76c5, 0),# East Asian ideograph
- 0x234952: (0x95bd, 0),# East Asian ideograph
- 0x6f4953: (0xacf1, 0),# Korean hangul
- 0x21392a: (0x592e, 0),# East Asian ideograph
- 0x295d3c: (0x5364, 0),# East Asian ideograph
- 0x2d4954: (0x70f1, 0),# East Asian ideograph
- 0x6f4955: (0xacf5, 0),# Korean hangul
- 0x22526b: (0x7153, 0),# East Asian ideograph
- 0x6f5760: (0xc8b8, 0),# Korean hangul
- 0x2d4956: (0x70b0, 0),# East Asian ideograph
- 0x213d45: (0x5f2d, 0),# East Asian ideograph
- 0x4b5871: (0x8aa4, 0),# East Asian ideograph
- 0x6f4b78: (0xb11b, 0),# Korean hangul
- 0x6f4957: (0xacfa, 0),# Korean hangul
- 0x6f4958: (0xacfc, 0),# Korean hangul
- 0x284339: (0x680a, 0),# East Asian ideograph
- 0x22746a: (0x7f7f, 0),# East Asian ideograph
- 0x225251: (0x7146, 0),# East Asian ideograph
- 0x234959: (0x95c9, 0),# East Asian ideograph
- 0x22495a: (0x6db4, 0),# East Asian ideograph
- 0x6f5761: (0xc8c4, 0),# Korean hangul
- 0x213821: (0x5740, 0),# East Asian ideograph
- 0x233822: (0x8d96, 0),# East Asian ideograph
- 0x213823: (0x574d, 0),# East Asian ideograph
- 0x213824: (0x573e, 0),# East Asian ideograph
- 0x213825: (0x574e, 0),# East Asian ideograph
- 0x223827: (0x6633, 0),# East Asian ideograph
- 0x223828: (0x662b, 0),# East Asian ideograph
- 0x22495c: (0x6daa, 0),# East Asian ideograph
- 0x21382a: (0x5777, 0),# East Asian ideograph
- 0x22382b: (0x6634, 0),# East Asian ideograph
- 0x22382c: (0x6624, 0),# East Asian ideograph
- 0x21382d: (0x5766, 0),# East Asian ideograph
- 0x21382e: (0x5782, 0),# East Asian ideograph
- 0x21495d: (0x70cf, 0),# East Asian ideograph
- 0x213830: (0x57a0, 0),# East Asian ideograph
- 0x223831: (0x6645, 0),# East Asian ideograph
- 0x213832: (0x57a3, 0),# East Asian ideograph
- 0x233833: (0x8da6, 0),# East Asian ideograph
- 0x213834: (0x57a2, 0),# East Asian ideograph
- 0x213835: (0x57d4, 0),# East Asian ideograph
- 0x213836: (0x57c2, 0),# East Asian ideograph
- 0x213837: (0x57ce, 0),# East Asian ideograph
- 0x213838: (0x57cb, 0),# East Asian ideograph
- 0x213839: (0x57c3, 0),# East Asian ideograph
- 0x21383a: (0x57f9, 0),# East Asian ideograph
- 0x27383b: (0x6267, 0),# East Asian ideograph
- 0x21383c: (0x57fa, 0),# East Asian ideograph
- 0x22383d: (0x6665, 0),# East Asian ideograph
- 0x22383e: (0x665c, 0),# East Asian ideograph
- 0x22383f: (0x6661, 0),# East Asian ideograph
- 0x213840: (0x5802, 0),# East Asian ideograph
- 0x224960: (0x6dec, 0),# East Asian ideograph
- 0x213842: (0x57e4, 0),# East Asian ideograph
- 0x213843: (0x57e0, 0),# East Asian ideograph
- 0x273844: (0x62a5, 0),# East Asian ideograph
- 0x273845: (0x5c27, 0),# East Asian ideograph
- 0x213846: (0x5835, 0),# East Asian ideograph
- 0x213847: (0x582a, 0),# East Asian ideograph
- 0x223848: (0x665b, 0),# East Asian ideograph
- 0x223849: (0x6659, 0),# East Asian ideograph
- 0x22384a: (0x6667, 0),# East Asian ideograph
- 0x21384b: (0x5821, 0),# East Asian ideograph
- 0x21384c: (0x585e, 0),# East Asian ideograph
- 0x22384d: (0x6657, 0),# East Asian ideograph
- 0x275541: (0x83b1, 0),# East Asian ideograph
- 0x21384f: (0x5851, 0),# East Asian ideograph
- 0x213850: (0x586b, 0),# East Asian ideograph
- 0x223851: (0x666c, 0),# East Asian ideograph
- 0x233852: (0x8dab, 0),# East Asian ideograph
- 0x234963: (0x95d3, 0),# East Asian ideograph
- 0x213854: (0x5854, 0),# East Asian ideograph
- 0x273855: (0x575e, 0),# East Asian ideograph
- 0x213856: (0x584a, 0),# East Asian ideograph
- 0x213857: (0x5883, 0),# East Asian ideograph
- 0x213858: (0x587e, 0),# East Asian ideograph
- 0x234964: (0x95d1, 0),# East Asian ideograph
- 0x23385a: (0x8db0, 0),# East Asian ideograph
- 0x27385b: (0x5811, 0),# East Asian ideograph
- 0x216061: (0x98bc, 0),# East Asian ideograph
- 0x21385d: (0x5893, 0),# East Asian ideograph
- 0x21385e: (0x589e, 0),# East Asian ideograph
- 0x234965: (0x95c3, 0),# East Asian ideograph
- 0x273860: (0x575f, 0),# East Asian ideograph
- 0x273861: (0x5760, 0),# East Asian ideograph
- 0x273862: (0x5815, 0),# East Asian ideograph
- 0x213863: (0x589f, 0),# East Asian ideograph
- 0x273864: (0x575b, 0),# East Asian ideograph
- 0x274966: (0x65e0, 0),# East Asian ideograph
- 0x233866: (0x8db2, 0),# East Asian ideograph
- 0x273867: (0x57a6, 0),# East Asian ideograph
- 0x223868: (0x6677, 0),# East Asian ideograph
- 0x273869: (0x538b, 0),# East Asian ideograph
- 0x21386a: (0x58d1, 0),# East Asian ideograph
- 0x27386b: (0x5739, 0),# East Asian ideograph
- 0x21386c: (0x58d8, 0),# East Asian ideograph
- 0x27386d: (0x5784, 0),# East Asian ideograph
- 0x23386e: (0x8dbc, 0),# East Asian ideograph
- 0x27386f: (0x575c, 0),# East Asian ideograph
- 0x233870: (0x8db9, 0),# East Asian ideograph
- 0x223871: (0x668c, 0),# East Asian ideograph
- 0x233872: (0x8dc1, 0),# East Asian ideograph
- 0x213873: (0x58ec, 0),# East Asian ideograph
- 0x213874: (0x58ef, 0),# East Asian ideograph
- 0x223875: (0x668b, 0),# East Asian ideograph
- 0x273876: (0x58f6, 0),# East Asian ideograph
- 0x273877: (0x5bff, 0),# East Asian ideograph
- 0x213878: (0x590f, 0),# East Asian ideograph
- 0x223879: (0x6694, 0),# East Asian ideograph
- 0x22387a: (0x668a, 0),# East Asian ideograph
- 0x21387b: (0x5916, 0),# East Asian ideograph
- 0x22387c: (0x6698, 0),# East Asian ideograph
- 0x22387d: (0x668d, 0),# East Asian ideograph
- 0x21387e: (0x591c, 0),# East Asian ideograph
- 0x234547: (0x936a, 0),# East Asian ideograph
- 0x22496b: (0x6db7, 0),# East Asian ideograph
- 0x2d3f54: (0x61d0, 0),# East Asian ideograph
- 0x22496c: (0x6de2, 0),# East Asian ideograph
- 0x6f5768: (0xc8e4, 0),# Korean hangul
- 0x4b393e: (0x5965, 0),# East Asian ideograph
- 0x27496d: (0x70e6, 0),# East Asian ideograph
- 0x22496e: (0x6de9, 0),# East Asian ideograph
- 0x6f5765: (0xc8d5, 0),# Korean hangul
- 0x27496f: (0x7080, 0),# East Asian ideograph
- 0x21763e: (0x5810, 0),# East Asian ideograph
- 0x6f4b79: (0xb11c, 0),# Korean hangul
- 0x4d5053: (0x98da, 0),# East Asian ideograph
- 0x6f4970: (0xad70, 0),# Korean hangul
- 0x224247: (0x6a90, 0),# East Asian ideograph
- 0x224971: (0x6df6, 0),# East Asian ideograph
- 0x28433a: (0x69e0, 0),# East Asian ideograph
- 0x295d42: (0x9e7e, 0),# East Asian ideograph
- 0x234972: (0x95e4, 0),# East Asian ideograph
- 0x6f7622: (0x3186, 0),# Korean hangul
- 0x27487b: (0x6dc0, 0),# East Asian ideograph
- 0x6f5766: (0xc8d7, 0),# Korean hangul
- 0x215f64: (0x971c, 0),# East Asian ideograph
- 0x213d4b: (0x5f48, 0),# East Asian ideograph
- 0x274975: (0x6247, 0),# East Asian ideograph
- 0x6f4976: (0xad7d, 0),# Korean hangul
- 0x213421: (0x528d, 0),# East Asian ideograph
- 0x225257: (0x7160, 0),# East Asian ideograph
- 0x4b4977: (0x7188, 0),# East Asian ideograph
- 0x233422: (0x8b2b, 0),# East Asian ideograph
- 0x6f4978: (0xad81, 0),# Korean hangul
- 0x4b4339: (0x6674, 0),# East Asian ideograph
- 0x223423: (0x644e, 0),# East Asian ideograph
- 0x223d6a: (0x6910, 0),# East Asian ideograph
- 0x224979: (0x6e0f, 0),# East Asian ideograph
- 0x213424: (0x529b, 0),# East Asian ideograph
- 0x22414b: (0x6a5c, 0),# East Asian ideograph
- 0x23497a: (0x961e, 0),# East Asian ideograph
- 0x283671: (0x6593, 0),# East Asian ideograph
- 0x23497b: (0x9624, 0),# East Asian ideograph
- 0x23497c: (0x9622, 0),# East Asian ideograph
- 0x213427: (0x52a3, 0),# East Asian ideograph
- 0x4b5963: (0x734f, 0),# East Asian ideograph
- 0x27497d: (0x70ed, 0),# East Asian ideograph
- 0x213428: (0x52ab, 0),# East Asian ideograph
- 0x27497e: (0x70eb, 0),# East Asian ideograph
- 0x213429: (0x52a9, 0),# East Asian ideograph
- 0x275d47: (0x9499, 0),# East Asian ideograph
- 0x23342a: (0x8b37, 0),# East Asian ideograph
- 0x273771: (0x56ed, 0),# East Asian ideograph
- 0x6f5843: (0xc9f1, 0),# Korean hangul
- 0x21342c: (0x52be, 0),# East Asian ideograph
- 0x2d4f6b: (0x7af8, 0),# East Asian ideograph
- 0x4c2962: (0x5f4d, 0),# East Asian ideograph (variant of 222962 which maps to 5F4D)
- 0x21342d: (0x52c7, 0),# East Asian ideograph
- 0x334c37: (0x8e6f, 0),# East Asian ideograph
- 0x215f67: (0x9727, 0),# East Asian ideograph
- 0x21742e: (0x5707, 0),# East Asian ideograph
- 0x23454c: (0x934f, 0),# East Asian ideograph
- 0x2d6078: (0x9920, 0),# East Asian ideograph
- 0x21342f: (0x52c1, 0),# East Asian ideograph
- 0x273772: (0x5706, 0),# East Asian ideograph
- 0x233921: (0x8dcf, 0),# East Asian ideograph
- 0x233922: (0x8dd6, 0),# East Asian ideograph
- 0x273923: (0x4f19, 0),# East Asian ideograph
- 0x223924: (0x7a25, 0),# East Asian ideograph
- 0x213925: (0x5927, 0),# East Asian ideograph
- 0x213926: (0x592a, 0),# East Asian ideograph
- 0x233927: (0x8dd0, 0),# East Asian ideograph
- 0x213928: (0x5929, 0),# East Asian ideograph
- 0x213929: (0x592d, 0),# East Asian ideograph
- 0x22392a: (0x66a0, 0),# East Asian ideograph
- 0x23392b: (0x8dc5, 0),# East Asian ideograph
- 0x21392c: (0x5937, 0),# East Asian ideograph
- 0x217432: (0x5714, 0),# East Asian ideograph
- 0x27392e: (0x5939, 0),# East Asian ideograph
- 0x23392f: (0x8de4, 0),# East Asian ideograph
- 0x223930: (0x5c21, 0),# East Asian ideograph
- 0x213931: (0x5948, 0),# East Asian ideograph
- 0x223932: (0x669d, 0),# East Asian ideograph
- 0x213433: (0x52d9, 0),# East Asian ideograph
- 0x213934: (0x5955, 0),# East Asian ideograph
- 0x233935: (0x8deb, 0),# East Asian ideograph
- 0x233936: (0x8df4, 0),# East Asian ideograph
- 0x213937: (0x594f, 0),# East Asian ideograph
- 0x233938: (0x8de9, 0),# East Asian ideograph
- 0x213434: (0x52d5, 0),# East Asian ideograph
- 0x22393a: (0x66b2, 0),# East Asian ideograph
- 0x23393b: (0x8de3, 0),# East Asian ideograph
- 0x21393c: (0x5960, 0),# East Asian ideograph
- 0x23393d: (0x8de7, 0),# East Asian ideograph
- 0x21393e: (0x5967, 0),# East Asian ideograph
- 0x23393f: (0x8e09, 0),# East Asian ideograph
- 0x223940: (0x66b5, 0),# East Asian ideograph
- 0x273941: (0x594b, 0),# East Asian ideograph
- 0x213942: (0x5973, 0),# East Asian ideograph
- 0x223943: (0x66ac, 0),# East Asian ideograph
- 0x233944: (0x8dff, 0),# East Asian ideograph
- 0x213945: (0x5984, 0),# East Asian ideograph
- 0x233946: (0x8e05, 0),# East Asian ideograph
- 0x223947: (0x66b1, 0),# East Asian ideograph
- 0x213948: (0x597d, 0),# East Asian ideograph
- 0x233949: (0x8e01, 0),# East Asian ideograph
- 0x21394a: (0x5982, 0),# East Asian ideograph
- 0x21394b: (0x5981, 0),# East Asian ideograph
- 0x21394c: (0x59a8, 0),# East Asian ideograph
- 0x21394d: (0x5992, 0),# East Asian ideograph
- 0x23394e: (0x8e04, 0),# East Asian ideograph
- 0x22394f: (0x66be, 0),# East Asian ideograph
- 0x233950: (0x8e06, 0),# East Asian ideograph
- 0x233438: (0x8b3e, 0),# East Asian ideograph
- 0x233952: (0x8e2a, 0),# East Asian ideograph
- 0x273953: (0x5986, 0),# East Asian ideograph
- 0x223954: (0x66c0, 0),# East Asian ideograph
- 0x223955: (0x66c7, 0),# East Asian ideograph
- 0x213956: (0x598a, 0),# East Asian ideograph
- 0x233957: (0x8e2e, 0),# East Asian ideograph
- 0x233958: (0x8e21, 0),# East Asian ideograph
- 0x213959: (0x59bb, 0),# East Asian ideograph
- 0x22395a: (0x66bb, 0),# East Asian ideograph
- 0x21395b: (0x59d1, 0),# East Asian ideograph
- 0x22395c: (0x66c4, 0),# East Asian ideograph
- 0x21343a: (0x52df, 0),# East Asian ideograph
- 0x21395e: (0x59d0, 0),# East Asian ideograph
- 0x21395f: (0x59d7, 0),# East Asian ideograph
- 0x223960: (0x66cf, 0),# East Asian ideograph
- 0x213961: (0x59d2, 0),# East Asian ideograph
- 0x213962: (0x59d3, 0),# East Asian ideograph
- 0x213963: (0x59ca, 0),# East Asian ideograph
- 0x233964: (0x8e16, 0),# East Asian ideograph
- 0x213965: (0x59cb, 0),# East Asian ideograph
- 0x233966: (0x8e26, 0),# East Asian ideograph
- 0x213967: (0x59e3, 0),# East Asian ideograph
- 0x233968: (0x8e14, 0),# East Asian ideograph
- 0x213969: (0x59ff, 0),# East Asian ideograph
- 0x21396a: (0x59d8, 0),# East Asian ideograph
- 0x21396b: (0x5a03, 0),# East Asian ideograph
- 0x21396c: (0x59e8, 0),# East Asian ideograph
- 0x21396d: (0x59e5, 0),# East Asian ideograph
- 0x21396e: (0x59ea, 0),# East Asian ideograph
- 0x23396f: (0x8e41, 0),# East Asian ideograph
- 0x213970: (0x59fb, 0),# East Asian ideograph
- 0x223971: (0x66da, 0),# East Asian ideograph
- 0x223972: (0x66db, 0),# East Asian ideograph
- 0x223973: (0x66e2, 0),# East Asian ideograph
- 0x213974: (0x5a18, 0),# East Asian ideograph
- 0x213975: (0x5a23, 0),# East Asian ideograph
- 0x223976: (0x66e1, 0),# East Asian ideograph
- 0x233977: (0x8e40, 0),# East Asian ideograph
- 0x223978: (0x66e8, 0),# East Asian ideograph
- 0x233979: (0x8e36, 0),# East Asian ideograph
- 0x21397a: (0x5a1f, 0),# East Asian ideograph
- 0x21397b: (0x5a1b, 0),# East Asian ideograph
- 0x22397c: (0x66e9, 0),# East Asian ideograph
- 0x21397d: (0x5a29, 0),# East Asian ideograph
- 0x23397e: (0x8e3d, 0),# East Asian ideograph
- 0x27785a: (0x5785, 0),# East Asian ideograph
- 0x217441: (0x5724, 0),# East Asian ideograph
- 0x6f532a: (0xc12a, 0),# Korean hangul
- 0x213442: (0x5306, 0),# East Asian ideograph
- 0x334550: (0x7f47, 0),# East Asian ideograph
- 0x232337: (0x846e, 0),# East Asian ideograph
- 0x217443: (0x5729, 0),# East Asian ideograph
- 0x4b5521: (0x8332, 0),# East Asian ideograph
- 0x233444: (0x8b54, 0),# East Asian ideograph
- 0x235c71: (0x9e07, 0),# East Asian ideograph
- 0x22525e: (0x7176, 0),# East Asian ideograph
- 0x213445: (0x5310, 0),# East Asian ideograph
- 0x2d4b45: (0x6bec, 0),# East Asian ideograph
- 0x6f5435: (0xc309, 0),# Korean hangul
- 0x22527b: (0x7187, 0),# East Asian ideograph
- 0x6f576e: (0xc900, 0),# Korean hangul
- 0x6f532b: (0xc12c, 0),# Korean hangul
- 0x6f2527: (0x3162, 0),# Korean hangul
- 0x227447: (0x7f63, 0),# East Asian ideograph
- 0x4b3666: (0x5a1a, 0),# East Asian ideograph
- 0x233448: (0x8b53, 0),# East Asian ideograph
- 0x233449: (0x8b4a, 0),# East Asian ideograph
- 0x275124: (0x7eb8, 0),# East Asian ideograph
- 0x223046: (0x6285, 0),# East Asian ideograph
- 0x21344a: (0x5319, 0),# East Asian ideograph
- 0x6f576f: (0xc904, 0),# Korean hangul
- 0x6f532c: (0xc12d, 0),# Korean hangul
- 0x23356f: (0x8c74, 0),# East Asian ideograph
- 0x6f4b7b: (0xb11e, 0),# Korean hangul
- 0x215b2a: (0x8e91, 0),# East Asian ideograph
- 0x6f4f6f: (0xb9db, 0),# Korean hangul
- 0x21344d: (0x5321, 0),# East Asian ideograph
- 0x22344e: (0x64a2, 0),# East Asian ideograph
- 0x23344f: (0x8b3f, 0),# East Asian ideograph
- 0x2e7450: (0x7f82, 0),# East Asian ideograph
- 0x213451: (0x532f, 0),# East Asian ideograph
- 0x335230: (0x7f6e, 0),# East Asian ideograph (variant of 215230 which maps to 7F6E)
- 0x4b3668: (0x5358, 0),# East Asian ideograph
- 0x234553: (0x9356, 0),# East Asian ideograph
- 0x21725d: (0x5620, 0),# East Asian ideograph
- 0x27554f: (0x53f6, 0),# East Asian ideograph
- 0x213453: (0x5339, 0),# East Asian ideograph
- 0x223454: (0x6490, 0),# East Asian ideograph
- 0x213455: (0x5340, 0),# East Asian ideograph
- 0x215f6f: (0x9748, 0),# East Asian ideograph
- 0x215b2c: (0x8eaa, 0),# East Asian ideograph
- 0x234554: (0x9371, 0),# East Asian ideograph
- 0x6f5028: (0xba4d, 0),# Korean hangul
- 0x283457: (0x63b8, 0),# East Asian ideograph
- 0x274b2d: (0x736d, 0),# East Asian ideograph
- 0x2d3458: (0x4edf, 0),# East Asian ideograph
- 0x6f4c65: (0xb2d0, 0),# Korean hangul
- 0x284934: (0x6d43, 0),# East Asian ideograph
- 0x233459: (0x8b59, 0),# East Asian ideograph
- 0x6f5772: (0xc90d, 0),# Korean hangul
- 0x233a21: (0x8e30, 0),# East Asian ideograph
- 0x213a22: (0x5a49, 0),# East Asian ideograph
- 0x21345b: (0x5347, 0),# East Asian ideograph
- 0x233a24: (0x8e47, 0),# East Asian ideograph
- 0x213a25: (0x5a4a, 0),# East Asian ideograph
- 0x233a26: (0x8e46, 0),# East Asian ideograph
- 0x273a27: (0x5987, 0),# East Asian ideograph
- 0x273a28: (0x5a04, 0),# East Asian ideograph
- 0x213a29: (0x5a3c, 0),# East Asian ideograph
- 0x213a2a: (0x5a62, 0),# East Asian ideograph
- 0x213a2b: (0x5a5a, 0),# East Asian ideograph
- 0x213a2c: (0x5a77, 0),# East Asian ideograph
- 0x213a2d: (0x5a9a, 0),# East Asian ideograph
- 0x233a2e: (0x8e4c, 0),# East Asian ideograph
- 0x213a2f: (0x5a7f, 0),# East Asian ideograph
- 0x223a30: (0x670f, 0),# East Asian ideograph
- 0x224767: (0x6cac, 0),# East Asian ideograph
- 0x233a32: (0x8e4f, 0),# East Asian ideograph
- 0x223a33: (0x6712, 0),# East Asian ideograph
- 0x223a34: (0x6713, 0),# East Asian ideograph
- 0x233a35: (0x8e62, 0),# East Asian ideograph
- 0x233a36: (0x8e60, 0),# East Asian ideograph
- 0x213a37: (0x5ab2, 0),# East Asian ideograph
- 0x223a38: (0x6719, 0),# East Asian ideograph
- 0x223a39: (0x6718, 0),# East Asian ideograph
- 0x233a3a: (0x8e54, 0),# East Asian ideograph
- 0x273a3b: (0x59aa, 0),# East Asian ideograph
- 0x213a3c: (0x5ad6, 0),# East Asian ideograph
- 0x213a3d: (0x5ae3, 0),# East Asian ideograph
- 0x233a3e: (0x8e5a, 0),# East Asian ideograph
- 0x233a3f: (0x8e5e, 0),# East Asian ideograph
- 0x233a40: (0x8e55, 0),# East Asian ideograph
- 0x273a41: (0x5a34, 0),# East Asian ideograph
- 0x213a42: (0x5b09, 0),# East Asian ideograph
- 0x273a43: (0x5a75, 0),# East Asian ideograph
- 0x273a44: (0x5a07, 0),# East Asian ideograph
- 0x273a45: (0x59a9, 0),# East Asian ideograph
- 0x233a46: (0x8e95, 0),# East Asian ideograph
- 0x223a47: (0x6723, 0),# East Asian ideograph
- 0x233a48: (0x8e6d, 0),# East Asian ideograph
- 0x213a49: (0x5b24, 0),# East Asian ideograph
- 0x273a4a: (0x5a74, 0),# East Asian ideograph
- 0x273a4b: (0x5a76, 0),# East Asian ideograph
- 0x223a4c: (0x673e, 0),# East Asian ideograph
- 0x213462: (0x5351, 0),# East Asian ideograph
- 0x223a4e: (0x673f, 0),# East Asian ideograph
- 0x213a4f: (0x5b53, 0),# East Asian ideograph
- 0x213a50: (0x5b54, 0),# East Asian ideograph
- 0x213a51: (0x5b55, 0),# East Asian ideograph
- 0x213a52: (0x5b57, 0),# East Asian ideograph
- 0x213a53: (0x5b58, 0),# East Asian ideograph
- 0x213a54: (0x5b5d, 0),# East Asian ideograph
- 0x213a55: (0x5b5c, 0),# East Asian ideograph
- 0x233a57: (0x8e8b, 0),# East Asian ideograph
- 0x223a58: (0x6757, 0),# East Asian ideograph
- 0x213a59: (0x5b64, 0),# East Asian ideograph
- 0x213a5a: (0x5b69, 0),# East Asian ideograph
- 0x273a5b: (0x5b59, 0),# East Asian ideograph
- 0x223a5c: (0x6747, 0),# East Asian ideograph
- 0x213a5d: (0x5b73, 0),# East Asian ideograph
- 0x233a5e: (0x8e9a, 0),# East Asian ideograph
- 0x273a5f: (0x5b5a, 0),# East Asian ideograph
- 0x273a60: (0x5b66, 0),# East Asian ideograph
- 0x223a61: (0x6755, 0),# East Asian ideograph
- 0x213a62: (0x5b7d, 0),# East Asian ideograph
- 0x233a63: (0x8e98, 0),# East Asian ideograph
- 0x233a64: (0x8e9e, 0),# East Asian ideograph
- 0x223466: (0x64b3, 0),# East Asian ideograph
- 0x223a66: (0x674c, 0),# East Asian ideograph
- 0x223a67: (0x6759, 0),# East Asian ideograph
- 0x223a68: (0x6748, 0),# East Asian ideograph
- 0x213a69: (0x5b8c, 0),# East Asian ideograph
- 0x275553: (0x8364, 0),# East Asian ideograph
- 0x233a6b: (0x8ea5, 0),# East Asian ideograph
- 0x213a6c: (0x5b97, 0),# East Asian ideograph
- 0x213a6d: (0x5b9a, 0),# East Asian ideograph
- 0x213a6e: (0x5b9c, 0),# East Asian ideograph
- 0x233a6f: (0x8ea7, 0),# East Asian ideograph
- 0x213a70: (0x5b99, 0),# East Asian ideograph
- 0x223a71: (0x674a, 0),# East Asian ideograph
- 0x233a72: (0x8e99, 0),# East Asian ideograph
- 0x213a73: (0x5ba3, 0),# East Asian ideograph
- 0x213a74: (0x5ba6, 0),# East Asian ideograph
- 0x213a75: (0x5ba4, 0),# East Asian ideograph
- 0x213a76: (0x5ba2, 0),# East Asian ideograph
- 0x213a77: (0x5bb0, 0),# East Asian ideograph
- 0x213a78: (0x5bb8, 0),# East Asian ideograph
- 0x233a7a: (0x8ebc, 0),# East Asian ideograph
- 0x213a7b: (0x5bb4, 0),# East Asian ideograph
- 0x223a7c: (0x6785, 0),# East Asian ideograph
- 0x213a7d: (0x5bb9, 0),# East Asian ideograph
- 0x213a7e: (0x5bb3, 0),# East Asian ideograph
- 0x23233f: (0x844a, 0),# East Asian ideograph
- 0x4b763d: (0x57f4, 0),# East Asian ideograph (variant of 21763D which maps to 57F4)
- 0x22746b: (0x7f7e, 0),# East Asian ideograph
- 0x283b7d: (0x53f0, 0),# East Asian ideograph (duplicate simplified)
- 0x22346c: (0x64d3, 0),# East Asian ideograph
- 0x6f5927: (0xcc39, 0),# Korean hangul
- 0x393b39: (0x5bf3, 0),# East Asian ideograph
- 0x213f26: (0x614c, 0),# East Asian ideograph
- 0x235222: (0x9957, 0),# East Asian ideograph (variant of 475222 which maps to 9957)
- 0x2d346e: (0x5373, 0),# East Asian ideograph
- 0x276232: (0x9e23, 0),# East Asian ideograph
- 0x6f5333: (0xc13c, 0),# Korean hangul
- 0x213d5b: (0x5f79, 0),# East Asian ideograph
- 0x213471: (0x5378, 0),# East Asian ideograph
- 0x287472: (0x7f74, 0),# East Asian ideograph
- 0x23344d: (0x8b56, 0),# East Asian ideograph
- 0x335223: (0x7e8e, 0),# East Asian ideograph
- 0x233473: (0x8b45, 0),# East Asian ideograph
- 0x273f3f: (0x51ed, 0),# East Asian ideograph
- 0x213474: (0x537f, 0),# East Asian ideograph
- 0x213475: (0x5384, 0),# East Asian ideograph
- 0x21325d: (0x50f9, 0),# East Asian ideograph
- 0x225f21: (0x75f9, 0),# East Asian ideograph
- 0x217477: (0x576d, 0),# East Asian ideograph
- 0x225f22: (0x75fc, 0),# East Asian ideograph
- 0x23456f: (0x9364, 0),# East Asian ideograph
- 0x275f23: (0x9648, 0),# East Asian ideograph
- 0x213479: (0x53a5, 0),# East Asian ideograph
- 0x275f24: (0x9646, 0),# East Asian ideograph
- 0x22747a: (0x7f91, 0),# East Asian ideograph
- 0x21347b: (0x53b2, 0),# East Asian ideograph
- 0x21392f: (0x5954, 0),# East Asian ideograph
- 0x225269: (0x7150, 0),# East Asian ideograph
- 0x4b4835: (0x6da3, 0),# East Asian ideograph
- 0x21347d: (0x53c3, 0),# East Asian ideograph
- 0x2d5f28: (0x9665, 0),# East Asian ideograph
- 0x6f5336: (0xc149, 0),# Korean hangul
- 0x6f5329: (0xc127, 0),# Korean hangul
- 0x225f29: (0x7616, 0),# East Asian ideograph
- 0x275f2a: (0x9634, 0),# East Asian ideograph
- 0x275f2b: (0x961f, 0),# East Asian ideograph
- 0x216c41: (0x52d1, 0),# East Asian ideograph
- 0x225f2c: (0x7608, 0),# East Asian ideograph
- 0x6f577a: (0xc958, 0),# Korean hangul
- 0x225f2d: (0x7615, 0),# East Asian ideograph
- 0x295731: (0x9c8b, 0),# East Asian ideograph
- 0x276222: (0x9cc5, 0),# East Asian ideograph
- 0x225f2e: (0x760c, 0),# East Asian ideograph
- 0x23455d: (0x9349, 0),# East Asian ideograph
- 0x6f5567: (0xc5fe, 0),# Korean hangul
- 0x215f2f: (0x9685, 0),# East Asian ideograph
- 0x273340: (0x51bb, 0),# East Asian ideograph
- 0x223b21: (0x677b, 0),# East Asian ideograph
- 0x223b22: (0x6792, 0),# East Asian ideograph
- 0x223b23: (0x6776, 0),# East Asian ideograph
- 0x213b24: (0x5bc4, 0),# East Asian ideograph
- 0x223b25: (0x6791, 0),# East Asian ideograph
- 0x223b26: (0x6799, 0),# East Asian ideograph
- 0x215f31: (0x968d, 0),# East Asian ideograph
- 0x223b28: (0x67a4, 0),# East Asian ideograph
- 0x213b29: (0x5bd0, 0),# East Asian ideograph
- 0x213b2a: (0x5bd3, 0),# East Asian ideograph
- 0x213b2b: (0x5be1, 0),# East Asian ideograph
- 0x213b2c: (0x5be5, 0),# East Asian ideograph
- 0x215f32: (0x9698, 0),# East Asian ideograph
- 0x223b2e: (0x678f, 0),# East Asian ideograph
- 0x233b2f: (0x8ecf, 0),# East Asian ideograph
- 0x223b30: (0x6772, 0),# East Asian ideograph
- 0x223b31: (0x6798, 0),# East Asian ideograph (variant of 4C3B31 which maps to 6798)
- 0x223b32: (0x676a, 0),# East Asian ideograph
- 0x233b33: (0x8ed5, 0),# East Asian ideograph
- 0x213b34: (0x5bee, 0),# East Asian ideograph
- 0x273b35: (0x5bbd, 0),# East Asian ideograph
- 0x273b36: (0x5ba1, 0),# East Asian ideograph
- 0x273b37: (0x5199, 0),# East Asian ideograph
- 0x273b38: (0x5ba0, 0),# East Asian ideograph
- 0x223b39: (0x67ac, 0),# East Asian ideograph
- 0x213b3a: (0x5bf8, 0),# East Asian ideograph
- 0x223b3b: (0x67a0, 0),# East Asian ideograph
- 0x213b3c: (0x5c01, 0),# East Asian ideograph
- 0x213b3d: (0x5c04, 0),# East Asian ideograph
- 0x213b3e: (0x5c09, 0),# East Asian ideograph
- 0x233b3f: (0x8efa, 0),# East Asian ideograph
- 0x273b40: (0x5c06, 0),# East Asian ideograph
- 0x213b41: (0x5c0a, 0),# East Asian ideograph
- 0x233b42: (0x8ef9, 0),# East Asian ideograph
- 0x273b43: (0x5bf9, 0),# East Asian ideograph
- 0x223b44: (0x67f9, 0),# East Asian ideograph
- 0x213b45: (0x5c0f, 0),# East Asian ideograph
- 0x213b46: (0x5c11, 0),# East Asian ideograph
- 0x213b47: (0x5c16, 0),# East Asian ideograph
- 0x223b48: (0x678d, 0),# East Asian ideograph
- 0x223b49: (0x678c, 0),# East Asian ideograph
- 0x213b4a: (0x5c2c, 0),# East Asian ideograph
- 0x233b4b: (0x8ee8, 0),# East Asian ideograph
- 0x223b4c: (0x67fc, 0),# East Asian ideograph
- 0x213b4d: (0x5c38, 0),# East Asian ideograph
- 0x223b4e: (0x6810, 0),# East Asian ideograph
- 0x233b4f: (0x8eeb, 0),# East Asian ideograph
- 0x213b50: (0x5c40, 0),# East Asian ideograph
- 0x223b51: (0x67c8, 0),# East Asian ideograph
- 0x23455f: (0x935a, 0),# East Asian ideograph
- 0x213b53: (0x5c3e, 0),# East Asian ideograph
- 0x223b54: (0x67cc, 0),# East Asian ideograph
- 0x213b55: (0x5c45, 0),# East Asian ideograph
- 0x233b56: (0x8f00, 0),# East Asian ideograph
- 0x213b57: (0x5c4e, 0),# East Asian ideograph
- 0x223b58: (0x67c5, 0),# East Asian ideograph
- 0x233b59: (0x8f05, 0),# East Asian ideograph
- 0x233b5a: (0x8f08, 0),# East Asian ideograph
- 0x233b5b: (0x8f07, 0),# East Asian ideograph
- 0x223b5c: (0x67bb, 0),# East Asian ideograph
- 0x213b5d: (0x5c5b, 0),# East Asian ideograph (not in Unicode)
- 0x213b5e: (0x5c60, 0),# East Asian ideograph
- 0x223b5f: (0x67b0, 0),# East Asian ideograph
- 0x223b60: (0x6803, 0),# East Asian ideograph
- 0x223b61: (0x67f8, 0),# East Asian ideograph
- 0x213b62: (0x5c65, 0),# East Asian ideograph
- 0x273b63: (0x5c5e, 0),# East Asian ideograph
- 0x233b64: (0x8f2c, 0),# East Asian ideograph
- 0x213b65: (0x5c71, 0),# East Asian ideograph
- 0x225a2a: (0x741b, 0),# East Asian ideograph
- 0x213b67: (0x5c90, 0),# East Asian ideograph
- 0x213b68: (0x5c8c, 0),# East Asian ideograph
- 0x213b69: (0x5c91, 0),# East Asian ideograph
- 0x213b6a: (0x5c94, 0),# East Asian ideograph
- 0x233b6b: (0x8f1e, 0),# East Asian ideograph
- 0x213b6c: (0x5cb8, 0),# East Asian ideograph
- 0x233b6d: (0x8f25, 0),# East Asian ideograph
- 0x233b6e: (0x8f20, 0),# East Asian ideograph
- 0x223b6f: (0x67e4, 0),# East Asian ideograph
- 0x223b70: (0x67d9, 0),# East Asian ideograph
- 0x223b71: (0x67db, 0),# East Asian ideograph
- 0x223b72: (0x67b5, 0),# East Asian ideograph
- 0x213b73: (0x5d01, 0),# East Asian ideograph
- 0x273b74: (0x5ce1, 0),# East Asian ideograph
- 0x223b75: (0x67f7, 0),# East Asian ideograph
- 0x213b76: (0x5cfb, 0),# East Asian ideograph
- 0x223b77: (0x67b3, 0),# East Asian ideograph
- 0x233b78: (0x8f36, 0),# East Asian ideograph
- 0x233b79: (0x8f2e, 0),# East Asian ideograph
- 0x233b7a: (0x8f33, 0),# East Asian ideograph
- 0x215f3f: (0x96c0, 0),# East Asian ideograph
- 0x223b7c: (0x67ee, 0),# East Asian ideograph
- 0x223b7d: (0x6aaf, 0),# East Asian ideograph
- 0x223b7e: (0x67b2, 0),# East Asian ideograph
- 0x225f40: (0x761b, 0),# East Asian ideograph
- 0x6f577e: (0xc970, 0),# Korean hangul
- 0x6f533b: (0xc158, 0),# Korean hangul
- 0x213d63: (0x5f8a, 0),# East Asian ideograph
- 0x6f4b7e: (0xb125, 0),# Korean hangul
- 0x2d5d2f: (0x9196, 0),# East Asian ideograph
- 0x2d5f43: (0x9ceb, 0),# East Asian ideograph
- 0x6f2459: (0x3137, 0),# Korean hangul
- 0x293b42: (0x8f75, 0),# East Asian ideograph
- 0x235f45: (0x9f22, 0),# East Asian ideograph
- 0x2d5f46: (0x96bd, 0),# East Asian ideograph
- 0x6f533c: (0xc167, 0),# Korean hangul
- 0x213d64: (0x5f87, 0),# East Asian ideograph
- 0x225f47: (0x7619, 0),# East Asian ideograph
- 0x234562: (0x935f, 0),# East Asian ideograph
- 0x235f48: (0x9f2b, 0),# East Asian ideograph
- 0x2e604a: (0x7690, 0),# East Asian ideograph
- 0x235f49: (0x9f26, 0),# East Asian ideograph
- 0x225270: (0x7144, 0),# East Asian ideograph
- 0x6f5d65: (0xd72d, 0),# Korean hangul
- 0x224e2d: (0x6fc9, 0),# East Asian ideograph
- 0x2d4b3f: (0x73ce, 0),# East Asian ideograph
- 0x275f4b: (0x6742, 0),# East Asian ideograph
- 0x276234: (0x9e29, 0),# East Asian ideograph
- 0x6f533d: (0xc168, 0),# Korean hangul
- 0x225f4c: (0x761d, 0),# East Asian ideograph
- 0x275f4d: (0x96cf, 0),# East Asian ideograph
- 0x6f5773: (0xc90f, 0),# Korean hangul
- 0x6f245b: (0x3141, 0),# Korean hangul
- 0x275f4e: (0x53cc, 0),# East Asian ideograph
- 0x216c48: (0x52d6, 0),# East Asian ideograph
- 0x275f4f: (0x79bb, 0),# East Asian ideograph
- 0x215f50: (0x96e3, 0),# East Asian ideograph (variant of 4B5F50 which maps to 96E3)
- 0x6f533e: (0xc170, 0),# Korean hangul
- 0x213d66: (0x5f92, 0),# East Asian ideograph
- 0x215f51: (0x96e8, 0),# East Asian ideograph
- 0x225f3b: (0x7610, 0),# East Asian ideograph
- 0x284345: (0x680c, 0),# East Asian ideograph
- 0x6f5848: (0xca08, 0),# Korean hangul
- 0x6f245c: (0x3142, 0),# Korean hangul
- 0x235f53: (0x9f2f, 0),# East Asian ideograph
- 0x225f54: (0x762d, 0),# East Asian ideograph
- 0x234571: (0x936b, 0),# East Asian ideograph
- 0x6f505b: (0xbbc0, 0),# Korean hangul
- 0x275f55: (0x7535, 0),# East Asian ideograph
- 0x6f533f: (0xc18c, 0),# Korean hangul
- 0x213d67: (0x5f91, 0),# East Asian ideograph
- 0x6f4d64: (0xb4e0, 0),# Korean hangul
- 0x213924: (0x5922, 0),# East Asian ideograph
- 0x6f245d: (0x3145, 0),# Korean hangul
- 0x275128: (0x7ecb, 0),# East Asian ideograph
- 0x4b5f58: (0xf9b2, 0),# East Asian ideograph
- 0x227049: (0x7d0f, 0),# East Asian ideograph
- 0x224e30: (0x6fa0, 0),# East Asian ideograph
- 0x293338: (0x8bfd, 0),# East Asian ideograph
- 0x2e715a: (0x7e27, 0),# East Asian ideograph
- 0x215f5a: (0x9707, 0),# East Asian ideograph
- 0x6f5340: (0xc18d, 0),# Korean hangul
- 0x223c21: (0x67b9, 0),# East Asian ideograph
- 0x213c22: (0x5d11, 0),# East Asian ideograph
- 0x215b3e: (0x8efe, 0),# East Asian ideograph
- 0x223c24: (0x67e3, 0),# East Asian ideograph
- 0x213c25: (0x5d14, 0),# East Asian ideograph
- 0x233c26: (0x8f39, 0),# East Asian ideograph
- 0x233c27: (0x8f34, 0),# East Asian ideograph
- 0x273c28: (0x5c9a, 0),# East Asian ideograph
- 0x223c29: (0x67e2, 0),# East Asian ideograph
- 0x273c2a: (0x5d2d, 0),# East Asian ideograph
- 0x273c2b: (0x5c96, 0),# East Asian ideograph
- 0x213c2c: (0x5d9d, 0),# East Asian ideograph
- 0x273c2d: (0x5c7f, 0),# East Asian ideograph
- 0x273c2e: (0x5cb3, 0),# East Asian ideograph
- 0x223c2f: (0x67e7, 0),# East Asian ideograph
- 0x223c30: (0x6849, 0),# East Asian ideograph
- 0x223c31: (0x683e, 0),# East Asian ideograph
- 0x273c32: (0x5dc5, 0),# East Asian ideograph
- 0x214a32: (0x71ec, 0),# East Asian ideograph
- 0x213c34: (0x5ddd, 0),# East Asian ideograph
- 0x215f5e: (0x9711, 0),# East Asian ideograph
- 0x223c36: (0x6814, 0),# East Asian ideograph
- 0x223c37: (0x684b, 0),# East Asian ideograph
- 0x223c38: (0x681e, 0),# East Asian ideograph
- 0x213c39: (0x5de7, 0),# East Asian ideograph
- 0x213c3a: (0x5de6, 0),# East Asian ideograph
- 0x223c3b: (0x6833, 0),# East Asian ideograph
- 0x213c3c: (0x5dee, 0),# East Asian ideograph
- 0x233c3d: (0x8f52, 0),# East Asian ideograph
- 0x213c3e: (0x5df2, 0),# East Asian ideograph
- 0x213c3f: (0x5df3, 0),# East Asian ideograph
- 0x223c40: (0x6831, 0),# East Asian ideograph
- 0x215f60: (0x9716, 0),# East Asian ideograph
- 0x223c42: (0x6835, 0),# East Asian ideograph
- 0x223c43: (0x683b, 0),# East Asian ideograph
- 0x223c44: (0x684e, 0),# East Asian ideograph
- 0x213c46: (0x5e06, 0),# East Asian ideograph
- 0x234124: (0x918d, 0),# East Asian ideograph
- 0x233c48: (0x8f56, 0),# East Asian ideograph
- 0x213c49: (0x5e1a, 0),# East Asian ideograph
- 0x223c4a: (0x684d, 0),# East Asian ideograph
- 0x233c4b: (0x8f55, 0),# East Asian ideograph
- 0x233c4c: (0x8f58, 0),# East Asian ideograph
- 0x215f62: (0x970d, 0),# East Asian ideograph
- 0x233c4e: (0x8f5e, 0),# East Asian ideograph
- 0x273c4f: (0x5e05, 0),# East Asian ideograph
- 0x273c51: (0x5e08, 0),# East Asian ideograph
- 0x273c52: (0x5e10, 0),# East Asian ideograph
- 0x273c53: (0x5e26, 0),# East Asian ideograph
- 0x213c54: (0x5e38, 0),# East Asian ideograph
- 0x223c55: (0x685d, 0),# East Asian ideograph
- 0x223c56: (0x685e, 0),# East Asian ideograph
- 0x233c57: (0x8f62, 0),# East Asian ideograph
- 0x273c58: (0x5e27, 0),# East Asian ideograph
- 0x233c59: (0x8f63, 0),# East Asian ideograph
- 0x233c5a: (0x8f64, 0),# East Asian ideograph
- 0x213c5b: (0x5e54, 0),# East Asian ideograph
- 0x273c5c: (0x5e3c, 0),# East Asian ideograph
- 0x213c5d: (0x5e55, 0),# East Asian ideograph
- 0x273c5e: (0x5e01, 0),# East Asian ideograph
- 0x213c5f: (0x5e62, 0),# East Asian ideograph
- 0x273c60: (0x5e1c, 0),# East Asian ideograph
- 0x273c61: (0x5e2e, 0),# East Asian ideograph
- 0x213c63: (0x5e73, 0),# East Asian ideograph
- 0x6f5177: (0xbe5a, 0),# Korean hangul
- 0x223c65: (0x685a, 0),# East Asian ideograph
- 0x233c66: (0x8fa5, 0),# East Asian ideograph
- 0x273c67: (0x5e72, 0),# East Asian ideograph (Version J extension)
- 0x223c68: (0x686b, 0),# East Asian ideograph
- 0x223c69: (0x686c, 0),# East Asian ideograph
- 0x213c6a: (0x5e7d, 0),# East Asian ideograph
- 0x223c6b: (0x6879, 0),# East Asian ideograph
- 0x233c6c: (0x8fb5, 0),# East Asian ideograph
- 0x213c6d: (0x5e87, 0),# East Asian ideograph
- 0x233c6e: (0x8fbb, 0),# East Asian ideograph
- 0x213c6f: (0x5e9a, 0),# East Asian ideograph
- 0x233c70: (0x8fbc, 0),# East Asian ideograph
- 0x215f68: (0x9738, 0),# East Asian ideograph
- 0x223c72: (0x687e, 0),# East Asian ideograph
- 0x213c73: (0x5e95, 0),# East Asian ideograph
- 0x233c74: (0x8fbf, 0),# East Asian ideograph
- 0x233c75: (0x8fd2, 0),# East Asian ideograph
- 0x273c76: (0x5e93, 0),# East Asian ideograph
- 0x225f69: (0x7647, 0),# East Asian ideograph
- 0x213c78: (0x5ead, 0),# East Asian ideograph
- 0x213c79: (0x5eb7, 0),# East Asian ideograph
- 0x233c7a: (0x8fca, 0),# East Asian ideograph
- 0x233c7b: (0x8fd3, 0),# East Asian ideograph
- 0x213c7c: (0x5eb5, 0),# East Asian ideograph
- 0x215f6a: (0x9732, 0),# East Asian ideograph
- 0x273c7e: (0x5395, 0),# East Asian ideograph
- 0x275f6b: (0x9701, 0),# East Asian ideograph
- 0x6f5849: (0xca09, 0),# Korean hangul
- 0x6f2461: (0x314b, 0),# Korean hangul
- 0x275725: (0x8682, 0),# East Asian ideograph
- 0x275122: (0x7eb3, 0),# East Asian ideograph
- 0x6f5273: (0xc0d8, 0),# Korean hangul
- 0x235f6d: (0x9f45, 0),# East Asian ideograph
- 0x4c476e: (0x6cad, 0),# East Asian ideograph
- 0x225a2c: (0x7432, 0),# East Asian ideograph
- 0x225f6e: (0x764d, 0),# East Asian ideograph
- 0x6f2528: (0x3163, 0),# Korean hangul
- 0x2f312b: (0x89bb, 0),# East Asian ideograph
- 0x235f6f: (0x9f46, 0),# East Asian ideograph
- 0x4b5f70: (0x9752, 0),# East Asian ideograph
- 0x6f2462: (0x314c, 0),# Korean hangul
- 0x235f71: (0x9f48, 0),# East Asian ideograph
- 0x275123: (0x7ea7, 0),# East Asian ideograph
- 0x214a36: (0x720d, 0),# East Asian ideograph
- 0x224e35: (0x6fb4, 0),# East Asian ideograph
- 0x335234: (0x99e1, 0),# East Asian ideograph
- 0x2e3d62: (0x684a, 0),# East Asian ideograph
- 0x235f73: (0x9f49, 0),# East Asian ideograph
- 0x69253b: (0x30bb, 0),# Katakana letter SE
- 0x276230: (0x9e20, 0),# East Asian ideograph
- 0x23456b: (0x9374, 0),# East Asian ideograph
- 0x215f75: (0x9760, 0),# East Asian ideograph
- 0x692431: (0x3051, 0),# Hiragana letter KE
- 0x274a21: (0x70bd, 0),# East Asian ideograph
- 0x23345f: (0x8b4d, 0),# East Asian ideograph
- 0x224d63: (0x6f5f, 0),# East Asian ideograph
- 0x274a22: (0x7096, 0),# East Asian ideograph
- 0x4d446b: (0x954e, 0),# East Asian ideograph
- 0x6f4a23: (0xadc4, 0),# Korean hangul
- 0x6f5346: (0xc19f, 0),# Korean hangul
- 0x276231: (0x9e22, 0),# East Asian ideograph
- 0x215f79: (0x9768, 0),# East Asian ideograph
- 0x274a24: (0x706f, 0),# East Asian ideograph
- 0x345e3b: (0x80ac, 0),# East Asian ideograph
- 0x215f7a: (0x9769, 0),# East Asian ideograph
- 0x274a25: (0x7116, 0),# East Asian ideograph
- 0x275568: (0x8298, 0),# East Asian ideograph
- 0x215f7b: (0x9776, 0),# East Asian ideograph
- 0x274a26: (0x70e7, 0),# East Asian ideograph
- 0x6f5d67: (0xd73c, 0),# Korean hangul
- 0x215f7c: (0x9774, 0),# East Asian ideograph
- 0x6f4a27: (0xadd3, 0),# Korean hangul
- 0x2e3172: (0x5261, 0),# East Asian ideograph
- 0x276236: (0x9e35, 0),# East Asian ideograph
- 0x2d4a28: (0x8b8c, 0),# East Asian ideograph
- 0x6f5347: (0xc1a1, 0),# Korean hangul
- 0x213d6f: (0x5fa9, 0),# East Asian ideograph
- 0x215f7e: (0x9785, 0),# East Asian ideograph
- 0x33456d: (0x826a, 0),# East Asian ideograph
- 0x6f5178: (0xbe5b, 0),# Korean hangul
- 0x224a2a: (0x6ddf, 0),# East Asian ideograph
- 0x6f2465: (0x3132, 0),# Korean hangul
- 0x275126: (0x7eca, 0),# East Asian ideograph
- 0x23567a: (0x9b95, 0),# East Asian ideograph
- 0x6f4a2c: (0xadf8, 0),# Korean hangul
- 0x224a2d: (0x6dd3, 0),# East Asian ideograph
- 0x6f5348: (0xc1a5, 0),# Korean hangul
- 0x276233: (0x51e4, 0),# East Asian ideograph
- 0x274a2e: (0x8425, 0),# East Asian ideograph
- 0x2e3328: (0x6528, 0),# East Asian ideograph
- 0x6f5d6b: (0xd751, 0),# Korean hangul
- 0x234a2f: (0x9642, 0),# East Asian ideograph
- 0x4b462a: (0x6b74, 0),# East Asian ideograph
- 0x233d21: (0x8fda, 0),# East Asian ideograph
- 0x233d22: (0x8fd5, 0),# East Asian ideograph
- 0x213d23: (0x5ec9, 0),# East Asian ideograph
- 0x213d24: (0x5ec8, 0),# East Asian ideograph
- 0x223d25: (0x686d, 0),# East Asian ideograph
- 0x213d26: (0x5ed6, 0),# East Asian ideograph
- 0x273d27: (0x5e9f, 0),# East Asian ideograph
- 0x213d28: (0x5eda, 0),# East Asian ideograph
- 0x213d29: (0x5edd, 0),# East Asian ideograph
- 0x273d2a: (0x5e7f, 0),# East Asian ideograph
- 0x273d2b: (0x5e99, 0),# East Asian ideograph
- 0x273d2c: (0x5382, 0),# East Asian ideograph
- 0x273d2d: (0x5e9e, 0),# East Asian ideograph
- 0x273d2e: (0x5e90, 0),# East Asian ideograph
- 0x233d2f: (0x8fe4, 0),# East Asian ideograph
- 0x233d30: (0x8fee, 0),# East Asian ideograph
- 0x223d32: (0x688b, 0),# East Asian ideograph
- 0x274a33: (0x70e9, 0),# East Asian ideograph
- 0x213d34: (0x5eff, 0),# East Asian ideograph
- 0x233d35: (0x8ff9, 0),# East Asian ideograph
- 0x213d36: (0x5f04, 0),# East Asian ideograph
- 0x213d37: (0x5f08, 0),# East Asian ideograph
- 0x213d38: (0x5f0a, 0),# East Asian ideograph
- 0x223d39: (0x68a3, 0),# East Asian ideograph
- 0x213d3a: (0x5f12, 0),# East Asian ideograph
- 0x213d3b: (0x5f13, 0),# East Asian ideograph
- 0x233d3c: (0x8ffb, 0),# East Asian ideograph
- 0x213d3d: (0x5f14, 0),# East Asian ideograph
- 0x213d3e: (0x5f18, 0),# East Asian ideograph
- 0x214a35: (0x7206, 0),# East Asian ideograph
- 0x223d40: (0x688f, 0),# East Asian ideograph
- 0x213d41: (0x5f1f, 0),# East Asian ideograph
- 0x213d42: (0x5f26, 0),# East Asian ideograph
- 0x223d43: (0x687b, 0),# East Asian ideograph
- 0x233d44: (0x9011, 0),# East Asian ideograph
- 0x224a36: (0x6ddc, 0),# East Asian ideograph
- 0x213d46: (0x5f31, 0),# East Asian ideograph
- 0x273d47: (0x5f20, 0),# East Asian ideograph
- 0x213d48: (0x5f37, 0),# East Asian ideograph
- 0x233d49: (0x9021, 0),# East Asian ideograph
- 0x233d4a: (0x902d, 0),# East Asian ideograph
- 0x274a37: (0x7089, 0),# East Asian ideograph
- 0x273d4c: (0x5f25, 0),# East Asian ideograph
- 0x273d4d: (0x5f2f, 0),# East Asian ideograph
- 0x233d4e: (0x902c, 0),# East Asian ideograph
- 0x273d4f: (0x6c47, 0),# East Asian ideograph (duplicate simplified)
- 0x223d50: (0x692c, 0),# East Asian ideograph
- 0x274a38: (0x70c2, 0),# East Asian ideograph
- 0x213d52: (0x5f64, 0),# East Asian ideograph
- 0x223d53: (0x690c, 0),# East Asian ideograph
- 0x213d54: (0x5f6c, 0),# East Asian ideograph
- 0x213d55: (0x5f69, 0),# East Asian ideograph
- 0x233d56: (0x9037, 0),# East Asian ideograph
- 0x214a39: (0x7228, 0),# East Asian ideograph
- 0x223d58: (0x68d3, 0),# East Asian ideograph
- 0x213d59: (0x5f71, 0),# East Asian ideograph
- 0x223d5b: (0x690a, 0),# East Asian ideograph
- 0x223d5c: (0x6909, 0),# East Asian ideograph
- 0x233d5d: (0x9052, 0),# East Asian ideograph
- 0x213d5e: (0x5f7f, 0),# East Asian ideograph
- 0x213d5f: (0x5f7c, 0),# East Asian ideograph
- 0x213d60: (0x5f85, 0),# East Asian ideograph
- 0x213d61: (0x5f88, 0),# East Asian ideograph
- 0x223d62: (0x68ec, 0),# East Asian ideograph
- 0x223d63: (0x692a, 0),# East Asian ideograph
- 0x223d64: (0x68ea, 0),# East Asian ideograph
- 0x223d65: (0x681f, 0),# East Asian ideograph
- 0x223d66: (0x7439, 0),# East Asian ideograph
- 0x233d67: (0x9049, 0),# East Asian ideograph
- 0x213d68: (0x5f90, 0),# East Asian ideograph
- 0x234a3c: (0x9651, 0),# East Asian ideograph
- 0x233d6a: (0x9044, 0),# East Asian ideograph
- 0x213d6b: (0x5f99, 0),# East Asian ideograph
- 0x273d6c: (0x4ece, 0),# East Asian ideograph
- 0x223d6e: (0x68d6, 0),# East Asian ideograph
- 0x223d6f: (0x68eb, 0),# East Asian ideograph
- 0x225f48: (0x761e, 0),# East Asian ideograph
- 0x213d71: (0x5faa, 0),# East Asian ideograph
- 0x213d72: (0x5fac, 0),# East Asian ideograph
- 0x223d73: (0x68f1, 0),# East Asian ideograph
- 0x273d74: (0x5f7b, 0),# East Asian ideograph
- 0x233d75: (0x905d, 0),# East Asian ideograph
- 0x273d76: (0x5f81, 0),# East Asian ideograph
- 0x213d77: (0x5fbd, 0),# East Asian ideograph
- 0x233d78: (0x905b, 0),# East Asian ideograph
- 0x223d79: (0x68fc, 0),# East Asian ideograph
- 0x213d7a: (0x5fd9, 0),# East Asian ideograph
- 0x233d7b: (0x906b, 0),# East Asian ideograph
- 0x223d7c: (0x6913, 0),# East Asian ideograph
- 0x213d7d: (0x5fd6, 0),# East Asian ideograph
- 0x224e3c: (0x6fa8, 0),# East Asian ideograph
- 0x23523b: (0x99a3, 0),# East Asian ideograph
- 0x6f534c: (0xc1c4, 0),# Korean hangul
- 0x276237: (0x9e2a, 0),# East Asian ideograph
- 0x224173: (0x6a8d, 0),# East Asian ideograph
- 0x274a42: (0x7237, 0),# East Asian ideograph
- 0x223d30: (0x6898, 0),# East Asian ideograph
- 0x6f5925: (0xcc30, 0),# Korean hangul
- 0x2d5c5b: (0x9089, 0),# East Asian ideograph
- 0x6f4a43: (0xae4c, 0),# Korean hangul
- 0x234a44: (0x965c, 0),# East Asian ideograph
- 0x232435: (0x84da, 0),# East Asian ideograph
- 0x696e5c: (0x91df, 0),# East Asian ideograph
- 0x295929: (0x9ca3, 0),# East Asian ideograph
- 0x213e51: (0x608d, 0),# East Asian ideograph
- 0x274a45: (0x5c14, 0),# East Asian ideograph
- 0x233023: (0x8962, 0),# East Asian ideograph
- 0x214a46: (0x7246, 0),# East Asian ideograph
- 0x6f534d: (0xc1c8, 0),# Korean hangul
- 0x276238: (0x9e2d, 0),# East Asian ideograph
- 0x6f5740: (0xc81d, 0),# Korean hangul
- 0x213932: (0x5947, 0),# East Asian ideograph
- 0x295574: (0x9604, 0),# East Asian ideograph
- 0x6f4a48: (0xae5c, 0),# Korean hangul
- 0x277345: (0x556e, 0),# East Asian ideograph
- 0x6f4a49: (0xae5d, 0),# Korean hangul
- 0x29592a: (0x9cd3, 0),# East Asian ideograph
- 0x4b4352: (0x66f5, 0),# East Asian ideograph
- 0x234a4a: (0x965f, 0),# East Asian ideograph
- 0x234a4b: (0x9656, 0),# East Asian ideograph
- 0x6f534e: (0xc1d7, 0),# Korean hangul
- 0x213d76: (0x5fb5, 0),# East Asian ideograph
- 0x274a4c: (0x724d, 0),# East Asian ideograph
- 0x6f4a4d: (0xae65, 0),# Korean hangul
- 0x6f5771: (0xc90c, 0),# Korean hangul
- 0x295928: (0x9cd5, 0),# East Asian ideograph
- 0x6f4a4e: (0xae68, 0),# Korean hangul
- 0x334050: (0x62d5, 0),# East Asian ideograph
- 0x23523e: (0x99a6, 0),# East Asian ideograph
- 0x4c2539: (0x5d73, 0),# East Asian ideograph
- 0x4d3363: (0x8c25, 0),# East Asian ideograph
- 0x224a50: (0x6e27, 0),# East Asian ideograph
- 0x6f534f: (0xc1e0, 0),# Korean hangul
- 0x27623a: (0x9e33, 0),# East Asian ideograph
- 0x4b485f: (0x6ede, 0),# East Asian ideograph (variant of 27485F which maps to 6EDE)
- 0x234a51: (0x966c, 0),# East Asian ideograph
- 0x23235c: (0x84b4, 0),# East Asian ideograph
- 0x285a47: (0x73ae, 0),# East Asian ideograph
- 0x2d3165: (0x349e, 0),# East Asian ideograph (not found in unified han)
- 0x6f4a52: (0xae78, 0),# Korean hangul
- 0x275571: (0x848b, 0),# East Asian ideograph
- 0x274a53: (0x5b83, 0),# East Asian ideograph
- 0x227059: (0x7d35, 0),# East Asian ideograph
- 0x33523f: (0x8b71, 0),# East Asian ideograph
- 0x473422: (0x8c2a, 0),# East Asian ideograph
- 0x224a55: (0x6e49, 0),# East Asian ideograph
- 0x213d78: (0x5fc3, 0),# East Asian ideograph
- 0x213935: (0x5951, 0),# East Asian ideograph
- 0x223d34: (0x686f, 0),# East Asian ideograph
- 0x4b3248: (0x50b2, 0),# East Asian ideograph (not in Unicode)
- 0x6f4a57: (0xae84, 0),# Korean hangul
- 0x224a58: (0x6e3c, 0),# East Asian ideograph
- 0x6f5d69: (0xd749, 0),# Korean hangul
- 0x6f4a59: (0xaebc, 0),# Korean hangul
- 0x274a5a: (0x7275, 0),# East Asian ideograph
- 0x6f5351: (0xc1e8, 0),# Korean hangul
- 0x27623c: (0x9e3f, 0),# East Asian ideograph
- 0x223e21: (0x6907, 0),# East Asian ideograph
- 0x213e22: (0x5feb, 0),# East Asian ideograph
- 0x213e23: (0x5fe0, 0),# East Asian ideograph
- 0x213e24: (0x5ff1, 0),# East Asian ideograph
- 0x233e25: (0x906f, 0),# East Asian ideograph
- 0x233e26: (0x9079, 0),# East Asian ideograph
- 0x213e27: (0x5ff5, 0),# East Asian ideograph
- 0x233e28: (0x9076, 0),# East Asian ideograph
- 0x213e29: (0x6014, 0),# East Asian ideograph
- 0x223e2a: (0x68de, 0),# East Asian ideograph
- 0x223e2b: (0x691b, 0),# East Asian ideograph
- 0x233e2c: (0x9085, 0),# East Asian ideograph
- 0x223e2d: (0x68fb, 0),# East Asian ideograph
- 0x213e2e: (0x601d, 0),# East Asian ideograph
- 0x234a5d: (0x967b, 0),# East Asian ideograph
- 0x213e30: (0x6021, 0),# East Asian ideograph
- 0x213e31: (0x6020, 0),# East Asian ideograph
- 0x213e32: (0x6028, 0),# East Asian ideograph
- 0x223e33: (0x68e1, 0),# East Asian ideograph
- 0x213e34: (0x6027, 0),# East Asian ideograph
- 0x214a5e: (0x7296, 0),# East Asian ideograph
- 0x213e36: (0x6015, 0),# East Asian ideograph
- 0x223e37: (0x68d1, 0),# East Asian ideograph
- 0x223e38: (0x68d0, 0),# East Asian ideograph
- 0x223e39: (0x6908, 0),# East Asian ideograph
- 0x233e3a: (0x908b, 0),# East Asian ideograph
- 0x213e3b: (0x6043, 0),# East Asian ideograph
- 0x213e3c: (0x6065, 0),# East Asian ideograph
- 0x213e3d: (0x6050, 0),# East Asian ideograph
- 0x223e3e: (0x68e8, 0),# East Asian ideograph
- 0x223e3f: (0x68f0, 0),# East Asian ideograph
- 0x223e40: (0x68c3, 0),# East Asian ideograph
- 0x214a60: (0x729b, 0),# East Asian ideograph
- 0x235164: (0x9940, 0),# East Asian ideograph
- 0x233e43: (0x909b, 0),# East Asian ideograph
- 0x233e44: (0x909c, 0),# East Asian ideograph
- 0x213e45: (0x606f, 0),# East Asian ideograph
- 0x223e46: (0x68d4, 0),# East Asian ideograph
- 0x274a61: (0x728a, 0),# East Asian ideograph
- 0x233e48: (0x90a1, 0),# East Asian ideograph
- 0x223e49: (0x68c6, 0),# East Asian ideograph
- 0x213e4a: (0x608c, 0),# East Asian ideograph
- 0x223e4b: (0x68c7, 0),# East Asian ideograph
- 0x213e4c: (0x607f, 0),# East Asian ideograph
- 0x214a62: (0x72a7, 0),# East Asian ideograph
- 0x213e4e: (0x609a, 0),# East Asian ideograph
- 0x213e4f: (0x6096, 0),# East Asian ideograph
- 0x213e50: (0x6084, 0),# East Asian ideograph
- 0x233e51: (0x90a8, 0),# East Asian ideograph
- 0x224828: (0x6cce, 0),# East Asian ideograph
- 0x234a63: (0x9684, 0),# East Asian ideograph
- 0x233e54: (0x90a0, 0),# East Asian ideograph
- 0x223e55: (0x6938, 0),# East Asian ideograph
- 0x213e56: (0x60a8, 0),# East Asian ideograph
- 0x273e57: (0x5ff0, 0),# East Asian ideograph
- 0x233e58: (0x90af, 0),# East Asian ideograph
- 0x233e59: (0x90b3, 0),# East Asian ideograph
- 0x6f4c5d: (0xb2a1, 0),# Korean hangul
- 0x213e5b: (0x60c5, 0),# East Asian ideograph (variant of 4B3E5B which maps to 60C5)
- 0x273e5c: (0x95f7, 0),# East Asian ideograph
- 0x223e5d: (0x6958, 0),# East Asian ideograph
- 0x273e5e: (0x6005, 0),# East Asian ideograph
- 0x213e5f: (0x60bb, 0),# East Asian ideograph
- 0x213e60: (0x60e0, 0),# East Asian ideograph
- 0x233e61: (0x90b2, 0),# East Asian ideograph
- 0x213e62: (0x60dc, 0),# East Asian ideograph
- 0x213e63: (0x60d8, 0),# East Asian ideograph
- 0x223e64: (0x6945, 0),# East Asian ideograph
- 0x223e65: (0x695d, 0),# East Asian ideograph
- 0x223e66: (0x6932, 0),# East Asian ideograph
- 0x213e67: (0x60c6, 0),# East Asian ideograph
- 0x233e68: (0x90c9, 0),# East Asian ideograph
- 0x223e69: (0x696e, 0),# East Asian ideograph
- 0x223e6a: (0x6963, 0),# East Asian ideograph
- 0x223e6b: (0x6948, 0),# East Asian ideograph
- 0x273e6c: (0x60ec, 0),# East Asian ideograph
- 0x213e6d: (0x60f3, 0),# East Asian ideograph
- 0x223e6e: (0x6939, 0),# East Asian ideograph
- 0x233e6f: (0x90d5, 0),# East Asian ideograph
- 0x273e70: (0x607b, 0),# East Asian ideograph
- 0x214a68: (0x72c0, 0),# East Asian ideograph
- 0x233e72: (0x90be, 0),# East Asian ideograph
- 0x223e73: (0x6937, 0),# East Asian ideograph
- 0x213e74: (0x60f9, 0),# East Asian ideograph
- 0x213e75: (0x6123, 0),# East Asian ideograph
- 0x213e76: (0x60f4, 0),# East Asian ideograph
- 0x273e77: (0x7231, 0),# East Asian ideograph
- 0x233e78: (0x90c8, 0),# East Asian ideograph
- 0x233e79: (0x90c3, 0),# East Asian ideograph
- 0x223e7a: (0x696c, 0),# East Asian ideograph
- 0x223e7b: (0x694e, 0),# East Asian ideograph
- 0x213e7c: (0x6109, 0),# East Asian ideograph
- 0x224a6a: (0x6e51, 0),# East Asian ideograph
- 0x273e7e: (0x607c, 0),# East Asian ideograph
- 0x234137: (0x91a8, 0),# East Asian ideograph
- 0x224a6b: (0x6e44, 0),# East Asian ideograph
- 0x275576: (0x8361, 0),# East Asian ideograph
- 0x27734c: (0x5456, 0),# East Asian ideograph
- 0x21385b: (0x5879, 0),# East Asian ideograph
- 0x293b5b: (0x8f81, 0),# East Asian ideograph
- 0x234a6d: (0x9682, 0),# East Asian ideograph
- 0x234a6e: (0x9683, 0),# East Asian ideograph
- 0x6f5355: (0xc1fc, 0),# Korean hangul
- 0x335238: (0x8989, 0),# East Asian ideograph
- 0x694c68: (0x5301, 0),# East Asian ideograph
- 0x21393a: (0x5958, 0),# East Asian ideograph
- 0x2d5c5a: (0x8fe9, 0),# East Asian ideograph
- 0x2d3a41: (0x5afa, 0),# East Asian ideograph
- 0x214a70: (0x72f9, 0),# East Asian ideograph
- 0x6f4a71: (0xaf48, 0),# Korean hangul
- 0x213f2d: (0x6177, 0),# East Asian ideograph
- 0x295932: (0x9cd8, 0),# East Asian ideograph
- 0x274a72: (0x72c8, 0),# East Asian ideograph
- 0x23302c: (0x895c, 0),# East Asian ideograph
- 0x276239: (0x9e2f, 0),# East Asian ideograph
- 0x6f4a73: (0xaf4c, 0),# Korean hangul
- 0x6f5356: (0xc1fd, 0),# Korean hangul
- 0x276241: (0x9e45, 0),# East Asian ideograph
- 0x21393b: (0x595a, 0),# East Asian ideograph
- 0x4b324e: (0x50e7, 0),# East Asian ideograph (variant of 21324E which maps to 50E7)
- 0x2e4e72: (0x6f74, 0),# East Asian ideograph
- 0x6f5774: (0xc911, 0),# Korean hangul
- 0x6f4a75: (0xaf5c, 0),# Korean hangul
- 0x6f4a76: (0xaf5d, 0),# Korean hangul
- 0x213521: (0x53c9, 0),# East Asian ideograph
- 0x224a77: (0x6e4e, 0),# East Asian ideograph
- 0x23302d: (0x895d, 0),# East Asian ideograph
- 0x4b4a78: (0x72f0, 0),# East Asian ideograph
- 0x6f5357: (0xc200, 0),# Korean hangul
- 0x213523: (0x53ca, 0),# East Asian ideograph
- 0x276242: (0x9e51, 0),# East Asian ideograph
- 0x275d49: (0x94ae, 0),# East Asian ideograph
- 0x274a79: (0x72b9, 0),# East Asian ideograph
- 0x223d3b: (0x6874, 0),# East Asian ideograph
- 0x234a7a: (0x9697, 0),# East Asian ideograph
- 0x224d68: (0x6f5d, 0),# East Asian ideograph
- 0x6f4a7b: (0xaf84, 0),# Korean hangul
- 0x213526: (0x53d4, 0),# East Asian ideograph
- 0x227061: (0x7d3a, 0),# East Asian ideograph
- 0x6f4a7c: (0xaf88, 0),# Korean hangul
- 0x225a30: (0x7415, 0),# East Asian ideograph
- 0x213527: (0x53d7, 0),# East Asian ideograph
- 0x227c49: (0x82bc, 0),# East Asian ideograph
- 0x6f4a7d: (0xaf90, 0),# Korean hangul
- 0x6f5358: (0xc204, 0),# Korean hangul
- 0x6f7623: (0x317f, 0),# Korean hangul
- 0x274a7e: (0x72f1, 0),# East Asian ideograph
- 0x223d3c: (0x6875, 0),# East Asian ideograph
- 0x227d2b: (0x8344, 0),# East Asian ideograph
- 0x21352a: (0x66fc, 0),# East Asian ideograph
- 0x213936: (0x594e, 0),# East Asian ideograph
- 0x21352b: (0x53e2, 0),# East Asian ideograph
- 0x2d4b5b: (0x78af, 0),# East Asian ideograph
- 0x6f5359: (0xc20d, 0),# Korean hangul
- 0x23352d: (0x8b95, 0),# East Asian ideograph
- 0x276244: (0x9e4c, 0),# East Asian ideograph
- 0x23352e: (0x8b94, 0),# East Asian ideograph
- 0x3a7970: (0x81d5, 0),# East Asian ideograph
- 0x21352f: (0x53ee, 0),# East Asian ideograph
- 0x6f5331: (0xc138, 0),# Korean hangul
- 0x275138: (0x7edc, 0),# East Asian ideograph
- 0x223f21: (0x6952, 0),# East Asian ideograph
- 0x213f22: (0x6168, 0),# East Asian ideograph
- 0x233f23: (0x90df, 0),# East Asian ideograph
- 0x213f24: (0x613c, 0),# East Asian ideograph
- 0x223f25: (0x695b, 0),# East Asian ideograph
- 0x233f26: (0x90e2, 0),# East Asian ideograph
- 0x223531: (0x64eb, 0),# East Asian ideograph
- 0x233f28: (0x90db, 0),# East Asian ideograph
- 0x273f29: (0x5ffe, 0),# East Asian ideograph
- 0x233f2a: (0x90dc, 0),# East Asian ideograph
- 0x273f2b: (0x6006, 0),# East Asian ideograph
- 0x233f2c: (0x90d7, 0),# East Asian ideograph
- 0x233f2d: (0x90e4, 0),# East Asian ideograph
- 0x233f2e: (0x90ef, 0),# East Asian ideograph
- 0x233f2f: (0x90ea, 0),# East Asian ideograph
- 0x213f30: (0x6170, 0),# East Asian ideograph
- 0x213f31: (0x615a, 0),# East Asian ideograph
- 0x233f32: (0x90f0, 0),# East Asian ideograph
- 0x233f33: (0x90f4, 0),# East Asian ideograph
- 0x233f34: (0x90f2, 0),# East Asian ideograph
- 0x223f35: (0x6978, 0),# East Asian ideograph
- 0x273f36: (0x8651, 0),# East Asian ideograph
- 0x223f37: (0x697b, 0),# East Asian ideograph
- 0x273f38: (0x60e8, 0),# East Asian ideograph
- 0x273f39: (0x60ef, 0),# East Asian ideograph
- 0x273f3a: (0x6078, 0),# East Asian ideograph
- 0x273f3b: (0x6002, 0),# East Asian ideograph
- 0x273f3c: (0x6b32, 0),# East Asian ideograph
- 0x223f3d: (0x6944, 0),# East Asian ideograph
- 0x233f3e: (0x90eb, 0),# East Asian ideograph
- 0x233f3f: (0x90f3, 0),# East Asian ideograph
- 0x213f40: (0x618e, 0),# East Asian ideograph
- 0x273f41: (0x60af, 0),# East Asian ideograph
- 0x273f42: (0x6124, 0),# East Asian ideograph
- 0x213f43: (0x61ac, 0),# East Asian ideograph
- 0x273f44: (0x60ee, 0),# East Asian ideograph
- 0x273f45: (0x6187, 0),# East Asian ideograph
- 0x233f46: (0x90fc, 0),# East Asian ideograph
- 0x273f47: (0x60eb, 0),# East Asian ideograph
- 0x273f48: (0x5fc6, 0),# East Asian ideograph
- 0x233f49: (0x9104, 0),# East Asian ideograph
- 0x273f4a: (0x5e94, 0),# East Asian ideograph
- 0x213537: (0x53eb, 0),# East Asian ideograph
- 0x233f4c: (0x9106, 0),# East Asian ideograph
- 0x213f4d: (0x61c2, 0),# East Asian ideograph
- 0x273f4e: (0x6073, 0),# East Asian ideograph
- 0x213f4f: (0x61c8, 0),# East Asian ideograph
- 0x213940: (0x596a, 0),# East Asian ideograph
- 0x232368: (0x84cd, 0),# East Asian ideograph
- 0x213f52: (0x61e6, 0),# East Asian ideograph
- 0x213f53: (0x61f2, 0),# East Asian ideograph (variant of 4B3F53 which maps to 61F2)
- 0x273f54: (0x6000, 0),# East Asian ideograph
- 0x273f55: (0x61d2, 0),# East Asian ideograph
- 0x273f56: (0x60ac, 0),# East Asian ideograph
- 0x233f57: (0x910f, 0),# East Asian ideograph
- 0x273f58: (0x5fcf, 0),# East Asian ideograph
- 0x273f59: (0x6151, 0),# East Asian ideograph
- 0x233f5a: (0x9116, 0),# East Asian ideograph
- 0x273f5b: (0x60e7, 0),# East Asian ideograph
- 0x233f5c: (0x9114, 0),# East Asian ideograph
- 0x23353a: (0x8b9f, 0),# East Asian ideograph
- 0x213f5e: (0x620a, 0),# East Asian ideograph
- 0x213f5f: (0x620e, 0),# East Asian ideograph
- 0x223f60: (0x69bc, 0),# East Asian ideograph
- 0x223f61: (0x69a7, 0),# East Asian ideograph
- 0x233f62: (0x9123, 0),# East Asian ideograph (Version J extension)
- 0x233f63: (0x9118, 0),# East Asian ideograph
- 0x233f64: (0x911c, 0),# East Asian ideograph
- 0x213f65: (0x6216, 0),# East Asian ideograph
- 0x233f66: (0x9120, 0),# East Asian ideograph
- 0x233f67: (0x9122, 0),# East Asian ideograph
- 0x223f68: (0x69d9, 0),# East Asian ideograph
- 0x213f69: (0x621f, 0),# East Asian ideograph
- 0x223f6a: (0x698e, 0),# East Asian ideograph
- 0x213f6b: (0x6222, 0),# East Asian ideograph
- 0x213f6c: (0x622a, 0),# East Asian ideograph
- 0x223f6d: (0x69d6, 0),# East Asian ideograph
- 0x273f6e: (0x6218, 0),# East Asian ideograph
- 0x273f6f: (0x620f, 0),# East Asian ideograph
- 0x213f70: (0x6234, 0),# East Asian ideograph
- 0x233f71: (0x9124, 0),# East Asian ideograph
- 0x233f72: (0x911a, 0),# East Asian ideograph
- 0x213f73: (0x623f, 0),# East Asian ideograph
- 0x233f74: (0x9125, 0),# East Asian ideograph
- 0x223f75: (0x69a5, 0),# East Asian ideograph
- 0x213f76: (0x6241, 0),# East Asian ideograph
- 0x233f77: (0x912f, 0),# East Asian ideograph
- 0x223f78: (0x69d1, 0),# East Asian ideograph
- 0x27513b: (0x4e1d, 0),# East Asian ideograph
- 0x223f7a: (0x69f6, 0),# East Asian ideograph
- 0x21753f: (0x579e, 0),# East Asian ideograph
- 0x213f7d: (0x6253, 0),# East Asian ideograph
- 0x223f7e: (0x69d5, 0),# East Asian ideograph
- 0x217540: (0x57b5, 0),# East Asian ideograph
- 0x6f535d: (0xc21c, 0),# Korean hangul
- 0x276248: (0x9e5e, 0),# East Asian ideograph
- 0x223542: (0x64f7, 0),# East Asian ideograph
- 0x213543: (0x540c, 0),# East Asian ideograph
- 0x213544: (0x540a, 0),# East Asian ideograph
- 0x29593a: (0x9c85, 0),# East Asian ideograph
- 0x23524d: (0x99bf, 0),# East Asian ideograph
- 0x213545: (0x540d, 0),# East Asian ideograph
- 0x6f535e: (0xc21f, 0),# Korean hangul
- 0x223546: (0x6504, 0),# East Asian ideograph
- 0x234e5e: (0x97e0, 0),# East Asian ideograph
- 0x2d3547: (0x55ab, 0),# East Asian ideograph
- 0x6f5c66: (0xd560, 0),# Korean hangul
- 0x234141: (0x91af, 0),# East Asian ideograph
- 0x692436: (0x3056, 0),# Hiragana letter ZA
- 0x696868: (0x84d9, 0),# East Asian ideograph
- 0x233478: (0x8b85, 0),# East Asian ideograph
- 0x4c354a: (0x64b8, 0),# East Asian ideograph
- 0x22587d: (0x73cc, 0),# East Asian ideograph
- 0x22354b: (0x64fd, 0),# East Asian ideograph
- 0x333021: (0x58f9, 0),# East Asian ideograph
- 0x225f5c: (0x7633, 0),# East Asian ideograph
- 0x217933: (0x5940, 0),# East Asian ideograph
- 0x234142: (0x91b1, 0),# East Asian ideograph
- 0x23346b: (0x8b6d, 0),# East Asian ideograph
- 0x23354d: (0x8c4b, 0),# East Asian ideograph
- 0x2d7a44: (0x598d, 0),# East Asian ideograph
- 0x27513e: (0x7ee2, 0),# East Asian ideograph
- 0x23472c: (0x93d3, 0),# East Asian ideograph
- 0x22354f: (0x6508, 0),# East Asian ideograph
- 0x395e42: (0x9274, 0),# East Asian ideograph
- 0x233550: (0x8c4f, 0),# East Asian ideograph
- 0x3f5f35: (0x6b9e, 0),# East Asian ideograph
- 0x39303a: (0x5efc, 0),# East Asian ideograph
- 0x213552: (0x543e, 0),# East Asian ideograph
- 0x27513f: (0x7ee5, 0),# East Asian ideograph
- 0x213553: (0x5427, 0),# East Asian ideograph
- 0x213554: (0x5440, 0),# East Asian ideograph
- 0x274476: (0x6808, 0),# East Asian ideograph
- 0x233555: (0x8c5c, 0),# East Asian ideograph
- 0x6f5a26: (0xcee8, 0),# Korean hangul
- 0x213556: (0x5446, 0),# East Asian ideograph
- 0x217557: (0x57a1, 0),# East Asian ideograph
- 0x213266: (0x512a, 0),# East Asian ideograph
- 0x293c30: (0x8f98, 0),# East Asian ideograph
- 0x224b38: (0x6e69, 0),# East Asian ideograph
- 0x6f5332: (0xc139, 0),# Korean hangul
- 0x293725: (0x8d3d, 0),# East Asian ideograph
- 0x287229: (0x7f17, 0),# East Asian ideograph
- 0x223559: (0x651a, 0),# East Asian ideograph
- 0x6f5362: (0xc22b, 0),# Korean hangul
- 0x27624d: (0x9e6d, 0),# East Asian ideograph
- 0x214021: (0x6252, 0),# East Asian ideograph
- 0x214022: (0x625b, 0),# East Asian ideograph
- 0x214023: (0x6263, 0),# East Asian ideograph
- 0x214024: (0x6258, 0),# East Asian ideograph
- 0x214025: (0x6296, 0),# East Asian ideograph
- 0x214026: (0x6297, 0),# East Asian ideograph
- 0x214027: (0x6292, 0),# East Asian ideograph
- 0x214028: (0x6276, 0),# East Asian ideograph
- 0x214029: (0x6289, 0),# East Asian ideograph
- 0x21402a: (0x627f, 0),# East Asian ideograph
- 0x21402b: (0x6279, 0),# East Asian ideograph
- 0x21402c: (0x6280, 0),# East Asian ideograph
- 0x21402d: (0x628a, 0),# East Asian ideograph
- 0x21402e: (0x626d, 0),# East Asian ideograph
- 0x21402f: (0x627c, 0),# East Asian ideograph
- 0x214030: (0x627e, 0),# East Asian ideograph
- 0x214031: (0x626f, 0),# East Asian ideograph
- 0x214032: (0x6284, 0),# East Asian ideograph
- 0x214033: (0x6295, 0),# East Asian ideograph
- 0x214034: (0x6291, 0),# East Asian ideograph
- 0x214035: (0x6298, 0),# East Asian ideograph
- 0x214036: (0x626e, 0),# East Asian ideograph
- 0x214037: (0x6273, 0),# East Asian ideograph
- 0x214038: (0x6293, 0),# East Asian ideograph
- 0x214039: (0x62c9, 0),# East Asian ideograph
- 0x21403a: (0x62c4, 0),# East Asian ideograph
- 0x21403b: (0x62cc, 0),# East Asian ideograph
- 0x21403c: (0x62a8, 0),# East Asian ideograph
- 0x21403d: (0x62dc, 0),# East Asian ideograph
- 0x21403e: (0x62bf, 0),# East Asian ideograph
- 0x21403f: (0x62c2, 0),# East Asian ideograph
- 0x214040: (0x62b9, 0),# East Asian ideograph
- 0x214041: (0x62d2, 0),# East Asian ideograph
- 0x214042: (0x62d3, 0),# East Asian ideograph
- 0x214043: (0x62db, 0),# East Asian ideograph
- 0x214044: (0x62ab, 0),# East Asian ideograph
- 0x214045: (0x62cb, 0),# East Asian ideograph
- 0x214046: (0x62d4, 0),# East Asian ideograph
- 0x214047: (0x62bd, 0),# East Asian ideograph
- 0x214048: (0x62bc, 0),# East Asian ideograph
- 0x214049: (0x62d0, 0),# East Asian ideograph (variant of 4B4049 which maps to 62D0)
- 0x21404a: (0x62c8, 0),# East Asian ideograph
- 0x21404b: (0x62d9, 0),# East Asian ideograph
- 0x21404c: (0x62da, 0),# East Asian ideograph
- 0x21404d: (0x62ac, 0),# East Asian ideograph
- 0x21404e: (0x62c7, 0),# East Asian ideograph
- 0x21404f: (0x62b1, 0),# East Asian ideograph
- 0x214050: (0x62d6, 0),# East Asian ideograph
- 0x214051: (0x62d8, 0),# East Asian ideograph
- 0x214052: (0x62cd, 0),# East Asian ideograph
- 0x214053: (0x62b5, 0),# East Asian ideograph
- 0x214054: (0x62ce, 0),# East Asian ideograph
- 0x214055: (0x62d7, 0),# East Asian ideograph
- 0x214056: (0x62c6, 0),# East Asian ideograph
- 0x214057: (0x6309, 0),# East Asian ideograph
- 0x214058: (0x6316, 0),# East Asian ideograph
- 0x214059: (0x62fc, 0),# East Asian ideograph
- 0x21405a: (0x62f3, 0),# East Asian ideograph
- 0x21405b: (0x6308, 0),# East Asian ideograph
- 0x21405c: (0x62ed, 0),# East Asian ideograph
- 0x21405d: (0x6301, 0),# East Asian ideograph
- 0x21405e: (0x62ee, 0),# East Asian ideograph
- 0x21405f: (0x62ef, 0),# East Asian ideograph
- 0x214060: (0x62f7, 0),# East Asian ideograph
- 0x214061: (0x6307, 0),# East Asian ideograph
- 0x214062: (0x62f1, 0),# East Asian ideograph
- 0x214063: (0x62fd, 0),# East Asian ideograph
- 0x214064: (0x6311, 0),# East Asian ideograph
- 0x214065: (0x62ec, 0),# East Asian ideograph
- 0x214066: (0x62f4, 0),# East Asian ideograph (variant of 4B4066 which maps to 62F4)
- 0x214067: (0x62ff, 0),# East Asian ideograph
- 0x224068: (0x6a2d, 0),# East Asian ideograph
- 0x214069: (0x6342, 0),# East Asian ideograph
- 0x21406a: (0x632a, 0),# East Asian ideograph
- 0x21406b: (0x6355, 0),# East Asian ideograph
- 0x21406c: (0x633e, 0),# East Asian ideograph
- 0x21406d: (0x632f, 0),# East Asian ideograph
- 0x21406e: (0x634e, 0),# East Asian ideograph
- 0x21406f: (0x634f, 0),# East Asian ideograph
- 0x214070: (0x6350, 0),# East Asian ideograph
- 0x214071: (0x6349, 0),# East Asian ideograph
- 0x224072: (0x6a1d, 0),# East Asian ideograph
- 0x214073: (0x632b, 0),# East Asian ideograph
- 0x214074: (0x6328, 0),# East Asian ideograph
- 0x214075: (0x633a, 0),# East Asian ideograph
- 0x214076: (0x63a5, 0),# East Asian ideograph
- 0x214077: (0x6369, 0),# East Asian ideograph
- 0x214078: (0x63a0, 0),# East Asian ideograph
- 0x214079: (0x6396, 0),# East Asian ideograph
- 0x21407a: (0x63a7, 0),# East Asian ideograph
- 0x21407b: (0x6372, 0),# East Asian ideograph
- 0x21407c: (0x6377, 0),# East Asian ideograph
- 0x21407d: (0x6383, 0),# East Asian ideograph
- 0x21407e: (0x636b, 0),# East Asian ideograph
- 0x2d5c74: (0x96a3, 0),# East Asian ideograph
- 0x2d5831: (0x89a7, 0),# East Asian ideograph
- 0x21756c: (0x57be, 0),# East Asian ideograph
- 0x693729: (0x7c82, 0),# East Asian ideograph
- 0x23356d: (0x8c73, 0),# East Asian ideograph
- 0x6f5966: (0xce5c, 0),# Korean hangul
- 0x29252d: (0x8311, 0),# East Asian ideograph
- 0x6f5366: (0xc232, 0),# Korean hangul
- 0x276251: (0x76d0, 0),# East Asian ideograph
- 0x6f5463: (0xc46c, 0),# Korean hangul
- 0x6f4f23: (0xb800, 0),# Korean hangul
- 0x21356f: (0x547b, 0),# East Asian ideograph
- 0x6f4c71: (0xb2eb, 0),# Korean hangul
- 0x233571: (0x8c75, 0),# East Asian ideograph
- 0x28722a: (0x7f02, 0),# East Asian ideograph
- 0x213572: (0x5484, 0),# East Asian ideograph
- 0x6f4a54: (0xae7b, 0),# Korean hangul
- 0x39593f: (0x8a3c, 0),# East Asian ideograph
- 0x233573: (0x8c77, 0),# East Asian ideograph
- 0x276252: (0x7877, 0),# East Asian ideograph
- 0x213d7b: (0x5fd8, 0),# East Asian ideograph
- 0x6f4f24: (0xb801, 0),# Korean hangul
- 0x213574: (0x5468, 0),# East Asian ideograph
- 0x223d4b: (0x68b4, 0),# East Asian ideograph
- 0x692568: (0x30e8, 0),# Katakana letter YO
- 0x213575: (0x5486, 0),# East Asian ideograph
- 0x213939: (0x5957, 0),# East Asian ideograph
- 0x6f5a2f: (0xcf01, 0),# Korean hangul
- 0x393b6e: (0x5c97, 0),# East Asian ideograph
- 0x2d6021: (0x978c, 0),# East Asian ideograph
- 0x335652: (0x87c1, 0),# East Asian ideograph
- 0x223577: (0x652e, 0),# East Asian ideograph
- 0x216022: (0x978b, 0),# East Asian ideograph
- 0x216023: (0x978f, 0),# East Asian ideograph
- 0x215b66: (0x8fc6, 0),# East Asian ideograph
- 0x694838: (0x567a, 0),# East Asian ideograph
- 0x216024: (0x9798, 0),# East Asian ideograph
- 0x4b5036: (0x7c14, 0),# East Asian ideograph
- 0x277360: (0x5181, 0),# East Asian ideograph
- 0x21357b: (0x5471, 0),# East Asian ideograph
- 0x21357c: (0x549a, 0),# East Asian ideograph
- 0x454e43: (0x788c, 0),# East Asian ideograph (variant of 214E43 which maps to 788C)
- 0x21357d: (0x548e, 0),# East Asian ideograph
- 0x216028: (0x97ad, 0),# East Asian ideograph
- 0x6f4f26: (0xb808, 0),# Korean hangul
- 0x215724: (0x87a2, 0),# East Asian ideograph
- 0x275e7b: (0x9635, 0),# East Asian ideograph
- 0x6f5d6e: (0xd758, 0),# Korean hangul
- 0x21602b: (0x97c6, 0),# East Asian ideograph
- 0x335259: (0x7e59, 0),# East Asian ideograph
- 0x27602c: (0x97e6, 0),# East Asian ideograph
- 0x27623d: (0x9e3d, 0),# East Asian ideograph
- 0x4b4347: (0x66a8, 0),# East Asian ideograph
- 0x6f536a: (0xc26c, 0),# Korean hangul
- 0x27602d: (0x97e7, 0),# East Asian ideograph
- 0x6f4f27: (0xb809, 0),# Korean hangul
- 0x6f592b: (0xcc3e, 0),# Korean hangul
- 0x213938: (0x5950, 0),# East Asian ideograph
- 0x2d3a60: (0x6588, 0),# East Asian ideograph
- 0x27602f: (0x97ec, 0),# East Asian ideograph
- 0x214121: (0x6367, 0),# East Asian ideograph
- 0x214122: (0x6398, 0),# East Asian ideograph
- 0x214123: (0x639b, 0),# East Asian ideograph
- 0x214124: (0x63aa, 0),# East Asian ideograph
- 0x214125: (0x6371, 0),# East Asian ideograph
- 0x214126: (0x63a9, 0),# East Asian ideograph
- 0x214127: (0x638c, 0),# East Asian ideograph
- 0x214128: (0x6389, 0),# East Asian ideograph
- 0x214129: (0x63a2, 0),# East Asian ideograph
- 0x21412a: (0x6399, 0),# East Asian ideograph
- 0x21412b: (0x63a1, 0),# East Asian ideograph
- 0x21412c: (0x6388, 0),# East Asian ideograph
- 0x21412d: (0x63ac, 0),# East Asian ideograph
- 0x21412e: (0x633d, 0),# East Asian ideograph
- 0x21412f: (0x6392, 0),# East Asian ideograph
- 0x214130: (0x63a3, 0),# East Asian ideograph
- 0x214131: (0x6376, 0),# East Asian ideograph
- 0x214132: (0x638f, 0),# East Asian ideograph
- 0x214133: (0x63a8, 0),# East Asian ideograph
- 0x214134: (0x637b, 0),# East Asian ideograph
- 0x214135: (0x6368, 0),# East Asian ideograph (variant of 4B4135 which maps to 6368)
- 0x214136: (0x6384, 0),# East Asian ideograph
- 0x214137: (0x6380, 0),# East Asian ideograph
- 0x214138: (0x63c6, 0),# East Asian ideograph
- 0x214139: (0x63c9, 0),# East Asian ideograph
- 0x21413a: (0x63cd, 0),# East Asian ideograph
- 0x21413b: (0x63e1, 0),# East Asian ideograph
- 0x21413c: (0x63c0, 0),# East Asian ideograph
- 0x21413d: (0x63e9, 0),# East Asian ideograph
- 0x21413e: (0x63d0, 0),# East Asian ideograph
- 0x21413f: (0x63da, 0),# East Asian ideograph
- 0x214140: (0x63d6, 0),# East Asian ideograph
- 0x214141: (0x63ed, 0),# East Asian ideograph
- 0x214142: (0x63ee, 0),# East Asian ideograph
- 0x214143: (0x63cf, 0),# East Asian ideograph
- 0x214144: (0x63e3, 0),# East Asian ideograph
- 0x214145: (0x63f4, 0),# East Asian ideograph
- 0x214146: (0x63db, 0),# East Asian ideograph (variant of 454146 which maps to 63DB)
- 0x214147: (0x63d2, 0),# East Asian ideograph
- 0x234148: (0x91ae, 0),# East Asian ideograph
- 0x214149: (0x641e, 0),# East Asian ideograph
- 0x21414a: (0x642a, 0),# East Asian ideograph
- 0x23414b: (0x91b4, 0),# East Asian ideograph
- 0x23414c: (0x91b2, 0),# East Asian ideograph
- 0x21414d: (0x640f, 0),# East Asian ideograph
- 0x21414e: (0x6414, 0),# East Asian ideograph
- 0x21414f: (0x640d, 0),# East Asian ideograph
- 0x214150: (0x642d, 0),# East Asian ideograph
- 0x214151: (0x643d, 0),# East Asian ideograph
- 0x214152: (0x6416, 0),# East Asian ideograph
- 0x214153: (0x6417, 0),# East Asian ideograph
- 0x214154: (0x641c, 0),# East Asian ideograph
- 0x214155: (0x6436, 0),# East Asian ideograph
- 0x214156: (0x642c, 0),# East Asian ideograph
- 0x214157: (0x6458, 0),# East Asian ideograph
- 0x214158: (0x6469, 0),# East Asian ideograph
- 0x214159: (0x6454, 0),# East Asian ideograph
- 0x21415a: (0x6452, 0),# East Asian ideograph
- 0x21415b: (0x646f, 0),# East Asian ideograph
- 0x21415c: (0x6478, 0),# East Asian ideograph
- 0x21415d: (0x6479, 0),# East Asian ideograph
- 0x21415e: (0x647a, 0),# East Asian ideograph
- 0x21415f: (0x645f, 0),# East Asian ideograph
- 0x214160: (0x6451, 0),# East Asian ideograph
- 0x214161: (0x6467, 0),# East Asian ideograph
- 0x214162: (0x649e, 0),# East Asian ideograph
- 0x214163: (0x64a4, 0),# East Asian ideograph
- 0x214164: (0x6487, 0),# East Asian ideograph
- 0x214165: (0x6488, 0),# East Asian ideograph
- 0x214166: (0x64a5, 0),# East Asian ideograph
- 0x214167: (0x64b0, 0),# East Asian ideograph
- 0x214168: (0x6493, 0),# East Asian ideograph
- 0x214169: (0x6495, 0),# East Asian ideograph
- 0x21416a: (0x6492, 0),# East Asian ideograph
- 0x21416b: (0x64a9, 0),# East Asian ideograph
- 0x21416c: (0x6491, 0),# East Asian ideograph
- 0x21416d: (0x64ae, 0),# East Asian ideograph
- 0x21416e: (0x64b2, 0),# East Asian ideograph
- 0x21416f: (0x64ad, 0),# East Asian ideograph
- 0x214170: (0x649a, 0),# East Asian ideograph
- 0x214171: (0x64ab, 0),# East Asian ideograph
- 0x214172: (0x64ac, 0),# East Asian ideograph
- 0x214173: (0x64c5, 0),# East Asian ideograph
- 0x214174: (0x64c1, 0),# East Asian ideograph
- 0x214175: (0x64d8, 0),# East Asian ideograph
- 0x214176: (0x64ca, 0),# East Asian ideograph
- 0x214177: (0x64bb, 0),# East Asian ideograph
- 0x214178: (0x64c2, 0),# East Asian ideograph
- 0x214179: (0x64bc, 0),# East Asian ideograph
- 0x21417a: (0x64cb, 0),# East Asian ideograph
- 0x21417b: (0x64cd, 0),# East Asian ideograph
- 0x21417c: (0x64da, 0),# East Asian ideograph
- 0x21417d: (0x64c4, 0),# East Asian ideograph
- 0x21417e: (0x64c7, 0),# East Asian ideograph
- 0x705c50: (0x82c4, 0),# East Asian ideograph
- 0x216040: (0x9813, 0),# East Asian ideograph
- 0x393e61: (0x60aa, 0),# East Asian ideograph
- 0x6f536e: (0xc27d, 0),# Korean hangul
- 0x216041: (0x9812, 0),# East Asian ideograph
- 0x2d3571: (0x546a, 0),# East Asian ideograph
- 0x6f4f2b: (0xb819, 0),# Korean hangul
- 0x29483e: (0x9554, 0),# East Asian ideograph
- 0x276042: (0x9882, 0),# East Asian ideograph
- 0x276043: (0x9887, 0),# East Asian ideograph
- 0x6f5d6f: (0xd759, 0),# Korean hangul
- 0x276044: (0x9886, 0),# East Asian ideograph
- 0x69594b: (0x6327, 0),# East Asian ideograph
- 0x276045: (0x9889, 0),# East Asian ideograph
- 0x27623e: (0x9e49, 0),# East Asian ideograph
- 0x6f536f: (0xc27f, 0),# Korean hangul
- 0x276046: (0x5934, 0),# East Asian ideograph
- 0x6f4f2c: (0xb81b, 0),# Korean hangul
- 0x213954: (0x5999, 0),# East Asian ideograph
- 0x29483f: (0x9572, 0),# East Asian ideograph
- 0x236047: (0x9f76, 0),# East Asian ideograph
- 0x6f5775: (0xc918, 0),# Korean hangul
- 0x216048: (0x9838, 0),# East Asian ideograph
- 0x6f503a: (0xbaa8, 0),# Korean hangul
- 0x216049: (0x983b, 0),# East Asian ideograph
- 0x213e58: (0x60e6, 0),# East Asian ideograph
- 0x222c47: (0x60d3, 0),# East Asian ideograph
- 0x21604a: (0x9839, 0),# East Asian ideograph
- 0x6f5370: (0xc281, 0),# Korean hangul
- 0x27625b: (0x9ea6, 0),# East Asian ideograph
- 0x27604b: (0x9894, 0),# East Asian ideograph
- 0x6f4f2d: (0xb81d, 0),# Korean hangul
- 0x27604c: (0x9890, 0),# East Asian ideograph
- 0x225b2a: (0x748a, 0),# East Asian ideograph
- 0x27604d: (0x9897, 0),# East Asian ideograph
- 0x213269: (0x5132, 0),# East Asian ideograph
- 0x4c725d: (0x7a39, 0),# East Asian ideograph
- 0x27604e: (0x989c, 0),# East Asian ideograph
- 0x27604f: (0x989d, 0),# East Asian ideograph
- 0x2d4730: (0x51b5, 0),# East Asian ideograph
- 0x27625c: (0x9eb8, 0),# East Asian ideograph
- 0x276050: (0x9898, 0),# East Asian ideograph
- 0x276051: (0x989a, 0),# East Asian ideograph
- 0x216052: (0x9853, 0),# East Asian ideograph
- 0x393b78: (0x5cc4, 0),# East Asian ideograph (duplicate simplified)
- 0x276053: (0x7c7b, 0),# East Asian ideograph
- 0x284f39: (0x6cf8, 0),# East Asian ideograph
- 0x276054: (0x98a0, 0),# East Asian ideograph
- 0x6f5372: (0xc289, 0),# Korean hangul
- 0x216055: (0x9858, 0),# East Asian ideograph
- 0x6f4f2f: (0xb825, 0),# Korean hangul
- 0x4b4866: (0x6e89, 0),# East Asian ideograph
- 0x276056: (0x987e, 0),# East Asian ideograph
- 0x69243a: (0x305a, 0),# Hiragana letter ZU
- 0x276057: (0x98a4, 0),# East Asian ideograph
- 0x276058: (0x663e, 0),# East Asian ideograph
- 0x213861: (0x589c, 0),# East Asian ideograph
- 0x4b614d: (0x9a13, 0),# East Asian ideograph
- 0x216059: (0x9871, 0),# East Asian ideograph
- 0x4c4333: (0x6aaa, 0),# East Asian ideograph
- 0x27625e: (0x9762, 0),# East Asian ideograph
- 0x27605a: (0x98a6, 0),# East Asian ideograph
- 0x6f4f30: (0xb828, 0),# Korean hangul
- 0x214221: (0x64ce, 0),# East Asian ideograph
- 0x214222: (0x64d4, 0),# East Asian ideograph
- 0x214223: (0x64d2, 0),# East Asian ideograph
- 0x214224: (0x64bf, 0),# East Asian ideograph
- 0x234225: (0x9201, 0),# East Asian ideograph
- 0x214226: (0x64f0, 0),# East Asian ideograph
- 0x214227: (0x64e6, 0),# East Asian ideograph
- 0x214228: (0x64ec, 0),# East Asian ideograph
- 0x214229: (0x64f1, 0),# East Asian ideograph
- 0x21422a: (0x64f4, 0),# East Asian ideograph
- 0x21422b: (0x64f2, 0),# East Asian ideograph
- 0x21422c: (0x6506, 0),# East Asian ideograph
- 0x21422d: (0x6500, 0),# East Asian ideograph
- 0x27422e: (0x6270, 0),# East Asian ideograph
- 0x21422f: (0x64fb, 0),# East Asian ideograph
- 0x214230: (0x64fa, 0),# East Asian ideograph
- 0x214231: (0x650f, 0),# East Asian ideograph
- 0x214232: (0x6518, 0),# East Asian ideograph
- 0x214233: (0x6514, 0),# East Asian ideograph
- 0x214234: (0x6519, 0),# East Asian ideograph
- 0x214235: (0x651d, 0),# East Asian ideograph
- 0x214236: (0x651c, 0),# East Asian ideograph
- 0x214237: (0x6523, 0),# East Asian ideograph
- 0x214238: (0x6524, 0),# East Asian ideograph
- 0x214239: (0x652b, 0),# East Asian ideograph
- 0x21423a: (0x652a, 0),# East Asian ideograph
- 0x21423b: (0x652c, 0),# East Asian ideograph
- 0x21423c: (0x652f, 0),# East Asian ideograph
- 0x21423d: (0x6536, 0),# East Asian ideograph
- 0x21423e: (0x6539, 0),# East Asian ideograph
- 0x21423f: (0x653b, 0),# East Asian ideograph
- 0x214240: (0x653e, 0),# East Asian ideograph
- 0x214241: (0x653f, 0),# East Asian ideograph
- 0x214242: (0x6545, 0),# East Asian ideograph
- 0x214243: (0x6548, 0),# East Asian ideograph
- 0x214244: (0x654e, 0),# East Asian ideograph
- 0x214245: (0x6556, 0),# East Asian ideograph
- 0x214246: (0x6551, 0),# East Asian ideograph
- 0x274247: (0x8d25, 0),# East Asian ideograph
- 0x214248: (0x655d, 0),# East Asian ideograph
- 0x214249: (0x6558, 0),# East Asian ideograph
- 0x21424a: (0x654f, 0),# East Asian ideograph
- 0x21424b: (0x6566, 0),# East Asian ideograph
- 0x21424c: (0x6562, 0),# East Asian ideograph
- 0x21424d: (0x6563, 0),# East Asian ideograph
- 0x21424e: (0x655e, 0),# East Asian ideograph
- 0x21424f: (0x5553, 0),# East Asian ideograph
- 0x214250: (0x656c, 0),# East Asian ideograph
- 0x214251: (0x6572, 0),# East Asian ideograph
- 0x214252: (0x6575, 0),# East Asian ideograph
- 0x214253: (0x6577, 0),# East Asian ideograph
- 0x214254: (0x6578, 0),# East Asian ideograph
- 0x214255: (0x6574, 0),# East Asian ideograph
- 0x214256: (0x6582, 0),# East Asian ideograph
- 0x214257: (0x6583, 0),# East Asian ideograph
- 0x214258: (0x6587, 0),# East Asian ideograph
- 0x214259: (0x6591, 0),# East Asian ideograph
- 0x21425a: (0x6590, 0),# East Asian ideograph
- 0x6f4f32: (0xb834, 0),# Korean hangul
- 0x21425c: (0x6599, 0),# East Asian ideograph
- 0x21425d: (0x659c, 0),# East Asian ideograph
- 0x21425e: (0x659f, 0),# East Asian ideograph
- 0x21425f: (0x65a1, 0),# East Asian ideograph
- 0x214260: (0x65a4, 0),# East Asian ideograph
- 0x214261: (0x65a5, 0),# East Asian ideograph
- 0x214262: (0x65a7, 0),# East Asian ideograph
- 0x274263: (0x65a9, 0),# East Asian ideograph
- 0x214264: (0x65af, 0),# East Asian ideograph
- 0x214265: (0x65b0, 0),# East Asian ideograph
- 0x274266: (0x65ad, 0),# East Asian ideograph
- 0x214267: (0x65b9, 0),# East Asian ideograph
- 0x224268: (0x6ab4, 0),# East Asian ideograph
- 0x214269: (0x65bd, 0),# East Asian ideograph
- 0x21426a: (0x65c1, 0),# East Asian ideograph
- 0x21426b: (0x65c5, 0),# East Asian ideograph
- 0x21426c: (0x65ce, 0),# East Asian ideograph
- 0x21426d: (0x65cb, 0),# East Asian ideograph
- 0x21426e: (0x65cc, 0),# East Asian ideograph
- 0x21426f: (0x65cf, 0),# East Asian ideograph
- 0x214270: (0x65d7, 0),# East Asian ideograph
- 0x214271: (0x65d6, 0),# East Asian ideograph
- 0x214272: (0x65e2, 0),# East Asian ideograph
- 0x214273: (0x65e5, 0),# East Asian ideograph
- 0x234274: (0x923f, 0),# East Asian ideograph
- 0x214275: (0x65e9, 0),# East Asian ideograph
- 0x214276: (0x65ec, 0),# East Asian ideograph
- 0x214277: (0x65ed, 0),# East Asian ideograph
- 0x214278: (0x65e8, 0),# East Asian ideograph
- 0x214279: (0x65f1, 0),# East Asian ideograph
- 0x21427a: (0x65fa, 0),# East Asian ideograph
- 0x21427b: (0x6606, 0),# East Asian ideograph
- 0x21427c: (0x6614, 0),# East Asian ideograph
- 0x21427d: (0x660c, 0),# East Asian ideograph
- 0x21427e: (0x6600, 0),# East Asian ideograph
- 0x6f5779: (0xc954, 0),# Korean hangul
- 0x21606b: (0x98ef, 0),# East Asian ideograph
- 0x27606c: (0x9972, 0),# East Asian ideograph
- 0x453f6d: (0x52e0, 0),# East Asian ideograph
- 0x29373a: (0x8d46, 0),# East Asian ideograph
- 0x22606d: (0x76ad, 0),# East Asian ideograph
- 0x6f5377: (0xc2a4, 0),# Korean hangul
- 0x27606e: (0x9971, 0),# East Asian ideograph
- 0x6f4f34: (0xb837, 0),# Korean hangul
- 0x21395c: (0x59b9, 0),# East Asian ideograph
- 0x27606f: (0x9970, 0),# East Asian ideograph
- 0x69243b: (0x305b, 0),# Hiragana letter SE
- 0x276070: (0x997a, 0),# East Asian ideograph
- 0x2d362a: (0x95a7, 0),# East Asian ideograph
- 0x275156: (0x7f04, 0),# East Asian ideograph
- 0x277272: (0x54d2, 0),# East Asian ideograph
- 0x236071: (0x9fa5, 0),# East Asian ideograph
- 0x213862: (0x58ae, 0),# East Asian ideograph
- 0x216072: (0x990c, 0),# East Asian ideograph
- 0x6f5378: (0xc2a5, 0),# Korean hangul
- 0x276073: (0x9977, 0),# East Asian ideograph
- 0x215b76: (0x8ff7, 0),# East Asian ideograph
- 0x21395d: (0x59c6, 0),# East Asian ideograph
- 0x216074: (0x9910, 0),# East Asian ideograph
- 0x276075: (0x9981, 0),# East Asian ideograph
- 0x6f5d71: (0xd761, 0),# Korean hangul
- 0x276076: (0x4f59, 0),# East Asian ideograph
- 0x295955: (0x9c8e, 0),# East Asian ideograph
- 0x6f4b21: (0xaf9c, 0),# Korean hangul
- 0x216077: (0x9913, 0),# East Asian ideograph
- 0x214b22: (0x733e, 0),# East Asian ideograph
- 0x2d4738: (0x6ffc, 0),# East Asian ideograph
- 0x276078: (0x997c, 0),# East Asian ideograph
- 0x214b23: (0x7345, 0),# East Asian ideograph
- 0x276079: (0x9986, 0),# East Asian ideograph
- 0x4b553f: (0x83bd, 0),# East Asian ideograph (variant of 21553F which maps to 83BD)
- 0x224b24: (0x6e5c, 0),# East Asian ideograph
- 0x27607a: (0x996f, 0),# East Asian ideograph
- 0x27607b: (0x9984, 0),# East Asian ideograph
- 0x224e6a: (0x700d, 0),# East Asian ideograph
- 0x27607c: (0x9985, 0),# East Asian ideograph
- 0x214b27: (0x7368, 0),# East Asian ideograph
- 0x6f537a: (0xc2ac, 0),# Korean hangul
- 0x217334: (0x5686, 0),# East Asian ideograph
- 0x214b28: (0x7370, 0),# East Asian ideograph
- 0x29484a: (0x956c, 0),# East Asian ideograph
- 0x27607e: (0x998f, 0),# East Asian ideograph
- 0x6f5d6d: (0xd757, 0),# Korean hangul
- 0x214b29: (0x7372, 0),# East Asian ideograph
- 0x4c5447: (0x71e0, 0),# East Asian ideograph (variant of 225447 which maps to 71E0)
- 0x214b2a: (0x7377, 0),# East Asian ideograph
- 0x2e7374: (0x7e89, 0),# East Asian ideograph
- 0x214b2b: (0x7378, 0),# East Asian ideograph
- 0x2e6060: (0x76a1, 0),# East Asian ideograph
- 0x214b2c: (0x7375, 0),# East Asian ideograph
- 0x6f537b: (0xc2ad, 0),# Korean hangul
- 0x214b2d: (0x737a, 0),# East Asian ideograph
- 0x213960: (0x59af, 0),# East Asian ideograph
- 0x286222: (0x7726, 0),# East Asian ideograph
- 0x214b2e: (0x737b, 0),# East Asian ideograph
- 0x335f34: (0x90c4, 0),# East Asian ideograph
- 0x21393d: (0x5962, 0),# East Asian ideograph
- 0x274b2f: (0x7321, 0),# East Asian ideograph
- 0x295958: (0x9c9a, 0),# East Asian ideograph
- 0x214321: (0x660e, 0),# East Asian ideograph
- 0x214322: (0x6613, 0),# East Asian ideograph
- 0x214323: (0x6602, 0),# East Asian ideograph
- 0x214324: (0x660f, 0),# East Asian ideograph
- 0x214325: (0x6625, 0),# East Asian ideograph
- 0x214326: (0x6627, 0),# East Asian ideograph
- 0x214327: (0x662f, 0),# East Asian ideograph
- 0x214328: (0x662d, 0),# East Asian ideograph
- 0x214329: (0x6620, 0),# East Asian ideograph
- 0x21432a: (0x661f, 0),# East Asian ideograph
- 0x21432b: (0x6628, 0),# East Asian ideograph
- 0x21432c: (0x664f, 0),# East Asian ideograph
- 0x21432d: (0x6642, 0),# East Asian ideograph
- 0x21432e: (0x6652, 0),# East Asian ideograph
- 0x21432f: (0x6649, 0),# East Asian ideograph
- 0x214330: (0x6643, 0),# East Asian ideograph
- 0x214331: (0x664c, 0),# East Asian ideograph
- 0x214332: (0x665d, 0),# East Asian ideograph
- 0x214333: (0x6664, 0),# East Asian ideograph
- 0x214334: (0x6668, 0),# East Asian ideograph
- 0x214335: (0x6666, 0),# East Asian ideograph
- 0x214336: (0x665a, 0),# East Asian ideograph
- 0x214337: (0x666f, 0),# East Asian ideograph
- 0x214338: (0x666e, 0),# East Asian ideograph
- 0x214339: (0xfa12, 0),# East Asian ideograph
- 0x21433a: (0x6691, 0),# East Asian ideograph
- 0x21433b: (0x6670, 0),# East Asian ideograph
- 0x21433c: (0x6676, 0),# East Asian ideograph
- 0x21433d: (0x667a, 0),# East Asian ideograph
- 0x21433e: (0x6697, 0),# East Asian ideograph
- 0x21433f: (0x6687, 0),# East Asian ideograph
- 0x214340: (0x6689, 0),# East Asian ideograph
- 0x214341: (0x6688, 0),# East Asian ideograph
- 0x214342: (0x6696, 0),# East Asian ideograph
- 0x214343: (0x66a2, 0),# East Asian ideograph
- 0x214344: (0x66ab, 0),# East Asian ideograph
- 0x214345: (0x66b4, 0),# East Asian ideograph
- 0x214346: (0x66ae, 0),# East Asian ideograph
- 0x214347: (0x66c1, 0),# East Asian ideograph
- 0x214348: (0x66c9, 0),# East Asian ideograph
- 0x214349: (0x66c6, 0),# East Asian ideograph
- 0x21434a: (0x66b9, 0),# East Asian ideograph
- 0x21434b: (0x66d6, 0),# East Asian ideograph
- 0x21434c: (0x66d9, 0),# East Asian ideograph
- 0x21434d: (0x66e0, 0),# East Asian ideograph
- 0x21434e: (0x66dd, 0),# East Asian ideograph
- 0x21434f: (0x66e6, 0),# East Asian ideograph
- 0x214350: (0x66f0, 0),# East Asian ideograph
- 0x214351: (0x66f2, 0),# East Asian ideograph
- 0x214352: (0x66f3, 0),# East Asian ideograph
- 0x214353: (0x66f4, 0),# East Asian ideograph
- 0x214354: (0x66f7, 0),# East Asian ideograph
- 0x214355: (0x66f8, 0),# East Asian ideograph
- 0x214356: (0x66f9, 0),# East Asian ideograph
- 0x214357: (0x52d7, 0),# East Asian ideograph
- 0x214358: (0x66fe, 0),# East Asian ideograph
- 0x214359: (0x66ff, 0),# East Asian ideograph
- 0x21435a: (0x6703, 0),# East Asian ideograph
- 0x21435b: (0x6708, 0),# East Asian ideograph
- 0x21435c: (0x6709, 0),# East Asian ideograph
- 0x21435d: (0x670d, 0),# East Asian ideograph
- 0x21435e: (0x670b, 0),# East Asian ideograph
- 0x21435f: (0x6717, 0),# East Asian ideograph
- 0x214360: (0x6715, 0),# East Asian ideograph
- 0x214361: (0x6714, 0),# East Asian ideograph
- 0x214362: (0x671b, 0),# East Asian ideograph
- 0x214363: (0x671d, 0),# East Asian ideograph
- 0x214364: (0x671f, 0),# East Asian ideograph
- 0x6f537e: (0xc2b5, 0),# Korean hangul
- 0x234366: (0x92c8, 0),# East Asian ideograph
- 0x214367: (0x6728, 0),# East Asian ideograph
- 0x214369: (0x672c, 0),# East Asian ideograph
- 0x23436a: (0x92c3, 0),# East Asian ideograph
- 0x21436b: (0x672a, 0),# East Asian ideograph
- 0x29436c: (0x950d, 0),# East Asian ideograph
- 0x21436d: (0x673d, 0),# East Asian ideograph
- 0x22436e: (0x6b17, 0),# East Asian ideograph
- 0x21436f: (0x6731, 0),# East Asian ideograph
- 0x214370: (0x6735, 0),# East Asian ideograph
- 0x214371: (0x675e, 0),# East Asian ideograph
- 0x214372: (0x6751, 0),# East Asian ideograph
- 0x214373: (0x674e, 0),# East Asian ideograph
- 0x214374: (0x675c, 0),# East Asian ideograph
- 0x234375: (0x92e6, 0),# East Asian ideograph
- 0x214376: (0x6756, 0),# East Asian ideograph
- 0x214377: (0x675f, 0),# East Asian ideograph
- 0x214378: (0x674f, 0),# East Asian ideograph
- 0x214379: (0x6749, 0),# East Asian ideograph
- 0x23437a: (0x92d9, 0),# East Asian ideograph
- 0x21437b: (0x676d, 0),# East Asian ideograph
- 0x21437c: (0x678b, 0),# East Asian ideograph
- 0x21437d: (0x6795, 0),# East Asian ideograph
- 0x21437e: (0x6789, 0),# East Asian ideograph
- 0x21777b: (0x58a9, 0),# East Asian ideograph
- 0x4b3f40: (0x618e, 0),# East Asian ideograph (variant of 213F40)
- 0x214b40: (0x73ed, 0),# East Asian ideograph
- 0x27626a: (0x70b9, 0),# East Asian ideograph
- 0x214b41: (0x73ee, 0),# East Asian ideograph
- 0x214b42: (0x73e0, 0),# East Asian ideograph
- 0x214b43: (0x7405, 0),# East Asian ideograph
- 0x6f4b44: (0xb07c, 0),# Korean hangul
- 0x23457e: (0x938b, 0),# East Asian ideograph
- 0x214b45: (0x7403, 0),# East Asian ideograph
- 0x336062: (0x98c3, 0),# East Asian ideograph
- 0x214b46: (0x740a, 0),# East Asian ideograph
- 0x517954: (0x734e, 0),# East Asian ideograph
- 0x274b47: (0x73b0, 0),# East Asian ideograph
- 0x6f577b: (0xc960, 0),# Korean hangul
- 0x214b48: (0x7406, 0),# East Asian ideograph
- 0x214b49: (0x740d, 0),# East Asian ideograph
- 0x282577: (0x5ce4, 0),# East Asian ideograph
- 0x214b4a: (0x743a, 0),# East Asian ideograph
- 0x6f5338: (0xc14d, 0),# Korean hangul
- 0x6f4b4b: (0xb090, 0),# Korean hangul
- 0x213966: (0x59d4, 0),# East Asian ideograph
- 0x6f4b4c: (0xb091, 0),# Korean hangul
- 0x2d584d: (0x548f, 0),# East Asian ideograph
- 0x214b4d: (0x7434, 0),# East Asian ideograph
- 0x2e3a33: (0x80ad, 0),# East Asian ideograph
- 0x213864: (0x58c7, 0),# East Asian ideograph (variant of 4B3864 which maps to 58C7)
- 0x214b4f: (0x7433, 0),# East Asian ideograph
- 0x6f4b50: (0xb099, 0),# Korean hangul
- 0x6f5021: (0xba38, 0),# Korean hangul
- 0x214b51: (0x7425, 0),# East Asian ideograph
- 0x6f4b52: (0xb09c, 0),# Korean hangul
- 0x213f36: (0x616e, 0),# East Asian ideograph
- 0x234b53: (0x96ca, 0),# East Asian ideograph
- 0x6f4b54: (0xb0a0, 0),# Korean hangul
- 0x6f4b55: (0xb0a1, 0),# Korean hangul
- 0x6f4f40: (0xb8b0, 0),# Korean hangul
- 0x6f5930: (0xcc4c, 0),# Korean hangul
- 0x6f4b56: (0xb0a8, 0),# Korean hangul
- 0x274b57: (0x73f2, 0),# East Asian ideograph
- 0x6f4b58: (0xb0ab, 0),# Korean hangul
- 0x214b59: (0x745e, 0),# East Asian ideograph
- 0x214b5a: (0x745c, 0),# East Asian ideograph
- 0x6f4f41: (0xb8cc, 0),# Korean hangul
- 0x214421: (0x6787, 0),# East Asian ideograph
- 0x214422: (0x6777, 0),# East Asian ideograph
- 0x214423: (0x679d, 0),# East Asian ideograph
- 0x214424: (0x6797, 0),# East Asian ideograph
- 0x214425: (0x676f, 0),# East Asian ideograph
- 0x214426: (0x6771, 0),# East Asian ideograph
- 0x214427: (0x6773, 0),# East Asian ideograph
- 0x214428: (0x679c, 0),# East Asian ideograph
- 0x214429: (0x6775, 0),# East Asian ideograph
- 0x21442a: (0x679a, 0),# East Asian ideograph
- 0x21442b: (0x6790, 0),# East Asian ideograph
- 0x22442c: (0x6b37, 0),# East Asian ideograph
- 0x21442d: (0x677e, 0),# East Asian ideograph
- 0x21442e: (0x67d3, 0),# East Asian ideograph
- 0x21442f: (0x67f1, 0),# East Asian ideograph
- 0x214430: (0x67ff, 0),# East Asian ideograph
- 0x214431: (0x67d4, 0),# East Asian ideograph
- 0x214432: (0x67c4, 0),# East Asian ideograph
- 0x214433: (0x67af, 0),# East Asian ideograph
- 0x214434: (0x67d0, 0),# East Asian ideograph
- 0x214435: (0x67d1, 0),# East Asian ideograph
- 0x214436: (0x67ef, 0),# East Asian ideograph
- 0x214437: (0x67e9, 0),# East Asian ideograph
- 0x214438: (0x67b6, 0),# East Asian ideograph
- 0x214439: (0x67ec, 0),# East Asian ideograph
- 0x21443a: (0x67e5, 0),# East Asian ideograph
- 0x21443b: (0x67fa, 0),# East Asian ideograph
- 0x21443c: (0x67da, 0),# East Asian ideograph
- 0x21443d: (0x6805, 0),# East Asian ideograph
- 0x21443e: (0x67de, 0),# East Asian ideograph
- 0x21443f: (0x67b8, 0),# East Asian ideograph
- 0x214440: (0x67cf, 0),# East Asian ideograph
- 0x214441: (0x67f3, 0),# East Asian ideograph
- 0x214442: (0x6848, 0),# East Asian ideograph
- 0x214443: (0x6821, 0),# East Asian ideograph
- 0x214444: (0x6838, 0),# East Asian ideograph
- 0x214445: (0x6853, 0),# East Asian ideograph
- 0x214446: (0x6846, 0),# East Asian ideograph
- 0x214447: (0x6842, 0),# East Asian ideograph
- 0x214448: (0x6854, 0),# East Asian ideograph
- 0x214449: (0x6817, 0),# East Asian ideograph
- 0x21444a: (0x683d, 0),# East Asian ideograph
- 0x21444b: (0x6851, 0),# East Asian ideograph
- 0x21444c: (0x6829, 0),# East Asian ideograph
- 0x21444d: (0x6850, 0),# East Asian ideograph
- 0x21444e: (0x6839, 0),# East Asian ideograph
- 0x23444f: (0x9344, 0),# East Asian ideograph
- 0x214450: (0x67f4, 0),# East Asian ideograph
- 0x214451: (0x6843, 0),# East Asian ideograph
- 0x214452: (0x6840, 0),# East Asian ideograph
- 0x214453: (0x682a, 0),# East Asian ideograph
- 0x214454: (0x6845, 0),# East Asian ideograph
- 0x214455: (0x683c, 0),# East Asian ideograph
- 0x214456: (0x6813, 0),# East Asian ideograph (variant of 4B4456 which maps to 6813)
- 0x214457: (0x6881, 0),# East Asian ideograph
- 0x214458: (0x6893, 0),# East Asian ideograph
- 0x214459: (0x68af, 0),# East Asian ideograph
- 0x21445a: (0x6876, 0),# East Asian ideograph
- 0x21445b: (0x68b0, 0),# East Asian ideograph
- 0x21445c: (0x68a7, 0),# East Asian ideograph
- 0x21445d: (0x6897, 0),# East Asian ideograph
- 0x21445e: (0x68b5, 0),# East Asian ideograph
- 0x21445f: (0x68b3, 0),# East Asian ideograph
- 0x214460: (0x68a2, 0),# East Asian ideograph
- 0x214461: (0x687f, 0),# East Asian ideograph
- 0x214462: (0x68b1, 0),# East Asian ideograph
- 0x214463: (0x689d, 0),# East Asian ideograph
- 0x214464: (0x68ad, 0),# East Asian ideograph
- 0x214465: (0x6886, 0),# East Asian ideograph
- 0x234466: (0x9312, 0),# East Asian ideograph
- 0x214467: (0x68a8, 0),# East Asian ideograph
- 0x214468: (0x689f, 0),# East Asian ideograph
- 0x214469: (0x6894, 0),# East Asian ideograph
- 0x21446a: (0x6883, 0),# East Asian ideograph
- 0x21446b: (0x68d5, 0),# East Asian ideograph
- 0x21446c: (0x68fa, 0),# East Asian ideograph
- 0x21446d: (0x68c4, 0),# East Asian ideograph
- 0x21446e: (0x68f2, 0),# East Asian ideograph
- 0x21446f: (0x68d2, 0),# East Asian ideograph
- 0x214470: (0x68e3, 0),# East Asian ideograph
- 0x214471: (0x68df, 0),# East Asian ideograph
- 0x214472: (0x68cb, 0),# East Asian ideograph
- 0x214473: (0x68ee, 0),# East Asian ideograph
- 0x214474: (0x690d, 0),# East Asian ideograph
- 0x214475: (0x6905, 0),# East Asian ideograph
- 0x214476: (0x68e7, 0),# East Asian ideograph
- 0x214477: (0x68e0, 0),# East Asian ideograph
- 0x214478: (0x68f5, 0),# East Asian ideograph
- 0x214479: (0x68cd, 0),# East Asian ideograph
- 0x21447a: (0x68d7, 0),# East Asian ideograph
- 0x21447b: (0x68d8, 0),# East Asian ideograph
- 0x27447c: (0x832d, 0),# East Asian ideograph
- 0x21447d: (0x68f9, 0),# East Asian ideograph
- 0x21447e: (0x68da, 0),# East Asian ideograph
- 0x214b6b: (0x74cf, 0),# East Asian ideograph
- 0x275166: (0x7ee9, 0),# East Asian ideograph
- 0x214b6c: (0x74dc, 0),# East Asian ideograph
- 0x6f2457: (0x3131, 0),# Korean hangul
- 0x6f576c: (0xc8fc, 0),# Korean hangul
- 0x214b6d: (0x74e0, 0),# East Asian ideograph
- 0x6f4b6e: (0xb108, 0),# Korean hangul
- 0x70755d: (0x8e3a, 0),# East Asian ideograph
- 0x6f4b6f: (0xb109, 0),# Korean hangul
- 0x39525b: (0x66dc, 0),# East Asian ideograph
- 0x214b71: (0x74f6, 0),# East Asian ideograph
- 0x4b3f4a: (0x5fdc, 0),# East Asian ideograph
- 0x234e35: (0x97be, 0),# East Asian ideograph
- 0x29474d: (0x956b, 0),# East Asian ideograph
- 0x6f4b73: (0xb10f, 0),# Korean hangul
- 0x6f4f46: (0xb8f0, 0),# Korean hangul
- 0x214b74: (0x750c, 0),# East Asian ideograph
- 0x224b75: (0x6ea4, 0),# East Asian ideograph
- 0x214b76: (0x7518, 0),# East Asian ideograph
- 0x4b3f4b: (0x601c, 0),# East Asian ideograph (variant of 273F4B)
- 0x234b77: (0x96f4, 0),# East Asian ideograph
- 0x2d3622: (0x8aee, 0),# East Asian ideograph
- 0x6f4d67: (0xb4ec, 0),# Korean hangul
- 0x214b78: (0x751c, 0),# East Asian ideograph
- 0x275e32: (0x9556, 0),# East Asian ideograph
- 0x214b79: (0x751f, 0),# East Asian ideograph
- 0x6f577d: (0xc96c, 0),# Korean hangul
- 0x335f43: (0x9d08, 0),# East Asian ideograph
- 0x333d2a: (0x5e83, 0),# East Asian ideograph
- 0x2d5856: (0x612c, 0),# East Asian ideograph
- 0x274b7a: (0x4ea7, 0),# East Asian ideograph
- 0x4b5437: (0x820e, 0),# East Asian ideograph
- 0x694b7b: (0x9ebf, 0),# East Asian ideograph
- 0x213032: (0x4e26, 0),# East Asian ideograph
- 0x214b7c: (0x7525, 0),# East Asian ideograph
- 0x6f533a: (0xc154, 0),# Korean hangul
- 0x276276: (0x51ac, 0),# East Asian ideograph
- 0x214b7d: (0x7528, 0),# East Asian ideograph
- 0x6f4f48: (0xb8f9, 0),# Korean hangul
- 0x275e33: (0x9557, 0),# East Asian ideograph
- 0x21352d: (0x53f8, 0),# East Asian ideograph
- 0x217629: (0x57e3, 0),# East Asian ideograph
- 0x23362a: (0x8c86, 0),# East Asian ideograph
- 0x213866: (0x58c1, 0),# East Asian ideograph
- 0x23527b: (0x99e3, 0),# East Asian ideograph
- 0x21762c: (0x57f6, 0),# East Asian ideograph
- 0x6f4f49: (0xb8fb, 0),# Korean hangul
- 0x23362d: (0x8c85, 0),# East Asian ideograph
- 0x29485c: (0x9565, 0),# East Asian ideograph
- 0x21352e: (0x53e4, 0),# East Asian ideograph
- 0x4d5858: (0x9be3, 0),# East Asian ideograph
- 0x6f5d75: (0xd76c, 0),# Korean hangul
- 0x214521: (0x690e, 0),# East Asian ideograph
- 0x214522: (0x68c9, 0),# East Asian ideograph
- 0x214523: (0x6954, 0),# East Asian ideograph
- 0x214524: (0x6930, 0),# East Asian ideograph
- 0x214525: (0x6977, 0),# East Asian ideograph
- 0x214526: (0x6975, 0),# East Asian ideograph
- 0x214527: (0x695a, 0),# East Asian ideograph
- 0x214528: (0x6960, 0),# East Asian ideograph
- 0x214529: (0x696b, 0),# East Asian ideograph
- 0x21452a: (0x694a, 0),# East Asian ideograph
- 0x21452b: (0x6968, 0),# East Asian ideograph
- 0x21452c: (0x695e, 0),# East Asian ideograph
- 0x21452d: (0x696d, 0),# East Asian ideograph
- 0x21452e: (0x6979, 0),# East Asian ideograph
- 0x21452f: (0x6953, 0),# East Asian ideograph
- 0x214530: (0x6986, 0),# East Asian ideograph
- 0x214531: (0x69a8, 0),# East Asian ideograph
- 0x214532: (0x6995, 0),# East Asian ideograph
- 0x214533: (0x699c, 0),# East Asian ideograph
- 0x214534: (0x6994, 0),# East Asian ideograph
- 0x214535: (0x69c1, 0),# East Asian ideograph
- 0x214536: (0x69b7, 0),# East Asian ideograph
- 0x214537: (0x69ae, 0),# East Asian ideograph
- 0x214538: (0x699b, 0),# East Asian ideograph
- 0x214539: (0x69cb, 0),# East Asian ideograph
- 0x21453a: (0x69d3, 0),# East Asian ideograph
- 0x21453b: (0x69bb, 0),# East Asian ideograph
- 0x21453c: (0x69ab, 0),# East Asian ideograph
- 0x21453d: (0x69cc, 0),# East Asian ideograph
- 0x21453e: (0x69ad, 0),# East Asian ideograph
- 0x21453f: (0x69d0, 0),# East Asian ideograph
- 0x214540: (0x69cd, 0),# East Asian ideograph
- 0x214541: (0x69b4, 0),# East Asian ideograph
- 0x214542: (0x6a1f, 0),# East Asian ideograph
- 0x214543: (0x69e8, 0),# East Asian ideograph
- 0x274544: (0x6837, 0),# East Asian ideograph
- 0x214545: (0x69ea, 0),# East Asian ideograph
- 0x274546: (0x6869, 0),# East Asian ideograph
- 0x214547: (0x6a19, 0),# East Asian ideograph
- 0x214548: (0x69fd, 0),# East Asian ideograph
- 0x214549: (0x6a1e, 0),# East Asian ideograph
- 0x21454a: (0x6a13, 0),# East Asian ideograph
- 0x21454b: (0x6a21, 0),# East Asian ideograph
- 0x21454c: (0x69f3, 0),# East Asian ideograph
- 0x21454d: (0x6a0a, 0),# East Asian ideograph
- 0x21454e: (0x6a02, 0),# East Asian ideograph
- 0x21454f: (0x6a05, 0),# East Asian ideograph
- 0x214550: (0x6a3d, 0),# East Asian ideograph
- 0x214551: (0x6a58, 0),# East Asian ideograph
- 0x214552: (0x6a59, 0),# East Asian ideograph
- 0x214553: (0x6a62, 0),# East Asian ideograph
- 0x214554: (0x6a44, 0),# East Asian ideograph
- 0x214555: (0x6a39, 0),# East Asian ideograph
- 0x214556: (0x6a6b, 0),# East Asian ideograph
- 0x214557: (0x6a3a, 0),# East Asian ideograph
- 0x214558: (0x6a38, 0),# East Asian ideograph
- 0x214559: (0x6a47, 0),# East Asian ideograph
- 0x21455a: (0x6a61, 0),# East Asian ideograph
- 0x21455b: (0x6a4b, 0),# East Asian ideograph
- 0x21455c: (0x6a35, 0),# East Asian ideograph
- 0x21455d: (0x6a5f, 0),# East Asian ideograph
- 0x21455e: (0x6a80, 0),# East Asian ideograph
- 0x21455f: (0x6a94, 0),# East Asian ideograph
- 0x214560: (0x6a84, 0),# East Asian ideograph
- 0x214561: (0x6aa2, 0),# East Asian ideograph
- 0x214562: (0x6a9c, 0),# East Asian ideograph
- 0x214563: (0x6ab8, 0),# East Asian ideograph
- 0x214564: (0x6ab3, 0),# East Asian ideograph
- 0x214565: (0x6ac3, 0),# East Asian ideograph
- 0x214566: (0x6abb, 0),# East Asian ideograph
- 0x234567: (0x9354, 0),# East Asian ideograph
- 0x214568: (0x6aac, 0),# East Asian ideograph
- 0x214569: (0x6ae5, 0),# East Asian ideograph
- 0x21456a: (0x6ada, 0),# East Asian ideograph
- 0x21456b: (0x6add, 0),# East Asian ideograph
- 0x21456c: (0x6adb, 0),# East Asian ideograph
- 0x21456d: (0x6ad3, 0),# East Asian ideograph
- 0x21456e: (0x6b04, 0),# East Asian ideograph
- 0x21456f: (0x6afb, 0),# East Asian ideograph
- 0x214570: (0x6b0a, 0),# East Asian ideograph
- 0x214571: (0x6b16, 0),# East Asian ideograph
- 0x234572: (0x936d, 0),# East Asian ideograph
- 0x214573: (0x6b21, 0),# East Asian ideograph
- 0x214574: (0x6b23, 0),# East Asian ideograph
- 0x27363e: (0x5458, 0),# East Asian ideograph
- 0x214576: (0x6b3e, 0),# East Asian ideograph
- 0x214577: (0x6b3a, 0),# East Asian ideograph
- 0x214578: (0x6b3d, 0),# East Asian ideograph
- 0x214579: (0x6b47, 0),# East Asian ideograph
- 0x21457a: (0x6b49, 0),# East Asian ideograph
- 0x21457b: (0x6b4c, 0),# East Asian ideograph
- 0x21457c: (0x6b50, 0),# East Asian ideograph
- 0x21457d: (0x6b59, 0),# East Asian ideograph
- 0x21457e: (0x6b5f, 0),# East Asian ideograph
- 0x6f7640: (0xe8b2, 0),# Korean hangul
- 0x2d3b27: (0x51a8, 0),# East Asian ideograph
- 0x453421: (0x5271, 0),# East Asian ideograph
- 0x213641: (0x5506, 0),# East Asian ideograph
- 0x4c4d3d: (0x6f62, 0),# East Asian ideograph
- 0x2d3642: (0x6b38, 0),# East Asian ideograph
- 0x335f49: (0x9d70, 0),# East Asian ideograph
- 0x4d5b7e: (0x9dc6, 0),# East Asian ideograph
- 0x27516f: (0x7f2b, 0),# East Asian ideograph
- 0x213867: (0x58be, 0),# East Asian ideograph
- 0x213644: (0x5556, 0),# East Asian ideograph
- 0x213645: (0x5533, 0),# East Asian ideograph
- 0x6f4f4e: (0xb959, 0),# Korean hangul
- 0x275e39: (0x94d9, 0),# East Asian ideograph
- 0x6f5449: (0xc372, 0),# Korean hangul
- 0x234174: (0x91f4, 0),# East Asian ideograph
- 0x213647: (0x5537, 0),# East Asian ideograph (Version J extension)
- 0x2d3644: (0x5557, 0),# East Asian ideograph
- 0x275170: (0x7f2e, 0),# East Asian ideograph
- 0x6f553f: (0xc591, 0),# Korean hangul
- 0x213649: (0x555e, 0),# East Asian ideograph
- 0x276245: (0x9e4f, 0),# East Asian ideograph
- 0x275e3a: (0x9570, 0),# East Asian ideograph
- 0x6f764c: (0xe8be, 0),# Korean hangul
- 0x21764d: (0x57fd, 0),# East Asian ideograph
- 0x21764e: (0x57f8, 0),# East Asian ideograph
- 0x21364f: (0x5531, 0),# East Asian ideograph
- 0x2d5749: (0x885e, 0),# East Asian ideograph
- 0x275e3b: (0x9508, 0),# East Asian ideograph
- 0x21574e: (0x521d, 0),# East Asian ideograph
- 0x6f5859: (0xcac0, 0),# Korean hangul
- 0x233651: (0x8cba, 0),# East Asian ideograph
- 0x233652: (0x8cb5, 0),# East Asian ideograph
- 0x213653: (0x553e, 0),# East Asian ideograph
- 0x213654: (0x5563, 0),# East Asian ideograph
- 0x6f4f51: (0xb968, 0),# Korean hangul
- 0x275e3c: (0x956d, 0),# East Asian ideograph
- 0x234177: (0x91f1, 0),# East Asian ideograph
- 0x6f7656: (0xe8c8, 0),# Korean hangul
- 0x213657: (0x552e, 0),# East Asian ideograph
- 0x6f4a3a: (0xae38, 0),# Korean hangul
- 0x34682a: (0x7c7c, 0),# East Asian ideograph
- 0x275e3d: (0x94c1, 0),# East Asian ideograph
- 0x214621: (0x6b61, 0),# East Asian ideograph
- 0x234622: (0x938c, 0),# East Asian ideograph
- 0x214623: (0x6b63, 0),# East Asian ideograph
- 0x214624: (0x6b64, 0),# East Asian ideograph
- 0x214625: (0x6b65, 0),# East Asian ideograph
- 0x214627: (0x6b66, 0),# East Asian ideograph
- 0x214628: (0x6b6a, 0),# East Asian ideograph
- 0x214629: (0x6b72, 0),# East Asian ideograph
- 0x22462a: (0x6bf6, 0),# East Asian ideograph
- 0x21462b: (0x6b78, 0),# East Asian ideograph
- 0x21462c: (0x6b79, 0),# East Asian ideograph
- 0x21462d: (0x6b7b, 0),# East Asian ideograph
- 0x21462e: (0x6b7f, 0),# East Asian ideograph
- 0x21462f: (0x6b83, 0),# East Asian ideograph
- 0x214630: (0x6b86, 0),# East Asian ideograph
- 0x214631: (0x6b8a, 0),# East Asian ideograph
- 0x214632: (0x6b89, 0),# East Asian ideograph
- 0x214633: (0x6b98, 0),# East Asian ideograph
- 0x214634: (0x6b96, 0),# East Asian ideograph
- 0x214635: (0x6ba4, 0),# East Asian ideograph
- 0x214636: (0x6bae, 0),# East Asian ideograph
- 0x214637: (0x6baf, 0),# East Asian ideograph
- 0x214638: (0x6bb2, 0),# East Asian ideograph
- 0x214639: (0x6bb5, 0),# East Asian ideograph
- 0x21463a: (0x6bb7, 0),# East Asian ideograph
- 0x21463b: (0x6bba, 0),# East Asian ideograph
- 0x21463c: (0x6bbc, 0),# East Asian ideograph
- 0x21463d: (0x6bc0, 0),# East Asian ideograph
- 0x21463e: (0x6bbf, 0),# East Asian ideograph
- 0x21463f: (0x6bc5, 0),# East Asian ideograph
- 0x214640: (0x6bc6, 0),# East Asian ideograph
- 0x214641: (0x6bcb, 0),# East Asian ideograph
- 0x214642: (0x6bcd, 0),# East Asian ideograph
- 0x214643: (0x6bcf, 0),# East Asian ideograph
- 0x214644: (0x6bd2, 0),# East Asian ideograph
- 0x214646: (0x6bd4, 0),# East Asian ideograph
- 0x214647: (0x6bd7, 0),# East Asian ideograph
- 0x214648: (0x6bdb, 0),# East Asian ideograph
- 0x214649: (0x6beb, 0),# East Asian ideograph
- 0x21464a: (0x6bef, 0),# East Asian ideograph
- 0x21464b: (0x6bfd, 0),# East Asian ideograph
- 0x21464c: (0x6c0f, 0),# East Asian ideograph
- 0x21464d: (0x6c11, 0),# East Asian ideograph
- 0x21464e: (0x6c10, 0),# East Asian ideograph
- 0x21464f: (0x6c13, 0),# East Asian ideograph
- 0x214650: (0x6c16, 0),# East Asian ideograph
- 0x214651: (0x6c1b, 0),# East Asian ideograph
- 0x214652: (0x6c1f, 0),# East Asian ideograph
- 0x214653: (0x6c27, 0),# East Asian ideograph
- 0x214654: (0x6c26, 0),# East Asian ideograph
- 0x214655: (0x6c23, 0),# East Asian ideograph
- 0x214656: (0x6c28, 0),# East Asian ideograph
- 0x214657: (0x6c24, 0),# East Asian ideograph
- 0x214658: (0x6c2b, 0),# East Asian ideograph
- 0x214659: (0x6c2e, 0),# East Asian ideograph
- 0x21465a: (0x6c33, 0),# East Asian ideograph
- 0x21465b: (0x6c2f, 0),# East Asian ideograph (variant of 45465B which maps to 6C2F)
- 0x21465c: (0x6c34, 0),# East Asian ideograph
- 0x21465d: (0x6c38, 0),# East Asian ideograph
- 0x21465e: (0x6c41, 0),# East Asian ideograph
- 0x23465f: (0x93e5, 0),# East Asian ideograph
- 0x214660: (0x6c40, 0),# East Asian ideograph
- 0x214661: (0x6c42, 0),# East Asian ideograph
- 0x214662: (0x6c5e, 0),# East Asian ideograph
- 0x214663: (0x6c57, 0),# East Asian ideograph
- 0x214664: (0x6c5f, 0),# East Asian ideograph
- 0x214665: (0x6c59, 0),# East Asian ideograph
- 0x214666: (0x6c60, 0),# East Asian ideograph
- 0x214667: (0x6c55, 0),# East Asian ideograph
- 0x214668: (0x6c50, 0),# East Asian ideograph
- 0x214669: (0x6c5d, 0),# East Asian ideograph
- 0x21466a: (0x6c9b, 0),# East Asian ideograph
- 0x21466b: (0x6c81, 0),# East Asian ideograph
- 0x21466d: (0x6c7a, 0),# East Asian ideograph
- 0x21466e: (0x6c6a, 0),# East Asian ideograph
- 0x21466f: (0x6c8c, 0),# East Asian ideograph
- 0x214670: (0x6c90, 0),# East Asian ideograph
- 0x214671: (0x6c72, 0),# East Asian ideograph
- 0x214672: (0x6c70, 0),# East Asian ideograph
- 0x214673: (0x6c68, 0),# East Asian ideograph
- 0x214674: (0x6c96, 0),# East Asian ideograph
- 0x234675: (0x93db, 0),# East Asian ideograph
- 0x214676: (0x6c89, 0),# East Asian ideograph (variant of 4B4676 which maps to 6C89)
- 0x214677: (0x6c99, 0),# East Asian ideograph
- 0x214678: (0x6c7e, 0),# East Asian ideograph
- 0x214679: (0x6c7d, 0),# East Asian ideograph
- 0x21467a: (0x6c92, 0),# East Asian ideograph
- 0x21467b: (0x6c83, 0),# East Asian ideograph
- 0x21467c: (0x6cb1, 0),# East Asian ideograph
- 0x23366a: (0x8ce1, 0),# East Asian ideograph
- 0x21467e: (0x6cf3, 0),# East Asian ideograph
- 0x21366b: (0x559d, 0),# East Asian ideograph
- 0x2d5421: (0x9ad7, 0),# East Asian ideograph
- 0x6f4a56: (0xae7d, 0),# Korean hangul
- 0x4c4359: (0x6b05, 0),# East Asian ideograph
- 0x27366d: (0x5524, 0),# East Asian ideograph
- 0x21366e: (0x557e, 0),# East Asian ideograph
- 0x294869: (0x9567, 0),# East Asian ideograph
- 0x284027: (0x6864, 0),# East Asian ideograph
- 0x21366f: (0x55ac, 0),# East Asian ideograph
- 0x213670: (0x5589, 0),# East Asian ideograph
- 0x223671: (0x6595, 0),# East Asian ideograph
- 0x213672: (0x55bb, 0),# East Asian ideograph
- 0x27406c: (0x631f, 0),# East Asian ideograph
- 0x4c3b60: (0x6764, 0),# East Asian ideograph
- 0x294228: (0x94ac, 0),# East Asian ideograph
- 0x213674: (0x55df, 0),# East Asian ideograph
- 0x213675: (0x55d1, 0),# East Asian ideograph
- 0x213869: (0x58d3, 0),# East Asian ideograph
- 0x28734e: (0x7f32, 0),# East Asian ideograph
- 0x233676: (0x8cee, 0),# East Asian ideograph
- 0x216121: (0x993f, 0),# East Asian ideograph
- 0x213677: (0x55e6, 0),# East Asian ideograph
- 0x4b6122: (0x994b, 0),# East Asian ideograph
- 0x6f4f58: (0xb985, 0),# Korean hangul
- 0x273678: (0x556c, 0),# East Asian ideograph
- 0x275e43: (0x94f8, 0),# East Asian ideograph
- 0x216123: (0x9945, 0),# East Asian ideograph
- 0x6f5263: (0xc0b0, 0),# Korean hangul
- 0x21353d: (0x53f2, 0),# East Asian ideograph
- 0x276124: (0x9976, 0),# East Asian ideograph
- 0x27367a: (0x5417, 0),# East Asian ideograph
- 0x2d5424: (0x5367, 0),# East Asian ideograph
- 0x23367b: (0x8cf1, 0),# East Asian ideograph
- 0x216126: (0x995c, 0),# East Asian ideograph
- 0x6f5851: (0xca54, 0),# Korean hangul
- 0x21367c: (0x55ef, 0),# East Asian ideograph
- 0x2d475b: (0x51c9, 0),# East Asian ideograph
- 0x276127: (0x998b, 0),# East Asian ideograph
- 0x6f4f59: (0xb987, 0),# Korean hangul
- 0x21767d: (0x5844, 0),# East Asian ideograph
- 0x275e44: (0x9573, 0),# East Asian ideograph
- 0x21367e: (0x55c5, 0),# East Asian ideograph
- 0x396c6b: (0x60a4, 0),# East Asian ideograph
- 0x6f5b37: (0xd168, 0),# Korean hangul
- 0x213e61: (0x60e1, 0),# East Asian ideograph
- 0x224a4a: (0x6de6, 0),# East Asian ideograph
- 0x4b5d34: (0x91b8, 0),# East Asian ideograph
- 0x27612c: (0x9a6c, 0),# East Asian ideograph
- 0x217971: (0x59a0, 0),# East Asian ideograph
- 0x21353f: (0x540b, 0),# East Asian ideograph
- 0x27612e: (0x9a6d, 0),# East Asian ideograph
- 0x2d6260: (0x5e85, 0),# East Asian ideograph
- 0x27612f: (0x9a70, 0),# East Asian ideograph
- 0x287351: (0x7f33, 0),# East Asian ideograph
- 0x214721: (0x6ce3, 0),# East Asian ideograph
- 0x214722: (0x6cf0, 0),# East Asian ideograph
- 0x214723: (0x6cb8, 0),# East Asian ideograph
- 0x214724: (0x6cd3, 0),# East Asian ideograph
- 0x214725: (0x6cab, 0),# East Asian ideograph
- 0x214726: (0x6ce5, 0),# East Asian ideograph
- 0x214727: (0x6cbd, 0),# East Asian ideograph
- 0x214728: (0x6cb3, 0),# East Asian ideograph
- 0x214729: (0x6cc4, 0),# East Asian ideograph
- 0x21472a: (0x6cd5, 0),# East Asian ideograph
- 0x21472b: (0x6ce2, 0),# East Asian ideograph
- 0x21472c: (0x6cbc, 0),# East Asian ideograph
- 0x21472d: (0x6cae, 0),# East Asian ideograph
- 0x21472e: (0x6cb9, 0),# East Asian ideograph
- 0x21472f: (0x6cf1, 0),# East Asian ideograph
- 0x214730: (0x6cc1, 0),# East Asian ideograph
- 0x214731: (0x6cbe, 0),# East Asian ideograph
- 0x214732: (0x6cc5, 0),# East Asian ideograph
- 0x214733: (0x6cd7, 0),# East Asian ideograph
- 0x234734: (0x9413, 0),# East Asian ideograph
- 0x214735: (0x6cdb, 0),# East Asian ideograph
- 0x214736: (0x6ce1, 0),# East Asian ideograph
- 0x214737: (0x6cbf, 0),# East Asian ideograph
- 0x214738: (0x6cca, 0),# East Asian ideograph
- 0x214739: (0x6ccc, 0),# East Asian ideograph
- 0x21473a: (0x6cc9, 0),# East Asian ideograph
- 0x21473b: (0x6d41, 0),# East Asian ideograph
- 0x21473c: (0x6d0b, 0),# East Asian ideograph
- 0x21473d: (0x6d32, 0),# East Asian ideograph
- 0x21473e: (0x6d25, 0),# East Asian ideograph
- 0x21473f: (0x6d31, 0),# East Asian ideograph
- 0x214740: (0x6d2a, 0),# East Asian ideograph
- 0x214741: (0x6d0c, 0),# East Asian ideograph
- 0x214742: (0x6d1e, 0),# East Asian ideograph
- 0x214743: (0x6d17, 0),# East Asian ideograph
- 0x214744: (0x6d3b, 0),# East Asian ideograph
- 0x214745: (0x6d1b, 0),# East Asian ideograph
- 0x214746: (0x6d36, 0),# East Asian ideograph
- 0x214747: (0x6d3d, 0),# East Asian ideograph
- 0x214748: (0x6d3e, 0),# East Asian ideograph
- 0x214749: (0x6d6a, 0),# East Asian ideograph
- 0x21474a: (0x6d95, 0),# East Asian ideograph
- 0x21474b: (0x6d78, 0),# East Asian ideograph
- 0x21474c: (0x6d66, 0),# East Asian ideograph
- 0x21474d: (0x6d59, 0),# East Asian ideograph
- 0x21474e: (0x6d87, 0),# East Asian ideograph
- 0x21474f: (0x6d88, 0),# East Asian ideograph
- 0x214750: (0x6d6c, 0),# East Asian ideograph
- 0x214751: (0x6d93, 0),# East Asian ideograph
- 0x214752: (0x6d89, 0),# East Asian ideograph
- 0x214753: (0x6d6e, 0),# East Asian ideograph
- 0x214754: (0x6d74, 0),# East Asian ideograph
- 0x214755: (0x6d5a, 0),# East Asian ideograph
- 0x214756: (0x6d69, 0),# East Asian ideograph
- 0x214757: (0x6d77, 0),# East Asian ideograph
- 0x214758: (0x6dd9, 0),# East Asian ideograph
- 0x214759: (0x6dda, 0),# East Asian ideograph
- 0x21475a: (0x6df3, 0),# East Asian ideograph
- 0x21475b: (0x6dbc, 0),# East Asian ideograph
- 0x21475c: (0x6de4, 0),# East Asian ideograph
- 0x21475d: (0x6db2, 0),# East Asian ideograph
- 0x21475e: (0x6de1, 0),# East Asian ideograph
- 0x21475f: (0x6dd2, 0),# East Asian ideograph
- 0x214760: (0x6dae, 0),# East Asian ideograph
- 0x214761: (0x6df8, 0),# East Asian ideograph
- 0x214762: (0x6dc7, 0),# East Asian ideograph
- 0x214763: (0x6dcb, 0),# East Asian ideograph
- 0x214764: (0x6dc5, 0),# East Asian ideograph
- 0x214765: (0x6dde, 0),# East Asian ideograph
- 0x214766: (0x6daf, 0),# East Asian ideograph
- 0x214767: (0x6db5, 0),# East Asian ideograph
- 0x214768: (0x6dfa, 0),# East Asian ideograph
- 0x214769: (0x6df9, 0),# East Asian ideograph
- 0x21476a: (0x6dcc, 0),# East Asian ideograph
- 0x21476b: (0x6df7, 0),# East Asian ideograph
- 0x21476c: (0x6db8, 0),# East Asian ideograph
- 0x21476d: (0x6dd1, 0),# East Asian ideograph
- 0x21476e: (0x6df1, 0),# East Asian ideograph
- 0x21476f: (0x6de8, 0),# East Asian ideograph
- 0x214770: (0x6deb, 0),# East Asian ideograph
- 0x214771: (0x6dd8, 0),# East Asian ideograph
- 0x214772: (0x6dfb, 0),# East Asian ideograph
- 0x214773: (0x6dee, 0),# East Asian ideograph
- 0x214774: (0x6df5, 0),# East Asian ideograph
- 0x214775: (0x6d8e, 0),# East Asian ideograph
- 0x214776: (0x6dc6, 0),# East Asian ideograph
- 0x214777: (0x6dea, 0),# East Asian ideograph
- 0x214778: (0x6dc4, 0),# East Asian ideograph
- 0x214779: (0x6e54, 0),# East Asian ideograph
- 0x21477a: (0x6e21, 0),# East Asian ideograph
- 0x21477b: (0x6e38, 0),# East Asian ideograph
- 0x21477c: (0x6e32, 0),# East Asian ideograph
- 0x21477d: (0x6e67, 0),# East Asian ideograph
- 0x21477e: (0x6e20, 0),# East Asian ideograph
- 0x4b5d38: (0x91c8, 0),# East Asian ideograph
- 0x226140: (0x76ec, 0),# East Asian ideograph
- 0x6f4f5e: (0xb9b0, 0),# Korean hangul
- 0x6f5936: (0xcc64, 0),# Korean hangul
- 0x276141: (0x9a97, 0),# East Asian ideograph
- 0x6f5777: (0xc950, 0),# Korean hangul
- 0x29442e: (0x9502, 0),# East Asian ideograph
- 0x276142: (0x9a9b, 0),# East Asian ideograph
- 0x276143: (0x9a9e, 0),# East Asian ideograph
- 0x274d3d: (0x76d1, 0),# East Asian ideograph
- 0x6f5c28: (0xd38d, 0),# Korean hangul
- 0x216144: (0x9a30, 0),# East Asian ideograph
- 0x4b624f: (0x9d49, 0),# East Asian ideograph
- 0x276145: (0x9a9a, 0),# East Asian ideograph
- 0x6f4f5f: (0xb9b4, 0),# Korean hangul
- 0x275e4a: (0x94bb, 0),# East Asian ideograph
- 0x273c31: (0x5ce6, 0),# East Asian ideograph
- 0x21575d: (0x88c2, 0),# East Asian ideograph
- 0x6f585c: (0xcacd, 0),# Korean hangul
- 0x217533: (0x5788, 0),# East Asian ideograph
- 0x276147: (0x9a71, 0),# East Asian ideograph
- 0x213860: (0x58b3, 0),# East Asian ideograph
- 0x216148: (0x9a40, 0),# East Asian ideograph
- 0x6f772d: (0xae5f, 0),# Korean hangul
- 0x287236: (0x7f07, 0),# East Asian ideograph
- 0x6f5c29: (0xd38f, 0),# Korean hangul
- 0x276149: (0x9aa1, 0),# East Asian ideograph
- 0x27614a: (0x9a84, 0),# East Asian ideograph
- 0x6f4c6d: (0xb2e4, 0),# Korean hangul
- 0x6f4f60: (0xb9bc, 0),# Korean hangul
- 0x22614b: (0x7704, 0),# East Asian ideograph
- 0x225b5d: (0x74a1, 0),# East Asian ideograph
- 0x27614c: (0x9a7f, 0),# East Asian ideograph
- 0x27614d: (0x9a8c, 0),# East Asian ideograph
- 0x27614e: (0x9aa4, 0),# East Asian ideograph
- 0x21614f: (0x9a62, 0),# East Asian ideograph
- 0x6f4f61: (0xb9bd, 0),# Korean hangul
- 0x275e4c: (0x957f, 0),# East Asian ideograph
- 0x216150: (0x9a65, 0),# East Asian ideograph
- 0x21575f: (0x88df, 0),# East Asian ideograph
- 0x216151: (0x9a6a, 0),# East Asian ideograph
- 0x226153: (0x76f7, 0),# East Asian ideograph
- 0x293325: (0x8bf9, 0),# East Asian ideograph
- 0x216154: (0x9ab0, 0),# East Asian ideograph
- 0x6f4f62: (0xb9bf, 0),# Korean hangul
- 0x235f5e: (0x9f39, 0),# East Asian ideograph
- 0x213f3d: (0x61a7, 0),# East Asian ideograph
- 0x216157: (0x9abc, 0),# East Asian ideograph
- 0x287359: (0x7f31, 0),# East Asian ideograph
- 0x276158: (0x9ac5, 0),# East Asian ideograph
- 0x216159: (0x9ad3, 0),# East Asian ideograph
- 0x6f4f63: (0xb9c1, 0),# Korean hangul
- 0x275e4e: (0x95e9, 0),# East Asian ideograph
- 0x27615a: (0x4f53, 0),# East Asian ideograph
- 0x277c24: (0x5a32, 0),# East Asian ideograph
- 0x456036: (0x97ff, 0),# East Asian ideograph
- 0x214821: (0x6e5b, 0),# East Asian ideograph
- 0x214822: (0x6e1a, 0),# East Asian ideograph
- 0x214823: (0x6e56, 0),# East Asian ideograph
- 0x214824: (0x6e2f, 0),# East Asian ideograph
- 0x214825: (0x6e6e, 0),# East Asian ideograph
- 0x214826: (0x6e58, 0),# East Asian ideograph
- 0x214827: (0x6e23, 0),# East Asian ideograph
- 0x214828: (0x6e24, 0),# East Asian ideograph
- 0x214829: (0x6e1b, 0),# East Asian ideograph
- 0x21482a: (0x6e25, 0),# East Asian ideograph
- 0x21482b: (0x6e4a, 0),# East Asian ideograph
- 0x21482c: (0x6e3a, 0),# East Asian ideograph
- 0x21482d: (0x6e6f, 0),# East Asian ideograph
- 0x21482e: (0x6e2d, 0),# East Asian ideograph
- 0x22482f: (0x6ce0, 0),# East Asian ideograph
- 0x214830: (0x6e2c, 0),# East Asian ideograph
- 0x214831: (0x6e26, 0),# East Asian ideograph
- 0x214832: (0x6e4d, 0),# East Asian ideograph
- 0x214833: (0x6e3e, 0),# East Asian ideograph
- 0x214834: (0x6e43, 0),# East Asian ideograph
- 0x214835: (0x6e19, 0),# East Asian ideograph
- 0x214836: (0x6e1d, 0),# East Asian ideograph
- 0x214837: (0x6ed3, 0),# East Asian ideograph
- 0x214838: (0x6eb6, 0),# East Asian ideograph
- 0x214839: (0x6ec2, 0),# East Asian ideograph
- 0x21483b: (0x6eaf, 0),# East Asian ideograph
- 0x21483c: (0x6ea2, 0),# East Asian ideograph
- 0x27483d: (0x6c9f, 0),# East Asian ideograph
- 0x23483e: (0x944c, 0),# East Asian ideograph
- 0x21483f: (0x6ea5, 0),# East Asian ideograph
- 0x214840: (0x6e98, 0),# East Asian ideograph
- 0x214841: (0x6e90, 0),# East Asian ideograph
- 0x214842: (0x6ec5, 0),# East Asian ideograph
- 0x214843: (0x6ec7, 0),# East Asian ideograph
- 0x214844: (0x6ebc, 0),# East Asian ideograph
- 0x214845: (0x6eab, 0),# East Asian ideograph
- 0x214846: (0x6ed1, 0),# East Asian ideograph
- 0x214847: (0x6ecb, 0),# East Asian ideograph
- 0x214848: (0x6ec4, 0),# East Asian ideograph
- 0x214849: (0x6ed4, 0),# East Asian ideograph
- 0x21484a: (0x6eaa, 0),# East Asian ideograph
- 0x21484b: (0x6e96, 0),# East Asian ideograph
- 0x21484c: (0x6e9c, 0),# East Asian ideograph
- 0x21484d: (0x6f33, 0),# East Asian ideograph
- 0x21484e: (0x6ef4, 0),# East Asian ideograph
- 0x21484f: (0x6eec, 0),# East Asian ideograph
- 0x214850: (0x6efe, 0),# East Asian ideograph
- 0x214851: (0x6f29, 0),# East Asian ideograph
- 0x214852: (0x6f14, 0),# East Asian ideograph
- 0x214853: (0x6f3e, 0),# East Asian ideograph
- 0x214854: (0x6f2c, 0),# East Asian ideograph
- 0x214855: (0x6f32, 0),# East Asian ideograph
- 0x214856: (0x6f0f, 0),# East Asian ideograph
- 0x214857: (0x6f22, 0),# East Asian ideograph (variant of 4B4857 which maps to 6F22)
- 0x214858: (0x6eff, 0),# East Asian ideograph
- 0x214859: (0x6f23, 0),# East Asian ideograph
- 0x21485a: (0x6f38, 0),# East Asian ideograph
- 0x21485b: (0x6f15, 0),# East Asian ideograph
- 0x21485c: (0x6f31, 0),# East Asian ideograph
- 0x21485d: (0x6f02, 0),# East Asian ideograph
- 0x21485e: (0x6f06, 0),# East Asian ideograph
- 0x21485f: (0x6eef, 0),# East Asian ideograph
- 0x214860: (0x6f2b, 0),# East Asian ideograph
- 0x214861: (0x6f2f, 0),# East Asian ideograph
- 0x214862: (0x6f20, 0),# East Asian ideograph
- 0x214863: (0x6f3f, 0),# East Asian ideograph
- 0x214864: (0x6ef2, 0),# East Asian ideograph
- 0x214865: (0x6f01, 0),# East Asian ideograph
- 0x214866: (0x6f11, 0),# East Asian ideograph
- 0x214867: (0x6ecc, 0),# East Asian ideograph
- 0x214868: (0x6f2a, 0),# East Asian ideograph
- 0x214869: (0x6f7c, 0),# East Asian ideograph
- 0x21486a: (0x6f88, 0),# East Asian ideograph
- 0x21486b: (0x6f84, 0),# East Asian ideograph
- 0x21486c: (0x6f51, 0),# East Asian ideograph
- 0x21486d: (0x6f64, 0),# East Asian ideograph
- 0x21486e: (0x6f97, 0),# East Asian ideograph
- 0x21486f: (0x6f54, 0),# East Asian ideograph
- 0x214870: (0x6f7a, 0),# East Asian ideograph
- 0x214871: (0x6f86, 0),# East Asian ideograph
- 0x214872: (0x6f8e, 0),# East Asian ideograph
- 0x214873: (0x6f6d, 0),# East Asian ideograph
- 0x214874: (0x6f5b, 0),# East Asian ideograph
- 0x214875: (0x6f6e, 0),# East Asian ideograph
- 0x214876: (0x6f78, 0),# East Asian ideograph
- 0x214877: (0x6f66, 0),# East Asian ideograph
- 0x214878: (0x6f70, 0),# East Asian ideograph
- 0x214879: (0x6f58, 0),# East Asian ideograph
- 0x21487a: (0x6fc2, 0),# East Asian ideograph
- 0x21487b: (0x6fb1, 0),# East Asian ideograph
- 0x21487c: (0x6fc3, 0),# East Asian ideograph
- 0x21487d: (0x6fa7, 0),# East Asian ideograph
- 0x21487e: (0x6fa1, 0),# East Asian ideograph
- 0x4d5875: (0x9cd0, 0),# East Asian ideograph
- 0x2d5d65: (0x8216, 0),# East Asian ideograph
- 0x28735d: (0x7ea9, 0),# East Asian ideograph
- 0x6f5c30: (0xd3a8, 0),# Korean hangul
- 0x22616c: (0x7722, 0),# East Asian ideograph
- 0x22616d: (0x771a, 0),# East Asian ideograph
- 0x6f4f67: (0xb9c9, 0),# Korean hangul
- 0x6f4b24: (0xafbc, 0),# Korean hangul
- 0x21616f: (0x9b45, 0),# East Asian ideograph
- 0x28336f: (0x629f, 0),# East Asian ideograph
- 0x6f5c31: (0xd3a9, 0),# Korean hangul
- 0x6f245e: (0x3147, 0),# Korean hangul
- 0x4b5d42: (0x91e1, 0),# East Asian ideograph
- 0x27407d: (0x626b, 0),# East Asian ideograph
- 0x6f5029: (0xba4e, 0),# Korean hangul
- 0x2d4327: (0x6630, 0),# East Asian ideograph
- 0x275e53: (0x5f00, 0),# East Asian ideograph
- 0x276173: (0x9b47, 0),# East Asian ideograph
- 0x3f3573: (0x8b3c, 0),# East Asian ideograph
- 0x226174: (0x7740, 0),# East Asian ideograph
- 0x6f4e41: (0xb610, 0),# Korean hangul
- 0x4b4c36: (0x7575, 0),# East Asian ideograph
- 0x216175: (0x9b77, 0),# East Asian ideograph
- 0x224b34: (0x6e53, 0),# East Asian ideograph
- 0x216176: (0x9b6f, 0),# East Asian ideograph
- 0x214c21: (0x752c, 0),# East Asian ideograph
- 0x226177: (0x7731, 0),# East Asian ideograph
- 0x214c22: (0x752b, 0),# East Asian ideograph
- 0x6f4f69: (0xb9ce, 0),# Korean hangul
- 0x276178: (0x9c9b, 0),# East Asian ideograph
- 0x6f4b26: (0xafc7, 0),# Korean hangul
- 0x276179: (0x9c9c, 0),# East Asian ideograph
- 0x295269: (0x9a80, 0),# East Asian ideograph
- 0x214c24: (0x7530, 0),# East Asian ideograph
- 0x27617a: (0x9c94, 0),# East Asian ideograph
- 0x2f3143: (0x89f5, 0),# Unrelated variant of EACC 23315E which maps to 89F5
- 0x213a6b: (0x5b8f, 0),# East Asian ideograph
- 0x27617b: (0x9ca8, 0),# East Asian ideograph
- 0x214c26: (0x7531, 0),# East Asian ideograph
- 0x27617c: (0x9ca4, 0),# East Asian ideograph
- 0x214c27: (0x7533, 0),# East Asian ideograph
- 0x6f4f6a: (0xb9cf, 0),# Korean hangul
- 0x275e55: (0x95f4, 0),# East Asian ideograph
- 0x27617d: (0x9cb8, 0),# East Asian ideograph
- 0x6f4b27: (0xafc8, 0),# Korean hangul
- 0x29593b: (0x9c9f, 0),# East Asian ideograph
- 0x27617e: (0x9cb3, 0),# East Asian ideograph
- 0x214c29: (0x7538, 0),# East Asian ideograph
- 0x215b60: (0x8fad, 0),# East Asian ideograph
- 0x214c2a: (0x753d, 0),# East Asian ideograph
- 0x294237: (0x949b, 0),# East Asian ideograph
- 0x6f5c34: (0xd3b4, 0),# Korean hangul
- 0x39553c: (0x5d0b, 0),# East Asian ideograph
- 0x6f5360: (0xc228, 0),# Korean hangul
- 0x6f4c2b: (0xb153, 0),# Korean hangul
- 0x2d4c2c: (0x583a, 0),# East Asian ideograph
- 0x6f4f6b: (0xb9d0, 0),# Korean hangul
- 0x274c2d: (0x4ea9, 0),# East Asian ideograph
- 0x214c2e: (0x755c, 0),# East Asian ideograph
- 0x2d3661: (0x6199, 0),# East Asian ideograph
- 0x6f4c2f: (0xb15c, 0),# Korean hangul
- 0x214921: (0x6fa4, 0),# East Asian ideograph
- 0x214922: (0x6fc1, 0),# East Asian ideograph
- 0x214924: (0x6fc0, 0),# East Asian ideograph
- 0x214925: (0x6fb3, 0),# East Asian ideograph
- 0x214926: (0x6fdf, 0),# East Asian ideograph
- 0x214927: (0x6fd8, 0),# East Asian ideograph
- 0x214928: (0x6ff1, 0),# East Asian ideograph
- 0x214929: (0x6fe0, 0),# East Asian ideograph
- 0x21492a: (0x6fef, 0),# East Asian ideograph
- 0x21492b: (0x6feb, 0),# East Asian ideograph (variant of 4B492B which maps to 6FEB)
- 0x21492c: (0x6fe1, 0),# East Asian ideograph
- 0x21492d: (0x6fe4, 0),# East Asian ideograph
- 0x21492e: (0x6f80, 0),# East Asian ideograph
- 0x22492f: (0x6d34, 0),# East Asian ideograph (variant of 34492F which maps to 6D34)
- 0x234930: (0x9588, 0),# East Asian ideograph
- 0x214931: (0x700b, 0),# East Asian ideograph
- 0x214932: (0x7009, 0),# East Asian ideograph
- 0x214933: (0x7006, 0),# East Asian ideograph
- 0x214934: (0x6ffa, 0),# East Asian ideograph
- 0x214935: (0x7011, 0),# East Asian ideograph
- 0x214936: (0x6ffe, 0),# East Asian ideograph
- 0x214937: (0x700f, 0),# East Asian ideograph
- 0x234938: (0x959f, 0),# East Asian ideograph
- 0x214939: (0x701a, 0),# East Asian ideograph
- 0x23493a: (0x95a0, 0),# East Asian ideograph
- 0x21493b: (0x701d, 0),# East Asian ideograph
- 0x22493c: (0x6d65, 0),# East Asian ideograph
- 0x21493d: (0x701f, 0),# East Asian ideograph
- 0x22493e: (0x6d5e, 0),# East Asian ideograph
- 0x21493f: (0x703e, 0),# East Asian ideograph
- 0x214940: (0x704c, 0),# East Asian ideograph
- 0x214941: (0x7051, 0),# East Asian ideograph
- 0x214942: (0x7058, 0),# East Asian ideograph
- 0x274943: (0x6e7e, 0),# East Asian ideograph
- 0x214944: (0x7064, 0),# East Asian ideograph
- 0x214945: (0x706b, 0),# East Asian ideograph
- 0x214946: (0x7070, 0),# East Asian ideograph
- 0x214947: (0x7076, 0),# East Asian ideograph
- 0x214948: (0x707c, 0),# East Asian ideograph
- 0x214949: (0x7078, 0),# East Asian ideograph
- 0x21494a: (0x707d, 0),# East Asian ideograph
- 0x21494b: (0x7095, 0),# East Asian ideograph
- 0x21494c: (0x708e, 0),# East Asian ideograph
- 0x23494d: (0x95b9, 0),# East Asian ideograph
- 0x21494e: (0x7099, 0),# East Asian ideograph
- 0x21494f: (0x708a, 0),# East Asian ideograph
- 0x214950: (0x70ab, 0),# East Asian ideograph
- 0x214951: (0x70ba, 0),# East Asian ideograph
- 0x214952: (0x70ac, 0),# East Asian ideograph
- 0x214953: (0x70b3, 0),# East Asian ideograph
- 0x214954: (0x70af, 0),# East Asian ideograph
- 0x214955: (0x70ad, 0),# East Asian ideograph
- 0x214956: (0x70ae, 0),# East Asian ideograph
- 0x214957: (0x70b8, 0),# East Asian ideograph
- 0x214958: (0x70ca, 0),# East Asian ideograph
- 0x214959: (0x70e4, 0),# East Asian ideograph
- 0x21495a: (0x70d8, 0),# East Asian ideograph
- 0x21495b: (0x70c8, 0),# East Asian ideograph
- 0x21495c: (0x70d9, 0),# East Asian ideograph
- 0x23495d: (0x95ce, 0),# East Asian ideograph
- 0x21495e: (0x70f9, 0),# East Asian ideograph
- 0x21495f: (0x7109, 0),# East Asian ideograph
- 0x214960: (0x710a, 0),# East Asian ideograph
- 0x214961: (0x70fd, 0),# East Asian ideograph
- 0x214962: (0x7119, 0),# East Asian ideograph
- 0x214963: (0x716e, 0),# East Asian ideograph
- 0x214964: (0x711a, 0),# East Asian ideograph
- 0x214965: (0x7136, 0),# East Asian ideograph
- 0x214966: (0x7121, 0),# East Asian ideograph
- 0x214967: (0x7130, 0),# East Asian ideograph
- 0x214968: (0x7126, 0),# East Asian ideograph
- 0x214969: (0x714e, 0),# East Asian ideograph
- 0x21496a: (0x7149, 0),# East Asian ideograph
- 0x21496b: (0x7159, 0),# East Asian ideograph
- 0x21496c: (0x7164, 0),# East Asian ideograph
- 0x21496d: (0x7169, 0),# East Asian ideograph
- 0x21496e: (0x715c, 0),# East Asian ideograph
- 0x21496f: (0x716c, 0),# East Asian ideograph
- 0x214970: (0x7166, 0),# East Asian ideograph
- 0x214971: (0x7167, 0),# East Asian ideograph
- 0x214972: (0x715e, 0),# East Asian ideograph
- 0x214973: (0x7165, 0),# East Asian ideograph
- 0x214974: (0x714c, 0),# East Asian ideograph
- 0x214975: (0x717d, 0),# East Asian ideograph
- 0x234976: (0x95e7, 0),# East Asian ideograph
- 0x214977: (0x7199, 0),# East Asian ideograph
- 0x214978: (0x718a, 0),# East Asian ideograph
- 0x214979: (0x7184, 0),# East Asian ideograph
- 0x21497a: (0x719f, 0),# East Asian ideograph
- 0x21497b: (0x71a8, 0),# East Asian ideograph
- 0x21497c: (0x71ac, 0),# East Asian ideograph
- 0x21497d: (0x71b1, 0),# East Asian ideograph
- 0x21497e: (0x71d9, 0),# East Asian ideograph
- 0x6f4c40: (0xb1dc, 0),# Korean hangul
- 0x2d432e: (0x66ec, 0),# East Asian ideograph
- 0x214c41: (0x7599, 0),# East Asian ideograph
- 0x395e71: (0x5742, 0),# East Asian ideograph
- 0x214c42: (0x759a, 0),# East Asian ideograph
- 0x6f586e: (0xcb64, 0),# Korean hangul
- 0x214c43: (0x75a4, 0),# East Asian ideograph
- 0x6f5c39: (0xd3c5, 0),# Korean hangul
- 0x224c44: (0x6f00, 0),# East Asian ideograph
- 0x4b3b31: (0x5b9f, 0),# East Asian ideograph
- 0x6f4c45: (0xb208, 0),# Korean hangul
- 0x6f4f70: (0xb9dd, 0),# Korean hangul
- 0x275e5b: (0x9601, 0),# East Asian ideograph
- 0x6f4b2d: (0xafd8, 0),# Korean hangul
- 0x294440: (0x9506, 0),# East Asian ideograph
- 0x333475: (0x9628, 0),# East Asian ideograph
- 0x234c47: (0x9723, 0),# East Asian ideograph
- 0x27727e: (0x54d9, 0),# East Asian ideograph
- 0x21386e: (0x58de, 0),# East Asian ideograph
- 0x6f4c48: (0xb213, 0),# Korean hangul
- 0x6f5c3a: (0xd3c8, 0),# Korean hangul
- 0x214c49: (0x75b2, 0),# East Asian ideograph
- 0x234e60: (0x97e1, 0),# East Asian ideograph
- 0x214c4a: (0x75bd, 0),# East Asian ideograph
- 0x6f4f71: (0xb9de, 0),# Korean hangul
- 0x275e5c: (0x9600, 0),# East Asian ideograph
- 0x6f4877: (0xac2d, 0),# Korean hangul
- 0x214c4b: (0x75be, 0),# East Asian ideograph
- 0x294441: (0x9507, 0),# East Asian ideograph
- 0x333d54: (0x4efd, 0),# East Asian ideograph
- 0x695c71: (0x6a78, 0),# East Asian ideograph
- 0x276f69: (0x5459, 0),# East Asian ideograph
- 0x6f5c3b: (0xd3c9, 0),# Korean hangul
- 0x70603a: (0x55ea, 0),# East Asian ideograph
- 0x69554e: (0x5b36, 0),# East Asian ideograph
- 0x214c4e: (0x75d5, 0),# East Asian ideograph
- 0x6f5852: (0xca5c, 0),# Korean hangul
- 0x6f2460: (0x314a, 0),# Korean hangul
- 0x6f4c4f: (0xb258, 0),# Korean hangul
- 0x6f4f72: (0xb9e1, 0),# Korean hangul
- 0x275e5d: (0x5408, 0),# East Asian ideograph
- 0x214c50: (0x75b5, 0),# East Asian ideograph
- 0x214c51: (0x75ca, 0),# East Asian ideograph (variant of 4B4C51 which maps to 75CA)
- 0x2e3f2d: (0x69b2, 0),# East Asian ideograph
- 0x2f2a73: (0x87ca, 0),# East Asian ideograph
- 0x214c52: (0x75db, 0),# East Asian ideograph
- 0x6f5c3c: (0xd3d0, 0),# Korean hangul
- 0x285150: (0x70c3, 0),# East Asian ideograph
- 0x213e66: (0x60b2, 0),# East Asian ideograph
- 0x293336: (0x8be4, 0),# East Asian ideograph
- 0x6f4c54: (0xb274, 0),# Korean hangul
- 0x6f4f73: (0xb9e3, 0),# Korean hangul
- 0x275e5e: (0x9605, 0),# East Asian ideograph
- 0x2e4731: (0x6c73, 0),# East Asian ideograph
- 0x6f4b30: (0xb000, 0),# Korean hangul
- 0x214c56: (0x75d9, 0),# East Asian ideograph
- 0x214c57: (0x75e2, 0),# East Asian ideograph
- 0x6f5c3d: (0xd3ec, 0),# Korean hangul
- 0x234c58: (0x9730, 0),# East Asian ideograph
- 0x6f4c59: (0xb294, 0),# Korean hangul
- 0x275e5f: (0x95fe, 0),# East Asian ideograph
- 0x214c5a: (0x75f0, 0),# East Asian ideograph
- 0x394444: (0x8988, 0),# East Asian ideograph
- 0x214a21: (0x71be, 0),# East Asian ideograph
- 0x214a22: (0x71c9, 0),# East Asian ideograph
- 0x214a23: (0x71d0, 0),# East Asian ideograph
- 0x214a24: (0x71c8, 0),# East Asian ideograph
- 0x214a25: (0x71dc, 0),# East Asian ideograph
- 0x214a26: (0x71d2, 0),# East Asian ideograph
- 0x214a27: (0x71b9, 0),# East Asian ideograph
- 0x214a28: (0x71d5, 0),# East Asian ideograph
- 0x214a29: (0x71ce, 0),# East Asian ideograph
- 0x214a2a: (0x71c3, 0),# East Asian ideograph
- 0x214a2b: (0x71c4, 0),# East Asian ideograph
- 0x214a2c: (0x71ee, 0),# East Asian ideograph
- 0x214a2d: (0x71e7, 0),# East Asian ideograph
- 0x214a2e: (0x71df, 0),# East Asian ideograph
- 0x214a2f: (0x71e5, 0),# East Asian ideograph
- 0x214a30: (0x71ed, 0),# East Asian ideograph
- 0x214a31: (0x71e6, 0),# East Asian ideograph
- 0x234a32: (0x963c, 0),# East Asian ideograph
- 0x214a33: (0x71f4, 0),# East Asian ideograph
- 0x214a34: (0x71fb, 0),# East Asian ideograph
- 0x224a35: (0x6ddd, 0),# East Asian ideograph
- 0x274a36: (0x70c1, 0),# East Asian ideograph
- 0x214a37: (0x7210, 0),# East Asian ideograph
- 0x214a38: (0x721b, 0),# East Asian ideograph
- 0x224a39: (0x6ddb, 0),# East Asian ideograph
- 0x214a3a: (0x722a, 0),# East Asian ideograph
- 0x214a3b: (0x722d, 0),# East Asian ideograph
- 0x214a3c: (0x722c, 0),# East Asian ideograph
- 0x214a3d: (0x7230, 0),# East Asian ideograph
- 0x214a3e: (0x7235, 0),# East Asian ideograph (variant of 4B4A3E which maps to 7235)
- 0x214a3f: (0x7236, 0),# East Asian ideograph
- 0x214a40: (0x7238, 0),# East Asian ideograph
- 0x214a41: (0x7239, 0),# East Asian ideograph
- 0x214a42: (0x723a, 0),# East Asian ideograph
- 0x214a43: (0x723b, 0),# East Asian ideograph
- 0x214a44: (0x723d, 0),# East Asian ideograph
- 0x214a45: (0x723e, 0),# East Asian ideograph
- 0x224a46: (0x6df0, 0),# East Asian ideograph
- 0x214a47: (0x7247, 0),# East Asian ideograph
- 0x214a48: (0x7248, 0),# East Asian ideograph
- 0x214a49: (0x724c, 0),# East Asian ideograph
- 0x214a4a: (0x7252, 0),# East Asian ideograph
- 0x214a4b: (0x7256, 0),# East Asian ideograph
- 0x214a4c: (0x7258, 0),# East Asian ideograph
- 0x214a4d: (0x7259, 0),# East Asian ideograph
- 0x214a4e: (0x725b, 0),# East Asian ideograph
- 0x214a4f: (0x725f, 0),# East Asian ideograph
- 0x214a50: (0x725d, 0),# East Asian ideograph
- 0x214a51: (0x7262, 0),# East Asian ideograph
- 0x214a52: (0x7261, 0),# East Asian ideograph
- 0x214a53: (0x7260, 0),# East Asian ideograph
- 0x214a54: (0x7267, 0),# East Asian ideograph
- 0x214a55: (0x7269, 0),# East Asian ideograph
- 0x214a56: (0x726f, 0),# East Asian ideograph
- 0x214a57: (0x7272, 0),# East Asian ideograph
- 0x214a58: (0x7274, 0),# East Asian ideograph
- 0x214a59: (0x7279, 0),# East Asian ideograph
- 0x214a5a: (0x727d, 0),# East Asian ideograph
- 0x214a5b: (0x7281, 0),# East Asian ideograph
- 0x214a5c: (0x7280, 0),# East Asian ideograph
- 0x214a5d: (0x7284, 0),# East Asian ideograph
- 0x274a5e: (0x8366, 0),# East Asian ideograph
- 0x214a5f: (0x7292, 0),# East Asian ideograph
- 0x224a60: (0x6e8a, 0),# East Asian ideograph
- 0x214a61: (0x72a2, 0),# East Asian ideograph
- 0x274a62: (0x727a, 0),# East Asian ideograph
- 0x214a63: (0x72ac, 0),# East Asian ideograph
- 0x214a64: (0x72af, 0),# East Asian ideograph
- 0x214a65: (0x72c4, 0),# East Asian ideograph
- 0x214a66: (0x72c2, 0),# East Asian ideograph
- 0x214a67: (0x72d9, 0),# East Asian ideograph
- 0x274a68: (0x72b6, 0),# East Asian ideograph
- 0x214a69: (0x72ce, 0),# East Asian ideograph
- 0x214a6a: (0x72d7, 0),# East Asian ideograph
- 0x214a6b: (0x72d0, 0),# East Asian ideograph
- 0x214a6c: (0x72e1, 0),# East Asian ideograph
- 0x214a6d: (0x72e9, 0),# East Asian ideograph
- 0x214a6e: (0x72e0, 0),# East Asian ideograph
- 0x214a6f: (0x72fc, 0),# East Asian ideograph
- 0x274a70: (0x72ed, 0),# East Asian ideograph
- 0x224a71: (0x6e73, 0),# East Asian ideograph
- 0x214a72: (0x72fd, 0),# East Asian ideograph
- 0x214a73: (0x72f7, 0),# East Asian ideograph
- 0x214a74: (0x731c, 0),# East Asian ideograph
- 0x214a75: (0x731b, 0),# East Asian ideograph
- 0x214a76: (0x7313, 0),# East Asian ideograph
- 0x214a77: (0x7316, 0),# East Asian ideograph
- 0x214a78: (0x7319, 0),# East Asian ideograph
- 0x214a79: (0x7336, 0),# East Asian ideograph
- 0x214a7a: (0x7337, 0),# East Asian ideograph
- 0x214a7b: (0x7329, 0),# East Asian ideograph
- 0x214a7c: (0x7325, 0),# East Asian ideograph
- 0x214a7d: (0x7334, 0),# East Asian ideograph
- 0x214a7e: (0x7344, 0),# East Asian ideograph
- 0x2d4c3c: (0x53e0, 0),# East Asian ideograph
- 0x214c6b: (0x7634, 0),# East Asian ideograph
- 0x6f5c41: (0xd3fc, 0),# Korean hangul
- 0x213d4a: (0x5f46, 0),# East Asian ideograph
- 0x214c6c: (0x7638, 0),# East Asian ideograph
- 0x214c6d: (0x7646, 0),# East Asian ideograph
- 0x6f4f78: (0xb9f4, 0),# Korean hangul
- 0x275e63: (0x9611, 0),# East Asian ideograph
- 0x234c6e: (0x9749, 0),# East Asian ideograph
- 0x233d5b: (0x9046, 0),# East Asian ideograph
- 0x6f4c6f: (0xb2e6, 0),# Korean hangul
- 0x6f4c70: (0xb2e8, 0),# Korean hangul
- 0x214c71: (0x7658, 0),# East Asian ideograph
- 0x4d477b: (0x943e, 0),# East Asian ideograph
- 0x35344d: (0x8b5b, 0),# East Asian ideograph
- 0x6f4f79: (0xb9f5, 0),# Korean hangul
- 0x275e64: (0x95f1, 0),# East Asian ideograph
- 0x274c73: (0x75d2, 0),# East Asian ideograph
- 0x21355e: (0x542e, 0),# East Asian ideograph
- 0x275a21: (0x8d45, 0),# East Asian ideograph
- 0x6f4c74: (0xb2ee, 0),# Korean hangul
- 0x234147: (0x91ad, 0),# East Asian ideograph
- 0x217d7c: (0x5b56, 0),# East Asian ideograph
- 0x214c75: (0x7669, 0),# East Asian ideograph
- 0x6f5c43: (0xd3ff, 0),# Korean hangul
- 0x234c76: (0x975a, 0),# East Asian ideograph
- 0x233721: (0x8cf7, 0),# East Asian ideograph
- 0x287161: (0x7efb, 0),# East Asian ideograph
- 0x3a787d: (0x80fc, 0),# East Asian ideograph
- 0x214c77: (0x766c, 0),# East Asian ideograph
- 0x273722: (0x545b, 0),# East Asian ideograph
- 0x275e65: (0x677f, 0),# East Asian ideograph
- 0x214c78: (0x7671, 0),# East Asian ideograph
- 0x213723: (0x55e1, 0),# East Asian ideograph
- 0x21754e: (0x579d, 0),# East Asian ideograph
- 0x214c79: (0x7672, 0),# East Asian ideograph (variant of 4B4C79 which maps to 7672)
- 0x694c7a: (0x9453, 0),# East Asian ideograph
- 0x213725: (0x561b, 0),# East Asian ideograph
- 0x6f5c44: (0xd401, 0),# Korean hangul
- 0x214c7b: (0x767c, 0),# East Asian ideograph
- 0x233726: (0x8cfe, 0),# East Asian ideograph
- 0x6f4c7c: (0xb2ff, 0),# Korean hangul
- 0x223727: (0x65ae, 0),# East Asian ideograph
- 0x2e4739: (0x6c67, 0),# East Asian ideograph (variant of 224739 which maps to 6C67)
- 0x214c7d: (0x767d, 0),# East Asian ideograph
- 0x6f7728: (0xae07, 0),# Korean hangul
- 0x215336: (0x80b4, 0),# East Asian ideograph
- 0x2d4c7e: (0x4f70, 0),# East Asian ideograph
- 0x217729: (0x582d, 0),# East Asian ideograph
- 0x2d5447: (0x824a, 0),# East Asian ideograph
- 0x21372a: (0x561f, 0),# East Asian ideograph
- 0x6f5c45: (0xd440, 0),# Korean hangul
- 0x2d3a47: (0x5acb, 0),# East Asian ideograph
- 0x4b4358: (0x66fd, 0),# East Asian ideograph
- 0x23372b: (0x8d07, 0),# East Asian ideograph
- 0x217850: (0x58d6, 0),# East Asian ideograph
- 0x334a28: (0x91bc, 0),# East Asian ideograph
- 0x27372c: (0x53f9, 0),# East Asian ideograph
- 0x6f593c: (0xcca0, 0),# Korean hangul
- 0x275e67: (0x95ef, 0),# East Asian ideograph
- 0x6f4b39: (0xb044, 0),# Korean hangul
- 0x275a24: (0x8d3e, 0),# East Asian ideograph
- 0x27372e: (0x5455, 0),# East Asian ideograph
- 0x21372f: (0x560e, 0),# East Asian ideograph
- 0x6f5c46: (0xd444, 0),# Korean hangul
- 0x224a6d: (0x6e63, 0),# East Asian ideograph
- 0x293340: (0x8c02, 0),# East Asian ideograph
- 0x214b21: (0x733f, 0),# East Asian ideograph
- 0x224b22: (0x6e28, 0),# East Asian ideograph
- 0x274b23: (0x72ee, 0),# East Asian ideograph
- 0x214b24: (0x7350, 0),# East Asian ideograph
- 0x6f4b25: (0xafc0, 0),# Korean hangul
- 0x214b26: (0x7357, 0),# East Asian ideograph
- 0x274b27: (0x72ec, 0),# East Asian ideograph
- 0x224b28: (0x6e5e, 0),# East Asian ideograph
- 0x274b29: (0x83b7, 0),# East Asian ideograph
- 0x274b2a: (0x72b7, 0),# East Asian ideograph
- 0x274b2b: (0x517d, 0),# East Asian ideograph
- 0x224b2c: (0x6e84, 0),# East Asian ideograph
- 0x223732: (0x65c3, 0),# East Asian ideograph
- 0x224b2e: (0x6e2e, 0),# East Asian ideograph
- 0x234b2f: (0x96a4, 0),# East Asian ideograph
- 0x214b30: (0x7384, 0),# East Asian ideograph
- 0x214b31: (0x7387, 0),# East Asian ideograph
- 0x214b32: (0x7389, 0),# East Asian ideograph
- 0x223733: (0x65c4, 0),# East Asian ideograph
- 0x214b34: (0x7396, 0),# East Asian ideograph
- 0x214b35: (0x739f, 0),# East Asian ideograph
- 0x214b36: (0x73a8, 0),# East Asian ideograph
- 0x214b37: (0x73a9, 0),# East Asian ideograph
- 0x214b38: (0x73ab, 0),# East Asian ideograph
- 0x214b39: (0x73bb, 0),# East Asian ideograph
- 0x214b3a: (0x73ca, 0),# East Asian ideograph
- 0x214b3b: (0x73b7, 0),# East Asian ideograph
- 0x214b3c: (0x73c0, 0),# East Asian ideograph
- 0x6f4b3d: (0xb04c, 0),# Korean hangul
- 0x214b3e: (0x73b2, 0),# East Asian ideograph
- 0x214b3f: (0x73cd, 0),# East Asian ideograph
- 0x224b40: (0x6e2a, 0),# East Asian ideograph
- 0x224b41: (0x6e4c, 0),# East Asian ideograph
- 0x224b42: (0x6e22, 0),# East Asian ideograph
- 0x224b43: (0x6ece, 0),# East Asian ideograph
- 0x214b44: (0x7409, 0),# East Asian ideograph
- 0x224b45: (0x6e9b, 0),# East Asian ideograph
- 0x224b46: (0x6e9f, 0),# East Asian ideograph
- 0x214b47: (0x73fe, 0),# East Asian ideograph
- 0x224b48: (0x6ec8, 0),# East Asian ideograph
- 0x224b49: (0x6ed8, 0),# East Asian ideograph
- 0x224b4a: (0x6e8f, 0),# East Asian ideograph
- 0x214b4b: (0x7435, 0),# East Asian ideograph
- 0x214b4c: (0x7436, 0),# East Asian ideograph
- 0x224b4d: (0x6e93, 0),# East Asian ideograph
- 0x214b4e: (0x742a, 0),# East Asian ideograph
- 0x224b4f: (0x6ea0, 0),# East Asian ideograph
- 0x214b50: (0x7422, 0),# East Asian ideograph
- 0x224b51: (0x6eb1, 0),# East Asian ideograph
- 0x234b52: (0x96ce, 0),# East Asian ideograph
- 0x214b53: (0x7455, 0),# East Asian ideograph
- 0x214b54: (0x745f, 0),# East Asian ideograph
- 0x214b55: (0x745a, 0),# East Asian ideograph
- 0x214b56: (0x7441, 0),# East Asian ideograph
- 0x214b57: (0x743f, 0),# East Asian ideograph
- 0x214b58: (0x745b, 0),# East Asian ideograph
- 0x224b59: (0x6e92, 0),# East Asian ideograph
- 0x224b5a: (0x6ea7, 0),# East Asian ideograph
- 0x214b5b: (0x7459, 0),# East Asian ideograph
- 0x214b5c: (0x7483, 0),# East Asian ideograph
- 0x214b5d: (0x7469, 0),# East Asian ideograph
- 0x274b5e: (0x739b, 0),# East Asian ideograph
- 0x214b5f: (0x7463, 0),# East Asian ideograph
- 0x214b60: (0x7464, 0),# East Asian ideograph
- 0x214b61: (0x7470, 0),# East Asian ideograph
- 0x214b62: (0x748b, 0),# East Asian ideograph
- 0x214b63: (0x749c, 0),# East Asian ideograph (variant of 4B4B63 which maps to 749C)
- 0x214b64: (0x74a3, 0),# East Asian ideograph
- 0x214b65: (0x74a7, 0),# East Asian ideograph
- 0x214b66: (0x74a9, 0),# East Asian ideograph
- 0x214b67: (0x74b0, 0),# East Asian ideograph
- 0x214b68: (0x74a6, 0),# East Asian ideograph
- 0x214b69: (0x74bd, 0),# East Asian ideograph
- 0x224b6a: (0x6ec9, 0),# East Asian ideograph
- 0x274b6b: (0x73d1, 0),# East Asian ideograph
- 0x224b6c: (0x6eb3, 0),# East Asian ideograph
- 0x224b6d: (0x6eb7, 0),# East Asian ideograph
- 0x214b6e: (0x74e2, 0),# East Asian ideograph
- 0x214b6f: (0x74e3, 0),# East Asian ideograph
- 0x214b70: (0x74e6, 0),# East Asian ideograph
- 0x234b71: (0x96e9, 0),# East Asian ideograph
- 0x214b72: (0x74f7, 0),# East Asian ideograph
- 0x214b73: (0x7504, 0),# East Asian ideograph
- 0x234b74: (0x96f1, 0),# East Asian ideograph
- 0x214b75: (0x7515, 0),# East Asian ideograph
- 0x234b76: (0x96f0, 0),# East Asian ideograph
- 0x214b77: (0x751a, 0),# East Asian ideograph
- 0x234b78: (0x96fa, 0),# East Asian ideograph
- 0x224b79: (0x6ecf, 0),# East Asian ideograph
- 0x214b7a: (0x7522, 0),# East Asian ideograph
- 0x214b7b: (0x7526, 0),# East Asian ideograph
- 0x224b7c: (0x6eca, 0),# East Asian ideograph
- 0x224b7d: (0x6ed5, 0),# East Asian ideograph
- 0x214b7e: (0x7529, 0),# East Asian ideograph
- 0x273740: (0x53fd, 0),# East Asian ideograph
- 0x6f526b: (0xc0c0, 0),# Korean hangul
- 0x294c76: (0x9753, 0),# East Asian ideograph
- 0x213565: (0x542b, 0),# East Asian ideograph
- 0x277742: (0x57d8, 0),# East Asian ideograph
- 0x293344: (0x8c19, 0),# East Asian ideograph
- 0x273744: (0x5428, 0),# East Asian ideograph
- 0x27624f: (0x9e3e, 0),# East Asian ideograph
- 0x223745: (0x65dc, 0),# East Asian ideograph
- 0x6f593d: (0xcca8, 0),# Korean hangul
- 0x6f4b3e: (0xb053, 0),# Korean hangul
- 0x213746: (0x5679, 0),# East Asian ideograph
- 0x223747: (0x65dd, 0),# East Asian ideograph
- 0x223748: (0x65df, 0),# East Asian ideograph
- 0x293345: (0x8be8, 0),# East Asian ideograph
- 0x217749: (0x583d, 0),# East Asian ideograph
- 0x4b3b43: (0x5bfe, 0),# East Asian ideograph
- 0x276175: (0x9c7f, 0),# East Asian ideograph
- 0x21374a: (0x5671, 0),# East Asian ideograph
- 0x6f5d70: (0xd760, 0),# Korean hangul
- 0x6f4b3f: (0xb054, 0),# Korean hangul
- 0x21374b: (0x566f, 0),# East Asian ideograph
- 0x6f5863: (0xcb14, 0),# Korean hangul
- 0x21374c: (0x5662, 0),# East Asian ideograph (variant of 4B374C which maps to 5662)
- 0x282f47: (0x620b, 0),# East Asian ideograph
- 0x22374e: (0x65e4, 0),# East Asian ideograph
- 0x6f543a: (0xc314, 0),# Korean hangul
- 0x6f4b40: (0xb055, 0),# Korean hangul
- 0x692525: (0x30a5, 0),# Katakana letter small U
- 0x4b4c51: (0x75ca, 0),# East Asian ideograph
- 0x273751: (0x5413, 0),# East Asian ideograph
- 0x213752: (0x5690, 0),# East Asian ideograph
- 0x6f5c4d: (0xd488, 0),# Korean hangul
- 0x70604c: (0x55f5, 0),# East Asian ideograph
- 0x224a74: (0x6e4f, 0),# East Asian ideograph
- 0x334e73: (0x79a5, 0),# East Asian ideograph
- 0x234a30: (0x963d, 0),# East Asian ideograph
- 0x273754: (0x565c, 0),# East Asian ideograph
- 0x2d4343: (0x6636, 0),# East Asian ideograph
- 0x2d4768: (0x6d45, 0),# East Asian ideograph (variant of 274768 which maps to 6D45)
- 0x223755: (0x65f0, 0),# East Asian ideograph
- 0x213756: (0x56a8, 0),# East Asian ideograph
- 0x4b375a: (0x53b3, 0),# East Asian ideograph
- 0x213757: (0x56b0, 0),# East Asian ideograph
- 0x2d3758: (0x54bd, 0),# East Asian ideograph
- 0x294162: (0x9486, 0),# East Asian ideograph
- 0x212a3b: (0xe8e8, 0),# EACC component character
- 0x284056: (0x6920, 0),# East Asian ideograph
- 0x21375a: (0x56b4, 0),# East Asian ideograph
- 0x6f592d: (0xcc44, 0),# Korean hangul
- 0x224c21: (0x6ec3, 0),# East Asian ideograph
- 0x234c22: (0x96ff, 0),# East Asian ideograph
- 0x214c23: (0x752d, 0),# East Asian ideograph
- 0x224c24: (0x6eb4, 0),# East Asian ideograph
- 0x214c25: (0x7532, 0),# East Asian ideograph
- 0x224c26: (0x6eb2, 0),# East Asian ideograph
- 0x234c27: (0x9702, 0),# East Asian ideograph
- 0x214c28: (0x7537, 0),# East Asian ideograph
- 0x224c29: (0x6eb5, 0),# East Asian ideograph
- 0x234c2a: (0x9705, 0),# East Asian ideograph
- 0x214c2b: (0x754f, 0),# East Asian ideograph
- 0x214c2c: (0x754c, 0),# East Asian ideograph
- 0x214c2d: (0x755d, 0),# East Asian ideograph
- 0x224c2e: (0x6ef8, 0),# East Asian ideograph
- 0x214c2f: (0x7554, 0),# East Asian ideograph
- 0x224c30: (0x6f37, 0),# East Asian ideograph
- 0x214c31: (0x7559, 0),# East Asian ideograph
- 0x214c32: (0x7566, 0),# East Asian ideograph
- 0x214c33: (0x7562, 0),# East Asian ideograph
- 0x224c34: (0x6efd, 0),# East Asian ideograph
- 0x224c35: (0x6f09, 0),# East Asian ideograph
- 0x214c36: (0x756b, 0),# East Asian ideograph
- 0x214c37: (0x756a, 0),# East Asian ideograph
- 0x214c38: (0x7578, 0),# East Asian ideograph
- 0x214c39: (0x7576, 0),# East Asian ideograph
- 0x214c3a: (0x7586, 0),# East Asian ideograph
- 0x214c3b: (0x7587, 0),# East Asian ideograph
- 0x214c3c: (0x758a, 0),# East Asian ideograph
- 0x224c3d: (0x6f63, 0),# East Asian ideograph
- 0x224c3e: (0x6f12, 0),# East Asian ideograph
- 0x214c3f: (0x7591, 0),# East Asian ideograph
- 0x214c40: (0x759d, 0),# East Asian ideograph
- 0x224c41: (0x6f1a, 0),# East Asian ideograph
- 0x224c42: (0x6ef6, 0),# East Asian ideograph
- 0x224c43: (0x6f19, 0),# East Asian ideograph
- 0x214c44: (0x75ab, 0),# East Asian ideograph
- 0x214c45: (0x75a5, 0),# East Asian ideograph
- 0x214c46: (0x75c7, 0),# East Asian ideograph
- 0x214c47: (0x75c5, 0),# East Asian ideograph
- 0x214c48: (0x75b3, 0),# East Asian ideograph
- 0x234c49: (0x9722, 0),# East Asian ideograph
- 0x234c4a: (0x9724, 0),# East Asian ideograph
- 0x224c4b: (0x6f24, 0),# East Asian ideograph
- 0x214c4c: (0x75bc, 0),# East Asian ideograph
- 0x214c4d: (0x75b9, 0),# East Asian ideograph
- 0x234c4e: (0x9728, 0),# East Asian ideograph
- 0x214c4f: (0x75d4, 0),# East Asian ideograph
- 0x234c50: (0x9726, 0),# East Asian ideograph
- 0x224c51: (0x6f18, 0),# East Asian ideograph
- 0x234c52: (0x9731, 0),# East Asian ideograph
- 0x214c53: (0x75e3, 0),# East Asian ideograph
- 0x214c54: (0x75d8, 0),# East Asian ideograph
- 0x214c55: (0x75de, 0),# East Asian ideograph
- 0x274c56: (0x75c9, 0),# East Asian ideograph
- 0x224c57: (0x6f1f, 0),# East Asian ideograph
- 0x214c58: (0x7601, 0),# East Asian ideograph
- 0x214c59: (0x7600, 0),# East Asian ideograph
- 0x224c5a: (0x6f0a, 0),# East Asian ideograph
- 0x214c5b: (0x75f2, 0),# East Asian ideograph
- 0x234c5c: (0x9736, 0),# East Asian ideograph
- 0x214c5d: (0x75f4, 0),# East Asian ideograph
- 0x214c5e: (0x75ff, 0),# East Asian ideograph
- 0x214c5f: (0x75fa, 0),# East Asian ideograph
- 0x224c60: (0x6ef9, 0),# East Asian ideograph
- 0x224c61: (0x6eee, 0),# East Asian ideograph
- 0x224c62: (0x6f41, 0),# East Asian ideograph
- 0x214c63: (0x760b, 0),# East Asian ideograph
- 0x224c64: (0x6f95, 0),# East Asian ideograph
- 0x214c65: (0x7620, 0),# East Asian ideograph
- 0x214c66: (0x7629, 0),# East Asian ideograph
- 0x214c67: (0x761f, 0),# East Asian ideograph
- 0x214c68: (0x7624, 0),# East Asian ideograph
- 0x214c69: (0x7626, 0),# East Asian ideograph
- 0x214c6a: (0x7621, 0),# East Asian ideograph
- 0x224c6b: (0x6f49, 0),# East Asian ideograph
- 0x234c6c: (0x9746, 0),# East Asian ideograph
- 0x224c6d: (0x6f30, 0),# East Asian ideograph
- 0x214c6e: (0x7642, 0),# East Asian ideograph
- 0x214c6f: (0x764c, 0),# East Asian ideograph
- 0x214c70: (0x7656, 0),# East Asian ideograph
- 0x274c71: (0x75a0, 0),# East Asian ideograph
- 0x6f4c72: (0xb2ec, 0),# Korean hangul
- 0x214c73: (0x7662, 0),# East Asian ideograph
- 0x214c74: (0x7665, 0),# East Asian ideograph
- 0x234c75: (0x9758, 0),# East Asian ideograph
- 0x214c76: (0x766e, 0),# East Asian ideograph
- 0x224c77: (0x6eeb, 0),# East Asian ideograph
- 0x224c78: (0x6f08, 0),# East Asian ideograph
- 0x224c79: (0x6f0e, 0),# East Asian ideograph
- 0x214c7a: (0x7678, 0),# East Asian ideograph
- 0x224c7b: (0x6f35, 0),# East Asian ideograph
- 0x214c7c: (0x767b, 0),# East Asian ideograph
- 0x234c7d: (0x9764, 0),# East Asian ideograph
- 0x214c7e: (0x767e, 0),# East Asian ideograph
- 0x21376b: (0x56f1, 0),# East Asian ideograph
- 0x6f5c52: (0xd4e8, 0),# Korean hangul
- 0x21376d: (0x5703, 0),# East Asian ideograph
- 0x2d4348: (0x6681, 0),# East Asian ideograph
- 0x2e4747: (0x6d64, 0),# East Asian ideograph
- 0x3f424f: (0x542f, 0),# East Asian ideograph (variant of 27424F which maps to 542F)
- 0x6f4b46: (0xb080, 0),# Korean hangul
- 0x21376e: (0x5708, 0),# East Asian ideograph
- 0x212a2f: (0xe8dd, 0),# EACC component character
- 0x27376f: (0x56ef, 0),# East Asian ideograph
- 0x273770: (0x56f4, 0),# East Asian ideograph
- 0x6f5c53: (0xd504, 0),# Korean hangul
- 0x27632b: (0x9f99, 0),# East Asian ideograph
- 0x213771: (0x5712, 0),# East Asian ideograph
- 0x294163: (0x948c, 0),# East Asian ideograph
- 0x224637: (0x6bfa, 0),# East Asian ideograph
- 0x213772: (0x5713, 0),# East Asian ideograph
- 0x2d4349: (0x66a6, 0),# East Asian ideograph
- 0x6f526d: (0xc0c5, 0),# Korean hangul
- 0x213430: (0x52c9, 0),# East Asian ideograph
- 0x6f4b47: (0xb084, 0),# Korean hangul
- 0x275a32: (0x8d4f, 0),# East Asian ideograph
- 0x216f21: (0x544f, 0),# East Asian ideograph
- 0x213774: (0x5716, 0),# East Asian ideograph
- 0x233775: (0x8d8d, 0),# East Asian ideograph
- 0x6f4e2a: (0xb554, 0),# Korean hangul
- 0x29334e: (0x8c0c, 0),# East Asian ideograph
- 0x276221: (0x9cc3, 0),# East Asian ideograph
- 0x213777: (0x572d, 0),# East Asian ideograph
- 0x216222: (0x9c0d, 0),# East Asian ideograph
- 0x6f4b48: (0xb08c, 0),# Korean hangul
- 0x29445b: (0x9516, 0),# East Asian ideograph
- 0x276223: (0x9cab, 0),# East Asian ideograph
- 0x276224: (0x9ccd, 0),# East Asian ideograph
- 0x694c5d: (0x6762, 0),# East Asian ideograph
- 0x222225: (0x5bef, 0),# East Asian ideograph
- 0x706054: (0x5623, 0),# East Asian ideograph
- 0x395568: (0x83dd, 0),# East Asian ideograph
- 0x234e7b: (0x980f, 0),# East Asian ideograph
- 0x216226: (0x9c31, 0),# East Asian ideograph
- 0x276177: (0x9c8d, 0),# East Asian ideograph
- 0x21377c: (0x5751, 0),# East Asian ideograph
- 0x276227: (0x9cd4, 0),# East Asian ideograph
- 0x6f4b49: (0xb08d, 0),# Korean hangul
- 0x21377d: (0x574a, 0),# East Asian ideograph
- 0x276228: (0x9cd7, 0),# East Asian ideograph
- 0x276229: (0x9cdd, 0),# East Asian ideograph
- 0x6f5c56: (0xd50c, 0),# Korean hangul
- 0x27622a: (0x9cde, 0),# East Asian ideograph
- 0x284d27: (0x6d9d, 0),# East Asian ideograph
- 0x27622b: (0x9cdc, 0),# East Asian ideograph
- 0x27622c: (0x9cd6, 0),# East Asian ideograph
- 0x28405e: (0x67fd, 0),# East Asian ideograph
- 0x21622d: (0x9c77, 0),# East Asian ideograph
- 0x4b4c5b: (0x75f3, 0),# East Asian ideograph
- 0x27622e: (0x9c88, 0),# East Asian ideograph
- 0x2d5238: (0x898a, 0),# East Asian ideograph
- 0x2e363f: (0x52c5, 0),# East Asian ideograph
- 0x6f5c57: (0xd514, 0),# Korean hangul
- 0x27622f: (0x9e1f, 0),# East Asian ideograph
- 0x6f5959: (0xcda7, 0),# Korean hangul
- 0x214d21: (0x7682, 0),# East Asian ideograph
- 0x214d22: (0x7684, 0),# East Asian ideograph
- 0x214d23: (0x7687, 0),# East Asian ideograph
- 0x214d24: (0x7686, 0),# East Asian ideograph
- 0x234d25: (0x9767, 0),# East Asian ideograph
- 0x214d26: (0x768e, 0),# East Asian ideograph
- 0x214d27: (0x7696, 0),# East Asian ideograph
- 0x214d28: (0x7693, 0),# East Asian ideograph
- 0x214d29: (0x769a, 0),# East Asian ideograph
- 0x214d2a: (0x76ae, 0),# East Asian ideograph
- 0x214d2b: (0x76b0, 0),# East Asian ideograph
- 0x214d2c: (0x76b4, 0),# East Asian ideograph
- 0x274d2d: (0x76b1, 0),# East Asian ideograph
- 0x214d2e: (0x76bf, 0),# East Asian ideograph
- 0x214d2f: (0x76c2, 0),# East Asian ideograph
- 0x224d30: (0x6f60, 0),# East Asian ideograph
- 0x234d31: (0x9777, 0),# East Asian ideograph
- 0x214d32: (0x76c6, 0),# East Asian ideograph
- 0x214d33: (0x76ca, 0),# East Asian ideograph
- 0x214d34: (0x76cd, 0),# East Asian ideograph
- 0x214d35: (0x76ce, 0),# East Asian ideograph
- 0x214d36: (0x76d4, 0),# East Asian ideograph
- 0x214d37: (0x76d2, 0),# East Asian ideograph
- 0x214d38: (0x76dc, 0),# East Asian ideograph
- 0x214d39: (0x76db, 0),# East Asian ideograph
- 0x234d3a: (0x9780, 0),# East Asian ideograph
- 0x214d3b: (0x76df, 0),# East Asian ideograph
- 0x234d3c: (0x9781, 0),# East Asian ideograph
- 0x214d3d: (0x76e3, 0),# East Asian ideograph
- 0x274d3e: (0x76d8, 0),# East Asian ideograph
- 0x274d3f: (0x5362, 0),# East Asian ideograph
- 0x214d40: (0x76e5, 0),# East Asian ideograph
- 0x214d41: (0x76ea, 0),# East Asian ideograph
- 0x214d42: (0x76ee, 0),# East Asian ideograph
- 0x214d43: (0x76ef, 0),# East Asian ideograph
- 0x214d44: (0x76f2, 0),# East Asian ideograph
- 0x214d45: (0x76f4, 0),# East Asian ideograph
- 0x214d46: (0x7709, 0),# East Asian ideograph
- 0x214d47: (0x76f9, 0),# East Asian ideograph
- 0x214d48: (0x76f8, 0),# East Asian ideograph
- 0x214d49: (0x7701, 0),# East Asian ideograph
- 0x214d4a: (0x770b, 0),# East Asian ideograph
- 0x214d4b: (0x76fc, 0),# East Asian ideograph
- 0x214d4c: (0x76fe, 0),# East Asian ideograph
- 0x214d4d: (0x7729, 0),# East Asian ideograph
- 0x214d4e: (0x7720, 0),# East Asian ideograph
- 0x214d4f: (0x771e, 0),# East Asian ideograph
- 0x214d50: (0x7728, 0),# East Asian ideograph
- 0x214d51: (0x7737, 0),# East Asian ideograph
- 0x214d52: (0x773c, 0),# East Asian ideograph
- 0x214d53: (0x7736, 0),# East Asian ideograph
- 0x214d54: (0x7738, 0),# East Asian ideograph
- 0x214d55: (0x773a, 0),# East Asian ideograph
- 0x274d56: (0x4f17, 0),# East Asian ideograph
- 0x274d57: (0x56f0, 0),# East Asian ideograph
- 0x214d58: (0x776b, 0),# East Asian ideograph
- 0x214d59: (0x775b, 0),# East Asian ideograph
- 0x214d5a: (0x776a, 0),# East Asian ideograph
- 0x214d5b: (0x7766, 0),# East Asian ideograph
- 0x214d5c: (0x7779, 0),# East Asian ideograph
- 0x274d5d: (0x7750, 0),# East Asian ideograph
- 0x214d5e: (0x7763, 0),# East Asian ideograph
- 0x214d5f: (0x775c, 0),# East Asian ideograph
- 0x214d60: (0x776c, 0),# East Asian ideograph
- 0x214d61: (0x7768, 0),# East Asian ideograph
- 0x214d62: (0x7765, 0),# East Asian ideograph
- 0x214d63: (0x777d, 0),# East Asian ideograph
- 0x214d64: (0x7771, 0),# East Asian ideograph
- 0x214d65: (0x777f, 0),# East Asian ideograph
- 0x214d66: (0x7784, 0),# East Asian ideograph
- 0x214d67: (0x7761, 0),# East Asian ideograph
- 0x214d68: (0x7787, 0),# East Asian ideograph
- 0x214d69: (0x778e, 0),# East Asian ideograph
- 0x214d6a: (0x778c, 0),# East Asian ideograph
- 0x214d6b: (0x7791, 0),# East Asian ideograph
- 0x214d6c: (0x779f, 0),# East Asian ideograph
- 0x214d6d: (0x779e, 0),# East Asian ideograph
- 0x214d6e: (0x77a0, 0),# East Asian ideograph
- 0x214d6f: (0x77a5, 0),# East Asian ideograph
- 0x214d70: (0x77b3, 0),# East Asian ideograph
- 0x214d71: (0x77aa, 0),# East Asian ideograph
- 0x214d72: (0x77b0, 0),# East Asian ideograph
- 0x214d73: (0x77ad, 0),# East Asian ideograph
- 0x214d74: (0x77ac, 0),# East Asian ideograph
- 0x214d75: (0x77a7, 0),# East Asian ideograph
- 0x214d76: (0x77bd, 0),# East Asian ideograph
- 0x214d77: (0x77bf, 0),# East Asian ideograph
- 0x214d78: (0x77bb, 0),# East Asian ideograph
- 0x224d79: (0x6fa6, 0),# East Asian ideograph
- 0x214d7a: (0x77d3, 0),# East Asian ideograph
- 0x214d7b: (0x77d7, 0),# East Asian ideograph
- 0x214d7c: (0x77da, 0),# East Asian ideograph
- 0x214d7d: (0x77db, 0),# East Asian ideograph
- 0x214d7e: (0x77dc, 0),# East Asian ideograph
- 0x276240: (0x9e44, 0),# East Asian ideograph
- 0x216241: (0x9d5d, 0),# East Asian ideograph
- 0x233d74: (0x9062, 0),# East Asian ideograph
- 0x216242: (0x9d89, 0),# East Asian ideograph
- 0x293b6d: (0x8f8a, 0),# East Asian ideograph
- 0x276243: (0x9e4a, 0),# East Asian ideograph
- 0x6f4d6a: (0xb4f1, 0),# Korean hangul
- 0x216244: (0x9d6a, 0),# East Asian ideograph
- 0x216245: (0x9d6c, 0),# East Asian ideograph
- 0x6f4b4f: (0xb098, 0),# Korean hangul
- 0x276246: (0x9e64, 0),# East Asian ideograph
- 0x333d75: (0x5fb3, 0),# East Asian ideograph
- 0x276247: (0x83ba, 0),# East Asian ideograph
- 0x6f5c5c: (0xd544, 0),# Korean hangul
- 0x232248: (0x8453, 0),# East Asian ideograph
- 0x276249: (0x9e67, 0),# East Asian ideograph
- 0x27624a: (0x9e25, 0),# East Asian ideograph
- 0x27624b: (0x9e36, 0),# East Asian ideograph
- 0x27624c: (0x9e70, 0),# East Asian ideograph
- 0x2e3645: (0x69e3, 0),# East Asian ideograph
- 0x21624d: (0x9dfa, 0),# East Asian ideograph
- 0x293357: (0x8c14, 0),# East Asian ideograph
- 0x27624e: (0x9e66, 0),# East Asian ideograph
- 0x21624f: (0x9e1e, 0),# East Asian ideograph
- 0x4b4f4c: (0x7a4f, 0),# East Asian ideograph
- 0x6f4b51: (0xb09a, 0),# Korean hangul
- 0x276250: (0x54b8, 0),# East Asian ideograph
- 0x294021: (0x90f8, 0),# East Asian ideograph
- 0x235b4d: (0x9d7b, 0),# East Asian ideograph
- 0x233934: (0x8dec, 0),# East Asian ideograph
- 0x6f5c5e: (0xd54d, 0),# Korean hangul
- 0x216252: (0x9e7c, 0),# East Asian ideograph
- 0x344177: (0x8264, 0),# East Asian ideograph
- 0x39526b: (0x7094, 0),# East Asian ideograph
- 0x216256: (0x9e97, 0),# East Asian ideograph
- 0x2d5461: (0x8306, 0),# East Asian ideograph
- 0x6f5c5f: (0xd54f, 0),# Korean hangul
- 0x274931: (0x6c88, 0),# East Asian ideograph
- 0x293359: (0x8c11, 0),# East Asian ideograph
- 0x4b5d70: (0x92ad, 0),# East Asian ideograph
- 0x234a42: (0x9660, 0),# East Asian ideograph
- 0x216259: (0x9e9d, 0),# East Asian ideograph
- 0x6f4b53: (0xb09f, 0),# Korean hangul
- 0x294466: (0x9515, 0),# East Asian ideograph
- 0x214e21: (0x77e2, 0),# East Asian ideograph
- 0x214e22: (0x77e3, 0),# East Asian ideograph
- 0x214e23: (0x77e5, 0),# East Asian ideograph
- 0x214e24: (0x77e9, 0),# East Asian ideograph
- 0x214e25: (0x77ed, 0),# East Asian ideograph
- 0x214e26: (0x77ee, 0),# East Asian ideograph
- 0x214e27: (0x77ef, 0),# East Asian ideograph
- 0x214e28: (0x77f3, 0),# East Asian ideograph
- 0x214e29: (0x77fd, 0),# East Asian ideograph
- 0x214e2a: (0x7802, 0),# East Asian ideograph
- 0x214e2b: (0x780d, 0),# East Asian ideograph
- 0x214e2c: (0x780c, 0),# East Asian ideograph
- 0x234e2d: (0x97b8, 0),# East Asian ideograph
- 0x214e2e: (0x7830, 0),# East Asian ideograph
- 0x214e2f: (0x781d, 0),# East Asian ideograph
- 0x214e30: (0x7834, 0),# East Asian ideograph
- 0x214e31: (0x7838, 0),# East Asian ideograph
- 0x214e32: (0x7837, 0),# East Asian ideograph
- 0x214e33: (0x7827, 0),# East Asian ideograph
- 0x214e34: (0x782d, 0),# East Asian ideograph
- 0x214e35: (0x7825, 0),# East Asian ideograph
- 0x214e36: (0x786b, 0),# East Asian ideograph
- 0x214e37: (0x784f, 0),# East Asian ideograph
- 0x234e38: (0x97c0, 0),# East Asian ideograph
- 0x214e39: (0x786c, 0),# East Asian ideograph
- 0x214e3a: (0x785d, 0),# East Asian ideograph
- 0x214e3b: (0x786f, 0),# East Asian ideograph
- 0x214e3c: (0x78b0, 0),# East Asian ideograph
- 0x214e3d: (0x7897, 0),# East Asian ideograph
- 0x214e3e: (0x788e, 0),# East Asian ideograph
- 0x214e3f: (0x7898, 0),# East Asian ideograph
- 0x214e40: (0x7889, 0),# East Asian ideograph
- 0x214e41: (0x7891, 0),# East Asian ideograph
- 0x214e42: (0x787c, 0),# East Asian ideograph
- 0x214e43: (0x788c, 0),# East Asian ideograph
- 0x214e44: (0x78a7, 0),# East Asian ideograph
- 0x214e45: (0x78a9, 0),# East Asian ideograph
- 0x214e46: (0x789f, 0),# East Asian ideograph
- 0x214e47: (0x78b3, 0),# East Asian ideograph
- 0x214e48: (0x78cb, 0),# East Asian ideograph
- 0x214e49: (0x78ba, 0),# East Asian ideograph
- 0x214e4a: (0x78c1, 0),# East Asian ideograph
- 0x214e4b: (0x78c5, 0),# East Asian ideograph
- 0x214e4c: (0x78bc, 0),# East Asian ideograph
- 0x214e4d: (0x78d5, 0),# East Asian ideograph
- 0x214e4e: (0x78be, 0),# East Asian ideograph
- 0x214e4f: (0x78ca, 0),# East Asian ideograph
- 0x214e50: (0x78d0, 0),# East Asian ideograph
- 0x214e51: (0x78e8, 0),# East Asian ideograph
- 0x214e52: (0x78ec, 0),# East Asian ideograph
- 0x214e53: (0x78da, 0),# East Asian ideograph
- 0x214e54: (0x78f7, 0),# East Asian ideograph
- 0x214e55: (0x78f4, 0),# East Asian ideograph
- 0x214e56: (0x78fa, 0),# East Asian ideograph (variant of 4B4E56 which maps to 78FA)
- 0x214e57: (0x7901, 0),# East Asian ideograph
- 0x214e58: (0x78ef, 0),# East Asian ideograph
- 0x234e59: (0x97dd, 0),# East Asian ideograph
- 0x214e5a: (0x7919, 0),# East Asian ideograph
- 0x214e5b: (0x7926, 0),# East Asian ideograph
- 0x214e5c: (0x792c, 0),# East Asian ideograph
- 0x224e5d: (0x6fde, 0),# East Asian ideograph
- 0x214e5e: (0x792b, 0),# East Asian ideograph
- 0x214e5f: (0x793a, 0),# East Asian ideograph
- 0x214e60: (0x7940, 0),# East Asian ideograph
- 0x214e61: (0x793e, 0),# East Asian ideograph
- 0x214e62: (0x7941, 0),# East Asian ideograph
- 0x214e63: (0x7945, 0),# East Asian ideograph
- 0x214e64: (0x7949, 0),# East Asian ideograph
- 0x214e65: (0x7948, 0),# East Asian ideograph
- 0x214e66: (0x7947, 0),# East Asian ideograph
- 0x224e67: (0x700c, 0),# East Asian ideograph
- 0x214e68: (0x7960, 0),# East Asian ideograph
- 0x214e69: (0x7950, 0),# East Asian ideograph
- 0x214e6a: (0x7956, 0),# East Asian ideograph
- 0x214e6b: (0x795e, 0),# East Asian ideograph
- 0x214e6c: (0x795d, 0),# East Asian ideograph
- 0x214e6d: (0x795f, 0),# East Asian ideograph
- 0x214e6e: (0x795a, 0),# East Asian ideograph
- 0x214e6f: (0x7957, 0),# East Asian ideograph
- 0x214e70: (0x7965, 0),# East Asian ideograph
- 0x214e71: (0x7968, 0),# East Asian ideograph
- 0x214e72: (0x796d, 0),# East Asian ideograph
- 0x234e73: (0x97fa, 0),# East Asian ideograph
- 0x214e74: (0x7981, 0),# East Asian ideograph
- 0x214e75: (0x797f, 0),# East Asian ideograph
- 0x214e76: (0x798f, 0),# East Asian ideograph
- 0x214e77: (0x798d, 0),# East Asian ideograph
- 0x214e78: (0x798e, 0),# East Asian ideograph
- 0x214e79: (0x79a6, 0),# East Asian ideograph
- 0x214e7a: (0x79a7, 0),# East Asian ideograph
- 0x214e7b: (0x79aa, 0),# East Asian ideograph
- 0x214e7c: (0x79ae, 0),# East Asian ideograph
- 0x214e7d: (0x79b1, 0),# East Asian ideograph
- 0x214e7e: (0x79b9, 0),# East Asian ideograph
- 0x6f5c63: (0xd558, 0),# Korean hangul
- 0x6f4e2d: (0xb55f, 0),# Korean hangul
- 0x29335d: (0x8c16, 0),# East Asian ideograph
- 0x216461: (0x4ec8, 0),# East Asian ideograph
- 0x234a46: (0x9658, 0),# East Asian ideograph
- 0x69626d: (0x7874, 0),# East Asian ideograph
- 0x6f4b57: (0xb0a9, 0),# Korean hangul
- 0x27626f: (0x515a, 0),# East Asian ideograph
- 0x6f5c64: (0xd559, 0),# Korean hangul
- 0x274936: (0x6ee4, 0),# East Asian ideograph
- 0x6f5821: (0xc974, 0),# Korean hangul
- 0x6f4d53: (0xb450, 0),# Korean hangul
- 0x216271: (0x9ef4, 0),# East Asian ideograph
- 0x216272: (0x9ef7, 0),# East Asian ideograph
- 0x4b6159: (0x81b8, 0),# East Asian ideograph
- 0x216273: (0x9f07, 0),# East Asian ideograph
- 0x216275: (0x9f13, 0),# East Asian ideograph
- 0x274937: (0x6d4f, 0),# East Asian ideograph
- 0x6f5822: (0xc988, 0),# Korean hangul
- 0x216276: (0x9f15, 0),# East Asian ideograph
- 0x274633: (0x6b8b, 0),# East Asian ideograph
- 0x6f4d22: (0xb308, 0),# Korean hangul
- 0x6f4b59: (0xb0ac, 0),# Korean hangul
- 0x4b6278: (0x9f21, 0),# East Asian ideograph
- 0x224d23: (0x6f7e, 0),# East Asian ideograph
- 0x21712d: (0x5593, 0),# East Asian ideograph
- 0x224d24: (0x6f9d, 0),# East Asian ideograph
- 0x21627a: (0x9f34, 0),# East Asian ideograph
- 0x6f4d25: (0xb313, 0),# Korean hangul
- 0x6f5823: (0xc989, 0),# Korean hangul
- 0x23227b: (0x8484, 0),# East Asian ideograph
- 0x22464a: (0x6c05, 0),# East Asian ideograph
- 0x6f4d26: (0xb314, 0),# Korean hangul
- 0x6f534b: (0xc1b0, 0),# Korean hangul
- 0x23227c: (0x8478, 0),# East Asian ideograph
- 0x224d27: (0x6f87, 0),# East Asian ideograph
- 0x6f4b5a: (0xb0ad, 0),# Korean hangul
- 0x21627d: (0x9f4a, 0),# East Asian ideograph
- 0x6f4d28: (0xb354, 0),# Korean hangul
- 0x27627e: (0x658e, 0),# East Asian ideograph
- 0x274d29: (0x7691, 0),# East Asian ideograph
- 0x274d7c: (0x77a9, 0),# East Asian ideograph
- 0x6f5c67: (0xd565, 0),# Korean hangul
- 0x6f4d2a: (0xb358, 0),# Korean hangul
- 0x6f5824: (0xc98c, 0),# Korean hangul
- 0x213c7a: (0x5eb8, 0),# East Asian ideograph
- 0x224d2b: (0x6f6f, 0),# East Asian ideograph
- 0x6f5271: (0xc0cf, 0),# Korean hangul
- 0x234d2c: (0x976b, 0),# East Asian ideograph
- 0x6f4b5b: (0xb0ae, 0),# Korean hangul
- 0x214d2d: (0x76ba, 0),# East Asian ideograph
- 0x6f4c39: (0xb192, 0),# Korean hangul
- 0x29402b: (0x90ba, 0),# East Asian ideograph
- 0x23393e: (0x8df2, 0),# East Asian ideograph
- 0x282d79: (0x60ab, 0),# East Asian ideograph
- 0x6f4d2e: (0xb364, 0),# Korean hangul
- 0x2d3251: (0x510c, 0),# East Asian ideograph
- 0x6f492c: (0xac84, 0),# Korean hangul
- 0x6f5c68: (0xd568, 0),# Korean hangul
- 0x224d2f: (0x6f5a, 0),# East Asian ideograph
- 0x293362: (0x8c1d, 0),# East Asian ideograph
- 0x214f21: (0x79bd, 0),# East Asian ideograph
- 0x214f22: (0x842c, 0),# East Asian ideograph
- 0x214f23: (0x79be, 0),# East Asian ideograph
- 0x214f24: (0x79c0, 0),# East Asian ideograph
- 0x214f25: (0x79c1, 0),# East Asian ideograph
- 0x214f26: (0x79bf, 0),# East Asian ideograph
- 0x214d31: (0x76c8, 0),# East Asian ideograph
- 0x214f28: (0x79d1, 0),# East Asian ideograph
- 0x214f29: (0x79cb, 0),# East Asian ideograph
- 0x214f2a: (0x79d2, 0),# East Asian ideograph
- 0x214f2b: (0x79e4, 0),# East Asian ideograph
- 0x214f2c: (0x79e6, 0),# East Asian ideograph
- 0x214f2d: (0x79e3, 0),# East Asian ideograph
- 0x214f2e: (0x79df, 0),# East Asian ideograph
- 0x214f2f: (0x79e7, 0),# East Asian ideograph
- 0x214f30: (0x79e9, 0),# East Asian ideograph
- 0x224f31: (0x702d, 0),# East Asian ideograph
- 0x214f32: (0x7a05, 0),# East Asian ideograph
- 0x214f33: (0x7a0d, 0),# East Asian ideograph
- 0x214f34: (0x7a08, 0),# East Asian ideograph
- 0x214f35: (0x7a0b, 0),# East Asian ideograph
- 0x214f36: (0x7a00, 0),# East Asian ideograph
- 0x214f37: (0x7a1f, 0),# East Asian ideograph
- 0x234f38: (0x981f, 0),# East Asian ideograph
- 0x214f39: (0x7a20, 0),# East Asian ideograph
- 0x214f3a: (0x7a1a, 0),# East Asian ideograph
- 0x214f3b: (0x7a14, 0),# East Asian ideograph
- 0x214f3c: (0x7a31, 0),# East Asian ideograph
- 0x214f3d: (0x7a2e, 0),# East Asian ideograph
- 0x214f3e: (0x7a3f, 0),# East Asian ideograph
- 0x214f3f: (0x7a3c, 0),# East Asian ideograph
- 0x274f40: (0x8c37, 0),# East Asian ideograph
- 0x214f41: (0x7a3d, 0),# East Asian ideograph
- 0x214f42: (0x7a37, 0),# East Asian ideograph
- 0x214f43: (0x7a3b, 0),# East Asian ideograph
- 0x214f44: (0x7a4d, 0),# East Asian ideograph
- 0x214f45: (0x7a4e, 0),# East Asian ideograph
- 0x214f46: (0x7a4c, 0),# East Asian ideograph
- 0x214f47: (0x7a46, 0),# East Asian ideograph
- 0x214f48: (0x7a57, 0),# East Asian ideograph
- 0x274f49: (0x7a51, 0),# East Asian ideograph
- 0x214f4a: (0x7a62, 0),# East Asian ideograph
- 0x274f4b: (0x83b7, 0),# East Asian ideograph (duplicate simplified)
- 0x214f4c: (0x7a69, 0),# East Asian ideograph
- 0x214f4d: (0x7a74, 0),# East Asian ideograph
- 0x214f4e: (0x7a76, 0),# East Asian ideograph
- 0x214f4f: (0x7a79, 0),# East Asian ideograph
- 0x214f50: (0x7a7a, 0),# East Asian ideograph
- 0x214f51: (0x7a7f, 0),# East Asian ideograph
- 0x214f52: (0x7a81, 0),# East Asian ideograph
- 0x214f53: (0x7a84, 0),# East Asian ideograph
- 0x214f54: (0x7a88, 0),# East Asian ideograph
- 0x214f55: (0x7a92, 0),# East Asian ideograph
- 0x214f56: (0x7a95, 0),# East Asian ideograph
- 0x214f57: (0x7a98, 0),# East Asian ideograph
- 0x214f58: (0x7a96, 0),# East Asian ideograph
- 0x214f59: (0x7a97, 0),# East Asian ideograph
- 0x214f5a: (0x7a9f, 0),# East Asian ideograph
- 0x214f5b: (0x7aa0, 0),# East Asian ideograph
- 0x214f5c: (0x7aaa, 0),# East Asian ideograph
- 0x214d3a: (0x76de, 0),# East Asian ideograph
- 0x214f5e: (0x7aaf, 0),# East Asian ideograph
- 0x214f5f: (0x7aae, 0),# East Asian ideograph
- 0x274f60: (0x7aa5, 0),# East Asian ideograph
- 0x274f61: (0x7a8d, 0),# East Asian ideograph
- 0x274f62: (0x7a9c, 0),# East Asian ideograph
- 0x274f63: (0x7aa6, 0),# East Asian ideograph
- 0x214f64: (0x7aca, 0),# East Asian ideograph
- 0x214f65: (0x7acb, 0),# East Asian ideograph
- 0x214f66: (0x7ad9, 0),# East Asian ideograph
- 0x214f67: (0x7ae5, 0),# East Asian ideograph
- 0x214f68: (0x7ae3, 0),# East Asian ideograph
- 0x214d3c: (0x76e1, 0),# East Asian ideograph
- 0x214f6a: (0x7aef, 0),# East Asian ideograph
- 0x274f6b: (0x7ade, 0),# East Asian ideograph
- 0x214f6c: (0x7af9, 0),# East Asian ideograph
- 0x214f6d: (0x7afa, 0),# East Asian ideograph
- 0x214f6e: (0x7aff, 0),# East Asian ideograph
- 0x214f6f: (0x7afd, 0),# East Asian ideograph
- 0x214f70: (0x7b06, 0),# East Asian ideograph
- 0x214f71: (0x7b11, 0),# East Asian ideograph
- 0x214f72: (0x7b20, 0),# East Asian ideograph
- 0x214f73: (0x7b2c, 0),# East Asian ideograph
- 0x214f74: (0x7b28, 0),# East Asian ideograph
- 0x214d3e: (0x76e4, 0),# East Asian ideograph
- 0x214f76: (0x7b1e, 0),# East Asian ideograph
- 0x214f77: (0x7b19, 0),# East Asian ideograph
- 0x214f78: (0x7b26, 0),# East Asian ideograph
- 0x214f79: (0x7b46, 0),# East Asian ideograph
- 0x214f7a: (0x7b49, 0),# East Asian ideograph
- 0x214d3f: (0x76e7, 0),# East Asian ideograph
- 0x214f7c: (0x7b56, 0),# East Asian ideograph
- 0x214f7d: (0x7b52, 0),# East Asian ideograph
- 0x214f7e: (0x7b4b, 0),# East Asian ideograph
- 0x234d40: (0x9784, 0),# East Asian ideograph
- 0x6f4b5f: (0xb0b4, 0),# Korean hangul
- 0x294472: (0x951e, 0),# East Asian ideograph
- 0x4b4d41: (0x862f, 0),# East Asian ideograph
- 0x6f4d42: (0xb3cc, 0),# Korean hangul
- 0x2e3654: (0x657f, 0),# East Asian ideograph
- 0x2d502b: (0x693e, 0),# East Asian ideograph
- 0x234d43: (0x977f, 0),# East Asian ideograph
- 0x6f5829: (0xc9c0, 0),# Korean hangul
- 0x224d44: (0x6f0b, 0),# East Asian ideograph
- 0x2d4362: (0x6722, 0),# East Asian ideograph
- 0x4b4d45: (0x76f4, 0),# East Asian ideograph (variant of 214D45 which maps to 76F4)
- 0x6f4b60: (0xb0b5, 0),# Korean hangul
- 0x217577: (0x57d2, 0),# East Asian ideograph
- 0x6f4d46: (0xb3d7, 0),# Korean hangul
- 0x213145: (0x4f9d, 0),# East Asian ideograph
- 0x217134: (0x5588, 0),# East Asian ideograph
- 0x6f4d47: (0xb3d9, 0),# Korean hangul
- 0x6f5c6d: (0xd571, 0),# Korean hangul
- 0x27493f: (0x6f9c, 0),# East Asian ideograph
- 0x6f582a: (0xc9c1, 0),# Korean hangul
- 0x276256: (0x4e3d, 0),# East Asian ideograph
- 0x224651: (0x6c0c, 0),# East Asian ideograph
- 0x234d49: (0x9789, 0),# East Asian ideograph
- 0x6f4d4a: (0xb400, 0),# Korean hangul
- 0x6f4b61: (0xb0b8, 0),# Korean hangul
- 0x294474: (0x951f, 0),# East Asian ideograph
- 0x224d4b: (0x6f6c, 0),# East Asian ideograph
- 0x294031: (0x909d, 0),# East Asian ideograph
- 0x333944: (0x5b2d, 0),# East Asian ideograph
- 0x6f4d4c: (0xb418, 0),# Korean hangul
- 0x283f30: (0x6966, 0),# East Asian ideograph
- 0x224d4d: (0x6f8b, 0),# East Asian ideograph
- 0x6f582b: (0xc9c4, 0),# Korean hangul
- 0x6f4d4e: (0xb420, 0),# Korean hangul
- 0x2d4364: (0x671e, 0),# East Asian ideograph
- 0x2d4d4f: (0x771f, 0),# East Asian ideograph
- 0x276327: (0x9f89, 0),# East Asian ideograph
- 0x6f586a: (0xcb50, 0),# Korean hangul
- 0x6f4d50: (0xb429, 0),# Korean hangul
- 0x213147: (0x4f75, 0),# East Asian ideograph
- 0x6f4d51: (0xb42b, 0),# Korean hangul
- 0x212329: (0xff09, 0),# Ideographic right parenthesis
- 0x6f4d52: (0xb42c, 0),# Korean hangul
- 0x6f582c: (0xc9c7, 0),# Korean hangul
- 0x4b3b67: (0x6b67, 0),# East Asian ideograph
- 0x234d54: (0x9794, 0),# East Asian ideograph
- 0x6f4b63: (0xb0bc, 0),# Korean hangul
- 0x335773: (0x88b4, 0),# East Asian ideograph
- 0x6f4d55: (0xb454, 0),# Korean hangul
- 0x27514a: (0x7ef0, 0),# East Asian ideograph
- 0x214d56: (0x773e, 0),# East Asian ideograph
- 0x6f5c70: (0xd578, 0),# Korean hangul
- 0x276b5b: (0x5250, 0),# East Asian ideograph
- 0x214d57: (0x774f, 0),# East Asian ideograph
- 0x3f5959: (0x8276, 0),# East Asian ideograph
- 0x224d58: (0x6e88, 0),# East Asian ideograph
- 0x35347b: (0x8b2d, 0),# East Asian ideograph
- 0x234d59: (0x979b, 0),# East Asian ideograph
- 0x6f4b64: (0xb0c4, 0),# Korean hangul
- 0x224d5a: (0x6f55, 0),# East Asian ideograph
- 0x213149: (0x4f73, 0),# East Asian ideograph
- 0x21623e: (0x9d61, 0),# East Asian ideograph
- 0x235021: (0x9865, 0),# East Asian ideograph
- 0x235022: (0x9866, 0),# East Asian ideograph
- 0x215023: (0x7b54, 0),# East Asian ideograph
- 0x215024: (0x7b60, 0),# East Asian ideograph
- 0x215025: (0x7b77, 0),# East Asian ideograph
- 0x215026: (0x7b75, 0),# East Asian ideograph
- 0x215027: (0x7ba1, 0),# East Asian ideograph
- 0x215028: (0x7b94, 0),# East Asian ideograph
- 0x235029: (0x986c, 0),# East Asian ideograph
- 0x21502a: (0x7b9d, 0),# East Asian ideograph
- 0x21502b: (0x7b8b, 0),# East Asian ideograph
- 0x21502c: (0x7b97, 0),# East Asian ideograph
- 0x21502d: (0x7b8f, 0),# East Asian ideograph
- 0x21502e: (0x7bc7, 0),# East Asian ideograph
- 0x214d5d: (0x775e, 0),# East Asian ideograph
- 0x235030: (0x9873, 0),# East Asian ideograph
- 0x215031: (0x7bb1, 0),# East Asian ideograph
- 0x215032: (0x7bb4, 0),# East Asian ideograph
- 0x215033: (0x7bc0, 0),# East Asian ideograph
- 0x215034: (0x7bc6, 0),# East Asian ideograph
- 0x215035: (0x7bc1, 0),# East Asian ideograph
- 0x215036: (0x7c11, 0),# East Asian ideograph
- 0x215037: (0x7bd9, 0),# East Asian ideograph
- 0x215038: (0x7bdb, 0),# East Asian ideograph
- 0x235039: (0x98ad, 0),# East Asian ideograph
- 0x21503a: (0x7bc9, 0),# East Asian ideograph
- 0x21503b: (0x7be1, 0),# East Asian ideograph
- 0x21503c: (0x7be9, 0),# East Asian ideograph
- 0x21503d: (0x7c07, 0),# East Asian ideograph
- 0x21503e: (0x7c0d, 0),# East Asian ideograph
- 0x21503f: (0x7bfe, 0),# East Asian ideograph
- 0x235040: (0x98b4, 0),# East Asian ideograph
- 0x215041: (0x7c21, 0),# East Asian ideograph
- 0x215042: (0x7c2b, 0),# East Asian ideograph
- 0x215043: (0x7c2a, 0),# East Asian ideograph
- 0x215044: (0x7c27, 0),# East Asian ideograph
- 0x215045: (0x7c1e, 0),# East Asian ideograph
- 0x215046: (0x7c23, 0),# East Asian ideograph
- 0x215047: (0x7c3f, 0),# East Asian ideograph
- 0x215048: (0x7c3e, 0),# East Asian ideograph
- 0x215049: (0x7c38, 0),# East Asian ideograph
- 0x21504a: (0x7c37, 0),# East Asian ideograph
- 0x27504b: (0x7b7e, 0),# East Asian ideograph
- 0x21504c: (0x7c43, 0),# East Asian ideograph
- 0x23504d: (0x98bb, 0),# East Asian ideograph
- 0x23504e: (0x98c0, 0),# East Asian ideograph
- 0x21504f: (0x7c50, 0),# East Asian ideograph
- 0x215050: (0x7c60, 0),# East Asian ideograph
- 0x215051: (0x7c5f, 0),# East Asian ideograph
- 0x215052: (0x7c64, 0),# East Asian ideograph
- 0x215053: (0x7c6c, 0),# East Asian ideograph
- 0x215054: (0x7c6e, 0),# East Asian ideograph
- 0x215055: (0x7c72, 0),# East Asian ideograph
- 0x215056: (0x7c73, 0),# East Asian ideograph
- 0x215057: (0x7c89, 0),# East Asian ideograph
- 0x215058: (0x7c92, 0),# East Asian ideograph
- 0x215059: (0x7c97, 0),# East Asian ideograph
- 0x21505a: (0x7c9f, 0),# East Asian ideograph
- 0x21505b: (0x7ca5, 0),# East Asian ideograph
- 0x21505c: (0x7ca4, 0),# East Asian ideograph
- 0x21505d: (0x7cb1, 0),# East Asian ideograph
- 0x21505e: (0x7cb3, 0),# East Asian ideograph
- 0x23505f: (0x98e1, 0),# East Asian ideograph
- 0x275060: (0x7c8b, 0),# East Asian ideograph
- 0x215061: (0xfa1d, 0),# East Asian ideograph
- 0x275062: (0x80e1, 0),# East Asian ideograph (duplicate simplified)
- 0x215063: (0x7cd6, 0),# East Asian ideograph
- 0x215064: (0x7cd5, 0),# East Asian ideograph
- 0x215065: (0x7ce0, 0),# East Asian ideograph
- 0x215066: (0x7cdc, 0),# East Asian ideograph
- 0x215067: (0x7cdf, 0),# East Asian ideograph
- 0x215068: (0x7cde, 0),# East Asian ideograph
- 0x215069: (0x7ce2, 0),# East Asian ideograph
- 0x21506a: (0x7cd9, 0),# East Asian ideograph
- 0x21506b: (0x7ce7, 0),# East Asian ideograph
- 0x21506c: (0x7cef, 0),# East Asian ideograph
- 0x2e506d: (0x70b1, 0),# East Asian ideograph
- 0x21506e: (0x7cfb, 0),# East Asian ideograph
- 0x21506f: (0x7cfe, 0),# East Asian ideograph
- 0x215070: (0x7d00, 0),# East Asian ideograph
- 0x215071: (0x7d02, 0),# East Asian ideograph
- 0x215072: (0x7d05, 0),# East Asian ideograph
- 0x225073: (0x70a9, 0),# East Asian ideograph
- 0x215074: (0x7d04, 0),# East Asian ideograph
- 0x215075: (0x7d07, 0),# East Asian ideograph
- 0x215076: (0x7d21, 0),# East Asian ideograph
- 0x215077: (0x7d0b, 0),# East Asian ideograph
- 0x225078: (0x70ea, 0),# East Asian ideograph
- 0x215079: (0x7d20, 0),# East Asian ideograph
- 0x21507a: (0x7d1c, 0),# East Asian ideograph
- 0x21507b: (0x7d22, 0),# East Asian ideograph
- 0x27507c: (0x7eb0, 0),# East Asian ideograph
- 0x234d6a: (0x97ac, 0),# East Asian ideograph
- 0x21507e: (0x7d10, 0),# East Asian ideograph
- 0x6f5c74: (0xd587, 0),# Korean hangul
- 0x6f4d6b: (0xb514, 0),# Korean hangul
- 0x6f5831: (0xc9d3, 0),# Korean hangul
- 0x6f4d6c: (0xb515, 0),# Korean hangul
- 0x6f7641: (0xe8b3, 0),# Korean hangul
- 0x2d4d6d: (0x7792, 0),# East Asian ideograph
- 0x2d3f27: (0x6120, 0),# East Asian ideograph
- 0x69252d: (0x30ad, 0),# Katakana letter KI
- 0x6f4d6e: (0xb51b, 0),# Korean hangul
- 0x4b4c79: (0x7672, 0),# East Asian ideograph
- 0x6f4d6f: (0xb51c, 0),# Korean hangul
- 0x4c4c35: (0x6e0c, 0),# East Asian ideograph
- 0x6f5c75: (0xd588, 0),# Korean hangul
- 0x234d70: (0x97ae, 0),# East Asian ideograph
- 0x6f5832: (0xc9d5, 0),# Korean hangul
- 0x234d71: (0x97a8, 0),# East Asian ideograph
- 0x334a58: (0x89dd, 0),# East Asian ideograph
- 0x6f4d72: (0xb527, 0),# Korean hangul
- 0x4b537d: (0x9acc, 0),# East Asian ideograph
- 0x6f592a: (0xcc3d, 0),# Korean hangul
- 0x6f5337: (0xc14b, 0),# Korean hangul
- 0x274d73: (0x4e86, 0),# East Asian ideograph
- 0x224d74: (0x6f9f, 0),# East Asian ideograph
- 0x2d325f: (0x50bb, 0),# East Asian ideograph (variant of 4B325F which maps to 50BB)
- 0x6f4d75: (0xb52a, 0),# Korean hangul
- 0x6f5833: (0xc9d6, 0),# Korean hangul
- 0x29416a: (0x948d, 0),# East Asian ideograph
- 0x22465a: (0x6c18, 0),# East Asian ideograph
- 0x2d3821: (0x962f, 0),# East Asian ideograph
- 0x6f5274: (0xc0d9, 0),# Korean hangul
- 0x213822: (0x5747, 0),# East Asian ideograph
- 0x39447d: (0x6ac2, 0),# East Asian ideograph
- 0x234d78: (0x97a5, 0),# East Asian ideograph
- 0x2d4d21: (0x7681, 0),# East Asian ideograph
- 0x6f4d79: (0xb531, 0),# Korean hangul
- 0x287360: (0x7f2c, 0),# East Asian ideograph
- 0x2f3a5e: (0x8e6e, 0),# East Asian ideograph
- 0x234d7a: (0x97b2, 0),# East Asian ideograph
- 0x2d5836: (0x89e6, 0),# East Asian ideograph
- 0x6f5834: (0xc9d9, 0),# Korean hangul
- 0x22465b: (0x6c19, 0),# East Asian ideograph
- 0x216032: (0x7ae0, 0),# East Asian ideograph
- 0x4b372c: (0x5606, 0),# East Asian ideograph
- 0x234d7c: (0x97b4, 0),# East Asian ideograph
- 0x213827: (0x5783, 0),# East Asian ideograph
- 0x224d7d: (0x6fbc, 0),# East Asian ideograph
- 0x213828: (0x576a, 0),# East Asian ideograph
- 0x235b67: (0x9daa, 0),# East Asian ideograph
- 0x4b3773: (0x56e3, 0),# East Asian ideograph
- 0x213829: (0x5769, 0),# East Asian ideograph
- 0x34782a: (0x90c5, 0),# East Asian ideograph
- 0x284d49: (0x6da0, 0),# East Asian ideograph
- 0x213f35: (0x6162, 0),# East Asian ideograph
- 0x21382b: (0x5761, 0),# East Asian ideograph
- 0x4b5946: (0x8a33, 0),# East Asian ideograph
- 0x21382c: (0x5764, 0),# East Asian ideograph
- 0x27383e: (0x57a9, 0),# East Asian ideograph
- 0x6f586c: (0xcb59, 0),# Korean hangul
- 0x6f543d: (0xc31c, 0),# Korean hangul
- 0x4b382e: (0x57c0, 0),# East Asian ideograph
- 0x23382f: (0x8da1, 0),# East Asian ideograph
- 0x693c32: (0x9d2b, 0),# East Asian ideograph
- 0x215121: (0x7d17, 0),# East Asian ideograph
- 0x215122: (0x7d0d, 0),# East Asian ideograph (variant of 455122 which maps to 7D0D)
- 0x215123: (0x7d1a, 0),# East Asian ideograph
- 0x215124: (0x7d19, 0),# East Asian ideograph
- 0x215125: (0x7d1b, 0),# East Asian ideograph
- 0x215126: (0x7d46, 0),# East Asian ideograph
- 0x213831: (0x578b, 0),# East Asian ideograph
- 0x215128: (0x7d3c, 0),# East Asian ideograph
- 0x215129: (0x7d2e, 0),# East Asian ideograph
- 0x21512a: (0x7d39, 0),# East Asian ideograph
- 0x27512b: (0x7ec4, 0),# East Asian ideograph
- 0x21512c: (0x7d30, 0),# East Asian ideograph
- 0x21512d: (0x7d33, 0),# East Asian ideograph
- 0x21512e: (0x7d2f, 0),# East Asian ideograph
- 0x27512f: (0x7ecc, 0),# East Asian ideograph
- 0x275130: (0x7ec8, 0),# East Asian ideograph
- 0x275131: (0x7edf, 0),# East Asian ideograph
- 0x275132: (0x7ede, 0),# East Asian ideograph
- 0x215133: (0x7d68, 0),# East Asian ideograph
- 0x275134: (0x7ed3, 0),# East Asian ideograph
- 0x215135: (0x7d2b, 0),# East Asian ideograph
- 0x215136: (0x7d62, 0),# East Asian ideograph
- 0x215137: (0x7d76, 0),# East Asian ideograph
- 0x215138: (0x7d61, 0),# East Asian ideograph
- 0x275139: (0x7ed9, 0),# East Asian ideograph
- 0x21513a: (0x7d6e, 0),# East Asian ideograph
- 0x21513b: (0x7d72, 0),# East Asian ideograph
- 0x27513c: (0x7ecf, 0),# East Asian ideograph
- 0x21513d: (0x7d91, 0),# East Asian ideograph
- 0x21513e: (0x7d79, 0),# East Asian ideograph
- 0x21513f: (0x7d8f, 0),# East Asian ideograph
- 0x275140: (0x7ed1, 0),# East Asian ideograph
- 0x275141: (0x7efc, 0),# East Asian ideograph
- 0x225142: (0x70f7, 0),# East Asian ideograph
- 0x215143: (0x7db0, 0),# East Asian ideograph
- 0x275144: (0x7d27, 0),# East Asian ideograph
- 0x275145: (0x7eeb, 0),# East Asian ideograph
- 0x275146: (0x7f00, 0),# East Asian ideograph
- 0x215147: (0x7dba, 0),# East Asian ideograph
- 0x275148: (0x7f51, 0),# East Asian ideograph
- 0x275149: (0x7eb2, 0),# East Asian ideograph
- 0x22514a: (0x7110, 0),# East Asian ideograph
- 0x21514b: (0x7db5, 0),# East Asian ideograph
- 0x21514c: (0x7da0, 0),# East Asian ideograph
- 0x27514d: (0x7ef8, 0),# East Asian ideograph
- 0x27514e: (0x7ef4, 0),# East Asian ideograph
- 0x27514f: (0x7ef5, 0),# East Asian ideograph
- 0x275150: (0x7eb6, 0),# East Asian ideograph
- 0x275151: (0x7f01, 0),# East Asian ideograph
- 0x215152: (0x7de0, 0),# East Asian ideograph
- 0x235153: (0x9933, 0),# East Asian ideograph
- 0x235154: (0x9942, 0),# East Asian ideograph (variant of 4D5154 which maps to 9942)
- 0x275155: (0x7eea, 0),# East Asian ideograph
- 0x215156: (0x7dd8, 0),# East Asian ideograph
- 0x275157: (0x7f05, 0),# East Asian ideograph
- 0x275158: (0x7f09, 0),# East Asian ideograph
- 0x275159: (0x7f13, 0),# East Asian ideograph
- 0x27515a: (0x7f18, 0),# East Asian ideograph
- 0x21515b: (0x7de8, 0),# East Asian ideograph
- 0x21515c: (0x7dda, 0),# East Asian ideograph
- 0x27515d: (0x7f0d, 0),# East Asian ideograph
- 0x27515e: (0x7f0e, 0),# East Asian ideograph
- 0x27515f: (0x7f23, 0),# East Asian ideograph
- 0x275160: (0x7f22, 0),# East Asian ideograph
- 0x275161: (0x8426, 0),# East Asian ideograph
- 0x275162: (0x7f1a, 0),# East Asian ideograph
- 0x215163: (0x7dfb, 0),# East Asian ideograph
- 0x275164: (0x53bf, 0),# East Asian ideograph (variant of 455164 which maps to 53BF)
- 0x215165: (0x7e2e, 0),# East Asian ideograph
- 0x215166: (0x7e3e, 0),# East Asian ideograph
- 0x275167: (0x7f2a, 0),# East Asian ideograph
- 0x275168: (0x7f15, 0),# East Asian ideograph
- 0x215169: (0x7e32, 0),# East Asian ideograph
- 0x23516a: (0x9948, 0),# East Asian ideograph
- 0x21516b: (0x7e41, 0),# East Asian ideograph
- 0x23516c: (0x9947, 0),# East Asian ideograph
- 0x23516d: (0x9949, 0),# East Asian ideograph
- 0x21516e: (0x7e31, 0),# East Asian ideograph
- 0x22516f: (0x713f, 0),# East Asian ideograph
- 0x235170: (0x9943, 0),# East Asian ideograph
- 0x275171: (0x7ec7, 0),# East Asian ideograph
- 0x215172: (0x7e61, 0),# East Asian ideograph
- 0x235173: (0x994e, 0),# East Asian ideograph
- 0x235174: (0x9950, 0),# East Asian ideograph
- 0x215175: (0x7e6b, 0),# East Asian ideograph
- 0x215176: (0x7e69, 0),# East Asian ideograph
- 0x215177: (0x7e6d, 0),# East Asian ideograph
- 0x215178: (0x7e79, 0),# East Asian ideograph
- 0x215179: (0x7e6a, 0),# East Asian ideograph
- 0x21517a: (0x8fae, 0),# East Asian ideograph
- 0x27517b: (0x7f24, 0),# East Asian ideograph
- 0x21517c: (0x7e82, 0),# East Asian ideograph
- 0x21517d: (0x7e7c, 0),# East Asian ideograph
- 0x21517e: (0x7e8f, 0),# East Asian ideograph
- 0x6f5947: (0xcce4, 0),# Korean hangul
- 0x227841: (0x80ef, 0),# East Asian ideograph
- 0x217144: (0x5581, 0),# East Asian ideograph
- 0x4b3774: (0x56f3, 0),# East Asian ideograph
- 0x235729: (0x9b8e, 0),# East Asian ideograph
- 0x287321: (0x7f26, 0),# East Asian ideograph
- 0x6f5c7d: (0xd5d2, 0),# Korean hangul
- 0x6f583a: (0xc9e0, 0),# Korean hangul
- 0x216038: (0x9802, 0),# East Asian ideograph
- 0x213844: (0x5831, 0),# East Asian ideograph
- 0x4b594b: (0x5909, 0),# East Asian ideograph
- 0x6f5d72: (0xd763, 0),# Korean hangul
- 0x4c3b31: (0x6798, 0),# East Asian ideograph
- 0x213845: (0x582f, 0),# East Asian ideograph
- 0x213a30: (0x5abc, 0),# East Asian ideograph
- 0x6f5c7e: (0xd5d8, 0),# Korean hangul
- 0x213848: (0x5830, 0),# East Asian ideograph
- 0x274638: (0x6b7c, 0),# East Asian ideograph
- 0x213849: (0x5824, 0),# East Asian ideograph
- 0x21384a: (0x5834, 0),# East Asian ideograph
- 0x21784b: (0x58c6, 0),# East Asian ideograph
- 0x394042: (0x646d, 0),# East Asian ideograph
- 0x284b28: (0x6d48, 0),# East Asian ideograph
- 0x22384c: (0x665e, 0),# East Asian ideograph
- 0x69525d: (0x53fa, 0),# East Asian ideograph
- 0x705d46: (0x841c, 0),# East Asian ideograph
- 0x27384d: (0x6d82, 0),# East Asian ideograph
- 0x21603a: (0x9805, 0),# East Asian ideograph
- 0x234a62: (0x967e, 0),# East Asian ideograph
- 0x213158: (0x4fd1, 0),# East Asian ideograph
- 0x223850: (0x667e, 0),# East Asian ideograph
- 0x21387c: (0x5919, 0),# East Asian ideograph
- 0x213851: (0x584c, 0),# East Asian ideograph
- 0x213852: (0x585a, 0),# East Asian ideograph
- 0x213853: (0x586d, 0),# East Asian ideograph
- 0x6f5276: (0xc0dc, 0),# Korean hangul
- 0x217854: (0x58d2, 0),# East Asian ideograph
- 0x213855: (0x5862, 0),# East Asian ideograph
- 0x335b70: (0x5ef8, 0),# East Asian ideograph
- 0x213f4e: (0x61c7, 0),# East Asian ideograph
- 0x273856: (0x5757, 0),# East Asian ideograph
- 0x6f4e33: (0xb5a8, 0),# Korean hangul
- 0x6f583e: (0xc9e7, 0),# Korean hangul
- 0x22687e: (0x7a2d, 0),# East Asian ideograph
- 0x4b3b79: (0x5d8c, 0),# East Asian ideograph
- 0x217428: (0x5704, 0),# East Asian ideograph
- 0x334621: (0x8b99, 0),# East Asian ideograph
- 0x233859: (0x8daf, 0),# East Asian ideograph
- 0x21385a: (0x588a, 0),# East Asian ideograph
- 0x215221: (0x7e8c, 0),# East Asian ideograph
- 0x275222: (0x7f28, 0),# East Asian ideograph
- 0x215223: (0x7e96, 0),# East Asian ideograph
- 0x215224: (0x7e9c, 0),# East Asian ideograph
- 0x6f5225: (0xbe75, 0),# Korean hangul
- 0x215226: (0x7f38, 0),# East Asian ideograph
- 0x215227: (0x7f3a, 0),# East Asian ideograph
- 0x225228: (0x7135, 0),# East Asian ideograph
- 0x235229: (0x995d, 0),# East Asian ideograph
- 0x6f522a: (0xbe84, 0),# Korean hangul
- 0x21522b: (0x7f50, 0),# East Asian ideograph
- 0x21522c: (0x7f55, 0),# East Asian ideograph
- 0x21522d: (0x7f54, 0),# East Asian ideograph
- 0x21522e: (0x7f5f, 0),# East Asian ideograph
- 0x21522f: (0x7f72, 0),# East Asian ideograph
- 0x215230: (0x7f6e, 0),# East Asian ideograph
- 0x224223: (0x6a89, 0),# East Asian ideograph
- 0x215232: (0x7f6a, 0),# East Asian ideograph
- 0x215233: (0x7f70, 0),# East Asian ideograph
- 0x215234: (0x7f75, 0),# East Asian ideograph
- 0x215235: (0x7f77, 0),# East Asian ideograph
- 0x215236: (0x7f79, 0),# East Asian ideograph
- 0x215237: (0x7f85, 0),# East Asian ideograph
- 0x275238: (0x7f81, 0),# East Asian ideograph
- 0x215239: (0x7f8a, 0),# East Asian ideograph
- 0x21523a: (0x7f8c, 0),# East Asian ideograph
- 0x21523b: (0x7f8e, 0),# East Asian ideograph
- 0x21523c: (0x7f94, 0),# East Asian ideograph
- 0x21523d: (0x7f9e, 0),# East Asian ideograph
- 0x21523e: (0x7f9a, 0),# East Asian ideograph
- 0x21523f: (0x5584, 0),# East Asian ideograph
- 0x215240: (0x7fa8, 0),# East Asian ideograph
- 0x215241: (0x7fa4, 0),# East Asian ideograph
- 0x235242: (0x99aa, 0),# East Asian ideograph
- 0x215243: (0x7faf, 0),# East Asian ideograph
- 0x215244: (0x7fb2, 0),# East Asian ideograph
- 0x215245: (0x7fb6, 0),# East Asian ideograph
- 0x215246: (0x7fb8, 0),# East Asian ideograph
- 0x215247: (0x7fb9, 0),# East Asian ideograph
- 0x215248: (0x7fbd, 0),# East Asian ideograph
- 0x235249: (0x99b5, 0),# East Asian ideograph
- 0x21524a: (0x7fc5, 0),# East Asian ideograph
- 0x21524b: (0x7fc1, 0),# East Asian ideograph
- 0x21524c: (0x7fcc, 0),# East Asian ideograph
- 0x21524d: (0x7fd2, 0),# East Asian ideograph
- 0x21524e: (0x7fce, 0),# East Asian ideograph (variant of 4B524E which maps to 7FCE)
- 0x21524f: (0x7fd4, 0),# East Asian ideograph
- 0x215250: (0x7fd5, 0),# East Asian ideograph
- 0x215251: (0x7fe0, 0),# East Asian ideograph
- 0x215252: (0x7fe1, 0),# East Asian ideograph
- 0x215253: (0x7fdf, 0),# East Asian ideograph
- 0x235254: (0x99bd, 0),# East Asian ideograph
- 0x215255: (0x7ff0, 0),# East Asian ideograph
- 0x215256: (0x7ff3, 0),# East Asian ideograph
- 0x215257: (0x7ffc, 0),# East Asian ideograph
- 0x215258: (0x7ff9, 0),# East Asian ideograph
- 0x215259: (0x7ffb, 0),# East Asian ideograph
- 0x21525a: (0x7ff1, 0),# East Asian ideograph
- 0x21525b: (0x8000, 0),# East Asian ideograph
- 0x22525c: (0x7143, 0),# East Asian ideograph
- 0x21525d: (0x8003, 0),# East Asian ideograph
- 0x21525e: (0x8006, 0),# East Asian ideograph
- 0x21525f: (0x8005, 0),# East Asian ideograph
- 0x215260: (0x800c, 0),# East Asian ideograph
- 0x215261: (0x8010, 0),# East Asian ideograph
- 0x215262: (0x800d, 0),# East Asian ideograph
- 0x215263: (0x8012, 0),# East Asian ideograph
- 0x215264: (0x8015, 0),# East Asian ideograph
- 0x215265: (0x8018, 0),# East Asian ideograph
- 0x215266: (0x8019, 0),# East Asian ideograph
- 0x215267: (0x8017, 0),# East Asian ideograph
- 0x215268: (0x801c, 0),# East Asian ideograph
- 0x235269: (0x99d8, 0),# East Asian ideograph
- 0x21526a: (0x8036, 0),# East Asian ideograph
- 0x21526b: (0x803f, 0),# East Asian ideograph
- 0x21526c: (0x803d, 0),# East Asian ideograph
- 0x21526d: (0x804a, 0),# East Asian ideograph
- 0x21526e: (0x8046, 0),# East Asian ideograph
- 0x21526f: (0x8056, 0),# East Asian ideograph
- 0x215270: (0x8058, 0),# East Asian ideograph
- 0x215271: (0x805e, 0),# East Asian ideograph
- 0x215272: (0x805a, 0),# East Asian ideograph
- 0x215273: (0x8071, 0),# East Asian ideograph
- 0x215274: (0x8072, 0),# East Asian ideograph
- 0x215275: (0x8073, 0),# East Asian ideograph
- 0x215276: (0x8070, 0),# East Asian ideograph
- 0x215277: (0x806f, 0),# East Asian ideograph
- 0x215278: (0x8077, 0),# East Asian ideograph
- 0x275279: (0x8042, 0),# East Asian ideograph
- 0x23527a: (0x99f0, 0),# East Asian ideograph
- 0x27527b: (0x542c, 0),# East Asian ideograph
- 0x21527c: (0x807f, 0),# East Asian ideograph
- 0x6f527c: (0xc0f4, 0),# Korean hangul
- 0x21527e: (0x8084, 0),# East Asian ideograph
- 0x21386b: (0x58d9, 0),# East Asian ideograph
- 0x6f5842: (0xc9f0, 0),# Korean hangul
- 0x27386c: (0x5792, 0),# East Asian ideograph
- 0x6f5a23: (0xcead, 0),# Korean hangul
- 0x21386d: (0x58df, 0),# East Asian ideograph
- 0x27386e: (0x574f, 0),# East Asian ideograph
- 0x4b3850: (0x5861, 0),# East Asian ideograph (variant of 213850)
- 0x23395c: (0x8e1e, 0),# East Asian ideograph
- 0x235732: (0x9b98, 0),# East Asian ideograph
- 0x6f4e34: (0xb5ab, 0),# Korean hangul
- 0x213870: (0x58e4, 0),# East Asian ideograph
- 0x276065: (0x9965, 0),# East Asian ideograph
- 0x4b3b7e: (0x5d15, 0),# East Asian ideograph
- 0x273871: (0x575d, 0),# East Asian ideograph
- 0x6f5d76: (0xd770, 0),# Korean hangul
- 0x223872: (0x6693, 0),# East Asian ideograph
- 0x233873: (0x8dbf, 0),# East Asian ideograph
- 0x273874: (0x58ee, 0),# East Asian ideograph
- 0x343875: (0x5fde, 0),# East Asian ideograph
- 0x284d58: (0x6ca9, 0),# East Asian ideograph
- 0x705c43: (0x82ca, 0),# East Asian ideograph
- 0x223876: (0x6690, 0),# East Asian ideograph
- 0x276321: (0x9f7f, 0),# East Asian ideograph
- 0x213877: (0x58fd, 0),# East Asian ideograph
- 0x276322: (0x9f83, 0),# East Asian ideograph
- 0x4d5d49: (0x9e81, 0),# East Asian ideograph
- 0x226323: (0x77b6, 0),# East Asian ideograph
- 0x213879: (0x5914, 0),# East Asian ideograph
- 0x276324: (0x9f84, 0),# East Asian ideograph
- 0x2e765f: (0x8037, 0),# East Asian ideograph
- 0x21387a: (0x5915, 0),# East Asian ideograph
- 0x284d59: (0x6ed7, 0),# East Asian ideograph
- 0x276325: (0x9f88, 0),# East Asian ideograph
- 0x6f542c: (0xc2f1, 0),# Korean hangul
- 0x213e2a: (0x6035, 0),# East Asian ideograph (variant of 4B3E2A which maps to 6035)
- 0x6f5675: (0xc78a, 0),# Korean hangul
- 0x276326: (0x9f87, 0),# East Asian ideograph
- 0x22787c: (0x8153, 0),# East Asian ideograph
- 0x216327: (0x9f6c, 0),# East Asian ideograph
- 0x21387d: (0x591a, 0),# East Asian ideograph
- 0x276328: (0x9f8a, 0),# East Asian ideograph
- 0x2d517d: (0x7d99, 0),# East Asian ideograph
- 0x4b484a: (0x6e13, 0),# East Asian ideograph
- 0x2d3272: (0x706e, 0),# East Asian ideograph
- 0x232329: (0x845c, 0),# East Asian ideograph
- 0x6f5846: (0xc9fc, 0),# Korean hangul
- 0x27632a: (0x9f8b, 0),# East Asian ideograph
- 0x234a6c: (0x9689, 0),# East Asian ideograph
- 0x22632b: (0x77b9, 0),# East Asian ideograph
- 0x453051: (0x8d30, 0),# East Asian ideograph
- 0x6f4b7d: (0xb124, 0),# Korean hangul
- 0x21632c: (0x9f94, 0),# East Asian ideograph
- 0x27632d: (0x9f9f, 0),# East Asian ideograph
- 0x4b484b: (0x51d6, 0),# East Asian ideograph
- 0x235736: (0x9b9f, 0),# East Asian ideograph
- 0x6f5847: (0xca00, 0),# Korean hangul
- 0x6f5427: (0xc2e4, 0),# Korean hangul
- 0x275321: (0x8083, 0),# East Asian ideograph
- 0x215322: (0x8087, 0),# East Asian ideograph
- 0x215323: (0x8089, 0),# East Asian ideograph
- 0x235324: (0x9a02, 0),# East Asian ideograph
- 0x215325: (0x808c, 0),# East Asian ideograph
- 0x215326: (0x8093, 0),# East Asian ideograph
- 0x215327: (0x809d, 0),# East Asian ideograph
- 0x215328: (0x8098, 0),# East Asian ideograph
- 0x215329: (0x809b, 0),# East Asian ideograph
- 0x21532a: (0x809a, 0),# East Asian ideograph
- 0x22532b: (0x7180, 0),# East Asian ideograph
- 0x22532c: (0x7189, 0),# East Asian ideograph
- 0x21532d: (0x80aa, 0),# East Asian ideograph
- 0x21532e: (0x80ba, 0),# East Asian ideograph
- 0x21532f: (0x80a5, 0),# East Asian ideograph
- 0x235330: (0x99fb, 0),# East Asian ideograph
- 0x235331: (0x99fd, 0),# East Asian ideograph
- 0x215332: (0x80b1, 0),# East Asian ideograph
- 0x225333: (0x7196, 0),# East Asian ideograph
- 0x215334: (0x80a1, 0),# East Asian ideograph
- 0x215335: (0x80a9, 0),# East Asian ideograph
- 0x27495d: (0x4e4c, 0),# East Asian ideograph
- 0x215337: (0x80d6, 0),# East Asian ideograph
- 0x215338: (0x80cc, 0),# East Asian ideograph
- 0x215339: (0x80e5, 0),# East Asian ideograph
- 0x21533a: (0x80da, 0),# East Asian ideograph
- 0x21533b: (0x80e1, 0),# East Asian ideograph
- 0x21533c: (0x80c3, 0),# East Asian ideograph
- 0x21533d: (0x80db, 0),# East Asian ideograph
- 0x21533e: (0x80c4, 0),# East Asian ideograph
- 0x21533f: (0x80ce, 0),# East Asian ideograph
- 0x215340: (0x80de, 0),# East Asian ideograph
- 0x215341: (0x80e4, 0),# East Asian ideograph
- 0x215342: (0x80f0, 0),# East Asian ideograph
- 0x215343: (0x8102, 0),# East Asian ideograph
- 0x215344: (0x8105, 0),# East Asian ideograph
- 0x215345: (0x80f1, 0),# East Asian ideograph
- 0x215346: (0x80f4, 0),# East Asian ideograph
- 0x215347: (0x80ed, 0),# East Asian ideograph
- 0x235348: (0x9a10, 0),# East Asian ideograph
- 0x215349: (0x8106, 0),# East Asian ideograph
- 0x21534a: (0x80f3, 0),# East Asian ideograph
- 0x21534b: (0x80f8, 0),# East Asian ideograph
- 0x23534c: (0x9a24, 0),# East Asian ideograph
- 0x21534d: (0x8108, 0),# East Asian ideograph
- 0x21534e: (0x812b, 0),# East Asian ideograph
- 0x21534f: (0x812f, 0),# East Asian ideograph
- 0x215350: (0x8116, 0),# East Asian ideograph
- 0x225351: (0x71a4, 0),# East Asian ideograph
- 0x215352: (0x8129, 0),# East Asian ideograph
- 0x215353: (0x8155, 0),# East Asian ideograph
- 0x215354: (0x8154, 0),# East Asian ideograph
- 0x215355: (0x814b, 0),# East Asian ideograph
- 0x215356: (0x8151, 0),# East Asian ideograph
- 0x215357: (0x8150, 0),# East Asian ideograph
- 0x215358: (0x814e, 0),# East Asian ideograph
- 0x275359: (0x80c0, 0),# East Asian ideograph
- 0x21535a: (0x8146, 0),# East Asian ideograph
- 0x21535b: (0x813e, 0),# East Asian ideograph
- 0x21535c: (0x8171, 0),# East Asian ideograph
- 0x21535d: (0x8170, 0),# East Asian ideograph
- 0x21535e: (0x8178, 0),# East Asian ideograph
- 0x21535f: (0x8165, 0),# East Asian ideograph
- 0x215360: (0x816e, 0),# East Asian ideograph
- 0x215361: (0x8173, 0),# East Asian ideograph
- 0x275362: (0x80bf, 0),# East Asian ideograph
- 0x215363: (0x8179, 0),# East Asian ideograph
- 0x215364: (0x817a, 0),# East Asian ideograph
- 0x215365: (0x8166, 0),# East Asian ideograph
- 0x215366: (0x8180, 0),# East Asian ideograph
- 0x225367: (0x71d1, 0),# East Asian ideograph
- 0x215368: (0x817f, 0),# East Asian ideograph
- 0x215369: (0x818a, 0),# East Asian ideograph
- 0x21536a: (0x8188, 0),# East Asian ideograph
- 0x21536b: (0x819d, 0),# East Asian ideograph
- 0x21536c: (0x81a0, 0),# East Asian ideograph
- 0x22536d: (0x71ca, 0),# East Asian ideograph
- 0x21536e: (0x819a, 0),# East Asian ideograph
- 0x21536f: (0x819c, 0),# East Asian ideograph
- 0x215370: (0x81b3, 0),# East Asian ideograph
- 0x275371: (0x817b, 0),# East Asian ideograph
- 0x215372: (0x81a8, 0),# East Asian ideograph
- 0x215373: (0x81c6, 0),# East Asian ideograph
- 0x215374: (0x81ba, 0),# East Asian ideograph
- 0x215375: (0x81c3, 0),# East Asian ideograph
- 0x215376: (0x81c0, 0),# East Asian ideograph
- 0x215377: (0x81c2, 0),# East Asian ideograph
- 0x275378: (0x8113, 0),# East Asian ideograph
- 0x275379: (0x80c6, 0),# East Asian ideograph
- 0x27537a: (0x8138, 0),# East Asian ideograph
- 0x27537b: (0x810d, 0),# East Asian ideograph
- 0x21537c: (0x81cd, 0),# East Asian ideograph
- 0x27537d: (0x8191, 0),# East Asian ideograph
- 0x27537e: (0x814a, 0),# East Asian ideograph
- 0x234156: (0x91bf, 0),# East Asian ideograph
- 0x276b79: (0x523f, 0),# East Asian ideograph
- 0x6f584b: (0xca0c, 0),# Korean hangul
- 0x2d3b3f: (0x5c02, 0),# East Asian ideograph
- 0x21313b: (0x4f43, 0),# East Asian ideograph
- 0x2d615a: (0x8ec6, 0),# East Asian ideograph
- 0x6f2526: (0x3161, 0),# Korean hangul
- 0x276b7a: (0x523d, 0),# East Asian ideograph
- 0x6f584c: (0xca0d, 0),# Korean hangul
- 0x69543a: (0x57aa, 0),# East Asian ideograph
- 0x232349: (0x8497, 0),# East Asian ideograph
- 0x6f5279: (0xc0e5, 0),# Korean hangul
- 0x274c33: (0x6bd5, 0),# East Asian ideograph
- 0x213168: (0x4fc4, 0),# East Asian ideograph
- 0x22234b: (0x5c8d, 0),# East Asian ideograph
- 0x213f51: (0x61e3, 0),# East Asian ideograph
- 0x2d3279: (0x514e, 0),# East Asian ideograph
- 0x6f4e36: (0xb5b1, 0),# Korean hangul
- 0x6f2471: (0x3149, 0),# Korean hangul
- 0x6f584d: (0xca18, 0),# Korean hangul
- 0x224674: (0x6c3f, 0),# East Asian ideograph
- 0x6f4929: (0xac80, 0),# Korean hangul
- 0x6f5164: (0xbdd4, 0),# Korean hangul
- 0x217e21: (0x5b5b, 0),# East Asian ideograph
- 0x213169: (0x4fc2, 0),# East Asian ideograph
- 0x217158: (0x55cc, 0),# East Asian ideograph
- 0x233967: (0x8e27, 0),# East Asian ideograph
- 0x4b594a: (0x8aad, 0),# East Asian ideograph
- 0x6f5158: (0xbd80, 0),# Korean hangul
- 0x6f584e: (0xca4c, 0),# Korean hangul
- 0x6f7721: (0xad35, 0),# Korean hangul
- 0x213e33: (0x6025, 0),# East Asian ideograph
- 0x295a28: (0x9e28, 0),# East Asian ideograph
- 0x2d4b72: (0x7506, 0),# East Asian ideograph
- 0x6f584f: (0xca4d, 0),# Korean hangul
- 0x232358: (0x84b9, 0),# East Asian ideograph
- 0x347431: (0x58dc, 0),# East Asian ideograph
- 0x21715a: (0x55db, 0),# East Asian ideograph
- 0x233969: (0x8e18, 0),# East Asian ideograph
- 0x215421: (0x81da, 0),# East Asian ideograph
- 0x235422: (0x9a4d, 0),# East Asian ideograph
- 0x215423: (0x81e3, 0),# East Asian ideograph
- 0x235424: (0x9a52, 0),# East Asian ideograph
- 0x275425: (0x4e34, 0),# East Asian ideograph
- 0x215426: (0x81ea, 0),# East Asian ideograph
- 0x215427: (0x81ec, 0),# East Asian ideograph
- 0x215428: (0x81ed, 0),# East Asian ideograph
- 0x215429: (0x81f3, 0),# East Asian ideograph
- 0x22542a: (0x71de, 0),# East Asian ideograph
- 0x21542b: (0x81fa, 0),# East Asian ideograph
- 0x21542c: (0x81fb, 0),# East Asian ideograph
- 0x21542d: (0x81fc, 0),# East Asian ideograph
- 0x21542e: (0x81fe, 0),# East Asian ideograph
- 0x21542f: (0x8200, 0),# East Asian ideograph
- 0x215430: (0x8202, 0),# East Asian ideograph
- 0x215431: (0x8205, 0),# East Asian ideograph
- 0x215432: (0x8207, 0),# East Asian ideograph
- 0x275433: (0x5174, 0),# East Asian ideograph
- 0x275434: (0x4e3e, 0),# East Asian ideograph
- 0x215435: (0x820a, 0),# East Asian ideograph
- 0x215436: (0x820c, 0),# East Asian ideograph
- 0x215437: (0x820d, 0),# East Asian ideograph
- 0x215438: (0x8210, 0),# East Asian ideograph
- 0x215439: (0x8212, 0),# East Asian ideograph
- 0x23543a: (0x9a6b, 0),# East Asian ideograph
- 0x21543b: (0x821b, 0),# East Asian ideograph
- 0x21543c: (0x821c, 0),# East Asian ideograph
- 0x21543d: (0x821e, 0),# East Asian ideograph
- 0x21543e: (0x821f, 0),# East Asian ideograph
- 0x21543f: (0x8222, 0),# East Asian ideograph
- 0x215440: (0x822a, 0),# East Asian ideograph
- 0x235441: (0x9aab, 0),# East Asian ideograph
- 0x215442: (0x822c, 0),# East Asian ideograph
- 0x215443: (0x8228, 0),# East Asian ideograph
- 0x215444: (0x8237, 0),# East Asian ideograph
- 0x215445: (0x8235, 0),# East Asian ideograph
- 0x215446: (0x8239, 0),# East Asian ideograph
- 0x215447: (0x8236, 0),# East Asian ideograph
- 0x215448: (0x8247, 0),# East Asian ideograph
- 0x215449: (0x8258, 0),# East Asian ideograph
- 0x21544a: (0x8259, 0),# East Asian ideograph
- 0x21544b: (0x8266, 0),# East Asian ideograph
- 0x21544c: (0x826e, 0),# East Asian ideograph
- 0x21544d: (0x826f, 0),# East Asian ideograph
- 0x21544e: (0x8271, 0),# East Asian ideograph
- 0x21544f: (0x8272, 0),# East Asian ideograph
- 0x215450: (0x827e, 0),# East Asian ideograph
- 0x215451: (0x8292, 0),# East Asian ideograph
- 0x215452: (0x828b, 0),# East Asian ideograph
- 0x215453: (0x828d, 0),# East Asian ideograph
- 0x215454: (0x82b3, 0),# East Asian ideograph
- 0x215455: (0x829d, 0),# East Asian ideograph
- 0x215456: (0x8299, 0),# East Asian ideograph
- 0x215457: (0x82bd, 0),# East Asian ideograph
- 0x215458: (0x82ad, 0),# East Asian ideograph
- 0x215459: (0x82ac, 0),# East Asian ideograph
- 0x21545a: (0x82a5, 0),# East Asian ideograph
- 0x21545b: (0x829f, 0),# East Asian ideograph
- 0x27545c: (0x520d, 0),# East Asian ideograph
- 0x21545d: (0x82b1, 0),# East Asian ideograph
- 0x21545e: (0x82b9, 0),# East Asian ideograph
- 0x69545f: (0x58e5, 0),# East Asian ideograph
- 0x215460: (0x82e7, 0),# East Asian ideograph
- 0x215461: (0x8305, 0),# East Asian ideograph
- 0x215462: (0x8309, 0),# East Asian ideograph
- 0x215463: (0x82e3, 0),# East Asian ideograph
- 0x215464: (0x82db, 0),# East Asian ideograph
- 0x215465: (0x82e6, 0),# East Asian ideograph
- 0x215466: (0x8304, 0),# East Asian ideograph
- 0x215467: (0x82e5, 0),# East Asian ideograph
- 0x215468: (0x8302, 0),# East Asian ideograph
- 0x215469: (0x82dc, 0),# East Asian ideograph
- 0x21546a: (0x82d7, 0),# East Asian ideograph
- 0x21546b: (0x82f1, 0),# East Asian ideograph
- 0x21546c: (0x8301, 0),# East Asian ideograph
- 0x23546d: (0x9ad6, 0),# East Asian ideograph
- 0x21546e: (0x82d4, 0),# East Asian ideograph
- 0x21546f: (0x82d1, 0),# East Asian ideograph
- 0x215470: (0x82de, 0),# East Asian ideograph
- 0x215471: (0x82df, 0),# East Asian ideograph
- 0x215472: (0x832b, 0),# East Asian ideograph
- 0x215473: (0x8352, 0),# East Asian ideograph
- 0x235474: (0x9adf, 0),# East Asian ideograph
- 0x215475: (0x8338, 0),# East Asian ideograph
- 0x215476: (0x8354, 0),# East Asian ideograph
- 0x235477: (0x9ae2, 0),# East Asian ideograph
- 0x215478: (0x8349, 0),# East Asian ideograph
- 0x215479: (0x8335, 0),# East Asian ideograph
- 0x21547a: (0x8334, 0),# East Asian ideograph
- 0x21547b: (0x8336, 0),# East Asian ideograph
- 0x21547c: (0x8331, 0),# East Asian ideograph
- 0x21547d: (0x8340, 0),# East Asian ideograph
- 0x21547e: (0x8317, 0),# East Asian ideograph
- 0x6f5853: (0xca5d, 0),# Korean hangul
- 0x295166: (0x9969, 0),# East Asian ideograph
- 0x234a79: (0x9696, 0),# East Asian ideograph
- 0x226450: (0x7826, 0),# East Asian ideograph
- 0x6f5d73: (0xd765, 0),# Korean hangul
- 0x6f4b35: (0xb014, 0),# Korean hangul
- 0x2d6162: (0x9a0c, 0),# East Asian ideograph
- 0x6f5872: (0xcbe7, 0),# Korean hangul
- 0x21316f: (0x4fef, 0),# East Asian ideograph
- 0x4b4858: (0x6e80, 0),# East Asian ideograph
- 0x6f5854: (0xca61, 0),# Korean hangul
- 0x222370: (0x5cd5, 0),# East Asian ideograph
- 0x22467b: (0x6c62, 0),# East Asian ideograph
- 0x213e39: (0x6063, 0),# East Asian ideograph
- 0x6f7648: (0xe8ba, 0),# Korean hangul
- 0x4b374c: (0x5662, 0),# East Asian ideograph
- 0x29594f: (0x9ce2, 0),# East Asian ideograph
- 0x696373: (0x7b02, 0),# East Asian ideograph
- 0x295a70: (0x9e48, 0),# East Asian ideograph
- 0x6f5364: (0xc22f, 0),# Korean hangul
- 0x27496a: (0x70bc, 0),# East Asian ideograph
- 0x6f5855: (0xca84, 0),# Korean hangul
- 0x292375: (0x83b3, 0),# East Asian ideograph
- 0x22467c: (0x6c4a, 0),# East Asian ideograph
- 0x224e21: (0x6faa, 0),# East Asian ideograph
- 0x6f4e22: (0xb541, 0),# Korean hangul
- 0x6f4e23: (0xb543, 0),# Korean hangul
- 0x225346: (0x719e, 0),# East Asian ideograph
- 0x222379: (0x5c8d, 0),# East Asian ideograph (not in Unicode)
- 0x234e24: (0x97b3, 0),# East Asian ideograph
- 0x6f5856: (0xca98, 0),# Korean hangul
- 0x224e25: (0x6fbf, 0),# East Asian ideograph
- 0x6f527b: (0xc0ec, 0),# Korean hangul
- 0x224e26: (0x6fc7, 0),# East Asian ideograph
- 0x275a78: (0x8e52, 0),# East Asian ideograph
- 0x274e27: (0x77eb, 0),# East Asian ideograph
- 0x213172: (0x5025, 0),# East Asian ideograph
- 0x6f4e28: (0xb54d, 0),# Korean hangul
- 0x27574a: (0x51b2, 0),# East Asian ideograph (duplicate simplified)
- 0x227e23: (0x83a6, 0),# East Asian ideograph
- 0x234e29: (0x97b9, 0),# East Asian ideograph
- 0x6f5857: (0xcabc, 0),# Korean hangul
- 0x29516a: (0x9990, 0),# East Asian ideograph
- 0x51356a: (0x8bc3, 0),# East Asian ideograph
- 0x6f4e2b: (0xb55c, 0),# Korean hangul
- 0x6f594d: (0xcd19, 0),# Korean hangul
- 0x6f5d5c: (0xd700, 0),# Korean hangul
- 0x6f4e2c: (0xb55d, 0),# Korean hangul
- 0x213173: (0x5011, 0),# East Asian ideograph
- 0x4b613f: (0x9a08, 0),# East Asian ideograph
- 0x214e2d: (0x65ab, 0),# East Asian ideograph
- 0x224e2e: (0x6f5e, 0),# East Asian ideograph
- 0x6f5858: (0xcabd, 0),# Korean hangul
- 0x6f546a: (0xc4f0, 0),# Korean hangul
- 0x224e2f: (0x6fc8, 0),# East Asian ideograph
- 0x2d5763: (0x88e1, 0),# East Asian ideograph
- 0x235521: (0x9ae7, 0),# East Asian ideograph
- 0x215522: (0x834f, 0),# East Asian ideograph
- 0x215523: (0x8339, 0),# East Asian ideograph
- 0x215524: (0x838e, 0),# East Asian ideograph
- 0x215525: (0x8398, 0),# East Asian ideograph
- 0x215526: (0x839e, 0),# East Asian ideograph
- 0x215527: (0x8378, 0),# East Asian ideograph
- 0x215528: (0x83a2, 0),# East Asian ideograph
- 0x225529: (0x7225, 0),# East Asian ideograph
- 0x22552a: (0x7226, 0),# East Asian ideograph
- 0x21552b: (0x83ab, 0),# East Asian ideograph
- 0x21552c: (0x8392, 0),# East Asian ideograph (variant of 4B552C which maps to 8392)
- 0x21552d: (0x838a, 0),# East Asian ideograph
- 0x21552e: (0x8393, 0),# East Asian ideograph
- 0x21552f: (0x83a0, 0),# East Asian ideograph
- 0x215530: (0x8389, 0),# East Asian ideograph
- 0x215531: (0x8377, 0),# East Asian ideograph
- 0x215532: (0x837c, 0),# East Asian ideograph
- 0x215533: (0x837b, 0),# East Asian ideograph
- 0x215534: (0x840d, 0),# East Asian ideograph
- 0x215535: (0x83e0, 0),# East Asian ideograph
- 0x215536: (0x83e9, 0),# East Asian ideograph
- 0x6f5537: (0xc57d, 0),# Korean hangul
- 0x215538: (0x8403, 0),# East Asian ideograph
- 0x215539: (0x83c5, 0),# East Asian ideograph
- 0x21553a: (0x83c1, 0),# East Asian ideograph
- 0x21553b: (0x840b, 0),# East Asian ideograph
- 0x21553c: (0x83ef, 0),# East Asian ideograph
- 0x6f553d: (0xc58f, 0),# Korean hangul
- 0x21553e: (0x83f1, 0),# East Asian ideograph
- 0x21553f: (0x83bd, 0),# East Asian ideograph
- 0x6f5540: (0xc595, 0),# Korean hangul
- 0x235541: (0x9b05, 0),# East Asian ideograph
- 0x215542: (0x840c, 0),# East Asian ideograph
- 0x225543: (0x7241, 0),# East Asian ideograph
- 0x215544: (0x83dc, 0),# East Asian ideograph
- 0x215545: (0x83ca, 0),# East Asian ideograph
- 0x215546: (0x83f2, 0),# East Asian ideograph
- 0x215547: (0x840e, 0),# East Asian ideograph
- 0x215548: (0x8404, 0),# East Asian ideograph
- 0x215549: (0x843d, 0),# East Asian ideograph
- 0x21554a: (0x8482, 0),# East Asian ideograph
- 0x21554b: (0x8431, 0),# East Asian ideograph
- 0x21554c: (0x8475, 0),# East Asian ideograph
- 0x21554d: (0x8466, 0),# East Asian ideograph
- 0x21554e: (0x8457, 0),# East Asian ideograph
- 0x22554f: (0x7250, 0),# East Asian ideograph
- 0x215550: (0x846c, 0),# East Asian ideograph
- 0x214e38: (0x7843, 0),# East Asian ideograph
- 0x215552: (0x845b, 0),# East Asian ideograph
- 0x215553: (0x8477, 0),# East Asian ideograph
- 0x215554: (0x843c, 0),# East Asian ideograph
- 0x215555: (0x8435, 0),# East Asian ideograph
- 0x225556: (0x725a, 0),# East Asian ideograph
- 0x215557: (0x8463, 0),# East Asian ideograph
- 0x215558: (0x8469, 0),# East Asian ideograph
- 0x225559: (0x7263, 0),# East Asian ideograph
- 0x21555a: (0x84b2, 0),# East Asian ideograph
- 0x21555b: (0x849e, 0),# East Asian ideograph
- 0x21555c: (0x84bf, 0),# East Asian ideograph
- 0x21555d: (0x84c6, 0),# East Asian ideograph
- 0x21555e: (0x84c4, 0),# East Asian ideograph
- 0x21555f: (0x84c9, 0),# East Asian ideograph
- 0x215560: (0x849c, 0),# East Asian ideograph
- 0x215561: (0x84cb, 0),# East Asian ideograph
- 0x215562: (0x84b8, 0),# East Asian ideograph
- 0x275563: (0x836a, 0),# East Asian ideograph
- 0x275564: (0x82ce, 0),# East Asian ideograph
- 0x215565: (0x84d3, 0),# East Asian ideograph
- 0x225566: (0x7276, 0),# East Asian ideograph
- 0x215567: (0x84bc, 0),# East Asian ideograph
- 0x225568: (0x7277, 0),# East Asian ideograph
- 0x215569: (0x84ff, 0),# East Asian ideograph
- 0x21556a: (0x8517, 0),# East Asian ideograph
- 0x22556b: (0x727e, 0),# East Asian ideograph
- 0x21556c: (0x84ee, 0),# East Asian ideograph
- 0x21556d: (0x852c, 0),# East Asian ideograph
- 0x27556e: (0x836b, 0),# East Asian ideograph
- 0x21556f: (0x8513, 0),# East Asian ideograph
- 0x6f5570: (0xc61b, 0),# Korean hangul
- 0x215571: (0x8523, 0),# East Asian ideograph
- 0x215572: (0x8521, 0),# East Asian ideograph
- 0x275573: (0x535c, 0),# East Asian ideograph
- 0x225574: (0x7289, 0),# East Asian ideograph
- 0x215575: (0x8525, 0),# East Asian ideograph
- 0x235576: (0x9b2f, 0),# East Asian ideograph
- 0x215577: (0x854a, 0),# East Asian ideograph
- 0x215578: (0x8559, 0),# East Asian ideograph
- 0x215579: (0x8548, 0),# East Asian ideograph
- 0x21557a: (0x8568, 0),# East Asian ideograph
- 0x21557b: (0x8543, 0),# East Asian ideograph
- 0x21557c: (0x856a, 0),# East Asian ideograph
- 0x21557d: (0x8549, 0),# East Asian ideograph
- 0x21557e: (0x8584, 0),# East Asian ideograph
- 0x224e40: (0x6fa5, 0),# East Asian ideograph
- 0x224e41: (0x6fb0, 0),# East Asian ideograph
- 0x4b6048: (0x981a, 0),# East Asian ideograph
- 0x224e42: (0x6fae, 0),# East Asian ideograph
- 0x2f585c: (0x9c51, 0),# Unrelated variant of EACC 235945 which maps to 9C51
- 0x224e43: (0x6fd9, 0),# East Asian ideograph
- 0x276260: (0x4e48, 0),# East Asian ideograph
- 0x21393f: (0x5969, 0),# East Asian ideograph
- 0x224e44: (0x6fda, 0),# East Asian ideograph
- 0x274e45: (0x7855, 0),# East Asian ideograph
- 0x6f5b3c: (0xd1b0, 0),# Korean hangul
- 0x273422: (0x5218, 0),# East Asian ideograph
- 0x6f4e46: (0xb664, 0),# Korean hangul
- 0x286b7c: (0x7b15, 0),# East Asian ideograph
- 0x22316c: (0x636c, 0),# East Asian ideograph
- 0x6f4e47: (0xb69c, 0),# Korean hangul
- 0x6f585d: (0xcad1, 0),# Korean hangul
- 0x295170: (0x998d, 0),# East Asian ideograph
- 0x274e49: (0x786e, 0),# East Asian ideograph
- 0x6f4c4c: (0xb220, 0),# Korean hangul
- 0x6f4e4a: (0xb6ab, 0),# Korean hangul
- 0x213179: (0x5006, 0),# East Asian ideograph
- 0x234e4b: (0x97ce, 0),# East Asian ideograph
- 0x2d753a: (0x9654, 0),# East Asian ideograph
- 0x2d5321: (0x7c9b, 0),# East Asian ideograph
- 0x274e4c: (0x7801, 0),# East Asian ideograph
- 0x3f3078: (0x5023, 0),# East Asian ideograph
- 0x6f585e: (0xcad2, 0),# Korean hangul
- 0x6f4e4d: (0xb6f0, 0),# Korean hangul
- 0x234e4e: (0x97d0, 0),# East Asian ideograph
- 0x4b552c: (0x8392, 0),# East Asian ideograph
- 0x29546d: (0x9acb, 0),# East Asian ideograph
- 0x333564: (0x5415, 0),# East Asian ideograph
- 0x275154: (0x7ec3, 0),# East Asian ideograph
- 0x224e50: (0x6fd4, 0),# East Asian ideograph
- 0x213930: (0x5949, 0),# East Asian ideograph
- 0x234e51: (0x97d4, 0),# East Asian ideograph
- 0x6f585f: (0xcad3, 0),# Korean hangul
- 0x295172: (0x9994, 0),# East Asian ideograph
- 0x21605d: (0x98b1, 0),# East Asian ideograph
- 0x213e44: (0x606c, 0),# East Asian ideograph
- 0x274e53: (0x7816, 0),# East Asian ideograph
- 0x234642: (0x93a7, 0),# East Asian ideograph
- 0x234e54: (0x97d9, 0),# East Asian ideograph
- 0x69245c: (0x307c, 0),# Hiragana letter BO
- 0x6f4e55: (0xb72f, 0),# Korean hangul
- 0x224e56: (0x6fe9, 0),# East Asian ideograph
- 0x6f5860: (0xcad8, 0),# Korean hangul
- 0x224e57: (0x6ff8, 0),# East Asian ideograph
- 0x4b3758: (0x56a5, 0),# East Asian ideograph (variant of 213758 which maps to 56A5)
- 0x274e58: (0x77f6, 0),# East Asian ideograph
- 0x214e59: (0x790e, 0),# East Asian ideograph
- 0x274e5a: (0x788d, 0),# East Asian ideograph
- 0x2d5c40: (0x5fa8, 0),# East Asian ideograph
- 0x215621: (0x85aa, 0),# East Asian ideograph
- 0x215622: (0x856d, 0),# East Asian ideograph
- 0x235623: (0x9b37, 0),# East Asian ideograph
- 0x275624: (0x59dc, 0),# East Asian ideograph
- 0x215625: (0x857e, 0),# East Asian ideograph
- 0x215626: (0x8594, 0),# East Asian ideograph
- 0x215627: (0x859c, 0),# East Asian ideograph
- 0x225628: (0x728f, 0),# East Asian ideograph
- 0x215629: (0x85cd, 0),# East Asian ideograph (variant of 4B5629 which maps to 85CD)
- 0x27562a: (0x8428, 0),# East Asian ideograph
- 0x21562b: (0x85cf, 0),# East Asian ideograph
- 0x21562c: (0x85af, 0),# East Asian ideograph
- 0x21562d: (0x85d0, 0),# East Asian ideograph
- 0x27562e: (0x501f, 0),# East Asian ideograph
- 0x214e5d: (0x792a, 0),# East Asian ideograph
- 0x215630: (0x85e9, 0),# East Asian ideograph
- 0x215631: (0x85dd, 0),# East Asian ideograph
- 0x275632: (0x85ae, 0),# East Asian ideograph
- 0x215633: (0x85e4, 0),# East Asian ideograph
- 0x215634: (0x85d5, 0),# East Asian ideograph
- 0x224e5e: (0x6fee, 0),# East Asian ideograph
- 0x215636: (0x85fb, 0),# East Asian ideograph
- 0x215637: (0x85f9, 0),# East Asian ideograph
- 0x215638: (0x8611, 0),# East Asian ideograph
- 0x215639: (0x85fa, 0),# East Asian ideograph
- 0x27563a: (0x82a6, 0),# East Asian ideograph
- 0x27563b: (0x82f9, 0),# East Asian ideograph
- 0x27563c: (0x82cf, 0),# East Asian ideograph
- 0x27563d: (0x8574, 0),# East Asian ideograph
- 0x27563e: (0x5170, 0),# East Asian ideograph
- 0x21563f: (0x8617, 0),# East Asian ideograph
- 0x215640: (0x861a, 0),# East Asian ideograph
- 0x215641: (0x8638, 0),# East Asian ideograph
- 0x275642: (0x841d, 0),# East Asian ideograph
- 0x215643: (0x864e, 0),# East Asian ideograph
- 0x215644: (0x8650, 0),# East Asian ideograph
- 0x215645: (0x8654, 0),# East Asian ideograph
- 0x215646: (0x5f6a, 0),# East Asian ideograph
- 0x215647: (0x8655, 0),# East Asian ideograph
- 0x275648: (0x864f, 0),# East Asian ideograph
- 0x215649: (0x865b, 0),# East Asian ideograph
- 0x27564a: (0x53f7, 0),# East Asian ideograph
- 0x21564b: (0x865e, 0),# East Asian ideograph
- 0x22564c: (0x72ab, 0),# East Asian ideograph
- 0x224e62: (0x6ff0, 0),# East Asian ideograph
- 0x22564e: (0x72b0, 0),# East Asian ideograph
- 0x21564f: (0x8679, 0),# East Asian ideograph
- 0x215650: (0x86a9, 0),# East Asian ideograph
- 0x215651: (0x86aa, 0),# East Asian ideograph
- 0x215652: (0x868a, 0),# East Asian ideograph
- 0x215653: (0x8693, 0),# East Asian ideograph
- 0x215654: (0x86a4, 0),# East Asian ideograph
- 0x215655: (0x868c, 0),# East Asian ideograph
- 0x215656: (0x86a3, 0),# East Asian ideograph
- 0x215657: (0x86c0, 0),# East Asian ideograph
- 0x215658: (0x86c7, 0),# East Asian ideograph
- 0x215659: (0x86b5, 0),# East Asian ideograph
- 0x27565a: (0x65e6, 0),# East Asian ideograph
- 0x21565b: (0x86b6, 0),# East Asian ideograph
- 0x21565c: (0x86c4, 0),# East Asian ideograph
- 0x21565d: (0x86c6, 0),# East Asian ideograph
- 0x21565e: (0x86b1, 0),# East Asian ideograph
- 0x21565f: (0x86af, 0),# East Asian ideograph
- 0x225660: (0x72d6, 0),# East Asian ideograph
- 0x215661: (0x86d9, 0),# East Asian ideograph
- 0x215662: (0x86ed, 0),# East Asian ideograph
- 0x215663: (0x86d4, 0),# East Asian ideograph
- 0x225664: (0x72d2, 0),# East Asian ideograph
- 0x224e66: (0x7005, 0),# East Asian ideograph
- 0x215666: (0x86fb, 0),# East Asian ideograph
- 0x225667: (0x72c9, 0),# East Asian ideograph
- 0x215668: (0x8707, 0),# East Asian ideograph
- 0x215669: (0x8703, 0),# East Asian ideograph
- 0x21566a: (0x8708, 0),# East Asian ideograph
- 0x214e67: (0x7955, 0),# East Asian ideograph
- 0x21566c: (0x86fe, 0),# East Asian ideograph
- 0x21566d: (0x8713, 0),# East Asian ideograph
- 0x21566e: (0x8702, 0),# East Asian ideograph
- 0x21566f: (0x871c, 0),# East Asian ideograph
- 0x215670: (0x873f, 0),# East Asian ideograph
- 0x215671: (0x873b, 0),# East Asian ideograph
- 0x215672: (0x8722, 0),# East Asian ideograph
- 0x225673: (0x72e8, 0),# East Asian ideograph
- 0x215674: (0x8734, 0),# East Asian ideograph
- 0x215675: (0x8718, 0),# East Asian ideograph
- 0x215676: (0x8755, 0),# East Asian ideograph
- 0x215677: (0x8760, 0),# East Asian ideograph
- 0x215678: (0x8776, 0),# East Asian ideograph
- 0x225679: (0x72e5, 0),# East Asian ideograph
- 0x27567a: (0x867e, 0),# East Asian ideograph
- 0x21567b: (0x8778, 0),# East Asian ideograph
- 0x21567c: (0x8768, 0),# East Asian ideograph
- 0x21567d: (0x874c, 0),# East Asian ideograph
- 0x22567e: (0x72fa, 0),# East Asian ideograph
- 0x6f4e6b: (0xb790, 0),# Korean hangul
- 0x6f5421: (0xc2b7, 0),# Korean hangul
- 0x234e6c: (0x97f5, 0),# East Asian ideograph
- 0x6f5339: (0xc151, 0),# Korean hangul
- 0x6f4e6d: (0xb797, 0),# Korean hangul
- 0x69245d: (0x307d, 0),# Hiragana letter PO
- 0x6f4e6e: (0xb798, 0),# Korean hangul
- 0x274e6f: (0x53ea, 0),# East Asian ideograph (duplicate simplified)
- 0x6f5865: (0xcb20, 0),# Korean hangul
- 0x6f4e70: (0xb79c, 0),# Korean hangul
- 0x6f5422: (0xc2b9, 0),# Korean hangul
- 0x6f5a2a: (0xcef7, 0),# Korean hangul
- 0x6f4e71: (0xb7a0, 0),# Korean hangul
- 0x234648: (0x939a, 0),# East Asian ideograph
- 0x213441: (0x5305, 0),# East Asian ideograph
- 0x224e72: (0x7026, 0),# East Asian ideograph
- 0x214e73: (0x797a, 0),# East Asian ideograph
- 0x286c58: (0x7ba7, 0),# East Asian ideograph
- 0x6f5633: (0xc68d, 0),# Korean hangul
- 0x6f4e3b: (0xb5bc, 0),# Korean hangul
- 0x6f5866: (0xcb21, 0),# Korean hangul
- 0x395179: (0x7d75, 0),# East Asian ideograph
- 0x6f4e76: (0xb7ad, 0),# Korean hangul
- 0x213921: (0x5920, 0),# East Asian ideograph
- 0x274e77: (0x7978, 0),# East Asian ideograph
- 0x27785e: (0x5786, 0),# East Asian ideograph
- 0x213922: (0x5924, 0),# East Asian ideograph
- 0x274e78: (0x796f, 0),# East Asian ideograph
- 0x213923: (0x5925, 0),# East Asian ideograph
- 0x234e79: (0x9807, 0),# East Asian ideograph
- 0x273924: (0x68a6, 0),# East Asian ideograph
- 0x6f5867: (0xcb41, 0),# Korean hangul
- 0x213f37: (0x6155, 0),# East Asian ideograph
- 0x6f4e7a: (0xb7ec, 0),# Korean hangul
- 0x6f4d61: (0xb4d0, 0),# Korean hangul
- 0x226464: (0x7876, 0),# East Asian ideograph
- 0x274e7b: (0x7985, 0),# East Asian ideograph
- 0x69482b: (0x7560, 0),# East Asian ideograph
- 0x274e7c: (0x793c, 0),# East Asian ideograph
- 0x4b4f3c: (0x79f0, 0),# East Asian ideograph (variant of 274F3C which maps to 79F0)
- 0x213927: (0x592b, 0),# East Asian ideograph
- 0x274e7d: (0x7977, 0),# East Asian ideograph
- 0x2d5323: (0x5b8d, 0),# East Asian ideograph
- 0x234e7e: (0x980d, 0),# East Asian ideograph
- 0x2d3929: (0x6b80, 0),# East Asian ideograph
- 0x213376: (0x5274, 0),# East Asian ideograph
- 0x6f5868: (0xcb48, 0),# Korean hangul
- 0x6f5433: (0xc300, 0),# Korean hangul
- 0x216066: (0x98e7, 0),# East Asian ideograph
- 0x21392b: (0x5931, 0),# East Asian ideograph
- 0x3f304c: (0x5e79, 0),# East Asian ideograph
- 0x2e3a26: (0x661d, 0),# East Asian ideograph
- 0x225359: (0x71b4, 0),# East Asian ideograph
- 0x21392e: (0x593e, 0),# East Asian ideograph
- 0x6f5869: (0xcb49, 0),# Korean hangul
- 0x396b33: (0x5259, 0),# East Asian ideograph (not in Unicode)
- 0x216067: (0x98e9, 0),# East Asian ideograph
- 0x235721: (0x9b83, 0),# East Asian ideograph
- 0x215722: (0x8783, 0),# East Asian ideograph
- 0x215723: (0x8782, 0),# East Asian ideograph
- 0x275724: (0x8424, 0),# East Asian ideograph
- 0x225725: (0x72fe, 0),# East Asian ideograph
- 0x215726: (0x878d, 0),# East Asian ideograph
- 0x215727: (0x879f, 0),# East Asian ideograph
- 0x215728: (0x87d1, 0),# East Asian ideograph
- 0x215729: (0x87c0, 0),# East Asian ideograph
- 0x21572a: (0x87ab, 0),# East Asian ideograph
- 0x23572b: (0x9b90, 0),# East Asian ideograph
- 0x27572c: (0x877c, 0),# East Asian ideograph
- 0x22572d: (0x7301, 0),# East Asian ideograph
- 0x22572e: (0x72f3, 0),# East Asian ideograph
- 0x23572f: (0x9b97, 0),# East Asian ideograph
- 0x215730: (0x87c6, 0),# East Asian ideograph
- 0x215731: (0x87cb, 0),# East Asian ideograph
- 0x215732: (0x87ef, 0),# East Asian ideograph
- 0x215733: (0x87f2, 0),# East Asian ideograph
- 0x215734: (0x87ec, 0),# East Asian ideograph
- 0x225735: (0x730b, 0),# East Asian ideograph
- 0x225736: (0x7317, 0),# East Asian ideograph
- 0x215737: (0x880d, 0),# East Asian ideograph
- 0x215738: (0x87f9, 0),# East Asian ideograph
- 0x215739: (0x8814, 0),# East Asian ideograph
- 0x21573a: (0x8815, 0),# East Asian ideograph
- 0x22573b: (0x7307, 0),# East Asian ideograph
- 0x23573c: (0x9bad, 0),# East Asian ideograph
- 0x23573d: (0x9b9a, 0),# East Asian ideograph
- 0x22573e: (0x7318, 0),# East Asian ideograph
- 0x27573f: (0x86ca, 0),# East Asian ideograph
- 0x215740: (0x8839, 0),# East Asian ideograph
- 0x275741: (0x8695, 0),# East Asian ideograph
- 0x215742: (0x883b, 0),# East Asian ideograph
- 0x235743: (0x9b99, 0),# East Asian ideograph
- 0x215744: (0x884c, 0),# East Asian ideograph
- 0x215745: (0x884d, 0),# East Asian ideograph
- 0x225746: (0x7331, 0),# East Asian ideograph
- 0x215747: (0x8857, 0),# East Asian ideograph
- 0x215748: (0x8859, 0),# East Asian ideograph
- 0x225749: (0x7338, 0),# East Asian ideograph
- 0x22574a: (0x7322, 0),# East Asian ideograph
- 0x21574b: (0x8861, 0),# East Asian ideograph
- 0x22574c: (0x7332, 0),# East Asian ideograph
- 0x22574d: (0x732c, 0),# East Asian ideograph
- 0x22574e: (0x7327, 0),# East Asian ideograph
- 0x22574f: (0x732b, 0),# East Asian ideograph
- 0x215750: (0x886b, 0),# East Asian ideograph
- 0x215751: (0x8882, 0),# East Asian ideograph
- 0x225752: (0x732f, 0),# East Asian ideograph
- 0x215753: (0x8870, 0),# East Asian ideograph
- 0x215754: (0x8877, 0),# East Asian ideograph
- 0x225755: (0x7328, 0),# East Asian ideograph
- 0x235756: (0x9bc7, 0),# East Asian ideograph
- 0x215757: (0x8892, 0),# East Asian ideograph
- 0x215758: (0x8896, 0),# East Asian ideograph
- 0x235759: (0x9bd2, 0),# East Asian ideograph
- 0x22575a: (0x7347, 0),# East Asian ideograph
- 0x22575b: (0x7348, 0),# East Asian ideograph
- 0x22575c: (0x7349, 0),# East Asian ideograph
- 0x23393a: (0x8de6, 0),# East Asian ideograph
- 0x21575e: (0x88b1, 0),# East Asian ideograph
- 0x23575f: (0x9bc1, 0),# East Asian ideograph
- 0x215760: (0x88d9, 0),# East Asian ideograph
- 0x215761: (0x88d8, 0),# East Asian ideograph
- 0x215762: (0x88dc, 0),# East Asian ideograph
- 0x215763: (0x88cf, 0),# East Asian ideograph
- 0x215764: (0x88d4, 0),# East Asian ideograph
- 0x225765: (0x7340, 0),# East Asian ideograph
- 0x215766: (0x88d5, 0),# East Asian ideograph
- 0x215767: (0x8902, 0),# East Asian ideograph
- 0x225768: (0x734d, 0),# East Asian ideograph
- 0x215769: (0x88f8, 0),# East Asian ideograph
- 0x21576a: (0x88f9, 0),# East Asian ideograph
- 0x21576b: (0x88f4, 0),# East Asian ideograph
- 0x23576c: (0x9bd3, 0),# East Asian ideograph
- 0x21576d: (0x88e8, 0),# East Asian ideograph
- 0x21576e: (0x891a, 0),# East Asian ideograph
- 0x21576f: (0x8910, 0),# East Asian ideograph
- 0x6f5770: (0xc906, 0),# Korean hangul
- 0x215771: (0x8913, 0),# East Asian ideograph
- 0x235772: (0x9bc8, 0),# East Asian ideograph
- 0x215773: (0x8932, 0),# East Asian ideograph
- 0x225774: (0x735d, 0),# East Asian ideograph
- 0x215775: (0x8925, 0),# East Asian ideograph
- 0x215776: (0x892b, 0),# East Asian ideograph
- 0x235777: (0x9bd7, 0),# East Asian ideograph
- 0x215778: (0x8936, 0),# East Asian ideograph
- 0x225779: (0x7360, 0),# East Asian ideograph
- 0x23577a: (0x9bd6, 0),# East Asian ideograph
- 0x21577b: (0x895f, 0),# East Asian ideograph
- 0x23577c: (0x9beb, 0),# East Asian ideograph
- 0x21577d: (0x8956, 0),# East Asian ideograph
- 0x22577e: (0x7362, 0),# East Asian ideograph
- 0x273940: (0x593a, 0),# East Asian ideograph
- 0x223941: (0x66aa, 0),# East Asian ideograph
- 0x232b33: (0x874d, 0),# East Asian ideograph
- 0x2d506f: (0x7cfa, 0),# East Asian ideograph
- 0x6f586d: (0xcb5d, 0),# Korean hangul
- 0x6f7643: (0xe8b5, 0),# Korean hangul
- 0x213943: (0x5974, 0),# East Asian ideograph
- 0x213944: (0x5976, 0),# East Asian ideograph
- 0x4b3322: (0x5168, 0),# East Asian ideograph (variant of 213322 which maps to 5168)
- 0x27564c: (0x4e8f, 0),# East Asian ideograph
- 0x4b5e3d: (0x9421, 0),# East Asian ideograph
- 0x213946: (0x5983, 0),# East Asian ideograph
- 0x6f5365: (0xc231, 0),# Korean hangul
- 0x6f516d: (0xbe44, 0),# Korean hangul
- 0x213947: (0x5978, 0),# East Asian ideograph
- 0x2d4c2d: (0x756e, 0),# East Asian ideograph
- 0x6f542b: (0xc2ef, 0),# Korean hangul
- 0x213e53: (0x6089, 0),# East Asian ideograph
- 0x213949: (0x5979, 0),# East Asian ideograph
- 0x21794b: (0x595c, 0),# East Asian ideograph
- 0x454e75: (0x7984, 0),# East Asian ideograph
- 0x213c7d: (0x5ec2, 0),# East Asian ideograph
- 0x6f586f: (0xcbb8, 0),# Korean hangul
- 0x2d394d: (0x59ac, 0),# East Asian ideograph
- 0x217e43: (0x5b93, 0),# East Asian ideograph
- 0x22394e: (0x66c8, 0),# East Asian ideograph
- 0x4b3324: (0x634c, 0),# East Asian ideograph (variant of 2D3324 which maps to 634C)
- 0x21394f: (0x59a4, 0),# East Asian ideograph
- 0x213f58: (0x61fa, 0),# East Asian ideograph
- 0x213950: (0x59a3, 0),# East Asian ideograph
- 0x6f4e3d: (0xb5c4, 0),# Korean hangul
- 0x213951: (0x5993, 0),# East Asian ideograph
- 0x2f5870: (0x9c1b, 0),# East Asian ideograph
- 0x6f7646: (0xe8b8, 0),# Korean hangul
- 0x213952: (0x599e, 0),# East Asian ideograph
- 0x213e55: (0x60a0, 0),# East Asian ideograph
- 0x4b3768: (0x56d8, 0),# East Asian ideograph
- 0x213953: (0x599d, 0),# East Asian ideograph
- 0x233954: (0x8e23, 0),# East Asian ideograph
- 0x213955: (0x59a5, 0),# East Asian ideograph
- 0x335760: (0x88e0, 0),# East Asian ideograph
- 0x2d3956: (0x59d9, 0),# East Asian ideograph
- 0x6f4f22: (0xb7ff, 0),# Korean hangul
- 0x6f5871: (0xcbe4, 0),# Korean hangul
- 0x6f7647: (0xe8b9, 0),# Korean hangul
- 0x213957: (0x5996, 0),# East Asian ideograph
- 0x213958: (0x59be, 0),# East Asian ideograph
- 0x333642: (0x8a92, 0),# East Asian ideograph
- 0x2d3f67: (0x621e, 0),# East Asian ideograph
- 0x6f5878: (0xcc1d, 0),# Korean hangul
- 0x4c7959: (0x817d, 0),# East Asian ideograph
- 0x273437: (0x80dc, 0),# East Asian ideograph
- 0x214f63: (0x7ac7, 0),# East Asian ideograph
- 0x4b524e: (0x7fce, 0),# East Asian ideograph
- 0x21395a: (0x59ae, 0),# East Asian ideograph
- 0x3a4034: (0x6855, 0),# East Asian ideograph
- 0x275821: (0x889c, 0),# East Asian ideograph
- 0x275822: (0x886c, 0),# East Asian ideograph
- 0x215823: (0x8972, 0),# East Asian ideograph
- 0x215824: (0x897f, 0),# East Asian ideograph
- 0x225825: (0x7367, 0),# East Asian ideograph
- 0x215826: (0x8983, 0),# East Asian ideograph
- 0x235827: (0x9be4, 0),# East Asian ideograph
- 0x275828: (0x89c1, 0),# East Asian ideograph
- 0x275829: (0x89c4, 0),# East Asian ideograph
- 0x27582a: (0x89c5, 0),# East Asian ideograph
- 0x27582b: (0x89c6, 0),# East Asian ideograph
- 0x27582c: (0x4eb2, 0),# East Asian ideograph
- 0x27582d: (0x89ce, 0),# East Asian ideograph
- 0x21582e: (0x89ac, 0),# East Asian ideograph
- 0x27582f: (0x89d0, 0),# East Asian ideograph
- 0x215830: (0x89ba, 0),# East Asian ideograph
- 0x275831: (0x89c8, 0),# East Asian ideograph
- 0x215832: (0x89c0, 0),# East Asian ideograph
- 0x215833: (0x89d2, 0),# East Asian ideograph
- 0x235834: (0x9bd4, 0),# East Asian ideograph
- 0x215835: (0x89f4, 0),# East Asian ideograph
- 0x225836: (0x737c, 0),# East Asian ideograph
- 0x215837: (0x8a00, 0),# East Asian ideograph
- 0x215838: (0x8a08, 0),# East Asian ideograph
- 0x215839: (0x8a02, 0),# East Asian ideograph
- 0x21583a: (0x8a03, 0),# East Asian ideograph
- 0x21583b: (0x8a10, 0),# East Asian ideograph
- 0x21583c: (0x8a18, 0),# East Asian ideograph
- 0x21583d: (0x8a0e, 0),# East Asian ideograph
- 0x23583e: (0x9bff, 0),# East Asian ideograph
- 0x21583f: (0x8a15, 0),# East Asian ideograph
- 0x215840: (0x8a0a, 0),# East Asian ideograph
- 0x275841: (0x8bab, 0),# East Asian ideograph
- 0x225842: (0x738e, 0),# East Asian ideograph
- 0x235843: (0x9c06, 0),# East Asian ideograph
- 0x235844: (0x9c15, 0),# East Asian ideograph
- 0x215845: (0x8a23, 0),# East Asian ideograph
- 0x275846: (0x8bb6, 0),# East Asian ideograph
- 0x225847: (0x7392, 0),# East Asian ideograph
- 0x215848: (0x8a31, 0),# East Asian ideograph
- 0x275849: (0x8bbe, 0),# East Asian ideograph
- 0x27584a: (0x8bb9, 0),# East Asian ideograph
- 0x27584b: (0x8bbc, 0),# East Asian ideograph
- 0x27584c: (0x6ce8, 0),# East Asian ideograph
- 0x21584d: (0x8a60, 0),# East Asian ideograph
- 0x27584e: (0x8bc4, 0),# East Asian ideograph
- 0x27584f: (0x8bcd, 0),# East Asian ideograph
- 0x6f5850: (0xca50, 0),# Korean hangul
- 0x215851: (0x8a41, 0),# East Asian ideograph
- 0x235852: (0x9c02, 0),# East Asian ideograph
- 0x215853: (0x8a5b, 0),# East Asian ideograph
- 0x235854: (0x9c10, 0),# East Asian ideograph
- 0x215855: (0x8a46, 0),# East Asian ideograph
- 0x215856: (0x8a34, 0),# East Asian ideograph
- 0x275857: (0x8bca, 0),# East Asian ideograph
- 0x275858: (0x8be7, 0),# East Asian ideograph
- 0x215859: (0x8a72, 0),# East Asian ideograph
- 0x21585a: (0x8a73, 0),# East Asian ideograph
- 0x21585b: (0x8a66, 0),# East Asian ideograph
- 0x27585c: (0x8bd7, 0),# East Asian ideograph
- 0x27585d: (0x8bd8, 0),# East Asian ideograph
- 0x27585e: (0x8be3, 0),# East Asian ideograph
- 0x27585f: (0x8bd9, 0),# East Asian ideograph
- 0x275860: (0x8bda, 0),# East Asian ideograph
- 0x215861: (0x8a87, 0),# East Asian ideograph
- 0x275862: (0x8bdb, 0),# East Asian ideograph
- 0x215863: (0x8a6d, 0),# East Asian ideograph
- 0x215864: (0x8a79, 0),# East Asian ideograph
- 0x275865: (0x8be2, 0),# East Asian ideograph
- 0x275866: (0x8bdd, 0),# East Asian ideograph
- 0x275867: (0x8be0, 0),# East Asian ideograph
- 0x215868: (0x8a6c, 0),# East Asian ideograph
- 0x235869: (0x9c2f, 0),# East Asian ideograph
- 0x22586a: (0x73c2, 0),# East Asian ideograph
- 0x22586b: (0x73d0, 0),# East Asian ideograph
- 0x21586c: (0x8a9e, 0),# East Asian ideograph
- 0x21586d: (0x8a8c, 0),# East Asian ideograph
- 0x21586e: (0x8a93, 0),# East Asian ideograph
- 0x22586f: (0x73bf, 0),# East Asian ideograph
- 0x275870: (0x8ba4, 0),# East Asian ideograph
- 0x275871: (0x8bef, 0),# East Asian ideograph
- 0x275872: (0x8bf2, 0),# East Asian ideograph
- 0x275873: (0x8bf0, 0),# East Asian ideograph
- 0x275874: (0x8bf1, 0),# East Asian ideograph
- 0x275875: (0x8bf3, 0),# East Asian ideograph
- 0x215876: (0x8abc, 0),# East Asian ideograph
- 0x275877: (0x8c06, 0),# East Asian ideograph
- 0x275878: (0x8c05, 0),# East Asian ideograph
- 0x215879: (0x8ac7, 0),# East Asian ideograph
- 0x21587a: (0x8acb, 0),# East Asian ideograph (variant of 4B587A which maps to 8ACB)
- 0x27587b: (0x8bf8, 0),# East Asian ideograph
- 0x27587c: (0x8bfe, 0),# East Asian ideograph
- 0x27587d: (0x8c03, 0),# East Asian ideograph
- 0x23587e: (0x9c46, 0),# East Asian ideograph
- 0x6f5875: (0xcc10, 0),# Korean hangul
- 0x6f764b: (0xe8bd, 0),# Korean hangul
- 0x21796b: (0x5998, 0),# East Asian ideograph
- 0x6f4b62: (0xb0bb, 0),# Korean hangul
- 0x293c5a: (0x8f73, 0),# East Asian ideograph
- 0x2d396e: (0x4f84, 0),# East Asian ideograph
- 0x28732d: (0x7f2f, 0),# East Asian ideograph
- 0x4b6145: (0x9a12, 0),# East Asian ideograph
- 0x21396f: (0x5a01, 0),# East Asian ideograph
- 0x2d4c35: (0x7567, 0),# East Asian ideograph
- 0x2d3970: (0x5a63, 0),# East Asian ideograph
- 0x213971: (0x59e6, 0),# East Asian ideograph
- 0x6f5879: (0xcc21, 0),# Korean hangul
- 0x213972: (0x59da, 0),# East Asian ideograph
- 0x213973: (0x5a11, 0),# East Asian ideograph
- 0x2d3974: (0x5b43, 0),# East Asian ideograph
- 0x6f5877: (0xcc1c, 0),# Korean hangul
- 0x6f764d: (0xe8bf, 0),# Korean hangul
- 0x6f5434: (0xc308, 0),# Korean hangul
- 0x213e5c: (0x60b6, 0),# East Asian ideograph
- 0x4b376f: (0x56fd, 0),# East Asian ideograph
- 0x213976: (0x5a1c, 0),# East Asian ideograph
- 0x692421: (0x3041, 0),# Hiragana letter small A
- 0x213977: (0x5a13, 0),# East Asian ideograph
- 0x2d5773: (0x7d5d, 0),# East Asian ideograph
- 0x213978: (0x59ec, 0),# East Asian ideograph
- 0x33354e: (0x608b, 0),# East Asian ideograph
- 0x213979: (0x5a20, 0),# East Asian ideograph
- 0x216424: (0x4e0f, 0),# East Asian ideograph
- 0x6f764e: (0xe8c0, 0),# Korean hangul
- 0x6f535c: (0xc219, 0),# Korean hangul
- 0x213e5d: (0x60d1, 0),# East Asian ideograph
- 0x2d397b: (0x5a31, 0),# East Asian ideograph
- 0x2d3f6e: (0x6226, 0),# East Asian ideograph
- 0x21397c: (0x5a0c, 0),# East Asian ideograph
- 0x692427: (0x3047, 0),# Hiragana letter small E
- 0x6f5841: (0xc9ef, 0),# Korean hangul
- 0x22797d: (0x81a6, 0),# East Asian ideograph
- 0x692428: (0x3048, 0),# Hiragana letter E
- 0x21397e: (0x5a25, 0),# East Asian ideograph
- 0x222429: (0x5cdd, 0),# East Asian ideograph
- 0x6f764f: (0xe8c1, 0),# Korean hangul
- 0x6f5436: (0xc30b, 0),# Korean hangul
- 0x213e5e: (0x60b5, 0),# East Asian ideograph
- 0x33304c: (0x4e79, 0),# East Asian ideograph
- 0x215c34: (0x904b, 0),# East Asian ideograph
- 0x2d3f6f: (0x622f, 0),# East Asian ideograph
- 0x27742e: (0x56f5, 0),# East Asian ideograph
- 0x216d41: (0x534c, 0),# East Asian ideograph
- 0x6f587a: (0xcc22, 0),# Korean hangul
- 0x6f7650: (0xe8c2, 0),# Korean hangul
- 0x6f5437: (0xc30c, 0),# Korean hangul
- 0x69242f: (0x304f, 0),# Hiragana letter KU
- 0x4b3772: (0x5186, 0),# East Asian ideograph
- 0x4b5631: (0x82b8, 0),# East Asian ideograph
- 0x225921: (0x73d3, 0),# East Asian ideograph
- 0x215922: (0x8ab0, 0),# East Asian ideograph
- 0x215923: (0x8a95, 0),# East Asian ideograph
- 0x215924: (0x8ad6, 0),# East Asian ideograph
- 0x275925: (0x8c1b, 0),# East Asian ideograph
- 0x235926: (0x9c44, 0),# East Asian ideograph
- 0x215927: (0x8aeb, 0),# East Asian ideograph
- 0x225928: (0x73e5, 0),# East Asian ideograph
- 0x235929: (0x9c39, 0),# East Asian ideograph
- 0x22592a: (0x73d9, 0),# East Asian ideograph
- 0x22592b: (0x73ef, 0),# East Asian ideograph
- 0x21592c: (0x8b01, 0),# East Asian ideograph (variant of 2D592C which maps to 8B01)
- 0x21592d: (0x8b02, 0),# East Asian ideograph
- 0x21592e: (0x8afe, 0),# East Asian ideograph
- 0x27592f: (0x8bbd, 0),# East Asian ideograph
- 0x235930: (0x9c47, 0),# East Asian ideograph
- 0x215931: (0x8b17, 0),# East Asian ideograph
- 0x225932: (0x73d6, 0),# East Asian ideograph
- 0x215933: (0x8b0e, 0),# East Asian ideograph
- 0x235934: (0x9c37, 0),# East Asian ideograph
- 0x225935: (0x73bc, 0),# East Asian ideograph
- 0x215936: (0x8b21, 0),# East Asian ideograph
- 0x215937: (0x8b04, 0),# East Asian ideograph
- 0x235938: (0x9c52, 0),# East Asian ideograph
- 0x275939: (0x8c28, 0),# East Asian ideograph
- 0x22593a: (0x73de, 0),# East Asian ideograph
- 0x23593b: (0x9c58, 0),# East Asian ideograph
- 0x22593c: (0x73e6, 0),# East Asian ideograph
- 0x21593d: (0x8b5c, 0),# East Asian ideograph
- 0x21593e: (0x8b4e, 0),# East Asian ideograph
- 0x21593f: (0x8b49, 0),# East Asian ideograph
- 0x275940: (0x8c2d, 0),# East Asian ideograph
- 0x215941: (0x8b41, 0),# East Asian ideograph
- 0x275942: (0x8ba5, 0),# East Asian ideograph
- 0x215943: (0x8b70, 0),# East Asian ideograph
- 0x215944: (0x8b6c, 0),# East Asian ideograph
- 0x225945: (0x73f6, 0),# East Asian ideograph
- 0x215946: (0x8b6f, 0),# East Asian ideograph
- 0x225947: (0x73fa, 0),# East Asian ideograph
- 0x275948: (0x62a4, 0),# East Asian ideograph
- 0x215949: (0x8b7d, 0),# East Asian ideograph
- 0x27594a: (0x8bfb, 0),# East Asian ideograph
- 0x21594b: (0x8b8a, 0),# East Asian ideograph
- 0x27594c: (0x8ba9, 0),# East Asian ideograph
- 0x21594d: (0x8b96, 0),# East Asian ideograph
- 0x21594e: (0x8b92, 0),# East Asian ideograph
- 0x23594f: (0x9c67, 0),# East Asian ideograph
- 0x6f5950: (0xcd2c, 0),# Korean hangul
- 0x215951: (0x8c41, 0),# East Asian ideograph
- 0x215952: (0x8c3f, 0),# East Asian ideograph
- 0x215953: (0x8c46, 0),# East Asian ideograph
- 0x225954: (0x73f5, 0),# East Asian ideograph
- 0x235955: (0x9c5f, 0),# East Asian ideograph
- 0x235956: (0x9c60, 0),# East Asian ideograph
- 0x215957: (0x8c4e, 0),# East Asian ideograph
- 0x235958: (0x9c6d, 0),# East Asian ideograph
- 0x215959: (0x8c54, 0),# East Asian ideograph
- 0x21595a: (0x8c5a, 0),# East Asian ideograph
- 0x23595b: (0x9c68, 0),# East Asian ideograph
- 0x22595c: (0x7407, 0),# East Asian ideograph
- 0x21595d: (0x8c6a, 0),# East Asian ideograph
- 0x22595e: (0x7412, 0),# East Asian ideograph
- 0x21595f: (0x8c6c, 0),# East Asian ideograph
- 0x215960: (0x8c7a, 0),# East Asian ideograph
- 0x215961: (0x8c79, 0),# East Asian ideograph
- 0x215962: (0x8c82, 0),# East Asian ideograph
- 0x225963: (0x743c, 0),# East Asian ideograph
- 0x215964: (0x8c89, 0),# East Asian ideograph
- 0x215965: (0x8c8d, 0),# East Asian ideograph
- 0x225966: (0x742e, 0),# East Asian ideograph
- 0x225967: (0x742f, 0),# East Asian ideograph
- 0x275968: (0x8d1d, 0),# East Asian ideograph
- 0x225969: (0x7414, 0),# East Asian ideograph
- 0x22596a: (0x742c, 0),# East Asian ideograph
- 0x27596b: (0x8d21, 0),# East Asian ideograph
- 0x27596c: (0x8d22, 0),# East Asian ideograph
- 0x27596d: (0x8d23, 0),# East Asian ideograph
- 0x22596e: (0x742b, 0),# East Asian ideograph
- 0x21596f: (0x8ca8, 0),# East Asian ideograph
- 0x225970: (0x73f7, 0),# East Asian ideograph
- 0x225971: (0x741a, 0),# East Asian ideograph
- 0x275972: (0x8d29, 0),# East Asian ideograph
- 0x235973: (0x9ce7, 0),# East Asian ideograph
- 0x235974: (0x9cf0, 0),# East Asian ideograph
- 0x215975: (0x8cbb, 0),# East Asian ideograph
- 0x215976: (0x8cc1, 0),# East Asian ideograph
- 0x235977: (0x9cf2, 0),# East Asian ideograph
- 0x225978: (0x7416, 0),# East Asian ideograph
- 0x215979: (0x8cbc, 0),# East Asian ideograph
- 0x22597a: (0x7426, 0),# East Asian ideograph
- 0x21597b: (0x8cb6, 0),# East Asian ideograph
- 0x21597c: (0x8cbd, 0),# East Asian ideograph
- 0x27597d: (0x8d37, 0),# East Asian ideograph
- 0x21597e: (0x8cbf, 0),# East Asian ideograph
- 0x222441: (0x5cf4, 0),# East Asian ideograph
- 0x224f2b: (0x701e, 0),# East Asian ideograph (variant of 4C4F2B which maps to 701E)
- 0x6f587e: (0xcc28, 0),# Korean hangul
- 0x275e58: (0x9602, 0),# East Asian ideograph
- 0x6f543b: (0xc315, 0),# Korean hangul
- 0x217e52: (0x5ba7, 0),# East Asian ideograph
- 0x333573: (0x8656, 0),# East Asian ideograph
- 0x222446: (0x5cf1, 0),# East Asian ideograph
- 0x69243f: (0x305f, 0),# Hiragana letter TA
- 0x27613e: (0x9a8f, 0),# East Asian ideograph
- 0x6f5b69: (0xd305, 0),# Korean hangul
- 0x2d5c2f: (0x8fe8, 0),# East Asian ideograph
- 0x2d4c3e: (0x758e, 0),# East Asian ideograph
- 0x6f4e74: (0xb7ab, 0),# Korean hangul
- 0x6f7655: (0xe8c7, 0),# Korean hangul
- 0x225f7b: (0x7657, 0),# East Asian ideograph
- 0x213e64: (0x60d5, 0),# East Asian ideograph
- 0x234662: (0x93f9, 0),# East Asian ideograph
- 0x696449: (0x7c13, 0),# East Asian ideograph
- 0x276137: (0x9a76, 0),# East Asian ideograph
- 0x69244a: (0x306a, 0),# Hiragana letter NA
- 0x4b6147: (0x99c6, 0),# East Asian ideograph
- 0x333556: (0x9a03, 0),# East Asian ideograph
- 0x69644c: (0x7c17, 0),# East Asian ideograph
- 0x6f4d66: (0xb4e4, 0),# Korean hangul
- 0x213e65: (0x60bc, 0),# East Asian ideograph
- 0x69644e: (0x7bf6, 0),# East Asian ideograph
- 0x6f587b: (0xcc27, 0),# Korean hangul
- 0x2d3b33: (0x8a67, 0),# East Asian ideograph
- 0x214b2f: (0x7380, 0),# East Asian ideograph
- 0x6f5438: (0xc30d, 0),# Korean hangul
- 0x6f543e: (0xc324, 0),# Korean hangul
- 0x6f7651: (0xe8c3, 0),# Korean hangul
- 0x216452: (0x4eb3, 0),# East Asian ideograph
- 0x294e43: (0x97af, 0),# East Asian ideograph
- 0x234664: (0x93c4, 0),# East Asian ideograph
- 0x342453: (0x5cbd, 0),# East Asian ideograph
- 0x692454: (0x3074, 0),# Hiragana letter PI
- 0x225372: (0x71ba, 0),# East Asian ideograph
- 0x4b6455: (0x4eb6, 0),# East Asian ideograph
- 0x6f4f7a: (0xb9f7, 0),# Korean hangul
- 0x6f543f: (0xc327, 0),# Korean hangul
- 0x4b503b: (0x7c12, 0),# East Asian ideograph
- 0x692457: (0x3077, 0),# Hiragana letter PU
- 0x223e23: (0x691a, 0),# East Asian ideograph
- 0x234222: (0x91e4, 0),# East Asian ideograph
- 0x692459: (0x3079, 0),# Hiragana letter BE
- 0x21645a: (0x4ebc, 0),# East Asian ideograph
- 0x4b4444: (0x8988, 0),# East Asian ideograph (Version J extension)
- 0x215a21: (0x8cc5, 0),# East Asian ideograph
- 0x275a22: (0x8d44, 0),# East Asian ideograph
- 0x275a23: (0x8d3c, 0),# East Asian ideograph
- 0x215a24: (0x8cc8, 0),# East Asian ideograph
- 0x275a25: (0x8d3f, 0),# East Asian ideograph
- 0x215a26: (0x8cb2, 0),# East Asian ideograph
- 0x275a27: (0x8d41, 0),# East Asian ideograph
- 0x225a28: (0x7420, 0),# East Asian ideograph
- 0x275a29: (0x5bbe, 0),# East Asian ideograph
- 0x275a2a: (0x8d48, 0),# East Asian ideograph
- 0x275a2b: (0x8d4a, 0),# East Asian ideograph
- 0x215a2c: (0x8ce0, 0),# East Asian ideograph
- 0x275a2d: (0x8d4b, 0),# East Asian ideograph
- 0x6f5a2e: (0xcf00, 0),# Korean hangul
- 0x275a2f: (0x5356, 0),# East Asian ideograph
- 0x235a30: (0x9d25, 0),# East Asian ideograph
- 0x215a31: (0x8ce4, 0),# East Asian ideograph
- 0x215a32: (0x8cde, 0),# East Asian ideograph
- 0x275a33: (0x8d50, 0),# East Asian ideograph
- 0x215a34: (0x8cea, 0),# East Asian ideograph
- 0x275a35: (0x8d4c, 0),# East Asian ideograph
- 0x215a36: (0x8cf4, 0),# East Asian ideograph
- 0x215a37: (0x8cfd, 0),# East Asian ideograph
- 0x275a38: (0x8d5a, 0),# East Asian ideograph
- 0x275a39: (0x8d58, 0),# East Asian ideograph
- 0x275a3a: (0x8d2d, 0),# East Asian ideograph
- 0x275a3b: (0x8d60, 0),# East Asian ideograph
- 0x275a3c: (0x8d5d, 0),# East Asian ideograph
- 0x275a3d: (0x8d5e, 0),# East Asian ideograph
- 0x275a3e: (0x8d62, 0),# East Asian ideograph
- 0x275a3f: (0x8d61, 0),# East Asian ideograph
- 0x275a40: (0x8d43, 0),# East Asian ideograph
- 0x275a41: (0x8d4e, 0),# East Asian ideograph
- 0x275a42: (0x8d63, 0),# East Asian ideograph
- 0x215a43: (0x8d64, 0),# East Asian ideograph
- 0x215a44: (0x8d67, 0),# East Asian ideograph
- 0x215a45: (0x8d66, 0),# East Asian ideograph
- 0x215a46: (0x8d6b, 0),# East Asian ideograph
- 0x215a47: (0x8d6d, 0),# East Asian ideograph
- 0x235a48: (0x9d1f, 0),# East Asian ideograph
- 0x215a49: (0x8d74, 0),# East Asian ideograph
- 0x215a4a: (0x8d73, 0),# East Asian ideograph
- 0x215a4b: (0x8d77, 0),# East Asian ideograph
- 0x215a4c: (0x8d85, 0),# East Asian ideograph
- 0x215a4d: (0x8d8a, 0),# East Asian ideograph
- 0x215a4e: (0x8d81, 0),# East Asian ideograph
- 0x275a4f: (0x8d75, 0),# East Asian ideograph
- 0x215a50: (0x8d95, 0),# East Asian ideograph
- 0x215a51: (0x8da3, 0),# East Asian ideograph
- 0x215a52: (0x8d9f, 0),# East Asian ideograph
- 0x275a53: (0x8d8b, 0),# East Asian ideograph
- 0x215a54: (0x8db3, 0),# East Asian ideograph
- 0x215a55: (0x8db4, 0),# East Asian ideograph
- 0x215a56: (0x8dbe, 0),# East Asian ideograph
- 0x215a57: (0x8dce, 0),# East Asian ideograph
- 0x215a58: (0x8ddd, 0),# East Asian ideograph
- 0x214b33: (0x738b, 0),# East Asian ideograph
- 0x215a5a: (0x8dcb, 0),# East Asian ideograph
- 0x215a5b: (0x8dda, 0),# East Asian ideograph
- 0x215a5c: (0x8dc6, 0),# East Asian ideograph
- 0x215a5d: (0x8dd1, 0),# East Asian ideograph
- 0x215a5e: (0x8dcc, 0),# East Asian ideograph
- 0x215a5f: (0x8de1, 0),# East Asian ideograph
- 0x215a60: (0x8ddf, 0),# East Asian ideograph
- 0x215a61: (0x8de8, 0),# East Asian ideograph
- 0x225a62: (0x7473, 0),# East Asian ideograph
- 0x235a63: (0x9d3e, 0),# East Asian ideograph
- 0x215a64: (0x8dea, 0),# East Asian ideograph
- 0x215a65: (0x8def, 0),# East Asian ideograph
- 0x215a66: (0x8dfc, 0),# East Asian ideograph
- 0x215a67: (0x8e2b, 0),# East Asian ideograph
- 0x235a68: (0x9d42, 0),# East Asian ideograph
- 0x235a69: (0x9d40, 0),# East Asian ideograph
- 0x215a6a: (0x8e1d, 0),# East Asian ideograph
- 0x215a6b: (0x8e0f, 0),# East Asian ideograph
- 0x215a6c: (0x8e29, 0),# East Asian ideograph
- 0x215a6d: (0x8e1f, 0),# East Asian ideograph
- 0x215a6e: (0x8e44, 0),# East Asian ideograph
- 0x215a6f: (0x8e31, 0),# East Asian ideograph
- 0x215a70: (0x8e42, 0),# East Asian ideograph
- 0x215a71: (0x8e34, 0),# East Asian ideograph
- 0x215a72: (0x8e39, 0),# East Asian ideograph
- 0x215a73: (0x8e35, 0),# East Asian ideograph
- 0x215a74: (0x8e49, 0),# East Asian ideograph
- 0x235a75: (0x9d53, 0),# East Asian ideograph
- 0x215a76: (0x8e48, 0),# East Asian ideograph
- 0x215a77: (0x8e4a, 0),# East Asian ideograph
- 0x215a78: (0x8e63, 0),# East Asian ideograph
- 0x215a79: (0x8e59, 0),# East Asian ideograph
- 0x215a7a: (0x8e66, 0),# East Asian ideograph
- 0x215a7b: (0x8e64, 0),# East Asian ideograph
- 0x215a7c: (0x8e72, 0),# East Asian ideograph
- 0x215a7d: (0x8e6c, 0),# East Asian ideograph
- 0x275a7e: (0x8df7, 0),# East Asian ideograph
- 0x6f5439: (0xc313, 0),# Korean hangul
- 0x6f5443: (0xc343, 0),# Korean hangul
- 0x22646b: (0x789a, 0),# East Asian ideograph
- 0x213a28: (0x5a41, 0),# East Asian ideograph
- 0x6f2458: (0x3134, 0),# Korean hangul
- 0x234226: (0x922b, 0),# East Asian ideograph
- 0x27515c: (0x7ebf, 0),# East Asian ideograph
- 0x475222: (0x9957, 0),# East Asian ideograph
- 0x6f246e: (0x3143, 0),# Korean hangul
- 0x335333: (0x80bb, 0),# East Asian ideograph
- 0x6f2470: (0x3146, 0),# Korean hangul
- 0x692471: (0x3091, 0),# Hiragana letter WE
- 0x232472: (0x852f, 0),# East Asian ideograph
- 0x6f2473: (0x3150, 0),# Korean hangul
- 0x224f35: (0x7021, 0),# East Asian ideograph
- 0x6f2474: (0x3151, 0),# Korean hangul
- 0x6f5445: (0xc368, 0),# Korean hangul
- 0x6f5a31: (0xcf08, 0),# Korean hangul
- 0x6f2476: (0x3153, 0),# Korean hangul
- 0x295729: (0x9c87, 0),# East Asian ideograph
- 0x6f4f21: (0xb7fd, 0),# Korean hangul
- 0x232477: (0x84f7, 0),# East Asian ideograph
- 0x274f22: (0x4e07, 0),# East Asian ideograph
- 0x234f23: (0x980e, 0),# East Asian ideograph
- 0x6f4e42: (0xb611, 0),# Korean hangul
- 0x224f24: (0x7020, 0),# East Asian ideograph
- 0x6f5446: (0xc369, 0),# Korean hangul
- 0x6f247a: (0x3157, 0),# Korean hangul
- 0x274f25: (0x53b6, 0),# East Asian ideograph
- 0x4b333e: (0xf92e, 0),# East Asian ideograph
- 0x224f26: (0x7027, 0),# East Asian ideograph
- 0x214f27: (0x79c9, 0),# East Asian ideograph
- 0x22537a: (0x71bf, 0),# East Asian ideograph
- 0x22537c: (0x71b8, 0),# East Asian ideograph
- 0x29247d: (0x835c, 0),# East Asian ideograph
- 0x6f4f28: (0xb80c, 0),# Korean hangul
- 0x212a38: (0xe8e5, 0),# EACC component character
- 0x276775: (0x4f65, 0),# East Asian ideograph
- 0x6f5447: (0xc36c, 0),# Korean hangul
- 0x6f4f2a: (0xb818, 0),# Korean hangul
- 0x23422a: (0x9292, 0),# East Asian ideograph
- 0x2d5d56: (0x920e, 0),# East Asian ideograph
- 0x234f2c: (0x9826, 0),# East Asian ideograph
- 0x6f5d5a: (0xd6fc, 0),# Korean hangul
- 0x234f2d: (0x981e, 0),# East Asian ideograph
- 0x6f7652: (0xe8c4, 0),# Korean hangul
- 0x6f4f2e: (0xb824, 0),# Korean hangul
- 0x2d3c26: (0x5d18, 0),# East Asian ideograph
- 0x6f5448: (0xc370, 0),# Korean hangul
- 0x213e70: (0x60fb, 0),# East Asian ideograph
- 0x224f2f: (0x702e, 0),# East Asian ideograph
- 0x225b21: (0x7489, 0),# East Asian ideograph
- 0x225b22: (0x747c, 0),# East Asian ideograph
- 0x215b23: (0x8e82, 0),# East Asian ideograph
- 0x215b24: (0x8e81, 0),# East Asian ideograph
- 0x215b25: (0x8e87, 0),# East Asian ideograph
- 0x215b26: (0x8e89, 0),# East Asian ideograph
- 0x214f31: (0x79fb, 0),# East Asian ideograph
- 0x225b28: (0x747e, 0),# East Asian ideograph
- 0x275b29: (0x8dc3, 0),# East Asian ideograph
- 0x235b2a: (0x9d52, 0),# East Asian ideograph
- 0x215b2b: (0x8ea1, 0),# East Asian ideograph
- 0x235b2c: (0x9d77, 0),# East Asian ideograph
- 0x224f39: (0x7018, 0),# East Asian ideograph
- 0x215b2e: (0x8eac, 0),# East Asian ideograph
- 0x215b2f: (0x8eb2, 0),# East Asian ideograph
- 0x225b30: (0x747a, 0),# East Asian ideograph
- 0x215b31: (0x8ec0, 0),# East Asian ideograph
- 0x215b32: (0x8eca, 0),# East Asian ideograph
- 0x275b33: (0x8f67, 0),# East Asian ideograph
- 0x215b34: (0x8ecd, 0),# East Asian ideograph
- 0x215b35: (0x8ecc, 0),# East Asian ideograph
- 0x215b36: (0x8ed2, 0),# East Asian ideograph
- 0x215b37: (0x8ed4, 0),# East Asian ideograph
- 0x215b38: (0x8edf, 0),# East Asian ideograph
- 0x215b39: (0x8edb, 0),# East Asian ideograph
- 0x215b3a: (0x8efb, 0),# East Asian ideograph
- 0x275b3b: (0x8f74, 0),# East Asian ideograph
- 0x215b3c: (0x8efc, 0),# East Asian ideograph
- 0x215b3d: (0x8f03, 0),# East Asian ideograph
- 0x225b3e: (0x747d, 0),# East Asian ideograph
- 0x235b3f: (0x9d78, 0),# East Asian ideograph
- 0x215b40: (0x8f0a, 0),# East Asian ideograph
- 0x215b41: (0x8f14, 0),# East Asian ideograph
- 0x235b42: (0x9d7e, 0),# East Asian ideograph
- 0x215b43: (0x8f15, 0),# East Asian ideograph
- 0x215b44: (0x8f13, 0),# East Asian ideograph
- 0x215b45: (0x8f26, 0),# East Asian ideograph
- 0x215b46: (0x8f1b, 0),# East Asian ideograph
- 0x235b47: (0x9d69, 0),# East Asian ideograph
- 0x215b48: (0x8f1d, 0),# East Asian ideograph
- 0x215b49: (0x8f29, 0),# East Asian ideograph
- 0x215b4a: (0x8f2a, 0),# East Asian ideograph
- 0x275b4b: (0x8f8e, 0),# East Asian ideograph
- 0x215b4c: (0x8f3b, 0),# East Asian ideograph
- 0x215b4d: (0x8f2f, 0),# East Asian ideograph
- 0x215b4e: (0x8f38, 0),# East Asian ideograph
- 0x235b4f: (0x9d83, 0),# East Asian ideograph
- 0x215b50: (0x8f3e, 0),# East Asian ideograph
- 0x215b51: (0x8f45, 0),# East Asian ideograph
- 0x215b52: (0x8f42, 0),# East Asian ideograph (variant of 4B5B52 which maps to 8F42)
- 0x215b53: (0x8f3f, 0),# East Asian ideograph
- 0x225b54: (0x749f, 0),# East Asian ideograph
- 0x225b55: (0x749d, 0),# East Asian ideograph
- 0x215b56: (0x8f54, 0),# East Asian ideograph
- 0x225b57: (0x749e, 0),# East Asian ideograph
- 0x215b58: (0x8f5f, 0),# East Asian ideograph
- 0x215b59: (0x8f61, 0),# East Asian ideograph
- 0x215b5a: (0x8f9b, 0),# East Asian ideograph
- 0x215b5b: (0x8f9c, 0),# East Asian ideograph
- 0x215b5c: (0x8f9f, 0),# East Asian ideograph
- 0x224f3a: (0x7023, 0),# East Asian ideograph
- 0x235b5e: (0x9d92, 0),# East Asian ideograph
- 0x215b5f: (0x8fa6, 0),# East Asian ideograph
- 0x225b60: (0x74b2, 0),# East Asian ideograph
- 0x215b61: (0x8faf, 0),# East Asian ideograph
- 0x215b62: (0x8fb0, 0),# East Asian ideograph
- 0x215b63: (0x8fb1, 0),# East Asian ideograph
- 0x215b64: (0x8fb2, 0),# East Asian ideograph
- 0x295e6a: (0x9eea, 0),# East Asian ideograph
- 0x225b66: (0x74b4, 0),# East Asian ideograph
- 0x225b67: (0x74ab, 0),# East Asian ideograph
- 0x215b68: (0x8fc4, 0),# East Asian ideograph
- 0x215b69: (0x5de1, 0),# East Asian ideograph
- 0x215b6a: (0x8fce, 0),# East Asian ideograph
- 0x215b6b: (0x8fd1, 0),# East Asian ideograph
- 0x215b6c: (0x8fd4, 0),# East Asian ideograph
- 0x275b6d: (0x8ff0, 0),# East Asian ideograph
- 0x215b6e: (0x8fe6, 0),# East Asian ideograph
- 0x215b6f: (0x8fe2, 0),# East Asian ideograph
- 0x235b70: (0x9d96, 0),# East Asian ideograph
- 0x215b71: (0x8fe5, 0),# East Asian ideograph
- 0x6f544b: (0xc379, 0),# Korean hangul
- 0x215b73: (0x8feb, 0),# East Asian ideograph
- 0x215b74: (0x9001, 0),# East Asian ideograph
- 0x215b75: (0x9006, 0),# East Asian ideograph
- 0x225b76: (0x74b8, 0),# East Asian ideograph
- 0x215b77: (0x9000, 0),# East Asian ideograph
- 0x6f5b78: (0xd329, 0),# Korean hangul
- 0x235b79: (0x9dc0, 0),# East Asian ideograph
- 0x225b7a: (0x74c0, 0),# East Asian ideograph
- 0x23422e: (0x9207, 0),# East Asian ideograph
- 0x215b7c: (0x9005, 0),# East Asian ideograph
- 0x215b7d: (0x9019, 0),# East Asian ideograph
- 0x215b7e: (0x9023, 0),# East Asian ideograph
- 0x214f40: (0x7a40, 0),# East Asian ideograph
- 0x393c52: (0x8d26, 0),# East Asian ideograph
- 0x224f41: (0x703c, 0),# East Asian ideograph
- 0x213941: (0x596e, 0),# East Asian ideograph
- 0x6f4f42: (0xb8e1, 0),# Korean hangul
- 0x6f544c: (0xc37c, 0),# Korean hangul
- 0x4b4f43: (0x7a32, 0),# East Asian ideograph
- 0x213a31: (0x5a9b, 0),# East Asian ideograph
- 0x224f44: (0x7035, 0),# East Asian ideograph
- 0x213a41: (0x5afb, 0),# East Asian ideograph
- 0x234f45: (0x9832, 0),# East Asian ideograph
- 0x294331: (0x94f1, 0),# East Asian ideograph
- 0x274f46: (0x7a23, 0),# East Asian ideograph
- 0x6f4f47: (0xb8f8, 0),# Korean hangul
- 0x2d4f48: (0x7a42, 0),# East Asian ideograph
- 0x213a32: (0x5acc, 0),# East Asian ideograph
- 0x294231: (0x94af, 0),# East Asian ideograph
- 0x214f49: (0x7a61, 0),# East Asian ideograph
- 0x4c3744: (0x65d9, 0),# East Asian ideograph
- 0x274f4a: (0x79fd, 0),# East Asian ideograph
- 0x214f4b: (0x7a6b, 0),# East Asian ideograph
- 0x227631: (0x8008, 0),# East Asian ideograph
- 0x274f4c: (0x7a33, 0),# East Asian ideograph
- 0x6f544e: (0xc384, 0),# Korean hangul
- 0x6f4f4d: (0xb958, 0),# Korean hangul
- 0x6f5361: (0xc229, 0),# Korean hangul
- 0x213a33: (0x5ac1, 0),# East Asian ideograph
- 0x234f4e: (0x9844, 0),# East Asian ideograph
- 0x4b393a: (0x5f09, 0),# East Asian ideograph
- 0x6f4f4f: (0xb95c, 0),# Korean hangul
- 0x334755: (0x6fec, 0),# East Asian ideograph
- 0x6f4f50: (0xb960, 0),# Korean hangul
- 0x6f4c69: (0xb2dd, 0),# Korean hangul
- 0x23533e: (0x9a0b, 0),# East Asian ideograph
- 0x224f51: (0x7034, 0),# East Asian ideograph
- 0x274564: (0x69df, 0),# East Asian ideograph
- 0x6f544f: (0xc388, 0),# Korean hangul
- 0x213e77: (0x611b, 0),# East Asian ideograph
- 0x6f4f52: (0xb96d, 0),# Korean hangul
- 0x213a34: (0x5ac9, 0),# East Asian ideograph
- 0x217e79: (0x5be0, 0),# East Asian ideograph
- 0x224f53: (0x7039, 0),# East Asian ideograph
- 0x224f54: (0x703a, 0),# East Asian ideograph
- 0x395e6f: (0x7a7d, 0),# East Asian ideograph
- 0x6f4f55: (0xb978, 0),# Korean hangul
- 0x23533f: (0x9a09, 0),# East Asian ideograph
- 0x6f4e44: (0xb618, 0),# Korean hangul
- 0x6f4f56: (0xb97c, 0),# Korean hangul
- 0x6f5450: (0xc399, 0),# Korean hangul
- 0x6f4f57: (0xb984, 0),# Korean hangul
- 0x213a35: (0x5abe, 0),# East Asian ideograph
- 0x696b27: (0x8977, 0),# East Asian ideograph
- 0x234233: (0x91fe, 0),# East Asian ideograph
- 0x3a2f7c: (0x64c0, 0),# East Asian ideograph
- 0x395a2f: (0x58f2, 0),# East Asian ideograph
- 0x334f59: (0x7a93, 0),# East Asian ideograph
- 0x293c57: (0x8f79, 0),# East Asian ideograph
- 0x6f4f5a: (0xb989, 0),# Korean hangul
- 0x215c21: (0x901f, 0),# East Asian ideograph
- 0x215c22: (0x9017, 0),# East Asian ideograph
- 0x215c23: (0x901d, 0),# East Asian ideograph
- 0x215c24: (0x9010, 0),# East Asian ideograph
- 0x225c25: (0x74bf, 0),# East Asian ideograph
- 0x215c26: (0x900d, 0),# East Asian ideograph
- 0x215c27: (0x901e, 0),# East Asian ideograph
- 0x235c28: (0x9dbb, 0),# East Asian ideograph
- 0x274123: (0x6302, 0),# East Asian ideograph
- 0x215c2a: (0x900f, 0),# East Asian ideograph
- 0x215c2b: (0x9022, 0),# East Asian ideograph
- 0x215c2c: (0x9016, 0),# East Asian ideograph
- 0x215c2d: (0x901b, 0),# East Asian ideograph
- 0x215c2e: (0x9014, 0),# East Asian ideograph
- 0x214f5d: (0x7aa9, 0),# East Asian ideograph
- 0x215c30: (0x9035, 0),# East Asian ideograph
- 0x215c31: (0x9031, 0),# East Asian ideograph
- 0x235c32: (0x9db9, 0),# East Asian ideograph
- 0x275c33: (0x8fdb, 0),# East Asian ideograph
- 0x275c34: (0x8fd0, 0),# East Asian ideograph
- 0x2d4f5e: (0x7ab0, 0),# East Asian ideograph
- 0x215c36: (0x9053, 0),# East Asian ideograph
- 0x215c37: (0x9042, 0),# East Asian ideograph
- 0x215c38: (0x9050, 0),# East Asian ideograph
- 0x275c39: (0x8fbe, 0),# East Asian ideograph
- 0x275c3a: (0x8fdd, 0),# East Asian ideograph
- 0x274f5f: (0x7a77, 0),# East Asian ideograph
- 0x275c3c: (0x8fc2, 0),# East Asian ideograph
- 0x215c3d: (0x904f, 0),# East Asian ideograph
- 0x235c3e: (0x9dd9, 0),# East Asian ideograph
- 0x215c3f: (0x904d, 0),# East Asian ideograph
- 0x215c40: (0x9051, 0),# East Asian ideograph
- 0x214f60: (0x7aba, 0),# East Asian ideograph
- 0x215c42: (0x903e, 0),# East Asian ideograph
- 0x215c43: (0x9058, 0),# East Asian ideograph
- 0x275c44: (0x8fdc, 0),# East Asian ideograph
- 0x275c45: (0x900a, 0),# East Asian ideograph
- 0x215c46: (0x9063, 0),# East Asian ideograph
- 0x214f61: (0x7ac5, 0),# East Asian ideograph
- 0x275c48: (0x9012, 0),# East Asian ideograph
- 0x215c49: (0x9069, 0),# East Asian ideograph
- 0x215c4a: (0x906e, 0),# East Asian ideograph
- 0x215c4b: (0x9068, 0),# East Asian ideograph
- 0x215c4c: (0x906d, 0),# East Asian ideograph
- 0x214f62: (0x7ac4, 0),# East Asian ideograph
- 0x215c4e: (0x9074, 0),# East Asian ideograph
- 0x275c4f: (0x9009, 0),# East Asian ideograph
- 0x275c50: (0x8fdf, 0),# East Asian ideograph
- 0x215c51: (0x9077, 0),# East Asian ideograph
- 0x215c52: (0x907c, 0),# East Asian ideograph
- 0x275c53: (0x9057, 0),# East Asian ideograph
- 0x215c54: (0x907f, 0),# East Asian ideograph
- 0x215c55: (0x907d, 0),# East Asian ideograph
- 0x275c56: (0x8fc8, 0),# East Asian ideograph
- 0x235c57: (0x9df2, 0),# East Asian ideograph
- 0x215c58: (0x9082, 0),# East Asian ideograph
- 0x215c59: (0x9080, 0),# East Asian ideograph
- 0x275c5a: (0x8fe9, 0),# East Asian ideograph (variant of 2D5C5A which maps to 8FE9)
- 0x275c5b: (0x8fb9, 0),# East Asian ideograph
- 0x275c5c: (0x9026, 0),# East Asian ideograph
- 0x275c5d: (0x903b, 0),# East Asian ideograph
- 0x215c5e: (0x9091, 0),# East Asian ideograph
- 0x215c5f: (0x9095, 0),# East Asian ideograph
- 0x215c60: (0x90a3, 0),# East Asian ideograph
- 0x215c61: (0x90a2, 0),# East Asian ideograph
- 0x215c62: (0x90aa, 0),# East Asian ideograph
- 0x215c63: (0x90a6, 0),# East Asian ideograph
- 0x215c64: (0x90b5, 0),# East Asian ideograph
- 0x215c65: (0x90b1, 0),# East Asian ideograph
- 0x215c66: (0x90b8, 0),# East Asian ideograph
- 0x215c67: (0x90ce, 0),# East Asian ideograph
- 0x215c68: (0x90ca, 0),# East Asian ideograph
- 0x4b5564: (0x77c7, 0),# East Asian ideograph
- 0x235c6a: (0x9ded, 0),# East Asian ideograph
- 0x215c6b: (0x90e8, 0),# East Asian ideograph
- 0x215c6c: (0x90ed, 0),# East Asian ideograph
- 0x275c6d: (0x90ae, 0),# East Asian ideograph
- 0x215c6e: (0x90fd, 0),# East Asian ideograph
- 0x215c6f: (0x9102, 0),# East Asian ideograph
- 0x275c70: (0x4e61, 0),# East Asian ideograph
- 0x275c71: (0x90b9, 0),# East Asian ideograph
- 0x215c72: (0x9119, 0),# East Asian ideograph
- 0x275c73: (0x90d1, 0),# East Asian ideograph
- 0x275c74: (0x90bb, 0),# East Asian ideograph
- 0x275c75: (0x9093, 0),# East Asian ideograph
- 0x215c76: (0x9131, 0),# East Asian ideograph
- 0x214f69: (0x7aed, 0),# East Asian ideograph
- 0x215c78: (0x9149, 0),# East Asian ideograph
- 0x215c79: (0x914b, 0),# East Asian ideograph
- 0x215c7a: (0x914a, 0),# East Asian ideograph
- 0x215c7b: (0x9152, 0),# East Asian ideograph
- 0x215c7c: (0x914d, 0),# East Asian ideograph
- 0x215c7d: (0x914c, 0),# East Asian ideograph
- 0x215c7e: (0x9157, 0),# East Asian ideograph
- 0x6f5454: (0xc3df, 0),# Korean hangul
- 0x6f5a34: (0xcf13, 0),# Korean hangul
- 0x214f6b: (0x7af6, 0),# East Asian ideograph
- 0x213a39: (0x5ab3, 0),# East Asian ideograph
- 0x234237: (0x9226, 0),# East Asian ideograph
- 0x6f4f6d: (0xb9d8, 0),# Korean hangul
- 0x695a31: (0x64f6, 0),# East Asian ideograph
- 0x6f4e45: (0xb625, 0),# Korean hangul
- 0x234f6f: (0x9857, 0),# East Asian ideograph
- 0x27456a: (0x6988, 0),# East Asian ideograph
- 0x213e7d: (0x6108, 0),# East Asian ideograph
- 0x395821: (0x97e4, 0),# East Asian ideograph
- 0x274f70: (0x5df4, 0),# East Asian ideograph (duplicate simplified)
- 0x213a3a: (0x5ae1, 0),# East Asian ideograph
- 0x224f71: (0x7052, 0),# East Asian ideograph
- 0x234f72: (0x9856, 0),# East Asian ideograph
- 0x295e7a: (0x9efe, 0),# East Asian ideograph
- 0x224f73: (0x705c, 0),# East Asian ideograph
- 0x213f39: (0x6163, 0),# East Asian ideograph
- 0x6f4f74: (0xb9e4, 0),# Korean hangul
- 0x6f5456: (0xc3e8, 0),# Korean hangul
- 0x213e7e: (0x60f1, 0),# East Asian ideograph
- 0x214f75: (0x7b1b, 0),# East Asian ideograph
- 0x213a3b: (0x5ad7, 0),# East Asian ideograph
- 0x284e66: (0x6ee2, 0),# East Asian ideograph
- 0x213a21: (0x5a46, 0),# East Asian ideograph
- 0x234f77: (0x9862, 0),# East Asian ideograph
- 0x275235: (0x7f62, 0),# East Asian ideograph
- 0x224f78: (0x7059, 0),# East Asian ideograph
- 0x213a23: (0x5a6a, 0),# East Asian ideograph
- 0x274f79: (0x7b14, 0),# East Asian ideograph
- 0x213a24: (0x5a36, 0),# East Asian ideograph
- 0x6f5457: (0xc3ed, 0),# Korean hangul
- 0x22427e: (0x6aed, 0),# East Asian ideograph
- 0x214f7b: (0x7b50, 0),# East Asian ideograph
- 0x213a26: (0x5a40, 0),# East Asian ideograph
- 0x695e63: (0x6e82, 0),# East Asian ideograph
- 0x275679: (0x80e1, 0),# East Asian ideograph (duplicate simplified)
- 0x224f7c: (0x7061, 0),# East Asian ideograph
- 0x213a27: (0x5a66, 0),# East Asian ideograph
- 0x224f7d: (0x705d, 0),# East Asian ideograph
- 0x223a28: (0x6705, 0),# East Asian ideograph
- 0x335347: (0x81d9, 0),# East Asian ideograph
- 0x293b4f: (0x8f78, 0),# East Asian ideograph
- 0x234f7e: (0x9868, 0),# East Asian ideograph
- 0x6f4f7b: (0xb9f8, 0),# Korean hangul
- 0x6f5458: (0xc3f4, 0),# Korean hangul
- 0x6f5363: (0xc22d, 0),# Korean hangul
- 0x2d5d68: (0x8021, 0),# East Asian ideograph
- 0x394928: (0x6d5c, 0),# East Asian ideograph
- 0x29366a: (0x8d53, 0),# East Asian ideograph
- 0x227a2c: (0x81b5, 0),# East Asian ideograph
- 0x223173: (0x637f, 0),# East Asian ideograph
- 0x2d5179: (0x7e62, 0),# East Asian ideograph
- 0x213a2e: (0x5a92, 0),# East Asian ideograph
- 0x6f5459: (0xc3f5, 0),# Korean hangul
- 0x2d3a2f: (0x58fb, 0),# East Asian ideograph
- 0x4b3351: (0x5204, 0),# East Asian ideograph
- 0x215d21: (0x9163, 0),# East Asian ideograph
- 0x215d22: (0x9165, 0),# East Asian ideograph
- 0x215d23: (0x916c, 0),# East Asian ideograph
- 0x215d24: (0x9169, 0),# East Asian ideograph
- 0x215d25: (0x916a, 0),# East Asian ideograph
- 0x215d26: (0x9175, 0),# East Asian ideograph
- 0x215d27: (0x9178, 0),# East Asian ideograph
- 0x215d28: (0x9177, 0),# East Asian ideograph
- 0x215d29: (0x9187, 0),# East Asian ideograph
- 0x215d2a: (0x9189, 0),# East Asian ideograph
- 0x215d2b: (0x918b, 0),# East Asian ideograph
- 0x215d2c: (0x9183, 0),# East Asian ideograph
- 0x215d2d: (0x9192, 0),# East Asian ideograph
- 0x215d2e: (0x91a3, 0),# East Asian ideograph
- 0x275d2f: (0x915d, 0),# East Asian ideograph
- 0x215d30: (0x919c, 0),# East Asian ideograph
- 0x275d31: (0x533b, 0),# East Asian ideograph
- 0x225d32: (0x7512, 0),# East Asian ideograph
- 0x215d33: (0x91ba, 0),# East Asian ideograph
- 0x275d34: (0x917f, 0),# East Asian ideograph
- 0x275d35: (0x8845, 0),# East Asian ideograph
- 0x215d36: (0x91c7, 0),# East Asian ideograph
- 0x215d37: (0x91c9, 0),# East Asian ideograph
- 0x215d38: (0x91cb, 0),# East Asian ideograph
- 0x235d39: (0x9e1c, 0),# East Asian ideograph
- 0x235d3a: (0x9e1b, 0),# East Asian ideograph
- 0x215d3b: (0x91ce, 0),# East Asian ideograph
- 0x235d3c: (0x9e75, 0),# East Asian ideograph
- 0x275d3d: (0x5398, 0),# East Asian ideograph
- 0x215d3e: (0x91d1, 0),# East Asian ideograph
- 0x215d3f: (0x91dd, 0),# East Asian ideograph
- 0x215d40: (0x91d8, 0),# East Asian ideograph
- 0x215d41: (0x91d7, 0),# East Asian ideograph
- 0x235d42: (0x9e7a, 0),# East Asian ideograph
- 0x215d43: (0x91f5, 0),# East Asian ideograph
- 0x225d44: (0x7524, 0),# East Asian ideograph
- 0x215d45: (0x91e3, 0),# East Asian ideograph
- 0x215d46: (0x91e7, 0),# East Asian ideograph
- 0x235d47: (0x9e80, 0),# East Asian ideograph
- 0x215d48: (0x920d, 0),# East Asian ideograph
- 0x215d49: (0x9215, 0),# East Asian ideograph
- 0x215d4a: (0x9209, 0),# East Asian ideograph
- 0x275d4b: (0x949e, 0),# East Asian ideograph
- 0x215d4c: (0x921e, 0),# East Asian ideograph
- 0x215d4d: (0x9210, 0),# East Asian ideograph
- 0x2d4c5d: (0x7661, 0),# East Asian ideograph
- 0x225d4f: (0x753f, 0),# East Asian ideograph
- 0x215d50: (0x9238, 0),# East Asian ideograph
- 0x225d51: (0x7540, 0),# East Asian ideograph
- 0x225d52: (0x753e, 0),# East Asian ideograph
- 0x215d53: (0x9240, 0),# East Asian ideograph
- 0x215d54: (0x924b, 0),# East Asian ideograph
- 0x235d55: (0x9e90, 0),# East Asian ideograph
- 0x215d56: (0x9264, 0),# East Asian ideograph
- 0x215d57: (0x9251, 0),# East Asian ideograph
- 0x235d58: (0x9e8c, 0),# East Asian ideograph
- 0x215d59: (0x9278, 0),# East Asian ideograph
- 0x215d5a: (0x9280, 0),# East Asian ideograph
- 0x275d5b: (0x94d0, 0),# East Asian ideograph
- 0x215d5c: (0x9285, 0),# East Asian ideograph
- 0x235d5d: (0x9e9b, 0),# East Asian ideograph
- 0x215d5e: (0x9296, 0),# East Asian ideograph
- 0x225d5f: (0x755f, 0),# East Asian ideograph
- 0x215d60: (0x9293, 0),# East Asian ideograph
- 0x275d61: (0x8854, 0),# East Asian ideograph
- 0x215d62: (0x92c5, 0),# East Asian ideograph
- 0x215d63: (0x92bb, 0),# East Asian ideograph
- 0x275d64: (0x9510, 0),# East Asian ideograph
- 0x275d65: (0x94fa, 0),# East Asian ideograph
- 0x275d66: (0x9500, 0),# East Asian ideograph
- 0x215d67: (0x92c1, 0),# East Asian ideograph
- 0x215d68: (0x92e4, 0),# East Asian ideograph
- 0x215d69: (0x92bc, 0),# East Asian ideograph
- 0x215d6a: (0x92d2, 0),# East Asian ideograph
- 0x225d6b: (0x756c, 0),# East Asian ideograph
- 0x215d6c: (0x9336, 0),# East Asian ideograph
- 0x275d6d: (0x952f, 0),# East Asian ideograph
- 0x215d6e: (0x9333, 0),# East Asian ideograph
- 0x215d6f: (0x932f, 0),# East Asian ideograph
- 0x215d70: (0x9322, 0),# East Asian ideograph
- 0x215d71: (0x92fc, 0),# East Asian ideograph
- 0x215d72: (0x932b, 0),# East Asian ideograph
- 0x215d73: (0x931a, 0),# East Asian ideograph
- 0x215d74: (0x9304, 0),# East Asian ideograph
- 0x213a3e: (0x5ae9, 0),# East Asian ideograph
- 0x275d76: (0x9526, 0),# East Asian ideograph
- 0x275d77: (0x9540, 0),# East Asian ideograph
- 0x275d78: (0x9541, 0),# East Asian ideograph
- 0x235d79: (0x9eaf, 0),# East Asian ideograph
- 0x215d7a: (0x9365, 0),# East Asian ideograph
- 0x213a3f: (0x5ad8, 0),# East Asian ideograph
- 0x215d7c: (0x934b, 0),# East Asian ideograph
- 0x215d7d: (0x9328, 0),# East Asian ideograph
- 0x215d7e: (0x9370, 0),# East Asian ideograph
- 0x4b5e3f: (0x922c, 0),# East Asian ideograph
- 0x213a40: (0x5ae6, 0),# East Asian ideograph
- 0x6f5367: (0xc234, 0),# Korean hangul
- 0x233a41: (0x8e61, 0),# East Asian ideograph
- 0x6f5825: (0xc990, 0),# Korean hangul
- 0x6f545d: (0xc434, 0),# Korean hangul
- 0x284971: (0x6d9e, 0),# East Asian ideograph
- 0x224a32: (0x6dfc, 0),# East Asian ideograph
- 0x227a43: (0x81d0, 0),# East Asian ideograph
- 0x213a44: (0x5b0c, 0),# East Asian ideograph
- 0x29366b: (0x8d55, 0),# East Asian ideograph
- 0x233a45: (0x8e74, 0),# East Asian ideograph
- 0x213a46: (0x5b34, 0),# East Asian ideograph
- 0x213a47: (0x5b1d, 0),# East Asian ideograph
- 0x6f545e: (0xc43c, 0),# Korean hangul
- 0x6f5a36: (0xcf1c, 0),# Korean hangul
- 0x273a48: (0x5ad4, 0),# East Asian ideograph
- 0x213a43: (0x5b0b, 0),# East Asian ideograph
- 0x395829: (0x69fb, 0),# East Asian ideograph
- 0x6f4c3e: (0xb1d0, 0),# Korean hangul
- 0x4b3a49: (0x5b37, 0),# East Asian ideograph
- 0x2d3b54: (0x5c4a, 0),# East Asian ideograph
- 0x6f5923: (0xcc2e, 0),# Korean hangul
- 0x213a4a: (0x5b30, 0),# East Asian ideograph
- 0x287855: (0x80eb, 0),# East Asian ideograph
- 0x233a4b: (0x8e69, 0),# East Asian ideograph
- 0x213a4c: (0x5b40, 0),# East Asian ideograph
- 0x6f545f: (0xc43f, 0),# Korean hangul
- 0x213a4d: (0x5b50, 0),# East Asian ideograph
- 0x213a4e: (0x5b51, 0),# East Asian ideograph
- 0x217a4f: (0x59ee, 0),# East Asian ideograph
- 0x225b3f: (0x7485, 0),# East Asian ideograph
- 0x295e7c: (0x9f0b, 0),# East Asian ideograph
- 0x29456f: (0x9538, 0),# East Asian ideograph
- 0x6f5460: (0xc464, 0),# Korean hangul
- 0x233a52: (0x8e83, 0),# East Asian ideograph
- 0x213a45: (0x5af5, 0),# East Asian ideograph
- 0x334243: (0x52b9, 0),# East Asian ideograph
- 0x233a53: (0x8e84, 0),# East Asian ideograph
- 0x2d592c: (0x8b01, 0),# East Asian ideograph
- 0x294335: (0x94f5, 0),# East Asian ideograph
- 0x4c3a55: (0x6741, 0),# East Asian ideograph
- 0x4b623b: (0x9d12, 0),# East Asian ideograph
- 0x217a56: (0x59fd, 0),# East Asian ideograph
- 0x6f5461: (0xc465, 0),# Korean hangul
- 0x213a57: (0x5b5f, 0),# East Asian ideograph
- 0x6f5a39: (0xcf2c, 0),# Korean hangul
- 0x213a58: (0x5b63, 0),# East Asian ideograph
- 0x692544: (0x30c4, 0),# Katakana letter TU
- 0x225622: (0x728d, 0),# East Asian ideograph
- 0x215e21: (0x937e, 0),# East Asian ideograph
- 0x275e22: (0x9524, 0),# East Asian ideograph
- 0x275e23: (0x9539, 0),# East Asian ideograph
- 0x215e24: (0x935b, 0),# East Asian ideograph
- 0x275e25: (0x9551, 0),# East Asian ideograph
- 0x215e26: (0x9394, 0),# East Asian ideograph
- 0x275e27: (0x9547, 0),# East Asian ideograph
- 0x275e28: (0x9501, 0),# East Asian ideograph
- 0x215e29: (0x93a2, 0),# East Asian ideograph
- 0x275e2a: (0x954d, 0),# East Asian ideograph
- 0x275e2b: (0x955c, 0),# East Asian ideograph
- 0x275e2c: (0x955d, 0),# East Asian ideograph
- 0x215e2d: (0x93d6, 0),# East Asian ideograph
- 0x275e2e: (0x955e, 0),# East Asian ideograph
- 0x215e2f: (0x93df, 0),# East Asian ideograph
- 0x275e30: (0x94ff, 0),# East Asian ideograph
- 0x275e31: (0x94fe, 0),# East Asian ideograph
- 0x215e32: (0x93e2, 0),# East Asian ideograph
- 0x215e33: (0x93dc, 0),# East Asian ideograph
- 0x215e34: (0x93e4, 0),# East Asian ideograph
- 0x225e35: (0x7598, 0),# East Asian ideograph
- 0x215e36: (0x93cd, 0),# East Asian ideograph
- 0x235e37: (0x9ec8, 0),# East Asian ideograph
- 0x215e39: (0x9403, 0),# East Asian ideograph
- 0x215e3a: (0x942e, 0),# East Asian ideograph
- 0x225e3b: (0x75a3, 0),# East Asian ideograph
- 0x215e3c: (0x9433, 0),# East Asian ideograph
- 0x215e3d: (0x9435, 0),# East Asian ideograph
- 0x215e3e: (0x943a, 0),# East Asian ideograph
- 0x215e3f: (0x9438, 0),# East Asian ideograph
- 0x215e40: (0x9432, 0),# East Asian ideograph
- 0x223a60: (0x675d, 0),# East Asian ideograph
- 0x215e42: (0x9451, 0),# East Asian ideograph
- 0x215e43: (0x9444, 0),# East Asian ideograph
- 0x215e44: (0x9463, 0),# East Asian ideograph
- 0x215e45: (0x9460, 0),# East Asian ideograph
- 0x215e46: (0x9472, 0),# East Asian ideograph
- 0x215e47: (0x9470, 0),# East Asian ideograph
- 0x215e48: (0x947e, 0),# East Asian ideograph
- 0x215e49: (0x947c, 0),# East Asian ideograph
- 0x235e4a: (0x9ed0, 0),# East Asian ideograph
- 0x215e4b: (0x947f, 0),# East Asian ideograph
- 0x215e4c: (0x9577, 0),# East Asian ideograph
- 0x215e4d: (0x9580, 0),# East Asian ideograph
- 0x215e4e: (0x9582, 0),# East Asian ideograph
- 0x215e4f: (0x9583, 0),# East Asian ideograph
- 0x215e50: (0x9589, 0),# East Asian ideograph
- 0x215e51: (0x9594, 0),# East Asian ideograph
- 0x215e52: (0x958f, 0),# East Asian ideograph
- 0x235e53: (0x9eda, 0),# East Asian ideograph
- 0x215e54: (0x9591, 0),# East Asian ideograph
- 0x235e55: (0x9edf, 0),# East Asian ideograph
- 0x215e56: (0x9592, 0),# East Asian ideograph
- 0x215e57: (0x9598, 0),# East Asian ideograph
- 0x215e58: (0x95a1, 0),# East Asian ideograph
- 0x235e59: (0x9ee5, 0),# East Asian ideograph
- 0x215e5a: (0x95a9, 0),# East Asian ideograph
- 0x215e5b: (0x95a3, 0),# East Asian ideograph
- 0x215e5c: (0x95a5, 0),# East Asian ideograph
- 0x215e5d: (0x95a4, 0),# East Asian ideograph
- 0x215e5e: (0x95b1, 0),# East Asian ideograph
- 0x215e5f: (0x95ad, 0),# East Asian ideograph
- 0x235e60: (0x9eee, 0),# East Asian ideograph
- 0x215e61: (0x95ca, 0),# East Asian ideograph
- 0x215e62: (0x95cb, 0),# East Asian ideograph
- 0x215e63: (0x95cc, 0),# East Asian ideograph
- 0x215e64: (0x95c8, 0),# East Asian ideograph
- 0x215e65: (0x95c6, 0),# East Asian ideograph
- 0x235e66: (0x9ef0, 0),# East Asian ideograph
- 0x215e67: (0x95d6, 0),# East Asian ideograph
- 0x215e68: (0x95d0, 0),# East Asian ideograph
- 0x215e69: (0x95dc, 0),# East Asian ideograph
- 0x215e6a: (0x95e1, 0),# East Asian ideograph
- 0x215e6b: (0x95e2, 0),# East Asian ideograph
- 0x215e6c: (0x961c, 0),# East Asian ideograph
- 0x215e6d: (0x9621, 0),# East Asian ideograph
- 0x215e6e: (0x9632, 0),# East Asian ideograph
- 0x215e6f: (0x9631, 0),# East Asian ideograph
- 0x215e70: (0x962e, 0),# East Asian ideograph
- 0x215e71: (0x962a, 0),# East Asian ideograph
- 0x215e72: (0x9640, 0),# East Asian ideograph
- 0x215e73: (0x963f, 0),# East Asian ideograph
- 0x215e74: (0x963b, 0),# East Asian ideograph
- 0x215e75: (0x9644, 0),# East Asian ideograph
- 0x215e76: (0x9650, 0),# East Asian ideograph
- 0x235e77: (0x9efc, 0),# East Asian ideograph
- 0x215e78: (0x964b, 0),# East Asian ideograph
- 0x215e79: (0x964d, 0),# East Asian ideograph
- 0x235e7a: (0x9efd, 0),# East Asian ideograph
- 0x215e7b: (0x9663, 0),# East Asian ideograph
- 0x235e7c: (0x9eff, 0),# East Asian ideograph
- 0x215e7d: (0x9661, 0),# East Asian ideograph
- 0x225e7e: (0x7603, 0),# East Asian ideograph
- 0x6f5465: (0xc479, 0),# Korean hangul
- 0x223a6b: (0x6763, 0),# East Asian ideograph
- 0x223a6e: (0x6753, 0),# East Asian ideograph
- 0x2d355c: (0x5434, 0),# East Asian ideograph
- 0x213a6f: (0x5b98, 0),# East Asian ideograph
- 0x6f5466: (0xc480, 0),# Korean hangul
- 0x6f5440: (0xc328, 0),# Korean hangul
- 0x293a70: (0x8e9c, 0),# East Asian ideograph
- 0x213a4b: (0x5b38, 0),# East Asian ideograph
- 0x294936: (0x95f3, 0),# East Asian ideograph
- 0x215821: (0x896a, 0),# East Asian ideograph
- 0x233a71: (0x8ea9, 0),# East Asian ideograph
- 0x692524: (0x30a4, 0),# Katakana letter I
- 0x213a72: (0x5ba5, 0),# East Asian ideograph
- 0x6f595f: (0xce04, 0),# Korean hangul
- 0x2d3b52: (0x6eba, 0),# East Asian ideograph
- 0x223a75: (0x6793, 0),# East Asian ideograph
- 0x23424a: (0x9216, 0),# East Asian ideograph
- 0x6f2521: (0x315c, 0),# Korean hangul
- 0x4b4767: (0x6db5, 0),# East Asian ideograph (variant of 214767 which maps to 6DB5)
- 0x28342c: (0x63ba, 0),# East Asian ideograph
- 0x295a44: (0x9e32, 0),# East Asian ideograph
- 0x223a78: (0x677c, 0),# East Asian ideograph
- 0x692523: (0x30a3, 0),# Katakana letter small I
- 0x292524: (0x848c, 0),# East Asian ideograph
- 0x29322a: (0x8bb5, 0),# East Asian ideograph
- 0x223a7a: (0x679f, 0),# East Asian ideograph
- 0x226065: (0x76a4, 0),# East Asian ideograph
- 0x23424b: (0x9211, 0),# East Asian ideograph
- 0x4d472c: (0x952a, 0),# East Asian ideograph
- 0x4d5934: (0x9ca6, 0),# East Asian ideograph
- 0x213a7c: (0x5bae, 0),# East Asian ideograph
- 0x692527: (0x30a7, 0),# Katakana letter small E
- 0x213d2e: (0x5eec, 0),# East Asian ideograph
- 0x233a7d: (0x8eb6, 0),# East Asian ideograph
- 0x692528: (0x30a8, 0),# Katakana letter E
- 0x6f5469: (0xc4d5, 0),# Korean hangul
- 0x69252a: (0x30aa, 0),# Katakana letter O
- 0x283b22: (0x4e2b, 0),# East Asian ideograph
- 0x22652c: (0x7892, 0),# East Asian ideograph
- 0x28342e: (0x63bc, 0),# East Asian ideograph
- 0x235359: (0x9a2f, 0),# East Asian ideograph
- 0x22252d: (0x5d47, 0),# East Asian ideograph
- 0x2d4829: (0x51cf, 0),# East Asian ideograph
- 0x6f5027: (0xba4b, 0),# Korean hangul
- 0x23252f: (0x84e7, 0),# East Asian ideograph
- 0x215f21: (0x9664, 0),# East Asian ideograph
- 0x215f22: (0x966a, 0),# East Asian ideograph
- 0x215f23: (0x9673, 0),# East Asian ideograph
- 0x215f24: (0x9678, 0),# East Asian ideograph
- 0x215f25: (0x9675, 0),# East Asian ideograph
- 0x215f26: (0x9672, 0),# East Asian ideograph
- 0x215f27: (0x9676, 0),# East Asian ideograph
- 0x215f28: (0x9677, 0),# East Asian ideograph
- 0x215f29: (0x9674, 0),# East Asian ideograph
- 0x215f2a: (0x9670, 0),# East Asian ideograph
- 0x215f2b: (0x968a, 0),# East Asian ideograph
- 0x215f2c: (0x968e, 0),# East Asian ideograph
- 0x215f2d: (0x968b, 0),# East Asian ideograph
- 0x215f2e: (0x967d, 0),# East Asian ideograph
- 0x235f2f: (0x9f0f, 0),# East Asian ideograph
- 0x215f30: (0x9686, 0),# East Asian ideograph
- 0x235f31: (0x9f10, 0),# East Asian ideograph
- 0x235f32: (0x9f12, 0),# East Asian ideograph
- 0x235f33: (0x9f16, 0),# East Asian ideograph
- 0x235f34: (0x9f17, 0),# East Asian ideograph
- 0x215f35: (0x9695, 0),# East Asian ideograph
- 0x215f36: (0x969c, 0),# East Asian ideograph
- 0x235f37: (0x9f1a, 0),# East Asian ideograph
- 0x215f38: (0x96a7, 0),# East Asian ideograph
- 0x215f39: (0x96a8, 0),# East Asian ideograph
- 0x215f3a: (0x96aa, 0),# East Asian ideograph
- 0x215f3b: (0x96b1, 0),# East Asian ideograph
- 0x215f3c: (0x96b4, 0),# East Asian ideograph
- 0x215f3d: (0x96b8, 0),# East Asian ideograph
- 0x225f3e: (0x7625, 0),# East Asian ideograph
- 0x225f3f: (0x761a, 0),# East Asian ideograph
- 0x215f40: (0x96c7, 0),# East Asian ideograph
- 0x215f41: (0x96c6, 0),# East Asian ideograph
- 0x215f42: (0x96c4, 0),# East Asian ideograph
- 0x215f43: (0x96c1, 0),# East Asian ideograph
- 0x215f44: (0x96c5, 0),# East Asian ideograph
- 0x215f45: (0x96cd, 0),# East Asian ideograph
- 0x215f46: (0x96cb, 0),# East Asian ideograph
- 0x215f47: (0x96c9, 0),# East Asian ideograph
- 0x215f48: (0x96cc, 0),# East Asian ideograph
- 0x215f49: (0x96d5, 0),# East Asian ideograph
- 0x215f4a: (0x96d6, 0),# East Asian ideograph
- 0x215f4b: (0x96dc, 0),# East Asian ideograph
- 0x215f4c: (0x96de, 0),# East Asian ideograph
- 0x215f4d: (0x96db, 0),# East Asian ideograph
- 0x215f4e: (0x96d9, 0),# East Asian ideograph
- 0x215f4f: (0x96e2, 0),# East Asian ideograph
- 0x225f50: (0x7622, 0),# East Asian ideograph
- 0x225f51: (0x762f, 0),# East Asian ideograph
- 0x215f52: (0x96ea, 0),# East Asian ideograph
- 0x215f53: (0x96ef, 0),# East Asian ideograph
- 0x215f54: (0x96f2, 0),# East Asian ideograph
- 0x215f55: (0x96fb, 0),# East Asian ideograph
- 0x215f56: (0x96f7, 0),# East Asian ideograph
- 0x215f57: (0x96f9, 0),# East Asian ideograph
- 0x215f58: (0x96f6, 0),# East Asian ideograph
- 0x215f59: (0x9700, 0),# East Asian ideograph
- 0x23424f: (0x92a2, 0),# East Asian ideograph
- 0x215f5b: (0x9704, 0),# East Asian ideograph
- 0x215f5c: (0x9709, 0),# East Asian ideograph
- 0x215f5d: (0x9706, 0),# East Asian ideograph
- 0x225f5e: (0x763b, 0),# East Asian ideograph
- 0x215f5f: (0x970e, 0),# East Asian ideograph
- 0x225f60: (0x763c, 0),# East Asian ideograph
- 0x215f61: (0x970f, 0),# East Asian ideograph
- 0x225f62: (0x7635, 0),# East Asian ideograph
- 0x215f63: (0x9713, 0),# East Asian ideograph
- 0x235f64: (0x9f3d, 0),# East Asian ideograph
- 0x215f65: (0x971e, 0),# East Asian ideograph
- 0x215f66: (0x972a, 0),# East Asian ideograph
- 0x225f67: (0x7648, 0),# East Asian ideograph
- 0x225f68: (0x764e, 0),# East Asian ideograph
- 0x235f69: (0x9f41, 0),# East Asian ideograph
- 0x225f6a: (0x7643, 0),# East Asian ideograph
- 0x215f6b: (0x973d, 0),# East Asian ideograph
- 0x215f6c: (0x973e, 0),# East Asian ideograph
- 0x215f6d: (0x9744, 0),# East Asian ideograph
- 0x215f6e: (0x9742, 0),# East Asian ideograph
- 0x225f6f: (0x7649, 0),# East Asian ideograph
- 0x215f70: (0x9751, 0),# East Asian ideograph
- 0x215f71: (0xfa1c, 0),# East Asian ideograph
- 0x215f72: (0x975b, 0),# East Asian ideograph (variant of 4B5F72 which maps to 975B)
- 0x215f73: (0x975c, 0),# East Asian ideograph
- 0x215f74: (0x975e, 0),# East Asian ideograph
- 0x225f75: (0x7654, 0),# East Asian ideograph
- 0x215f76: (0x9761, 0),# East Asian ideograph
- 0x215f78: (0x9766, 0),# East Asian ideograph
- 0x235f79: (0x9f4e, 0),# East Asian ideograph
- 0x225f7a: (0x765c, 0),# East Asian ideograph
- 0x235f7b: (0x9f4f, 0),# East Asian ideograph
- 0x235f7c: (0x9f54, 0),# East Asian ideograph
- 0x215f7d: (0x977c, 0),# East Asian ideograph
- 0x235f7e: (0x9f55, 0),# East Asian ideograph
- 0x216540: (0x4f66, 0),# East Asian ideograph
- 0x692541: (0x30c1, 0),# Katakana letter TI
- 0x6f5622: (0xc644, 0),# Korean hangul
- 0x6f546e: (0xc500, 0),# Korean hangul
- 0x6f502b: (0xba54, 0),# Korean hangul
- 0x215829: (0x898f, 0),# East Asian ideograph
- 0x234251: (0x9230, 0),# East Asian ideograph
- 0x225c28: (0x74b5, 0),# East Asian ideograph
- 0x216544: (0x4f67, 0),# East Asian ideograph
- 0x695429: (0x5726, 0),# East Asian ideograph
- 0x6f515c: (0xbd88, 0),# Korean hangul
- 0x292546: (0x8368, 0),# East Asian ideograph
- 0x233145: (0x89dc, 0),# East Asian ideograph
- 0x692547: (0x30c7, 0),# Katakana letter DE
- 0x225c29: (0x74ba, 0),# East Asian ideograph
- 0x213a48: (0x5b2a, 0),# East Asian ideograph
- 0x6f492d: (0xac85, 0),# Korean hangul
- 0x69254a: (0x30ca, 0),# Katakana letter NA
- 0x29254b: (0x835b, 0),# East Asian ideograph
- 0x2d482f: (0x6e07, 0),# East Asian ideograph
- 0x6f5442: (0xc330, 0),# Korean hangul
- 0x70586f: (0x4eeb, 0),# East Asian ideograph
- 0x274142: (0x6325, 0),# East Asian ideograph
- 0x6f502d: (0xba58, 0),# Korean hangul
- 0x23254d: (0x8553, 0),# East Asian ideograph
- 0x21654e: (0x4f5a, 0),# East Asian ideograph
- 0x335065: (0x7a45, 0),# East Asian ideograph
- 0x2e3b22: (0x690f, 0),# East Asian ideograph
- 0x224f61: (0x7044, 0),# East Asian ideograph
- 0x692550: (0x30d0, 0),# Katakana letter BA
- 0x6f5c71: (0xd57c, 0),# Korean hangul
- 0x222551: (0x5d8e, 0),# East Asian ideograph
- 0x6f586b: (0xcb58, 0),# Korean hangul
- 0x335834: (0x89e7, 0),# East Asian ideograph
- 0x2d593d: (0x8ae9, 0),# East Asian ideograph
- 0x4b4476: (0x685f, 0),# East Asian ideograph
- 0x692555: (0x30d5, 0),# Katakana letter HU
- 0x216556: (0x4f82, 0),# East Asian ideograph
- 0x6f5a3a: (0xcf2d, 0),# Korean hangul
- 0x6f502f: (0xba64, 0),# Korean hangul
- 0x692557: (0x30d7, 0),# Katakana letter PU
- 0x294942: (0x9606, 0),# East Asian ideograph
- 0x234255: (0x9248, 0),# East Asian ideograph
- 0x692558: (0x30d8, 0),# Katakana letter HE
- 0x47347b: (0x8c2b, 0),# East Asian ideograph
- 0x6f5262: (0xc0af, 0),# Korean hangul
- 0x23255a: (0x8546, 0),# East Asian ideograph
- 0x216021: (0x978d, 0),# East Asian ideograph
- 0x226022: (0x7664, 0),# East Asian ideograph
- 0x236023: (0x9f57, 0),# East Asian ideograph
- 0x226024: (0x7659, 0),# East Asian ideograph
- 0x216025: (0x97a0, 0),# East Asian ideograph
- 0x216026: (0x97a3, 0),# East Asian ideograph
- 0x216027: (0x97a6, 0),# East Asian ideograph
- 0x236028: (0x9f60, 0),# East Asian ideograph
- 0x216029: (0x97c3, 0),# East Asian ideograph
- 0x21602a: (0x97c1, 0),# East Asian ideograph
- 0x22602b: (0x765f, 0),# East Asian ideograph
- 0x21602c: (0x97cb, 0),# East Asian ideograph
- 0x21602d: (0x97cc, 0),# East Asian ideograph
- 0x21602e: (0x97d3, 0),# East Asian ideograph
- 0x21602f: (0x97dc, 0),# East Asian ideograph
- 0x216030: (0x97ed, 0),# East Asian ideograph
- 0x216031: (0x97f3, 0),# East Asian ideograph
- 0x226032: (0x7667, 0),# East Asian ideograph
- 0x216033: (0x7adf, 0),# East Asian ideograph
- 0x216034: (0x97f6, 0),# East Asian ideograph
- 0x226035: (0x766a, 0),# East Asian ideograph
- 0x216036: (0x97ff, 0),# East Asian ideograph (variant of 456036 which maps to 97FF)
- 0x226037: (0x766d, 0),# East Asian ideograph
- 0x226038: (0x766f, 0),# East Asian ideograph
- 0x216039: (0x9803, 0),# East Asian ideograph
- 0x22603a: (0x7670, 0),# East Asian ideograph
- 0x21603b: (0x9806, 0),# East Asian ideograph
- 0x21603c: (0x9808, 0),# East Asian ideograph
- 0x21603d: (0x9810, 0),# East Asian ideograph
- 0x21603e: (0x980a, 0),# East Asian ideograph
- 0x21603f: (0x9811, 0),# East Asian ideograph
- 0x226040: (0x7676, 0),# East Asian ideograph
- 0x226041: (0x7677, 0),# East Asian ideograph
- 0x216042: (0x980c, 0),# East Asian ideograph
- 0x216043: (0x9817, 0),# East Asian ideograph
- 0x216044: (0x9818, 0),# East Asian ideograph (variant of 4B6044 which maps to 9818)
- 0x216045: (0x9821, 0),# East Asian ideograph
- 0x216046: (0x982d, 0),# East Asian ideograph
- 0x216047: (0x9830, 0),# East Asian ideograph
- 0x226048: (0x7680, 0),# East Asian ideograph
- 0x21582f: (0x89b2, 0),# East Asian ideograph
- 0x22604a: (0x768b, 0),# East Asian ideograph
- 0x21604b: (0x9837, 0),# East Asian ideograph
- 0x21604c: (0x9824, 0),# East Asian ideograph
- 0x21604d: (0x9846, 0),# East Asian ideograph
- 0x21604e: (0x9854, 0),# East Asian ideograph
- 0x21604f: (0x984d, 0),# East Asian ideograph
- 0x216050: (0x984c, 0),# East Asian ideograph
- 0x216051: (0x984e, 0),# East Asian ideograph
- 0x226052: (0x7695, 0),# East Asian ideograph
- 0x216053: (0x985e, 0),# East Asian ideograph (variant of 4B6053 which maps to 985E)
- 0x216054: (0x985a, 0),# East Asian ideograph
- 0x226055: (0x656b, 0),# East Asian ideograph
- 0x216056: (0x9867, 0),# East Asian ideograph
- 0x216057: (0x986b, 0),# East Asian ideograph
- 0x216058: (0x986f, 0),# East Asian ideograph
- 0x226059: (0x7699, 0),# East Asian ideograph
- 0x21605a: (0x9870, 0),# East Asian ideograph
- 0x21605b: (0x98a8, 0),# East Asian ideograph
- 0x21605c: (0x98af, 0),# East Asian ideograph
- 0x22605d: (0x769c, 0),# East Asian ideograph
- 0x21605e: (0x98b3, 0),# East Asian ideograph
- 0x22605f: (0x769d, 0),# East Asian ideograph
- 0x216060: (0x98ba, 0),# East Asian ideograph
- 0x236061: (0x9f93, 0),# East Asian ideograph
- 0x216062: (0x98c4, 0),# East Asian ideograph
- 0x216063: (0x98db, 0),# East Asian ideograph
- 0x216064: (0x98df, 0),# East Asian ideograph
- 0x216065: (0x98e2, 0),# East Asian ideograph
- 0x226066: (0x76a5, 0),# East Asian ideograph
- 0x226067: (0x76a6, 0),# East Asian ideograph
- 0x216068: (0x98ed, 0),# East Asian ideograph
- 0x216069: (0x98ea, 0),# East Asian ideograph
- 0x21606a: (0x98ee, 0),# East Asian ideograph
- 0x23606b: (0x9fa0, 0),# East Asian ideograph
- 0x21606c: (0x98fc, 0),# East Asian ideograph
- 0x21606d: (0x98f4, 0),# East Asian ideograph
- 0x21606e: (0x98fd, 0),# East Asian ideograph
- 0x21606f: (0x98fe, 0),# East Asian ideograph
- 0x216070: (0x9903, 0),# East Asian ideograph
- 0x216071: (0x990a, 0),# East Asian ideograph
- 0x236072: (0x9fa4, 0),# East Asian ideograph
- 0x216073: (0x9909, 0),# East Asian ideograph
- 0x226074: (0x76b8, 0),# East Asian ideograph
- 0x216075: (0x9912, 0),# East Asian ideograph
- 0x216076: (0x9918, 0),# East Asian ideograph
- 0x226077: (0x76bd, 0),# East Asian ideograph
- 0x216078: (0x9905, 0),# East Asian ideograph
- 0x216079: (0x9928, 0),# East Asian ideograph
- 0x21607a: (0x991e, 0),# East Asian ideograph
- 0x21607b: (0x991b, 0),# East Asian ideograph
- 0x21607c: (0x9921, 0),# East Asian ideograph
- 0x21607d: (0x9935, 0),# East Asian ideograph
- 0x21607e: (0x993e, 0),# East Asian ideograph
- 0x6f5369: (0xc258, 0),# Korean hangul
- 0x6f5033: (0xba71, 0),# Korean hangul
- 0x213a5b: (0x5b6b, 0),# East Asian ideograph
- 0x69256b: (0x30eb, 0),# Katakana letter RU
- 0x69256c: (0x30ec, 0),# Katakana letter RE
- 0x293670: (0x8d49, 0),# East Asian ideograph
- 0x69656d: (0x7e83, 0),# East Asian ideograph
- 0x224f67: (0x7047, 0),# East Asian ideograph
- 0x235172: (0x994c, 0),# East Asian ideograph
- 0x69256f: (0x30ef, 0),# Katakana letter WA
- 0x2e4c35: (0x6de5, 0),# East Asian ideograph
- 0x6f5034: (0xba74, 0),# Korean hangul
- 0x213a5c: (0x5b70, 0),# East Asian ideograph
- 0x6f5934: (0xcc59, 0),# Korean hangul
- 0x4d4f39: (0x988c, 0),# East Asian ideograph
- 0x292571: (0x835e, 0),# East Asian ideograph
- 0x226573: (0x78e0, 0),# East Asian ideograph
- 0x6f4e4c: (0xb6b1, 0),# Korean hangul
- 0x292574: (0x83b8, 0),# East Asian ideograph
- 0x4b3a47: (0x88ca, 0),# East Asian ideograph
- 0x6f5035: (0xba78, 0),# Korean hangul
- 0x692575: (0x30f5, 0),# Katakana letter small KA
- 0x294948: (0x960f, 0),# East Asian ideograph
- 0x213378: (0x5275, 0),# East Asian ideograph
- 0x225c32: (0x74cc, 0),# East Asian ideograph
- 0x216576: (0x4f9c, 0),# East Asian ideograph
- 0x215021: (0x7b4d, 0),# East Asian ideograph
- 0x6f5c6c: (0xd56d, 0),# Korean hangul
- 0x232577: (0x858c, 0),# East Asian ideograph
- 0x214b6a: (0x74ca, 0),# East Asian ideograph
- 0x224f69: (0x7049, 0),# East Asian ideograph
- 0x216940: (0x5133, 0),# East Asian ideograph
- 0x692578: (0x309c, 0),# Katakana-hiragana semi-voiced sound mark
- 0x275023: (0x8345, 0),# East Asian ideograph
- 0x2e742e: (0x7516, 0),# East Asian ideograph
- 0x6f5479: (0xc53d, 0),# Korean hangul
- 0x6f5024: (0xba40, 0),# Korean hangul
- 0x6f5036: (0xba83, 0),# Korean hangul
- 0x213a5e: (0x5b71, 0),# East Asian ideograph
- 0x294949: (0x9608, 0),# East Asian ideograph
- 0x225025: (0x7066, 0),# East Asian ideograph
- 0x2e257b: (0x5d1f, 0),# East Asian ideograph
- 0x6f5026: (0xba49, 0),# Korean hangul
- 0x6f4a5f: (0xaecd, 0),# Korean hangul
- 0x225027: (0x7065, 0),# East Asian ideograph
- 0x235369: (0x9a36, 0),# East Asian ideograph
- 0x225028: (0x7068, 0),# East Asian ideograph
- 0x234f26: (0x9816, 0),# East Asian ideograph
- 0x215029: (0x7b95, 0),# East Asian ideograph
- 0x276272: (0x9ee9, 0),# East Asian ideograph
- 0x213a5f: (0x5b75, 0),# East Asian ideograph
- 0x27502a: (0x94b3, 0),# East Asian ideograph
- 0x27502b: (0x7b3a, 0),# East Asian ideograph
- 0x6f502c: (0xba55, 0),# Korean hangul
- 0x224f6b: (0x7055, 0),# East Asian ideograph
- 0x23536a: (0x9a2e, 0),# East Asian ideograph
- 0x2d502d: (0x7b5d, 0),# East Asian ideograph
- 0x4c4339: (0x69de, 0),# East Asian ideograph
- 0x6f502e: (0xba5c, 0),# Korean hangul
- 0x6f5038: (0xba85, 0),# Korean hangul
- 0x213a60: (0x5b78, 0),# East Asian ideograph
- 0x21502f: (0x7bad, 0),# East Asian ideograph
- 0x215030: (0x7bc4, 0),# East Asian ideograph
- 0x216122: (0x993d, 0),# East Asian ideograph
- 0x226123: (0x76cb, 0),# East Asian ideograph
- 0x216124: (0x9952, 0),# East Asian ideograph
- 0x216125: (0x9951, 0),# East Asian ideograph
- 0x226126: (0x76cc, 0),# East Asian ideograph
- 0x216127: (0x995e, 0),# East Asian ideograph
- 0x216128: (0x9996, 0),# East Asian ideograph
- 0x216129: (0x9999, 0),# East Asian ideograph
- 0x21612a: (0x99a5, 0),# East Asian ideograph
- 0x21612b: (0x99a8, 0),# East Asian ideograph
- 0x21612c: (0x99ac, 0),# East Asian ideograph
- 0x21612d: (0x99ae, 0),# East Asian ideograph
- 0x21612e: (0x99ad, 0),# East Asian ideograph
- 0x21612f: (0x99b3, 0),# East Asian ideograph
- 0x216130: (0x99b1, 0),# East Asian ideograph
- 0x216131: (0x99b4, 0),# East Asian ideograph
- 0x216132: (0x99c1, 0),# East Asian ideograph
- 0x275033: (0x8282, 0),# East Asian ideograph
- 0x216134: (0x99dd, 0),# East Asian ideograph
- 0x216135: (0x99d5, 0),# East Asian ideograph
- 0x216136: (0x99df, 0),# East Asian ideograph
- 0x216137: (0x99db, 0),# East Asian ideograph
- 0x216138: (0x99d2, 0),# East Asian ideograph
- 0x216139: (0x99d9, 0),# East Asian ideograph
- 0x21613a: (0x99d1, 0),# East Asian ideograph
- 0x21613b: (0x99ed, 0),# East Asian ideograph
- 0x21613c: (0x99f1, 0),# East Asian ideograph
- 0x21613d: (0x9a01, 0),# East Asian ideograph
- 0x21613e: (0x99ff, 0),# East Asian ideograph
- 0x21613f: (0x99e2, 0),# East Asian ideograph
- 0x216140: (0x9a0e, 0),# East Asian ideograph
- 0x216141: (0x9a19, 0),# East Asian ideograph
- 0x216142: (0x9a16, 0),# East Asian ideograph
- 0x216143: (0x9a2b, 0),# East Asian ideograph
- 0x226144: (0x76ed, 0),# East Asian ideograph
- 0x216145: (0x9a37, 0),# East Asian ideograph
- 0x216146: (0x9a43, 0),# East Asian ideograph
- 0x216147: (0x9a45, 0),# East Asian ideograph
- 0x226148: (0x76f1, 0),# East Asian ideograph
- 0x216149: (0x9a3e, 0),# East Asian ideograph
- 0x21614a: (0x9a55, 0),# East Asian ideograph
- 0x21614b: (0x9a5a, 0),# East Asian ideograph
- 0x21614c: (0x9a5b, 0),# East Asian ideograph
- 0x21614d: (0x9a57, 0),# East Asian ideograph
- 0x21614e: (0x9a5f, 0),# East Asian ideograph
- 0x22614f: (0x7708, 0),# East Asian ideograph
- 0x226150: (0x7707, 0),# East Asian ideograph
- 0x275038: (0x7bac, 0),# East Asian ideograph
- 0x216152: (0x9aa8, 0),# East Asian ideograph
- 0x216153: (0x9aaf, 0),# East Asian ideograph
- 0x226154: (0x770a, 0),# East Asian ideograph
- 0x216155: (0x9ab7, 0),# East Asian ideograph
- 0x216156: (0x9ab8, 0),# East Asian ideograph
- 0x215039: (0x7be4, 0),# East Asian ideograph
- 0x216158: (0x9acf, 0),# East Asian ideograph
- 0x226159: (0x76fb, 0),# East Asian ideograph
- 0x21615a: (0x9ad4, 0),# East Asian ideograph
- 0x21615b: (0x9ad2, 0),# East Asian ideograph
- 0x21615c: (0x9ad8, 0),# East Asian ideograph
- 0x21615d: (0x9ae5, 0),# East Asian ideograph
- 0x22615e: (0x772b, 0),# East Asian ideograph
- 0x21615f: (0x9aee, 0),# East Asian ideograph
- 0x216160: (0x9afb, 0),# East Asian ideograph
- 0x216161: (0x9aed, 0),# East Asian ideograph
- 0x216162: (0x9b03, 0),# East Asian ideograph
- 0x216163: (0x9b06, 0),# East Asian ideograph
- 0x216164: (0x9b0d, 0),# East Asian ideograph
- 0x216165: (0x9b1a, 0),# East Asian ideograph
- 0x216166: (0x9b22, 0),# East Asian ideograph
- 0x216167: (0x9b25, 0),# East Asian ideograph
- 0x216168: (0x9b27, 0),# East Asian ideograph
- 0x27503c: (0x7b5b, 0),# East Asian ideograph
- 0x21616a: (0x9b31, 0),# East Asian ideograph
- 0x21616b: (0x9b32, 0),# East Asian ideograph
- 0x21616c: (0x9b3c, 0),# East Asian ideograph
- 0x21616d: (0x9b41, 0),# East Asian ideograph
- 0x21616e: (0x9b42, 0),# East Asian ideograph
- 0x22616f: (0x7721, 0),# East Asian ideograph
- 0x216170: (0x9b44, 0),# East Asian ideograph
- 0x216171: (0x9b4f, 0),# East Asian ideograph
- 0x216172: (0x9b54, 0),# East Asian ideograph
- 0x216173: (0x9b58, 0),# East Asian ideograph
- 0x216174: (0x9b5a, 0),# East Asian ideograph
- 0x226175: (0x7739, 0),# East Asian ideograph
- 0x226176: (0x772f, 0),# East Asian ideograph
- 0x216177: (0x9b91, 0),# East Asian ideograph
- 0x216178: (0x9bab, 0),# East Asian ideograph
- 0x216179: (0x9bae, 0),# East Asian ideograph
- 0x21617a: (0x9baa, 0),# East Asian ideograph
- 0x21617b: (0x9bca, 0),# East Asian ideograph
- 0x21617c: (0x9bc9, 0),# East Asian ideograph
- 0x21617d: (0x9be8, 0),# East Asian ideograph
- 0x21617e: (0x9be7, 0),# East Asian ideograph
- 0x215040: (0x7bf7, 0),# East Asian ideograph
- 0x275041: (0x7b80, 0),# East Asian ideograph
- 0x225042: (0x7086, 0),# East Asian ideograph
- 0x6f503c: (0xbaab, 0),# Korean hangul
- 0x29596b: (0x9ca1, 0),# East Asian ideograph
- 0x335f3d: (0x96b7, 0),# East Asian ideograph
- 0x29494f: (0x960a, 0),# East Asian ideograph
- 0x6f5043: (0xbac3, 0),# Korean hangul
- 0x4b5044: (0x7c27, 0),# East Asian ideograph (variant of 215044 which maps to 7C27)
- 0x275045: (0x7baa, 0),# East Asian ideograph
- 0x2e3d73: (0x7a1c, 0),# East Asian ideograph
- 0x275046: (0x7bd1, 0),# East Asian ideograph
- 0x6f5047: (0xbb34, 0),# Korean hangul
- 0x6f503d: (0xbaac, 0),# Korean hangul
- 0x213a65: (0x5b87, 0),# East Asian ideograph
- 0x294950: (0x960c, 0),# East Asian ideograph
- 0x235048: (0x98b8, 0),# East Asian ideograph
- 0x225c3a: (0x74d4, 0),# East Asian ideograph
- 0x27583a: (0x8ba3, 0),# East Asian ideograph
- 0x225049: (0x7084, 0),# East Asian ideograph
- 0x2d594c: (0x8b72, 0),# East Asian ideograph
- 0x6f5960: (0xce20, 0),# Korean hangul
- 0x22504a: (0x7081, 0),# East Asian ideograph
- 0x235370: (0x9a41, 0),# East Asian ideograph
- 0x21504b: (0x7c3d, 0),# East Asian ideograph
- 0x4d3032: (0x88ae, 0),# East Asian ideograph
- 0x6f5a3d: (0xcf54, 0),# Korean hangul
- 0x27504c: (0x7bee, 0),# East Asian ideograph
- 0x274153: (0x6363, 0),# East Asian ideograph
- 0x4d5c6b: (0x9d50, 0),# East Asian ideograph
- 0x213a66: (0x5b88, 0),# East Asian ideograph
- 0x21504d: (0x7c4c, 0),# East Asian ideograph
- 0x234264: (0x925e, 0),# East Asian ideograph
- 0x2d3b77: (0x5ce9, 0),# East Asian ideograph
- 0x21504e: (0x7c4d, 0),# East Asian ideograph
- 0x6f5025: (0xba48, 0),# Korean hangul
- 0x2d504f: (0x7c58, 0),# East Asian ideograph
- 0x69542a: (0x5737, 0),# East Asian ideograph
- 0x293b59: (0x8f82, 0),# East Asian ideograph
- 0x4b4b2b: (0x7363, 0),# East Asian ideograph
- 0x275050: (0x7b3c, 0),# East Asian ideograph
- 0x275051: (0x7c41, 0),# East Asian ideograph
- 0x6f503f: (0xbab8, 0),# Korean hangul
- 0x213a67: (0x5b89, 0),# East Asian ideograph
- 0x294952: (0x960d, 0),# East Asian ideograph
- 0x275052: (0x7b7e, 0),# East Asian ideograph (duplicate simplified)
- 0x6f5963: (0xce35, 0),# Korean hangul
- 0x2d3b78: (0x5cef, 0),# East Asian ideograph
- 0x275053: (0x7bf1, 0),# East Asian ideograph
- 0x4d594e: (0x9bf5, 0),# East Asian ideograph
- 0x3f614c: (0x99c5, 0),# East Asian ideograph
- 0x275054: (0x7ba9, 0),# East Asian ideograph
- 0x4b6258: (0x68ba, 0),# East Asian ideograph
- 0x275055: (0x5401, 0),# East Asian ideograph
- 0x6f245a: (0x3139, 0),# Korean hangul
- 0x225056: (0x7088, 0),# East Asian ideograph
- 0x6f5040: (0xbab9, 0),# Korean hangul
- 0x213a68: (0x5b85, 0),# East Asian ideograph
- 0x21583e: (0x8a0c, 0),# East Asian ideograph
- 0x2d3b79: (0x5d8b, 0),# East Asian ideograph
- 0x6f5058: (0xbb88, 0),# Korean hangul
- 0x2d594f: (0x8b83, 0),# East Asian ideograph
- 0x225059: (0x708c, 0),# East Asian ideograph
- 0x224b31: (0x6e5d, 0),# East Asian ideograph
- 0x216221: (0x9c13, 0),# East Asian ideograph
- 0x226222: (0x7725, 0),# East Asian ideograph
- 0x216223: (0x9bfd, 0),# East Asian ideograph
- 0x216224: (0x9c2d, 0),# East Asian ideograph
- 0x216225: (0x9c25, 0),# East Asian ideograph
- 0x226226: (0x7734, 0),# East Asian ideograph
- 0x216227: (0x9c3e, 0),# East Asian ideograph
- 0x216228: (0x9c3b, 0),# East Asian ideograph
- 0x216229: (0x9c54, 0),# East Asian ideograph
- 0x21622a: (0x9c57, 0),# East Asian ideograph
- 0x21622b: (0x9c56, 0),# East Asian ideograph
- 0x21622c: (0x9c49, 0),# East Asian ideograph
- 0x22622d: (0x7747, 0),# East Asian ideograph
- 0x21622e: (0x9c78, 0),# East Asian ideograph
- 0x21622f: (0x9ce5, 0),# East Asian ideograph
- 0x216230: (0x9ce9, 0),# East Asian ideograph
- 0x226231: (0x7745, 0),# East Asian ideograph
- 0x226232: (0x774d, 0),# East Asian ideograph
- 0x216233: (0x9cf3, 0),# East Asian ideograph
- 0x216234: (0x9d06, 0),# East Asian ideograph
- 0x216235: (0x9d09, 0),# East Asian ideograph
- 0x216236: (0x9d15, 0),# East Asian ideograph
- 0x226237: (0x774e, 0),# East Asian ideograph
- 0x216238: (0x9d28, 0),# East Asian ideograph
- 0x216239: (0x9d26, 0),# East Asian ideograph
- 0x22623a: (0x775f, 0),# East Asian ideograph
- 0x21505f: (0x7cbd, 0),# East Asian ideograph
- 0x21623c: (0x9d3b, 0),# East Asian ideograph
- 0x21623d: (0x9d3f, 0),# East Asian ideograph
- 0x22623e: (0x7752, 0),# East Asian ideograph
- 0x21623f: (0x9d51, 0),# East Asian ideograph
- 0x216240: (0x9d60, 0),# East Asian ideograph
- 0x215060: (0x7cb9, 0),# East Asian ideograph
- 0x226242: (0x7758, 0),# East Asian ideograph
- 0x216243: (0x9d72, 0),# East Asian ideograph
- 0x226244: (0x7756, 0),# East Asian ideograph
- 0x226245: (0x775a, 0),# East Asian ideograph
- 0x216246: (0x9db4, 0),# East Asian ideograph
- 0x216247: (0x9daf, 0),# East Asian ideograph
- 0x216248: (0x9dc2, 0),# East Asian ideograph
- 0x216249: (0x9dd3, 0),# East Asian ideograph
- 0x21624a: (0x9dd7, 0),# East Asian ideograph
- 0x21624b: (0x9de5, 0),# East Asian ideograph
- 0x21624c: (0x9df9, 0),# East Asian ideograph
- 0x215062: (0x7cca, 0),# East Asian ideograph
- 0x21624e: (0x9e1a, 0),# East Asian ideograph
- 0x22624f: (0x7762, 0),# East Asian ideograph
- 0x216250: (0x9e79, 0),# East Asian ideograph
- 0x216251: (0x9e7d, 0),# East Asian ideograph
- 0x226252: (0x7780, 0),# East Asian ideograph
- 0x216253: (0x9e7f, 0),# East Asian ideograph
- 0x216254: (0x9e82, 0),# East Asian ideograph
- 0x216255: (0x9e8b, 0),# East Asian ideograph
- 0x226256: (0x776f, 0),# East Asian ideograph
- 0x216257: (0x9e92, 0),# East Asian ideograph
- 0x216258: (0x9e93, 0),# East Asian ideograph
- 0x224b33: (0x6e30, 0),# East Asian ideograph
- 0x21625a: (0x9e9f, 0),# East Asian ideograph
- 0x21625b: (0x9ea5, 0),# East Asian ideograph
- 0x21625c: (0x9ea9, 0),# East Asian ideograph
- 0x21625d: (0x9eb4, 0),# East Asian ideograph
- 0x21625e: (0x9eb5, 0),# East Asian ideograph
- 0x22625f: (0x7785, 0),# East Asian ideograph
- 0x216260: (0x9ebc, 0),# East Asian ideograph
- 0x216261: (0x9ebe, 0),# East Asian ideograph
- 0x216262: (0x9ec3, 0),# East Asian ideograph
- 0x216263: (0x9ecd, 0),# East Asian ideograph
- 0x216264: (0x9ece, 0),# East Asian ideograph
- 0x216265: (0x9ecf, 0),# East Asian ideograph
- 0x226266: (0x778b, 0),# East Asian ideograph (variant of 4C6266 which maps to 778B)
- 0x216267: (0x58a8, 0),# East Asian ideograph
- 0x216268: (0x9ed8, 0),# East Asian ideograph
- 0x216269: (0x9ed4, 0),# East Asian ideograph
- 0x22626a: (0x778d, 0),# East Asian ideograph
- 0x21626b: (0x9edc, 0),# East Asian ideograph
- 0x21626c: (0x9edb, 0),# East Asian ideograph
- 0x21626d: (0x9edd, 0),# East Asian ideograph
- 0x21626e: (0x9ee0, 0),# East Asian ideograph
- 0x21626f: (0x9ee8, 0),# East Asian ideograph
- 0x216270: (0x9eef, 0),# East Asian ideograph
- 0x235068: (0x98f1, 0),# East Asian ideograph
- 0x226272: (0x7798, 0),# East Asian ideograph
- 0x226273: (0x7796, 0),# East Asian ideograph
- 0x216274: (0x9f0e, 0),# East Asian ideograph
- 0x226275: (0x77a2, 0),# East Asian ideograph
- 0x226276: (0x7799, 0),# East Asian ideograph
- 0x216277: (0x9f19, 0),# East Asian ideograph
- 0x216278: (0x9f20, 0),# East Asian ideograph
- 0x216279: (0x9f2c, 0),# East Asian ideograph
- 0x22627a: (0x77b5, 0),# East Asian ideograph
- 0x21627b: (0x9f3b, 0),# East Asian ideograph
- 0x21627c: (0x9f3e, 0),# East Asian ideograph
- 0x22627d: (0x77b7, 0),# East Asian ideograph
- 0x21627e: (0x9f4b, 0),# East Asian ideograph
- 0x6f5044: (0xbafc, 0),# Korean hangul
- 0x27506b: (0x7cae, 0),# East Asian ideograph
- 0x6f5964: (0xce58, 0),# Korean hangul
- 0x225c41: (0x74db, 0),# East Asian ideograph
- 0x236040: (0x9f6f, 0),# East Asian ideograph
- 0x23506c: (0x98eb, 0),# East Asian ideograph
- 0x6f506d: (0xbc14, 0),# Korean hangul
- 0x23315e: (0x89f5, 0),# East Asian ideograph
- 0x6f506e: (0xbc15, 0),# Korean hangul
- 0x4b4049: (0x62d0, 0),# East Asian ideograph
- 0x234f34: (0x982b, 0),# East Asian ideograph
- 0x22506f: (0x70a7, 0),# East Asian ideograph
- 0x27415a: (0x5c4f, 0),# East Asian ideograph
- 0x696273: (0x78b5, 0),# East Asian ideograph
- 0x275070: (0x7eaa, 0),# East Asian ideograph
- 0x215843: (0x8a13, 0),# East Asian ideograph
- 0x225071: (0x70b5, 0),# East Asian ideograph
- 0x275072: (0x7ea2, 0),# East Asian ideograph
- 0x295a65: (0x9e39, 0),# East Asian ideograph
- 0x215073: (0x7d09, 0),# East Asian ideograph
- 0x396179: (0x5c20, 0),# East Asian ideograph
- 0x275074: (0x7ea6, 0),# East Asian ideograph
- 0x27415b: (0x631a, 0),# East Asian ideograph
- 0x6f5046: (0xbb18, 0),# Korean hangul
- 0x275075: (0x7ea5, 0),# East Asian ideograph
- 0x215844: (0x8a2a, 0),# East Asian ideograph
- 0x275076: (0x7eba, 0),# East Asian ideograph
- 0x213b21: (0x5bc6, 0),# East Asian ideograph
- 0x275077: (0x7eb9, 0),# East Asian ideograph
- 0x213b22: (0x5bc7, 0),# East Asian ideograph
- 0x235379: (0x9a42, 0),# East Asian ideograph
- 0x215078: (0x7d0a, 0),# East Asian ideograph
- 0x6f5729: (0xc7b0, 0),# Korean hangul
- 0x213b23: (0x5bc5, 0),# East Asian ideograph
- 0x6f5c72: (0xd584, 0),# Korean hangul
- 0x6f5079: (0xbc2d, 0),# Korean hangul
- 0x6f536d: (0xc27c, 0),# Korean hangul
- 0x29495a: (0x9612, 0),# East Asian ideograph
- 0x27507a: (0x7ead, 0),# East Asian ideograph
- 0x213b25: (0x5bc2, 0),# East Asian ideograph
- 0x22507b: (0x70e5, 0),# East Asian ideograph
- 0x213b26: (0x5bbf, 0),# East Asian ideograph
- 0x21507c: (0x7d15, 0),# East Asian ideograph
- 0x224f7b: (0x705e, 0),# East Asian ideograph
- 0x23537a: (0x9a44, 0),# East Asian ideograph
- 0x22507d: (0x70d3, 0),# East Asian ideograph
- 0x234f37: (0x9820, 0),# East Asian ideograph
- 0x27507e: (0x7ebd, 0),# East Asian ideograph
- 0x335276: (0x8061, 0),# East Asian ideograph
- 0x6f5048: (0xbb35, 0),# Korean hangul
- 0x215846: (0x8a1d, 0),# East Asian ideograph
- 0x2d3b2a: (0x5ebd, 0),# East Asian ideograph
- 0x2d5957: (0x7aea, 0),# East Asian ideograph
- 0x4b386c: (0x5841, 0),# East Asian ideograph
- 0x295a68: (0x9e3a, 0),# East Asian ideograph
- 0x453336: (0x5b82, 0),# East Asian ideograph
- 0x224b39: (0x6e6b, 0),# East Asian ideograph
- 0x213b2d: (0x5be8, 0),# East Asian ideograph
- 0x273b2e: (0x5bdd, 0),# East Asian ideograph
- 0x6f5049: (0xbb36, 0),# Korean hangul
- 0x213a71: (0x5b9b, 0),# East Asian ideograph
- 0x6f5965: (0xce59, 0),# Korean hangul
- 0x213b2f: (0x5be4, 0),# East Asian ideograph
- 0x4b515a: (0x7e01, 0),# East Asian ideograph
- 0x216321: (0x9f52, 0),# East Asian ideograph
- 0x216322: (0x9f5f, 0),# East Asian ideograph
- 0x216323: (0x9f63, 0),# East Asian ideograph
- 0x216324: (0x9f61, 0),# East Asian ideograph (variant of 456324 which maps to 9F61)
- 0x216325: (0x9f66, 0),# East Asian ideograph
- 0x216326: (0x9f5c, 0),# East Asian ideograph
- 0x233b31: (0x8ece, 0),# East Asian ideograph
- 0x216328: (0x9f6a, 0),# East Asian ideograph
- 0x216329: (0x9f77, 0),# East Asian ideograph
- 0x21632a: (0x9f72, 0),# East Asian ideograph
- 0x21632b: (0x9f8d, 0),# East Asian ideograph
- 0x22632c: (0x77bc, 0),# East Asian ideograph
- 0x21632d: (0x9f9c, 0),# East Asian ideograph
- 0x216330: (0x8288, 0),# East Asian ideograph
- 0x213b33: (0x5bdf, 0),# East Asian ideograph
- 0x6f504a: (0xbb38, 0),# Korean hangul
- 0x226335: (0x77cd, 0),# East Asian ideograph
- 0x29307d: (0x89cf, 0),# East Asian ideograph
- 0x696733: (0x81a4, 0),# East Asian ideograph
- 0x225c47: (0x74de, 0),# East Asian ideograph
- 0x6f4c3f: (0xb1d4, 0),# Korean hangul
- 0x213b35: (0x5bec, 0),# East Asian ideograph
- 0x706340: (0x61b7, 0),# East Asian ideograph
- 0x45462b: (0x7688, 0),# East Asian ideograph
- 0x226345: (0x77de, 0),# East Asian ideograph
- 0x226346: (0x77df, 0),# East Asian ideograph
- 0x23537d: (0x9a48, 0),# East Asian ideograph
- 0x224b3b: (0x6e8b, 0),# East Asian ideograph
- 0x213b37: (0x5beb, 0),# East Asian ideograph
- 0x335738: (0x880f, 0),# East Asian ideograph
- 0x69634e: (0x7a43, 0),# East Asian ideograph
- 0x22634f: (0x77e7, 0),# East Asian ideograph
- 0x274160: (0x63b4, 0),# East Asian ideograph
- 0x226352: (0x77e6, 0),# East Asian ideograph
- 0x226355: (0x77ec, 0),# East Asian ideograph
- 0x273b39: (0x5b9d, 0),# East Asian ideograph
- 0x69254d: (0x30cd, 0),# Katakana letter NE
- 0x226359: (0x77f0, 0),# East Asian ideograph
- 0x22635a: (0x77f1, 0),# East Asian ideograph
- 0x22635c: (0x77f4, 0),# East Asian ideograph
- 0x217b3a: (0x5a38, 0),# East Asian ideograph
- 0x226360: (0x77fc, 0),# East Asian ideograph
- 0x213b3b: (0x5bfa, 0),# East Asian ideograph
- 0x23537e: (0x9a4c, 0),# East Asian ideograph
- 0x226367: (0x77f8, 0),# East Asian ideograph
- 0x226368: (0x77fb, 0),# East Asian ideograph
- 0x277b3c: (0x5a05, 0),# East Asian ideograph
- 0x355739: (0x9c76, 0),# East Asian ideograph
- 0x234944: (0x95ab, 0),# East Asian ideograph
- 0x226370: (0x7809, 0),# East Asian ideograph
- 0x226371: (0x7806, 0),# East Asian ideograph
- 0x226373: (0x7819, 0),# East Asian ideograph
- 0x226374: (0x7811, 0),# East Asian ideograph
- 0x293b3e: (0x8f71, 0),# East Asian ideograph
- 0x4c6376: (0x7839, 0),# East Asian ideograph
- 0x226378: (0x7812, 0),# East Asian ideograph
- 0x223b3f: (0x67a1, 0),# East Asian ideograph
- 0x213b40: (0x5c07, 0),# East Asian ideograph
- 0x4b5e27: (0x93ad, 0),# East Asian ideograph
- 0x273b42: (0x5bfb, 0),# East Asian ideograph
- 0x6f504d: (0xbb3d, 0),# Korean hangul
- 0x6f5935: (0xcc60, 0),# Korean hangul
- 0x294960: (0x9619, 0),# East Asian ideograph
- 0x213b43: (0x5c0d, 0),# East Asian ideograph
- 0x223a31: (0x6710, 0),# East Asian ideograph
- 0x273b44: (0x5bfc, 0),# East Asian ideograph
- 0x224b3e: (0x6e76, 0),# East Asian ideograph
- 0x234f3d: (0x9833, 0),# East Asian ideograph
- 0x2d4850: (0x6eda, 0),# East Asian ideograph
- 0x293b47: (0x8f77, 0),# East Asian ideograph
- 0x6f504e: (0xbb44, 0),# Korean hangul
- 0x275f39: (0x968f, 0),# East Asian ideograph
- 0x23573f: (0x9ba8, 0),# East Asian ideograph
- 0x274b74: (0x74ef, 0),# East Asian ideograph
- 0x217b48: (0x5a50, 0),# East Asian ideograph
- 0x213b49: (0x5c24, 0),# East Asian ideograph
- 0x234e3b: (0x97c5, 0),# East Asian ideograph
- 0x4b4053: (0x627a, 0),# East Asian ideograph
- 0x213b4b: (0x5c31, 0),# East Asian ideograph
- 0x273b4c: (0x5c34, 0),# East Asian ideograph
- 0x6f504f: (0xbb47, 0),# Korean hangul
- 0x275f3a: (0x9669, 0),# East Asian ideograph
- 0x2d625f: (0x83fb, 0),# East Asian ideograph
- 0x213634: (0x5501, 0),# East Asian ideograph
- 0x213b4e: (0x5c3a, 0),# East Asian ideograph
- 0x394956: (0x792e, 0),# East Asian ideograph
- 0x2d7552: (0x579b, 0),# East Asian ideograph
- 0x213b4f: (0x5c3c, 0),# East Asian ideograph
- 0x69562c: (0x599b, 0),# East Asian ideograph
- 0x294e54: (0x97ea, 0),# East Asian ideograph
- 0x692574: (0x30f4, 0),# Katakana letter VU
- 0x233b51: (0x8eff, 0),# East Asian ideograph
- 0x6f5050: (0xbb49, 0),# Korean hangul
- 0x275f3b: (0x9690, 0),# East Asian ideograph
- 0x213635: (0x54fc, 0),# East Asian ideograph
- 0x27516c: (0x7f1d, 0),# East Asian ideograph
- 0x4b4537: (0x6804, 0),# East Asian ideograph
- 0x213b54: (0x5c46, 0),# East Asian ideograph
- 0x45304c: (0x69a6, 0),# East Asian ideograph
- 0x234f40: (0x982e, 0),# East Asian ideograph
- 0x2d4853: (0x7001, 0),# East Asian ideograph
- 0x224a3d: (0x6da4, 0),# East Asian ideograph
- 0x213b56: (0x5c48, 0),# East Asian ideograph
- 0x6f5051: (0xbb4d, 0),# Korean hangul
- 0x275f3c: (0x9647, 0),# East Asian ideograph
- 0x334277: (0x65ef, 0),# East Asian ideograph
- 0x213b58: (0x5c4b, 0),# East Asian ideograph
- 0x213b59: (0x5c4d, 0),# East Asian ideograph
- 0x23316b: (0x89ff, 0),# East Asian ideograph
- 0x213b5a: (0x5c55, 0),# East Asian ideograph
- 0x213b5b: (0x5c51, 0),# East Asian ideograph
- 0x226424: (0x781b, 0),# East Asian ideograph
- 0x216425: (0x5187, 0),# East Asian ideograph
- 0x226426: (0x782c, 0),# East Asian ideograph
- 0x226427: (0x7823, 0),# East Asian ideograph
- 0x226428: (0x782b, 0),# East Asian ideograph
- 0x216429: (0x4e28, 0),# East Asian ideograph
- 0x22642a: (0x7829, 0),# East Asian ideograph
- 0x22642d: (0x7822, 0),# East Asian ideograph
- 0x21642e: (0x4e31, 0),# East Asian ideograph
- 0x2d3748: (0x8b5f, 0),# East Asian ideograph
- 0x226431: (0x7835, 0),# East Asian ideograph
- 0x226432: (0x7833, 0),# East Asian ideograph
- 0x226433: (0x782e, 0),# East Asian ideograph
- 0x216434: (0x4e42, 0),# East Asian ideograph
- 0x226435: (0x7820, 0),# East Asian ideograph
- 0x216437: (0x738d, 0),# East Asian ideograph
- 0x226438: (0x783d, 0),# East Asian ideograph
- 0x22643b: (0x781f, 0),# East Asian ideograph
- 0x21643c: (0x4e5c, 0),# East Asian ideograph
- 0x22643d: (0x7831, 0),# East Asian ideograph
- 0x21643f: (0x6c39, 0),# East Asian ideograph
- 0x274168: (0x6320, 0),# East Asian ideograph
- 0x6f5053: (0xbb50, 0),# Korean hangul
- 0x275f3e: (0x53ea, 0),# East Asian ideograph (duplicate simplified)
- 0x226444: (0x784d, 0),# East Asian ideograph
- 0x216446: (0x4e85, 0),# East Asian ideograph
- 0x273b61: (0x5c42, 0),# East Asian ideograph
- 0x226448: (0x7848, 0),# East Asian ideograph
- 0x226449: (0x7853, 0),# East Asian ideograph
- 0x22644a: (0x7854, 0),# East Asian ideograph
- 0x22644b: (0x7845, 0),# East Asian ideograph
- 0x22644c: (0x7852, 0),# East Asian ideograph
- 0x295938: (0x9cdf, 0),# East Asian ideograph
- 0x22644e: (0x7850, 0),# East Asian ideograph
- 0x22644f: (0x7858, 0),# East Asian ideograph
- 0x216450: (0x4ea0, 0),# East Asian ideograph
- 0x216451: (0x4ea2, 0),# East Asian ideograph
- 0x226452: (0x7847, 0),# East Asian ideograph
- 0x233b63: (0x8f27, 0),# East Asian ideograph
- 0x216455: (0x4eb6, 0),# East Asian ideograph (variant of 4B6455 which maps to 4EB6)
- 0x226456: (0x784c, 0),# East Asian ideograph
- 0x695630: (0x5cbc, 0),# East Asian ideograph
- 0x216458: (0x4eb9, 0),# East Asian ideograph
- 0x223b64: (0x67b7, 0),# East Asian ideograph
- 0x22645a: (0x7868, 0),# East Asian ideograph
- 0x22645b: (0x786d, 0),# East Asian ideograph
- 0x21645e: (0x4ec9, 0),# East Asian ideograph
- 0x226460: (0x7864, 0),# East Asian ideograph
- 0x226461: (0x785c, 0),# East Asian ideograph
- 0x216462: (0x4ece, 0),# East Asian ideograph (not in Unicode)
- 0x216463: (0x4ee8, 0),# East Asian ideograph
- 0x215852: (0x8a54, 0),# East Asian ideograph
- 0x213639: (0x54fa, 0),# East Asian ideograph
- 0x226466: (0x786a, 0),# East Asian ideograph
- 0x226469: (0x7886, 0),# East Asian ideograph
- 0x21646b: (0x4ee1, 0),# East Asian ideograph
- 0x22646c: (0x787f, 0),# East Asian ideograph
- 0x22646d: (0x7887, 0),# East Asian ideograph
- 0x213c2a: (0x5d84, 0),# East Asian ideograph
- 0x226470: (0x7894, 0),# East Asian ideograph
- 0x696471: (0x7cc0, 0),# East Asian ideograph
- 0x216472: (0x4f08, 0),# East Asian ideograph
- 0x216473: (0x4f0e, 0),# East Asian ideograph
- 0x696474: (0x7cd8, 0),# East Asian ideograph
- 0x216475: (0x4f03, 0),# East Asian ideograph
- 0x226476: (0x788f, 0),# East Asian ideograph
- 0x234f44: (0x982f, 0),# East Asian ideograph
- 0x6f544a: (0xc378, 0),# Korean hangul
- 0x21647c: (0x4f22, 0),# East Asian ideograph
- 0x22647e: (0x7899, 0),# East Asian ideograph
- 0x213b6b: (0x5cb7, 0),# East Asian ideograph
- 0x225c52: (0x74e7, 0),# East Asian ideograph
- 0x27516d: (0x603b, 0),# East Asian ideograph
- 0x223b6d: (0x6802, 0),# East Asian ideograph
- 0x695632: (0x5cc5, 0),# East Asian ideograph
- 0x213b6e: (0x5ca1, 0),# East Asian ideograph
- 0x4c3a5b: (0x6859, 0),# East Asian ideograph
- 0x213b6f: (0x5cab, 0),# East Asian ideograph
- 0x6f5056: (0xbb61, 0),# Korean hangul
- 0x294969: (0x961a, 0),# East Asian ideograph
- 0x215854: (0x8a50, 0),# East Asian ideograph
- 0x21363b: (0x54ee, 0),# East Asian ideograph
- 0x21762a: (0x57fb, 0),# East Asian ideograph
- 0x27583f: (0x8baa, 0),# East Asian ideograph
- 0x213b71: (0x5cb1, 0),# East Asian ideograph
- 0x6f5961: (0xce21, 0),# Korean hangul
- 0x213b72: (0x5cd9, 0),# East Asian ideograph
- 0x275f2c: (0x9636, 0),# East Asian ideograph
- 0x223b74: (0x67df, 0),# East Asian ideograph
- 0x6f5057: (0xbb63, 0),# Korean hangul
- 0x233b75: (0x8f17, 0),# East Asian ideograph
- 0x23576b: (0x9bbb, 0),# East Asian ideograph
- 0x213b77: (0x5ce8, 0),# East Asian ideograph
- 0x216622: (0x4fe4, 0),# East Asian ideograph
- 0x6f4e53: (0xb729, 0),# Korean hangul
- 0x223b78: (0x6806, 0),# East Asian ideograph
- 0x223b79: (0x67ae, 0),# East Asian ideograph
- 0x213b7a: (0x5cea, 0),# East Asian ideograph
- 0x336054: (0x985b, 0),# East Asian ideograph
- 0x213b7b: (0x5d07, 0),# East Asian ideograph
- 0x27527a: (0x804b, 0),# East Asian ideograph
- 0x213b7c: (0x5d06, 0),# East Asian ideograph
- 0x216627: (0x4fc5, 0),# East Asian ideograph
- 0x6f773e: (0xcb4c, 0),# Korean hangul
- 0x692435: (0x3055, 0),# Hiragana letter SA
- 0x233b7d: (0x8f2d, 0),# East Asian ideograph
- 0x455746: (0x672f, 0),# East Asian ideograph
- 0x213b7e: (0x5d16, 0),# East Asian ideograph
- 0x6f5059: (0xbb8c, 0),# Korean hangul
- 0x216629: (0x4fc9, 0),# East Asian ideograph
- 0x4b516a: (0x7ef7, 0),# East Asian ideograph
- 0x6f7737: (0xc5ab, 0),# Korean hangul
- 0x6f5a67: (0xd0a5, 0),# Korean hangul
- 0x6f5d23: (0xd5dd, 0),# Korean hangul
- 0x2d485c: (0x6f44, 0),# East Asian ideograph
- 0x6f505a: (0xbba4, 0),# Korean hangul
- 0x21363f: (0x54e9, 0),# East Asian ideograph
- 0x22262f: (0x5dae, 0),# East Asian ideograph
- 0x27516e: (0x7eb5, 0),# East Asian ideograph
- 0x283462: (0x6322, 0),# East Asian ideograph
- 0x334c7b: (0x767a, 0),# East Asian ideograph
- 0x216527: (0x4ef5, 0),# East Asian ideograph
- 0x216528: (0x4f07, 0),# East Asian ideograph
- 0x226529: (0x7893, 0),# East Asian ideograph
- 0x21652a: (0x4f00, 0),# East Asian ideograph
- 0x21652c: (0x4f0b, 0),# East Asian ideograph
- 0x22652d: (0x7896, 0),# East Asian ideograph
- 0x22652f: (0x78b2, 0),# East Asian ideograph
- 0x6f5371: (0xc288, 0),# Korean hangul
- 0x226531: (0x78a1, 0),# East Asian ideograph
- 0x216532: (0x4f3b, 0),# East Asian ideograph
- 0x292633: (0x84e3, 0),# East Asian ideograph
- 0x216536: (0x4f58, 0),# East Asian ideograph
- 0x216537: (0x4f62, 0),# East Asian ideograph
- 0x216539: (0x4f64, 0),# East Asian ideograph
- 0x21653a: (0x4f49, 0),# East Asian ideograph
- 0x22653b: (0x78a4, 0),# East Asian ideograph
- 0x22653e: (0x78b4, 0),# East Asian ideograph
- 0x21653f: (0x4f3e, 0),# East Asian ideograph
- 0x226540: (0x78ad, 0),# East Asian ideograph
- 0x226541: (0x78a3, 0),# East Asian ideograph
- 0x226543: (0x789e, 0),# East Asian ideograph
- 0x226544: (0x78a8, 0),# East Asian ideograph
- 0x232636: (0x857b, 0),# East Asian ideograph
- 0x214b5e: (0x746a, 0),# East Asian ideograph
- 0x226548: (0x78ab, 0),# East Asian ideograph
- 0x234f4b: (0x9847, 0),# East Asian ideograph
- 0x4b6637: (0x4fe3, 0),# East Asian ideograph
- 0x21654d: (0x4f68, 0),# East Asian ideograph
- 0x22654e: (0x78bb, 0),# East Asian ideograph
- 0x21654f: (0x4f5f, 0),# East Asian ideograph
- 0x6f505c: (0xbbc4, 0),# Korean hangul
- 0x29496f: (0x95fc, 0),# East Asian ideograph
- 0x226555: (0x78cc, 0),# East Asian ideograph
- 0x226556: (0x78c9, 0),# East Asian ideograph
- 0x216557: (0x4f7c, 0),# East Asian ideograph
- 0x226558: (0x78d1, 0),# East Asian ideograph
- 0x21655a: (0x4f98, 0),# East Asian ideograph
- 0x21655b: (0x4f92, 0),# East Asian ideograph
- 0x21655c: (0x4f7d, 0),# East Asian ideograph
- 0x22655e: (0x78c8, 0),# East Asian ideograph
- 0x226560: (0x78d4, 0),# East Asian ideograph
- 0x274e3b: (0x781a, 0),# East Asian ideograph
- 0x216562: (0x4f76, 0),# East Asian ideograph
- 0x216564: (0x4fa2, 0),# East Asian ideograph
- 0x4c6565: (0x78b9, 0),# East Asian ideograph
- 0x216566: (0x4f91, 0),# East Asian ideograph
- 0x216567: (0x4f95, 0),# East Asian ideograph
- 0x226568: (0x78df, 0),# East Asian ideograph
- 0x22656a: (0x78e7, 0),# East Asian ideograph
- 0x21656c: (0x4f4c, 0),# East Asian ideograph
- 0x21656d: (0x4f97, 0),# East Asian ideograph
- 0x22656e: (0x78db, 0),# East Asian ideograph
- 0x22656f: (0x78e1, 0),# East Asian ideograph
- 0x216570: (0x4f79, 0),# East Asian ideograph
- 0x216571: (0x4f9a, 0),# East Asian ideograph
- 0x216572: (0x4f81, 0),# East Asian ideograph
- 0x216573: (0x4f78, 0),# East Asian ideograph
- 0x225c5a: (0x74f0, 0),# East Asian ideograph
- 0x4b516e: (0x7e26, 0),# East Asian ideograph
- 0x226576: (0x78ee, 0),# East Asian ideograph
- 0x226577: (0x78e3, 0),# East Asian ideograph
- 0x226579: (0x78f2, 0),# East Asian ideograph
- 0x21657b: (0x4f7a, 0),# East Asian ideograph
- 0x21657c: (0x4fcd, 0),# East Asian ideograph
- 0x2d5529: (0x830e, 0),# East Asian ideograph (variant of 275529)
- 0x22657e: (0x7905, 0),# East Asian ideograph
- 0x6f5d27: (0xd5ec, 0),# Korean hangul
- 0x6f505e: (0xbbd0, 0),# Korean hangul
- 0x217a75: (0x5a16, 0),# East Asian ideograph
- 0x224f5d: (0x7043, 0),# East Asian ideograph
- 0x216643: (0x4fb9, 0),# East Asian ideograph
- 0x232644: (0x8597, 0),# East Asian ideograph
- 0x283466: (0x63ff, 0),# East Asian ideograph
- 0x6f5d28: (0xd5f4, 0),# Korean hangul
- 0x287269: (0x7ec9, 0),# East Asian ideograph
- 0x2d3c38: (0x9245, 0),# East Asian ideograph
- 0x216646: (0x501e, 0),# East Asian ideograph
- 0x217e25: (0x5b67, 0),# East Asian ideograph
- 0x6f4b42: (0xb059, 0),# Korean hangul
- 0x6f505f: (0xbbf8, 0),# Korean hangul
- 0x282647: (0x5cbf, 0),# East Asian ideograph
- 0x275f4a: (0x867d, 0),# East Asian ideograph
- 0x225c5c: (0x74ee, 0),# East Asian ideograph
- 0x217633: (0x5800, 0),# East Asian ideograph
- 0x23605b: (0x9f8e, 0),# East Asian ideograph
- 0x276649: (0x4f1c, 0),# East Asian ideograph
- 0x274e3e: (0x7815, 0),# East Asian ideograph
- 0x293866: (0x8db1, 0),# East Asian ideograph
- 0x29563c: (0x9b49, 0),# East Asian ideograph
- 0x6f5c73: (0xd585, 0),# Korean hangul
- 0x4b3c21: (0x5d5c, 0),# East Asian ideograph
- 0x21664c: (0x5007, 0),# East Asian ideograph
- 0x21664d: (0x5013, 0),# East Asian ideograph
- 0x23264e: (0x8586, 0),# East Asian ideograph
- 0x6f5d2a: (0xd5f7, 0),# Korean hangul
- 0x222650: (0x5ddb, 0),# East Asian ideograph
- 0x22606a: (0x76aa, 0),# East Asian ideograph
- 0x292651: (0x84df, 0),# East Asian ideograph
- 0x275f4c: (0x9e21, 0),# East Asian ideograph
- 0x696466: (0x7cad, 0),# East Asian ideograph
- 0x217635: (0x57ec, 0),# East Asian ideograph
- 0x6f5264: (0xc0b3, 0),# Korean hangul
- 0x393770: (0x56f2, 0),# East Asian ideograph
- 0x2d552d: (0x8358, 0),# East Asian ideograph
- 0x6f5d2b: (0xd5f9, 0),# Korean hangul
- 0x4b4066: (0x62f4, 0),# East Asian ideograph
- 0x286655: (0x783b, 0),# East Asian ideograph
- 0x4b3c23: (0x5ce5, 0),# East Asian ideograph
- 0x274177: (0x631e, 0),# East Asian ideograph
- 0x222656: (0x5de4, 0),# East Asian ideograph
- 0x217636: (0x5807, 0),# East Asian ideograph
- 0x292658: (0x83b6, 0),# East Asian ideograph
- 0x282659: (0x5def, 0),# East Asian ideograph
- 0x692432: (0x3052, 0),# Hiragana letter GE
- 0x276068: (0x996c, 0),# East Asian ideograph
- 0x706d3b: (0x7818, 0),# East Asian ideograph
- 0x226621: (0x78f9, 0),# East Asian ideograph
- 0x226622: (0x78fd, 0),# East Asian ideograph
- 0x6f5063: (0xbc00, 0),# Korean hangul
- 0x216626: (0x4fb7, 0),# East Asian ideograph
- 0x226627: (0x78fe, 0),# East Asian ideograph
- 0x226629: (0x78fb, 0),# East Asian ideograph
- 0x21662a: (0x4fe5, 0),# East Asian ideograph
- 0x22662b: (0x7904, 0),# East Asian ideograph
- 0x21662c: (0x4fe7, 0),# East Asian ideograph
- 0x22662e: (0x7912, 0),# East Asian ideograph
- 0x226632: (0x790c, 0),# East Asian ideograph
- 0x216633: (0x4fdc, 0),# East Asian ideograph
- 0x226634: (0x7913, 0),# East Asian ideograph
- 0x216635: (0x4fd4, 0),# East Asian ideograph
- 0x216637: (0x4fc1, 0),# East Asian ideograph
- 0x21663b: (0x4fdb, 0),# East Asian ideograph
- 0x21663e: (0x4fc6, 0),# East Asian ideograph
- 0x706640: (0x80ec, 0),# East Asian ideograph
- 0x6f5064: (0xbc08, 0),# Korean hangul
- 0x226643: (0x791e, 0),# East Asian ideograph
- 0x6f4c21: (0xb128, 0),# Korean hangul
- 0x226646: (0x7922, 0),# East Asian ideograph
- 0x292661: (0x8360, 0),# East Asian ideograph
- 0x216648: (0x503f, 0),# East Asian ideograph
- 0x216649: (0x5005, 0),# East Asian ideograph
- 0x4d5973: (0x51eb, 0),# East Asian ideograph
- 0x22664c: (0x7924, 0),# East Asian ideograph
- 0x22664d: (0x7927, 0),# East Asian ideograph
- 0x21664e: (0x5022, 0),# East Asian ideograph
- 0x226650: (0x7929, 0),# East Asian ideograph
- 0x216652: (0x4ff5, 0),# East Asian ideograph
- 0x6f2463: (0x314d, 0),# Korean hangul
- 0x226655: (0x7931, 0),# East Asian ideograph
- 0x393428: (0x5227, 0),# East Asian ideograph
- 0x276235: (0x9e26, 0),# East Asian ideograph
- 0x216659: (0x4ff4, 0),# East Asian ideograph
- 0x21665b: (0x5037, 0),# East Asian ideograph
- 0x22665d: (0x7934, 0),# East Asian ideograph
- 0x21665e: (0x502e, 0),# East Asian ideograph
- 0x6f5065: (0xbc09, 0),# Korean hangul
- 0x226660: (0x7936, 0),# East Asian ideograph
- 0x216661: (0x4ff6, 0),# East Asian ideograph
- 0x216662: (0x501c, 0),# East Asian ideograph
- 0x6f4c22: (0xb12c, 0),# Korean hangul
- 0x226665: (0x793d, 0),# East Asian ideograph
- 0x216666: (0x502c, 0),# East Asian ideograph
- 0x226667: (0x7942, 0),# East Asian ideograph
- 0x226668: (0x793f, 0),# East Asian ideograph
- 0x216669: (0x5010, 0),# East Asian ideograph
- 0x22666a: (0x794a, 0),# East Asian ideograph
- 0x22666b: (0x794d, 0),# East Asian ideograph
- 0x292668: (0x8369, 0),# East Asian ideograph
- 0x226675: (0x7946, 0),# East Asian ideograph
- 0x226677: (0x7958, 0),# East Asian ideograph
- 0x216679: (0x503d, 0),# East Asian ideograph
- 0x22667a: (0x795c, 0),# East Asian ideograph
- 0x22667b: (0x794f, 0),# East Asian ideograph
- 0x22667c: (0x7953, 0),# East Asian ideograph
- 0x22667d: (0x7953, 0),# Unrelated variant of EACC 22667C which maps to 7953
- 0x6f4c23: (0xb134, 0),# Korean hangul
- 0x225c63: (0x74f8, 0),# East Asian ideograph
- 0x236062: (0x9f95, 0),# East Asian ideograph
- 0x2d336b: (0x5c05, 0),# East Asian ideograph
- 0x213f71: (0x6233, 0),# East Asian ideograph
- 0x6f5d30: (0xd610, 0),# Korean hangul
- 0x224b57: (0x6ea8, 0),# East Asian ideograph
- 0x27627d: (0x9f50, 0),# East Asian ideograph
- 0x6f5d77: (0xd774, 0),# Korean hangul
- 0x213b2e: (0x5be2, 0),# East Asian ideograph
- 0x6f4c24: (0xb135, 0),# Korean hangul
- 0x21763b: (0x580f, 0),# East Asian ideograph
- 0x227a3a: (0x81ca, 0),# East Asian ideograph
- 0x232672: (0x85bf, 0),# East Asian ideograph
- 0x6f5528: (0xc557, 0),# Korean hangul
- 0x6f5068: (0xbc0d, 0),# Korean hangul
- 0x6f4c25: (0xb137, 0),# Korean hangul
- 0x295a48: (0x9e31, 0),# East Asian ideograph
- 0x395a36: (0x983c, 0),# East Asian ideograph
- 0x6f4939: (0xaca1, 0),# Korean hangul
- 0x275121: (0x7eb1, 0),# East Asian ideograph
- 0x222677: (0x5e12, 0),# East Asian ideograph
- 0x225122: (0x70dd, 0),# East Asian ideograph
- 0x225123: (0x70e1, 0),# East Asian ideograph
- 0x27417e: (0x62e9, 0),# East Asian ideograph
- 0x226679: (0x795b, 0),# East Asian ideograph
- 0x275f54: (0x4e91, 0),# East Asian ideograph
- 0x235124: (0x9907, 0),# East Asian ideograph
- 0x6f4c26: (0xb140, 0),# Korean hangul
- 0x225c66: (0x74fb, 0),# East Asian ideograph
- 0x275125: (0x7eb7, 0),# East Asian ideograph
- 0x4b3869: (0x5727, 0),# East Asian ideograph
- 0x235c22: (0x9dc7, 0),# East Asian ideograph
- 0x225126: (0x70e3, 0),# East Asian ideograph
- 0x6f5d33: (0xd614, 0),# Korean hangul
- 0x6f5127: (0xbc85, 0),# Korean hangul
- 0x235128: (0x9902, 0),# East Asian ideograph
- 0x6f5374: (0xc298, 0),# Korean hangul
- 0x6f506a: (0xbc11, 0),# Korean hangul
- 0x275129: (0x624e, 0),# East Asian ideograph
- 0x277d2b: (0x5a06, 0),# East Asian ideograph
- 0x225c67: (0x74ff, 0),# East Asian ideograph
- 0x27512a: (0x7ecd, 0),# East Asian ideograph
- 0x21512b: (0x7d44, 0),# East Asian ideograph
- 0x6f4f31: (0xb82c, 0),# Korean hangul
- 0x27413f: (0x626c, 0),# East Asian ideograph
- 0x6f5d34: (0xd615, 0),# Korean hangul
- 0x27512c: (0x7ec6, 0),# East Asian ideograph
- 0x27512d: (0x7ec5, 0),# East Asian ideograph
- 0x4c7328: (0x5fad, 0),# East Asian ideograph (variant of 2E7328 which maps to 5FAD)
- 0x22512e: (0x70d1, 0),# East Asian ideograph
- 0x215869: (0x8aaa, 0),# East Asian ideograph
- 0x21512f: (0x7d40, 0),# East Asian ideograph
- 0x215130: (0x7d42, 0),# East Asian ideograph
- 0x216722: (0x506f, 0),# East Asian ideograph
- 0x216723: (0x5050, 0),# East Asian ideograph
- 0x216725: (0x5070, 0),# East Asian ideograph
- 0x215131: (0x7d71, 0),# East Asian ideograph
- 0x216729: (0x5053, 0),# East Asian ideograph
- 0x21672a: (0x506a, 0),# East Asian ideograph
- 0x21672c: (0x5056, 0),# East Asian ideograph
- 0x215132: (0x7d5e, 0),# East Asian ideograph
- 0x226730: (0x7972, 0),# East Asian ideograph
- 0x216731: (0x506d, 0),# East Asian ideograph
- 0x275133: (0x7ed2, 0),# East Asian ideograph
- 0x6f4c29: (0xb150, 0),# Korean hangul
- 0x216738: (0x505d, 0),# East Asian ideograph
- 0x215134: (0x7d50, 0),# East Asian ideograph
- 0x21673b: (0x5058, 0),# East Asian ideograph
- 0x21673c: (0x5072, 0),# East Asian ideograph
- 0x22673e: (0x797c, 0),# East Asian ideograph
- 0x3f6179: (0x5c1f, 0),# East Asian ideograph
- 0x216741: (0x5041, 0),# East Asian ideograph
- 0x6f5d36: (0xd638, 0),# Korean hangul
- 0x275136: (0x7eda, 0),# East Asian ideograph
- 0x216746: (0x5015, 0),# East Asian ideograph
- 0x293430: (0x8bb4, 0),# East Asian ideograph
- 0x216748: (0x507a, 0),# East Asian ideograph
- 0x21674a: (0x506c, 0),# East Asian ideograph
- 0x275137: (0x7edd, 0),# East Asian ideograph
- 0x21674d: (0x506b, 0),# East Asian ideograph
- 0x21674e: (0x5094, 0),# East Asian ideograph
- 0x22674f: (0x798b, 0),# East Asian ideograph
- 0x216750: (0x509e, 0),# East Asian ideograph
- 0x235138: (0x9915, 0),# East Asian ideograph
- 0x216752: (0x509b, 0),# East Asian ideograph
- 0x216753: (0x509a, 0),# East Asian ideograph
- 0x226754: (0x7994, 0),# East Asian ideograph
- 0x226755: (0x7993, 0),# East Asian ideograph
- 0x215139: (0x7d66, 0),# East Asian ideograph
- 0x21675a: (0x508c, 0),# East Asian ideograph
- 0x21675c: (0x5088, 0),# East Asian ideograph
- 0x23513a: (0x9924, 0),# East Asian ideograph
- 0x22675f: (0x79a1, 0),# East Asian ideograph
- 0x226760: (0x799b, 0),# East Asian ideograph
- 0x226761: (0x79a3, 0),# East Asian ideograph
- 0x216762: (0x508e, 0),# East Asian ideograph
- 0x23513b: (0x991f, 0),# East Asian ideograph
- 0x224b5e: (0x6e8e, 0),# East Asian ideograph
- 0x216767: (0x50a6, 0),# East Asian ideograph
- 0x274c76: (0x763e, 0),# East Asian ideograph
- 0x21513c: (0x7d93, 0),# East Asian ideograph
- 0x21676a: (0x5092, 0),# East Asian ideograph
- 0x21676c: (0x509c, 0),# East Asian ideograph
- 0x2d442d: (0x6780, 0),# East Asian ideograph
- 0x22676e: (0x79a9, 0),# East Asian ideograph
- 0x27513d: (0x6346, 0),# East Asian ideograph
- 0x226770: (0x79ab, 0),# East Asian ideograph
- 0x216771: (0x50c7, 0),# East Asian ideograph
- 0x216775: (0x50c9, 0),# East Asian ideograph
- 0x22677a: (0x79b3, 0),# East Asian ideograph
- 0x22513f: (0x70fa, 0),# East Asian ideograph
- 0x21677c: (0x50b4, 0),# East Asian ideograph
- 0x6f5d38: (0xd63c, 0),# Korean hangul
- 0x212a23: (0xe8d2, 0),# EACC component character
- 0x215140: (0x7d81, 0),# East Asian ideograph
- 0x334f5e: (0x7a91, 0),# East Asian ideograph
- 0x215141: (0x7d9c, 0),# East Asian ideograph
- 0x6f5375: (0xc29b, 0),# Korean hangul
- 0x213538: (0x53f0, 0),# East Asian ideograph (duplicate simplified)
- 0x6f506f: (0xbc16, 0),# Korean hangul
- 0x215142: (0x7dbb, 0),# East Asian ideograph
- 0x6f4c2c: (0xb154, 0),# Korean hangul
- 0x284140: (0x6861, 0),# East Asian ideograph
- 0x235143: (0x9929, 0),# East Asian ideograph
- 0x2d3765: (0x8086, 0),# East Asian ideograph
- 0x215144: (0x7dca, 0),# East Asian ideograph
- 0x6f5d39: (0xd640, 0),# Korean hangul
- 0x215145: (0x7dbe, 0),# East Asian ideograph
- 0x224b60: (0x6ed9, 0),# East Asian ideograph
- 0x215146: (0x7db4, 0),# East Asian ideograph
- 0x2e2d79: (0x6128, 0),# East Asian ideograph
- 0x4b5724: (0x86cd, 0),# East Asian ideograph
- 0x235147: (0x991a, 0),# East Asian ideograph
- 0x275242: (0x4e49, 0),# East Asian ideograph
- 0x6f4c2d: (0xb155, 0),# Korean hangul
- 0x6f5c48: (0xd46f, 0),# Korean hangul
- 0x215148: (0x7db2, 0),# East Asian ideograph
- 0x215149: (0x7db1, 0),# East Asian ideograph
- 0x6f5d3a: (0xd648, 0),# Korean hangul
- 0x21514a: (0x7dbd, 0),# East Asian ideograph
- 0x224b61: (0x6ebd, 0),# East Asian ideograph
- 0x234f60: (0x9852, 0),# East Asian ideograph
- 0x4b3c32: (0x5dd3, 0),# East Asian ideograph
- 0x6f5071: (0xbc1b, 0),# Korean hangul
- 0x22514c: (0x7103, 0),# East Asian ideograph
- 0x21586f: (0x8aa3, 0),# East Asian ideograph
- 0x21514d: (0x7da2, 0),# East Asian ideograph
- 0x22582b: (0x736b, 0),# East Asian ideograph
- 0x4b615f: (0x9aea, 0),# East Asian ideograph
- 0x21514e: (0x7dad, 0),# East Asian ideograph
- 0x2d3324: (0x634c, 0),# East Asian ideograph
- 0x6f5d3b: (0xd649, 0),# Korean hangul
- 0x21514f: (0x7dbf, 0),# East Asian ideograph
- 0x2d356a: (0x8a36, 0),# East Asian ideograph
- 0x215150: (0x7db8, 0),# East Asian ideograph
- 0x6f5072: (0xbc1c, 0),# Korean hangul
- 0x215151: (0x7dc7, 0),# East Asian ideograph
- 0x22482d: (0x6cf2, 0),# East Asian ideograph
- 0x275152: (0x7f14, 0),# East Asian ideograph
- 0x6f493b: (0xaca9, 0),# Korean hangul
- 0x2d3768: (0x56ec, 0),# East Asian ideograph
- 0x215153: (0x7def, 0),# East Asian ideograph
- 0x294346: (0x94d1, 0),# East Asian ideograph
- 0x2f5d3c: (0x6ef7, 0),# East Asian ideograph
- 0x215154: (0x7df4, 0),# East Asian ideograph (variant of 4B5154 which maps to 7DF4)
- 0x224b63: (0x6ec1, 0),# East Asian ideograph
- 0x234f62: (0x984b, 0),# East Asian ideograph
- 0x235155: (0x9932, 0),# East Asian ideograph
- 0x6f5073: (0xbc1d, 0),# Korean hangul
- 0x225156: (0x7112, 0),# East Asian ideograph
- 0x6f4c30: (0xb178, 0),# Korean hangul
- 0x69675c: (0x825d, 0),# East Asian ideograph
- 0x215157: (0x7dec, 0),# East Asian ideograph
- 0x234179: (0x91e9, 0),# East Asian ideograph
- 0x215158: (0x7ddd, 0),# East Asian ideograph
- 0x213e37: (0x6012, 0),# East Asian ideograph
- 0x6f5d3d: (0xd64d, 0),# Korean hangul
- 0x215159: (0x7de9, 0),# East Asian ideograph
- 0x21515a: (0x7de3, 0),# East Asian ideograph
- 0x213539: (0x53e5, 0),# East Asian ideograph
- 0x6f5074: (0xbc1f, 0),# Korean hangul
- 0x216822: (0x50c2, 0),# East Asian ideograph
- 0x27515b: (0x7f16, 0),# East Asian ideograph
- 0x226825: (0x79bc, 0),# East Asian ideograph
- 0x225c71: (0x7505, 0),# East Asian ideograph
- 0x226828: (0x79c6, 0),# East Asian ideograph
- 0x22515c: (0x710c, 0),# East Asian ideograph
- 0x22682a: (0x79c8, 0),# East Asian ideograph
- 0x21682c: (0x50ba, 0),# East Asian ideograph
- 0x22682d: (0x79d4, 0),# East Asian ideograph
- 0x21682e: (0x50cd, 0),# East Asian ideograph
- 0x21515d: (0x7d9e, 0),# East Asian ideograph
- 0x6f4f33: (0xb835, 0),# Korean hangul
- 0x226832: (0x79d6, 0),# East Asian ideograph
- 0x216834: (0x50ef, 0),# East Asian ideograph
- 0x21515e: (0x7dde, 0),# East Asian ideograph
- 0x293438: (0x8c29, 0),# East Asian ideograph
- 0x21683a: (0x50f4, 0),# East Asian ideograph
- 0x21515f: (0x7e11, 0),# East Asian ideograph
- 0x21683c: (0x50dd, 0),# East Asian ideograph
- 0x22683d: (0x79ec, 0),# East Asian ideograph
- 0x22683e: (0x79eb, 0),# East Asian ideograph (variant of 4C683E which maps to 79EB)
- 0x6f5075: (0xbc24, 0),# Korean hangul
- 0x215160: (0x7e0a, 0),# East Asian ideograph
- 0x226842: (0x79e1, 0),# East Asian ideograph
- 0x6f4c32: (0xb17a, 0),# Korean hangul
- 0x226844: (0x79dd, 0),# East Asian ideograph
- 0x226845: (0x79ed, 0),# East Asian ideograph
- 0x216846: (0x50d9, 0),# East Asian ideograph
- 0x215161: (0x7e08, 0),# East Asian ideograph
- 0x226848: (0x79f8, 0),# East Asian ideograph
- 0x215162: (0x7e1b, 0),# East Asian ideograph
- 0x2e684e: (0x8020, 0),# East Asian ideograph
- 0x22684f: (0x7a02, 0),# East Asian ideograph
- 0x226850: (0x7a0a, 0),# East Asian ideograph
- 0x6f5d3f: (0xd654, 0),# Korean hangul
- 0x6f5625: (0xc651, 0),# Korean hangul
- 0x275163: (0x81f4, 0),# East Asian ideograph
- 0x226854: (0x7a09, 0),# East Asian ideograph
- 0x216855: (0x50ec, 0),# East Asian ideograph
- 0x4b442d: (0x67a9, 0),# East Asian ideograph
- 0x215164: (0x7e23, 0),# East Asian ideograph
- 0x21685b: (0x510e, 0),# East Asian ideograph
- 0x22685c: (0x7a03, 0),# East Asian ideograph
- 0x6f5076: (0xbc25, 0),# Korean hangul
- 0x275165: (0x7f29, 0),# East Asian ideograph
- 0x226861: (0x7a0c, 0),# East Asian ideograph
- 0x293160: (0x89ef, 0),# East Asian ideograph
- 0x225166: (0x7113, 0),# East Asian ideograph
- 0x216866: (0x5107, 0),# East Asian ideograph
- 0x216867: (0x510f, 0),# East Asian ideograph
- 0x216868: (0x50fe, 0),# East Asian ideograph
- 0x216869: (0x510b, 0),# East Asian ideograph
- 0x21686a: (0x50fd, 0),# East Asian ideograph
- 0x22686b: (0x7a11, 0),# East Asian ideograph
- 0x22686c: (0x7a18, 0),# East Asian ideograph
- 0x21686d: (0x5101, 0),# East Asian ideograph
- 0x234e43: (0x97c9, 0),# East Asian ideograph
- 0x22686f: (0x7a19, 0),# East Asian ideograph (variant of 2E686F which maps to 7A19)
- 0x6f5d40: (0xd655, 0),# Korean hangul
- 0x226871: (0x7a1e, 0),# East Asian ideograph
- 0x216872: (0x5113, 0),# East Asian ideograph
- 0x234f66: (0x983f, 0),# East Asian ideograph
- 0x226876: (0x7a17, 0),# East Asian ideograph
- 0x275169: (0x7f27, 0),# East Asian ideograph
- 0x216878: (0x511a, 0),# East Asian ideograph
- 0x216879: (0x9797, 0),# East Asian ideograph
- 0x6f5077: (0xbc27, 0),# Korean hangul
- 0x21516a: (0x7e43, 0),# East Asian ideograph
- 0x21687e: (0x5126, 0),# East Asian ideograph
- 0x6f4c34: (0xb180, 0),# Korean hangul
- 0x223a5b: (0x6745, 0),# East Asian ideograph
- 0x33516b: (0x7dd0, 0),# East Asian ideograph
- 0x4c735d: (0x7d4b, 0),# East Asian ideograph
- 0x22516c: (0x711e, 0),# East Asian ideograph
- 0x2e3729: (0x65b5, 0),# East Asian ideograph
- 0x6f5d41: (0xd658, 0),# Korean hangul
- 0x21516d: (0x7e3d, 0),# East Asian ideograph
- 0x6f5451: (0xc3d8, 0),# Korean hangul
- 0x22516e: (0x7120, 0),# East Asian ideograph
- 0x2d4437: (0x67fe, 0),# East Asian ideograph
- 0x21516f: (0x7e45, 0),# East Asian ideograph
- 0x6f4c35: (0xb188, 0),# Korean hangul
- 0x215170: (0x7e55, 0),# East Asian ideograph
- 0x275174: (0x7f2d, 0),# East Asian ideograph
- 0x235171: (0x994d, 0),# East Asian ideograph
- 0x473539: (0x8b9e, 0),# East Asian ideograph
- 0x29426d: (0x94cd, 0),# East Asian ideograph
- 0x275172: (0x7ee3, 0),# East Asian ideograph
- 0x224b69: (0x6ebb, 0),# East Asian ideograph
- 0x275173: (0x7ed5, 0),# East Asian ideograph
- 0x6f5b23: (0xd0f0, 0),# Korean hangul
- 0x215174: (0x7e5a, 0),# East Asian ideograph
- 0x6f4c36: (0xb189, 0),# Korean hangul
- 0x225175: (0x712d, 0),# East Asian ideograph
- 0x2d376f: (0x5700, 0),# East Asian ideograph
- 0x275176: (0x7ef3, 0),# East Asian ideograph
- 0x213c21: (0x5d0e, 0),# East Asian ideograph
- 0x275177: (0x8327, 0),# East Asian ideograph
- 0x2d3c22: (0x5d10, 0),# East Asian ideograph
- 0x275178: (0x7ece, 0),# East Asian ideograph
- 0x223c23: (0x67c2, 0),# East Asian ideograph
- 0x6f507a: (0xbc30, 0),# Korean hangul
- 0x275179: (0x7ed8, 0),# East Asian ideograph
- 0x215878: (0x8ad2, 0),# East Asian ideograph
- 0x225c77: (0x7503, 0),# East Asian ideograph
- 0x27517a: (0x8fab, 0),# East Asian ideograph
- 0x217c25: (0x5a9e, 0),# East Asian ideograph
- 0x45465b: (0x6c2f, 0),# East Asian ideograph
- 0x21517b: (0x7e7d, 0),# East Asian ideograph
- 0x223c26: (0x67ca, 0),# East Asian ideograph
- 0x274e59: (0x7840, 0),# East Asian ideograph
- 0x6f5d44: (0xd667, 0),# Korean hangul
- 0x6f517c: (0xbe61, 0),# Korean hangul
- 0x213c27: (0x5d4c, 0),# East Asian ideograph
- 0x234f6a: (0x985c, 0),# East Asian ideograph
- 0x27517d: (0x7ee7, 0),# East Asian ideograph
- 0x223c28: (0x67ce, 0),# East Asian ideograph
- 0x2d443a: (0x6942, 0),# East Asian ideograph
- 0x23517e: (0x9955, 0),# East Asian ideograph
- 0x213b32: (0x5be7, 0),# East Asian ideograph
- 0x213c29: (0x5d69, 0),# East Asian ideograph
- 0x223c2a: (0x67f2, 0),# East Asian ideograph
- 0x275e3e: (0x94db, 0),# East Asian ideograph
- 0x2d5547: (0x837d, 0),# East Asian ideograph
- 0x223c2b: (0x67c3, 0),# East Asian ideograph
- 0x6f5d45: (0xd669, 0),# Korean hangul
- 0x234f6b: (0x9859, 0),# East Asian ideograph
- 0x223c2d: (0x67dd, 0),# East Asian ideograph
- 0x6f507c: (0xbc34, 0),# Korean hangul
- 0x275f67: (0x96fe, 0),# East Asian ideograph
- 0x213c2e: (0x5dbd, 0),# East Asian ideograph
- 0x6f4d43: (0xb3d0, 0),# Korean hangul
- 0x233e5f: (0x90ad, 0),# East Asian ideograph
- 0x213c2f: (0x5dba, 0),# East Asian ideograph (variant of 4B3C2F which maps to 5DBA)
- 0x6f493d: (0xacac, 0),# Korean hangul
- 0x233c30: (0x8f46, 0),# East Asian ideograph
- 0x226922: (0x7a2c, 0),# East Asian ideograph
- 0x6f5d46: (0xd670, 0),# Korean hangul
- 0x233c31: (0x8f4a, 0),# East Asian ideograph
- 0x216929: (0x5124, 0),# East Asian ideograph
- 0x6f5452: (0xc3d9, 0),# Korean hangul
- 0x21692b: (0x5129, 0),# East Asian ideograph
- 0x213c32: (0x5dd4, 0),# East Asian ideograph
- 0x6f507d: (0xbc37, 0),# Korean hangul
- 0x216930: (0x5131, 0),# East Asian ideograph
- 0x273c33: (0x5ca9, 0),# East Asian ideograph
- 0x29454d: (0x9534, 0),# East Asian ideograph
- 0x275175: (0x7cfb, 0),# East Asian ideograph (duplicate simplified)
- 0x226939: (0x7a48, 0),# East Asian ideograph
- 0x22693d: (0x7a4b, 0),# East Asian ideograph
- 0x22693e: (0x7a47, 0),# East Asian ideograph
- 0x22693f: (0x7a44, 0),# East Asian ideograph
- 0x274e5c: (0x77fe, 0),# East Asian ideograph
- 0x6f5d47: (0xd671, 0),# Korean hangul
- 0x216944: (0x513a, 0),# East Asian ideograph
- 0x213c36: (0x5de2, 0),# East Asian ideograph
- 0x696946: (0x8630, 0),# East Asian ideograph
- 0x216947: (0x5139, 0),# East Asian ideograph
- 0x216948: (0x513b, 0),# East Asian ideograph
- 0x213c37: (0x5de5, 0),# East Asian ideograph
- 0x22694d: (0x7a5f, 0),# East Asian ideograph
- 0x22694f: (0x7a60, 0),# East Asian ideograph
- 0x216951: (0x5159, 0),# East Asian ideograph
- 0x216952: (0x515b, 0),# East Asian ideograph
- 0x213663: (0x55aa, 0),# East Asian ideograph
- 0x29454e: (0x9545, 0),# East Asian ideograph
- 0x216955: (0x515d, 0),# East Asian ideograph
- 0x216956: (0x515e, 0),# East Asian ideograph
- 0x225838: (0x737e, 0),# East Asian ideograph
- 0x216958: (0x515f, 0),# East Asian ideograph
- 0x216959: (0x5161, 0),# East Asian ideograph
- 0x69695b: (0x86ab, 0),# East Asian ideograph
- 0x21695c: (0x5163, 0),# East Asian ideograph
- 0x6f4f35: (0xb838, 0),# Korean hangul
- 0x274e5d: (0x783a, 0),# East Asian ideograph
- 0x22695f: (0x7a70, 0),# East Asian ideograph
- 0x6f5d48: (0xd683, 0),# Korean hangul
- 0x696962: (0x86ef, 0),# East Asian ideograph
- 0x213c3b: (0x5deb, 0),# East Asian ideograph
- 0x226966: (0x7a75, 0),# East Asian ideograph
- 0x216967: (0x5182, 0),# East Asian ideograph
- 0x216969: (0x5184, 0),# East Asian ideograph
- 0x22696b: (0x7a80, 0),# East Asian ideograph
- 0x21696e: (0x518f, 0),# East Asian ideograph
- 0x213c3d: (0x5df1, 0),# East Asian ideograph
- 0x216970: (0x5194, 0),# East Asian ideograph
- 0x216971: (0x5193, 0),# East Asian ideograph
- 0x2e403d: (0x6ac1, 0),# East Asian ideograph
- 0x216975: (0x5196, 0),# East Asian ideograph
- 0x226978: (0x7a8a, 0),# East Asian ideograph
- 0x22697a: (0x7a94, 0),# East Asian ideograph
- 0x21697b: (0x51a1, 0),# East Asian ideograph
- 0x21697c: (0x51a3, 0),# East Asian ideograph
- 0x22697e: (0x68a5, 0),# East Asian ideograph
- 0x213c40: (0x5df4, 0),# East Asian ideograph
- 0x223c41: (0x6832, 0),# East Asian ideograph
- 0x213c42: (0x5dfd, 0),# East Asian ideograph
- 0x275b28: (0x8e0c, 0),# East Asian ideograph
- 0x213c43: (0x5dfe, 0),# East Asian ideograph
- 0x275e3f: (0x94ce, 0),# East Asian ideograph
- 0x213c44: (0x5e02, 0),# East Asian ideograph
- 0x6f5471: (0xc510, 0),# Korean hangul
- 0x29565d: (0x9c82, 0),# East Asian ideograph
- 0x217c45: (0x5ab7, 0),# East Asian ideograph
- 0x2d4440: (0x6822, 0),# East Asian ideograph
- 0x223c47: (0x682b, 0),# East Asian ideograph
- 0x294551: (0x9517, 0),# East Asian ideograph
- 0x223c48: (0x682d, 0),# East Asian ideograph
- 0x6f493e: (0xacaf, 0),# Korean hangul
- 0x235c3a: (0x9ddf, 0),# East Asian ideograph
- 0x233c49: (0x8f57, 0),# East Asian ideograph
- 0x6f5d4b: (0xd68d, 0),# Korean hangul
- 0x213c4a: (0x5e16, 0),# East Asian ideograph
- 0x334f71: (0x54b2, 0),# East Asian ideograph
- 0x213c4b: (0x5e15, 0),# East Asian ideograph
- 0x275f6d: (0x972d, 0),# East Asian ideograph
- 0x213c4c: (0x5e1b, 0),# East Asian ideograph
- 0x4b3321: (0x5185, 0),# East Asian ideograph
- 0x233c4d: (0x8f5c, 0),# East Asian ideograph
- 0x213c4e: (0x5e1d, 0),# East Asian ideograph
- 0x284f7d: (0x704f, 0),# East Asian ideograph
- 0x29426f: (0x94bd, 0),# East Asian ideograph
- 0x223c4f: (0x6844, 0),# East Asian ideograph
- 0x4b5e5d: (0x95d4, 0),# East Asian ideograph
- 0x224730: (0x6c78, 0),# East Asian ideograph
- 0x6f5379: (0xc2a8, 0),# Korean hangul
- 0x217c50: (0x5aba, 0),# East Asian ideograph
- 0x275f6e: (0x96f3, 0),# East Asian ideograph
- 0x213c51: (0x5e2b, 0),# East Asian ideograph
- 0x6f5b26: (0xd131, 0),# Korean hangul
- 0x213668: (0x55ae, 0),# East Asian ideograph
- 0x232635: (0x8598, 0),# East Asian ideograph
- 0x213c52: (0x5e33, 0),# East Asian ideograph
- 0x233c53: (0x8f5d, 0),# East Asian ideograph
- 0x6f5d4d: (0xd6a1, 0),# Korean hangul
- 0x224731: (0x6c74, 0),# East Asian ideograph
- 0x213c55: (0x5e37, 0),# East Asian ideograph
- 0x275f6f: (0x7075, 0),# East Asian ideograph
- 0x213c56: (0x5e45, 0),# East Asian ideograph
- 0x6f4c41: (0xb1e8, 0),# Korean hangul
- 0x275b2c: (0x8e8f, 0),# East Asian ideograph
- 0x213226: (0x5000, 0),# East Asian ideograph
- 0x223c58: (0x6834, 0),# East Asian ideograph
- 0x334f37: (0x5ee9, 0),# East Asian ideograph
- 0x695d36: (0x6b1f, 0),# East Asian ideograph
- 0x223c59: (0x6812, 0),# East Asian ideograph
- 0x224732: (0x6c86, 0),# East Asian ideograph
- 0x213c5a: (0x5e5b, 0),# East Asian ideograph
- 0x216a22: (0x51aa, 0),# East Asian ideograph
- 0x216a23: (0x51ab, 0),# East Asian ideograph
- 0x6f4c42: (0xb1fd, 0),# Korean hangul
- 0x216a26: (0x51b1, 0),# East Asian ideograph
- 0x29233c: (0x836d, 0),# East Asian ideograph
- 0x226a28: (0x7aa3, 0),# East Asian ideograph
- 0x213227: (0x4fee, 0),# East Asian ideograph
- 0x226a2b: (0x7a9e, 0),# East Asian ideograph
- 0x226a2c: (0x7aa7, 0),# East Asian ideograph
- 0x226a2e: (0x7aa8, 0),# East Asian ideograph
- 0x46284c: (0x5ed0, 0),# East Asian ideograph
- 0x226a31: (0x7aac, 0),# East Asian ideograph
- 0x6f5d4f: (0xd6c4, 0),# Korean hangul
- 0x216a35: (0x51bc, 0),# East Asian ideograph
- 0x226a36: (0x7ab3, 0),# East Asian ideograph
- 0x226a3a: (0x7abd, 0),# East Asian ideograph
- 0x2d3c5f: (0x6a66, 0),# East Asian ideograph
- 0x226a3c: (0x7ab6, 0),# East Asian ideograph
- 0x226a3d: (0x7ab8, 0),# East Asian ideograph
- 0x226a3e: (0x7ab5, 0),# East Asian ideograph
- 0x226a3f: (0x7abb, 0),# East Asian ideograph
- 0x213c60: (0x5e5f, 0),# East Asian ideograph
- 0x6f4c43: (0xb204, 0),# Korean hangul
- 0x216a43: (0x51ca, 0),# East Asian ideograph
- 0x216a46: (0x51c7, 0),# East Asian ideograph
- 0x213c61: (0x5e6b, 0),# East Asian ideograph
- 0x226a49: (0x7acd, 0),# East Asian ideograph
- 0x226a4b: (0x7acf, 0),# East Asian ideograph
- 0x216a4e: (0x51d1, 0),# East Asian ideograph
- 0x216a4f: (0x51d0, 0),# East Asian ideograph
- 0x287271: (0x7ea4, 0),# East Asian ideograph (duplicate simplified)
- 0x226a51: (0x7ad3, 0),# East Asian ideograph
- 0x226a52: (0x7ad4, 0),# East Asian ideograph
- 0x216a54: (0x51d3, 0),# East Asian ideograph
- 0x226a55: (0x7ada, 0),# East Asian ideograph
- 0x226a5a: (0x7ae1, 0),# East Asian ideograph
- 0x226a5e: (0x7ae6, 0),# East Asian ideograph
- 0x233c65: (0x8fa4, 0),# East Asian ideograph
- 0x277d48: (0x5ad2, 0),# East Asian ideograph
- 0x696a61: (0x88c3, 0),# East Asian ideograph
- 0x21765b: (0x57dd, 0),# East Asian ideograph
- 0x216a63: (0x51d9, 0),# East Asian ideograph
- 0x226a66: (0x7aeb, 0),# East Asian ideograph
- 0x216a68: (0x51e2, 0),# East Asian ideograph
- 0x226a6b: (0x7af0, 0),# East Asian ideograph
- 0x696a6d: (0x8904, 0),# East Asian ideograph
- 0x6f5d51: (0xd6c8, 0),# Korean hangul
- 0x213c68: (0x5e7b, 0),# East Asian ideograph
- 0x216a73: (0x5160, 0),# East Asian ideograph
- 0x226a76: (0x7af5, 0),# East Asian ideograph
- 0x213c69: (0x5e7c, 0),# East Asian ideograph
- 0x216a78: (0x51f5, 0),# East Asian ideograph
- 0x216a79: (0x51f7, 0),# East Asian ideograph
- 0x226a7c: (0x7afe, 0),# East Asian ideograph
- 0x2d3c6a: (0x51fc, 0),# East Asian ideograph
- 0x273c6b: (0x51e0, 0),# East Asian ideograph
- 0x4b4d56: (0x8846, 0),# East Asian ideograph
- 0x2d5554: (0x855a, 0),# East Asian ideograph
- 0x213c6c: (0x5e8f, 0),# East Asian ideograph
- 0x6f5d52: (0xd6cc, 0),# Korean hangul
- 0x233c6d: (0x8fb7, 0),# East Asian ideograph
- 0x295222: (0x98e8, 0),# East Asian ideograph
- 0x234b35: (0x96a9, 0),# East Asian ideograph
- 0x227333: (0x7e50, 0),# East Asian ideograph
- 0x6f4c46: (0xb20b, 0),# Korean hangul
- 0x275b31: (0x8eaf, 0),# East Asian ideograph
- 0x213c70: (0x5e97, 0),# East Asian ideograph
- 0x22326a: (0x63f9, 0),# East Asian ideograph
- 0x223c71: (0x689b, 0),# East Asian ideograph
- 0x6f5d53: (0xd6d1, 0),# Korean hangul
- 0x213c72: (0x5e9c, 0),# East Asian ideograph
- 0x29344d: (0x8c2e, 0),# East Asian ideograph
- 0x6f5972: (0xce7c, 0),# Korean hangul
- 0x223c74: (0x68b6, 0),# East Asian ideograph
- 0x6f4c47: (0xb20c, 0),# Korean hangul
- 0x275b32: (0x8f66, 0),# East Asian ideograph
- 0x213c75: (0x5ea6, 0),# East Asian ideograph
- 0x223c76: (0x6882, 0),# East Asian ideograph
- 0x226721: (0x7951, 0),# East Asian ideograph
- 0x6f5d54: (0xd6d4, 0),# Korean hangul
- 0x273c77: (0x5750, 0),# East Asian ideograph
- 0x23595c: (0x9c6f, 0),# East Asian ideograph
- 0x234b37: (0x96ae, 0),# East Asian ideograph
- 0x226723: (0x7954, 0),# East Asian ideograph
- 0x28232b: (0x5c66, 0),# East Asian ideograph
- 0x232724: (0x8624, 0),# East Asian ideograph
- 0x223c7a: (0x6890, 0),# East Asian ideograph
- 0x4b4d59: (0x775b, 0),# East Asian ideograph (variant of 214D59 which maps to 775B)
- 0x2f317d: (0x8a7e, 0),# East Asian ideograph
- 0x213c7b: (0x5eb6, 0),# East Asian ideograph
- 0x6f5d55: (0xd6d7, 0),# Korean hangul
- 0x275d67: (0x94dd, 0),# East Asian ideograph
- 0x217c7c: (0x5aeb, 0),# East Asian ideograph
- 0x2d462c: (0x6b7a, 0),# East Asian ideograph
- 0x224739: (0x6c67, 0),# East Asian ideograph
- 0x233c7d: (0x8fcd, 0),# East Asian ideograph
- 0x4b5a23: (0x621d, 0),# East Asian ideograph
- 0x213c7e: (0x5ec1, 0),# East Asian ideograph
- 0x2d4756: (0x6f94, 0),# East Asian ideograph
- 0x275b34: (0x519b, 0),# East Asian ideograph
- 0x4b5c47: (0x9059, 0),# East Asian ideograph
- 0x22672a: (0x7967, 0),# East Asian ideograph
- 0x235c45: (0x9dd6, 0),# East Asian ideograph
- 0x6f4866: (0xac10, 0),# Korean hangul
- 0x6f5b27: (0xd134, 0),# Korean hangul
- 0x4c6266: (0x778b, 0),# East Asian ideograph
- 0x22672d: (0x796b, 0),# East Asian ideograph
- 0x2d6222: (0x9c0c, 0),# East Asian ideograph
- 0x6f4c4a: (0xb215, 0),# Korean hangul
- 0x275b35: (0x8f68, 0),# East Asian ideograph
- 0x6f4f38: (0xb85c, 0),# Korean hangul
- 0x33476f: (0x6d44, 0),# East Asian ideograph
- 0x6f5d57: (0xd6e4, 0),# Korean hangul
- 0x216b24: (0x5213, 0),# East Asian ideograph
- 0x216b26: (0x5216, 0),# East Asian ideograph
- 0x226b27: (0x7b39, 0),# East Asian ideograph
- 0x22473b: (0x6c84, 0),# East Asian ideograph
- 0x216b2a: (0x521c, 0),# East Asian ideograph
- 0x226b2d: (0x7b0f, 0),# East Asian ideograph
- 0x226b2e: (0x7b08, 0),# East Asian ideograph
- 0x275f79: (0x9765, 0),# East Asian ideograph
- 0x6f4c4b: (0xb217, 0),# Korean hangul
- 0x226b33: (0x7b0a, 0),# East Asian ideograph
- 0x29455e: (0x94e1, 0),# East Asian ideograph
- 0x226b35: (0x7b35, 0),# East Asian ideograph
- 0x226b36: (0x7b25, 0),# East Asian ideograph
- 0x216b37: (0x5232, 0),# East Asian ideograph
- 0x226b39: (0x7b38, 0),# East Asian ideograph
- 0x226b3b: (0x7b3b, 0),# East Asian ideograph
- 0x216b3e: (0x5244, 0),# East Asian ideograph
- 0x226b3f: (0x7b24, 0),# East Asian ideograph
- 0x226b40: (0x7b33, 0),# East Asian ideograph
- 0x226b42: (0x7b2a, 0),# East Asian ideograph
- 0x216b43: (0x5249, 0),# East Asian ideograph
- 0x226b44: (0x7b18, 0),# East Asian ideograph
- 0x282736: (0x5e0f, 0),# East Asian ideograph
- 0x226b47: (0x7b31, 0),# East Asian ideograph
- 0x234b3b: (0x96b0, 0),# East Asian ideograph
- 0x226b4a: (0x7b2b, 0),# East Asian ideograph
- 0x216b4b: (0x525a, 0),# East Asian ideograph
- 0x216b4c: (0x5252, 0),# East Asian ideograph
- 0x226b4d: (0x7b1f, 0),# East Asian ideograph
- 0x213b36: (0x5be9, 0),# East Asian ideograph
- 0x216b50: (0x525f, 0),# East Asian ideograph
- 0x226b52: (0x7b4a, 0),# East Asian ideograph
- 0x226b53: (0x7b59, 0),# East Asian ideograph (not in Unicode)
- 0x226b54: (0x7b04, 0),# East Asian ideograph (variant of 2E6B54 which maps to 7B04)
- 0x226b55: (0x7b47, 0),# East Asian ideograph
- 0x216739: (0x5048, 0),# East Asian ideograph
- 0x226b59: (0x7b58, 0),# East Asian ideograph
- 0x226b5b: (0x7b6c, 0),# East Asian ideograph
- 0x696b5c: (0x8ada, 0),# East Asian ideograph
- 0x216b5e: (0x5268, 0),# East Asian ideograph
- 0x216b5f: (0x7b9a, 0),# East Asian ideograph
- 0x226b60: (0x7b48, 0),# East Asian ideograph
- 0x226b61: (0x7b45, 0),# East Asian ideograph
- 0x226b62: (0x7b4c, 0),# East Asian ideograph
- 0x226b63: (0x7b4e, 0),# East Asian ideograph
- 0x234b3c: (0x96b2, 0),# East Asian ideograph
- 0x226b68: (0x7b66, 0),# East Asian ideograph
- 0x706b6a: (0x8159, 0),# East Asian ideograph
- 0x216b6b: (0x5278, 0),# East Asian ideograph
- 0x226b6c: (0x7b64, 0),# East Asian ideograph
- 0x226b6e: (0x7b69, 0),# East Asian ideograph
- 0x275b38: (0x8f6f, 0),# East Asian ideograph
- 0x226b70: (0x7b6d, 0),# East Asian ideograph
- 0x226b74: (0x7b62, 0),# East Asian ideograph
- 0x226b75: (0x7b6e, 0),# East Asian ideograph
- 0x226b76: (0x7b74, 0),# East Asian ideograph
- 0x233a30: (0x8e50, 0),# East Asian ideograph
- 0x216b79: (0x528c, 0),# East Asian ideograph
- 0x216b7a: (0x528a, 0),# East Asian ideograph
- 0x226b7b: (0x7b6f, 0),# East Asian ideograph
- 0x216b7c: (0x5290, 0),# East Asian ideograph
- 0x226b7e: (0x7b65, 0),# East Asian ideograph
- 0x4b3a2f: (0x805f, 0),# East Asian ideograph
- 0x275b39: (0x8f6d, 0),# East Asian ideograph
- 0x6f4867: (0xac11, 0),# Korean hangul
- 0x27457a: (0x6b20, 0),# East Asian ideograph (duplicate simplified)
- 0x222969: (0x5f54, 0),# East Asian ideograph
- 0x4b3c53: (0x5e2f, 0),# East Asian ideograph
- 0x234b3e: (0x96b3, 0),# East Asian ideograph
- 0x4c6564: (0x78d9, 0),# East Asian ideograph
- 0x282747: (0x5e3b, 0),# East Asian ideograph
- 0x22584c: (0x7393, 0),# East Asian ideograph
- 0x6f4f39: (0xb85d, 0),# Korean hangul
- 0x2f5d5c: (0x730a, 0),# East Asian ideograph
- 0x294944: (0x9603, 0),# East Asian ideograph
- 0x22674a: (0x7998, 0),# East Asian ideograph
- 0x33306c: (0x8b90, 0),# East Asian ideograph
- 0x21674b: (0x505f, 0),# East Asian ideograph
- 0x273d65: (0x540e, 0),# East Asian ideograph
- 0x6f4c50: (0xb25c, 0),# Korean hangul
- 0x27583b: (0x8ba6, 0),# East Asian ideograph
- 0x213235: (0x5074, 0),# East Asian ideograph
- 0x22674d: (0x7999, 0),# East Asian ideograph
- 0x22674e: (0x7995, 0),# East Asian ideograph
- 0x6f5d5d: (0xd711, 0),# Korean hangul
- 0x226750: (0x7996, 0),# East Asian ideograph
- 0x333051: (0x8cb3, 0),# East Asian ideograph
- 0x2d6229: (0x9c53, 0),# East Asian ideograph
- 0x6f4c51: (0xb260, 0),# Korean hangul
- 0x275b3c: (0x8f76, 0),# East Asian ideograph
- 0x294564: (0x9536, 0),# East Asian ideograph
- 0x292752: (0x830f, 0),# East Asian ideograph
- 0x233a34: (0x8e5c, 0),# East Asian ideograph
- 0x6f5a29: (0xcef5, 0),# Korean hangul
- 0x6f5d5e: (0xd718, 0),# Korean hangul
- 0x274a30: (0x70db, 0),# East Asian ideograph
- 0x292577: (0x8297, 0),# East Asian ideograph
- 0x4b4a62: (0x72a0, 0),# East Asian ideograph
- 0x273d67: (0x5f84, 0),# East Asian ideograph
- 0x6f4c52: (0xb268, 0),# Korean hangul
- 0x275b3d: (0x8f83, 0),# East Asian ideograph
- 0x217669: (0x5819, 0),# East Asian ideograph
- 0x223636: (0x6549, 0),# East Asian ideograph
- 0x2d5561: (0x76d6, 0),# East Asian ideograph
- 0x6f5d5f: (0xd719, 0),# Korean hangul
- 0x274a31: (0x707f, 0),# East Asian ideograph
- 0x293459: (0x8c2f, 0),# East Asian ideograph
- 0x284e30: (0x6e11, 0),# East Asian ideograph
- 0x29584b: (0x9cbd, 0),# East Asian ideograph
- 0x6f584a: (0xca0b, 0),# Korean hangul
- 0x34715a: (0x7e1a, 0),# East Asian ideograph
- 0x216c21: (0x5293, 0),# East Asian ideograph
- 0x6f4c53: (0xb269, 0),# Korean hangul
- 0x275b3e: (0x8f7c, 0),# East Asian ideograph
- 0x226c26: (0x7b71, 0),# East Asian ideograph
- 0x226c27: (0x7b70, 0),# East Asian ideograph
- 0x216c29: (0x5298, 0),# East Asian ideograph
- 0x235c4f: (0x9de9, 0),# East Asian ideograph
- 0x216c2b: (0x529a, 0),# East Asian ideograph
- 0x216c2c: (0x5299, 0),# East Asian ideograph
- 0x226c2d: (0x7b9c, 0),# East Asian ideograph
- 0x216c2e: (0x52a6, 0),# East Asian ideograph
- 0x22275d: (0x5e68, 0),# East Asian ideograph
- 0x212a2b: (0xe8d9, 0),# EACC component character
- 0x216c31: (0x52ad, 0),# East Asian ideograph
- 0x226c33: (0x7b92, 0),# East Asian ideograph
- 0x226c34: (0x7b91, 0),# East Asian ideograph
- 0x226c35: (0x7b90, 0),# East Asian ideograph
- 0x216c37: (0x52bb, 0),# East Asian ideograph
- 0x226c38: (0x7ba3, 0),# East Asian ideograph
- 0x226c3a: (0x7b8d, 0),# East Asian ideograph
- 0x28275f: (0x5e31, 0),# East Asian ideograph
- 0x216c3c: (0x52ca, 0),# East Asian ideograph
- 0x216c3d: (0x52cd, 0),# East Asian ideograph
- 0x2e6c3e: (0x7b59, 0),# East Asian ideograph
- 0x2d622c: (0x9f08, 0),# East Asian ideograph
- 0x216c40: (0x52d0, 0),# East Asian ideograph
- 0x226c41: (0x7b85, 0),# East Asian ideograph
- 0x706c42: (0x70bb, 0),# East Asian ideograph
- 0x226c43: (0x7b8e, 0),# East Asian ideograph
- 0x226c44: (0x7b98, 0),# East Asian ideograph
- 0x213239: (0x504c, 0),# East Asian ideograph
- 0x226c46: (0x7b86, 0),# East Asian ideograph
- 0x226c48: (0x7b99, 0),# East Asian ideograph
- 0x6f4f3a: (0xb860, 0),# Korean hangul
- 0x216c4c: (0x52e3, 0),# East Asian ideograph
- 0x216c4e: (0x52e1, 0),# East Asian ideograph
- 0x6f5d61: (0xd720, 0),# Korean hangul
- 0x216c50: (0x55e7, 0),# East Asian ideograph
- 0x226c52: (0x7bb2, 0),# East Asian ideograph
- 0x216c53: (0x52e9, 0),# East Asian ideograph
- 0x6f5623: (0xc648, 0),# Korean hangul
- 0x226c58: (0x7bcb, 0),# East Asian ideograph
- 0x226c59: (0x7bb8, 0),# East Asian ideograph
- 0x226c5a: (0x7bcf, 0),# East Asian ideograph
- 0x226c5c: (0x7bd0, 0),# East Asian ideograph
- 0x216c5e: (0x52f7, 0),# East Asian ideograph
- 0x292765: (0x82c8, 0),# East Asian ideograph
- 0x226c60: (0x7bbe, 0),# East Asian ideograph
- 0x216c61: (0x52f9, 0),# East Asian ideograph
- 0x216c62: (0x52fa, 0),# East Asian ideograph
- 0x216c64: (0x52fc, 0),# East Asian ideograph
- 0x216c69: (0x5307, 0),# East Asian ideograph
- 0x216c6a: (0x5303, 0),# East Asian ideograph
- 0x216c6b: (0x5306, 0),# East Asian ideograph (not in Unicode)
- 0x6f5d62: (0xd728, 0),# Korean hangul
- 0x216c6e: (0x530a, 0),# East Asian ideograph
- 0x226c6f: (0x7bcc, 0),# East Asian ideograph
- 0x216560: (0x4f80, 0),# East Asian ideograph
- 0x216c77: (0x5311, 0),# East Asian ideograph
- 0x213f6a: (0x6221, 0),# East Asian ideograph
- 0x6f5975: (0xce87, 0),# Korean hangul
- 0x216c7b: (0x6706, 0),# East Asian ideograph
- 0x234767: (0x93f5, 0),# East Asian ideograph
- 0x21323b: (0x500f, 0),# East Asian ideograph
- 0x343e38: (0x7bda, 0),# East Asian ideograph
- 0x4b6167: (0x95d8, 0),# East Asian ideograph
- 0x6f5d63: (0xd729, 0),# Korean hangul
- 0x6f5532: (0xc571, 0),# Korean hangul
- 0x216561: (0x4f74, 0),# East Asian ideograph
- 0x4b5a31: (0x8cce, 0),# East Asian ideograph
- 0x6f4c57: (0xb290, 0),# Korean hangul
- 0x275b42: (0x8f84, 0),# East Asian ideograph
- 0x333e7d: (0x7652, 0),# East Asian ideograph
- 0x4b4925: (0x6fb3, 0),# East Asian ideograph (variant of 214925 which maps to 6FB3)
- 0x226771: (0x79a8, 0),# East Asian ideograph
- 0x225a7e: (0x7488, 0),# East Asian ideograph
- 0x6f5921: (0xcc29, 0),# Korean hangul
- 0x692577: (0x309b, 0),# Katakana-hiragana voiced sound mark
- 0x224b26: (0x6e31, 0),# East Asian ideograph
- 0x6f5a3e: (0xcf55, 0),# Korean hangul
- 0x6f4c58: (0xb291, 0),# Korean hangul
- 0x275b43: (0x8f7b, 0),# East Asian ideograph
- 0x226775: (0x79b0, 0),# East Asian ideograph
- 0x233a3b: (0x8e67, 0),# East Asian ideograph
- 0x275221: (0x7eed, 0),# East Asian ideograph
- 0x6f5922: (0xcc2c, 0),# Korean hangul
- 0x215222: (0x7e93, 0),# East Asian ideograph
- 0x234b48: (0x96b9, 0),# East Asian ideograph
- 0x4d445b: (0x9306, 0),# East Asian ideograph (variant of 23445B which maps to 9306)
- 0x275223: (0x7ea4, 0),# East Asian ideograph
- 0x275224: (0x7f06, 0),# East Asian ideograph
- 0x21323e: (0x50a2, 0),# East Asian ideograph
- 0x6f4f3b: (0xb864, 0),# Korean hangul
- 0x2d334f: (0x5202, 0),# East Asian ideograph
- 0x21677b: (0x50ca, 0),# East Asian ideograph
- 0x4b4b2c: (0x731f, 0),# East Asian ideograph
- 0x213933: (0x5944, 0),# East Asian ideograph
- 0x27677c: (0x4f1b, 0),# East Asian ideograph
- 0x4c6022: (0x7596, 0),# East Asian ideograph
- 0x225227: (0x7139, 0),# East Asian ideograph
- 0x4b3c5e: (0x5e64, 0),# East Asian ideograph
- 0x234b49: (0x96bc, 0),# East Asian ideograph
- 0x215228: (0x7f3d, 0),# East Asian ideograph
- 0x6f4c5a: (0xb298, 0),# Korean hangul
- 0x275b45: (0x8f87, 0),# East Asian ideograph
- 0x215229: (0x7f44, 0),# East Asian ideograph
- 0x22363e: (0x6554, 0),# East Asian ideograph
- 0x23522b: (0x995f, 0),# East Asian ideograph
- 0x6f5924: (0xcc2f, 0),# Korean hangul
- 0x22522c: (0x713b, 0),# East Asian ideograph
- 0x516122: (0x9988, 0),# East Asian ideograph
- 0x6f522d: (0xbe90, 0),# Korean hangul
- 0x6f4c5b: (0xb299, 0),# Korean hangul
- 0x22522e: (0x711c, 0),# East Asian ideograph
- 0x213240: (0x5099, 0),# East Asian ideograph
- 0x23522f: (0x9997, 0),# East Asian ideograph
- 0x225b59: (0x74a0, 0),# East Asian ideograph
- 0x4b6168: (0x9599, 0),# East Asian ideograph
- 0x235230: (0x9998, 0),# East Asian ideograph
- 0x226d22: (0x7bdd, 0),# East Asian ideograph
- 0x216d23: (0x531a, 0),# East Asian ideograph
- 0x226d24: (0x7be5, 0),# East Asian ideograph
- 0x216d25: (0x531f, 0),# East Asian ideograph
- 0x215231: (0x7f69, 0),# East Asian ideograph
- 0x226d29: (0x7be8, 0),# East Asian ideograph
- 0x277267: (0x5452, 0),# East Asian ideograph
- 0x225232: (0x713d, 0),# East Asian ideograph
- 0x226d2e: (0x7bf9, 0),# East Asian ideograph
- 0x226d2f: (0x7bd4, 0),# East Asian ideograph
- 0x6f4c5c: (0xb2a0, 0),# Korean hangul
- 0x226d32: (0x7bdf, 0),# East Asian ideograph
- 0x275233: (0x7f5a, 0),# East Asian ideograph
- 0x226d35: (0x7bd8, 0),# East Asian ideograph
- 0x216d36: (0x5335, 0),# East Asian ideograph
- 0x226d37: (0x7bea, 0),# Unrelated variant of EACC 3A6A7C which maps to 7BEA
- 0x213c2d: (0x5dbc, 0),# East Asian ideograph
- 0x275234: (0x9a82, 0),# East Asian ideograph
- 0x216d3a: (0x5338, 0),# East Asian ideograph
- 0x226d3b: (0x7c06, 0),# East Asian ideograph
- 0x226d3e: (0x7bf0, 0),# East Asian ideograph
- 0x275d6b: (0x952d, 0),# East Asian ideograph
- 0x696d40: (0x8ec5, 0),# East Asian ideograph
- 0x226d41: (0x7c0f, 0),# East Asian ideograph
- 0x216d42: (0x534d, 0),# East Asian ideograph
- 0x6f5926: (0xcc38, 0),# Korean hangul
- 0x706d45: (0x783c, 0),# East Asian ideograph
- 0x226d46: (0x7c0b, 0),# East Asian ideograph
- 0x222534: (0x5d74, 0),# East Asian ideograph
- 0x275237: (0x7f57, 0),# East Asian ideograph
- 0x216d4c: (0x5363, 0),# East Asian ideograph
- 0x2d6235: (0x9d76, 0),# East Asian ideograph
- 0x216d4e: (0x5365, 0),# East Asian ideograph (not in Unicode)
- 0x226d4f: (0x7bf4, 0),# East Asian ideograph
- 0x215238: (0x7f88, 0),# East Asian ideograph
- 0x216d53: (0x536c, 0),# East Asian ideograph
- 0x226d54: (0x7bf3, 0),# East Asian ideograph
- 0x216d57: (0x5372, 0),# East Asian ideograph
- 0x216d58: (0x537a, 0),# East Asian ideograph
- 0x4b492b: (0x6feb, 0),# East Asian ideograph
- 0x226d5a: (0x7c09, 0),# East Asian ideograph
- 0x226d5b: (0x7c03, 0),# East Asian ideograph
- 0x226d5c: (0x7bfc, 0),# East Asian ideograph
- 0x216d5d: (0x5380, 0),# East Asian ideograph
- 0x226d5f: (0x7c1c, 0),# East Asian ideograph
- 0x226d61: (0x7c26, 0),# East Asian ideograph
- 0x226d62: (0x7c28, 0),# East Asian ideograph
- 0x22523b: (0x7129, 0),# East Asian ideograph
- 0x216d64: (0x538e, 0),# East Asian ideograph
- 0x233d3f: (0x9004, 0),# East Asian ideograph
- 0x226d66: (0x7c1f, 0),# East Asian ideograph
- 0x216d67: (0x5394, 0),# East Asian ideograph
- 0x226d68: (0x7c2f, 0),# East Asian ideograph
- 0x23523c: (0x99a1, 0),# East Asian ideograph
- 0x6f4c5e: (0xb2a5, 0),# Korean hangul
- 0x216d6d: (0x5399, 0),# East Asian ideograph
- 0x6f523d: (0xbf18, 0),# Korean hangul
- 0x285f48: (0x7617, 0),# East Asian ideograph
- 0x213243: (0x5096, 0),# East Asian ideograph
- 0x216d74: (0x8652, 0),# East Asian ideograph
- 0x226d75: (0x7c30, 0),# East Asian ideograph
- 0x6f4f3c: (0xb86c, 0),# Korean hangul
- 0x216d7a: (0x53a4, 0),# East Asian ideograph
- 0x216d7b: (0x53ab, 0),# East Asian ideograph
- 0x2d5941: (0x5629, 0),# East Asian ideograph
- 0x6f5928: (0xcc3b, 0),# Korean hangul
- 0x2d5240: (0x7fa1, 0),# East Asian ideograph
- 0x235241: (0x99a9, 0),# East Asian ideograph
- 0x6f4c5f: (0xb2a6, 0),# Korean hangul
- 0x215242: (0x7fa9, 0),# East Asian ideograph
- 0x283d30: (0x67a7, 0),# East Asian ideograph
- 0x235c5b: (0x9df8, 0),# East Asian ideograph
- 0x225243: (0x712e, 0),# East Asian ideograph
- 0x6f5244: (0xbf51, 0),# Korean hangul
- 0x6f5929: (0xcc3c, 0),# Korean hangul
- 0x226969: (0x7a78, 0),# East Asian ideograph
- 0x6f523b: (0xbf08, 0),# Korean hangul
- 0x28337b: (0x62a0, 0),# East Asian ideograph
- 0x6f4c60: (0xb2aa, 0),# Korean hangul
- 0x4b5247: (0x7fae, 0),# East Asian ideograph
- 0x334256: (0x6b5b, 0),# East Asian ideograph
- 0x22585d: (0x73a5, 0),# East Asian ideograph
- 0x235c5c: (0x9dfc, 0),# East Asian ideograph
- 0x225248: (0x7177, 0),# East Asian ideograph
- 0x233a43: (0x8e5d, 0),# East Asian ideograph
- 0x4b492e: (0x6e0b, 0),# East Asian ideograph
- 0x234e4c: (0x97cd, 0),# East Asian ideograph
- 0x2d7345: (0x56d3, 0),# East Asian ideograph
- 0x215249: (0x7fbf, 0),# East Asian ideograph
- 0x284e3e: (0x6cf6, 0),# East Asian ideograph
- 0x212a3a: (0xe8e7, 0),# EACC component character
- 0x2d524a: (0x7fc4, 0),# East Asian ideograph
- 0x39483b: (0x9061, 0),# East Asian ideograph
- 0x276029: (0x9791, 0),# East Asian ideograph
- 0x6f524b: (0xbfd0, 0),# Korean hangul
- 0x2e337b: (0x630e, 0),# East Asian ideograph
- 0x6f4c61: (0xb2ac, 0),# Korean hangul
- 0x6f524c: (0xbfd4, 0),# Korean hangul
- 0x27524d: (0x4e60, 0),# East Asian ideograph
- 0x233a44: (0x8e75, 0),# East Asian ideograph
- 0x23524e: (0x99bc, 0),# East Asian ideograph
- 0x293468: (0x8c35, 0),# East Asian ideograph
- 0x6f545a: (0xc410, 0),# Korean hangul
- 0x23524f: (0x99c3, 0),# East Asian ideograph
- 0x6f5250: (0xc058, 0),# Korean hangul
- 0x6f4c62: (0xb2c8, 0),# Korean hangul
- 0x275b4d: (0x8f91, 0),# East Asian ideograph
- 0x275251: (0x7fc6, 0),# East Asian ideograph
- 0x213247: (0x50b5, 0),# East Asian ideograph
- 0x4b4d73: (0x66b8, 0),# East Asian ideograph
- 0x225252: (0x7152, 0),# East Asian ideograph
- 0x235253: (0x99b9, 0),# East Asian ideograph
- 0x6f592c: (0xcc3f, 0),# Korean hangul
- 0x215254: (0x7fe9, 0),# East Asian ideograph
- 0x274d3a: (0x76cf, 0),# East Asian ideograph
- 0x225255: (0x715d, 0),# East Asian ideograph
- 0x6f4c63: (0xb2c9, 0),# Korean hangul
- 0x225256: (0x7141, 0),# East Asian ideograph
- 0x227636: (0x800f, 0),# East Asian ideograph
- 0x4b4931: (0x6e16, 0),# East Asian ideograph
- 0x4d3359: (0x56af, 0),# East Asian ideograph
- 0x275258: (0x7fd8, 0),# East Asian ideograph
- 0x21656e: (0x4f94, 0),# East Asian ideograph
- 0x284e41: (0x6f4b, 0),# East Asian ideograph
- 0x225259: (0x7175, 0),# East Asian ideograph
- 0x692546: (0x30c6, 0),# Katakana letter TE
- 0x22525a: (0x7173, 0),# East Asian ideograph
- 0x6f4c64: (0xb2cc, 0),# Korean hangul
- 0x275b4f: (0x8f96, 0),# East Asian ideograph
- 0x33525b: (0x71ff, 0),# East Asian ideograph
- 0x226e27: (0x7c35, 0),# East Asian ideograph
- 0x23347b: (0x8b7e, 0),# East Asian ideograph
- 0x21525c: (0x8001, 0),# East Asian ideograph
- 0x226e2a: (0x7c40, 0),# East Asian ideograph
- 0x2d5573: (0x83d4, 0),# East Asian ideograph
- 0x216e2c: (0x53b5, 0),# East Asian ideograph
- 0x216e2e: (0x53b9, 0),# East Asian ideograph
- 0x22525d: (0x715a, 0),# East Asian ideograph
- 0x226e30: (0x7c39, 0),# East Asian ideograph
- 0x6f592e: (0xcc45, 0),# Korean hangul
- 0x226e34: (0x7c3b, 0),# East Asian ideograph
- 0x226e35: (0x7c34, 0),# East Asian ideograph
- 0x6f5426: (0xc2e3, 0),# Korean hangul
- 0x226e3b: (0x7c42, 0),# East Asian ideograph
- 0x216e3e: (0x53d0, 0),# East Asian ideograph
- 0x70727d: (0x87a8, 0),# East Asian ideograph
- 0x275b50: (0x8f97, 0),# East Asian ideograph
- 0x225260: (0x714b, 0),# East Asian ideograph
- 0x4c6e42: (0x7c31, 0),# East Asian ideograph
- 0x21324a: (0x50be, 0),# East Asian ideograph
- 0x225862: (0x73a2, 0),# East Asian ideograph
- 0x226e46: (0x7c4e, 0),# East Asian ideograph
- 0x235261: (0x99d3, 0),# East Asian ideograph
- 0x216e48: (0x53da, 0),# East Asian ideograph
- 0x4d5574: (0x9b2e, 0),# East Asian ideograph
- 0x275e47: (0x94a5, 0),# East Asian ideograph
- 0x225262: (0x7147, 0),# East Asian ideograph
- 0x6f592f: (0xcc48, 0),# Korean hangul
- 0x235263: (0x99d4, 0),# East Asian ideograph
- 0x226e54: (0x7c5d, 0),# East Asian ideograph
- 0x226e56: (0x7c5c, 0),# East Asian ideograph
- 0x226e57: (0x7c5a, 0),# East Asian ideograph
- 0x226e58: (0x7c5b, 0),# East Asian ideograph
- 0x226e59: (0x7c59, 0),# East Asian ideograph
- 0x226e5b: (0x7c5e, 0),# East Asian ideograph
- 0x226e5c: (0x7c67, 0),# East Asian ideograph
- 0x6f4c66: (0xb2d8, 0),# Korean hangul
- 0x226e5e: (0x7c63, 0),# East Asian ideograph
- 0x235265: (0x99c9, 0),# East Asian ideograph
- 0x226e61: (0x7c68, 0),# East Asian ideograph
- 0x226e62: (0x7c65, 0),# East Asian ideograph
- 0x2d3132: (0x4ecf, 0),# East Asian ideograph
- 0x225266: (0x7171, 0),# East Asian ideograph
- 0x216e68: (0x5406, 0),# East Asian ideograph
- 0x286e69: (0x7c16, 0),# East Asian ideograph
- 0x225267: (0x715f, 0),# East Asian ideograph
- 0x216e6c: (0x544c, 0),# East Asian ideograph
- 0x216e6d: (0x5445, 0),# East Asian ideograph
- 0x226e6f: (0x7c6f, 0),# East Asian ideograph
- 0x216e70: (0x5432, 0),# East Asian ideograph
- 0x226970: (0x7a85, 0),# East Asian ideograph
- 0x226e75: (0x7c75, 0),# East Asian ideograph
- 0x216e76: (0x5421, 0),# East Asian ideograph
- 0x215269: (0x8033, 0),# East Asian ideograph
- 0x216e78: (0x5430, 0),# East Asian ideograph
- 0x226e79: (0x7c7e, 0),# East Asian ideograph
- 0x226e7a: (0x7c78, 0),# East Asian ideograph
- 0x6f4c67: (0xb2d9, 0),# Korean hangul
- 0x275b52: (0x6bc2, 0),# East Asian ideograph
- 0x226e7d: (0x7c7d, 0),# East Asian ideograph
- 0x27517e: (0x7f20, 0),# East Asian ideograph
- 0x692560: (0x30e0, 0),# Katakana letter MU
- 0x215022: (0x7b4f, 0),# East Asian ideograph
- 0x6f5323: (0xc11e, 0),# Korean hangul
- 0x225421: (0x71dd, 0),# East Asian ideograph
- 0x6f486c: (0xac16, 0),# Korean hangul
- 0x2d526c: (0x8ead, 0),# East Asian ideograph
- 0x274a46: (0x5899, 0),# East Asian ideograph
- 0x6f5931: (0xcc54, 0),# Korean hangul
- 0x225f5f: (0x7630, 0),# East Asian ideograph
- 0x6f5b2d: (0xd145, 0),# Korean hangul
- 0x22253f: (0x5d75, 0),# East Asian ideograph
- 0x234b57: (0x96d2, 0),# East Asian ideograph
- 0x69654f: (0x7e05, 0),# East Asian ideograph
- 0x4b526e: (0x8046, 0),# East Asian ideograph (variant of 21526E which maps to 8046)
- 0x6f4c68: (0xb2db, 0),# Korean hangul
- 0x27526f: (0x5723, 0),# East Asian ideograph
- 0x22763b: (0x801f, 0),# East Asian ideograph
- 0x225422: (0x71c0, 0),# East Asian ideograph
- 0x335821: (0x97c8, 0),# East Asian ideograph
- 0x275271: (0x95fb, 0),# East Asian ideograph
- 0x6f5932: (0xcc55, 0),# Korean hangul
- 0x6f5272: (0xc0d0, 0),# Korean hangul
- 0x2d446b: (0x6936, 0),# East Asian ideograph
- 0x2d6241: (0x9d5e, 0),# East Asian ideograph
- 0x21346a: (0x536f, 0),# East Asian ideograph
- 0x275b54: (0x8f99, 0),# East Asian ideograph
- 0x235274: (0x99ec, 0),# East Asian ideograph
- 0x2e625f: (0x77c1, 0),# East Asian ideograph
- 0x275275: (0x8038, 0),# East Asian ideograph
- 0x4b4937: (0x56a0, 0),# East Asian ideograph
- 0x225276: (0x7172, 0),# East Asian ideograph
- 0x223d21: (0x6872, 0),# East Asian ideograph
- 0x6f5933: (0xcc58, 0),# Korean hangul
- 0x29486f: (0x9569, 0),# East Asian ideograph
- 0x275277: (0x8054, 0),# East Asian ideograph
- 0x223d22: (0x689c, 0),# East Asian ideograph
- 0x275278: (0x804c, 0),# East Asian ideograph
- 0x6f5979: (0xce94, 0),# Korean hangul
- 0x2f3c2d: (0x8f3c, 0),# East Asian ideograph
- 0x6f4c6a: (0xb2e2, 0),# Korean hangul
- 0x275b55: (0x8f6c, 0),# East Asian ideograph
- 0x215279: (0x8076, 0),# East Asian ideograph
- 0x2e7d24: (0x83f0, 0),# East Asian ideograph
- 0x225867: (0x73b6, 0),# East Asian ideograph
- 0x235d66: (0x9e9e, 0),# East Asian ideograph
- 0x21527a: (0x807e, 0),# East Asian ideograph
- 0x213d25: (0x5ed3, 0),# East Asian ideograph
- 0x275e48: (0x92ae, 0),# East Asian ideograph
- 0x235823: (0x9bd5, 0),# East Asian ideograph
- 0x21527b: (0x807d, 0),# East Asian ideograph
- 0x217d26: (0x5aff, 0),# East Asian ideograph
- 0x287061: (0x7ec0, 0),# East Asian ideograph
- 0x23527c: (0x99ea, 0),# East Asian ideograph
- 0x213d27: (0x5ee2, 0),# East Asian ideograph
- 0x284668: (0x6c29, 0),# East Asian ideograph
- 0x6f527d: (0xc0f7, 0),# Korean hangul
- 0x333d28: (0x53a8, 0),# East Asian ideograph
- 0x275b56: (0x8f9a, 0),# East Asian ideograph
- 0x6f527e: (0xc0f9, 0),# Korean hangul
- 0x2d3d29: (0x53ae, 0),# East Asian ideograph
- 0x33417e: (0x629e, 0),# East Asian ideograph
- 0x213d2a: (0x5ee3, 0),# East Asian ideograph (variant of 4B3D2A which maps to 5EE3)
- 0x287279: (0x7f25, 0),# East Asian ideograph
- 0x294568: (0x9518, 0),# East Asian ideograph
- 0x213d2b: (0x5edf, 0),# East Asian ideograph
- 0x287062: (0x7ec1, 0),# East Asian ideograph
- 0x6f545c: (0xc430, 0),# Korean hangul
- 0x226975: (0x7a86, 0),# East Asian ideograph
- 0x22475c: (0x6ca0, 0),# East Asian ideograph
- 0x216133: (0x99d0, 0),# East Asian ideograph
- 0x226532: (0x78b6, 0),# East Asian ideograph
- 0x213d2d: (0x9f90, 0),# East Asian ideograph
- 0x275b57: (0x8f7f, 0),# East Asian ideograph
- 0x223d2e: (0x68a9, 0),# East Asian ideograph
- 0x213d2f: (0x5ef3, 0),# East Asian ideograph
- 0x212a30: (0xe8de, 0),# EACC component character
- 0x6f5d79: (0xd789, 0),# Korean hangul
- 0x226f21: (0x7c81, 0),# East Asian ideograph
- 0x216577: (0x4f90, 0),# East Asian ideograph
- 0x216f24: (0x542a, 0),# East Asian ideograph
- 0x33314c: (0x5fa0, 0),# East Asian ideograph
- 0x216f26: (0x5422, 0),# East Asian ideograph
- 0x274d3c: (0x5c3d, 0),# East Asian ideograph
- 0x226f28: (0x7c8e, 0),# East Asian ideograph
- 0x226f29: (0x7c91, 0),# East Asian ideograph
- 0x226f2a: (0x7c83, 0),# East Asian ideograph
- 0x226f2c: (0x7c8d, 0),# East Asian ideograph
- 0x213d32: (0x5ef6, 0),# East Asian ideograph
- 0x216f2e: (0x545f, 0),# East Asian ideograph
- 0x216f2f: (0x549c, 0),# East Asian ideograph
- 0x27393f: (0x5941, 0),# East Asian ideograph
- 0x223d33: (0x68a0, 0),# East Asian ideograph
- 0x213252: (0x50d6, 0),# East Asian ideograph
- 0x216f35: (0x5488, 0),# East Asian ideograph
- 0x216f37: (0x547f, 0),# East Asian ideograph
- 0x216f39: (0x5482, 0),# East Asian ideograph
- 0x226f3a: (0x7c99, 0),# East Asian ideograph
- 0x226f3b: (0x7c98, 0),# East Asian ideograph
- 0x6f5d7a: (0xd78c, 0),# Korean hangul
- 0x226f3e: (0x7c9c, 0),# East Asian ideograph
- 0x226f40: (0x7c95, 0),# East Asian ideograph
- 0x6f5937: (0xcc70, 0),# Korean hangul
- 0x226f42: (0x7ca7, 0),# East Asian ideograph
- 0x226f43: (0x7ca2, 0),# East Asian ideograph
- 0x226f45: (0x7c9e, 0),# East Asian ideograph
- 0x226f46: (0x7ca9, 0),# East Asian ideograph
- 0x6f5939: (0xcc98, 0),# Korean hangul
- 0x226f48: (0x7ca8, 0),# East Asian ideograph
- 0x226f49: (0x7ca1, 0),# East Asian ideograph
- 0x226f4a: (0x7cac, 0),# East Asian ideograph
- 0x216f4b: (0x5474, 0),# East Asian ideograph
- 0x226f4c: (0x7ca6, 0),# East Asian ideograph
- 0x6f4c6e: (0xb2e5, 0),# Korean hangul
- 0x216f52: (0x5466, 0),# East Asian ideograph
- 0x216f53: (0x5464, 0),# East Asian ideograph
- 0x226f54: (0x7cb2, 0),# East Asian ideograph
- 0x216f55: (0x54a4, 0),# East Asian ideograph
- 0x213d39: (0x5f0f, 0),# East Asian ideograph
- 0x226f58: (0x7cbb, 0),# East Asian ideograph
- 0x226f59: (0x7cbf, 0),# East Asian ideograph
- 0x216f5a: (0x54ad, 0),# East Asian ideograph
- 0x216f5b: (0x54ba, 0),# East Asian ideograph
- 0x216f5c: (0x54cf, 0),# East Asian ideograph
- 0x696f5d: (0x9596, 0),# East Asian ideograph
- 0x226f5e: (0x7cba, 0),# East Asian ideograph
- 0x226f5f: (0x7cbc, 0),# East Asian ideograph
- 0x216f60: (0x54a5, 0),# East Asian ideograph
- 0x216f63: (0x54a7, 0),# East Asian ideograph
- 0x226f64: (0x7cc2, 0),# East Asian ideograph
- 0x216f66: (0x54a2, 0),# East Asian ideograph
- 0x216f67: (0x5472, 0),# East Asian ideograph
- 0x216f68: (0x5470, 0),# East Asian ideograph
- 0x216f69: (0x54bc, 0),# East Asian ideograph
- 0x216f6a: (0x54b7, 0),# East Asian ideograph
- 0x216f6b: (0x54de, 0),# East Asian ideograph
- 0x216f6c: (0x54d6, 0),# East Asian ideograph
- 0x226f6d: (0x7ccc, 0),# East Asian ideograph
- 0x226f6f: (0x7cc9, 0),# East Asian ideograph
- 0x226f71: (0x7cd2, 0),# East Asian ideograph
- 0x6f4a22: (0xadc0, 0),# Korean hangul
- 0x29442d: (0x94a1, 0),# East Asian ideograph
- 0x216f74: (0x54c6, 0),# East Asian ideograph
- 0x225429: (0x71cb, 0),# East Asian ideograph
- 0x226f77: (0x7ce1, 0),# East Asian ideograph
- 0x6f5d7c: (0xd798, 0),# Korean hangul
- 0x33467a: (0x6ca1, 0),# East Asian ideograph
- 0x223d3f: (0x6877, 0),# East Asian ideograph
- 0x216f7c: (0x54e2, 0),# East Asian ideograph
- 0x216f7d: (0x5507, 0),# East Asian ideograph
- 0x233d40: (0x9008, 0),# East Asian ideograph
- 0x233d59: (0x9036, 0),# East Asian ideograph
- 0x277d74: (0x5a08, 0),# East Asian ideograph
- 0x333d42: (0x7d43, 0),# East Asian ideograph
- 0x213a63: (0x5b7f, 0),# East Asian ideograph
- 0x213d43: (0x5f27, 0),# East Asian ideograph
- 0x2d3366: (0x5234, 0),# East Asian ideograph
- 0x6f5d7d: (0xd799, 0),# Korean hangul
- 0x223d44: (0x688e, 0),# East Asian ideograph
- 0x6f593a: (0xcc99, 0),# Korean hangul
- 0x233d45: (0x900b, 0),# East Asian ideograph
- 0x277030: (0x5457, 0),# East Asian ideograph
- 0x697023: (0x9666, 0),# East Asian ideograph
- 0x6f4a64: (0xaedc, 0),# Korean hangul
- 0x213d47: (0x5f35, 0),# East Asian ideograph
- 0x235c6d: (0x9dee, 0),# East Asian ideograph
- 0x233d48: (0x900c, 0),# East Asian ideograph
- 0x6f5d7e: (0xd79b, 0),# Korean hangul
- 0x213d49: (0x5f3c, 0),# East Asian ideograph
- 0x6f593b: (0xcc9c, 0),# Korean hangul
- 0x394944: (0x6b12, 0),# East Asian ideograph
- 0x6f5b2f: (0xd14d, 0),# Korean hangul
- 0x224762: (0x6ceb, 0),# East Asian ideograph
- 0x4b3b52: (0x8132, 0),# East Asian ideograph
- 0x2d4474: (0x690d, 0),# East Asian ideograph (variant of 214474 which maps to 690D)
- 0x273d4b: (0x5f39, 0),# East Asian ideograph
- 0x2d4031: (0x64a6, 0),# East Asian ideograph
- 0x6f5a32: (0xcf10, 0),# Korean hangul
- 0x213d4c: (0x5f4c, 0),# East Asian ideograph
- 0x213d4d: (0x5f4e, 0),# East Asian ideograph
- 0x4b4940: (0x6f45, 0),# East Asian ideograph
- 0x23582b: (0x9bf1, 0),# East Asian ideograph
- 0x6f5d25: (0xd5e5, 0),# Korean hangul
- 0x213d4e: (0x5f57, 0),# East Asian ideograph
- 0x6f5030: (0xba65, 0),# Korean hangul
- 0x224763: (0x6cee, 0),# East Asian ideograph
- 0x226539: (0x78b7, 0),# East Asian ideograph
- 0x213d50: (0x5f5d, 0),# East Asian ideograph
- 0x6f4c73: (0xb2ed, 0),# Korean hangul
- 0x223d51: (0x6917, 0),# East Asian ideograph
- 0x225870: (0x73c8, 0),# East Asian ideograph
- 0x217247: (0x5642, 0),# East Asian ideograph
- 0x4b6247: (0x9d2c, 0),# East Asian ideograph
- 0x217d52: (0x5b2c, 0),# East Asian ideograph
- 0x23582c: (0x9be1, 0),# East Asian ideograph
- 0x213d53: (0x5f65, 0),# East Asian ideograph
- 0x28706a: (0x7ed0, 0),# East Asian ideograph
- 0x294871: (0x954a, 0),# East Asian ideograph
- 0x6f5d78: (0xd788, 0),# Korean hangul
- 0x224764: (0x6cc0, 0),# East Asian ideograph
- 0x6f597b: (0xcea0, 0),# Korean hangul
- 0x275b5f: (0x529e, 0),# East Asian ideograph
- 0x285f5e: (0x7618, 0),# East Asian ideograph
- 0x223d56: (0x690b, 0),# East Asian ideograph
- 0x213259: (0x5100, 0),# East Asian ideograph
- 0x233d57: (0x9034, 0),# East Asian ideograph
- 0x455122: (0x7d0d, 0),# East Asian ideograph
- 0x23582d: (0x9bdb, 0),# East Asian ideograph
- 0x233d58: (0x902f, 0),# East Asian ideograph
- 0x6f593e: (0xcca9, 0),# Korean hangul
- 0x223d59: (0x6904, 0),# East Asian ideograph
- 0x6f5b45: (0xd230, 0),# Korean hangul
- 0x234b64: (0x96df, 0),# East Asian ideograph
- 0x6f4c75: (0xb2f3, 0),# Korean hangul
- 0x275b60: (0x8f9e, 0),# East Asian ideograph
- 0x227022: (0x7cdd, 0),# East Asian ideograph
- 0x217023: (0x5517, 0),# East Asian ideograph
- 0x217024: (0x54fd, 0),# East Asian ideograph
- 0x217025: (0x54e7, 0),# East Asian ideograph
- 0x217027: (0x54f3, 0),# East Asian ideograph
- 0x227028: (0x7ced, 0),# East Asian ideograph
- 0x213d5c: (0x5f80, 0),# East Asian ideograph
- 0x21702a: (0x54e4, 0),# East Asian ideograph
- 0x21702b: (0x550a, 0),# East Asian ideograph
- 0x21702d: (0x54ff, 0),# East Asian ideograph
- 0x21702e: (0x5518, 0),# East Asian ideograph
- 0x223d5d: (0x6929, 0),# East Asian ideograph
- 0x227030: (0x7cf2, 0),# East Asian ideograph
- 0x6f593f: (0xccab, 0),# Korean hangul
- 0x217032: (0x54ef, 0),# East Asian ideograph
- 0x217034: (0x5508, 0),# East Asian ideograph
- 0x227035: (0x7cf4, 0),# East Asian ideograph
- 0x217038: (0x54f6, 0),# East Asian ideograph
- 0x227039: (0x7cf6, 0),# East Asian ideograph
- 0x6f4c76: (0xb2f4, 0),# Korean hangul
- 0x21703e: (0x550e, 0),# East Asian ideograph
- 0x275b61: (0x8fa9, 0),# East Asian ideograph
- 0x4b5c50: (0x9045, 0),# East Asian ideograph
- 0x692563: (0x30e3, 0),# Katakana letter small YA
- 0x21325b: (0x50fb, 0),# East Asian ideograph
- 0x217044: (0x5523, 0),# East Asian ideograph
- 0x227045: (0x7d08, 0),# East Asian ideograph
- 0x217046: (0x550f, 0),# East Asian ideograph
- 0x217047: (0x5511, 0),# East Asian ideograph
- 0x23582f: (0x9be2, 0),# East Asian ideograph
- 0x22704a: (0x7d13, 0),# East Asian ideograph
- 0x21704b: (0x5575, 0),# East Asian ideograph
- 0x21704d: (0x5573, 0),# East Asian ideograph
- 0x21704e: (0x554c, 0),# East Asian ideograph
- 0x21704f: (0x5576, 0),# East Asian ideograph
- 0x217050: (0x554d, 0),# East Asian ideograph
- 0x217051: (0x555a, 0),# East Asian ideograph
- 0x227052: (0x7d1d, 0),# East Asian ideograph
- 0x217053: (0x553c, 0),# East Asian ideograph
- 0x217055: (0x5550, 0),# East Asian ideograph
- 0x217057: (0x5539, 0),# East Asian ideograph
- 0x217058: (0x5548, 0),# East Asian ideograph
- 0x217059: (0x552d, 0),# East Asian ideograph
- 0x21705a: (0x5551, 0),# East Asian ideograph
- 0x6f4c77: (0xb2f5, 0),# Korean hangul
- 0x21705d: (0x552a, 0),# East Asian ideograph
- 0x213d65: (0x5f8c, 0),# East Asian ideograph
- 0x217060: (0x5562, 0),# East Asian ideograph
- 0x217061: (0x5536, 0),# East Asian ideograph
- 0x227062: (0x7d32, 0),# East Asian ideograph
- 0x217064: (0x5549, 0),# East Asian ideograph
- 0x227065: (0x7d31, 0),# East Asian ideograph
- 0x335830: (0x658d, 0),# East Asian ideograph
- 0x227068: (0x7d45, 0),# East Asian ideograph
- 0x21706a: (0x5540, 0),# East Asian ideograph
- 0x21706b: (0x5535, 0),# East Asian ideograph
- 0x22706c: (0x7d29, 0),# East Asian ideograph
- 0x6f5941: (0xccb4, 0),# Korean hangul
- 0x22706f: (0x7d41, 0),# East Asian ideograph
- 0x217070: (0x5545, 0),# East Asian ideograph
- 0x227071: (0x7d3e, 0),# East Asian ideograph
- 0x234b67: (0x96dd, 0),# East Asian ideograph
- 0x213d69: (0x5f98, 0),# East Asian ideograph
- 0x217079: (0x553f, 0),# East Asian ideograph
- 0x22707a: (0x7d5c, 0),# East Asian ideograph
- 0x21707b: (0x5541, 0),# East Asian ideograph
- 0x22707c: (0x7d53, 0),# East Asian ideograph
- 0x21707d: (0x5565, 0),# East Asian ideograph
- 0x22707e: (0x7d5a, 0),# East Asian ideograph
- 0x275779: (0x891b, 0),# East Asian ideograph
- 0x225432: (0x71eb, 0),# East Asian ideograph
- 0x235831: (0x9bf0, 0),# East Asian ideograph
- 0x213d6c: (0x5f9e, 0),# East Asian ideograph
- 0x6f5942: (0xccb5, 0),# Korean hangul
- 0x213f27: (0x614d, 0),# East Asian ideograph
- 0x213b3f: (0x5c08, 0),# East Asian ideograph
- 0x283d6e: (0x67a8, 0),# East Asian ideograph
- 0x2d6251: (0x5869, 0),# East Asian ideograph
- 0x6f4c79: (0xb2f9, 0),# Korean hangul
- 0x275b64: (0x519c, 0),# East Asian ideograph
- 0x273d6f: (0x590d, 0),# East Asian ideograph
- 0x21325e: (0x5102, 0),# East Asian ideograph
- 0x6f4a24: (0xadc8, 0),# Korean hangul
- 0x4b4947: (0x7ac3, 0),# East Asian ideograph
- 0x6f5943: (0xccb8, 0),# Korean hangul
- 0x213f28: (0x614b, 0),# East Asian ideograph
- 0x213d73: (0x5fae, 0),# East Asian ideograph
- 0x2d6252: (0x78b1, 0),# East Asian ideograph
- 0x295a59: (0x9e38, 0),# East Asian ideograph
- 0x277328: (0x54dc, 0),# East Asian ideograph
- 0x213d74: (0x5fb9, 0),# East Asian ideograph
- 0x21325f: (0x510d, 0),# East Asian ideograph
- 0x285b21: (0x740f, 0),# East Asian ideograph
- 0x233a5d: (0x8e94, 0),# East Asian ideograph
- 0x213d75: (0x5fb7, 0),# East Asian ideograph
- 0x275d71: (0x94a2, 0),# East Asian ideograph
- 0x217d76: (0x5b4b, 0),# East Asian ideograph
- 0x6f5023: (0xba3c, 0),# Korean hangul
- 0x226822: (0x79b8, 0),# East Asian ideograph
- 0x223d78: (0x68fd, 0),# East Asian ideograph
- 0x226823: (0x79ba, 0),# East Asian ideograph
- 0x213d79: (0x5fc5, 0),# East Asian ideograph
- 0x4b7e6a: (0x5bc3, 0),# East Asian ideograph (variant of 217E6A which maps to 5BC3)
- 0x223f3e: (0x696f, 0),# East Asian ideograph
- 0x212a33: (0xe8e0, 0),# EACC component character
- 0x223d7b: (0x68f3, 0),# East Asian ideograph
- 0x6f5945: (0xccc7, 0),# Korean hangul
- 0x6f5b31: (0xd154, 0),# Korean hangul
- 0x213d7c: (0x5fcc, 0),# East Asian ideograph
- 0x27493c: (0x6fd2, 0),# East Asian ideograph
- 0x213261: (0x5109, 0),# East Asian ideograph
- 0x233a5f: (0x8e92, 0),# East Asian ideograph
- 0x29282a: (0x8539, 0),# East Asian ideograph
- 0x29494d: (0x9609, 0),# East Asian ideograph
- 0x6f5342: (0xc190, 0),# Korean hangul
- 0x696d3f: (0x8ebe, 0),# East Asian ideograph
- 0x213f2b: (0x6134, 0),# East Asian ideograph
- 0x334729: (0x6e2b, 0),# East Asian ideograph
- 0x6f4c7d: (0xb300, 0),# Korean hangul
- 0x2f2a64: (0x87b5, 0),# East Asian ideograph
- 0x22682e: (0x79d5, 0),# East Asian ideograph
- 0x287431: (0x575b, 0),# East Asian ideograph (duplicate simplified)
- 0x275e60: (0x960e, 0),# East Asian ideograph
- 0x233a60: (0x8e93, 0),# East Asian ideograph
- 0x22282f: (0x5ea4, 0),# East Asian ideograph
- 0x227122: (0x7d70, 0),# East Asian ideograph
- 0x217123: (0x5591, 0),# East Asian ideograph
- 0x697124: (0x98aa, 0),# East Asian ideograph
- 0x217125: (0x5577, 0),# East Asian ideograph
- 0x217126: (0x55a8, 0),# East Asian ideograph
- 0x217127: (0x55ad, 0),# East Asian ideograph
- 0x227129: (0x7d67, 0),# East Asian ideograph
- 0x21712a: (0x5605, 0),# East Asian ideograph
- 0x22712b: (0x7d6a, 0),# East Asian ideograph
- 0x22712c: (0x7d6b, 0),# East Asian ideograph
- 0x216832: (0x50d4, 0),# East Asian ideograph
- 0x21712f: (0x5586, 0),# East Asian ideograph
- 0x227130: (0x7d73, 0),# East Asian ideograph
- 0x227134: (0x7d4e, 0),# East Asian ideograph
- 0x217136: (0x55b4, 0),# East Asian ideograph
- 0x227137: (0x7d8b, 0),# East Asian ideograph
- 0x227139: (0x7d88, 0),# East Asian ideograph
- 0x22713b: (0x7d85, 0),# East Asian ideograph
- 0x2d514a: (0x6dd6, 0),# East Asian ideograph
- 0x22713d: (0x7d8e, 0),# East Asian ideograph
- 0x216835: (0x50e6, 0),# East Asian ideograph
- 0x6f5948: (0xcd08, 0),# Korean hangul
- 0x227142: (0x7d7f, 0),# East Asian ideograph
- 0x217143: (0x55e2, 0),# East Asian ideograph (variant of 2D7143 which maps to 55E2)
- 0x227144: (0x7d86, 0),# East Asian ideograph
- 0x217145: (0x558e, 0),# East Asian ideograph
- 0x217147: (0x55b5, 0),# East Asian ideograph
- 0x227148: (0x7d8d, 0),# East Asian ideograph
- 0x217149: (0x558f, 0),# East Asian ideograph
- 0x21714b: (0x5559, 0),# East Asian ideograph
- 0x22714d: (0x7d83, 0),# East Asian ideograph
- 0x22714f: (0x7d7d, 0),# East Asian ideograph
- 0x217150: (0x55a4, 0),# East Asian ideograph
- 0x217151: (0x5592, 0),# East Asian ideograph
- 0x217152: (0x5599, 0),# East Asian ideograph
- 0x227154: (0x7d7b, 0),# East Asian ideograph
- 0x233a62: (0x8e90, 0),# East Asian ideograph
- 0x217156: (0x55f4, 0),# East Asian ideograph
- 0x227158: (0x7d7a, 0),# East Asian ideograph
- 0x227159: (0x7d96, 0),# East Asian ideograph
- 0x22715a: (0x7d5b, 0),# East Asian ideograph
- 0x22715b: (0x7d8c, 0),# East Asian ideograph
- 0x21715c: (0x55de, 0),# East Asian ideograph
- 0x21715d: (0x55d9, 0),# East Asian ideograph
- 0x21715e: (0x55c3, 0),# East Asian ideograph
- 0x21715f: (0x55c9, 0),# East Asian ideograph
- 0x217161: (0x55ca, 0),# East Asian ideograph
- 0x227162: (0x7dae, 0),# East Asian ideograph
- 0x21683b: (0x50ce, 0),# East Asian ideograph
- 0x217164: (0x55d4, 0),# East Asian ideograph
- 0x217165: (0x55c4, 0),# East Asian ideograph
- 0x227167: (0x7dcb, 0),# East Asian ideograph
- 0x227169: (0x7daa, 0),# East Asian ideograph
- 0x22716a: (0x7dce, 0),# East Asian ideograph
- 0x22716b: (0x7dc9, 0),# East Asian ideograph
- 0x692565: (0x30e5, 0),# Katakana letter small YU
- 0x22716e: (0x7dc5, 0),# East Asian ideograph
- 0x22716f: (0x7da6, 0),# East Asian ideograph
- 0x217170: (0x55d2, 0),# East Asian ideograph
- 0x223664: (0x6585, 0),# East Asian ideograph
- 0x277c36: (0x59ab, 0),# East Asian ideograph
- 0x22543a: (0x71f5, 0),# East Asian ideograph
- 0x227174: (0x7dc4, 0),# East Asian ideograph
- 0x217175: (0x55e5, 0),# East Asian ideograph
- 0x6f4871: (0xac1c, 0),# Korean hangul
- 0x217177: (0x55d6, 0),# East Asian ideograph
- 0x227178: (0x7dac, 0),# East Asian ideograph
- 0x217179: (0x55f2, 0),# East Asian ideograph
- 0x6f5c77: (0xd590, 0),# Korean hangul
- 0x2e717c: (0x7d63, 0),# East Asian ideograph
- 0x22717d: (0x7db9, 0),# East Asian ideograph
- 0x21717e: (0x5627, 0),# East Asian ideograph
- 0x292840: (0x84e0, 0),# East Asian ideograph
- 0x235f5f: (0x9f37, 0),# East Asian ideograph
- 0x216841: (0x50f3, 0),# East Asian ideograph
- 0x216842: (0x50e8, 0),# East Asian ideograph
- 0x217255: (0x5635, 0),# East Asian ideograph
- 0x2d514d: (0x7d2c, 0),# East Asian ideograph
- 0x216844: (0x50f0, 0),# East Asian ideograph
- 0x6f594b: (0xcd10, 0),# Korean hangul
- 0x224772: (0x6cf5, 0),# East Asian ideograph
- 0x6f4e69: (0xb78d, 0),# Korean hangul
- 0x23307d: (0x89af, 0),# East Asian ideograph
- 0x295b35: (0x9e2b, 0),# East Asian ideograph
- 0x4b7874: (0x590a, 0),# East Asian ideograph
- 0x23284c: (0x8645, 0),# East Asian ideograph
- 0x6f4a26: (0xadd1, 0),# Korean hangul
- 0x22543d: (0x71f3, 0),# East Asian ideograph
- 0x234e53: (0x97d8, 0),# East Asian ideograph
- 0x69684d: (0x8422, 0),# East Asian ideograph
- 0x333623: (0x9f69, 0),# East Asian ideograph
- 0x225b61: (0x74b1, 0),# East Asian ideograph
- 0x6f553b: (0xc58c, 0),# Korean hangul
- 0x706d3f: (0x781c, 0),# East Asian ideograph
- 0x4b587a: (0x8acb, 0),# East Asian ideograph
- 0x6f7723: (0xe8ca, 0),# Korean hangul
- 0x213f32: (0x615d, 0),# East Asian ideograph
- 0x234730: (0x93e6, 0),# East Asian ideograph
- 0x222851: (0x5ecc, 0),# East Asian ideograph
- 0x6f594e: (0xcd1b, 0),# Korean hangul
- 0x284e62: (0x6f4d, 0),# East Asian ideograph
- 0x6f5732: (0xc7cc, 0),# Korean hangul
- 0x6f5328: (0xc126, 0),# Korean hangul
- 0x213f33: (0x6182, 0),# East Asian ideograph
- 0x285f6f: (0x7605, 0),# East Asian ideograph
- 0x6f4a3d: (0xae43, 0),# Korean hangul
- 0x6f4e25: (0xb545, 0),# Korean hangul
- 0x295f2b: (0x9f0d, 0),# East Asian ideograph
- 0x212a35: (0xe8e2, 0),# EACC component character
- 0x6f594f: (0xcd1d, 0),# Korean hangul
- 0x6f5b33: (0xd15d, 0),# Korean hangul
- 0x274621: (0x6b22, 0),# East Asian ideograph
- 0x232859: (0x864d, 0),# East Asian ideograph
- 0x224333: (0x6adf, 0),# East Asian ideograph
- 0x234732: (0x940b, 0),# East Asian ideograph
- 0x21797c: (0x5997, 0),# East Asian ideograph
- 0x23285a: (0x8653, 0),# East Asian ideograph
- 0x6f4e43: (0xb614, 0),# Korean hangul
- 0x227222: (0x7d9f, 0),# East Asian ideograph
- 0x217224: (0x55fb, 0),# East Asian ideograph
- 0x217225: (0x5612, 0),# East Asian ideograph
- 0x217227: (0x55f8, 0),# East Asian ideograph
- 0x217228: (0x560f, 0),# East Asian ideograph
- 0x227229: (0x7de1, 0),# East Asian ideograph
- 0x22722a: (0x7dd9, 0),# East Asian ideograph
- 0x22722b: (0x7de4, 0),# East Asian ideograph
- 0x6f4f44: (0xb8e9, 0),# Korean hangul
- 0x21722e: (0x561e, 0),# East Asian ideograph
- 0x217539: (0x5790, 0),# East Asian ideograph
- 0x227231: (0x7dd7, 0),# East Asian ideograph
- 0x295263: (0x9a75, 0),# East Asian ideograph
- 0x217234: (0x561c, 0),# East Asian ideograph
- 0x217235: (0x5610, 0),# East Asian ideograph
- 0x217236: (0x5601, 0),# East Asian ideograph
- 0x217238: (0x5613, 0),# East Asian ideograph
- 0x217239: (0x55f6, 0),# East Asian ideograph
- 0x22723a: (0x7e06, 0),# East Asian ideograph
- 0x21685f: (0x5105, 0),# East Asian ideograph
- 0x21723c: (0x5602, 0),# East Asian ideograph
- 0x22723e: (0x7de6, 0),# East Asian ideograph
- 0x697240: (0x9bb4, 0),# East Asian ideograph
- 0x217242: (0x561d, 0),# East Asian ideograph
- 0x27577c: (0x88c6, 0),# East Asian ideograph
- 0x217244: (0x55ff, 0),# East Asian ideograph
- 0x697245: (0x9bcf, 0),# East Asian ideograph
- 0x227246: (0x7ddc, 0),# East Asian ideograph
- 0x216861: (0x50fc, 0),# East Asian ideograph
- 0x217248: (0x564c, 0),# East Asian ideograph
- 0x227249: (0x7de5, 0),# East Asian ideograph
- 0x22724b: (0x7df5, 0),# East Asian ideograph
- 0x6f4e6a: (0xb78f, 0),# Korean hangul
- 0x295021: (0x98a2, 0),# East Asian ideograph
- 0x2e7328: (0x5fad, 0),# East Asian ideograph
- 0x227250: (0x7e17, 0),# East Asian ideograph
- 0x227251: (0x7e1e, 0),# East Asian ideograph
- 0x227252: (0x7e21, 0),# East Asian ideograph
- 0x227253: (0x7e0b, 0),# East Asian ideograph
- 0x224335: (0x6ade, 0),# East Asian ideograph
- 0x227256: (0x7e22, 0),# East Asian ideograph
- 0x217257: (0x5649, 0),# East Asian ideograph
- 0x217258: (0x5641, 0),# East Asian ideograph
- 0x2d5124: (0x5e0b, 0),# East Asian ideograph
- 0x22725b: (0x7e20, 0),# East Asian ideograph
- 0x21725c: (0x5658, 0),# East Asian ideograph
- 0x22725d: (0x7e1d, 0),# East Asian ideograph
- 0x21725e: (0x5654, 0),# East Asian ideograph
- 0x216865: (0x5106, 0),# East Asian ideograph
- 0x217260: (0x562a, 0),# East Asian ideograph
- 0x217261: (0x563d, 0),# East Asian ideograph
- 0x217264: (0x562c, 0),# East Asian ideograph
- 0x227265: (0x7e15, 0),# East Asian ideograph
- 0x6f5474: (0xc52c, 0),# Korean hangul
- 0x217267: (0x5638, 0),# East Asian ideograph
- 0x4d5154: (0x9942, 0),# East Asian ideograph
- 0x217269: (0x564d, 0),# East Asian ideograph
- 0x22726a: (0x7e0f, 0),# East Asian ideograph
- 0x21726b: (0x562b, 0),# East Asian ideograph
- 0x21726c: (0x564f, 0),# East Asian ideograph
- 0x22726d: (0x7e3b, 0),# East Asian ideograph
- 0x21726e: (0x5670, 0),# East Asian ideograph
- 0x21726f: (0x565f, 0),# East Asian ideograph
- 0x217270: (0x567c, 0),# East Asian ideograph
- 0x227271: (0x7e34, 0),# East Asian ideograph
- 0x227272: (0x7e2d, 0),# East Asian ideograph
- 0x227273: (0x7e2f, 0),# East Asian ideograph
- 0x227275: (0x7e36, 0),# East Asian ideograph
- 0x227277: (0x7e3a, 0),# East Asian ideograph
- 0x217278: (0x5676, 0),# East Asian ideograph
- 0x227279: (0x7e39, 0),# East Asian ideograph
- 0x21727a: (0x5666, 0),# East Asian ideograph
- 0x21727b: (0x5673, 0),# East Asian ideograph
- 0x21727c: (0x566d, 0),# East Asian ideograph
- 0x22727d: (0x7e44, 0),# East Asian ideograph
- 0x21727e: (0x5672, 0),# East Asian ideograph
- 0x33537d: (0x9ad5, 0),# East Asian ideograph
- 0x4b3e7e: (0x60a9, 0),# East Asian ideograph
- 0x6f5953: (0xcd94, 0),# Korean hangul
- 0x6f7729: (0xae0e, 0),# Korean hangul
- 0x224b30: (0x6e36, 0),# East Asian ideograph
- 0x2d6262: (0x9ec4, 0),# East Asian ideograph
- 0x2d4049: (0x67b4, 0),# East Asian ideograph
- 0x4b5c54: (0x8f9f, 0),# East Asian ideograph (duplicate simplified)
- 0x2e686f: (0x7a19, 0),# East Asian ideograph
- 0x6f4873: (0xac20, 0),# Korean hangul
- 0x33362a: (0x9b28, 0),# East Asian ideograph
- 0x216871: (0x5115, 0),# East Asian ideograph
- 0x6f5954: (0xcd95, 0),# Korean hangul
- 0x6f5b34: (0xd15f, 0),# Korean hangul
- 0x234b7a: (0x96f5, 0),# East Asian ideograph
- 0x6f4f45: (0xb8ec, 0),# Korean hangul
- 0x6f5031: (0xba67, 0),# Korean hangul
- 0x6f5955: (0xcd98, 0),# Korean hangul
- 0x215321: (0x8085, 0),# East Asian ideograph
- 0x6f772b: (0xae11, 0),# Korean hangul
- 0x213f3a: (0x615f, 0),# East Asian ideograph
- 0x6f5322: (0xc11d, 0),# Korean hangul
- 0x225323: (0x7192, 0),# East Asian ideograph
- 0x2d5e21: (0x9418, 0),# East Asian ideograph
- 0x29415c: (0x917e, 0),# East Asian ideograph
- 0x215324: (0x808b, 0),# East Asian ideograph
- 0x6f5325: (0xc123, 0),# Korean hangul
- 0x224539: (0x6bab, 0),# East Asian ideograph
- 0x6f5956: (0xcd9c, 0),# Korean hangul
- 0x695326: (0x54d8, 0),# East Asian ideograph
- 0x22477d: (0x6cc2, 0),# East Asian ideograph
- 0x23287c: (0x866f, 0),# East Asian ideograph
- 0x6f5327: (0xc125, 0),# Korean hangul
- 0x275e35: (0x9558, 0),# East Asian ideograph
- 0x2d404c: (0x6283, 0),# East Asian ideograph
- 0x6f4b6b: (0xb0e0, 0),# Korean hangul
- 0x275735: (0x8681, 0),# East Asian ideograph
- 0x6f4a28: (0xaddc, 0),# Korean hangul
- 0x235329: (0x99f8, 0),# East Asian ideograph
- 0x225447: (0x71e0, 0),# East Asian ideograph
- 0x23532a: (0x99f4, 0),# East Asian ideograph
- 0x6f5957: (0xcda4, 0),# Korean hangul
- 0x21532b: (0x8096, 0),# East Asian ideograph
- 0x276842: (0x507e, 0),# East Asian ideograph
- 0x274629: (0x5c81, 0),# East Asian ideograph
- 0x213f3c: (0x617e, 0),# East Asian ideograph
- 0x21532c: (0x80b2, 0),# East Asian ideograph
- 0x6f5235: (0xbed8, 0),# Korean hangul
- 0x6f532d: (0xc12f, 0),# Korean hangul
- 0x213273: (0x5147, 0),# East Asian ideograph
- 0x6f532e: (0xc130, 0),# Korean hangul
- 0x275d75: (0x9525, 0),# East Asian ideograph
- 0x216f43: (0x546b, 0),# East Asian ideograph
- 0x6f5958: (0xcda5, 0),# Korean hangul
- 0x215330: (0x80a2, 0),# East Asian ideograph
- 0x27462a: (0x5386, 0),# East Asian ideograph
- 0x217325: (0x5693, 0),# East Asian ideograph
- 0x227326: (0x7e3f, 0),# East Asian ideograph
- 0x215331: (0x80ab, 0),# East Asian ideograph
- 0x227328: (0x7e47, 0),# East Asian ideograph
- 0x225332: (0x7185, 0),# East Asian ideograph
- 0x22732f: (0x7e51, 0),# East Asian ideograph
- 0x217332: (0x56ba, 0),# East Asian ideograph
- 0x215333: (0x80af, 0),# East Asian ideograph
- 0x227334: (0x7e67, 0),# East Asian ideograph
- 0x217335: (0x5684, 0),# East Asian ideograph
- 0x217336: (0x5691, 0),# East Asian ideograph
- 0x227337: (0x7e56, 0),# East Asian ideograph
- 0x6f5334: (0xc140, 0),# Korean hangul
- 0x21733e: (0x569e, 0),# East Asian ideograph
- 0x6f5335: (0xc148, 0),# Korean hangul
- 0x27462b: (0x5f52, 0),# East Asian ideograph
- 0x217342: (0x569a, 0),# East Asian ideograph
- 0x213f3e: (0x61b2, 0),# East Asian ideograph
- 0x225336: (0x717c, 0),# East Asian ideograph
- 0x227348: (0x7e68, 0),# East Asian ideograph
- 0x227349: (0x7e6e, 0),# East Asian ideograph
- 0x21734b: (0x56ad, 0),# East Asian ideograph
- 0x21734c: (0x56a6, 0),# East Asian ideograph
- 0x22734e: (0x7e70, 0),# East Asian ideograph
- 0x6f4e66: (0xb780, 0),# Korean hangul
- 0x227351: (0x7e6f, 0),# East Asian ideograph
- 0x227352: (0x7e73, 0),# East Asian ideograph
- 0x217353: (0x56b2, 0),# East Asian ideograph
- 0x235849: (0x9c0a, 0),# East Asian ideograph
- 0x225339: (0x7198, 0),# East Asian ideograph
- 0x227358: (0x7e7b, 0),# East Asian ideograph
- 0x227359: (0x7e7e, 0),# East Asian ideograph
- 0x21735a: (0x56b3, 0),# East Asian ideograph
- 0x22735b: (0x7e81, 0),# East Asian ideograph
- 0x6f595a: (0xcda9, 0),# Korean hangul
- 0x22735d: (0x7e8a, 0),# East Asian ideograph
- 0x22735e: (0x7e87, 0),# East Asian ideograph
- 0x6f7730: (0xaf50, 0),# Korean hangul
- 0x227360: (0x7e88, 0),# East Asian ideograph
- 0x217362: (0x56cf, 0),# East Asian ideograph
- 0x4b533b: (0x695c, 0),# East Asian ideograph
- 0x227364: (0x7e86, 0),# East Asian ideograph
- 0x23473d: (0x93fb, 0),# East Asian ideograph
- 0x217367: (0x56cd, 0),# East Asian ideograph
- 0x22533c: (0x7197, 0),# East Asian ideograph
- 0x22736a: (0x7e91, 0),# East Asian ideograph
- 0x21736b: (0x56d7, 0),# East Asian ideograph
- 0x22736d: (0x7e94, 0),# East Asian ideograph
- 0x70736e: (0x7ba2, 0),# East Asian ideograph
- 0x23533d: (0x9a0f, 0),# East Asian ideograph
- 0x227370: (0x7e9b, 0),# East Asian ideograph
- 0x227371: (0x7e9a, 0),# East Asian ideograph
- 0x22544b: (0x720c, 0),# East Asian ideograph
- 0x227373: (0x7e99, 0),# East Asian ideograph
- 0x227374: (0x7e98, 0),# East Asian ideograph
- 0x22533e: (0x71b5, 0),# East Asian ideograph
- 0x217376: (0x56ee, 0),# East Asian ideograph
- 0x217377: (0x56e7, 0),# East Asian ideograph
- 0x217379: (0x56fb, 0),# East Asian ideograph
- 0x22533f: (0x71a9, 0),# East Asian ideograph
- 0x21737e: (0x56f7, 0),# East Asian ideograph
- 0x222569: (0x5d97, 0),# East Asian ideograph
- 0x295340: (0x9a92, 0),# East Asian ideograph
- 0x4b3853: (0x586d, 0),# East Asian ideograph (not in Unicode)
- 0x4b5629: (0x85cd, 0),# East Asian ideograph
- 0x6f5341: (0xc18e, 0),# Korean hangul
- 0x6f4a29: (0xade0, 0),# Korean hangul
- 0x225342: (0x71a5, 0),# East Asian ideograph
- 0x334260: (0x89d4, 0),# East Asian ideograph
- 0x275e50: (0x95ed, 0),# East Asian ideograph
- 0x23584b: (0x9c08, 0),# East Asian ideograph
- 0x2e3936: (0x66cd, 0),# East Asian ideograph
- 0x6f595c: (0xcdc4, 0),# Korean hangul
- 0x235344: (0x9a04, 0),# East Asian ideograph
- 0x6f7732: (0xb060, 0),# Korean hangul
- 0x235345: (0x9a11, 0),# East Asian ideograph
- 0x275b7e: (0x8fde, 0),# East Asian ideograph
- 0x2d5e28: (0x93c1, 0),# East Asian ideograph
- 0x213a6a: (0x5b8b, 0),# East Asian ideograph
- 0x235347: (0x9a05, 0),# East Asian ideograph
- 0x4b4874: (0x6ff3, 0),# East Asian ideograph
- 0x23584c: (0x9c14, 0),# East Asian ideograph
- 0x215348: (0x80fd, 0),# East Asian ideograph
- 0x6f5349: (0xc1a8, 0),# Korean hangul
- 0x705d5c: (0x8488, 0),# East Asian ideograph
- 0x6f7733: (0xb9c4, 0),# Korean hangul
- 0x224b32: (0x6e72, 0),# East Asian ideograph
- 0x6f5128: (0xbc88, 0),# Korean hangul
- 0x22655a: (0x78d8, 0),# East Asian ideograph
- 0x27534a: (0x8090, 0),# East Asian ideograph
- 0x226d4b: (0x7c0c, 0),# East Asian ideograph
- 0x334740: (0x6d1a, 0),# East Asian ideograph
- 0x4b562b: (0x8535, 0),# East Asian ideograph
- 0x2d534b: (0x80f7, 0),# East Asian ideograph
- 0x27573c: (0x86ce, 0),# East Asian ideograph
- 0x21534c: (0x810a, 0),# East Asian ideograph
- 0x23584d: (0x9c04, 0),# East Asian ideograph
- 0x23534d: (0x9a22, 0),# East Asian ideograph
- 0x6f595e: (0xcde8, 0),# Korean hangul
- 0x22534e: (0x71af, 0),# East Asian ideograph
- 0x6f5b36: (0xd161, 0),# Korean hangul
- 0x6f7734: (0xc54d, 0),# Korean hangul
- 0x23534f: (0x9a20, 0),# East Asian ideograph
- 0x6f5350: (0xc1e4, 0),# Korean hangul
- 0x21327a: (0x5152, 0),# East Asian ideograph
- 0x2d6134: (0x99de, 0),# East Asian ideograph
- 0x233a78: (0x8eb3, 0),# East Asian ideograph
- 0x4c6f7b: (0x7ce8, 0),# East Asian ideograph
- 0x235352: (0x9a27, 0),# East Asian ideograph
- 0x6f5343: (0xc194, 0),# Korean hangul
- 0x6f5353: (0xc1f1, 0),# Korean hangul
- 0x6f7735: (0xc54f, 0),# Korean hangul
- 0x213f44: (0x619a, 0),# East Asian ideograph
- 0x6f5354: (0xc1f3, 0),# Korean hangul
- 0x4c7c45: (0x82ae, 0),# East Asian ideograph
- 0x225355: (0x719a, 0),# East Asian ideograph
- 0x4d4832: (0x953f, 0),# East Asian ideograph
- 0x27573e: (0x8721, 0),# East Asian ideograph
- 0x22367a: (0x65a0, 0),# East Asian ideograph
- 0x223237: (0x63b1, 0),# East Asian ideograph
- 0x6f5629: (0xc65c, 0),# Korean hangul
- 0x225357: (0x71b3, 0),# East Asian ideograph
- 0x27407b: (0x5377, 0),# East Asian ideograph
- 0x275358: (0x80be, 0),# East Asian ideograph
- 0x213f45: (0x61a9, 0),# East Asian ideograph
- 0x215359: (0x8139, 0),# East Asian ideograph
- 0x2d416e: (0x6534, 0),# East Asian ideograph
- 0x23535a: (0x9a38, 0),# East Asian ideograph
- 0x217421: (0x56f9, 0),# East Asian ideograph
- 0x6f4a2a: (0xade4, 0),# Korean hangul
- 0x294435: (0x950a, 0),# East Asian ideograph
- 0x217424: (0x56ff, 0),# East Asian ideograph
- 0x227425: (0x7f43, 0),# East Asian ideograph
- 0x275e51: (0x95f5, 0),# East Asian ideograph
- 0x217427: (0x5705, 0),# East Asian ideograph
- 0x227428: (0x7f45, 0),# East Asian ideograph
- 0x217429: (0x5702, 0),# East Asian ideograph
- 0x22742b: (0x7f4b, 0),# East Asian ideograph
- 0x21742c: (0x570a, 0),# East Asian ideograph
- 0x21742d: (0x5709, 0),# East Asian ideograph
- 0x22742e: (0x7f4c, 0),# East Asian ideograph
- 0x22742f: (0x7f4d, 0),# East Asian ideograph
- 0x217430: (0x570c, 0),# East Asian ideograph
- 0x217431: (0x5715, 0),# East Asian ideograph
- 0x227432: (0x7f4f, 0),# East Asian ideograph
- 0x213f46: (0x6194, 0),# East Asian ideograph
- 0x27535e: (0x80a0, 0),# East Asian ideograph
- 0x224345: (0x6ae8, 0),# East Asian ideograph
- 0x217437: (0x571c, 0),# East Asian ideograph
- 0x4b423a: (0x64b9, 0),# East Asian ideograph
- 0x217439: (0x571d, 0),# East Asian ideograph
- 0x21743a: (0x571e, 0),# East Asian ideograph
- 0x6f535f: (0xc220, 0),# Korean hangul
- 0x22743e: (0x7f60, 0),# East Asian ideograph
- 0x22743f: (0x7f61, 0),# East Asian ideograph
- 0x235360: (0x9a2d, 0),# East Asian ideograph
- 0x217442: (0x572e, 0),# East Asian ideograph
- 0x227443: (0x7f5d, 0),# East Asian ideograph
- 0x227445: (0x7f5b, 0),# East Asian ideograph
- 0x235361: (0x9a35, 0),# East Asian ideograph
- 0x217448: (0x5738, 0),# East Asian ideograph
- 0x21744c: (0x572a, 0),# East Asian ideograph
- 0x215362: (0x816b, 0),# East Asian ideograph
- 0x6f4b43: (0xb05d, 0),# Korean hangul
- 0x227450: (0x7f65, 0),# East Asian ideograph
- 0x227451: (0x7f66, 0),# East Asian ideograph
- 0x213f47: (0x618a, 0),# East Asian ideograph
- 0x227453: (0x7f6d, 0),# East Asian ideograph
- 0x227454: (0x7f6b, 0),# East Asian ideograph
- 0x227455: (0x7f67, 0),# East Asian ideograph
- 0x227457: (0x7f68, 0),# East Asian ideograph
- 0x235364: (0x9a32, 0),# East Asian ideograph
- 0x21327e: (0x5165, 0),# East Asian ideograph
- 0x22745e: (0x7f71, 0),# East Asian ideograph
- 0x275365: (0x8111, 0),# East Asian ideograph
- 0x227460: (0x7f73, 0),# East Asian ideograph
- 0x227463: (0x7f76, 0),# East Asian ideograph
- 0x6f5022: (0xba39, 0),# Korean hangul
- 0x217465: (0x5745, 0),# East Asian ideograph
- 0x6f572f: (0xc7c1, 0),# Korean hangul
- 0x217468: (0x574b, 0),# East Asian ideograph
- 0x217469: (0x574c, 0),# East Asian ideograph
- 0x21746a: (0x573f, 0),# East Asian ideograph
- 0x215367: (0x818f, 0),# East Asian ideograph
- 0x22746c: (0x7f7d, 0),# East Asian ideograph
- 0x6f7739: (0xc61c, 0),# Korean hangul
- 0x217470: (0x5768, 0),# East Asian ideograph
- 0x6f5368: (0xc250, 0),# Korean hangul
- 0x227472: (0x7f86, 0),# East Asian ideograph
- 0x6f4f25: (0xb807, 0),# Korean hangul
- 0x217475: (0x578a, 0),# East Asian ideograph
- 0x225369: (0x71c7, 0),# East Asian ideograph
- 0x345175: (0x7162, 0),# East Asian ideograph
- 0x217479: (0x5774, 0),# East Asian ideograph
- 0x21747a: (0x5767, 0),# East Asian ideograph
- 0x22536a: (0x71b7, 0),# East Asian ideograph
- 0x22747e: (0x7f96, 0),# East Asian ideograph
- 0x6f536b: (0xc270, 0),# Korean hangul
- 0x27536c: (0x80f6, 0),# East Asian ideograph
- 0x274636: (0x6b93, 0),# East Asian ideograph
- 0x6f5521: (0xc549, 0),# Korean hangul
- 0x21536d: (0x819b, 0),# East Asian ideograph
- 0x224348: (0x6af5, 0),# East Asian ideograph
- 0x4b5632: (0x7c54, 0),# East Asian ideograph
- 0x27536e: (0x80a4, 0),# East Asian ideograph
- 0x454f45: (0x9896, 0),# East Asian ideograph
- 0x22536f: (0x71cf, 0),# East Asian ideograph
- 0x4c5541: (0x4e2c, 0),# East Asian ideograph
- 0x225370: (0x71d6, 0),# East Asian ideograph
- 0x213031: (0x4e1e, 0),# East Asian ideograph
- 0x215371: (0x81a9, 0),# East Asian ideograph
- 0x274637: (0x6ba1, 0),# East Asian ideograph
- 0x6f5522: (0xc54a, 0),# Korean hangul
- 0x213f4a: (0x61c9, 0),# East Asian ideograph
- 0x217463: (0x5749, 0),# East Asian ideograph
- 0x6f5373: (0xc290, 0),# Korean hangul
- 0x335172: (0x7d89, 0),# East Asian ideograph
- 0x212a29: (0xe8d7, 0),# EACC component character
- 0x6f4a2b: (0xadec, 0),# Korean hangul
- 0x235374: (0x9a3b, 0),# East Asian ideograph
- 0x4b496a: (0x932c, 0),# East Asian ideograph
- 0x225375: (0x71c2, 0),# East Asian ideograph
- 0x6f5376: (0xc29d, 0),# Korean hangul
- 0x233e21: (0x9070, 0),# East Asian ideograph
- 0x213f4b: (0x6190, 0),# East Asian ideograph
- 0x225377: (0x71c5, 0),# East Asian ideograph
- 0x4b385e: (0x5897, 0),# East Asian ideograph
- 0x234749: (0x93fa, 0),# East Asian ideograph
- 0x2d6275: (0x76b7, 0),# East Asian ideograph
- 0x215378: (0x81bf, 0),# East Asian ideograph
- 0x216431: (0x4e36, 0),# East Asian ideograph
- 0x215379: (0x81bd, 0),# East Asian ideograph
- 0x223e24: (0x6919, 0),# East Asian ideograph
- 0x4b496b: (0x83f8, 0),# East Asian ideograph
- 0x21537a: (0x81c9, 0),# East Asian ideograph
- 0x213e25: (0x5ffd, 0),# East Asian ideograph
- 0x21537b: (0x81be, 0),# East Asian ideograph
- 0x39304c: (0x4e81, 0),# East Asian ideograph
- 0x213e26: (0x5fdd, 0),# East Asian ideograph
- 0x23603f: (0x9f6e, 0),# East Asian ideograph
- 0x27537c: (0x8110, 0),# East Asian ideograph
- 0x33474a: (0x6d1f, 0),# East Asian ideograph
- 0x21537d: (0x81cf, 0),# East Asian ideograph
- 0x213e28: (0x5fff, 0),# East Asian ideograph
- 0x275746: (0x672e, 0),# East Asian ideograph
- 0x21537e: (0x81d8, 0),# East Asian ideograph
- 0x6f4e26: (0xb54b, 0),# Korean hangul
- 0x227042: (0x7d06, 0),# East Asian ideograph
- 0x294471: (0x951d, 0),# East Asian ideograph
- 0x233e2a: (0x907b, 0),# East Asian ideograph
- 0x6f5968: (0xce61, 0),# Korean hangul
- 0x6f5b38: (0xd1a0, 0),# Korean hangul
- 0x213e2b: (0x602a, 0),# East Asian ideograph
- 0x213e2c: (0x602f, 0),# East Asian ideograph
- 0x6f585b: (0xcacc, 0),# Korean hangul
- 0x213e2d: (0x6016, 0),# East Asian ideograph
- 0x213e2f: (0x600f, 0),# East Asian ideograph
- 0x6f5969: (0xce68, 0),# Korean hangul
- 0x227523: (0x7f97, 0),# East Asian ideograph
- 0x227524: (0x7f95, 0),# East Asian ideograph
- 0x51456d: (0x822e, 0),# East Asian ideograph
- 0x217526: (0x5770, 0),# East Asian ideograph
- 0x217528: (0x5771, 0),# East Asian ideograph
- 0x21752a: (0x576e, 0),# East Asian ideograph
- 0x22752c: (0x7fa2, 0),# East Asian ideograph
- 0x21752d: (0x5776, 0),# East Asian ideograph
- 0x21752e: (0x5789, 0),# East Asian ideograph
- 0x217530: (0x577f, 0),# East Asian ideograph
- 0x217531: (0x5775, 0),# East Asian ideograph
- 0x217532: (0x577b, 0),# East Asian ideograph
- 0x227533: (0x7fa7, 0),# East Asian ideograph
- 0x217535: (0x5773, 0),# East Asian ideograph
- 0x223241: (0x636d, 0),# East Asian ideograph
- 0x217538: (0x579f, 0),# East Asian ideograph
- 0x233e34: (0x9083, 0),# East Asian ideograph
- 0x21753a: (0x5793, 0),# East Asian ideograph
- 0x22753b: (0x7fb0, 0),# East Asian ideograph
- 0x22753c: (0x7fad, 0),# East Asian ideograph
- 0x6f596a: (0xce69, 0),# Korean hangul
- 0x22753f: (0x7fb1, 0),# East Asian ideograph
- 0x227540: (0x7fb4, 0),# East Asian ideograph
- 0x6f5527: (0xc555, 0),# Korean hangul
- 0x227542: (0x7fb5, 0),# East Asian ideograph
- 0x217543: (0x579a, 0),# East Asian ideograph
- 0x217545: (0x5794, 0),# East Asian ideograph
- 0x217547: (0x57a4, 0),# East Asian ideograph
- 0x217548: (0x5799, 0),# East Asian ideograph
- 0x217549: (0x578c, 0),# East Asian ideograph
- 0x22754a: (0x7fbc, 0),# East Asian ideograph
- 0x21754b: (0x5797, 0),# East Asian ideograph
- 0x22754c: (0x7fbe, 0),# East Asian ideograph
- 0x275749: (0x536b, 0),# East Asian ideograph
- 0x227551: (0x7fc3, 0),# East Asian ideograph
- 0x217552: (0x579c, 0),# East Asian ideograph
- 0x217554: (0x57a7, 0),# East Asian ideograph
- 0x227557: (0x7fca, 0),# East Asian ideograph
- 0x217559: (0x3013, 0),# East Asian ideograph (not found in unified han)
- 0x21755b: (0x5795, 0),# East Asian ideograph
- 0x213e3a: (0x6068, 0),# East Asian ideograph
- 0x21755f: (0x57b8, 0),# East Asian ideograph
- 0x217560: (0x57c7, 0),# East Asian ideograph
- 0x227567: (0x7fdb, 0),# East Asian ideograph
- 0x227568: (0x7fe3, 0),# East Asian ideograph
- 0x2d3e3c: (0x803b, 0),# East Asian ideograph
- 0x21756a: (0x5809, 0),# East Asian ideograph
- 0x22756c: (0x7fe6, 0),# East Asian ideograph
- 0x22756f: (0x7fe5, 0),# East Asian ideograph
- 0x22545c: (0x5911, 0),# East Asian ideograph
- 0x217571: (0x57db, 0),# East Asian ideograph
- 0x227572: (0x7fec, 0),# East Asian ideograph
- 0x227573: (0x7feb, 0),# East Asian ideograph
- 0x273b60: (0x5c61, 0),# East Asian ideograph
- 0x213e3e: (0x606d, 0),# East Asian ideograph
- 0x227577: (0x7fef, 0),# East Asian ideograph
- 0x6f596c: (0xce6d, 0),# Korean hangul
- 0x22757a: (0x7fee, 0),# East Asian ideograph
- 0x233e3f: (0x9099, 0),# East Asian ideograph
- 0x28632c: (0x7751, 0),# East Asian ideograph
- 0x293066: (0x89c7, 0),# East Asian ideograph
- 0x21757e: (0x57c6, 0),# East Asian ideograph
- 0x233e40: (0x9097, 0),# East Asian ideograph
- 0x4b563a: (0x82a6, 0),# East Asian ideograph (variant of 27563A which maps to 82A6)
- 0x4b3421: (0x5263, 0),# East Asian ideograph
- 0x275936: (0x8c23, 0),# East Asian ideograph
- 0x213e41: (0x604d, 0),# East Asian ideograph
- 0x6f4860: (0xac01, 0),# Korean hangul
- 0x4c695f: (0x7a63, 0),# East Asian ideograph
- 0x213e42: (0x606b, 0),# East Asian ideograph
- 0x395f49: (0x5f6b, 0),# East Asian ideograph
- 0x23585c: (0x9c09, 0),# East Asian ideograph
- 0x233643: (0x8c9f, 0),# East Asian ideograph
- 0x213e43: (0x6069, 0),# East Asian ideograph
- 0x6f5b39: (0xd1a1, 0),# Korean hangul
- 0x223e44: (0x6911, 0),# East Asian ideograph
- 0x6f552a: (0xc559, 0),# Korean hangul
- 0x4b5a7e: (0x5c69, 0),# East Asian ideograph
- 0x213e46: (0x606a, 0),# East Asian ideograph
- 0x6f4861: (0xac02, 0),# Korean hangul
- 0x223e47: (0x68ef, 0),# East Asian ideograph
- 0x22545e: (0x720a, 0),# East Asian ideograph
- 0x6f4f4a: (0xb8fd, 0),# Korean hangul
- 0x213e48: (0x6070, 0),# East Asian ideograph
- 0x213e49: (0x6055, 0),# East Asian ideograph
- 0x274640: (0x6bb4, 0),# East Asian ideograph
- 0x234751: (0x9427, 0),# East Asian ideograph
- 0x33433e: (0x95c7, 0),# East Asian ideograph
- 0x213e4b: (0x60a6, 0),# East Asian ideograph
- 0x4d4835: (0x954c, 0),# East Asian ideograph
- 0x2d3c21: (0x57fc, 0),# East Asian ideograph
- 0x275f2e: (0x9633, 0),# East Asian ideograph
- 0x393e4c: (0x6142, 0),# East Asian ideograph
- 0x4b4973: (0x7115, 0),# East Asian ideograph
- 0x213e4d: (0x609f, 0),# East Asian ideograph
- 0x27407e: (0x626a, 0),# East Asian ideograph
- 0x6f596f: (0xce74, 0),# Korean hangul
- 0x6f552c: (0xc55f, 0),# Korean hangul
- 0x697058: (0x9779, 0),# East Asian ideograph
- 0x275e36: (0x9559, 0),# East Asian ideograph
- 0x2d627e: (0x658b, 0),# East Asian ideograph
- 0x213b48: (0x5c1a, 0),# East Asian ideograph
- 0x2d5e3b: (0x92b9, 0),# East Asian ideograph
- 0x6f4863: (0xac07, 0),# Korean hangul
- 0x6f4a2d: (0xadf9, 0),# Korean hangul
- 0x393078: (0x9ae3, 0),# East Asian ideograph
- 0x227e51: (0x8423, 0),# East Asian ideograph
- 0x225460: (0x7217, 0),# East Asian ideograph
- 0x223247: (0x63d3, 0),# East Asian ideograph
- 0x213e52: (0x60a3, 0),# East Asian ideograph
- 0x6f5970: (0xce75, 0),# Korean hangul
- 0x6f5542: (0xc598, 0),# Korean hangul
- 0x223e53: (0x6974, 0),# East Asian ideograph
- 0x6f552d: (0xc560, 0),# Korean hangul
- 0x213e54: (0x6094, 0),# East Asian ideograph
- 0x2d4066: (0x63ce, 0),# East Asian ideograph
- 0x216433: (0x4e3f, 0),# East Asian ideograph
- 0x6f4864: (0xac08, 0),# Korean hangul
- 0x4b4975: (0x6427, 0),# East Asian ideograph
- 0x275d7a: (0x9532, 0),# East Asian ideograph
- 0x213e57: (0x60b4, 0),# East Asian ideograph
- 0x395d23: (0x91bb, 0),# East Asian ideograph
- 0x4b517e: (0x7e92, 0),# East Asian ideograph
- 0x6f5971: (0xce78, 0),# Korean hangul
- 0x223e58: (0x6962, 0),# East Asian ideograph
- 0x224b36: (0x6e39, 0),# East Asian ideograph
- 0x6f5468: (0xc4d4, 0),# Korean hangul
- 0x213e59: (0x60cb, 0),# East Asian ideograph
- 0x4b563f: (0x6a98, 0),# East Asian ideograph
- 0x2d4067: (0x62cf, 0),# East Asian ideograph
- 0x6f4865: (0xac09, 0),# Korean hangul
- 0x6f7621: (0x3181, 0),# Korean hangul
- 0x217622: (0x57c4, 0),# East Asian ideograph
- 0x233e5b: (0x90b6, 0),# East Asian ideograph
- 0x6f7624: (0xe8b0, 0),# Korean hangul
- 0x6f7625: (0xe8b1, 0),# Korean hangul
- 0x4b4556: (0x6a2a, 0),# East Asian ideograph
- 0x217627: (0x70fe, 0),# East Asian ideograph
- 0x227629: (0x7ffd, 0),# East Asian ideograph
- 0x22762a: (0x7ffe, 0),# East Asian ideograph
- 0x21762b: (0x5803, 0),# East Asian ideograph
- 0x22762c: (0x7fff, 0),# East Asian ideograph
- 0x21762d: (0x57e6, 0),# East Asian ideograph
- 0x22762e: (0x8004, 0),# East Asian ideograph
- 0x233e5d: (0x90b0, 0),# East Asian ideograph
- 0x29332c: (0x8bfc, 0),# East Asian ideograph
- 0x217631: (0x57ed, 0),# East Asian ideograph
- 0x227633: (0x800b, 0),# East Asian ideograph
- 0x227634: (0x800e, 0),# East Asian ideograph
- 0x227635: (0x8011, 0),# East Asian ideograph
- 0x234755: (0x9409, 0),# East Asian ideograph
- 0x227637: (0x8014, 0),# East Asian ideograph
- 0x277638: (0x57ad, 0),# East Asian ideograph
- 0x227639: (0x8016, 0),# East Asian ideograph
- 0x223e5f: (0x6957, 0),# East Asian ideograph
- 0x21763d: (0x57f4, 0),# East Asian ideograph
- 0x22763e: (0x801d, 0),# East Asian ideograph
- 0x294179: (0x9492, 0),# East Asian ideograph
- 0x217640: (0x580d, 0),# East Asian ideograph
- 0x223e60: (0x693f, 0),# East Asian ideograph
- 0x6f7642: (0xe8b4, 0),# Korean hangul
- 0x217643: (0x57ef, 0),# East Asian ideograph
- 0x6f7644: (0xe8b6, 0),# Korean hangul
- 0x6f7645: (0xe8b7, 0),# Korean hangul
- 0x6f4f4b: (0xb904, 0),# Korean hangul
- 0x273e61: (0x6076, 0),# East Asian ideograph
- 0x217648: (0x5801, 0),# East Asian ideograph
- 0x217649: (0x5812, 0),# East Asian ideograph
- 0x6f764a: (0xe8bc, 0),# Korean hangul
- 0x22764b: (0x8025, 0),# East Asian ideograph
- 0x22764c: (0x8026, 0),# East Asian ideograph
- 0x22764d: (0x802a, 0),# East Asian ideograph
- 0x22764e: (0x8029, 0),# East Asian ideograph
- 0x22764f: (0x8028, 0),# East Asian ideograph
- 0x217650: (0x580c, 0),# East Asian ideograph
- 0x217651: (0x5813, 0),# East Asian ideograph
- 0x217652: (0x57f0, 0),# East Asian ideograph
- 0x6f7653: (0xe8c5, 0),# Korean hangul
- 0x6f7654: (0xe8c6, 0),# Korean hangul
- 0x287655: (0x8027, 0),# East Asian ideograph
- 0x217656: (0x580b, 0),# East Asian ideograph
- 0x6f7657: (0xe8c9, 0),# Korean hangul
- 0x217658: (0x57f3, 0),# East Asian ideograph
- 0x217659: (0x5804, 0),# East Asian ideograph
- 0x21765a: (0x57cf, 0),# East Asian ideograph
- 0x22765b: (0x8030, 0),# East Asian ideograph
- 0x22765d: (0x8031, 0),# East Asian ideograph
- 0x21765f: (0x5847, 0),# East Asian ideograph
- 0x227660: (0x8035, 0),# East Asian ideograph
- 0x225021: (0x9e02, 0),# East Asian ideograph
- 0x4d2f5d: (0x8941, 0),# East Asian ideograph (variant of 232F5D which maps to 8941)
- 0x217667: (0x581b, 0),# East Asian ideograph
- 0x227669: (0x8039, 0),# East Asian ideograph
- 0x21766a: (0x5833, 0),# East Asian ideograph
- 0x22766b: (0x8041, 0),# East Asian ideograph
- 0x21766c: (0x581e, 0),# East Asian ideograph
- 0x21766d: (0x583f, 0),# East Asian ideograph
- 0x213f59: (0x61fe, 0),# East Asian ideograph
- 0x227670: (0x8043, 0),# East Asian ideograph
- 0x213e68: (0x60b8, 0),# East Asian ideograph
- 0x217676: (0x5828, 0),# East Asian ideograph
- 0x213e69: (0x60da, 0),# East Asian ideograph
- 0x217678: (0x582e, 0),# East Asian ideograph
- 0x6f4868: (0xac12, 0),# Korean hangul
- 0x21767a: (0x581d, 0),# East Asian ideograph
- 0x22767b: (0x8052, 0),# East Asian ideograph
- 0x233e6a: (0x90bd, 0),# East Asian ideograph
- 0x22767e: (0x8062, 0),# East Asian ideograph
- 0x225b69: (0x74aa, 0),# East Asian ideograph
- 0x213e6b: (0x610f, 0),# East Asian ideograph
- 0x2d4d34: (0x76c7, 0),# East Asian ideograph
- 0x4b4046: (0x629c, 0),# East Asian ideograph
- 0x213e6c: (0x611c, 0),# East Asian ideograph
- 0x29306f: (0x89cb, 0),# East Asian ideograph
- 0x213f5a: (0x61ff, 0),# East Asian ideograph
- 0x224832: (0x6cd2, 0),# East Asian ideograph
- 0x234d62: (0x979c, 0),# East Asian ideograph
- 0x6f773d: (0xc733, 0),# Korean hangul
- 0x2d4844: (0x6fd5, 0),# East Asian ideograph
- 0x2d4461: (0x6746, 0),# East Asian ideograph
- 0x213e6e: (0x611f, 0),# East Asian ideograph
- 0x6f4869: (0xac13, 0),# Korean hangul
- 0x39417c: (0x62e0, 0),# East Asian ideograph
- 0x213e6f: (0x60f0, 0),# East Asian ideograph
- 0x225466: (0x7215, 0),# East Asian ideograph
- 0x22723c: (0x7df2, 0),# East Asian ideograph
- 0x223e70: (0x696a, 0),# East Asian ideograph
- 0x6f5976: (0xce89, 0),# Korean hangul
- 0x213e71: (0x60fa, 0),# East Asian ideograph
- 0x224b37: (0x6e71, 0),# East Asian ideograph
- 0x213e72: (0x611a, 0),# East Asian ideograph
- 0x234759: (0x9404, 0),# East Asian ideograph
- 0x333d48: (0x5f3a, 0),# East Asian ideograph
- 0x213e73: (0x6115, 0),# East Asian ideograph
- 0x6f486a: (0xac14, 0),# Korean hangul
- 0x212a3d: (0xe8ea, 0),# EACC component character
- 0x235866: (0x9c1c, 0),# East Asian ideograph
- 0x233e75: (0x90c7, 0),# East Asian ideograph
- 0x456064: (0x9963, 0),# East Asian ideograph
- 0x6f5977: (0xce90, 0),# Korean hangul
- 0x6f5b3b: (0xd1a8, 0),# Korean hangul
- 0x2d3b7b: (0x5d08, 0),# East Asian ideograph
- 0x222921: (0x5ef1, 0),# East Asian ideograph
- 0x213f5c: (0x6200, 0),# East Asian ideograph
- 0x697060: (0x9790, 0),# East Asian ideograph
- 0x4b506c: (0x7cab, 0),# East Asian ideograph
- 0x215d32: (0x91ac, 0),# East Asian ideograph
- 0x225347: (0x71b2, 0),# East Asian ideograph
- 0x213e78: (0x610e, 0),# East Asian ideograph
- 0x2d5e43: (0x92f3, 0),# East Asian ideograph
- 0x6f486b: (0xac15, 0),# Korean hangul
- 0x213e79: (0x6100, 0),# East Asian ideograph
- 0x23364e: (0x8cb0, 0),# East Asian ideograph
- 0x213e7a: (0x6101, 0),# East Asian ideograph
- 0x4d2925: (0x8770, 0),# East Asian ideograph
- 0x6f5344: (0xc19c, 0),# Korean hangul
- 0x6f5978: (0xce91, 0),# Korean hangul
- 0x213e7b: (0x60f6, 0),# East Asian ideograph
- 0x6f5535: (0xc575, 0),# Korean hangul
- 0x4b3870: (0x58cc, 0),# East Asian ideograph
- 0x232927: (0x867c, 0),# East Asian ideograph
- 0x223e7d: (0x6980, 0),# East Asian ideograph
- 0x28736d: (0x624d, 0),# East Asian ideograph
- 0x275e62: (0x9615, 0),# East Asian ideograph
- 0x223e7e: (0x6933, 0),# East Asian ideograph
- 0x225469: (0x7213, 0),# East Asian ideograph
- 0x6f4935: (0xac94, 0),# Korean hangul
- 0x6f4e72: (0xb7a8, 0),# Korean hangul
- 0x2d4d38: (0x76d7, 0),# East Asian ideograph
- 0x6f5536: (0xc57c, 0),# Korean hangul
- 0x4b3871: (0x57bb, 0),# East Asian ideograph
- 0x4b5647: (0x51e6, 0),# East Asian ideograph
- 0x6f486d: (0xac17, 0),# Korean hangul
- 0x2d5434: (0x64e7, 0),# East Asian ideograph
- 0x234e5c: (0x97de, 0),# East Asian ideograph
- 0x225b6a: (0x7490, 0),# East Asian ideograph
- 0x23292f: (0x86a8, 0),# East Asian ideograph
- 0x6f597a: (0xce98, 0),# Korean hangul
- 0x4b5221: (0x7d9a, 0),# East Asian ideograph
- 0x217721: (0x5848, 0),# East Asian ideograph
- 0x6f7722: (0xad7b, 0),# Korean hangul
- 0x217723: (0x5818, 0),# East Asian ideograph
- 0x6f7724: (0xad89, 0),# Korean hangul
- 0x6f7725: (0xad9d, 0),# Korean hangul
- 0x217726: (0x57f5, 0),# East Asian ideograph
- 0x4b5d36: (0x91c6, 0),# East Asian ideograph
- 0x227728: (0x8063, 0),# East Asian ideograph
- 0x21315b: (0x4fbf, 0),# East Asian ideograph
- 0x6f772a: (0xae0f, 0),# Korean hangul
- 0x21772b: (0x5820, 0),# East Asian ideograph
- 0x6f772c: (0xae14, 0),# Korean hangul
- 0x6f486e: (0xac19, 0),# Korean hangul
- 0x6f772e: (0xaeed, 0),# Korean hangul
- 0x6f772f: (0xaf09, 0),# Korean hangul
- 0x217730: (0x584e, 0),# East Asian ideograph
- 0x6f7731: (0xafbf, 0),# Korean hangul
- 0x227732: (0x806c, 0),# East Asian ideograph
- 0x217733: (0x585d, 0),# East Asian ideograph
- 0x6f4926: (0xac78, 0),# Korean hangul
- 0x217735: (0x5859, 0),# East Asian ideograph
- 0x6f7736: (0xc552, 0),# Korean hangul
- 0x217737: (0x584b, 0),# East Asian ideograph
- 0x6f7738: (0xc5b1, 0),# Korean hangul
- 0x227739: (0x8075, 0),# East Asian ideograph
- 0x6f773a: (0xc61d, 0),# Korean hangul
- 0x2d3876: (0x58f7, 0),# East Asian ideograph
- 0x6f773c: (0xe8cb, 0),# Korean hangul
- 0x21773d: (0x5865, 0),# East Asian ideograph
- 0x22773e: (0x807b, 0),# East Asian ideograph
- 0x22773f: (0x8079, 0),# East Asian ideograph
- 0x217740: (0x586c, 0),# East Asian ideograph
- 0x213f60: (0x620d, 0),# East Asian ideograph
- 0x217742: (0x5852, 0),# East Asian ideograph
- 0x33475e: (0x6fb9, 0),# East Asian ideograph
- 0x217745: (0x5864, 0),# East Asian ideograph
- 0x227747: (0x808a, 0),# East Asian ideograph
- 0x217748: (0x584f, 0),# East Asian ideograph
- 0x227749: (0x808e, 0),# East Asian ideograph
- 0x213533: (0x53fc, 0),# East Asian ideograph
- 0x6f486f: (0xac1a, 0),# Korean hangul
- 0x21774d: (0x584d, 0),# East Asian ideograph
- 0x22774e: (0x809f, 0),# East Asian ideograph
- 0x6f5730: (0xc7c8, 0),# Korean hangul
- 0x225029: (0x7054, 0),# East Asian ideograph
- 0x232939: (0x8698, 0),# East Asian ideograph
- 0x217758: (0x5892, 0),# East Asian ideograph
- 0x6f597c: (0xcea1, 0),# Korean hangul
- 0x21775a: (0x588e, 0),# East Asian ideograph
- 0x22775c: (0x670a, 0),# East Asian ideograph
- 0x70775d: (0x9b0f, 0),# East Asian ideograph
- 0x282632: (0x5d58, 0),# East Asian ideograph
- 0x21775f: (0x5840, 0),# East Asian ideograph
- 0x227760: (0x80a7, 0),# East Asian ideograph
- 0x227761: (0x80b0, 0),# East Asian ideograph
- 0x33475f: (0x60bd, 0),# East Asian ideograph
- 0x21576c: (0x88fd, 0),# East Asian ideograph
- 0x217765: (0x5890, 0),# East Asian ideograph
- 0x217768: (0x5898, 0),# East Asian ideograph
- 0x227769: (0x80b5, 0),# East Asian ideograph
- 0x22776a: (0x80a6, 0),# East Asian ideograph
- 0x21776b: (0x587d, 0),# East Asian ideograph
- 0x6f5c36: (0xd3b8, 0),# Korean hangul
- 0x21776f: (0x587f, 0),# East Asian ideograph
- 0x217770: (0x5881, 0),# East Asian ideograph
- 0x707771: (0x9ee2, 0),# East Asian ideograph (Version J extension)
- 0x227773: (0x80e0, 0),# East Asian ideograph
- 0x21693e: (0x5135, 0),# East Asian ideograph
- 0x235b26: (0x9d5a, 0),# East Asian ideograph
- 0x6f597d: (0xcea3, 0),# Korean hangul
- 0x213d62: (0x5f8b, 0),# East Asian ideograph
- 0x22777b: (0x80df, 0),# East Asian ideograph
- 0x22777d: (0x80c2, 0),# East Asian ideograph
- 0x21777e: (0x58a1, 0),# East Asian ideograph
- 0x226940: (0x7a5c, 0),# East Asian ideograph
- 0x4b7421: (0xf9a9, 0),# East Asian ideograph
- 0x4c433f: (0x6a65, 0),# East Asian ideograph
- 0x6f4d21: (0xb304, 0),# Korean hangul
- 0x45606b: (0x98f0, 0),# East Asian ideograph
- 0x27482d: (0x6c64, 0),# East Asian ideograph
- 0x293b6b: (0x8f8b, 0),# East Asian ideograph
- 0x276944: (0x50a9, 0),# East Asian ideograph
- 0x213f63: (0x6212, 0),# East Asian ideograph
- 0x2d5e4a: (0x945a, 0),# East Asian ideograph
- 0x6f4a30: (0xae00, 0),# Korean hangul
- 0x275e57: (0x95f8, 0),# East Asian ideograph
- 0x6f5321: (0xc11c, 0),# Korean hangul
- 0x276948: (0x50a5, 0),# East Asian ideograph
- 0x6f553c: (0xc58d, 0),# Korean hangul
- 0x213f64: (0x6211, 0),# East Asian ideograph
- 0x215d3a: (0x91cd, 0),# East Asian ideograph
- 0x234762: (0x9423, 0),# East Asian ideograph
- 0x23294b: (0x86bf, 0),# East Asian ideograph
- 0x6f4956: (0xacf6, 0),# Korean hangul
- 0x6f4b41: (0xb057, 0),# Korean hangul
- 0x292c5d: (0x867f, 0),# East Asian ideograph
- 0x4b3435: (0x52b4, 0),# East Asian ideograph
- 0x6f4e27: (0xb54c, 0),# Korean hangul
- 0x222951: (0x5f33, 0),# East Asian ideograph
- 0x223258: (0x63e6, 0),# East Asian ideograph
- 0x235870: (0x9c2e, 0),# East Asian ideograph
- 0x227247: (0x7df1, 0),# East Asian ideograph
- 0x6f5b3d: (0xd1b1, 0),# Korean hangul
- 0x6f553e: (0xc590, 0),# Korean hangul
- 0x213f66: (0x6215, 0),# East Asian ideograph
- 0x2d3539: (0x52fe, 0),# East Asian ideograph
- 0x22613b: (0x76e6, 0),# East Asian ideograph
- 0x4b3436: (0x52f2, 0),# East Asian ideograph
- 0x6f4e6c: (0xb791, 0),# Korean hangul
- 0x23517a: (0x9958, 0),# East Asian ideograph
- 0x214c30: (0x755a, 0),# East Asian ideograph
- 0x226957: (0x7a6e, 0),# East Asian ideograph
- 0x222958: (0x5f38, 0),# East Asian ideograph
- 0x2d424f: (0x555f, 0),# East Asian ideograph
- 0x215d3d: (0x91d0, 0),# East Asian ideograph
- 0x22613c: (0x76e9, 0),# East Asian ideograph
- 0x334342: (0x7156, 0),# East Asian ideograph
- 0x513d67: (0x8ff3, 0),# East Asian ideograph
- 0x217824: (0x58b1, 0),# East Asian ideograph
- 0x227827: (0x80d9, 0),# East Asian ideograph
- 0x4b4544: (0x69d8, 0),# East Asian ideograph
- 0x4c695c: (0x7a06, 0),# East Asian ideograph
- 0x22782a: (0x80dd, 0),# East Asian ideograph
- 0x21782b: (0x58ad, 0),# East Asian ideograph
- 0x22782d: (0x80cf, 0),# East Asian ideograph
- 0x21782e: (0x58a0, 0),# East Asian ideograph
- 0x22782f: (0x80cd, 0),# East Asian ideograph
- 0x227830: (0x80d7, 0),# East Asian ideograph
- 0x213f68: (0x621a, 0),# East Asian ideograph
- 0x217832: (0x58a6, 0),# East Asian ideograph
- 0x227833: (0x80f2, 0),# East Asian ideograph
- 0x227834: (0x80fa, 0),# East Asian ideograph
- 0x227838: (0x80fe, 0),# East Asian ideograph
- 0x21783a: (0x58c8, 0),# East Asian ideograph
- 0x2d3c36: (0x5de3, 0),# East Asian ideograph
- 0x22783c: (0x8103, 0),# East Asian ideograph
- 0x275762: (0x8865, 0),# East Asian ideograph
- 0x227840: (0x80f9, 0),# East Asian ideograph
- 0x217841: (0x58bc, 0),# East Asian ideograph
- 0x227842: (0x80d4, 0),# East Asian ideograph
- 0x33365a: (0x5405, 0),# East Asian ideograph
- 0x4b4545: (0x6982, 0),# East Asian ideograph
- 0x217849: (0x58bf, 0),# East Asian ideograph
- 0x22784b: (0x8118, 0),# East Asian ideograph
- 0x21784c: (0x58ba, 0),# East Asian ideograph
- 0x222962: (0x5f4d, 0),# East Asian ideograph
- 0x6f5541: (0xc597, 0),# Korean hangul
- 0x227850: (0x8130, 0),# East Asian ideograph
- 0x232963: (0x86b4, 0),# East Asian ideograph
- 0x227854: (0x8124, 0),# East Asian ideograph
- 0x227855: (0x811b, 0),# East Asian ideograph
- 0x217856: (0x58ce, 0),# East Asian ideograph
- 0x2d5e50: (0x9587, 0),# East Asian ideograph
- 0x6f4878: (0xac2f, 0),# Korean hangul
- 0x21785a: (0x58e0, 0),# East Asian ideograph
- 0x21785e: (0x58da, 0),# East Asian ideograph
- 0x227860: (0x812a, 0),# East Asian ideograph
- 0x227861: (0x811e, 0),# East Asian ideograph
- 0x294362: (0x94ea, 0),# East Asian ideograph
- 0x227864: (0x8121, 0),# East Asian ideograph
- 0x212321: (0x3000, 0),# Ideographic space per ANSI Z39.64
- 0x227866: (0x8117, 0),# East Asian ideograph
- 0x227869: (0x813a, 0),# East Asian ideograph
- 0x22786a: (0x815a, 0),# East Asian ideograph
- 0x21786c: (0x58fc, 0),# East Asian ideograph
- 0x22786d: (0x8148, 0),# East Asian ideograph
- 0x28786e: (0x80e8, 0),# East Asian ideograph
- 0x4b387d: (0x591b, 0),# East Asian ideograph
- 0x217870: (0x5902, 0),# East Asian ideograph
- 0x213b27: (0x5bcc, 0),# East Asian ideograph
- 0x217873: (0x5906, 0),# East Asian ideograph
- 0x217874: (0x6535, 0),# East Asian ideograph
- 0x227877: (0x814c, 0),# East Asian ideograph
- 0x21787a: (0x5910, 0),# East Asian ideograph
- 0x21787c: (0x8641, 0),# East Asian ideograph
- 0x22787d: (0x8141, 0),# East Asian ideograph
- 0x22325d: (0x63f6, 0),# East Asian ideograph
- 0x214c34: (0x7570, 0),# East Asian ideograph
- 0x343a5b: (0x572c, 0),# East Asian ideograph
- 0x2e735d: (0x7d56, 0),# East Asian ideograph
- 0x276871: (0x4faa, 0),# East Asian ideograph
- 0x6f5543: (0xc59c, 0),# Korean hangul
- 0x696464: (0x7c90, 0),# East Asian ideograph
- 0x213b28: (0x5bd2, 0),# East Asian ideograph
- 0x234326: (0x924c, 0),# East Asian ideograph
- 0x275765: (0x88c5, 0),# East Asian ideograph
- 0x23296f: (0x86e9, 0),# East Asian ideograph
- 0x22325e: (0x63f2, 0),# East Asian ideograph
- 0x214c35: (0x7565, 0),# East Asian ideograph
- 0x6f557c: (0xc63a, 0),# Korean hangul
- 0x235433: (0x9a64, 0),# East Asian ideograph
- 0x222971: (0x5f61, 0),# East Asian ideograph
- 0x6f5544: (0xc5b4, 0),# Korean hangul
- 0x2d5e24: (0x7145, 0),# East Asian ideograph
- 0x23476a: (0x9407, 0),# East Asian ideograph
- 0x4b403d: (0x62dd, 0),# East Asian ideograph
- 0x6f487b: (0xac38, 0),# Korean hangul
- 0x2d4d65: (0x53e1, 0),# East Asian ideograph
- 0x232974: (0x86d5, 0),# East Asian ideograph
- 0x22325f: (0x63f8, 0),# East Asian ideograph
- 0x23365e: (0x8cd8, 0),# East Asian ideograph
- 0x235434: (0x9a66, 0),# East Asian ideograph
- 0x275421: (0x80ea, 0),# East Asian ideograph
- 0x275e37: (0x9535, 0),# East Asian ideograph
- 0x215422: (0x81df, 0),# East Asian ideograph
- 0x6f5862: (0xcb10, 0),# Korean hangul
- 0x6f487c: (0xac39, 0),# Korean hangul
- 0x6f4a32: (0xae08, 0),# Korean hangul
- 0x6f5423: (0xc2dc, 0),# Korean hangul
- 0x27623f: (0x9e43, 0),# East Asian ideograph
- 0x275e59: (0x95fa, 0),# East Asian ideograph
- 0x215424: (0x81e5, 0),# East Asian ideograph
- 0x23365f: (0x8cd5, 0),# East Asian ideograph
- 0x456076: (0x9980, 0),# East Asian ideograph
- 0x215425: (0x81e8, 0),# East Asian ideograph
- 0x6f5b48: (0xd23d, 0),# Korean hangul
- 0x275142: (0x7efd, 0),# East Asian ideograph
- 0x225426: (0x71d4, 0),# East Asian ideograph
- 0x235427: (0x9a4a, 0),# East Asian ideograph
- 0x6f487d: (0xac40, 0),# Korean hangul
- 0x4b5428: (0x81ed, 0),# East Asian ideograph (variant of 215428 which maps to 81ED)
- 0x235879: (0x9c24, 0),# East Asian ideograph
- 0x6f5c6a: (0xd56b, 0),# Korean hangul
- 0x23542a: (0x9a58, 0),# East Asian ideograph
- 0x6f546d: (0xc4f8, 0),# Korean hangul
- 0x6f5547: (0xc5b8, 0),# Korean hangul
- 0x27542b: (0x53f0, 0),# East Asian ideograph
- 0x333d4c: (0x7030, 0),# East Asian ideograph
- 0x23542c: (0x9a56, 0),# East Asian ideograph
- 0x6f542d: (0xc2f6, 0),# Korean hangul
- 0x293d4e: (0x8ff8, 0),# East Asian ideograph
- 0x28352a: (0x6448, 0),# East Asian ideograph
- 0x47577a: (0x9bd6, 0),# East Asian ideograph (variant of 23577A which maps to 9BD6)
- 0x6f5b3f: (0xd1b5, 0),# Korean hangul
- 0x6f5430: (0xc2f8, 0),# Korean hangul
- 0x227925: (0x814d, 0),# East Asian ideograph
- 0x6f5431: (0xc2f9, 0),# Korean hangul
- 0x217928: (0x592c, 0),# East Asian ideograph
- 0x21792b: (0x592f, 0),# East Asian ideograph
- 0x275432: (0x4e0e, 0),# East Asian ideograph
- 0x22792e: (0x6720, 0),# East Asian ideograph
- 0x21507d: (0x7d14, 0),# East Asian ideograph
- 0x217930: (0x593c, 0),# East Asian ideograph
- 0x395f68: (0x8987, 0),# East Asian ideograph
- 0x227932: (0x8160, 0),# East Asian ideograph
- 0x215433: (0x8208, 0),# East Asian ideograph
- 0x225039: (0x7074, 0),# East Asian ideograph
- 0x217938: (0x594d, 0),# East Asian ideograph
- 0x215434: (0x8209, 0),# East Asian ideograph
- 0x22793b: (0x8169, 0),# East Asian ideograph
- 0x22793c: (0x817c, 0),# East Asian ideograph
- 0x275435: (0x65e7, 0),# East Asian ideograph
- 0x294e5c: (0x97eb, 0),# East Asian ideograph
- 0x227941: (0x8161, 0),# East Asian ideograph
- 0x6f5a65: (0xd081, 0),# Korean hangul
- 0x217943: (0x5953, 0),# East Asian ideograph
- 0x225436: (0x71e8, 0),# East Asian ideograph
- 0x227946: (0x8176, 0),# East Asian ideograph
- 0x227947: (0x8174, 0),# East Asian ideograph
- 0x227948: (0x8167, 0),# East Asian ideograph
- 0x334633: (0x6b8b, 0),# East Asian ideograph (variant of 274633 which maps to 6B8B)
- 0x22794b: (0x816f, 0),# East Asian ideograph
- 0x22794d: (0x8182, 0),# East Asian ideograph
- 0x4c794e: (0x80b7, 0),# East Asian ideograph
- 0x21794f: (0x5961, 0),# East Asian ideograph
- 0x227951: (0x818b, 0),# East Asian ideograph
- 0x227952: (0x8186, 0),# East Asian ideograph
- 0x217954: (0x596c, 0),# East Asian ideograph
- 0x217955: (0x596d, 0),# East Asian ideograph
- 0x274830: (0x6d4b, 0),# East Asian ideograph
- 0x4b6324: (0x9f62, 0),# East Asian ideograph
- 0x227959: (0x8183, 0),# East Asian ideograph
- 0x21543a: (0x8214, 0),# East Asian ideograph
- 0x334770: (0x5a6c, 0),# East Asian ideograph
- 0x69543b: (0x57b0, 0),# East Asian ideograph
- 0x217965: (0x597c, 0),# East Asian ideograph
- 0x6f4a33: (0xae09, 0),# Korean hangul
- 0x217969: (0x59a7, 0),# East Asian ideograph
- 0x22796a: (0x819f, 0),# East Asian ideograph
- 0x22796b: (0x81a3, 0),# East Asian ideograph
- 0x287941: (0x8136, 0),# East Asian ideograph
- 0x21796f: (0x599a, 0),# East Asian ideograph
- 0x227970: (0x8198, 0),# East Asian ideograph
- 0x22503b: (0x707a, 0),# East Asian ideograph
- 0x335e3d: (0x9244, 0),# East Asian ideograph
- 0x227975: (0x8195, 0),# East Asian ideograph
- 0x227977: (0x8197, 0),# East Asian ideograph
- 0x22543f: (0x71e1, 0),# East Asian ideograph
- 0x22797c: (0x81aa, 0),# East Asian ideograph
- 0x224372: (0x6b1e, 0),# East Asian ideograph
- 0x22797e: (0x6725, 0),# East Asian ideograph
- 0x213b30: (0x5bde, 0),# East Asian ideograph
- 0x2d5440: (0x6841, 0),# East Asian ideograph
- 0x6f4862: (0xac04, 0),# Korean hangul
- 0x215441: (0x822b, 0),# East Asian ideograph
- 0x275068: (0x7caa, 0),# East Asian ideograph
- 0x695442: (0x57d6, 0),# East Asian ideograph
- 0x227255: (0x7e12, 0),# East Asian ideograph
- 0x235443: (0x9ab1, 0),# East Asian ideograph
- 0x294e79: (0x9878, 0),# East Asian ideograph
- 0x6f5444: (0xc345, 0),# Korean hangul
- 0x213b31: (0x5be6, 0),# East Asian ideograph
- 0x235445: (0x9ab3, 0),# East Asian ideograph
- 0x33432f: (0x664b, 0),# East Asian ideograph
- 0x225651: (0x72c6, 0),# East Asian ideograph
- 0x2d5446: (0x8229, 0),# East Asian ideograph
- 0x216e57: (0x53fb, 0),# East Asian ideograph
- 0x214c3e: (0x758f, 0),# East Asian ideograph
- 0x276329: (0x9f8c, 0),# East Asian ideograph
- 0x6f554d: (0xc5c4, 0),# Korean hangul
- 0x235449: (0x9ab6, 0),# East Asian ideograph
- 0x227427: (0x7f46, 0),# East Asian ideograph
- 0x224a62: (0x6e4b, 0),# East Asian ideograph
- 0x27544a: (0x8231, 0),# East Asian ideograph
- 0x294161: (0x9487, 0),# East Asian ideograph
- 0x27544b: (0x8230, 0),# East Asian ideograph
- 0x283955: (0x6619, 0),# East Asian ideograph
- 0x23544c: (0x9abb, 0),# East Asian ideograph
- 0x233667: (0x8ce8, 0),# East Asian ideograph
- 0x4d2f7a: (0x891d, 0),# East Asian ideograph
- 0x6f5345: (0xc19d, 0),# Korean hangul
- 0x6f544d: (0xc37d, 0),# Korean hangul
- 0x235871: (0x9c28, 0),# East Asian ideograph
- 0x6f554e: (0xc5c5, 0),# Korean hangul
- 0x27544e: (0x8270, 0),# East Asian ideograph
- 0x234774: (0x943f, 0),# East Asian ideograph
- 0x22544f: (0x71fc, 0),# East Asian ideograph
- 0x235450: (0x9aba, 0),# East Asian ideograph
- 0x695451: (0x58b9, 0),# East Asian ideograph
- 0x233668: (0x8ce9, 0),# East Asian ideograph
- 0x4b4553: (0x6955, 0),# East Asian ideograph
- 0x6f4e77: (0xb7b4, 0),# Korean hangul
- 0x274831: (0x6da1, 0),# East Asian ideograph
- 0x233225: (0x8a22, 0),# East Asian ideograph
- 0x6f554f: (0xc5c6, 0),# Korean hangul
- 0x6f5453: (0xc3dc, 0),# Korean hangul
- 0x235454: (0x9abd, 0),# East Asian ideograph
- 0x2e6c26: (0x7be0, 0),# East Asian ideograph
- 0x6f4a34: (0xae0b, 0),# Korean hangul
- 0x6f5455: (0xc3e0, 0),# Korean hangul
- 0x225456: (0x71f9, 0),# East Asian ideograph
- 0x225040: (0x7093, 0),# East Asian ideograph
- 0x23543f: (0x9aad, 0),# East Asian ideograph
- 0x235457: (0x9ac1, 0),# East Asian ideograph
- 0x6f5550: (0xc5c7, 0),# Korean hangul
- 0x275458: (0x5df4, 0),# East Asian ideograph (duplicate simplified)
- 0x274222: (0x62c5, 0),# East Asian ideograph
- 0x4b3b22: (0x51a6, 0),# East Asian ideograph
- 0x235459: (0x9ac0, 0),# East Asian ideograph
- 0x22572c: (0x72fb, 0),# East Asian ideograph
- 0x23545a: (0x9ac2, 0),# East Asian ideograph
- 0x217a21: (0x5990, 0),# East Asian ideograph
- 0x22545b: (0x720e, 0),# East Asian ideograph
- 0x217a24: (0x59c5, 0),# East Asian ideograph
- 0x217a25: (0x59b5, 0),# East Asian ideograph
- 0x217a28: (0x59cf, 0),# East Asian ideograph
- 0x21545c: (0x82bb, 0),# East Asian ideograph
- 0x217a2a: (0x59ba, 0),# East Asian ideograph
- 0x3f377b: (0x784e, 0),# East Asian ideograph (Version J extension)
- 0x217a2c: (0x59b8, 0),# East Asian ideograph
- 0x6f546f: (0xc501, 0),# Korean hangul
- 0x227a2e: (0x81b0, 0),# East Asian ideograph
- 0x227a2f: (0x81b4, 0),# East Asian ideograph
- 0x215d4f: (0x9237, 0),# East Asian ideograph
- 0x227a33: (0x81b7, 0),# East Asian ideograph
- 0x217a35: (0x59b2, 0),# East Asian ideograph
- 0x227a37: (0x81bb, 0),# East Asian ideograph
- 0x227a38: (0x81c1, 0),# East Asian ideograph
- 0x227a39: (0x81cc, 0),# East Asian ideograph
- 0x217a3a: (0x59b7, 0),# East Asian ideograph
- 0x227a3b: (0x81c4, 0),# East Asian ideograph
- 0x217a3e: (0x59c1, 0),# East Asian ideograph
- 0x227a40: (0x81d1, 0),# East Asian ideograph
- 0x227a41: (0x81ce, 0),# East Asian ideograph
- 0x217a43: (0x59f9, 0),# East Asian ideograph
- 0x217a44: (0x59f8, 0),# East Asian ideograph
- 0x212a43: (0xe8f0, 0),# EACC component character
- 0x225461: (0x7207, 0),# East Asian ideograph
- 0x227a4b: (0x81db, 0),# East Asian ideograph
- 0x6f5552: (0xc5c9, 0),# Korean hangul
- 0x6f5462: (0xc468, 0),# Korean hangul
- 0x227a4f: (0x81dd, 0),# East Asian ideograph
- 0x217a50: (0x59f1, 0),# East Asian ideograph
- 0x217a51: (0x5a00, 0),# East Asian ideograph
- 0x217a52: (0x59de, 0),# East Asian ideograph
- 0x227a53: (0x81de, 0),# East Asian ideograph
- 0x227a56: (0x81e0, 0),# East Asian ideograph
- 0x227a57: (0x81e2, 0),# East Asian ideograph
- 0x6f5464: (0xc474, 0),# Korean hangul
- 0x227a5b: (0x81e7, 0),# East Asian ideograph
- 0x217a5d: (0x59f6, 0),# East Asian ideograph
- 0x217a5e: (0x59dd, 0),# East Asian ideograph
- 0x217a5f: (0x59fa, 0),# East Asian ideograph
- 0x227a60: (0x81ef, 0),# East Asian ideograph
- 0x217a61: (0x59e4, 0),# East Asian ideograph
- 0x227a65: (0x81f2, 0),# East Asian ideograph
- 0x227a68: (0x81f6, 0),# East Asian ideograph
- 0x6f5553: (0xc5ca, 0),# Korean hangul
- 0x6f5467: (0xc494, 0),# Korean hangul
- 0x274225: (0x6324, 0),# East Asian ideograph
- 0x217a6e: (0x5a2a, 0),# East Asian ideograph
- 0x213b38: (0x5bf5, 0),# East Asian ideograph
- 0x227a70: (0x8201, 0),# East Asian ideograph
- 0x2d5468: (0x6959, 0),# East Asian ideograph
- 0x227a72: (0x8201, 0),# East Asian ideograph (not in Unicode)
- 0x227a74: (0x8203, 0),# East Asian ideograph
- 0x227a75: (0x8204, 0),# East Asian ideograph
- 0x2d3c49: (0x83f7, 0),# East Asian ideograph
- 0x227a77: (0x820b, 0),# East Asian ideograph
- 0x217a78: (0x5a09, 0),# East Asian ideograph
- 0x23546a: (0x9ad1, 0),# East Asian ideograph
- 0x217a7e: (0x5a12, 0),# East Asian ideograph
- 0x6f4e78: (0xb7b5, 0),# Korean hangul
- 0x6f546b: (0xc4f1, 0),# Korean hangul
- 0x6f5554: (0xc5cc, 0),# Korean hangul
- 0x6f546c: (0xc4f4, 0),# Korean hangul
- 0x274226: (0x62e7, 0),# East Asian ideograph
- 0x213b39: (0x5bf6, 0),# East Asian ideograph
- 0x21546d: (0x82d3, 0),# East Asian ideograph
- 0x234337: (0x927c, 0),# East Asian ideograph
- 0x22546e: (0x7218, 0),# East Asian ideograph
- 0x28395c: (0x6654, 0),# East Asian ideograph
- 0x2d546f: (0x83c0, 0),# East Asian ideograph
- 0x22725e: (0x7e09, 0),# East Asian ideograph
- 0x4b4559: (0x9792, 0),# East Asian ideograph
- 0x6f5470: (0xc50c, 0),# Korean hangul
- 0x4b5227: (0x6b20, 0),# East Asian ideograph
- 0x6f5555: (0xc5ce, 0),# Korean hangul
- 0x225471: (0x720b, 0),# East Asian ideograph
- 0x33477b: (0x904a, 0),# East Asian ideograph
- 0x235472: (0x9adc, 0),# East Asian ideograph
- 0x4b5223: (0x7e4a, 0),# East Asian ideograph
- 0x275777: (0x4eb5, 0),# East Asian ideograph
- 0x215474: (0x834a, 0),# East Asian ideograph
- 0x22725f: (0x7e1f, 0),# East Asian ideograph
- 0x23366f: (0x8ceb, 0),# East Asian ideograph
- 0x335445: (0x67c1, 0),# East Asian ideograph
- 0x6f5475: (0xc530, 0),# Korean hangul
- 0x6f5556: (0xc5d0, 0),# Korean hangul
- 0x235476: (0x9ae0, 0),# East Asian ideograph
- 0x274228: (0x62df, 0),# East Asian ideograph
- 0x23477c: (0x943d, 0),# East Asian ideograph
- 0x215477: (0x8350, 0),# East Asian ideograph
- 0x27593f: (0x8bc1, 0),# East Asian ideograph
- 0x233f22: (0x90dd, 0),# East Asian ideograph
- 0x6f5478: (0xc53b, 0),# Korean hangul
- 0x293f23: (0x90cf, 0),# East Asian ideograph
- 0x6f5827: (0xc999, 0),# Korean hangul
- 0x225479: (0x721a, 0),# East Asian ideograph
- 0x233670: (0x8cda, 0),# East Asian ideograph
- 0x212a44: (0xe8f1, 0),# EACC component character
- 0x335446: (0x8221, 0),# East Asian ideograph
- 0x22437e: (0x6b2c, 0),# East Asian ideograph
- 0x4b5154: (0x7df4, 0),# East Asian ideograph
- 0x6f547c: (0xc544, 0),# Korean hangul
- 0x233f27: (0x90d8, 0),# East Asian ideograph
- 0x6f5b6d: (0xd30d, 0),# Korean hangul
- 0x22547d: (0x721f, 0),# East Asian ideograph
- 0x273f28: (0x6001, 0),# East Asian ideograph
- 0x223272: (0x63eb, 0),# East Asian ideograph
- 0x6f4f53: (0xb974, 0),# Korean hangul
- 0x213f29: (0x613e, 0),# East Asian ideograph
- 0x225048: (0x7096, 0),# East Asian ideograph (not in Unicode)
- 0x6f5624: (0xc650, 0),# Korean hangul
- 0x213f2a: (0x6127, 0),# East Asian ideograph
- 0x27422a: (0x6269, 0),# East Asian ideograph
- 0x213c2b: (0x5d87, 0),# East Asian ideograph
- 0x213f2c: (0x6147, 0),# East Asian ideograph
- 0x275f37: (0x9645, 0),# East Asian ideograph
- 0x223f2d: (0x6985, 0),# East Asian ideograph
- 0x273f2e: (0x5e86, 0),# East Asian ideograph
- 0x6f4e79: (0xb7c9, 0),# Korean hangul
- 0x274833: (0x6d51, 0),# East Asian ideograph
- 0x213f2f: (0x6167, 0),# East Asian ideograph
- 0x6f5559: (0xc5d8, 0),# Korean hangul
- 0x27422b: (0x63b7, 0),# East Asian ideograph
- 0x2e624f: (0x772d, 0),# East Asian ideograph
- 0x227b27: (0x821d, 0),# East Asian ideograph
- 0x227b29: (0x8220, 0),# East Asian ideograph
- 0x6f4a36: (0xae30, 0),# Korean hangul
- 0x217b2c: (0x5a60, 0),# East Asian ideograph
- 0x223f32: (0x693d, 0),# East Asian ideograph
- 0x227b2e: (0x822d, 0),# East Asian ideograph
- 0x227b2f: (0x822f, 0),# East Asian ideograph
- 0x6f5477: (0xc539, 0),# Korean hangul
- 0x217b31: (0x5a67, 0),# East Asian ideograph
- 0x227b32: (0x8238, 0),# East Asian ideograph
- 0x273f33: (0x5fe7, 0),# East Asian ideograph
- 0x227b34: (0x823a, 0),# East Asian ideograph
- 0x227b35: (0x8233, 0),# East Asian ideograph
- 0x227b36: (0x8234, 0),# East Asian ideograph
- 0x213f34: (0x617c, 0),# East Asian ideograph
- 0x227b3a: (0x8232, 0),# East Asian ideograph
- 0x217b3b: (0x5a5e, 0),# East Asian ideograph
- 0x217b3c: (0x5a6d, 0),# East Asian ideograph
- 0x217b3d: (0x5a35, 0),# East Asian ideograph
- 0x217b3e: (0x5a55, 0),# East Asian ideograph
- 0x27422c: (0x64b5, 0),# East Asian ideograph
- 0x215d58: (0x9234, 0),# East Asian ideograph (variant of 4B5D58 which maps to 9234)
- 0x217b41: (0x5a2c, 0),# East Asian ideograph
- 0x227b42: (0x8248, 0),# East Asian ideograph
- 0x227b43: (0x8249, 0),# East Asian ideograph
- 0x227b45: (0x8244, 0),# East Asian ideograph
- 0x227b47: (0x8240, 0),# East Asian ideograph
- 0x227b48: (0x8241, 0),# East Asian ideograph
- 0x217b49: (0x5a65, 0),# East Asian ideograph
- 0x227b4a: (0x8245, 0),# East Asian ideograph
- 0x227b4b: (0x824b, 0),# East Asian ideograph
- 0x334e37: (0x784e, 0),# East Asian ideograph
- 0x227b50: (0x824f, 0),# East Asian ideograph
- 0x213f38: (0x6158, 0),# East Asian ideograph
- 0x217b52: (0x5a64, 0),# East Asian ideograph
- 0x227b53: (0x824e, 0),# East Asian ideograph
- 0x227b56: (0x8256, 0),# East Asian ideograph
- 0x227b57: (0x8257, 0),# East Asian ideograph
- 0x2d6b33: (0x5231, 0),# East Asian ideograph (not in Unicode)
- 0x6f555b: (0xc5e1, 0),# Korean hangul
- 0x223f3a: (0x6934, 0),# East Asian ideograph
- 0x227b5e: (0x825a, 0),# East Asian ideograph
- 0x227b62: (0x825f, 0),# East Asian ideograph
- 0x223f3b: (0x6969, 0),# East Asian ideograph
- 0x217b65: (0x5a8a, 0),# East Asian ideograph
- 0x227b67: (0x8262, 0),# East Asian ideograph
- 0x217b69: (0x5acf, 0),# East Asian ideograph
- 0x217b6a: (0x5a7a, 0),# East Asian ideograph
- 0x227b6b: (0x8268, 0),# East Asian ideograph
- 0x227b6f: (0x826d, 0),# East Asian ideograph
- 0x217b71: (0x5a9f, 0),# East Asian ideograph
- 0x273f3e: (0x5baa, 0),# East Asian ideograph
- 0x227b77: (0x8278, 0),# East Asian ideograph
- 0x213f3f: (0x6191, 0),# East Asian ideograph
- 0x227b7d: (0x827f, 0),# East Asian ideograph
- 0x23433f: (0x928d, 0),# East Asian ideograph
- 0x213f41: (0x61ab, 0),# East Asian ideograph
- 0x23486c: (0x946f, 0),# East Asian ideograph
- 0x295f7c: (0x9f80, 0),# East Asian ideograph
- 0x6f4f54: (0xb975, 0),# Korean hangul
- 0x213f42: (0x61a4, 0),# East Asian ideograph
- 0x453d53: (0x5f66, 0),# East Asian ideograph
- 0x4b4561: (0x691c, 0),# East Asian ideograph
- 0x4b5061: (0x7cbe, 0),# East Asian ideograph
- 0x2d4d5f: (0x7741, 0),# East Asian ideograph
- 0x6f555d: (0xc5e5, 0),# Korean hangul
- 0x27422f: (0x64de, 0),# East Asian ideograph
- 0x6f5a69: (0xd0ac, 0),# Korean hangul
- 0x213b42: (0x5c0b, 0),# East Asian ideograph
- 0x294666: (0x933e, 0),# East Asian ideograph
- 0x223f45: (0x69a0, 0),# East Asian ideograph
- 0x234340: (0x92ee, 0),# East Asian ideograph
- 0x4b522b: (0x7f36, 0),# East Asian ideograph
- 0x6f5c50: (0xd48d, 0),# Korean hangul
- 0x223f46: (0x69b1, 0),# East Asian ideograph
- 0x23553c: (0x9b08, 0),# East Asian ideograph
- 0x233f47: (0x90fe, 0),# East Asian ideograph
- 0x295031: (0x98a7, 0),# East Asian ideograph
- 0x213f48: (0x61b6, 0),# East Asian ideograph
- 0x6f555e: (0xc5ec, 0),# Korean hangul
- 0x213f49: (0x61cd, 0),# East Asian ideograph
- 0x213b52: (0x5c3f, 0),# East Asian ideograph
- 0x233f4a: (0x90ff, 0),# East Asian ideograph
- 0x6f4a37: (0xae31, 0),# Korean hangul
- 0x273f4b: (0x601c, 0),# East Asian ideograph
- 0x213f4c: (0x61be, 0),# East Asian ideograph
- 0x516a26: (0x51b4, 0),# East Asian ideograph
- 0x4d4d61: (0x7ef1, 0),# East Asian ideograph
- 0x6f5b49: (0xd23f, 0),# Korean hangul
- 0x275143: (0x7efe, 0),# East Asian ideograph
- 0x274231: (0x62e2, 0),# East Asian ideograph
- 0x295940: (0x9cbc, 0),# East Asian ideograph
- 0x213b44: (0x5c0e, 0),# East Asian ideograph
- 0x234a21: (0x9627, 0),# East Asian ideograph
- 0x223f50: (0x69ce, 0),# East Asian ideograph
- 0x22327a: (0x63dc, 0),# East Asian ideograph
- 0x227269: (0x7e10, 0),# East Asian ideograph
- 0x6f5472: (0xc528, 0),# Korean hangul
- 0x4b3f53: (0x61f2, 0),# East Asian ideograph
- 0x4b5671: (0x873b, 0),# East Asian ideograph (variant of 215671 which maps to 873B)
- 0x223f44: (0x698a, 0),# East Asian ideograph
- 0x213f54: (0x61f7, 0),# East Asian ideograph
- 0x234343: (0x927a, 0),# East Asian ideograph
- 0x213f55: (0x61f6, 0),# East Asian ideograph
- 0x27414f: (0x635f, 0),# East Asian ideograph
- 0x22327b: (0x63d7, 0),# East Asian ideograph
- 0x213f56: (0x61f8, 0),# East Asian ideograph
- 0x223f51: (0x69ca, 0),# East Asian ideograph
- 0x233237: (0x8a57, 0),# East Asian ideograph
- 0x213f57: (0x61f5, 0),# East Asian ideograph
- 0x6f5561: (0xc5f0, 0),# Korean hangul
- 0x21355b: (0x5436, 0),# East Asian ideograph
- 0x233f58: (0x9111, 0),# East Asian ideograph
- 0x215d5f: (0x927b, 0),# East Asian ideograph
- 0x224a66: (0x6e62, 0),# East Asian ideograph
- 0x223f59: (0x698d, 0),# East Asian ideograph
- 0x223f5a: (0x6991, 0),# East Asian ideograph
- 0x217c21: (0x5aa6, 0),# East Asian ideograph
- 0x217c22: (0x5a8c, 0),# East Asian ideograph
- 0x213f5b: (0x61fc, 0),# East Asian ideograph
- 0x227c24: (0x828e, 0),# East Asian ideograph
- 0x227c25: (0x8291, 0),# East Asian ideograph
- 0x217c26: (0x5aa2, 0),# East Asian ideograph
- 0x227c27: (0x828f, 0),# East Asian ideograph
- 0x227c28: (0x8284, 0),# East Asian ideograph
- 0x273f5c: (0x604b, 0),# East Asian ideograph
- 0x227c2d: (0x8283, 0),# East Asian ideograph
- 0x227c2e: (0x828a, 0),# East Asian ideograph
- 0x213f5d: (0x6208, 0),# East Asian ideograph
- 0x225138: (0x70cb, 0),# East Asian ideograph
- 0x274c78: (0x762b, 0),# East Asian ideograph
- 0x227c34: (0x82a7, 0),# East Asian ideograph
- 0x217c35: (0x5a95, 0),# East Asian ideograph
- 0x217c36: (0x5aaf, 0),# East Asian ideograph
- 0x227c38: (0x82ab, 0),# East Asian ideograph
- 0x217c39: (0x5ac8, 0),# East Asian ideograph
- 0x227c3a: (0x82b0, 0),# East Asian ideograph
- 0x227c3c: (0x82a4, 0),# East Asian ideograph
- 0x217c3e: (0x5ab5, 0),# East Asian ideograph
- 0x227c3f: (0x829a, 0),# East Asian ideograph
- 0x233f60: (0x910b, 0),# East Asian ideograph
- 0x227c42: (0x82a3, 0),# East Asian ideograph
- 0x6f4e7b: (0xb7ed, 0),# Korean hangul
- 0x227c44: (0x82b7, 0),# East Asian ideograph
- 0x227c45: (0x82ae, 0),# East Asian ideograph (variant of 4C7C45 which maps to 82AE)
- 0x227c46: (0x82a9, 0),# East Asian ideograph
- 0x213f61: (0x620c, 0),# East Asian ideograph
- 0x217c49: (0x5ad1, 0),# East Asian ideograph
- 0x217c4a: (0x5a90, 0),# East Asian ideograph
- 0x6f5563: (0xc5f6, 0),# Korean hangul
- 0x227c4c: (0x82a8, 0),# East Asian ideograph
- 0x213f62: (0x6210, 0),# East Asian ideograph
- 0x227c4e: (0x82b4, 0),# East Asian ideograph
- 0x217c4f: (0x5ab8, 0),# East Asian ideograph
- 0x227c50: (0x82a1, 0),# East Asian ideograph
- 0x226160: (0x770e, 0),# East Asian ideograph
- 0x217c52: (0x5aaa, 0),# East Asian ideograph
- 0x227c53: (0x82aa, 0),# East Asian ideograph
- 0x227c55: (0x82d9, 0),# East Asian ideograph
- 0x227c57: (0x82fe, 0),# East Asian ideograph
- 0x2d3224: (0x7b87, 0),# East Asian ideograph
- 0x217c59: (0x5ad3, 0),# East Asian ideograph
- 0x227c5a: (0x82e0, 0),# East Asian ideograph
- 0x217c5b: (0x5ab1, 0),# East Asian ideograph
- 0x227c5c: (0x8300, 0),# East Asian ideograph
- 0x227c5f: (0x82ea, 0),# East Asian ideograph
- 0x227c60: (0x82f7, 0),# East Asian ideograph
- 0x227c62: (0x82ef, 0),# East Asian ideograph
- 0x227c63: (0x833a, 0),# East Asian ideograph
- 0x227c64: (0x82e4, 0),# East Asian ideograph
- 0x227c65: (0x82d5, 0),# East Asian ideograph
- 0x227c67: (0x8307, 0),# East Asian ideograph
- 0x227c68: (0x82fa, 0),# East Asian ideograph
- 0x227c69: (0x82f4, 0),# East Asian ideograph
- 0x227c6a: (0x82e2, 0),# East Asian ideograph
- 0x213f67: (0x621b, 0),# East Asian ideograph
- 0x227c6d: (0x82d2, 0),# East Asian ideograph
- 0x217c6e: (0x5ae0, 0),# East Asian ideograph
- 0x227c71: (0x82eb, 0),# East Asian ideograph
- 0x227c72: (0x82d8, 0),# East Asian ideograph
- 0x227c73: (0x82e1, 0),# East Asian ideograph
- 0x227c75: (0x82f6, 0),# East Asian ideograph
- 0x28602b: (0x762a, 0),# East Asian ideograph
- 0x227c7b: (0x8310, 0),# East Asian ideograph
- 0x227c7c: (0x82f3, 0),# East Asian ideograph
- 0x233f6a: (0x911e, 0),# East Asian ideograph
- 0x4b4569: (0x6a71, 0),# East Asian ideograph
- 0x23323b: (0x8a58, 0),# East Asian ideograph
- 0x6f5473: (0xc529, 0),# Korean hangul
- 0x274237: (0x631b, 0),# East Asian ideograph
- 0x6f5122: (0xbc41, 0),# Korean hangul
- 0x226162: (0x771b, 0),# East Asian ideograph
- 0x213f6d: (0x622e, 0),# East Asian ideograph
- 0x213f6e: (0x6230, 0),# East Asian ideograph
- 0x4b5973: (0x8d2e, 0),# East Asian ideograph
- 0x275344: (0x80c1, 0),# East Asian ideograph
- 0x213f6f: (0x6232, 0),# East Asian ideograph
- 0x23323c: (0x8a52, 0),# East Asian ideograph
- 0x6f5566: (0xc5fd, 0),# Korean hangul
- 0x21355c: (0x5433, 0),# East Asian ideograph
- 0x274238: (0x644a, 0),# East Asian ideograph
- 0x6f5123: (0xbc43, 0),# Korean hangul
- 0x226163: (0x7724, 0),# East Asian ideograph
- 0x213f72: (0x6236, 0),# East Asian ideograph
- 0x234349: (0x92aa, 0),# East Asian ideograph
- 0x6f4c38: (0xb18d, 0),# Korean hangul
- 0x22557c: (0x728b, 0),# East Asian ideograph
- 0x213f74: (0x623e, 0),# East Asian ideograph
- 0x225057: (0x7098, 0),# East Asian ideograph
- 0x235b2f: (0x9d7a, 0),# East Asian ideograph
- 0x213f75: (0x6240, 0),# East Asian ideograph
- 0x29325d: (0x8bd4, 0),# East Asian ideograph
- 0x2d3f76: (0x78a5, 0),# East Asian ideograph
- 0x6f5a6b: (0xd0b5, 0),# Korean hangul
- 0x6f5124: (0xbc44, 0),# Korean hangul
- 0x213b4c: (0x5c37, 0),# East Asian ideograph
- 0x223f77: (0x69be, 0),# East Asian ideograph
- 0x4b6a22: (0x7f83, 0),# East Asian ideograph
- 0x213f78: (0x6248, 0),# East Asian ideograph
- 0x222a23: (0x5f82, 0),# East Asian ideograph
- 0x233f79: (0x912b, 0),# East Asian ideograph
- 0x4b456c: (0x6adb, 0),# East Asian ideograph (variant of 21456C)
- 0x2d602d: (0x976d, 0),# East Asian ideograph
- 0x213f7a: (0x624b, 0),# East Asian ideograph
- 0x212a25: (0xe8d4, 0),# EACC component character
- 0x6f5568: (0xc5ff, 0),# Korean hangul
- 0x294e7b: (0x9883, 0),# East Asian ideograph
- 0x4b6a26: (0x6c8d, 0),# East Asian ideograph
- 0x6f4a39: (0xae37, 0),# Korean hangul
- 0x2d5a34: (0x8cad, 0),# East Asian ideograph
- 0x393d6f: (0x8907, 0),# East Asian ideograph
- 0x213f7e: (0x6254, 0),# East Asian ideograph
- 0x6f5a70: (0xd0c0, 0),# Korean hangul
- 0x4b456d: (0x823b, 0),# East Asian ideograph
- 0x2d403f: (0x6255, 0),# East Asian ideograph
- 0x695f70: (0x7195, 0),# East Asian ideograph
- 0x27423b: (0x63fd, 0),# East Asian ideograph
- 0x6f5126: (0xbc84, 0),# Korean hangul
- 0x225731: (0x731d, 0),# East Asian ideograph
- 0x213a7a: (0x5bb5, 0),# East Asian ideograph
- 0x696a2c: (0x87d0, 0),# East Asian ideograph
- 0x2e5a78: (0x74a2, 0),# East Asian ideograph
- 0x277954: (0x5956, 0),# East Asian ideograph
- 0x212a2e: (0xe8dc, 0),# EACC component character
- 0x333240: (0x4ffb, 0),# East Asian ideograph
- 0x232a2f: (0x86fa, 0),# East Asian ideograph
- 0x227d21: (0x830c, 0),# East Asian ideograph
- 0x227d22: (0x82fb, 0),# East Asian ideograph
- 0x227d24: (0x82fd, 0),# East Asian ideograph
- 0x215925: (0x8ae6, 0),# East Asian ideograph
- 0x227d26: (0x8333, 0),# East Asian ideograph
- 0x4b5238: (0x7f87, 0),# East Asian ideograph
- 0x227d29: (0x8328, 0),# East Asian ideograph
- 0x217d2a: (0x5afd, 0),# East Asian ideograph
- 0x217d2b: (0x5b08, 0),# East Asian ideograph
- 0x212a32: (0xe8df, 0),# EACC component character
- 0x227d2e: (0x8351, 0),# East Asian ideograph
- 0x214c5c: (0x75f1, 0),# East Asian ideograph
- 0x6f5c7b: (0xd5cc, 0),# Korean hangul
- 0x4b456f: (0x685c, 0),# East Asian ideograph
- 0x227d35: (0x831b, 0),# East Asian ideograph
- 0x217d38: (0x5b03, 0),# East Asian ideograph
- 0x222a34: (0x3013, 0),# East Asian ideograph (not found in unified han)
- 0x227d3b: (0x8356, 0),# East Asian ideograph
- 0x217d3d: (0x5b17, 0),# East Asian ideograph
- 0x217d3e: (0x5b16, 0),# East Asian ideograph
- 0x227d3f: (0x8322, 0),# East Asian ideograph
- 0x227d40: (0x832c, 0),# East Asian ideograph
- 0x212a36: (0xe8e3, 0),# EACC component character
- 0x217d47: (0x5b1b, 0),# East Asian ideograph
- 0x227d48: (0x833c, 0),# East Asian ideograph
- 0x227d4a: (0x834d, 0),# East Asian ideograph
- 0x334f3a: (0x7a49, 0),# East Asian ideograph
- 0x227d4d: (0x8343, 0),# East Asian ideograph (variant of 4C7D4D which maps to 8343)
- 0x22505c: (0x70b7, 0),# East Asian ideograph
- 0x4b4570: (0x6a29, 0),# East Asian ideograph
- 0x227d52: (0x832f, 0),# East Asian ideograph
- 0x227d53: (0x8348, 0),# East Asian ideograph
- 0x227d54: (0x8312, 0),# East Asian ideograph
- 0x227d56: (0x8316, 0),# East Asian ideograph
- 0x212a39: (0xe8e6, 0),# EACC component character
- 0x227d58: (0x831a, 0),# East Asian ideograph
- 0x217d59: (0x5b32, 0),# East Asian ideograph
- 0x2e6f43: (0x9908, 0),# East Asian ideograph
- 0x6f5a6c: (0xd0b7, 0),# Korean hangul
- 0x6f5129: (0xbc8b, 0),# Korean hangul
- 0x213b51: (0x5c41, 0),# East Asian ideograph
- 0x227d5f: (0x8347, 0),# East Asian ideograph
- 0x227d62: (0x83a8, 0),# East Asian ideograph
- 0x217d63: (0x5b3f, 0),# East Asian ideograph
- 0x4b3021: (0x58f1, 0),# East Asian ideograph
- 0x227d67: (0x83ad, 0),# East Asian ideograph
- 0x6f5121: (0xbc40, 0),# Korean hangul
- 0x286a3c: (0x7aad, 0),# East Asian ideograph
- 0x4c7d6a: (0x8323, 0),# East Asian ideograph
- 0x227d6d: (0x8373, 0),# East Asian ideograph
- 0x217d6e: (0x5b45, 0),# East Asian ideograph
- 0x6f4e7d: (0xb7f4, 0),# Korean hangul
- 0x227d72: (0x83b0, 0),# East Asian ideograph
- 0x217d74: (0x5b4c, 0),# East Asian ideograph
- 0x212a3e: (0xe8eb, 0),# EACC component character
- 0x227d76: (0x831d, 0),# East Asian ideograph
- 0x6f556d: (0xc608, 0),# Korean hangul
- 0x282868: (0x5e91, 0),# East Asian ideograph
- 0x227d7a: (0x838f, 0),# East Asian ideograph
- 0x6f512a: (0xbc8c, 0),# Korean hangul
- 0x227d7c: (0x8395, 0),# East Asian ideograph
- 0x227d7e: (0x8375, 0),# East Asian ideograph
- 0x215928: (0x8af1, 0),# East Asian ideograph
- 0x234350: (0x92a6, 0),# East Asian ideograph
- 0x51384d: (0x51c3, 0),# East Asian ideograph
- 0x69545c: (0x58d7, 0),# East Asian ideograph
- 0x212a41: (0xe8ee, 0),# EACC component character
- 0x275e61: (0x9614, 0),# East Asian ideograph
- 0x212a46: (0x3013, 0),# Ideographic geta symbol
- 0x212a42: (0xe8ef, 0),# EACC component character
- 0x23545d: (0x9ac8, 0),# East Asian ideograph
- 0x226a43: (0x7abf, 0),# East Asian ideograph
- 0x6f556e: (0xc60c, 0),# Korean hangul
- 0x6f512b: (0xbc94, 0),# Korean hangul
- 0x22315c: (0x634b, 0),# East Asian ideograph
- 0x212a45: (0xe8f2, 0),# EACC component character
- 0x6f5574: (0xc62c, 0),# Korean hangul
- 0x2d314c: (0x5008, 0),# East Asian ideograph
- 0x27534d: (0x8109, 0),# East Asian ideograph
- 0x273b6e: (0x5188, 0),# East Asian ideograph
- 0x214c60: (0x760d, 0),# East Asian ideograph
- 0x2d4d71: (0x7719, 0),# East Asian ideograph
- 0x213e6a: (0x60df, 0),# East Asian ideograph
- 0x6f5c21: (0xd33d, 0),# Korean hangul
- 0x2d3c61: (0x5e47, 0),# East Asian ideograph
- 0x234667: (0x93e7, 0),# East Asian ideograph
- 0x6f512c: (0xbc95, 0),# Korean hangul
- 0x21592a: (0x8adc, 0),# East Asian ideograph
- 0x2d3c65: (0x79ca, 0),# East Asian ideograph
- 0x6f4e29: (0xb550, 0),# Korean hangul
- 0x393e47: (0x8cc9, 0),# East Asian ideograph
- 0x6f5032: (0xba70, 0),# Korean hangul
- 0x2f386f: (0x8dd7, 0),# East Asian ideograph
- 0x214c61: (0x7627, 0),# East Asian ideograph
- 0x6f2464: (0x314e, 0),# Korean hangul
- 0x6f5b47: (0xd23c, 0),# Korean hangul
- 0x6f512d: (0xbc97, 0),# Korean hangul
- 0x6f516e: (0xbe45, 0),# Korean hangul
- 0x226a4f: (0x7ad1, 0),# East Asian ideograph
- 0x4b3974: (0x5b22, 0),# East Asian ideograph
- 0x3f5564: (0x61de, 0),# East Asian ideograph
- 0x214c62: (0x7613, 0),# East Asian ideograph
- 0x4b6130: (0x99c4, 0),# East Asian ideograph
- 0x6f5571: (0xc624, 0),# Korean hangul
- 0x6f512e: (0xbc98, 0),# Korean hangul
- 0x284642: (0x6bf5, 0),# East Asian ideograph
- 0x226a54: (0x7ad5, 0),# East Asian ideograph
- 0x2d5a3d: (0x8cdb, 0),# East Asian ideograph
- 0x225c50: (0x74e4, 0),# East Asian ideograph
- 0x225062: (0x70a1, 0),# East Asian ideograph
- 0x335461: (0x6cd6, 0),# East Asian ideograph
- 0x213041: (0x4e4d, 0),# East Asian ideograph
- 0x6f5572: (0xc625, 0),# Korean hangul
- 0x6f512f: (0xbc99, 0),# Korean hangul
- 0x234355: (0x929a, 0),# East Asian ideograph
- 0x6f4a3b: (0xae40, 0),# Korean hangul
- 0x217e59: (0x5bc1, 0),# East Asian ideograph
- 0x2f2a5a: (0x868b, 0),# Unrelated variant of EACC 23293D which maps to 868B
- 0x22714b: (0x7d9b, 0),# East Asian ideograph
- 0x227e21: (0x837f, 0),# East Asian ideograph
- 0x227e22: (0x8399, 0),# East Asian ideograph
- 0x225063: (0x70a3, 0),# East Asian ideograph
- 0x217e24: (0x5b65, 0),# East Asian ideograph
- 0x227e25: (0x8387, 0),# East Asian ideograph
- 0x227e26: (0x83b9, 0),# East Asian ideograph
- 0x217e27: (0x5c58, 0),# East Asian ideograph (not in Unicode)
- 0x217e28: (0x5b6c, 0),# East Asian ideograph
- 0x217e2a: (0x5b6e, 0),# East Asian ideograph
- 0x227e2b: (0x83a9, 0),# East Asian ideograph
- 0x227e2f: (0x839b, 0),# East Asian ideograph
- 0x217e30: (0x5b7b, 0),# East Asian ideograph
- 0x217e31: (0x5b7c, 0),# East Asian ideograph
- 0x217e32: (0x5b80, 0),# East Asian ideograph
- 0x227e33: (0x83aa, 0),# East Asian ideograph
- 0x217e34: (0x5b84, 0),# East Asian ideograph
- 0x217e35: (0x5b82, 0),# East Asian ideograph (not in Unicode)
- 0x227e37: (0x839c, 0),# East Asian ideograph
- 0x227e38: (0x839f, 0),# East Asian ideograph
- 0x222a5f: (0x5fd1, 0),# East Asian ideograph
- 0x217e40: (0x5b95, 0),# East Asian ideograph
- 0x227e41: (0x83cf, 0),# East Asian ideograph
- 0x227e43: (0x83f9, 0),# East Asian ideograph
- 0x2f445f: (0x941a, 0),# East Asian ideograph
- 0x227e45: (0x8421, 0),# East Asian ideograph
- 0x6f5476: (0xc538, 0),# Korean hangul
- 0x217e49: (0x5bac, 0),# East Asian ideograph
- 0x6f5131: (0xbca0, 0),# Korean hangul
- 0x294a44: (0x9655, 0),# East Asian ideograph
- 0x21592f: (0x8af7, 0),# East Asian ideograph
- 0x227e52: (0x83ea, 0),# East Asian ideograph
- 0x227e53: (0x8413, 0),# East Asian ideograph
- 0x217e55: (0x5bb7, 0),# East Asian ideograph
- 0x227e56: (0x83fc, 0),# East Asian ideograph
- 0x227e57: (0x83f6, 0),# East Asian ideograph
- 0x227e59: (0x8410, 0),# East Asian ideograph
- 0x227e5a: (0x83e1, 0),# East Asian ideograph
- 0x217e5b: (0x3761, 0),# East Asian ideograph (not found in unified han)
- 0x227e60: (0x83c6, 0),# East Asian ideograph
- 0x227e61: (0x8407, 0),# East Asian ideograph
- 0x227e63: (0x83eb, 0),# East Asian ideograph
- 0x216a66: (0x51df, 0),# East Asian ideograph
- 0x6f5575: (0xc62d, 0),# Korean hangul
- 0x217e68: (0x5bd4, 0),# East Asian ideograph
- 0x217e6a: (0x5bc3, 0),# East Asian ideograph
- 0x227e6b: (0x83e2, 0),# East Asian ideograph
- 0x227e6d: (0x8401, 0),# East Asian ideograph
- 0x217e6e: (0x5bd6, 0),# East Asian ideograph
- 0x234358: (0x92ab, 0),# East Asian ideograph
- 0x227e71: (0x83d8, 0),# East Asian ideograph
- 0x227e72: (0x83e5, 0),# East Asian ideograph
- 0x227e74: (0x8418, 0),# East Asian ideograph
- 0x217e75: (0x5bd7, 0),# East Asian ideograph
- 0x227e79: (0x83ce, 0),# East Asian ideograph
- 0x227e7b: (0x83d3, 0),# East Asian ideograph
- 0x295b52: (0x9e4e, 0),# East Asian ideograph
- 0x227e7d: (0x83d6, 0),# East Asian ideograph
- 0x217e7e: (0x5bea, 0),# East Asian ideograph
- 0x6f5576: (0xc62e, 0),# Korean hangul
- 0x6f5133: (0xbca4, 0),# Korean hangul
- 0x294a46: (0x9649, 0),# East Asian ideograph
- 0x275f3d: (0x96b6, 0),# East Asian ideograph
- 0x4b6260: (0x9ebd, 0),# East Asian ideograph
- 0x227e6a: (0x83bf, 0),# East Asian ideograph
- 0x2d6030: (0x97ee, 0),# East Asian ideograph
- 0x235466: (0x9ad0, 0),# East Asian ideograph
- 0x22454d: (0x6996, 0),# East Asian ideograph
- 0x6f5577: (0xc633, 0),# Korean hangul
- 0x6f5134: (0xbca7, 0),# Korean hangul
- 0x213b5c: (0x5c50, 0),# East Asian ideograph
- 0x215932: (0x8b19, 0),# East Asian ideograph
- 0x6f4a3c: (0xae41, 0),# Korean hangul
- 0x2d3c6d: (0x8298, 0),# East Asian ideograph (duplicate simplified)
- 0x222a73: (0x5ff8, 0),# East Asian ideograph
- 0x225068: (0x7551, 0),# East Asian ideograph
- 0x6f5551: (0xc5c8, 0),# Korean hangul
- 0x6f5135: (0xbca8, 0),# Korean hangul
- 0x215521: (0x5179, 0),# East Asian ideograph
- 0x223f5c: (0x69aa, 0),# East Asian ideograph
- 0x275274: (0x58f0, 0),# East Asian ideograph
- 0x2d3c6e: (0x7240, 0),# East Asian ideograph
- 0x6f5523: (0xc54c, 0),# Korean hangul
- 0x6f5524: (0xc54e, 0),# Korean hangul
- 0x23324f: (0x8a7f, 0),# East Asian ideograph
- 0x233e37: (0x9088, 0),# East Asian ideograph
- 0x4c4446: (0x6b4e, 0),# East Asian ideograph
- 0x6f5525: (0xc553, 0),# Korean hangul
- 0x232a7b: (0x877b, 0),# East Asian ideograph
- 0x6f5526: (0xc554, 0),# Korean hangul
- 0x3a6a7c: (0x7bea, 0),# East Asian ideograph
- 0x235527: (0x9aeb, 0),# East Asian ideograph
- 0x22763d: (0x801e, 0),# East Asian ideograph
- 0x235528: (0x9af2, 0),# East Asian ideograph
- 0x215529: (0x8396, 0),# East Asian ideograph
- 0x233250: (0x8a86, 0),# East Asian ideograph
- 0x225424: (0x71c1, 0),# East Asian ideograph
- 0x21552a: (0x83a7, 0),# East Asian ideograph
- 0x6f5137: (0xbcb1, 0),# Korean hangul
- 0x213b5f: (0x5c5c, 0),# East Asian ideograph
- 0x6f552b: (0xc55e, 0),# Korean hangul
- 0x295f7b: (0x9f51, 0),# East Asian ideograph
- 0x2d3c70: (0x576b, 0),# East Asian ideograph
- 0x27552d: (0x5e84, 0),# East Asian ideograph
- 0x2d552e: (0x82fa, 0),# East Asian ideograph (variant of 227C68)
- 0x23316e: (0x8a04, 0),# East Asian ideograph
- 0x2d493a: (0x702c, 0),# East Asian ideograph
- 0x215d79: (0x9375, 0),# East Asian ideograph
- 0x28464c: (0x6be1, 0),# East Asian ideograph
- 0x213b60: (0x5c62, 0),# East Asian ideograph
- 0x6f5531: (0xc570, 0),# Korean hangul
- 0x293726: (0x8d5c, 0),# East Asian ideograph
- 0x235532: (0x9af9, 0),# East Asian ideograph
- 0x6f5533: (0xc573, 0),# Korean hangul
- 0x6f5534: (0xc574, 0),# Korean hangul
- 0x2f3363: (0x8b1a, 0),# East Asian ideograph
- 0x6f5139: (0xbcb5, 0),# Korean hangul
- 0x235535: (0x9afd, 0),# East Asian ideograph
- 0x4b3474: (0x537f, 0),# East Asian ideograph (variant of 213474 which maps to 537F)
- 0x6f582f: (0xc9d0, 0),# Korean hangul
- 0x22385a: (0x6678, 0),# East Asian ideograph
- 0x235536: (0x9b01, 0),# East Asian ideograph
- 0x2d5a48: (0x8d71, 0),# East Asian ideograph
- 0x295b59: (0x9e5c, 0),# East Asian ideograph
- 0x235538: (0x9b02, 0),# East Asian ideograph
- 0x6f5539: (0xc584, 0),# Korean hangul
- 0x6f513a: (0xbcbc, 0),# Korean hangul
- 0x4b553a: (0x83c1, 0),# East Asian ideograph (variant of 21553A which maps to 83C1)
- 0x23553b: (0x9b00, 0),# East Asian ideograph
- 0x2d3830: (0x573b, 0),# East Asian ideograph
- 0x27553c: (0x534e, 0),# East Asian ideograph
- 0x283542: (0x64b7, 0),# East Asian ideograph
- 0x287531: (0x7f9f, 0),# East Asian ideograph
- 0x33502a: (0x9257, 0),# East Asian ideograph
- 0x23553e: (0x9b04, 0),# East Asian ideograph
- 0x6f513b: (0xbcbd, 0),# Korean hangul
- 0x213b63: (0x5c6c, 0),# East Asian ideograph
- 0x22565b: (0x72c1, 0),# East Asian ideograph
- 0x275947: (0x8c34, 0),# East Asian ideograph
- 0x223442: (0x648f, 0),# East Asian ideograph
- 0x213e3f: (0x6062, 0),# East Asian ideograph
- 0x3f4472: (0x7881, 0),# East Asian ideograph
- 0x215541: (0x840a, 0),# East Asian ideograph
- 0x4b5542: (0x8420, 0),# East Asian ideograph
- 0x33502b: (0x724b, 0),# East Asian ideograph
- 0x235543: (0x9b0b, 0),# East Asian ideograph
- 0x6f4b2b: (0xafce, 0),# Korean hangul
- 0x2e4d3d: (0x6d38, 0),# East Asian ideograph
- 0x696a5e: (0x88b0, 0),# East Asian ideograph
- 0x227431: (0x7f4e, 0),# East Asian ideograph
- 0x213561: (0x543b, 0),# East Asian ideograph
- 0x225d39: (0x7517, 0),# East Asian ideograph
- 0x6f5545: (0xc5b5, 0),# Korean hangul
- 0x6f5546: (0xc5b6, 0),# Korean hangul
- 0x6f4f5b: (0xb98e, 0),# Korean hangul
- 0x295b5c: (0x9e5b, 0),# East Asian ideograph
- 0x225070: (0x79cc, 0),# East Asian ideograph
- 0x235547: (0x9b0e, 0),# East Asian ideograph
- 0x233256: (0x8a61, 0),# East Asian ideograph
- 0x6f5548: (0xc5b9, 0),# Korean hangul
- 0x274252: (0x654c, 0),# East Asian ideograph
- 0x6f513d: (0xbcc4, 0),# Korean hangul
- 0x284651: (0x6c07, 0),# East Asian ideograph
- 0x6f5549: (0xc5ba, 0),# Korean hangul
- 0x213722: (0x55c6, 0),# East Asian ideograph
- 0x233c2d: (0x8f40, 0),# East Asian ideograph
- 0x6f554a: (0xc5bb, 0),# Korean hangul
- 0x6f554b: (0xc5bc, 0),# Korean hangul
- 0x28575e: (0x72b8, 0),# East Asian ideograph
- 0x227849: (0x811d, 0),# East Asian ideograph
- 0x393944: (0x59b3, 0),# East Asian ideograph
- 0x6f554c: (0xc5bd, 0),# Korean hangul
- 0x355e76: (0x82be, 0),# East Asian ideograph
- 0x27554d: (0x82c7, 0),# East Asian ideograph
- 0x6f513e: (0xbccc, 0),# Korean hangul
- 0x23554e: (0x9b11, 0),# East Asian ideograph
- 0x223f65: (0x699e, 0),# East Asian ideograph
- 0x4b5959: (0x8273, 0),# East Asian ideograph
- 0x21554f: (0x8449, 0),# East Asian ideograph
- 0x2d5550: (0x585f, 0),# East Asian ideograph
- 0x28575f: (0x72f2, 0),# East Asian ideograph
- 0x283546: (0x6445, 0),# East Asian ideograph
- 0x225072: (0x70bf, 0),# East Asian ideograph
- 0x215551: (0x846b, 0),# East Asian ideograph
- 0x233258: (0x8a3e, 0),# East Asian ideograph
- 0x225552: (0x7253, 0),# East Asian ideograph
- 0x274254: (0x6570, 0),# East Asian ideograph
- 0x6f513f: (0xbccd, 0),# Korean hangul
- 0x225553: (0x7255, 0),# East Asian ideograph
- 0x275276: (0x806a, 0),# East Asian ideograph
- 0x6f7726: (0xadb9, 0),# Korean hangul
- 0x235554: (0x9b18, 0),# East Asian ideograph
- 0x6f585a: (0xcac4, 0),# Korean hangul
- 0x333f22: (0x6168, 0),# East Asian ideograph (variant of 213F22 which maps to 6168)
- 0x275555: (0x83b4, 0)# East Asian ideograph
-}
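The table that closes above maps 24-bit EACC codes (three MARC-8 bytes packed into a single integer key, e.g. 0x215d32) to pairs of (Unicode code point, combining flag). A minimal illustrative sketch of how a decoder might consult such a table follows; the function name and the `table` parameter are assumptions for illustration, not part of the deleted module, and Python 3 `chr()` is assumed.

    def decode_eacc(b1, b2, b3, table):
        # Pack the three EACC bytes into the 24-bit key used by the table above.
        key = (b1 << 16) | (b2 << 8) | b3
        entry = table.get(key)
        if entry is None:
            return None                  # unmapped EACC code
        codepoint, combining = entry     # combining is 0 for the EACC entries shown here
        return chr(codepoint)

    # e.g. decode_eacc(0x21, 0x5d, 0x32, table) -> '\u91ac',
    # matching the 0x215d32 entry near the top of this section.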
-charset_70 = { # Superscripts
- 0x28: (0x207d, 0),# SUPERSCRIPT OPENING PARENTHESIS / SUPERSCRIPT LEFT PARENTHESIS
- 0x29: (0x207e, 0),# SUPERSCRIPT CLOSING PARENTHESIS / SUPERSCRIPT RIGHT PARENTHESIS
- 0x2b: (0x207a, 0),# SUPERSCRIPT PLUS SIGN
- 0x2d: (0x207b, 0),# SUPERSCRIPT HYPHEN-MINUS / SUPERSCRIPT MINUS
- 0x30: (0x2070, 0),# SUPERSCRIPT DIGIT ZERO
- 0x31: (0xb9, 0),# SUPERSCRIPT DIGIT ONE
- 0x32: (0xb2, 0),# SUPERSCRIPT DIGIT TWO
- 0x33: (0xb3, 0),# SUPERSCRIPT DIGIT THREE
- 0x34: (0x2074, 0),# SUPERSCRIPT DIGIT FOUR
- 0x35: (0x2075, 0),# SUPERSCRIPT DIGIT FIVE
- 0x36: (0x2076, 0),# SUPERSCRIPT DIGIT SIX
- 0x37: (0x2077, 0),# SUPERSCRIPT DIGIT SEVEN
- 0x38: (0x2078, 0),# SUPERSCRIPT DIGIT EIGHT
- 0x39: (0x2079, 0)# SUPERSCRIPT DIGIT NINE
-}
-charset_51 = { # Extended Cyrillic
- 0xc0: (0x491, 0),# LOWERCASE GE WITH UPTURN / CYRILLIC SMALL LETTER GHE WITH UPTURN
- 0xc1: (0x452, 0),# LOWERCASE DJE / CYRILLIC SMALL LETTER DJE (Serbian)
- 0xc2: (0x453, 0),# CYRILLIC SMALL LETTER GJE
- 0xc3: (0x454, 0),# LOWERCASE E / CYRILLIC SMALL LETTER UKRAINIAN IE
- 0xc4: (0x451, 0),# CYRILLIC SMALL LETTER IO
- 0xc5: (0x455, 0),# CYRILLIC SMALL LETTER DZE
- 0xc6: (0x456, 0),# LOWERCASE I / CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
- 0xc7: (0x457, 0),# LOWERCASE YI / CYRILLIC SMALL LETTER YI (Ukrainian)
- 0xc8: (0x458, 0),# CYRILLIC SMALL LETTER JE
- 0xc9: (0x459, 0),# CYRILLIC SMALL LETTER LJE
- 0xca: (0x45a, 0),# CYRILLIC SMALL LETTER NJE
- 0xcb: (0x45b, 0),# LOWERCASE TSHE / CYRILLIC SMALL LETTER TSHE (Serbian)
- 0xcc: (0x45c, 0),# CYRILLIC SMALL LETTER KJE
- 0xcd: (0x45e, 0),# LOWERCASE SHORT U / CYRILLIC SMALL LETTER SHORT U (Byelorussian)
- 0xce: (0x45f, 0),# CYRILLIC SMALL LETTER DZHE
- 0xd0: (0x463, 0),# CYRILLIC SMALL LETTER YAT
- 0xd1: (0x473, 0),# CYRILLIC SMALL LETTER FITA
- 0xd2: (0x475, 0),# CYRILLIC SMALL LETTER IZHITSA
- 0xd3: (0x46b, 0),# CYRILLIC SMALL LETTER BIG YUS
- 0xdb: (0x5b, 0),# OPENING SQUARE BRACKET / LEFT SQUARE BRACKET
- 0xdd: (0x5d, 0),# CLOSING SQUARE BRACKET / RIGHT SQUARE BRACKET
- 0xdf: (0x5f, 0),# SPACING UNDERSCORE / LOW LINE
- 0xe0: (0x490, 0),# UPPERCASE GE WITH UPTURN / CYRILLIC CAPITAL LETTER GHE WITH UPTURN
- 0xe1: (0x402, 0),# UPPERCASE DJE / CYRILLIC CAPITAL LETTER DJE (Serbian)
- 0xe2: (0x403, 0),# CYRILLIC CAPITAL LETTER GJE
- 0xe3: (0x404, 0),# UPPERCASE E / CYRILLIC CAPITAL LETTER UKRAINIAN IE
- 0xe4: (0x401, 0),# CYRILLIC CAPITAL LETTER IO
- 0xe5: (0x405, 0),# CYRILLIC CAPITAL LETTER DZE
- 0xe6: (0x406, 0),# UPPERCASE I / CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
- 0xe7: (0x407, 0),# UPPERCASE YI / CYRILLIC CAPITAL LETTER YI (Ukrainian)
- 0xe8: (0x408, 0),# CYRILLIC CAPITAL LETTER JE
- 0xe9: (0x409, 0),# CYRILLIC CAPITAL LETTER LJE
- 0xea: (0x40a, 0),# CYRILLIC CAPITAL LETTER NJE
- 0xeb: (0x40b, 0),# UPPERCASE TSHE / CYRILLIC CAPITAL LETTER TSHE (Serbian)
- 0xec: (0x40c, 0),# CYRILLIC CAPITAL LETTER KJE
- 0xed: (0x40e, 0),# UPPERCASE SHORT U / CYRILLIC CAPITAL LETTER SHORT U (Byelorussian)
- 0xee: (0x40f, 0),# CYRILLIC CAPITAL LETTER DZHE
- 0xef: (0x42a, 0),# CYRILLIC CAPITAL LETTER HARD SIGN
- 0xf0: (0x462, 0),# CYRILLIC CAPITAL LETTER YAT
- 0xf1: (0x472, 0),# CYRILLIC CAPITAL LETTER FITA
- 0xf2: (0x474, 0),# CYRILLIC CAPITAL LETTER IZHITSA
- 0xf3: (0x46a, 0)# CYRILLIC CAPITAL LETTER BIG YUS
-}
-charset_53 = { # Basic Greek
- 0x21: (0x300, 1),# COMBINING GRAVE ACCENT
- 0x22: (0x301, 1),# COMBINING ACUTE ACCENT
- 0x23: (0x308, 1),# COMBINING DIAERESIS
- 0x24: (0x342, 1),# COMBINING GREEK PERISPOMENI / CIRCUMFLEX
- 0x25: (0x313, 1),# COMBINING COMMA ABOVE / SMOOTH BREATHING
- 0x26: (0x314, 1),# COMBINING REVERSED COMMA ABOVE / ROUGH BREATHING
- 0x27: (0x345, 1),# COMBINING GREEK YPOGEGRAMMENI / IOTA SUBSCRIPT
- 0x30: (0xab, 0),# LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
- 0x31: (0xbb, 0),# RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
- 0x32: (0x201c, 0),# LEFT DOUBLE QUOTATION MARK
- 0x33: (0x201d, 0),# RIGHT DOUBLE QUOTATION MARK
- 0x34: (0x374, 0),# GREEK NUMERAL SIGN / UPPER PRIME
- 0x35: (0x375, 0),# GREEK LOWER NUMERAL SIGN / LOWER PRIME
- 0x3b: (0x387, 0),# GREEK ANO TELEIA / RAISED DOT, GREEK SEMICOLON
- 0x3f: (0x37e, 0),# GREEK QUESTION MARK
- 0x41: (0x391, 0),# GREEK CAPITAL LETTER ALPHA
- 0x42: (0x392, 0),# GREEK CAPITAL LETTER BETA
- 0x44: (0x393, 0),# GREEK CAPITAL LETTER GAMMA
- 0x45: (0x394, 0),# GREEK CAPITAL LETTER DELTA
- 0x46: (0x395, 0),# GREEK CAPITAL LETTER EPSILON
- 0x47: (0x3da, 0),# GREEK LETTER STIGMA
- 0x48: (0x3dc, 0),# GREEK LETTER DIGAMMA
- 0x49: (0x396, 0),# GREEK CAPITAL LETTER ZETA
- 0x4a: (0x397, 0),# GREEK CAPITAL LETTER ETA
- 0x4b: (0x398, 0),# GREEK CAPITAL LETTER THETA
- 0x4c: (0x399, 0),# GREEK CAPITAL LETTER IOTA
- 0x4d: (0x39a, 0),# GREEK CAPITAL LETTER KAPPA
- 0x4e: (0x39b, 0),# GREEK CAPITAL LETTER LAMDA
- 0x4f: (0x39c, 0),# GREEK CAPITAL LETTER MU
- 0x50: (0x39d, 0),# GREEK CAPITAL LETTER NU
- 0x51: (0x39e, 0),# GREEK CAPITAL LETTER XI
- 0x52: (0x39f, 0),# GREEK CAPITAL LETTER OMICRON
- 0x53: (0x3a0, 0),# GREEK CAPITAL LETTER PI
- 0x54: (0x3de, 0),# GREEK LETTER KOPPA
- 0x55: (0x3a1, 0),# GREEK CAPITAL LETTER RHO
- 0x56: (0x3a3, 0),# GREEK CAPITAL LETTER SIGMA
- 0x58: (0x3a4, 0),# GREEK CAPITAL LETTER TAU
- 0x59: (0x3a5, 0),# GREEK CAPITAL LETTER UPSILON
- 0x5a: (0x3a6, 0),# GREEK CAPITAL LETTER PHI
- 0x5b: (0x3a7, 0),# GREEK CAPITAL LETTER CHI
- 0x5c: (0x3a8, 0),# GREEK CAPITAL LETTER PSI
- 0x5d: (0x3a9, 0),# GREEK CAPITAL LETTER OMEGA
- 0x5e: (0x3e0, 0),# GREEK LETTER SAMPI
- 0x61: (0x3b1, 0),# GREEK SMALL LETTER ALPHA
- 0x62: (0x3b2, 0),# GREEK SMALL LETTER BETA / SMALL LETTER BETA BEGINNING OF WORD
- 0x63: (0x3d0, 0),# GREEK BETA SYMBOL / SMALL LETTER BETA MIDDLE OF WORD
- 0x64: (0x3b3, 0),# GREEK SMALL LETTER GAMMA
- 0x65: (0x3b4, 0),# GREEK SMALL LETTER DELTA
- 0x66: (0x3b5, 0),# GREEK SMALL LETTER EPSILON
- 0x67: (0x3db, 0),# GREEK SMALL LETTER STIGMA
- 0x68: (0x3dd, 0),# GREEK SMALL LETTER DIGAMMA
- 0x69: (0x3b6, 0),# GREEK SMALL LETTER ZETA
- 0x6a: (0x3b7, 0),# GREEK SMALL LETTER ETA
- 0x6b: (0x3b8, 0),# GREEK SMALL LETTER THETA
- 0x6c: (0x3b9, 0),# GREEK SMALL LETTER IOTA
- 0x6d: (0x3ba, 0),# GREEK SMALL LETTER KAPPA
- 0x6e: (0x3bb, 0),# GREEK SMALL LETTER LAMDA
- 0x6f: (0x3bc, 0),# GREEK SMALL LETTER MU
- 0x70: (0x3bd, 0),# GREEK SMALL LETTER NU
- 0x71: (0x3be, 0),# GREEK SMALL LETTER XI
- 0x72: (0x3bf, 0),# GREEK SMALL LETTER OMICRON
- 0x73: (0x3c0, 0),# GREEK SMALL LETTER PI
- 0x74: (0x3df, 0),# GREEK SMALL LETTER KOPPA
- 0x75: (0x3c1, 0),# GREEK SMALL LETTER RHO
- 0x76: (0x3c3, 0),# GREEK SMALL LETTER SIGMA
- 0x77: (0x3c2, 0),# GREEK SMALL LETTER FINAL SIGMA / SMALL LETTER SIGMA END OF WORD
- 0x78: (0x3c4, 0),# GREEK SMALL LETTER TAU
- 0x79: (0x3c5, 0),# GREEK SMALL LETTER UPSILON
- 0x7a: (0x3c6, 0),# GREEK SMALL LETTER PHI
- 0x7b: (0x3c7, 0),# GREEK SMALL LETTER CHI
- 0x7c: (0x3c8, 0),# GREEK SMALL LETTER PSI
- 0x7d: (0x3c9, 0),# GREEK SMALL LETTER OMEGA
- 0x7e: (0x3e1, 0)# GREEK SMALL LETTER SAMPI
-}
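In the single-byte tables such as charset_53, the second element of each value appears to be a combining flag: entries marked 1 are combining diacritics (e.g. 0x21 maps to U+0300 COMBINING GRAVE ACCENT), which in MARC-8 precede the letter they modify but in Unicode must follow their base character. A small sketch, again assumed for illustration rather than taken from the deleted module, of how such a table might be applied:

    def decode_single_byte(data, table):
        out = []
        pending = []                        # combining marks waiting for a base character
        for byte in data:
            codepoint, combining = table[byte]
            if combining:
                pending.append(chr(codepoint))
            else:
                out.append(chr(codepoint))  # emit the base character first ...
                out.extend(pending)         # ... then the reordered combining marks
                pending = []
        return ''.join(out)

    # e.g. decode_single_byte(b'\x22\x61', charset_53) yields GREEK SMALL LETTER
    # ALPHA followed by COMBINING ACUTE ACCENT ('\u03b1\u0301').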
-charset_42 = { # Basic Latin (ASCII)
- 0x1b: (0x1b, 0),# ESCAPE (Unlikely to occur in UCS/Unicode)
- 0x1d: (0x1d, 0),# RECORD TERMINATOR / GROUP SEPARATOR
- 0x1e: (0x1e, 0),# FIELD TERMINATOR / RECORD SEPARATOR
- 0x1f: (0x1f, 0),# SUBFIELD DELIMITER / UNIT SEPARATOR
- 0x20: (0x20, 0),# SPACE, BLANK / SPACE
- 0x21: (0x21, 0),# EXCLAMATION MARK
- 0x22: (0x22, 0),# QUOTATION MARK
- 0x23: (0x23, 0),# NUMBER SIGN
- 0x24: (0x24, 0),# DOLLAR SIGN
- 0x25: (0x25, 0),# PERCENT SIGN
- 0x26: (0x26, 0),# AMPERSAND
- 0x27: (0x27, 0),# APOSTROPHE
- 0x28: (0x28, 0),# OPENING PARENTHESIS / LEFT PARENTHESIS
- 0x29: (0x29, 0),# CLOSING PARENTHESIS / RIGHT PARENTHESIS
- 0x2a: (0x2a, 0),# ASTERISK
- 0x2b: (0x2b, 0),# PLUS SIGN
- 0x2c: (0x2c, 0),# COMMA
- 0x2d: (0x2d, 0),# HYPHEN-MINUS
- 0x2e: (0x2e, 0),# PERIOD, DECIMAL POINT / FULL STOP
- 0x2f: (0x2f, 0),# SLASH / SOLIDUS
- 0x30: (0x30, 0),# DIGIT ZERO
- 0x31: (0x31, 0),# DIGIT ONE
- 0x32: (0x32, 0),# DIGIT TWO
- 0x33: (0x33, 0),# DIGIT THREE
- 0x34: (0x34, 0),# DIGIT FOUR
- 0x35: (0x35, 0),# DIGIT FIVE
- 0x36: (0x36, 0),# DIGIT SIX
- 0x37: (0x37, 0),# DIGIT SEVEN
- 0x38: (0x38, 0),# DIGIT EIGHT
- 0x39: (0x39, 0),# DIGIT NINE
- 0x3a: (0x3a, 0),# COLON
- 0x3b: (0x3b, 0),# SEMICOLON
- 0x3c: (0x3c, 0),# LESS-THAN SIGN
- 0x3d: (0x3d, 0),# EQUALS SIGN
- 0x3e: (0x3e, 0),# GREATER-THAN SIGN
- 0x3f: (0x3f, 0),# QUESTION MARK
- 0x40: (0x40, 0),# COMMERCIAL AT
- 0x41: (0x41, 0),# LATIN CAPITAL LETTER A
- 0x42: (0x42, 0),# LATIN CAPITAL LETTER B
- 0x43: (0x43, 0),# LATIN CAPITAL LETTER C
- 0x44: (0x44, 0),# LATIN CAPITAL LETTER D
- 0x45: (0x45, 0),# LATIN CAPITAL LETTER E
- 0x46: (0x46, 0),# LATIN CAPITAL LETTER F
- 0x47: (0x47, 0),# LATIN CAPITAL LETTER G
- 0x48: (0x48, 0),# LATIN CAPITAL LETTER H
- 0x49: (0x49, 0),# LATIN CAPITAL LETTER I
- 0x4a: (0x4a, 0),# LATIN CAPITAL LETTER J
- 0x4b: (0x4b, 0),# LATIN CAPITAL LETTER K
- 0x4c: (0x4c, 0),# LATIN CAPITAL LETTER L
- 0x4d: (0x4d, 0),# LATIN CAPITAL LETTER M
- 0x4e: (0x4e, 0),# LATIN CAPITAL LETTER N
- 0x4f: (0x4f, 0),# LATIN CAPITAL LETTER O
- 0x50: (0x50, 0),# LATIN CAPITAL LETTER P
- 0x51: (0x51, 0),# LATIN CAPITAL LETTER Q
- 0x52: (0x52, 0),# LATIN CAPITAL LETTER R
- 0x53: (0x53, 0),# LATIN CAPITAL LETTER S
- 0x54: (0x54, 0),# LATIN CAPITAL LETTER T
- 0x55: (0x55, 0),# LATIN CAPITAL LETTER U
- 0x56: (0x56, 0),# LATIN CAPITAL LETTER V
- 0x57: (0x57, 0),# LATIN CAPITAL LETTER W
- 0x58: (0x58, 0),# LATIN CAPITAL LETTER X
- 0x59: (0x59, 0),# LATIN CAPITAL LETTER Y
- 0x5a: (0x5a, 0),# LATIN CAPITAL LETTER Z
- 0x5b: (0x5b, 0),# OPENING SQUARE BRACKET / LEFT SQUARE BRACKET
- 0x5c: (0x5c, 0),# REVERSE SLASH / REVERSE SOLIDUS
- 0x5d: (0x5d, 0),# CLOSING SQUARE BRACKET / RIGHT SQUARE BRACKET
- 0x5e: (0x5e, 0),# SPACING CIRCUMFLEX / CIRCUMFLEX ACCENT
- 0x5f: (0x5f, 0),# SPACING UNDERSCORE / LOW LINE
- 0x60: (0x60, 0),# SPACING GRAVE / GRAVE ACCENT
- 0x61: (0x61, 0),# LATIN SMALL LETTER A
- 0x62: (0x62, 0),# LATIN SMALL LETTER B
- 0x63: (0x63, 0),# LATIN SMALL LETTER C
- 0x64: (0x64, 0),# LATIN SMALL LETTER D
- 0x65: (0x65, 0),# LATIN SMALL LETTER E
- 0x66: (0x66, 0),# LATIN SMALL LETTER F
- 0x67: (0x67, 0),# LATIN SMALL LETTER G
- 0x68: (0x68, 0),# LATIN SMALL LETTER H
- 0x69: (0x69, 0),# LATIN SMALL LETTER I
- 0x6a: (0x6a, 0),# LATIN SMALL LETTER J
- 0x6b: (0x6b, 0),# LATIN SMALL LETTER K
- 0x6c: (0x6c, 0),# LATIN SMALL LETTER L
- 0x6d: (0x6d, 0),# LATIN SMALL LETTER M
- 0x6e: (0x6e, 0),# LATIN SMALL LETTER N
- 0x6f: (0x6f, 0),# LATIN SMALL LETTER O
- 0x70: (0x70, 0),# LATIN SMALL LETTER P
- 0x71: (0x71, 0),# LATIN SMALL LETTER Q
- 0x72: (0x72, 0),# LATIN SMALL LETTER R
- 0x73: (0x73, 0),# LATIN SMALL LETTER S
- 0x74: (0x74, 0),# LATIN SMALL LETTER T
- 0x75: (0x75, 0),# LATIN SMALL LETTER U
- 0x76: (0x76, 0),# LATIN SMALL LETTER V
- 0x77: (0x77, 0),# LATIN SMALL LETTER W
- 0x78: (0x78, 0),# LATIN SMALL LETTER X
- 0x79: (0x79, 0),# LATIN SMALL LETTER Y
- 0x7a: (0x7a, 0),# LATIN SMALL LETTER Z
- 0x7b: (0x7b, 0),# OPENING CURLY BRACKET / LEFT CURLY BRACKET
- 0x7c: (0x7c, 0),# VERTICAL BAR (FILL) / VERTICAL LINE
- 0x7d: (0x7d, 0),# CLOSING CURLY BRACKET / RIGHT CURLY BRACKET
- 0x7e: (0x7e, 0)# SPACING TILDE / TILDE
-}
-charset_62 = { # Subscripts
- 0x28: (0x208d, 0),# SUBSCRIPT OPENING PARENTHESIS / SUBSCRIPT LEFT PARENTHESIS
- 0x29: (0x208e, 0),# SUBSCRIPT CLOSING PARENTHESIS / SUBSCRIPT RIGHT PARENTHESIS
- 0x2b: (0x208a, 0),# SUBSCRIPT PLUS SIGN
- 0x2d: (0x208b, 0),# SUBSCRIPT HYPHEN-MINUS / SUBSCRIPT MINUS
- 0x30: (0x2080, 0),# SUBSCRIPT DIGIT ZERO
- 0x31: (0x2081, 0),# SUBSCRIPT DIGIT ONE
- 0x32: (0x2082, 0),# SUBSCRIPT DIGIT TWO
- 0x33: (0x2083, 0),# SUBSCRIPT DIGIT THREE
- 0x34: (0x2084, 0),# SUBSCRIPT DIGIT FOUR
- 0x35: (0x2085, 0),# SUBSCRIPT DIGIT FIVE
- 0x36: (0x2086, 0),# SUBSCRIPT DIGIT SIX
- 0x37: (0x2087, 0),# SUBSCRIPT DIGIT SEVEN
- 0x38: (0x2088, 0),# SUBSCRIPT DIGIT EIGHT
- 0x39: (0x2089, 0)# SUBSCRIPT DIGIT NINE
-}
-charset_67 = { # Greek Symbols
- 0x61: (0x3b1, 0),# GREEK SMALL LETTER ALPHA
- 0x62: (0x3b2, 0),# GREEK SMALL LETTER BETA
- 0x63: (0x3b3, 0)# GREEK SMALL LETTER GAMMA
-}
-charset_4E = { # Basic Cyrillic
- 0x21: (0x21, 0),# EXCLAMATION MARK
- 0x22: (0x22, 0),# QUOTATION MARK
- 0x23: (0x23, 0),# NUMBER SIGN
- 0x24: (0x24, 0),# DOLLAR SIGN
- 0x25: (0x25, 0),# PERCENT SIGN
- 0x26: (0x26, 0),# AMPERSAND
- 0x27: (0x27, 0),# APOSTROPHE
- 0x28: (0x28, 0),# OPENING PARENTHESIS / LEFT PARENTHESIS
- 0x29: (0x29, 0),# CLOSING PARENTHESIS / RIGHT PARENTHESIS
- 0x2a: (0x2a, 0),# ASTERISK
- 0x2b: (0x2b, 0),# PLUS SIGN
- 0x2c: (0x2c, 0),# COMMA
- 0x2d: (0x2d, 0),# HYPHEN-MINUS
- 0x2e: (0x2e, 0),# PERIOD, DECIMAL POINT / FULL STOP
- 0x2f: (0x2f, 0),# SLASH / SOLIDUS
- 0x30: (0x30, 0),# DIGIT ZERO
- 0x31: (0x31, 0),# DIGIT ONE
- 0x32: (0x32, 0),# DIGIT TWO
- 0x33: (0x33, 0),# DIGIT THREE
- 0x34: (0x34, 0),# DIGIT FOUR
- 0x35: (0x35, 0),# DIGIT FIVE
- 0x36: (0x36, 0),# DIGIT SIX
- 0x37: (0x37, 0),# DIGIT SEVEN
- 0x38: (0x38, 0),# DIGIT EIGHT
- 0x39: (0x39, 0),# DIGIT NINE
- 0x3a: (0x3a, 0),# COLON
- 0x3b: (0x3b, 0),# SEMICOLON
- 0x3c: (0x3c, 0),# LESS-THAN SIGN
- 0x3d: (0x3d, 0),# EQUALS SIGN
- 0x3e: (0x3e, 0),# GREATER-THAN SIGN
- 0x3f: (0x3f, 0),# QUESTION MARK
- 0x40: (0x44e, 0),# LOWERCASE IU / CYRILLIC SMALL LETTER YU
- 0x41: (0x430, 0),# CYRILLIC SMALL LETTER A
- 0x42: (0x431, 0),# CYRILLIC SMALL LETTER BE
- 0x43: (0x446, 0),# CYRILLIC SMALL LETTER TSE
- 0x44: (0x434, 0),# CYRILLIC SMALL LETTER DE
- 0x45: (0x435, 0),# CYRILLIC SMALL LETTER IE
- 0x46: (0x444, 0),# CYRILLIC SMALL LETTER EF
- 0x47: (0x433, 0),# LOWERCASE GE / CYRILLIC SMALL LETTER GHE
- 0x48: (0x445, 0),# LOWERCASE KHA / CYRILLIC SMALL LETTER HA
- 0x49: (0x438, 0),# LOWERCASE II / CYRILLIC SMALL LETTER I
- 0x4a: (0x439, 0),# LOWERCASE SHORT II / CYRILLIC SMALL LETTER SHORT I
- 0x4b: (0x43a, 0),# CYRILLIC SMALL LETTER KA
- 0x4c: (0x43b, 0),# CYRILLIC SMALL LETTER EL
- 0x4d: (0x43c, 0),# CYRILLIC SMALL LETTER EM
- 0x4e: (0x43d, 0),# CYRILLIC SMALL LETTER EN
- 0x4f: (0x43e, 0),# CYRILLIC SMALL LETTER O
- 0x50: (0x43f, 0),# CYRILLIC SMALL LETTER PE
- 0x51: (0x44f, 0),# LOWERCASE IA / CYRILLIC SMALL LETTER YA
- 0x52: (0x440, 0),# CYRILLIC SMALL LETTER ER
- 0x53: (0x441, 0),# CYRILLIC SMALL LETTER ES
- 0x54: (0x442, 0),# CYRILLIC SMALL LETTER TE
- 0x55: (0x443, 0),# CYRILLIC SMALL LETTER U
- 0x56: (0x436, 0),# CYRILLIC SMALL LETTER ZHE
- 0x57: (0x432, 0),# CYRILLIC SMALL LETTER VE
- 0x58: (0x44c, 0),# CYRILLIC SMALL LETTER SOFT SIGN
- 0x59: (0x44b, 0),# LOWERCASE YERI / CYRILLIC SMALL LETTER YERI
- 0x5a: (0x437, 0),# CYRILLIC SMALL LETTER ZE
- 0x5b: (0x448, 0),# CYRILLIC SMALL LETTER SHA
- 0x5c: (0x44d, 0),# LOWERCASE REVERSED E / CYRILLIC SMALL LETTER E
- 0x5d: (0x449, 0),# CYRILLIC SMALL LETTER SHCHA
- 0x5e: (0x447, 0),# CYRILLIC SMALL LETTER CHE
- 0x5f: (0x44a, 0),# CYRILLIC SMALL LETTER HARD SIGN
- 0x60: (0x42e, 0),# UPPERCASE IU / CYRILLIC CAPITAL LETTER YU
- 0x61: (0x410, 0),# CYRILLIC CAPITAL LETTER A
- 0x62: (0x411, 0),# CYRILLIC CAPITAL LETTER BE
- 0x63: (0x426, 0),# CYRILLIC CAPITAL LETTER TSE
- 0x64: (0x414, 0),# CYRILLIC CAPITAL LETTER DE
- 0x65: (0x415, 0),# CYRILLIC CAPITAL LETTER IE
- 0x66: (0x424, 0),# CYRILLIC CAPITAL LETTER EF
- 0x67: (0x413, 0),# UPPERCASE GE / CYRILLIC CAPITAL LETTER GHE
- 0x68: (0x425, 0),# UPPERCASE KHA / CYRILLIC CAPITAL LETTER HA
- 0x69: (0x418, 0),# UPPERCASE II / CYRILLIC CAPITAL LETTER I
- 0x6a: (0x419, 0),# UPPERCASE SHORT II / CYRILLIC CAPITAL LETTER SHORT I
- 0x6b: (0x41a, 0),# CYRILLIC CAPITAL LETTER KA
- 0x6c: (0x41b, 0),# CYRILLIC CAPITAL LETTER EL
- 0x6d: (0x41c, 0),# CYRILLIC CAPITAL LETTER EM
- 0x6e: (0x41d, 0),# CYRILLIC CAPITAL LETTER EN
- 0x6f: (0x41e, 0),# CYRILLIC CAPITAL LETTER O
- 0x70: (0x41f, 0),# CYRILLIC CAPITAL LETTER PE
- 0x71: (0x42f, 0),# UPPERCASE IA / CYRILLIC CAPITAL LETTER YA
- 0x72: (0x420, 0),# CYRILLIC CAPITAL LETTER ER
- 0x73: (0x421, 0),# CYRILLIC CAPITAL LETTER ES
- 0x74: (0x422, 0),# CYRILLIC CAPITAL LETTER TE
- 0x75: (0x423, 0),# CYRILLIC CAPITAL LETTER U
- 0x76: (0x416, 0),# CYRILLIC CAPITAL LETTER ZHE
- 0x77: (0x412, 0),# CYRILLIC CAPITAL LETTER VE
- 0x78: (0x42c, 0),# CYRILLIC CAPITAL LETTER SOFT SIGN
- 0x79: (0x42b, 0),# UPPERCASE YERI / CYRILLIC CAPITAL LETTER YERI
- 0x7a: (0x417, 0),# CYRILLIC CAPITAL LETTER ZE
- 0x7b: (0x428, 0),# CYRILLIC CAPITAL LETTER SHA
- 0x7c: (0x42d, 0),# CYRILLIC CAPITAL LETTER E
- 0x7d: (0x429, 0),# CYRILLIC CAPITAL LETTER SHCHA
- 0x7e: (0x427, 0)# CYRILLIC CAPITAL LETTER CHE
-}
-codesets = {
- 0x34: charset_34,
- 0x45: charset_45,
- 0x33: charset_33,
- 0x32: charset_32,
- 0x31: charset_31,
- 0x70: charset_70,
- 0x51: charset_51,
- 0x53: charset_53,
- 0x42: charset_42,
- 0x62: charset_62,
- 0x67: charset_67,
- 0x4E: charset_4E}
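The codesets map above keys each MARC-8 graphic character set by the final byte of its ISO 2022 designation escape, and each per-set table maps a raw MARC-8 byte to a (Unicode code point, flag) pair, where the flag appears to mark combining characters (every entry shown here is 0, i.e. non-combining). A minimal lookup sketch under that reading; the helper itself is illustrative and not part of the original module:

    def marc8_char_to_unicode(final_byte, raw_byte):
        # Pick the charset designated by the escape's final byte
        # (e.g. 0x42 -> basic Latin, 0x4E -> basic Cyrillic), then map
        # the raw byte to its Unicode code point and combining flag.
        code_point, is_combining = codesets[final_byte][raw_byte]
        return unichr(code_point), is_combining

    # marc8_char_to_unicode(0x4E, 0x41) -> (u'\u0430', 0), CYRILLIC SMALL LETTER A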
diff --git a/python/PyZ3950/oids.py b/python/PyZ3950/oids.py
deleted file mode 100644
index e1b525e..0000000
--- a/python/PyZ3950/oids.py
+++ /dev/null
@@ -1,479 +0,0 @@
-from PyZ3950 import asn1
-oids = {}
-oids['Z3950'] = {'oid': asn1.OidVal([1, 2, 840, 10003]), 'val': [1, 2, 840, 10003]}
-oids['Z3950']['ATTRS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3]), 'val': [1, 2, 840, 10003, 3]}
-oids['Z3950']['DIAG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4]), 'val': [1, 2, 840, 10003, 4]}
-oids['Z3950']['RECSYN'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5]), 'val': [1, 2, 840, 10003, 5]}
-oids['Z3950']['TRANSFER'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 6]), 'val': [1, 2, 840, 10003, 6]}
-oids['Z3950']['RRF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 7]), 'val': [1, 2, 840, 10003, 7]}
-oids['Z3950']['ACCESS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8]), 'val': [1, 2, 840, 10003, 8]}
-oids['Z3950']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9]), 'val': [1, 2, 840, 10003, 9]}
-oids['Z3950']['USR'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10]), 'val': [1, 2, 840, 10003, 10]}
-oids['Z3950']['SPEC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11]), 'val': [1, 2, 840, 10003, 11]}
-oids['Z3950']['VAR'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 12]), 'val': [1, 2, 840, 10003, 12]}
-oids['Z3950']['SCHEMA'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13]), 'val': [1, 2, 840, 10003, 13]}
-oids['Z3950']['TAGSET'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14]), 'val': [1, 2, 840, 10003, 14]}
-oids['Z3950']['NEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15]), 'val': [1, 2, 840, 10003, 15]}
-oids['Z3950']['QUERY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 16]), 'val': [1, 2, 840, 10003, 16]}
-oids['Z3950']['ATTRS']['BIB1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 1]), 'val': [1, 2, 840, 10003, 3, 1]}
-oids['Z3950']['ATTRS']['EXP1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 2]), 'val': [1, 2, 840, 10003, 3, 2]}
-oids['Z3950']['ATTRS']['EXT1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 3]), 'val': [1, 2, 840, 10003, 3, 3]}
-oids['Z3950']['ATTRS']['CCL1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 4]), 'val': [1, 2, 840, 10003, 3, 4]}
-oids['Z3950']['ATTRS']['GILS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 5]), 'val': [1, 2, 840, 10003, 3, 5]}
-oids['Z3950']['ATTRS']['STAS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 6]), 'val': [1, 2, 840, 10003, 3, 6]}
-oids['Z3950']['ATTRS']['COLLECTIONS1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 7]), 'val': [1, 2, 840, 10003, 3, 7]}
-oids['Z3950']['ATTRS']['CIMI1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 8]), 'val': [1, 2, 840, 10003, 3, 8]}
-oids['Z3950']['ATTRS']['GEO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 9]), 'val': [1, 2, 840, 10003, 3, 9]}
-oids['Z3950']['ATTRS']['ZBIG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 10]), 'val': [1, 2, 840, 10003, 3, 10]}
-oids['Z3950']['ATTRS']['UTIL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 11]), 'val': [1, 2, 840, 10003, 3, 11]}
-oids['Z3950']['ATTRS']['XD1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 12]), 'val': [1, 2, 840, 10003, 3, 12]}
-oids['Z3950']['ATTRS']['ZTHES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 13]), 'val': [1, 2, 840, 10003, 3, 13]}
-oids['Z3950']['ATTRS']['FIN1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 14]), 'val': [1, 2, 840, 10003, 3, 14]}
-oids['Z3950']['ATTRS']['DAN1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 15]), 'val': [1, 2, 840, 10003, 3, 15]}
-oids['Z3950']['ATTRS']['HOLD'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 16]), 'val': [1, 2, 840, 10003, 3, 16]}
-oids['Z3950']['ATTRS']['MARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 17]), 'val': [1, 2, 840, 10003, 3, 17]}
-oids['Z3950']['ATTRS']['BIB2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 18]), 'val': [1, 2, 840, 10003, 3, 18]}
-oids['Z3950']['ATTRS']['ZEEREX'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 19]), 'val': [1, 2, 840, 10003, 3, 19]}
-oids['Z3950']['DIAG']['BIB1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 1]), 'val': [1, 2, 840, 10003, 4, 1]}
-oids['Z3950']['DIAG']['DIAG1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 2]), 'val': [1, 2, 840, 10003, 4, 2]}
-oids['Z3950']['DIAG']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 3]), 'val': [1, 2, 840, 10003, 4, 3]}
-oids['Z3950']['DIAG']['GENERAL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 4]), 'val': [1, 2, 840, 10003, 4, 4]}
-oids['Z3950']['RECSYN']['UNIMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 1]), 'val': [1, 2, 840, 10003, 5, 1]}
-oids['Z3950']['RECSYN']['INTERMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 2]), 'val': [1, 2, 840, 10003, 5, 2]}
-oids['Z3950']['RECSYN']['CCF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 3]), 'val': [1, 2, 840, 10003, 5, 3]}
-oids['Z3950']['RECSYN']['USMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10]), 'val': [1, 2, 840, 10003, 5, 10]}
-oids['Z3950']['RECSYN']['USMARC']['BIBLIO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 1]), 'val': [1, 2, 840, 10003, 5, 10, 1]}
-oids['Z3950']['RECSYN']['USMARC']['AUTH'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 2]), 'val': [1, 2, 840, 10003, 5, 10, 2]}
-oids['Z3950']['RECSYN']['USMARC']['HOLD'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 3]), 'val': [1, 2, 840, 10003, 5, 10, 3]}
-oids['Z3950']['RECSYN']['USMARC']['COMMUNITY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 4]), 'val': [1, 2, 840, 10003, 5, 10, 4]}
-oids['Z3950']['RECSYN']['USMARC']['CLASS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 5]), 'val': [1, 2, 840, 10003, 5, 10, 5]}
-oids['Z3950']['RECSYN']['UKMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 11]), 'val': [1, 2, 840, 10003, 5, 11]}
-oids['Z3950']['RECSYN']['NORMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 12]), 'val': [1, 2, 840, 10003, 5, 12]}
-oids['Z3950']['RECSYN']['LIBRISMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 13]), 'val': [1, 2, 840, 10003, 5, 13]}
-oids['Z3950']['RECSYN']['DANMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 14]), 'val': [1, 2, 840, 10003, 5, 14]}
-oids['Z3950']['RECSYN']['FINMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 15]), 'val': [1, 2, 840, 10003, 5, 15]}
-oids['Z3950']['RECSYN']['MAB'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 16]), 'val': [1, 2, 840, 10003, 5, 16]}
-oids['Z3950']['RECSYN']['CANMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 17]), 'val': [1, 2, 840, 10003, 5, 17]}
-oids['Z3950']['RECSYN']['SBNMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 18]), 'val': [1, 2, 840, 10003, 5, 18]}
-oids['Z3950']['RECSYN']['PICAMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 19]), 'val': [1, 2, 840, 10003, 5, 19]}
-oids['Z3950']['RECSYN']['AUSMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 20]), 'val': [1, 2, 840, 10003, 5, 20]}
-oids['Z3950']['RECSYN']['IBERMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 21]), 'val': [1, 2, 840, 10003, 5, 21]}
-oids['Z3950']['RECSYN']['CATMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 22]), 'val': [1, 2, 840, 10003, 5, 22]}
-oids['Z3950']['RECSYN']['MALMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 23]), 'val': [1, 2, 840, 10003, 5, 23]}
-oids['Z3950']['RECSYN']['JPMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 24]), 'val': [1, 2, 840, 10003, 5, 24]}
-oids['Z3950']['RECSYN']['SWEMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 25]), 'val': [1, 2, 840, 10003, 5, 25]}
-oids['Z3950']['RECSYN']['SIGLEMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 26]), 'val': [1, 2, 840, 10003, 5, 26]}
-oids['Z3950']['RECSYN']['ISDSMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 27]), 'val': [1, 2, 840, 10003, 5, 27]}
-oids['Z3950']['RECSYN']['RUSMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 28]), 'val': [1, 2, 840, 10003, 5, 28]}
-oids['Z3950']['RECSYN']['HUNMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 29]), 'val': [1, 2, 840, 10003, 5, 29]}
-oids['Z3950']['RECSYN']['NACSISCATP'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 30]), 'val': [1, 2, 840, 10003, 5, 30]}
-oids['Z3950']['RECSYN']['FINMARC2000'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 31]), 'val': [1, 2, 840, 10003, 5, 31]}
-oids['Z3950']['RECSYN']['MARC21FIN'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 32]), 'val': [1, 2, 840, 10003, 5, 32]}
-oids['Z3950']['RECSYN']['COMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 33]), 'val': [1, 2, 840, 10003, 5, 33]}
-oids['Z3950']['RECSYN']['EXPLAIN'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 100]), 'val': [1, 2, 840, 10003, 5, 100]}
-oids['Z3950']['RECSYN']['SUTRS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 101]), 'val': [1, 2, 840, 10003, 5, 101]}
-oids['Z3950']['RECSYN']['OPAC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 102]), 'val': [1, 2, 840, 10003, 5, 102]}
-oids['Z3950']['RECSYN']['SUMMARY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 103]), 'val': [1, 2, 840, 10003, 5, 103]}
-oids['Z3950']['RECSYN']['GRS0'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 104]), 'val': [1, 2, 840, 10003, 5, 104]}
-oids['Z3950']['RECSYN']['GRS1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 105]), 'val': [1, 2, 840, 10003, 5, 105]}
-oids['Z3950']['RECSYN']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 106]), 'val': [1, 2, 840, 10003, 5, 106]}
-oids['Z3950']['RECSYN']['FRAGMENT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 107]), 'val': [1, 2, 840, 10003, 5, 107]}
-oids['Z3950']['RECSYN']['MIME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109]), 'val': [1, 2, 840, 10003, 5, 109]}
-oids['Z3950']['RECSYN']['MIME']['PDF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 1]), 'val': [1, 2, 840, 10003, 5, 109, 1]}
-oids['Z3950']['RECSYN']['MIME']['POSTSCRIPT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 2]), 'val': [1, 2, 840, 10003, 5, 109, 2]}
-oids['Z3950']['RECSYN']['MIME']['HTML'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 3]), 'val': [1, 2, 840, 10003, 5, 109, 3]}
-oids['Z3950']['RECSYN']['MIME']['TIFF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 4]), 'val': [1, 2, 840, 10003, 5, 109, 4]}
-oids['Z3950']['RECSYN']['MIME']['GIF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 5]), 'val': [1, 2, 840, 10003, 5, 109, 5]}
-oids['Z3950']['RECSYN']['MIME']['JPEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 6]), 'val': [1, 2, 840, 10003, 5, 109, 6]}
-oids['Z3950']['RECSYN']['MIME']['PNG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 7]), 'val': [1, 2, 840, 10003, 5, 109, 7]}
-oids['Z3950']['RECSYN']['MIME']['MPEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 8]), 'val': [1, 2, 840, 10003, 5, 109, 8]}
-oids['Z3950']['RECSYN']['MIME']['SGML'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 9]), 'val': [1, 2, 840, 10003, 5, 109, 9]}
-oids['Z3950']['RECSYN']['MIME']['XML'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 10]), 'val': [1, 2, 840, 10003, 5, 109, 10]}
-oids['Z3950']['RECSYN']['ZMIME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 110]), 'val': [1, 2, 840, 10003, 5, 110]}
-oids['Z3950']['RECSYN']['ZMIME']['TIFFB'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 110, 1]), 'val': [1, 2, 840, 10003, 5, 110, 1]}
-oids['Z3950']['RECSYN']['ZMIME']['WAV'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 110, 2]), 'val': [1, 2, 840, 10003, 5, 110, 2]}
-oids['Z3950']['RECSYN']['SQL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 111]), 'val': [1, 2, 840, 10003, 5, 111]}
-oids['Z3950']['RRF']['RESOURCE1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 7, 1]), 'val': [1, 2, 840, 10003, 7, 1]}
-oids['Z3950']['RRF']['RESOURCE2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 7, 2]), 'val': [1, 2, 840, 10003, 7, 2]}
-oids['Z3950']['ACCESS']['PROMPT1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8, 1]), 'val': [1, 2, 840, 10003, 8, 1]}
-oids['Z3950']['ACCESS']['DES1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8, 2]), 'val': [1, 2, 840, 10003, 8, 2]}
-oids['Z3950']['ACCESS']['KRB1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8, 3]), 'val': [1, 2, 840, 10003, 8, 3]}
-oids['Z3950']['ES']['PERSISTRS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 1]), 'val': [1, 2, 840, 10003, 9, 1]}
-oids['Z3950']['ES']['PERSISTQRY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 2]), 'val': [1, 2, 840, 10003, 9, 2]}
-oids['Z3950']['ES']['PERIODQRY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 3]), 'val': [1, 2, 840, 10003, 9, 3]}
-oids['Z3950']['ES']['ITEMORDER'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 4]), 'val': [1, 2, 840, 10003, 9, 4]}
-oids['Z3950']['ES']['DBUPDATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 5]), 'val': [1, 2, 840, 10003, 9, 5]}
-oids['Z3950']['ES']['DBUPDATE']['REV'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 5, 1]), 'val': [1, 2, 840, 10003, 9, 5, 1]}
-oids['Z3950']['ES']['DBUPDATE']['REV']['1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 5, 1, 1]), 'val': [1, 2, 840, 10003, 9, 5, 1, 1]}
-oids['Z3950']['ES']['EXPORTSPEC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 6]), 'val': [1, 2, 840, 10003, 9, 6]}
-oids['Z3950']['ES']['EXPORTINV'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 7]), 'val': [1, 2, 840, 10003, 9, 7]}
-oids['Z3950']['USR']['SEARCHRES1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1]), 'val': [1, 2, 840, 10003, 10, 1]}
-oids['Z3950']['USR']['CHARSETNEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 2]), 'val': [1, 2, 840, 10003, 10, 2]}
-oids['Z3950']['USR']['INFO1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 3]), 'val': [1, 2, 840, 10003, 10, 3]}
-oids['Z3950']['USR']['SEARCHTERMS1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 4]), 'val': [1, 2, 840, 10003, 10, 4]}
-oids['Z3950']['USR']['SEARCHTERMS2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 5]), 'val': [1, 2, 840, 10003, 10, 5]}
-oids['Z3950']['USR']['DATETIME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 6]), 'val': [1, 2, 840, 10003, 10, 6]}
-oids['Z3950']['USR']['INSERTACTIONQUAL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 9]), 'val': [1, 2, 840, 10003, 10, 9]}
-oids['Z3950']['USR']['EDITACTIONQUAL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 10]), 'val': [1, 2, 840, 10003, 10, 10]}
-oids['Z3950']['USR']['AUTHFILE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 11]), 'val': [1, 2, 840, 10003, 10, 11]}
-oids['Z3950']['USR']['PRIVATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1000]), 'val': [1, 2, 840, 10003, 10, 1000]}
-oids['Z3950']['USR']['PRIVATE']['OCLC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17]), 'val': [1, 2, 840, 10003, 10, 1000, 17]}
-oids['Z3950']['USR']['PRIVATE']['OCLC']['INFO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17, 1]), 'val': [1, 2, 840, 10003, 10, 1000, 17, 1]}
-oids['Z3950']['SPEC']['ESPEC1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11, 1]), 'val': [1, 2, 840, 10003, 11, 1]}
-oids['Z3950']['SPEC']['ESPEC2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11, 2]), 'val': [1, 2, 840, 10003, 11, 2]}
-oids['Z3950']['SPEC']['ESPECQ'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11, 3]), 'val': [1, 2, 840, 10003, 11, 3]}
-oids['Z3950']['VAR']['VARIANT1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 12, 1]), 'val': [1, 2, 840, 10003, 12, 1]}
-oids['Z3950']['SCHEMA']['WAIS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
-oids['Z3950']['SCHEMA']['GILS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 2]), 'val': [1, 2, 840, 10003, 13, 2]}
-oids['Z3950']['SCHEMA']['COLLECTIONS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 3]), 'val': [1, 2, 840, 10003, 13, 3]}
-oids['Z3950']['SCHEMA']['GEO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 4]), 'val': [1, 2, 840, 10003, 13, 4]}
-oids['Z3950']['SCHEMA']['CIMI'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 5]), 'val': [1, 2, 840, 10003, 13, 5]}
-oids['Z3950']['SCHEMA']['UPDATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 6]), 'val': [1, 2, 840, 10003, 13, 6]}
-oids['Z3950']['SCHEMA']['HOLDINGS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7]), 'val': [1, 2, 840, 10003, 13, 7]}
-oids['Z3950']['SCHEMA']['HOLDINGS']['11'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7, 1]), 'val': [1, 2, 840, 10003, 13, 7, 1]}
-oids['Z3950']['SCHEMA']['HOLDINGS']['12'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7, 2]), 'val': [1, 2, 840, 10003, 13, 7, 2]}
-oids['Z3950']['SCHEMA']['HOLDINGS']['14'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7, 4]), 'val': [1, 2, 840, 10003, 13, 7, 4]}
-oids['Z3950']['SCHEMA']['ZTHES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
-oids['Z3950']['SCHEMA']['INSERT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
-oids['Z3950']['SCHEMA']['EDIT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
-oids['Z3950']['TAGSET']['M'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 1]), 'val': [1, 2, 840, 10003, 14, 1]}
-oids['Z3950']['TAGSET']['G'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 2]), 'val': [1, 2, 840, 10003, 14, 2]}
-oids['Z3950']['TAGSET']['STAS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 3]), 'val': [1, 2, 840, 10003, 14, 3]}
-oids['Z3950']['TAGSET']['GILS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 4]), 'val': [1, 2, 840, 10003, 14, 4]}
-oids['Z3950']['TAGSET']['COLLECTIONS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 5]), 'val': [1, 2, 840, 10003, 14, 5]}
-oids['Z3950']['TAGSET']['CIMI'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 6]), 'val': [1, 2, 840, 10003, 14, 6]}
-oids['Z3950']['TAGSET']['UPDATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 7]), 'val': [1, 2, 840, 10003, 14, 7]}
-oids['Z3950']['TAGSET']['ZTHES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 8]), 'val': [1, 2, 840, 10003, 14, 8]}
-oids['Z3950']['NEG']['CHARSET2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1]), 'val': [1, 2, 840, 10003, 15, 1]}
-oids['Z3950']['NEG']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 2]), 'val': [1, 2, 840, 10003, 15, 2]}
-oids['Z3950']['NEG']['CHARSET3'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 3]), 'val': [1, 2, 840, 10003, 15, 3]}
-oids['Z3950']['NEG']['PRIVATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1000]), 'val': [1, 2, 840, 10003, 15, 1000]}
-oids['Z3950']['NEG']['PRIVATE']['INDEXDATA'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81]), 'val': [1, 2, 840, 10003, 15, 1000, 81]}
-oids['Z3950']['NEG']['PRIVATE']['INDEXDATA']['CHARSETNAME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81, 1]), 'val': [1, 2, 840, 10003, 15, 1000, 81, 1]}
-oids['Z3950']['QUERY']['SQL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 16, 1]), 'val': [1, 2, 840, 10003, 16, 1]}
-oids['Z3950']['QUERY']['CQL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 16, 2]), 'val': [1, 2, 840, 10003, 16, 2]}
-oids['UNICODE'] = {'oid': asn1.OidVal([1, 0, 10646]), 'val': [1, 0, 10646]}
-oids['UNICODE']['PART1'] = {'oid': asn1.OidVal([1, 0, 10646, 1]), 'val': [1, 0, 10646, 1]}
-oids['UNICODE']['PART1']['XFERSYN'] = {'oid': asn1.OidVal([1, 0, 10646, 1, 0]), 'val': [1, 0, 10646, 1, 0]}
-oids['UNICODE']['PART1']['XFERSYN']['UCS2'] = {'oid': asn1.OidVal([1, 0, 10646, 1, 0, 2]), 'val': [1, 0, 10646, 1, 0, 2]}
-oids['UNICODE']['PART1']['XFERSYN']['UCS4'] = {'oid': asn1.OidVal([1, 0, 10646, 1, 0, 4]), 'val': [1, 0, 10646, 1, 0, 4]}
-oids['UNICODE']['PART1']['XFERSYN']['UTF16'] = {'oid': asn1.OidVal([1, 0, 10646, 1, 0, 5]), 'val': [1, 0, 10646, 1, 0, 5]}
-oids['UNICODE']['PART1']['XFERSYN']['UTF8'] = {'oid': asn1.OidVal([1, 0, 10646, 1, 0, 8]), 'val': [1, 0, 10646, 1, 0, 8]}
-UNICODE = [1, 0, 10646]
-UNICODE_ov = asn1.OidVal([1, 0, 10646])
-UNICODE_PART1 = [1, 0, 10646, 1]
-UNICODE_PART1_ov = asn1.OidVal([1, 0, 10646, 1])
-UNICODE_PART1_XFERSYN = [1, 0, 10646, 1, 0]
-UNICODE_PART1_XFERSYN_ov = asn1.OidVal([1, 0, 10646, 1, 0])
-UNICODE_PART1_XFERSYN_UCS2 = [1, 0, 10646, 1, 0, 2]
-UNICODE_PART1_XFERSYN_UCS2_ov = asn1.OidVal([1, 0, 10646, 1, 0, 2])
-UNICODE_PART1_XFERSYN_UCS4 = [1, 0, 10646, 1, 0, 4]
-UNICODE_PART1_XFERSYN_UCS4_ov = asn1.OidVal([1, 0, 10646, 1, 0, 4])
-UNICODE_PART1_XFERSYN_UTF16 = [1, 0, 10646, 1, 0, 5]
-UNICODE_PART1_XFERSYN_UTF16_ov = asn1.OidVal([1, 0, 10646, 1, 0, 5])
-UNICODE_PART1_XFERSYN_UTF8 = [1, 0, 10646, 1, 0, 8]
-UNICODE_PART1_XFERSYN_UTF8_ov = asn1.OidVal([1, 0, 10646, 1, 0, 8])
-Z3950 = [1, 2, 840, 10003]
-Z3950_ov = asn1.OidVal([1, 2, 840, 10003])
-Z3950_ACCESS = [1, 2, 840, 10003, 8]
-Z3950_ACCESS_ov = asn1.OidVal([1, 2, 840, 10003, 8])
-Z3950_ACCESS_DES1 = [1, 2, 840, 10003, 8, 2]
-Z3950_ACCESS_DES1_ov = asn1.OidVal([1, 2, 840, 10003, 8, 2])
-Z3950_ACCESS_KRB1 = [1, 2, 840, 10003, 8, 3]
-Z3950_ACCESS_KRB1_ov = asn1.OidVal([1, 2, 840, 10003, 8, 3])
-Z3950_ACCESS_PROMPT1 = [1, 2, 840, 10003, 8, 1]
-Z3950_ACCESS_PROMPT1_ov = asn1.OidVal([1, 2, 840, 10003, 8, 1])
-Z3950_ATTRS = [1, 2, 840, 10003, 3]
-Z3950_ATTRS_ov = asn1.OidVal([1, 2, 840, 10003, 3])
-Z3950_ATTRS_BIB1 = [1, 2, 840, 10003, 3, 1]
-Z3950_ATTRS_BIB1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 1])
-Z3950_ATTRS_BIB2 = [1, 2, 840, 10003, 3, 18]
-Z3950_ATTRS_BIB2_ov = asn1.OidVal([1, 2, 840, 10003, 3, 18])
-Z3950_ATTRS_CCL1 = [1, 2, 840, 10003, 3, 4]
-Z3950_ATTRS_CCL1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 4])
-Z3950_ATTRS_CIMI1 = [1, 2, 840, 10003, 3, 8]
-Z3950_ATTRS_CIMI1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 8])
-Z3950_ATTRS_COLLECTIONS1 = [1, 2, 840, 10003, 3, 7]
-Z3950_ATTRS_COLLECTIONS1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 7])
-Z3950_ATTRS_DAN1 = [1, 2, 840, 10003, 3, 15]
-Z3950_ATTRS_DAN1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 15])
-Z3950_ATTRS_EXP1 = [1, 2, 840, 10003, 3, 2]
-Z3950_ATTRS_EXP1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 2])
-Z3950_ATTRS_EXT1 = [1, 2, 840, 10003, 3, 3]
-Z3950_ATTRS_EXT1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 3])
-Z3950_ATTRS_FIN1 = [1, 2, 840, 10003, 3, 14]
-Z3950_ATTRS_FIN1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 14])
-Z3950_ATTRS_GEO = [1, 2, 840, 10003, 3, 9]
-Z3950_ATTRS_GEO_ov = asn1.OidVal([1, 2, 840, 10003, 3, 9])
-Z3950_ATTRS_GILS = [1, 2, 840, 10003, 3, 5]
-Z3950_ATTRS_GILS_ov = asn1.OidVal([1, 2, 840, 10003, 3, 5])
-Z3950_ATTRS_HOLD = [1, 2, 840, 10003, 3, 16]
-Z3950_ATTRS_HOLD_ov = asn1.OidVal([1, 2, 840, 10003, 3, 16])
-Z3950_ATTRS_MARC = [1, 2, 840, 10003, 3, 17]
-Z3950_ATTRS_MARC_ov = asn1.OidVal([1, 2, 840, 10003, 3, 17])
-Z3950_ATTRS_STAS = [1, 2, 840, 10003, 3, 6]
-Z3950_ATTRS_STAS_ov = asn1.OidVal([1, 2, 840, 10003, 3, 6])
-Z3950_ATTRS_UTIL = [1, 2, 840, 10003, 3, 11]
-Z3950_ATTRS_UTIL_ov = asn1.OidVal([1, 2, 840, 10003, 3, 11])
-Z3950_ATTRS_XD1 = [1, 2, 840, 10003, 3, 12]
-Z3950_ATTRS_XD1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 12])
-Z3950_ATTRS_ZBIG = [1, 2, 840, 10003, 3, 10]
-Z3950_ATTRS_ZBIG_ov = asn1.OidVal([1, 2, 840, 10003, 3, 10])
-Z3950_ATTRS_ZEEREX = [1, 2, 840, 10003, 3, 19]
-Z3950_ATTRS_ZEEREX_ov = asn1.OidVal([1, 2, 840, 10003, 3, 19])
-Z3950_ATTRS_ZTHES = [1, 2, 840, 10003, 3, 13]
-Z3950_ATTRS_ZTHES_ov = asn1.OidVal([1, 2, 840, 10003, 3, 13])
-Z3950_DIAG = [1, 2, 840, 10003, 4]
-Z3950_DIAG_ov = asn1.OidVal([1, 2, 840, 10003, 4])
-Z3950_DIAG_BIB1 = [1, 2, 840, 10003, 4, 1]
-Z3950_DIAG_BIB1_ov = asn1.OidVal([1, 2, 840, 10003, 4, 1])
-Z3950_DIAG_DIAG1 = [1, 2, 840, 10003, 4, 2]
-Z3950_DIAG_DIAG1_ov = asn1.OidVal([1, 2, 840, 10003, 4, 2])
-Z3950_DIAG_ES = [1, 2, 840, 10003, 4, 3]
-Z3950_DIAG_ES_ov = asn1.OidVal([1, 2, 840, 10003, 4, 3])
-Z3950_DIAG_GENERAL = [1, 2, 840, 10003, 4, 4]
-Z3950_DIAG_GENERAL_ov = asn1.OidVal([1, 2, 840, 10003, 4, 4])
-Z3950_ES = [1, 2, 840, 10003, 9]
-Z3950_ES_ov = asn1.OidVal([1, 2, 840, 10003, 9])
-Z3950_ES_DBUPDATE = [1, 2, 840, 10003, 9, 5]
-Z3950_ES_DBUPDATE_ov = asn1.OidVal([1, 2, 840, 10003, 9, 5])
-Z3950_ES_DBUPDATE_REV = [1, 2, 840, 10003, 9, 5, 1]
-Z3950_ES_DBUPDATE_REV_ov = asn1.OidVal([1, 2, 840, 10003, 9, 5, 1])
-Z3950_ES_DBUPDATE_REV_1 = [1, 2, 840, 10003, 9, 5, 1, 1]
-Z3950_ES_DBUPDATE_REV_1_ov = asn1.OidVal([1, 2, 840, 10003, 9, 5, 1, 1])
-Z3950_ES_EXPORTINV = [1, 2, 840, 10003, 9, 7]
-Z3950_ES_EXPORTINV_ov = asn1.OidVal([1, 2, 840, 10003, 9, 7])
-Z3950_ES_EXPORTSPEC = [1, 2, 840, 10003, 9, 6]
-Z3950_ES_EXPORTSPEC_ov = asn1.OidVal([1, 2, 840, 10003, 9, 6])
-Z3950_ES_ITEMORDER = [1, 2, 840, 10003, 9, 4]
-Z3950_ES_ITEMORDER_ov = asn1.OidVal([1, 2, 840, 10003, 9, 4])
-Z3950_ES_PERIODQRY = [1, 2, 840, 10003, 9, 3]
-Z3950_ES_PERIODQRY_ov = asn1.OidVal([1, 2, 840, 10003, 9, 3])
-Z3950_ES_PERSISTQRY = [1, 2, 840, 10003, 9, 2]
-Z3950_ES_PERSISTQRY_ov = asn1.OidVal([1, 2, 840, 10003, 9, 2])
-Z3950_ES_PERSISTRS = [1, 2, 840, 10003, 9, 1]
-Z3950_ES_PERSISTRS_ov = asn1.OidVal([1, 2, 840, 10003, 9, 1])
-Z3950_NEG = [1, 2, 840, 10003, 15]
-Z3950_NEG_ov = asn1.OidVal([1, 2, 840, 10003, 15])
-Z3950_NEG_CHARSET2 = [1, 2, 840, 10003, 15, 1]
-Z3950_NEG_CHARSET2_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1])
-Z3950_NEG_CHARSET3 = [1, 2, 840, 10003, 15, 3]
-Z3950_NEG_CHARSET3_ov = asn1.OidVal([1, 2, 840, 10003, 15, 3])
-Z3950_NEG_ES = [1, 2, 840, 10003, 15, 2]
-Z3950_NEG_ES_ov = asn1.OidVal([1, 2, 840, 10003, 15, 2])
-Z3950_NEG_PRIVATE = [1, 2, 840, 10003, 15, 1000]
-Z3950_NEG_PRIVATE_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1000])
-Z3950_NEG_PRIVATE_INDEXDATA = [1, 2, 840, 10003, 15, 1000, 81]
-Z3950_NEG_PRIVATE_INDEXDATA_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81])
-Z3950_NEG_PRIVATE_INDEXDATA_CHARSETNAME = [1, 2, 840, 10003, 15, 1000, 81, 1]
-Z3950_NEG_PRIVATE_INDEXDATA_CHARSETNAME_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81, 1])
-Z3950_QUERY = [1, 2, 840, 10003, 16]
-Z3950_QUERY_ov = asn1.OidVal([1, 2, 840, 10003, 16])
-Z3950_QUERY_CQL = [1, 2, 840, 10003, 16, 2]
-Z3950_QUERY_CQL_ov = asn1.OidVal([1, 2, 840, 10003, 16, 2])
-Z3950_QUERY_SQL = [1, 2, 840, 10003, 16, 1]
-Z3950_QUERY_SQL_ov = asn1.OidVal([1, 2, 840, 10003, 16, 1])
-Z3950_RECSYN = [1, 2, 840, 10003, 5]
-Z3950_RECSYN_ov = asn1.OidVal([1, 2, 840, 10003, 5])
-Z3950_RECSYN_AUSMARC = [1, 2, 840, 10003, 5, 20]
-Z3950_RECSYN_AUSMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 20])
-Z3950_RECSYN_CANMARC = [1, 2, 840, 10003, 5, 17]
-Z3950_RECSYN_CANMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 17])
-Z3950_RECSYN_CATMARC = [1, 2, 840, 10003, 5, 22]
-Z3950_RECSYN_CATMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 22])
-Z3950_RECSYN_CCF = [1, 2, 840, 10003, 5, 3]
-Z3950_RECSYN_CCF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 3])
-Z3950_RECSYN_COMARC = [1, 2, 840, 10003, 5, 33]
-Z3950_RECSYN_COMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 33])
-Z3950_RECSYN_DANMARC = [1, 2, 840, 10003, 5, 14]
-Z3950_RECSYN_DANMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 14])
-Z3950_RECSYN_ES = [1, 2, 840, 10003, 5, 106]
-Z3950_RECSYN_ES_ov = asn1.OidVal([1, 2, 840, 10003, 5, 106])
-Z3950_RECSYN_EXPLAIN = [1, 2, 840, 10003, 5, 100]
-Z3950_RECSYN_EXPLAIN_ov = asn1.OidVal([1, 2, 840, 10003, 5, 100])
-Z3950_RECSYN_FINMARC = [1, 2, 840, 10003, 5, 15]
-Z3950_RECSYN_FINMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 15])
-Z3950_RECSYN_FINMARC2000 = [1, 2, 840, 10003, 5, 31]
-Z3950_RECSYN_FINMARC2000_ov = asn1.OidVal([1, 2, 840, 10003, 5, 31])
-Z3950_RECSYN_FRAGMENT = [1, 2, 840, 10003, 5, 107]
-Z3950_RECSYN_FRAGMENT_ov = asn1.OidVal([1, 2, 840, 10003, 5, 107])
-Z3950_RECSYN_GRS0 = [1, 2, 840, 10003, 5, 104]
-Z3950_RECSYN_GRS0_ov = asn1.OidVal([1, 2, 840, 10003, 5, 104])
-Z3950_RECSYN_GRS1 = [1, 2, 840, 10003, 5, 105]
-Z3950_RECSYN_GRS1_ov = asn1.OidVal([1, 2, 840, 10003, 5, 105])
-Z3950_RECSYN_HUNMARC = [1, 2, 840, 10003, 5, 29]
-Z3950_RECSYN_HUNMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 29])
-Z3950_RECSYN_IBERMARC = [1, 2, 840, 10003, 5, 21]
-Z3950_RECSYN_IBERMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 21])
-Z3950_RECSYN_INTERMARC = [1, 2, 840, 10003, 5, 2]
-Z3950_RECSYN_INTERMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 2])
-Z3950_RECSYN_ISDSMARC = [1, 2, 840, 10003, 5, 27]
-Z3950_RECSYN_ISDSMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 27])
-Z3950_RECSYN_JPMARC = [1, 2, 840, 10003, 5, 24]
-Z3950_RECSYN_JPMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 24])
-Z3950_RECSYN_LIBRISMARC = [1, 2, 840, 10003, 5, 13]
-Z3950_RECSYN_LIBRISMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 13])
-Z3950_RECSYN_MAB = [1, 2, 840, 10003, 5, 16]
-Z3950_RECSYN_MAB_ov = asn1.OidVal([1, 2, 840, 10003, 5, 16])
-Z3950_RECSYN_MALMARC = [1, 2, 840, 10003, 5, 23]
-Z3950_RECSYN_MALMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 23])
-Z3950_RECSYN_MARC21FIN = [1, 2, 840, 10003, 5, 32]
-Z3950_RECSYN_MARC21FIN_ov = asn1.OidVal([1, 2, 840, 10003, 5, 32])
-Z3950_RECSYN_MIME = [1, 2, 840, 10003, 5, 109]
-Z3950_RECSYN_MIME_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109])
-Z3950_RECSYN_MIME_GIF = [1, 2, 840, 10003, 5, 109, 5]
-Z3950_RECSYN_MIME_GIF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 5])
-Z3950_RECSYN_MIME_HTML = [1, 2, 840, 10003, 5, 109, 3]
-Z3950_RECSYN_MIME_HTML_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 3])
-Z3950_RECSYN_MIME_JPEG = [1, 2, 840, 10003, 5, 109, 6]
-Z3950_RECSYN_MIME_JPEG_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 6])
-Z3950_RECSYN_MIME_MPEG = [1, 2, 840, 10003, 5, 109, 8]
-Z3950_RECSYN_MIME_MPEG_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 8])
-Z3950_RECSYN_MIME_PDF = [1, 2, 840, 10003, 5, 109, 1]
-Z3950_RECSYN_MIME_PDF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 1])
-Z3950_RECSYN_MIME_PNG = [1, 2, 840, 10003, 5, 109, 7]
-Z3950_RECSYN_MIME_PNG_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 7])
-Z3950_RECSYN_MIME_POSTSCRIPT = [1, 2, 840, 10003, 5, 109, 2]
-Z3950_RECSYN_MIME_POSTSCRIPT_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 2])
-Z3950_RECSYN_MIME_SGML = [1, 2, 840, 10003, 5, 109, 9]
-Z3950_RECSYN_MIME_SGML_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 9])
-Z3950_RECSYN_MIME_TIFF = [1, 2, 840, 10003, 5, 109, 4]
-Z3950_RECSYN_MIME_TIFF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 4])
-Z3950_RECSYN_MIME_XML = [1, 2, 840, 10003, 5, 109, 10]
-Z3950_RECSYN_MIME_XML_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 10])
-Z3950_RECSYN_NACSISCATP = [1, 2, 840, 10003, 5, 30]
-Z3950_RECSYN_NACSISCATP_ov = asn1.OidVal([1, 2, 840, 10003, 5, 30])
-Z3950_RECSYN_NORMARC = [1, 2, 840, 10003, 5, 12]
-Z3950_RECSYN_NORMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 12])
-Z3950_RECSYN_OPAC = [1, 2, 840, 10003, 5, 102]
-Z3950_RECSYN_OPAC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 102])
-Z3950_RECSYN_PICAMARC = [1, 2, 840, 10003, 5, 19]
-Z3950_RECSYN_PICAMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 19])
-Z3950_RECSYN_RUSMARC = [1, 2, 840, 10003, 5, 28]
-Z3950_RECSYN_RUSMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 28])
-Z3950_RECSYN_SBNMARC = [1, 2, 840, 10003, 5, 18]
-Z3950_RECSYN_SBNMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 18])
-Z3950_RECSYN_SIGLEMARC = [1, 2, 840, 10003, 5, 26]
-Z3950_RECSYN_SIGLEMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 26])
-Z3950_RECSYN_SQL = [1, 2, 840, 10003, 5, 111]
-Z3950_RECSYN_SQL_ov = asn1.OidVal([1, 2, 840, 10003, 5, 111])
-Z3950_RECSYN_SUMMARY = [1, 2, 840, 10003, 5, 103]
-Z3950_RECSYN_SUMMARY_ov = asn1.OidVal([1, 2, 840, 10003, 5, 103])
-Z3950_RECSYN_SUTRS = [1, 2, 840, 10003, 5, 101]
-Z3950_RECSYN_SUTRS_ov = asn1.OidVal([1, 2, 840, 10003, 5, 101])
-Z3950_RECSYN_SWEMARC = [1, 2, 840, 10003, 5, 25]
-Z3950_RECSYN_SWEMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 25])
-Z3950_RECSYN_UKMARC = [1, 2, 840, 10003, 5, 11]
-Z3950_RECSYN_UKMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 11])
-Z3950_RECSYN_UNIMARC = [1, 2, 840, 10003, 5, 1]
-Z3950_RECSYN_UNIMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 1])
-Z3950_RECSYN_USMARC = [1, 2, 840, 10003, 5, 10]
-Z3950_RECSYN_USMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10])
-Z3950_RECSYN_USMARC_AUTH = [1, 2, 840, 10003, 5, 10, 2]
-Z3950_RECSYN_USMARC_AUTH_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 2])
-Z3950_RECSYN_USMARC_BIBLIO = [1, 2, 840, 10003, 5, 10, 1]
-Z3950_RECSYN_USMARC_BIBLIO_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 1])
-Z3950_RECSYN_USMARC_CLASS = [1, 2, 840, 10003, 5, 10, 5]
-Z3950_RECSYN_USMARC_CLASS_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 5])
-Z3950_RECSYN_USMARC_COMMUNITY = [1, 2, 840, 10003, 5, 10, 4]
-Z3950_RECSYN_USMARC_COMMUNITY_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 4])
-Z3950_RECSYN_USMARC_HOLD = [1, 2, 840, 10003, 5, 10, 3]
-Z3950_RECSYN_USMARC_HOLD_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 3])
-Z3950_RECSYN_ZMIME = [1, 2, 840, 10003, 5, 110]
-Z3950_RECSYN_ZMIME_ov = asn1.OidVal([1, 2, 840, 10003, 5, 110])
-Z3950_RECSYN_ZMIME_TIFFB = [1, 2, 840, 10003, 5, 110, 1]
-Z3950_RECSYN_ZMIME_TIFFB_ov = asn1.OidVal([1, 2, 840, 10003, 5, 110, 1])
-Z3950_RECSYN_ZMIME_WAV = [1, 2, 840, 10003, 5, 110, 2]
-Z3950_RECSYN_ZMIME_WAV_ov = asn1.OidVal([1, 2, 840, 10003, 5, 110, 2])
-Z3950_RRF = [1, 2, 840, 10003, 7]
-Z3950_RRF_ov = asn1.OidVal([1, 2, 840, 10003, 7])
-Z3950_RRF_RESOURCE1 = [1, 2, 840, 10003, 7, 1]
-Z3950_RRF_RESOURCE1_ov = asn1.OidVal([1, 2, 840, 10003, 7, 1])
-Z3950_RRF_RESOURCE2 = [1, 2, 840, 10003, 7, 2]
-Z3950_RRF_RESOURCE2_ov = asn1.OidVal([1, 2, 840, 10003, 7, 2])
-Z3950_SCHEMA = [1, 2, 840, 10003, 13]
-Z3950_SCHEMA_ov = asn1.OidVal([1, 2, 840, 10003, 13])
-Z3950_SCHEMA_CIMI = [1, 2, 840, 10003, 13, 5]
-Z3950_SCHEMA_CIMI_ov = asn1.OidVal([1, 2, 840, 10003, 13, 5])
-Z3950_SCHEMA_COLLECTIONS = [1, 2, 840, 10003, 13, 3]
-Z3950_SCHEMA_COLLECTIONS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 3])
-Z3950_SCHEMA_EDIT = [1, 2, 840, 10003, 13, 1]
-Z3950_SCHEMA_EDIT_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
-Z3950_SCHEMA_GEO = [1, 2, 840, 10003, 13, 4]
-Z3950_SCHEMA_GEO_ov = asn1.OidVal([1, 2, 840, 10003, 13, 4])
-Z3950_SCHEMA_GILS = [1, 2, 840, 10003, 13, 2]
-Z3950_SCHEMA_GILS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 2])
-Z3950_SCHEMA_HOLDINGS = [1, 2, 840, 10003, 13, 7]
-Z3950_SCHEMA_HOLDINGS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7])
-Z3950_SCHEMA_HOLDINGS_11 = [1, 2, 840, 10003, 13, 7, 1]
-Z3950_SCHEMA_HOLDINGS_11_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7, 1])
-Z3950_SCHEMA_HOLDINGS_12 = [1, 2, 840, 10003, 13, 7, 2]
-Z3950_SCHEMA_HOLDINGS_12_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7, 2])
-Z3950_SCHEMA_HOLDINGS_14 = [1, 2, 840, 10003, 13, 7, 4]
-Z3950_SCHEMA_HOLDINGS_14_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7, 4])
-Z3950_SCHEMA_INSERT = [1, 2, 840, 10003, 13, 1]
-Z3950_SCHEMA_INSERT_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
-Z3950_SCHEMA_UPDATE = [1, 2, 840, 10003, 13, 6]
-Z3950_SCHEMA_UPDATE_ov = asn1.OidVal([1, 2, 840, 10003, 13, 6])
-Z3950_SCHEMA_WAIS = [1, 2, 840, 10003, 13, 1]
-Z3950_SCHEMA_WAIS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
-Z3950_SCHEMA_ZTHES = [1, 2, 840, 10003, 13, 1]
-Z3950_SCHEMA_ZTHES_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
-Z3950_SPEC = [1, 2, 840, 10003, 11]
-Z3950_SPEC_ov = asn1.OidVal([1, 2, 840, 10003, 11])
-Z3950_SPEC_ESPEC1 = [1, 2, 840, 10003, 11, 1]
-Z3950_SPEC_ESPEC1_ov = asn1.OidVal([1, 2, 840, 10003, 11, 1])
-Z3950_SPEC_ESPEC2 = [1, 2, 840, 10003, 11, 2]
-Z3950_SPEC_ESPEC2_ov = asn1.OidVal([1, 2, 840, 10003, 11, 2])
-Z3950_SPEC_ESPECQ = [1, 2, 840, 10003, 11, 3]
-Z3950_SPEC_ESPECQ_ov = asn1.OidVal([1, 2, 840, 10003, 11, 3])
-Z3950_TAGSET = [1, 2, 840, 10003, 14]
-Z3950_TAGSET_ov = asn1.OidVal([1, 2, 840, 10003, 14])
-Z3950_TAGSET_CIMI = [1, 2, 840, 10003, 14, 6]
-Z3950_TAGSET_CIMI_ov = asn1.OidVal([1, 2, 840, 10003, 14, 6])
-Z3950_TAGSET_COLLECTIONS = [1, 2, 840, 10003, 14, 5]
-Z3950_TAGSET_COLLECTIONS_ov = asn1.OidVal([1, 2, 840, 10003, 14, 5])
-Z3950_TAGSET_G = [1, 2, 840, 10003, 14, 2]
-Z3950_TAGSET_G_ov = asn1.OidVal([1, 2, 840, 10003, 14, 2])
-Z3950_TAGSET_GILS = [1, 2, 840, 10003, 14, 4]
-Z3950_TAGSET_GILS_ov = asn1.OidVal([1, 2, 840, 10003, 14, 4])
-Z3950_TAGSET_M = [1, 2, 840, 10003, 14, 1]
-Z3950_TAGSET_M_ov = asn1.OidVal([1, 2, 840, 10003, 14, 1])
-Z3950_TAGSET_STAS = [1, 2, 840, 10003, 14, 3]
-Z3950_TAGSET_STAS_ov = asn1.OidVal([1, 2, 840, 10003, 14, 3])
-Z3950_TAGSET_UPDATE = [1, 2, 840, 10003, 14, 7]
-Z3950_TAGSET_UPDATE_ov = asn1.OidVal([1, 2, 840, 10003, 14, 7])
-Z3950_TAGSET_ZTHES = [1, 2, 840, 10003, 14, 8]
-Z3950_TAGSET_ZTHES_ov = asn1.OidVal([1, 2, 840, 10003, 14, 8])
-Z3950_TRANSFER = [1, 2, 840, 10003, 6]
-Z3950_TRANSFER_ov = asn1.OidVal([1, 2, 840, 10003, 6])
-Z3950_USR = [1, 2, 840, 10003, 10]
-Z3950_USR_ov = asn1.OidVal([1, 2, 840, 10003, 10])
-Z3950_USR_AUTHFILE = [1, 2, 840, 10003, 10, 11]
-Z3950_USR_AUTHFILE_ov = asn1.OidVal([1, 2, 840, 10003, 10, 11])
-Z3950_USR_CHARSETNEG = [1, 2, 840, 10003, 10, 2]
-Z3950_USR_CHARSETNEG_ov = asn1.OidVal([1, 2, 840, 10003, 10, 2])
-Z3950_USR_DATETIME = [1, 2, 840, 10003, 10, 6]
-Z3950_USR_DATETIME_ov = asn1.OidVal([1, 2, 840, 10003, 10, 6])
-Z3950_USR_EDITACTIONQUAL = [1, 2, 840, 10003, 10, 10]
-Z3950_USR_EDITACTIONQUAL_ov = asn1.OidVal([1, 2, 840, 10003, 10, 10])
-Z3950_USR_INFO1 = [1, 2, 840, 10003, 10, 3]
-Z3950_USR_INFO1_ov = asn1.OidVal([1, 2, 840, 10003, 10, 3])
-Z3950_USR_INSERTACTIONQUAL = [1, 2, 840, 10003, 10, 9]
-Z3950_USR_INSERTACTIONQUAL_ov = asn1.OidVal([1, 2, 840, 10003, 10, 9])
-Z3950_USR_PRIVATE = [1, 2, 840, 10003, 10, 1000]
-Z3950_USR_PRIVATE_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1000])
-Z3950_USR_PRIVATE_OCLC = [1, 2, 840, 10003, 10, 1000, 17]
-Z3950_USR_PRIVATE_OCLC_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17])
-Z3950_USR_PRIVATE_OCLC_INFO = [1, 2, 840, 10003, 10, 1000, 17, 1]
-Z3950_USR_PRIVATE_OCLC_INFO_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17, 1])
-Z3950_USR_SEARCHRES1 = [1, 2, 840, 10003, 10, 1]
-Z3950_USR_SEARCHRES1_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1])
-Z3950_USR_SEARCHTERMS1 = [1, 2, 840, 10003, 10, 4]
-Z3950_USR_SEARCHTERMS1_ov = asn1.OidVal([1, 2, 840, 10003, 10, 4])
-Z3950_USR_SEARCHTERMS2 = [1, 2, 840, 10003, 10, 5]
-Z3950_USR_SEARCHTERMS2_ov = asn1.OidVal([1, 2, 840, 10003, 10, 5])
-Z3950_VAR = [1, 2, 840, 10003, 12]
-Z3950_VAR_ov = asn1.OidVal([1, 2, 840, 10003, 12])
-Z3950_VAR_VARIANT1 = [1, 2, 840, 10003, 12, 1]
-Z3950_VAR_VARIANT1_ov = asn1.OidVal([1, 2, 840, 10003, 12, 1])
diff --git a/python/PyZ3950/pqf.py b/python/PyZ3950/pqf.py
deleted file mode 100644
index 3103685..0000000
--- a/python/PyZ3950/pqf.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#!/usr/local/bin/python2.3
-
-try:
- from cStringIO import StringIO
-except:
- from StringIO import StringIO
-from PyZ3950 import z3950, oids,asn1
-from PyZ3950.zdefs import make_attr
-from types import IntType, StringType, ListType
-from PyZ3950.CQLParser import CQLshlex
-
-
-"""
-Parser for PQF directly into RPN structure.
-PQF docs: http://www.indexdata.dk/yaz/doc/tools.html
-
-NB: This does not implement /everything/ in PQF, in particular: @attr 2=3 @and @attr 1=4 title @attr 1=1003 author (eg that 2 should be 3 for all subsequent clauses)
-
-"""
-
-
-class PQFParser:
- lexer = None
- currentToken = None
- nextToken = None
-
- def __init__(self, l):
- self.lexer = l
- self.fetch_token()
-
- def fetch_token(self):
- """ Read ahead one token """
- tok = self.lexer.get_token()
- self.currentToken = self.nextToken
- self.nextToken = tok
-
- def is_boolean(self):
- if (self.currentToken.lower() in ['@and', '@or', '@not', '@prox']):
- return 1
- else:
- return 0
-
- def defaultClause(self, t):
- # Assign a default clause: anywhere =
- clause = z3950.AttributesPlusTerm()
- attrs = [(oids.Z3950_ATTRS_BIB1, 1, 1016), (oids.Z3950_ATTRS_BIB1, 2, 3)]
- clause.attributes = [make_attr(*e) for e in attrs]
- clause.term = t
- return ('op', ('attrTerm', clause))
-
- # Grammar fns
-
- def query(self):
- set = self.top_set()
- qst = self.query_struct()
-
- # Pull in a (hopefully) null token
- self.fetch_token()
- if (self.currentToken):
- # Nope, unprocessed tokens remain
- raise(ValueError)
-
- rpnq = z3950.RPNQuery()
- if set:
- rpnq.attributeSet = set
- else:
- rpnq.attributeSet = oids.Z3950_ATTRS_BIB1_ov
- rpnq.rpn = qst
-
-
- return ('type_1', rpnq)
-
- def top_set(self):
- if (self.nextToken == '@attrset'):
- self.fetch_token()
- self.fetch_token()
- n = self.currentToken.upper()
- if (n[:14] == "1.2.840.10003."):
- return asn1.OidVal(map(int, n.split('.')))
- return oids.oids['Z3950']['ATTRS'][n]['oid']
- else:
- return None
-
- # This totally ignores the BNF, but does the 'right' thing
- def query_struct(self):
- self.fetch_token()
- if (self.currentToken == '@attr'):
- attrs = []
- while self.currentToken == '@attr':
- attrs.append(self.attr_spec())
- self.fetch_token()
- t = self.term()
-
- # Now we have attrs + term
- clause = z3950.AttributesPlusTerm()
- clause.attributes = [make_attr(*e) for e in attrs]
- clause.term = t
- return ('op', ('attrTerm', clause))
- elif (self.is_boolean()):
- # @operator query query
- return self.complex()
- elif (self.currentToken == '@set'):
- return self.result_set()
- elif (self.currentToken == "{"):
- # Parens
- s = self.query_struct()
- if (self.nextToken <> "}"):
- raise(ValueError)
- else:
- self.fetch_token()
- return s
-
- else:
- t = self.term()
- return self.defaultClause(t)
-
- def term(self):
- # Need to split to allow attrlist then @term
- type = 'general'
- if (self.currentToken == '@term'):
- self.fetch_token()
- type = self.currentToken.lower()
- types = {'general' : 'general', 'string' : 'characterString', 'numeric' : 'numeric', 'external' : 'external'}
- type = types[type]
- self.fetch_token()
-
- if (self.currentToken[0] == '"' and self.currentToken[-1] == '"'):
- term = self.currentToken[1:-1]
- else:
- term = self.currentToken
-
- return (type, term)
-
- def result_set(self):
- self.fetch_token()
- return ('op', ('resultSet', self.currentToken))
-
- def attr_spec(self):
- # @attr is CT
- self.fetch_token()
- if (self.currentToken.find('=') == -1):
- # attrset
- set = self.currentToken
- if (set[:14] == "1.2.840.10003."):
- set = asn1.OidVal(map(int, set.split('.')))
- else:
- set = oids.oids['Z3950']['ATTRS'][set.upper()]['oid']
- self.fetch_token()
- else:
- set = None
- # May raise
- (atype, val) = self.currentToken.split('=')
- if (not atype.isdigit()):
- raise ValueError
- atype = int(atype)
- if (val.isdigit()):
- val = int(val)
- return (set, atype, val)
-
- def complex(self):
- op = z3950.RpnRpnOp()
- op.op = self.boolean()
- op.rpn1 = self.query_struct()
- op.rpn2 = self.query_struct()
- return ('rpnRpnOp', op)
-
- def boolean(self):
- b = self.currentToken[1:]
- b = b.lower()
- if (b == 'prox'):
- self.fetch_token()
- exclusion = self.currentToken
- self.fetch_token()
- distance = self.currentToken
- self.fetch_token()
- ordered = self.currentToken
- self.fetch_token()
- relation = self.currentToken
- self.fetch_token()
- which = self.currentToken
- self.fetch_token()
- unit = self.currentToken
-
- prox = z3950.ProximityOperator()
- if (not (relation.isdigit() and exclusion.isdigit() and distance.isdigit() and unit.isdigit())):
- raise ValueError
- prox.relationType = int(relation)
- prox.exclusion = bool(exclusion)
- prox.distance = int(distance)
- if (which[0] == 'k'):
- prox.unit = ('known', int(unit))
- elif (which[0] == 'p'):
- prox.unit = ('private', int(unit))
- else:
- raise ValueError
-
- return (b, prox)
- elif b == 'not':
- return ('and-not', None)
- else:
- return (b, None)
-
-
-def parse(q):
-
- query = StringIO(q)
- lexer = CQLshlex(query)
- # Override CQL's wordchars list to include /=><()
- lexer.wordchars += "!@#$%^&*-+[];,.?|~`:\\><=/'()"
-
- parser = PQFParser(lexer)
- return parser.query()
-
-
-def rpn2pqf(rpn):
- # Turn RPN structure into PQF equivalent
- q = rpn[1]
- if (rpn[0] == 'type_1'):
- # Top level
- if (q.attributeSet):
- query = '@attrset %s ' % ( '.'.join(map(str, q.attributeSet.lst)))
- else:
- query = ""
- rest = rpn2pqf(q.rpn)
- return "%s%s" % (query, rest)
- elif (rpn[0] == 'rpnRpnOp'):
- # boolean
- if (q.op[0] in ['and', 'or']):
- query = ['@', q.op[0], ' ']
- elif (q.op[0] == 'and-not'):
- query = ['@not ']
- else:
- query = ['@prox']
- # XXX
- query.append(' ')
- query.append(rpn2pqf(q.rpn1))
- query.append(' ')
- query.append(rpn2pqf(q.rpn2))
- return ''.join(query)
- elif (rpn[0] == 'op'):
- if (q[0] == 'attrTerm'):
- query = []
- for a in q[1].attributes:
- if (a.attributeValue[0] == 'numeric'):
- val = str(a.attributeValue[1])
- else:
- val = a.attributeValue[1].list[0][1]
- query.append("@attr %i=%s " % (a.attributeType, val))
- query.append('"%s" ' % (q[1].term[1]))
- return ''.join(query)
- elif (q[0] == 'resultSet'):
- return "@set %s" % (q[1])
-
-
-
-
-
-
-
-
diff --git a/python/PyZ3950/z3950.py b/python/PyZ3950/z3950.py
deleted file mode 100644
index bb26a4d..0000000
--- a/python/PyZ3950/z3950.py
+++ /dev/null
@@ -1,754 +0,0 @@
-#!/usr/bin/env python
-
-# This file should be available from
-# http://www.pobox.com/~asl2/software/PyZ3950/
-# and is licensed under the X Consortium license:
-# Copyright (c) 2001, Aaron S. Lav, asl2@pobox.com
-# All rights reserved.
-
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, and/or sell copies of the Software, and to permit persons
-# to whom the Software is furnished to do so, provided that the above
-# copyright notice(s) and this permission notice appear in all copies of
-# the Software and that both the above copyright notice(s) and this
-# permission notice appear in supporting documentation.
-
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
-# OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
-# HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL
-# INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-# Except as contained in this notice, the name of a copyright holder
-# shall not be used in advertising or otherwise to promote the sale, use
-# or other dealings in this Software without prior written authorization
-# of the copyright holder.
-
-# Change history:
-# 2002/05/23
-# Fix for Python2 compatibility. Thanks to Douglas Bates
-# Fix to support SUTRS (requires asn1 updates, too)
-# 2002/05/28
-# Make SUTRS printing a little more useful
-# Correctly close connection when done
-# Handle receiving diagnostics instead of records a little better
-
-"""
-PyZ3950 currently is capable of sending and receiving v2 or v3 PDUs
-Initialize, Search, Present, Scan, Sort, Close, and Delete. For client
-work, you probably want to use ZOOM, which should be in the same
-distribution as this file, in zoom.py. The Server class in this file
-implements a server, but could use some work. Both interoperate with
-the Yaz toolkit and the
-client interoperates with a variety of libraries.
-"""
-
-from __future__ import nested_scopes
-import getopt
-import sys
-import exceptions
-import random
-import socket
-import string
-import traceback
-
-import codecs
-
-from PyZ3950 import asn1
-from PyZ3950 import zmarc
-from PyZ3950.zdefs import *
-
-out_encoding = None
-
-trace_recv = 0
-trace_init = 0
-
-print_hex = 0
-
-class Z3950Error(Exception):
- pass
-
-# Note: following 3 exceptions are defaults, but can be changed by
-# calling conn.set_exns
-
-class ConnectionError(Z3950Error): # TCP or other transport error
- pass
-
-class ProtocolError(Z3950Error): # Unexpected message or badly formatted
- pass
-
-class UnexpectedCloseError(ProtocolError):
- pass
-
-vers = '0.62'
-default_resultSetName = 'default'
-
-
-DEFAULT_PORT = 2101
-
-Z3950_VERS = 3 # This is a global switch: do we support V3 at all?
-
-def extract_recs (resp):
- (typ, recs) = resp.records
- if (typ <> 'responseRecords'):
- raise ProtocolError ("Bad records typ " + str (typ) + str (recs))
- if len (recs) == 0:
- raise ProtocolError ("No records")
- fmtoid = None
- extract = []
- for r in recs:
- (typ, data) = r.record
- if (typ <> 'retrievalRecord'):
- raise ProtocolError ("Bad typ %s data %s" % (str (typ), str(data)))
- oid = data.direct_reference
- if fmtoid == None:
- fmtoid = oid
- elif fmtoid <> oid:
- raise ProtocolError (
- "Differing OIDs %s %s" % (str (fmtoid), str (oid)))
- # Not, strictly speaking, an error.
- dat = data.encoding
- (typ, dat) = dat
- if (oid == Z3950_RECSYN_USMARC_ov):
- if typ <> 'octet-aligned':
- raise ProtocolError ("Weird record EXTERNAL MARC type: " + typ)
- extract.append (dat)
- return (fmtoid, extract)
-
-def get_formatter (oid):
- def printer (x):
- print oid, repr (x)
- def print_marc (marc):
- print str (zmarc.MARC(marc))
- def print_sutrs (x):
- print "SUTRS:",
- if isinstance (x, type ('')):
- print x
- elif isinstance (x, type (u'')):
- if out_encoding == None:
- print repr (x)
- else:
- try:
- print x.encode (out_encoding)
- except UnicodeError, u:
- print "Cannot print %s in current encoding %s" % (
- repr (x), out_encoding)
- if oid == Z3950_RECSYN_SUTRS_ov:
- return print_sutrs
- if oid == Z3950_RECSYN_USMARC_ov:
- return print_marc
- else:
- return printer
-
-def disp_resp (resp):
- try:
- (fmtoid, recs) = extract_recs (resp)
- except ProtocolError, val:
- print "Bad records", str (val)
- formatter = get_formatter (fmtoid)
- for rec in recs:
- formatter (rec)
-
-class Conn:
- rdsz = 65536
- def __init__ (self, sock = None, ConnectionError = ConnectionError,
- ProtocolError = ProtocolError, UnexpectedCloseError =
- UnexpectedCloseError):
- self.set_exns (ConnectionError, ProtocolError, UnexpectedCloseError)
- if sock == None:
- self.sock = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
- else:
- self.sock = sock
- self.decode_ctx = asn1.IncrementalDecodeCtx (APDU)
- self.encode_ctx = asn1.Ctx ()
- def set_exns (self, conn, protocol, unexp_close):
- self.ConnectionError = conn
- self.ProtocolError = protocol
- self.UnexpectedCloseError = unexp_close
-
- def set_codec (self, charset_name, charsets_in_records):
- self.charset_name = charset_name
- self.charsets_in_records = not not charsets_in_records # collapse None and 0
- if trace_charset:
- print "Setting up codec!", self.charset_name
- strip_bom = self.charset_name == 'utf-16'
- # XXX should create a new codec which wraps utf-16 but
- # strips the Byte Order Mark, or use stream codecs
- if self.charset_name <> None:
- self.encode_ctx.set_codec (asn1.GeneralString,
- codecs.lookup (self.charset_name),
- strip_bom)
- self.decode_ctx.set_codec (asn1.GeneralString,
- codecs.lookup (self.charset_name),
- strip_bom)
- if not charsets_in_records: # None or 0
- register_retrieval_record_oids(self.decode_ctx)
- register_retrieval_record_oids(self.encode_ctx)
-
- def readproc (self):
- if self.sock == None:
- raise self.ConnectionError ('disconnected')
- try:
- b = self.sock.recv (self.rdsz)
- except socket.error, val:
- self.sock = None
- raise self.ConnectionError ('socket', str (val))
- if len (b) == 0: # graceful close
- self.sock = None
- raise self.ConnectionError ('graceful close')
- if trace_recv:
- print map (lambda x: hex(ord(x)), b)
- return b
- def read_PDU (self):
- while 1:
- if self.decode_ctx.val_count () > 0:
- return self.decode_ctx.get_first_decoded ()
- try:
- b = self.readproc ()
- self.decode_ctx.feed (map (ord, b))
- except asn1.BERError, val:
- raise self.ProtocolError ('ASN1 BER', str(val))
-
-
-class Server (Conn):
- test = 0
- def __init__ (self, sock):
- Conn.__init__ (self, sock)
- self.expecting_init = 1
- self.done = 0
- self.result_sets = {}
- self.charset_name = None
- def run (self):
- while not self.done:
- (typ, val) = self.read_PDU ()
- fn = self.fn_dict.get (typ, None)
- if fn == None:
- raise self.ProtocolError ("Bad typ", typ + " " + str (val))
- if typ <> 'initRequest' and self.expecting_init:
- raise self.ProtocolError ("Init expected", typ)
- fn (self, val)
- def send (self, val):
- b = self.encode_ctx.encode (APDU, val)
- if self.test:
- print "Internal Testing"
- # a reminder not to leave this switched on by accident
- self.decode_ctx.feed (b)
- decoded = self.read_PDU ()
-            assert (val == decoded)
- self.sock.send (b)
-
- def do_close (self, reason, info):
- close = Close ()
- close.closeReason = reason
- close.diagnosticInformation = info
- self.send (('close', close))
-
- def close (self, parm):
- self.done = 1
- self.do_close (0, 'Normal close')
-
- def search_child (self, query):
- return range (random.randint (2,10))
- def search (self, sreq):
- if sreq.replaceIndicator == 0 and self.result_sets.has_key (
- sreq.resultSetName):
- raise self.ProtocolError ("replaceIndicator 0")
- result = self.search_child (sreq.query)
- sresp = SearchResponse ()
- self.result_sets[sreq.resultSetName] = result
- sresp.resultCount = len (result)
- sresp.numberOfRecordsReturned = 0
- sresp.nextResultSetPosition = 1
- sresp.searchStatus = 1
- sresp.resultSetStatus = 0
- sresp.presentStatus = PresentStatus.get_num_from_name ('success')
- sresp.records = ('responseRecords', [])
- self.send (('searchResponse', sresp))
- def format_records (self, start, count, res_set, prefsyn):
- l = []
- for i in range (start - 1, start + count - 1):
- elt = res_set[i]
- elt_external = asn1.EXTERNAL ()
- elt_external.direct_reference = Z3950_RECSYN_SUTRS_ov
-
- # Not only has this text been extensively translated, but
- # it also prefigures Z39.50's separation of Search and Present,
- # once rearranged a little.
- strings = [
- 'seek, and ye shall find; ask, and it shall be given you',
-            u"""Car quiconque demande re\u00e7oit, qui cherche trouve, et \u00e0 qui frappe on ouvrira""", # This (next) verse has non-ASCII characters
- u"\u0391\u03b9\u03c4\u03b5\u03b9\u03c4\u03b5, "
- u"\u03ba\u03b1\u03b9 \u03b4\u03bf\u03b8\u03b7\u03c3\u03b5\u03c4\u03b1\u03b9 "+
- u"\u03c5\u03bc\u03b9\u03bd; \u03b6\u03b7\u03c4\u03b5\u03b9\u03c4\u03b5 " +
- u"\u03ba\u03b1\u03b9 \u03b5\u03c5\u03c1\u03b7\u03c3\u03b5\u03c4\u03b5",
- u"\u05e8\u05d0\u05d4 \u05d6\u05d4 \u05de\u05e6\u05d0\u05ea\u05d9"]
- if self.charsets_in_records:
- encode_charset = self.charset_name
- else:
- encode_charset = 'ascii'
- def can_encode (s):
- try:
- s.encode (encode_charset)
- except UnicodeError:
- return 0
- return 1
- if self.charset_name == None:
- candidate_strings = [strings[0]]
- else:
- candidate_strings = [s for s in strings if can_encode (s)]
- # Note: this code is for debugging/testing purposes. Usually,
- # language/content selection should not be made on the
- # basis of the selected charset, and a surrogate diagnostic
- # should be generated if the data cannot be encoded.
- text = random.choice (candidate_strings)
- add_str = " #%d charset %s cir %d" % (elt, encode_charset,
- self.charsets_in_records)
- elt_external.encoding = ('single-ASN1-type', text + add_str)
- n = NamePlusRecord ()
- n.name = 'foo'
- n.record = ('retrievalRecord', elt_external)
- l.append (n)
- return l
-
- def present (self, preq):
- presp = PresentResponse ()
- res_set = self.result_sets [preq.resultSetId]
- presp.numberOfRecordsReturned = preq.numberOfRecordsRequested
- presp.nextResultSetPosition = preq.resultSetStartPoint + \
- preq.numberOfRecordsRequested
- presp.presentStatus = 0
- presp.records = ('responseRecords',
- self.format_records (preq.resultSetStartPoint,
- preq.numberOfRecordsRequested,
- res_set,
- preq.preferredRecordSyntax))
- self.send (('presentResponse', presp))
-
- def init (self, ireq):
- if trace_init:
- print "Init received", ireq
- self.v3_flag = (ireq.protocolVersion ['version_3'] and
- Z3950_VERS == 3)
-
- ir = InitializeResponse ()
- ir.protocolVersion = ProtocolVersion ()
- ir.protocolVersion ['version_1'] = 1
- ir.protocolVersion ['version_2'] = 1
- ir.protocolVersion ['version_3'] = self.v3_flag
- val = get_charset_negot (ireq)
- charset_name = None
- records_in_charsets = 0
- if val <> None:
- csreq = CharsetNegotReq ()
- csreq.unpack_proposal (val)
- def rand_choose (list_or_none):
- if list_or_none == None or len (list_or_none) == 0:
- return None
- return random.choice (list_or_none)
- charset_name = rand_choose (csreq.charset_list)
- if charset_name <> None:
- try:
- codecs.lookup (charset_name)
- except LookupError, l:
- charset_name = None
- csresp = CharsetNegotResp (
- charset_name,
- rand_choose (csreq.lang_list),
- csreq.records_in_charsets)
- records_in_charsets = csresp.records_in_charsets
- if trace_charset:
- print csreq, csresp
- set_charset_negot (ir, csresp.pack_negot_resp (), self.v3_flag)
-
- optionslist = ['search', 'present', 'delSet', 'scan','negotiation']
- ir.options = Options ()
- for o in optionslist:
- ir.options[o] = 1
-
- ir.preferredMessageSize = 0
-
- ir.exceptionalRecordSize = 0
-        # Z39.50-2001 3.2.1.1.4, 0 means client should be prepared to accept
- # arbitrarily long messages.
-
- ir.implementationId = implementationId
-
- ir.implementationName = 'PyZ3950 Test server'
- ir.implementationVersion = impl_vers
- ir.result = 1
-
- if trace_charset or trace_init:
- print ir
- self.expecting_init = 0
- self.send (('initResponse', ir))
- self.set_codec (charset_name, records_in_charsets)
-
- def sort (self, sreq):
- sresp = SortResponse ()
- sresp.sortStatus = 0
- self.send (('sortResponse', sresp))
- def delete (self, dreq):
- dresp = DeleteResultSetResponse ()
- dresp.deleteOperationStatus = 0
- self.send (('deleteResultSetResponse', dresp))
- def esrequest (self, esreq):
- print "ES", esreq
- esresp = ExtendedServicesResponse ()
- esresp.operationStatus = ExtendedServicesResponse['operationStatus'].get_num_from_name ('failure')
- self.send (('extendedServicesResponse', esresp))
-
- fn_dict = {'searchRequest': search,
- 'presentRequest': present,
- 'initRequest' : init,
- 'close' : close,
- 'sortRequest' : sort,
- 'deleteResultSetRequest' : delete,
- 'extendedServicesRequest': esrequest}
-
-
-def run_server (test = 0):
- listen = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
- listen.setsockopt (socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- listen.bind (('', DEFAULT_PORT))
- listen.listen (1)
- while 1:
- (sock,addr) = listen.accept ()
- try:
- serv = Server (sock)
- serv.test = test
- serv.run ()
- except:
- (typ, val, tb) = sys.exc_info ()
- if typ == exceptions.KeyboardInterrupt:
- print "kbd interrupt, leaving"
- raise
- print "error %s %s from %s" % (typ, val, addr)
- traceback.print_exc(40)
- sock.close ()
-
-def extract_apt (rpnQuery):
- """Takes RPNQuery to AttributePlusTerm"""
- RPNStruct = rpnQuery.rpn
- assert (RPNStruct [0] == 'op')
- operand = RPNStruct [1]
- assert (operand [0] == 'attrTerm')
- return operand [1]
-
-
-class Client (Conn):
- test = 0
-
- def __init__ (self, addr, port = DEFAULT_PORT, optionslist = None,
- charset = None, lang = None, user = None, password = None,
- preferredMessageSize = 0x100000, group = None,
- maximumRecordSize = 0x100000, implementationId = "",
- implementationName = "", implementationVersion = "",
- ConnectionError = ConnectionError,
- ProtocolError = ProtocolError,
- UnexpectedCloseError = UnexpectedCloseError):
-
- Conn.__init__ (self, ConnectionError = ConnectionError,
- ProtocolError = ProtocolError,
- UnexpectedCloseError = UnexpectedCloseError)
- try:
- self.sock.connect ((addr, port))
- except socket.error, val:
- self.sock = None
- raise self.ConnectionError ('socket', str(val))
- try_v3 = Z3950_VERS == 3
-
- if (charset and not isinstance(charset, list)):
- charset = [charset]
- if (lang and not isinstance(lang, list)):
-            lang = [lang] # wrap a bare language string in a list, as for charset above
- negotiate_charset = charset or lang
-
- if (user or password or group):
- authentication = (user, password, group)
- else:
- authentication = None
-
- InitReq = make_initreq (optionslist, authentication = authentication,
- v3 = try_v3,
- preferredMessageSize = preferredMessageSize,
- maximumRecordSize = maximumRecordSize,
- implementationId = implementationId,
- implementationName = implementationName,
- implementationVersion = implementationVersion,
- negotiate_charset = negotiate_charset)
- if negotiate_charset:
- # languages = ['eng', 'fre', 'enm']
- # Thanne longen folk to looken in catalogues
- # and clerkes for to seken straunge bookes ...
- cnr = CharsetNegotReq (charset, lang, random.choice((0,1,None)))
- if trace_charset:
- print cnr
- set_charset_negot (InitReq, cnr.pack_proposal (), try_v3)
-
- if trace_init:
- print "Initialize request", InitReq
-
- self.initresp = self.transact (
- ('initRequest', InitReq), 'initResponse')
- if trace_init:
- print "Initialize Response", self.initresp
- self.v3_flag = self.initresp.protocolVersion ['version_3']
- val = get_charset_negot (self.initresp)
- if val <> None:
- csr = CharsetNegotResp ()
- csr.unpack_negot_resp (val)
- if trace_charset:
- print "Got csr", str (csr)
- self.set_codec (csr.charset, csr.records_in_charsets)
-
- self.search_results = {}
- self.max_to_request = 20
- self.default_recordSyntax = Z3950_RECSYN_USMARC_ov
- def get_option (self, option_name):
- return self.initresp.options[option_name]
- def transact (self, to_send, expected):
- b = self.encode_ctx.encode (APDU, to_send)
- if print_hex:
- print map (hex, b)
- if self.test:
- print "Internal Testing"
- # a reminder not to leave this switched on by accident
- self.decode_ctx.feed (b)
- decoded = self.read_PDU ()
- print "to_send", to_send, "decoded", decoded
- assert (to_send == decoded)
- if self.sock == None:
- raise self.ConnectionError ('disconnected')
- try:
- self.sock.send (b)
- except socket.error, val:
- self.sock = None
- raise self.ConnectionError('socket', str(val))
-
- if expected == None:
- return
- pdu = self.read_PDU ()
- (arm, val) = pdu
- if self.test:
- print "Internal Testing 2"
- b = self.encode_ctx.encode (APDU, (arm, val))
- self.decode_ctx.feed (b)
- redecoded = self.read_PDU ()
- if redecoded <> (arm, val):
- print "Redecoded", redecoded
- print "old", (arm, val)
- assert (redecoded == (arm, val))
- if arm == expected: # may be 'close'
- return val
- elif arm == 'close':
- raise self.UnexpectedCloseError (
- "Server closed connection reason %d diag info %s" % \
- (getattr (val, 'closeReason', -1),
- getattr (val, 'diagnosticInformation', 'None given')))
- else:
- raise self.ProtocolError (
- "Unexpected response from server %s %s " % (expected,
- repr ((arm, val))))
- def set_dbnames (self, dbnames):
- self.dbnames = dbnames
- def search_2 (self, query, rsn = default_resultSetName, **kw):
- # We used to check self.initresp.options['search'], but
- # support for search is required by the standard, and
- # www.cnshb.ru:210 doesn't set the search bit if you negotiate
- # v2, but supports search anyway
- sreq = make_sreq (query, self.dbnames, rsn, **kw)
- recv = self.transact (('searchRequest', sreq), 'searchResponse')
- self.search_results [rsn] = recv
- return recv
- def search (self, query, rsn = default_resultSetName, **kw):
- # for backwards compat
- recv = self.search_2 (('type_1', query), rsn, **kw)
- return recv.searchStatus and (recv.resultCount > 0)
- # If searchStatus is failure, check result-set-status -
- # -subset - partial, valid results available
- # -interim - partial, not necessarily valid
- # -none - no result set
- # If searchStatus is success, check present-status:
- # - success - OK
- # - partial-1 - not all, access control
- # - partial-2 - not all, won't fit in msg size (but we currently don't ask for
- # any records in search, shouldn't happen)
- # - partial-3 - not all, resource control (origin)
- # - partial-4 - not all, resource control (target)
- # - failure - no records, nonsurrogate diagnostic.
- def get_count (self, rsn = default_resultSetName):
- return self.search_results[rsn].resultCount
- def delete (self, rsn):
- if not self.initresp.options['delSet']:
- return None
- delreq = DeleteResultSetRequest ()
- delreq.deleteFunction = 0 # list
- delreq.resultSetList = [rsn]
- return self.transact (('deleteResultSetRequest', delreq),
- 'deleteResultSetResponse')
- def present (self, rsn= default_resultSetName, start = None,
- count = None, recsyn = None, esn = None):
- # don't check for support in init resp: see search for reasoning
-
- # XXX Azaroth 2004-01-08. This does work when rs is result of sort.
- try:
- sresp = self.search_results [rsn]
- if start == None:
- start = sresp.nextResultSetPosition
- if count == None:
- count = sresp.resultCount
- if self.max_to_request > 0:
- count = min (self.max_to_request, count)
- except:
- pass
- if recsyn == None:
- recsyn = self.default_recordSyntax
- preq = PresentRequest ()
- preq.resultSetId = rsn
- preq.resultSetStartPoint = start
- preq.numberOfRecordsRequested = count
- preq.preferredRecordSyntax = recsyn
- if esn <> None:
- preq.recordComposition = ('simple', esn)
- return self.transact (('presentRequest', preq), 'presentResponse')
- def scan (self, query, **kw):
- sreq = ScanRequest ()
- sreq.databaseNames = self.dbnames
- assert (query[0] == 'type_1' or query [0] == 'type_101')
- sreq.attributeSet = query[1].attributeSet
- sreq.termListAndStartPoint = extract_apt (query[1])
- sreq.numberOfTermsRequested = 20 # default
- for (key, val) in kw.items ():
- setattr (sreq, key, val)
-
- return self.transact (('scanRequest', sreq), 'scanResponse')
- def close (self):
- close = Close ()
- close.closeReason = 0
- close.diagnosticInformation = 'Normal close'
- try:
- rv = self.transact (('close', close), 'close')
- except self.ConnectionError:
- rv = None
- if self.sock <> None:
- self.sock.close ()
- self.sock = None
- return rv
-
-
-def mk_compound_query ():
- aelt1 = AttributeElement (attributeType = 1,
- attributeValue = ('numeric',4))
- apt1 = AttributesPlusTerm ()
- apt1.attributes = [aelt1]
- apt1.term = ('general', '1066')
- aelt2 = AttributeElement (attributeType = 1,
- attributeValue = ('numeric', 1))
- apt2 = AttributesPlusTerm ()
- apt2.attributes = [aelt2]
- apt2.term = ('general', 'Sellar')
- myrpnRpnOp = RpnRpnOp ()
- myrpnRpnOp.rpn1 = ('op', ('attrTerm', apt1))
- myrpnRpnOp.rpn2 = ('op', ('attrTerm', apt2))
- myrpnRpnOp.op = ('and', None)
- rpnq = RPNQuery (attributeSet = Z3950_ATTRS_BIB1_ov)
- rpnq.rpn = ('rpnRpnOp', myrpnRpnOp)
- return rpnq
-
-def mk_simple_query (title):
- aelt1 = AttributeElement (attributeType = 1,
- attributeValue = ('numeric', 1003))
- apt1 = AttributesPlusTerm ()
- apt1.attributes = [aelt1]
- apt1.term = ('general', title) # XXX or should be characterString, not general, but only when V3.
- rpnq = RPNQuery (attributeSet = Z3950_ATTRS_BIB1_ov)
- rpnq.rpn = ('op', ('attrTerm', apt1))
- return rpnq
-
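A hedged aside, not part of the deleted module: mk_simple_query takes a "title"
argument but hard-codes attributeValue 1003, which in the Bib-1 attribute set is
conventionally the Author use attribute; the usual Title use attribute is 4. A
hypothetical variant for an explicit title search, assembled from the same pieces
used above (the name mk_title_query is illustrative only), might look like:

    def mk_title_query (title):
        # attributeType 1 = Bib-1 "Use" attribute; value 4 = Title (1003 = Author)
        aelt = AttributeElement (attributeType = 1,
                                 attributeValue = ('numeric', 4))
        apt = AttributesPlusTerm ()
        apt.attributes = [aelt]
        apt.term = ('general', title)
        rpnq = RPNQuery (attributeSet = Z3950_ATTRS_BIB1_ov)
        rpnq.rpn = ('op', ('attrTerm', apt))
        return rpnq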
-def_host = 'LC'
-
-host_dict = {'BIBSYS': ('z3950.bibsys.no', 2100, 'BIBSYS'),
- 'YAZ': ('127.0.0.1', 9999, 'foo'),
- 'LCTEST' : ('ilssun2.loc.gov', 7090, 'Voyager'),
- 'LC' : ('z3950.loc.gov', 7090, 'Voyager'),
- 'NLC' : ('amicus.nlc-bnc.ca', 210, 'NL'),
- 'BNC' : ('amicus.nlc-bnc.ca', 210, 'NL'),
-             # We speak French too.
- 'LOCAL': ('127.0.0.1', 9999, 'Default'),
- 'LOCAL2': ('127.0.0.1', 2101, 'foo'),
- 'BL' :('blpcz.bl.uk', 21021, 'BLPC-ALL'),
- 'BELLLABS' : ('z3950.bell-labs.com', 210, 'books'),
- 'BIBHIT' : ('www.bibhit.dk', 210, 'Default'),
- 'YALE': ('webpac.library.yale.edu', 210, 'YALEOPAC'),
- 'OXFORD': ('library.ox.ac.uk', 210, 'ADVANCE'),
- 'OVID': ('z3950.ovid.com', 2213, 'pmed'), # scan only
- 'UC': ('ipac.lib.uchicago.edu', 210, 'uofc'),
- 'KUB' : ('dbiref.kub.nl', 1800, 'jel'),
- 'INDEXDATA' : ('muffin.indexdata.dk', 9004, 'thatt')}
-# last two are Zthes servers.
-
-if __name__ == '__main__':
- optlist, args = getopt.getopt (sys.argv[1:], 'e:sh:tc:l:')
- server = 0
- host = def_host
- test = 0
- charset_list = None
- lang_list = None
- for (opt, val) in optlist:
- if opt == '-s':
- server = 1
- elif opt == '-h':
- host = val
- elif opt == '-t':
- test = 1
- elif opt == '-e':
- out_encoding = val
- elif opt == '-c':
- charset_list = val.split (',')
- elif opt == '-l':
- lang_list = val.split (',')
- if server:
- run_server (test)
-
- host = host.upper ()
- (name, port, dbname) = host_dict.get (host, host_dict[def_host])
- cli = Client (name, port, charset = charset_list,
- lang = lang_list)
- cli.test = test
- cli.set_dbnames ([dbname])
- print "Starting search"
-# rpnq = mk_simple_query ('Perec, Georges')
-# rpnq = mk_simple_query ('Johnson, Kim')
- rpnq = mk_compound_query ()
- if cli.search (rpnq, smallSetUpperBound = 0, mediumSetPresentNumber = 0,
- largeSetLowerBound = 1):
- disp_resp (cli.present (recsyn = Z3950_RECSYN_USMARC_ov))
- else:
- print "Not found"
- print "Deleting"
- cli.delete (default_resultSetName)
- cli.delete ('bogus')
- print "Closing"
- try:
- cli.close ()
- except ConnectionError:
- # looks like LC, at least, sends a FIN on receipt of Close PDU
- # guess we should check for gracefullness of close, and complain
- # if not.
- pass
-
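The comment block in Client.search above spells out how searchStatus, result-set-status
and present-status should be interpreted, but the module never shows them being
consulted. A minimal sketch, assuming an already-connected Client instance cli with
dbnames set and an RPN query rpnq (for example the one mk_compound_query returns); it
uses only names that already appear in the deleted module:

    resp = cli.search_2 (('type_1', rpnq))
    if not resp.searchStatus:
        # failure: result-set-status (1 = subset, 2 = interim, 3 = none) says whether
        # a partial result set is still usable
        print "search failed, result-set-status", getattr (resp, 'resultSetStatus', None)
    elif resp.resultCount > 0:
        ps = getattr (resp, 'presentStatus', None)
        if ps not in (None, PresentStatus.get_num_from_name ('success')):
            print "hits found, but present-status", ps # partial-1..partial-4 or failure
        disp_resp (cli.present (recsyn = Z3950_RECSYN_SUTRS_ov))
    else:
        print "no hits"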
diff --git a/python/PyZ3950/z3950_2001.py b/python/PyZ3950/z3950_2001.py
deleted file mode 100644
index 9a0f56c..0000000
--- a/python/PyZ3950/z3950_2001.py
+++ /dev/null
@@ -1,1503 +0,0 @@
-#!/usr/bin/env python
-# Auto-generated from ../compiler/tests/z3950-2001.txt at Wed, 02 Jun 2004 15:30:47 +0000
-from PyZ3950 import asn1
-#module Module None
-KnownProximityUnit=asn1.INTEGER_class ([('character',1),('word',2),('sentence',3),('paragraph',4),('section',5),('chapter',6),('document',7),('element',8),('subelement',9),('elementType',10),('byte',11)],None,None)
-InternationalString=asn1.GeneralString
-Specification=asn1.SEQUENCE ([('schema',None, asn1.CHOICE ([('oid',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER)),
- ('uri',None,asn1.TYPE(asn1.IMPLICIT(300,cls=asn1.CONTEXT_FLAG),InternationalString))]),1),
- ('elementSpec',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('elementSetName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('externalEspec',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))])),1)], seq_name = 'Specification')
-ElementSetName=asn1.TYPE(asn1.IMPLICIT(103,cls=asn1.CONTEXT_FLAG),InternationalString)
-Permissions=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('userId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('allowableFunctions',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([('delete',1),('modifyContents',2),('modifyPermissions',3),('present',4),('invoke',5)],None,None))),0)], seq_name = None))
-DeleteSetStatus=asn1.TYPE(asn1.IMPLICIT(33,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',0),('resultSetDidNotExist',1),('previouslyDeletedByServer',2),('systemProblemAtServer',3),('accessNotAllowed',4),('resourceControlAtClient',5),('resourceControlAtServer',6),('bulkDeleteNotSupported',7),('notAllRsltSetsDeletedOnBulkDlte',8),('notAllRequestedResultSetsDeleted',9),('resultSetInUse',10)],None,None))
-PresentStatus=asn1.TYPE(asn1.IMPLICIT(27,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',0),('partial_1',1),('partial_2',2),('partial_3',3),('partial_4',4),('failure',5)],None,None))
-StringOrNumeric=asn1.CHOICE ([('string',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('numeric',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)))])
-AttributeSetId=asn1.OBJECT_IDENTIFIER
-ProximityOperator=asn1.SEQUENCE ([('exclusion',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('distance',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('ordered',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('relationType',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('lessThan',1),('lessThanOrEqual',2),('equal',3),('greaterThanOrEqual',4),('greaterThan',5),('notEqual',6)],None,None)),0),
- ('proximityUnitCode',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('known',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),KnownProximityUnit)),
- ('private',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)))])),0)], seq_name = 'ProximityOperator')
-ResourceReport=asn1.EXTERNAL
-Options=asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.BITSTRING_class ([('search',0),('present',1),('delSet',2),('resourceReport',3),('triggerResourceCtrl',4),('resourceCtrl',5),('accessCtrl',6),('scan',7),('sort',8),('unused',9),('extendedServices',10),('level_1Segmentation',11),('level_2Segmentation',12),('concurrentOperations',13),('namedResultSets',14),('encapsulation',15),('resultCountInSort',16),('negotiation',17),('dedup',18),('query104',19),('pQESCorrection',20),('stringSchema',21)],None,None))
-Unit=asn1.SEQUENCE ([('unitSystem',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('unitType',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),1),
- ('unit',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),StringOrNumeric),1),
- ('scaleFactor',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'Unit')
-CloseReason=asn1.TYPE(asn1.IMPLICIT(211,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('finished',0),('shutdown',1),('systemProblem',2),('costLimit',3),('resources',4),('securityViolation',5),('protocolError',6),('lackOfActivity',7),('responseToPeer',8),('unspecified',9)],None,None))
-AttributeElement=asn1.SEQUENCE ([('attributeSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),AttributeSetId),1),
- ('attributeType',None,asn1.TYPE(asn1.IMPLICIT(120,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('attributeValue',None, asn1.CHOICE ([('numeric',None,asn1.TYPE(asn1.IMPLICIT(121,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('complex',None,asn1.TYPE(asn1.IMPLICIT(224,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('list',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (StringOrNumeric)),0),
- ('semanticAction',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),1)], seq_name = None)))]),0)], seq_name = 'AttributeElement')
-DefaultDiagFormat=asn1.SEQUENCE ([('diagnosticSetId',None,asn1.OBJECT_IDENTIFIER,0),
- ('condition',None,asn1.INTEGER_class ([],None,None),0),
- ('addinfo',None, asn1.CHOICE ([('v2Addinfo',None,asn1.VisibleString),
- ('v3Addinfo',None,InternationalString)]),0)], seq_name = 'DefaultDiagFormat')
-ResourceReportId=asn1.OBJECT_IDENTIFIER
-FragmentSyntax=asn1.CHOICE ([('externallyTagged',None,asn1.EXTERNAL),
- ('notExternallyTagged',None,asn1.OCTSTRING)])
-Operator=asn1.TYPE(asn1.EXPLICIT(46,cls=asn1.CONTEXT_FLAG),asn1.CHOICE ([('and',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('or',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('and_not',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('prox',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),ProximityOperator))]))
-DiagRec=asn1.CHOICE ([('defaultFormat',None,DefaultDiagFormat),
- ('externallyDefined',None,asn1.EXTERNAL)])
-ProtocolVersion=asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BITSTRING_class ([('version_1',0),('version_2',1),('version_3',2)],None,None))
-Range=asn1.SEQUENCE ([('startingPosition',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('numberOfRecords',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = 'Range')
-ReferenceId=asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)
-RetentionCriterion=asn1.CHOICE ([('numberOfEntries',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('percentOfEntries',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('duplicatesOnly',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('discardRsDuplicates',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-DuplicateDetectionCriterion=asn1.CHOICE ([('levelOfMatch',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('caseSensitive',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('punctuationSensitive',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('regularExpression',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('rsDuplicates',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-InfoCategory=asn1.SEQUENCE ([('categoryTypeId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('categoryValue',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = 'InfoCategory')
-AttributeList=asn1.TYPE(asn1.IMPLICIT(44,cls=asn1.CONTEXT_FLAG),asn1.SEQUENCE_OF (AttributeElement))
-ResultSetId=asn1.TYPE(asn1.IMPLICIT(31,cls=asn1.CONTEXT_FLAG),InternationalString)
-DatabaseName=asn1.TYPE(asn1.IMPLICIT(105,cls=asn1.CONTEXT_FLAG),InternationalString)
-IdAuthentication=asn1.TYPE(asn1.EXPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.CHOICE ([('open',None,asn1.VisibleString),
- ('idPass',None, asn1.SEQUENCE ([('groupId',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('userId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('password',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None)),
- ('anonymous',None,asn1.NULL),
- ('other',None,asn1.EXTERNAL)]))
-SortCriterion=asn1.CHOICE ([('mostComprehensive',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('leastComprehensive',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('mostRecent',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('oldest',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('leastCost',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('preferredDatabases',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)))])
-SortKey=asn1.CHOICE ([('privateSortKey',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('elementSpec',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Specification)),
- ('sortAttributes',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('id',None,AttributeSetId,0),
- ('list',None,AttributeList,0)], seq_name = None)))])
-ListStatuses=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('id',None,ResultSetId,0),
- ('status',None,DeleteSetStatus,0)], seq_name = None))
-OtherInformation=asn1.TYPE(asn1.IMPLICIT(201,cls=asn1.CONTEXT_FLAG),asn1.SEQUENCE_OF (asn1.SEQUENCE ([('category',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InfoCategory),1),
- ('information',None, asn1.CHOICE ([('characterInfo',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('binaryInfo',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('externallyDefinedInfo',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('oid',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER))]),0)], seq_name = None)))
-ResultSetPlusAttributes=asn1.TYPE(asn1.IMPLICIT(214,cls=asn1.CONTEXT_FLAG),asn1.SEQUENCE ([('resultSet',None,ResultSetId,0),
- ('attributes',None,AttributeList,0)], seq_name = 'ResultSetPlusAttributes'))
-InitializeResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('protocolVersion',None,ProtocolVersion,0),
- ('options',None,Options,0),
- ('preferredMessageSize',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('exceptionalRecordSize',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('result',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('implementationId',None,asn1.TYPE(asn1.IMPLICIT(110,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('implementationName',None,asn1.TYPE(asn1.IMPLICIT(111,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('implementationVersion',None,asn1.TYPE(asn1.IMPLICIT(112,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('userInformationField',None,asn1.TYPE(asn1.EXPLICIT(11,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'InitializeResponse')
-SortElement=asn1.CHOICE ([('generic',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),SortKey)),
- ('datbaseSpecific',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('databaseName',None,DatabaseName,0),
- ('dbSort',None,SortKey,0)], seq_name = None))))])
-IntUnit=asn1.SEQUENCE ([('value',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('unitUsed',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Unit),0)], seq_name = 'IntUnit')
-ElementSetNames=asn1.CHOICE ([('genericElementSetName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('databaseSpecific',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('dbName',None,DatabaseName,0),
- ('esn',None,ElementSetName,0)], seq_name = None))))])
-SortResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('sortStatus',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',0),('partial_1',1),('failure',2)],None,None)),0),
- ('resultSetStatus',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('empty',1),('interim',2),('unchanged',3),('none',4)],None,None)),1),
- ('diagnostics',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1),
- ('resultCount',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'SortResponse')
-Term=asn1.CHOICE ([('general',None,asn1.TYPE(asn1.IMPLICIT(45,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('numeric',None,asn1.TYPE(asn1.IMPLICIT(215,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('characterString',None,asn1.TYPE(asn1.IMPLICIT(216,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('oid',None,asn1.TYPE(asn1.IMPLICIT(217,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER)),
- ('dateTime',None,asn1.TYPE(asn1.IMPLICIT(218,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime)),
- ('external',None,asn1.TYPE(asn1.IMPLICIT(219,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('integerAndUnit',None,asn1.TYPE(asn1.IMPLICIT(220,cls=asn1.CONTEXT_FLAG),IntUnit)),
- ('null',None,asn1.TYPE(asn1.IMPLICIT(221,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-InitializeRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('protocolVersion',None,ProtocolVersion,0),
- ('options',None,Options,0),
- ('preferredMessageSize',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('exceptionalRecordSize',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('idAuthentication',None,asn1.TYPE(asn1.EXPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.ANY),1),
- ('implementationId',None,asn1.TYPE(asn1.IMPLICIT(110,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('implementationName',None,asn1.TYPE(asn1.IMPLICIT(111,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('implementationVersion',None,asn1.TYPE(asn1.IMPLICIT(112,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('userInformationField',None,asn1.TYPE(asn1.EXPLICIT(11,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'InitializeRequest')
-ExtendedServicesResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('operationStatus',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('done',1),('accepted',2),('failure',3)],None,None)),0),
- ('diagnostics',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ExtendedServicesResponse')
-AccessControlResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('securityChallengeResponse',None, asn1.CHOICE ([('simpleForm',None,asn1.TYPE(asn1.IMPLICIT(38,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('externallyDefined',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))]),1),
- ('diagnostic',None,asn1.TYPE(asn1.EXPLICIT(223,cls=asn1.CONTEXT_FLAG),DiagRec),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'AccessControlResponse')
-TriggerResourceControlRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('requestedAction',None,asn1.TYPE(asn1.IMPLICIT(46,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('resourceReport',1),('resourceControl',2),('cancel',3)],None,None)),0),
- ('prefResourceReportFormat',None,asn1.TYPE(asn1.IMPLICIT(47,cls=asn1.CONTEXT_FLAG),ResourceReportId),1),
- ('resultSetWanted',None,asn1.TYPE(asn1.IMPLICIT(48,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'TriggerResourceControlRequest')
-AccessControlRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('securityChallenge',None, asn1.CHOICE ([('simpleForm',None,asn1.TYPE(asn1.IMPLICIT(37,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('externallyDefined',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))]),0),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'AccessControlRequest')
-DeleteResultSetResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('deleteOperationStatus',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),DeleteSetStatus),0),
- ('deleteListStatuses',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),ListStatuses),1),
- ('numberNotDeleted',None,asn1.TYPE(asn1.IMPLICIT(34,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('bulkStatuses',None,asn1.TYPE(asn1.IMPLICIT(35,cls=asn1.CONTEXT_FLAG),ListStatuses),1),
- ('deleteMessage',None,asn1.TYPE(asn1.IMPLICIT(36,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'DeleteResultSetResponse')
-CompSpec=asn1.SEQUENCE ([('selectAlternativeSyntax',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('generic',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Specification),1),
- ('dbSpecific',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('db',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('spec',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Specification),0)], seq_name = None))),1),
- ('recordSyntax',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1)], seq_name = 'CompSpec')
-AttributesPlusTerm=asn1.TYPE(asn1.IMPLICIT(102,cls=asn1.CONTEXT_FLAG),asn1.SEQUENCE ([('attributes',None,AttributeList,0),
- ('term',None,Term,0)], seq_name = 'AttributesPlusTerm'))
-NamePlusRecord=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),DatabaseName),1),
- ('record',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('retrievalRecord',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('surrogateDiagnostic',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),DiagRec)),
- ('startingFragment',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),FragmentSyntax)),
- ('intermediateFragment',None,asn1.TYPE(asn1.EXPLICIT(4,cls=asn1.CONTEXT_FLAG),FragmentSyntax)),
- ('finalFragment',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG),FragmentSyntax))])),0)], seq_name = 'NamePlusRecord')
-ResourceReportResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('resourceReportStatus',None,asn1.TYPE(asn1.IMPLICIT(50,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',0),('partial',1),('failure_1',2),('failure_2',3),('failure_3',4),('failure_4',5),('failure_5',6),('failure_6',7)],None,None)),0),
- ('resourceReport',None,asn1.TYPE(asn1.EXPLICIT(51,cls=asn1.CONTEXT_FLAG),ResourceReport),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ResourceReportResponse')
-Operand=asn1.CHOICE ([('attrTerm',None,AttributesPlusTerm),
- ('resultSet',None,ResultSetId),
- ('resultAttr',None,ResultSetPlusAttributes)])
-SortKeySpec=asn1.SEQUENCE ([('sortElement',None,SortElement,0),
- ('sortRelation',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('ascending',0),('descending',1),('ascendingByFrequency',3),('descendingByfrequency',4)],None,None)),0),
- ('caseSensitivity',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('caseSensitive',0),('caseInsensitive',1)],None,None)),0),
- ('missingValueAction',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('abort',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('null',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('missingValueData',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING))])),1)], seq_name = 'SortKeySpec')
-ResourceControlRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('suspendedFlag',None,asn1.TYPE(asn1.IMPLICIT(39,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('resourceReport',None,asn1.TYPE(asn1.EXPLICIT(40,cls=asn1.CONTEXT_FLAG),ResourceReport),1),
- ('partialResultsAvailable',None,asn1.TYPE(asn1.IMPLICIT(41,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('subset',1),('interim',2),('none',3)],None,None)),1),
- ('responseRequired',None,asn1.TYPE(asn1.IMPLICIT(42,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('triggeredRequestFlag',None,asn1.TYPE(asn1.IMPLICIT(43,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ResourceControlRequest')
-DuplicateDetectionRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('inputResultSetIds',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),0),
- ('outputResultSetName',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('applicablePortionOfRecord',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('duplicateDetectionCriteria',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DuplicateDetectionCriterion)),1),
- ('clustering',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('retentionCriteria',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (RetentionCriterion)),0),
- ('sortCriteria',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SortCriterion)),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'DuplicateDetectionRequest')
-ResourceControlResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('continueFlag',None,asn1.TYPE(asn1.IMPLICIT(44,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('resultSetWanted',None,asn1.TYPE(asn1.IMPLICIT(45,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ResourceControlResponse')
-DuplicateDetectionResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('status',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',0),('failure',1)],None,None)),0),
- ('resultSetCount',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('diagnostics',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'DuplicateDetectionResponse')
-PresentRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('resultSetId',None,ResultSetId,0),
- ('resultSetStartPoint',None,asn1.TYPE(asn1.IMPLICIT(30,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('numberOfRecordsRequested',None,asn1.TYPE(asn1.IMPLICIT(29,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('additionalRanges',None,asn1.TYPE(asn1.IMPLICIT(212,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Range)),1),
- ('recordComposition',None, asn1.CHOICE ([('simple',None,asn1.TYPE(asn1.EXPLICIT(19,cls=asn1.CONTEXT_FLAG),ElementSetNames)),
- ('complex',None,asn1.TYPE(asn1.IMPLICIT(209,cls=asn1.CONTEXT_FLAG),CompSpec))]),1),
- ('preferredRecordSyntax',None,asn1.TYPE(asn1.IMPLICIT(104,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('maxSegmentCount',None,asn1.TYPE(asn1.IMPLICIT(204,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxRecordSize',None,asn1.TYPE(asn1.IMPLICIT(206,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxSegmentSize',None,asn1.TYPE(asn1.IMPLICIT(207,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'PresentRequest')
-ResourceReportRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('opId',None,asn1.TYPE(asn1.IMPLICIT(210,cls=asn1.CONTEXT_FLAG),ReferenceId),1),
- ('prefResourceReportFormat',None,asn1.TYPE(asn1.IMPLICIT(49,cls=asn1.CONTEXT_FLAG),ResourceReportId),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ResourceReportRequest')
-ExtendedServicesRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('function',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('create',1),('delete',2),('modify',3)],None,None)),0),
- ('packageType',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('packageName',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('userId',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('retentionTime',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('permissions',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),Permissions),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('taskSpecificParameters',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('waitAction',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('wait',1),('waitIfPossible',2),('dontWait',3),('dontReturnPackage',4)],None,None)),0),
- ('elements',None,ElementSetName,1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ExtendedServicesRequest')
-Close=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('closeReason',None,CloseReason,0),
- ('diagnosticInformation',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('resourceReportFormat',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),ResourceReportId),1),
- ('resourceReport',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG),ResourceReport),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'Close')
-DeleteResultSetRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('deleteFunction',None,asn1.TYPE(asn1.IMPLICIT(32,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('list',0),('all',1)],None,None)),0),
- ('resultSetList',None, asn1.SEQUENCE_OF (ResultSetId),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'DeleteResultSetRequest')
-OccurrenceByAttributes=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('attributes',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),AttributeList),0),
- ('occurrences',None, asn1.CHOICE ([('global',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('byDatabase',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('db',None,DatabaseName,0),
- ('num',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('otherDbInfo',None,OtherInformation,1)], seq_name = None))))]),1),
- ('otherOccurInfo',None,OtherInformation,1)], seq_name = None))
-Records=asn1.CHOICE ([('responseRecords',None,asn1.TYPE(asn1.IMPLICIT(28,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (NamePlusRecord))),
- ('nonSurrogateDiagnostic',None,asn1.TYPE(asn1.IMPLICIT(130,cls=asn1.CONTEXT_FLAG),DefaultDiagFormat)),
- ('multipleNonSurDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(205,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)))])
-Segment=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('numberOfRecordsReturned',None,asn1.TYPE(asn1.IMPLICIT(24,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('segmentRecords',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (NamePlusRecord)),0),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'Segment')
-TermInfo=asn1.SEQUENCE ([('term',None,Term,0),
- ('displayTerm',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('suggestedAttributes',None,AttributeList,1),
- ('alternativeTerm',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributesPlusTerm)),1),
- ('globalOccurrences',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('byAttributes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),OccurrenceByAttributes),1),
- ('otherTermInfo',None,OtherInformation,1)], seq_name = 'TermInfo')
-SearchResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('resultCount',None,asn1.TYPE(asn1.IMPLICIT(23,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('numberOfRecordsReturned',None,asn1.TYPE(asn1.IMPLICIT(24,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('nextResultSetPosition',None,asn1.TYPE(asn1.IMPLICIT(25,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('searchStatus',None,asn1.TYPE(asn1.IMPLICIT(22,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('resultSetStatus',None,asn1.TYPE(asn1.IMPLICIT(26,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('subset',1),('interim',2),('none',3)],None,None)),1),
- ('presentStatus',None,PresentStatus,1),
- ('records',None,Records,1),
- ('additionalSearchInfo',None,asn1.TYPE(asn1.IMPLICIT(203,cls=asn1.CONTEXT_FLAG),OtherInformation),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'SearchResponse')
-Entry=asn1.CHOICE ([('termInfo',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),TermInfo)),
- ('surrogateDiagnostic',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),DiagRec))])
-RPNStructure=asn1.CHOICE ([('op',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Operand)),
- ('rpnRpnOp',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-ScanRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('databaseNames',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DatabaseName)),0),
- ('attributeSet',None,AttributeSetId,1),
- ('termListAndStartPoint',None,AttributesPlusTerm,0),
- ('stepSize',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('numberOfTermsRequested',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('preferredPositionInResponse',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ScanRequest')
-PresentResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('numberOfRecordsReturned',None,asn1.TYPE(asn1.IMPLICIT(24,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('nextResultSetPosition',None,asn1.TYPE(asn1.IMPLICIT(25,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('presentStatus',None,PresentStatus,0),
- ('records',None,Records,1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'PresentResponse')
-SortRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('inputResultSetNames',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),0),
- ('sortedResultSetName',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('sortSequence',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SortKeySpec)),0),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'SortRequest')
-ListEntries=asn1.SEQUENCE ([('entries',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Entry)),1),
- ('nonsurrogateDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1)], seq_name = 'ListEntries')
-RPNQuery=asn1.SEQUENCE ([('attributeSet',None,AttributeSetId,0),
- ('rpn',None,RPNStructure,0)], seq_name = 'RPNQuery')
-ScanResponse=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('stepSize',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('scanStatus',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',0),('partial_1',1),('partial_2',2),('partial_3',3),('partial_4',4),('partial_5',5),('failure',6)],None,None)),0),
- ('numberOfEntriesReturned',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('positionOfTerm',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('entries',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),ListEntries),1),
- ('attributeSet',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),AttributeSetId),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'ScanResponse')
-RpnRpnOp=asn1.SEQUENCE ([('rpn1',None,RPNStructure,0),
- ('rpn2',None,RPNStructure,0),
- ('op',None,Operator,0)], seq_name = 'RpnRpnOp')
-Query=asn1.CHOICE ([('type_0',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.ANY)),
- ('type_1',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),RPNQuery)),
- ('type_2',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('type_100',None,asn1.TYPE(asn1.EXPLICIT(100,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('type_101',None,asn1.TYPE(asn1.IMPLICIT(101,cls=asn1.CONTEXT_FLAG),RPNQuery)),
- ('type_102',None,asn1.TYPE(asn1.EXPLICIT(102,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('type_104',None,asn1.TYPE(asn1.IMPLICIT(104,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))])
-SearchRequest=asn1.SEQUENCE ([('referenceId',None,ReferenceId,1),
- ('smallSetUpperBound',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('largeSetLowerBound',None,asn1.TYPE(asn1.IMPLICIT(14,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('mediumSetPresentNumber',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('replaceIndicator',None,asn1.TYPE(asn1.IMPLICIT(16,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('resultSetName',None,asn1.TYPE(asn1.IMPLICIT(17,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('databaseNames',None,asn1.TYPE(asn1.IMPLICIT(18,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DatabaseName)),0),
- ('smallSetElementSetNames',None,asn1.TYPE(asn1.EXPLICIT(100,cls=asn1.CONTEXT_FLAG),ElementSetNames),1),
- ('mediumSetElementSetNames',None,asn1.TYPE(asn1.EXPLICIT(101,cls=asn1.CONTEXT_FLAG),ElementSetNames),1),
- ('preferredRecordSyntax',None,asn1.TYPE(asn1.IMPLICIT(104,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('query',None,asn1.TYPE(asn1.EXPLICIT(21,cls=asn1.CONTEXT_FLAG),Query),0),
- ('additionalSearchInfo',None,asn1.TYPE(asn1.IMPLICIT(203,cls=asn1.CONTEXT_FLAG),OtherInformation),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'SearchRequest')
-APDU=asn1.CHOICE ([('initRequest',None,asn1.TYPE(asn1.IMPLICIT(20,cls=asn1.CONTEXT_FLAG),InitializeRequest)),
- ('initResponse',None,asn1.TYPE(asn1.IMPLICIT(21,cls=asn1.CONTEXT_FLAG),InitializeResponse)),
- ('searchRequest',None,asn1.TYPE(asn1.IMPLICIT(22,cls=asn1.CONTEXT_FLAG),SearchRequest)),
- ('searchResponse',None,asn1.TYPE(asn1.IMPLICIT(23,cls=asn1.CONTEXT_FLAG),SearchResponse)),
- ('presentRequest',None,asn1.TYPE(asn1.IMPLICIT(24,cls=asn1.CONTEXT_FLAG),PresentRequest)),
- ('presentResponse',None,asn1.TYPE(asn1.IMPLICIT(25,cls=asn1.CONTEXT_FLAG),PresentResponse)),
- ('deleteResultSetRequest',None,asn1.TYPE(asn1.IMPLICIT(26,cls=asn1.CONTEXT_FLAG),DeleteResultSetRequest)),
- ('deleteResultSetResponse',None,asn1.TYPE(asn1.IMPLICIT(27,cls=asn1.CONTEXT_FLAG),DeleteResultSetResponse)),
- ('accessControlRequest',None,asn1.TYPE(asn1.IMPLICIT(28,cls=asn1.CONTEXT_FLAG),AccessControlRequest)),
- ('accessControlResponse',None,asn1.TYPE(asn1.IMPLICIT(29,cls=asn1.CONTEXT_FLAG),AccessControlResponse)),
- ('resourceControlRequest',None,asn1.TYPE(asn1.IMPLICIT(30,cls=asn1.CONTEXT_FLAG),ResourceControlRequest)),
- ('resourceControlResponse',None,asn1.TYPE(asn1.IMPLICIT(31,cls=asn1.CONTEXT_FLAG),ResourceControlResponse)),
- ('triggerResourceControlRequest',None,asn1.TYPE(asn1.IMPLICIT(32,cls=asn1.CONTEXT_FLAG),TriggerResourceControlRequest)),
- ('resourceReportRequest',None,asn1.TYPE(asn1.IMPLICIT(33,cls=asn1.CONTEXT_FLAG),ResourceReportRequest)),
- ('resourceReportResponse',None,asn1.TYPE(asn1.IMPLICIT(34,cls=asn1.CONTEXT_FLAG),ResourceReportResponse)),
- ('scanRequest',None,asn1.TYPE(asn1.IMPLICIT(35,cls=asn1.CONTEXT_FLAG),ScanRequest)),
- ('scanResponse',None,asn1.TYPE(asn1.IMPLICIT(36,cls=asn1.CONTEXT_FLAG),ScanResponse)),
- ('sortRequest',None,asn1.TYPE(asn1.IMPLICIT(43,cls=asn1.CONTEXT_FLAG),SortRequest)),
- ('sortResponse',None,asn1.TYPE(asn1.IMPLICIT(44,cls=asn1.CONTEXT_FLAG),SortResponse)),
- ('segmentRequest',None,asn1.TYPE(asn1.IMPLICIT(45,cls=asn1.CONTEXT_FLAG),Segment)),
- ('extendedServicesRequest',None,asn1.TYPE(asn1.IMPLICIT(46,cls=asn1.CONTEXT_FLAG),ExtendedServicesRequest)),
- ('extendedServicesResponse',None,asn1.TYPE(asn1.IMPLICIT(47,cls=asn1.CONTEXT_FLAG),ExtendedServicesResponse)),
- ('close',None,asn1.TYPE(asn1.IMPLICIT(48,cls=asn1.CONTEXT_FLAG),Close)),
- ('duplicateDetectionRequest',None,asn1.TYPE(asn1.IMPLICIT(49,cls=asn1.CONTEXT_FLAG),ExtendedServicesRequest)),
- ('duplicateDetectionResponse',None,asn1.TYPE(asn1.IMPLICIT(50,cls=asn1.CONTEXT_FLAG),DuplicateDetectionResponse))])
-RPNStructure['rpnRpnOp'] = ('rpnRpnOp', 1, RpnRpnOp)
-
-
-#module GeneralDiagnosticContainer None
-DiagnosticContainer=asn1.SEQUENCE_OF (DiagRec)
-
-
-#module Explain None
-PrimitiveDataType=asn1.INTEGER_class ([('octetString',0),('numeric',1),('date',2),('external',3),('string',4),('trueOrFalse',5),('oid',6),('intUnit',7),('empty',8),('noneOfTheAbove',100)],None,None)
-NetworkAddress=asn1.CHOICE ([('internetAddress',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('hostAddress',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('port',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('depricated',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('depricated0',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('depricated1',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('depricated2',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('depricated3',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),0)], seq_name = None))),
- ('other',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('type',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('address',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0)], seq_name = None)))])
-AttributeOccurrence=asn1.SEQUENCE ([('attributeSet',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),AttributeSetId),1),
- ('attributeType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('mustBeSupplied',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL),1),
- ('attributeValues',None, asn1.CHOICE ([('any_or_none',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('specific',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (StringOrNumeric)))]),0)], seq_name = 'AttributeOccurrence')
-ValueDescription=asn1.CHOICE ([('integer',None,asn1.INTEGER_class ([],None,None)),
- ('string',None,InternationalString),
- ('octets',None,asn1.OCTSTRING),
- ('oid',None,asn1.OBJECT_IDENTIFIER),
- ('unit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Unit)),
- ('valueAndUnit',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),IntUnit))])
-Path=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('tagType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('tagValue',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0)], seq_name = None))
-IconObject=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('bodyType',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('ianaType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('z3950type',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('otherType',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString))])),0),
- ('content',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),0)], seq_name = None))
-LanguageCode=InternationalString
-HumanString=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('language',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),LanguageCode),1),
- ('text',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0)], seq_name = None))
-RecordTag=asn1.SEQUENCE ([('qualifier',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),StringOrNumeric),1),
- ('tagValue',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0)], seq_name = 'RecordTag')
-AttributeCombination=asn1.SEQUENCE_OF (AttributeOccurrence)
-AttributeDescription=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('attributeValue',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('equivalentAttributes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (StringOrNumeric)),1)], seq_name = 'AttributeDescription')
-DatabaseList=asn1.SEQUENCE_OF (DatabaseName)
-AccessRestrictions=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('accessType',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('any',0),('search',1),('present',2),('specific_elements',3),('extended_services',4),('by_database',5)],None,None)),0),
- ('accessText',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('accessChallenges',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1)], seq_name = None))
-SearchKey=asn1.SEQUENCE ([('searchKey',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = 'SearchKey')
-ElementDataType=asn1.CHOICE ([('primitive',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),PrimitiveDataType)),
- ('structured',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-AttributeValue=asn1.SEQUENCE ([('value',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('subAttributes',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (StringOrNumeric)),1),
- ('superAttributes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (StringOrNumeric)),1),
- ('partialSupport',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL),1)], seq_name = 'AttributeValue')
-ValueRange=asn1.SEQUENCE ([('lower',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),ValueDescription),1),
- ('upper',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ValueDescription),1)], seq_name = 'ValueRange')
-CommonInfo=asn1.SEQUENCE ([('dateAdded',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('dateChanged',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('expiry',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('humanString_Language',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LanguageCode),1),
- ('otherInfo',None,OtherInformation,1)], seq_name = 'CommonInfo')
-ElementInfo=asn1.SEQUENCE ([('elementName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('elementTagPath',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Path),0),
- ('dataType',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),ElementDataType),1),
- ('required',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('repeatable',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = 'ElementInfo')
-ExtendedServicesInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('type',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('privateType',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('restrictionsApply',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('feeApply',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('available',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('retentionSupported',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('waitAction',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('waitSupported',1),('waitAlways',2),('waitNotSupported',3),('depends',4),('notSaying',5)],None,None)),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('specificExplain',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('esASN',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'ExtendedServicesInfo')
-RecordSyntaxInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('recordSyntax',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('transferSyntaxes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('asn1Module',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('abstractStructure',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (ElementInfo)),1)], seq_name = 'RecordSyntaxInfo')
-PrivateCapabilities=asn1.SEQUENCE ([('operators',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('operator',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = None))),1),
- ('searchKeys',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SearchKey)),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (HumanString)),1)], seq_name = 'PrivateCapabilities')
-Units=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('unit',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0)], seq_name = 'Units')
-Charge=asn1.SEQUENCE ([('cost',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),IntUnit),0),
- ('perWhat',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Unit),1),
- ('text',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = 'Charge')
-ProcessingInformation=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('processingContext',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('access',0),('search',1),('retrieval',2),('record_presentation',3),('record_handling',4)],None,None)),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('oid',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('instructions',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1)], seq_name = 'ProcessingInformation')
-ValueSet=asn1.CHOICE ([('range',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),ValueRange)),
- ('enumerated',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (ValueDescription)))])
-CategoryInfo=asn1.SEQUENCE ([('category',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('originalCategory',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('asn1Module',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'CategoryInfo')
-UnitType=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('unitType',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('units',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Units)),0)], seq_name = 'UnitType')
-ProximitySupport=asn1.SEQUENCE ([('anySupport',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('unitsSupported',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.CHOICE ([('known',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('private',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('unit',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('description',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = None)))]))),1)], seq_name = 'ProximitySupport')
-AttributeType=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('attributeType',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('attributeValues',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeDescription)),0)], seq_name = 'AttributeType')
-VariantValue=asn1.SEQUENCE ([('dataType',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),PrimitiveDataType),0),
- ('values',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ValueSet),1)], seq_name = 'VariantValue')
-ContactInfo=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('address',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('email',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('phone',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'ContactInfo')
-TagSetInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('tagSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('elements',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('elementname',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('nicknames',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('elementTag',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('dataType',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG),PrimitiveDataType),1),
- ('otherTagInfo',None,OtherInformation,1)], seq_name = None))),1)], seq_name = 'TagSetInfo')
-SchemaInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('schema',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('tagTypeMapping',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('tagType',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('tagSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('defaultTagType',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL),1)], seq_name = None))),1),
- ('recordStructure',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (ElementInfo)),1)], seq_name = 'SchemaInfo')
-AttributeCombinations=asn1.SEQUENCE ([('defaultAttributeSet',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),AttributeSetId),0),
- ('legalCombinations',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeCombination)),0)], seq_name = 'AttributeCombinations')
-Iso8777Capabilities=asn1.SEQUENCE ([('searchKeys',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SearchKey)),0),
- ('restrictions',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = 'Iso8777Capabilities')
-OmittedAttributeInterpretation=asn1.SEQUENCE ([('defaultValue',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),StringOrNumeric),1),
- ('defaultDescription',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = 'OmittedAttributeInterpretation')
-VariantType=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('variantType',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('variantValue',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),VariantValue),1)], seq_name = 'VariantType')
-AttributeTypeDetails=asn1.SEQUENCE ([('attributeType',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('defaultIfOmitted',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),OmittedAttributeInterpretation),1),
- ('attributeValues',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeValue)),1)], seq_name = 'AttributeTypeDetails')
-Costs=asn1.SEQUENCE ([('connectCharge',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),Charge),1),
- ('connectTime',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Charge),1),
- ('displayCharge',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Charge),1),
- ('searchCharge',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),Charge),1),
- ('subscriptCharge',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),Charge),1),
- ('otherCharges',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('forWhat',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),0),
- ('charge',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Charge),0)], seq_name = None))),1)], seq_name = 'Costs')
-AttributeSetInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('attributeSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),AttributeSetId),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('attributes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeType)),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),HumanString),1)], seq_name = 'AttributeSetInfo')
-TermListInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('termLists',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('title',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('searchCost',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('optimized',0),('normal',1),('expensive',2),('filter',3)],None,None)),1),
- ('scanable',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('broader',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('narrower',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1)], seq_name = None))),0)], seq_name = 'TermListInfo')
-CategoryList=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('categories',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (CategoryInfo)),0)], seq_name = 'CategoryList')
-RpnCapabilities=asn1.SEQUENCE ([('operators',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([('and',0),('or',1),('and_not',2),('prox',3)],None,None))),1),
- ('resultSetAsOperandSupported',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('restrictionOperandSupported',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('proximity',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),ProximitySupport),1)], seq_name = 'RpnCapabilities')
-VariantClass=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('variantClass',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('variantTypes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (VariantType)),0)], seq_name = 'VariantClass')
-PerElementDetails=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('recordTag',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),RecordTag),1),
- ('schemaTags',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Path)),1),
- ('maxSize',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('minSize',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('avgSize',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('fixedSize',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('repeatable',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('required',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('contents',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('billingInfo',None,asn1.TYPE(asn1.IMPLICIT(14,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('restrictions',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('alternateNames',None,asn1.TYPE(asn1.IMPLICIT(16,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('genericNames',None,asn1.TYPE(asn1.IMPLICIT(17,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('searchAccess',None,asn1.TYPE(asn1.IMPLICIT(18,cls=asn1.CONTEXT_FLAG),AttributeCombinations),1)], seq_name = 'PerElementDetails')
-VariantSetInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('variantSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('variants',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (VariantClass)),1)], seq_name = 'VariantSetInfo')
-AttributeSetDetails=asn1.SEQUENCE ([('attributeSet',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),AttributeSetId),0),
- ('attributesByType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeTypeDetails)),0)], seq_name = 'AttributeSetDetails')
-ElementSetDetails=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('elementSetName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),ElementSetName),0),
- ('recordSyntax',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('schema',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('detailsPerElement',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (PerElementDetails)),1)], seq_name = 'ElementSetDetails')
-UnitInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('unitSystem',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('units',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (UnitType)),1)], seq_name = 'UnitInfo')
-QueryTypeDetails=asn1.CHOICE ([('private',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),PrivateCapabilities)),
- ('rpn',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),RpnCapabilities)),
- ('iso8777',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso8777Capabilities)),
- ('z39_58',None,asn1.TYPE(asn1.IMPLICIT(100,cls=asn1.CONTEXT_FLAG),HumanString)),
- ('erpn',None,asn1.TYPE(asn1.IMPLICIT(101,cls=asn1.CONTEXT_FLAG),RpnCapabilities)),
- ('rankedList',None,asn1.TYPE(asn1.IMPLICIT(102,cls=asn1.CONTEXT_FLAG),HumanString))])
-TermListDetails=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('termListName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('attributes',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),AttributeCombinations),1),
- ('scanInfo',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('maxStepSize',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('collatingSequence',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('increasing',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = None)),1),
- ('estNumberTerms',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('sampleTerms',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Term)),1)], seq_name = 'TermListDetails')
-SortKeyDetails=asn1.SEQUENCE ([('description',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('elementSpecifications',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Specification)),1),
- ('attributeSpecifications',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),AttributeCombinations),1),
- ('sortType',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('character',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('numeric',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('structured',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HumanString))])),1),
- ('caseSensitivity',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('always',0),('never',1),('default_yes',2),('default_no',3)],None,None)),1)], seq_name = 'SortKeyDetails')
-AccessInfo=asn1.SEQUENCE ([('queryTypesSupported',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (QueryTypeDetails)),1),
- ('diagnosticsSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1),
- ('attributeSetIds',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeSetId)),1),
- ('schemas',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1),
- ('recordSyntaxes',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1),
- ('resourceChallenges',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1),
- ('restrictedAccess',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),AccessRestrictions),1),
- ('costInfo',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),Costs),1),
- ('variantSets',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER)),1),
- ('elementSetNames',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (ElementSetName)),1),
- ('unitSystems',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1)], seq_name = 'AccessInfo')
-TargetInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('recent_news',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('icon',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),IconObject),1),
- ('namedResultSets',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('multipleDBsearch',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('maxResultSets',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxResultSize',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxTerms',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('timeoutInterval',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('welcomeMessage',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('contactInfo',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),ContactInfo),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('nicknames',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('usage_restrictions',None,asn1.TYPE(asn1.IMPLICIT(14,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('paymentAddr',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('hours',None,asn1.TYPE(asn1.IMPLICIT(16,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('dbCombinations',None,asn1.TYPE(asn1.IMPLICIT(17,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DatabaseList)),1),
- ('addresses',None,asn1.TYPE(asn1.IMPLICIT(18,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (NetworkAddress)),1),
- ('languages',None,asn1.TYPE(asn1.IMPLICIT(101,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('characterSets',None,asn1.TYPE(asn1.IMPLICIT(102,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('commonAccessInfo',None,asn1.TYPE(asn1.IMPLICIT(19,cls=asn1.CONTEXT_FLAG),AccessInfo),1)], seq_name = 'TargetInfo')
-AttributeDetails=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('attributesBySet',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (AttributeSetDetails)),1),
- ('attributeCombinations',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),AttributeCombinations),1)], seq_name = 'AttributeDetails')
-SortDetails=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('sortKeys',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SortKeyDetails)),1)], seq_name = 'SortDetails')
-RetrievalRecordDetails=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('schema',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('recordSyntax',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('detailsPerElement',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (PerElementDetails)),1)], seq_name = 'RetrievalRecordDetails')
-DatabaseInfo=asn1.SEQUENCE ([('commonInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),CommonInfo),1),
- ('name',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseName),0),
- ('explainDatabase',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL),1),
- ('nicknames',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DatabaseName)),1),
- ('icon',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),IconObject),1),
- ('user_fee',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('available',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('titleString',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('keywords',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (HumanString)),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('associatedDbs',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),DatabaseList),1),
- ('subDbs',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),DatabaseList),1),
- ('disclaimers',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('news',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('recordCount',None,asn1.TYPE(asn1.EXPLICIT(14,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('actualNumber',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('approxNumber',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)))])),1),
- ('defaultOrder',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('avRecordSize',None,asn1.TYPE(asn1.IMPLICIT(16,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxRecordSize',None,asn1.TYPE(asn1.IMPLICIT(17,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('hours',None,asn1.TYPE(asn1.IMPLICIT(18,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('bestTime',None,asn1.TYPE(asn1.IMPLICIT(19,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('lastUpdate',None,asn1.TYPE(asn1.IMPLICIT(20,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('updateInterval',None,asn1.TYPE(asn1.IMPLICIT(21,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('coverage',None,asn1.TYPE(asn1.IMPLICIT(22,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('proprietary',None,asn1.TYPE(asn1.IMPLICIT(23,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('copyrightText',None,asn1.TYPE(asn1.IMPLICIT(24,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('copyrightNotice',None,asn1.TYPE(asn1.IMPLICIT(25,cls=asn1.CONTEXT_FLAG),HumanString),1),
- ('producerContactInfo',None,asn1.TYPE(asn1.IMPLICIT(26,cls=asn1.CONTEXT_FLAG),ContactInfo),1),
- ('supplierContactInfo',None,asn1.TYPE(asn1.IMPLICIT(27,cls=asn1.CONTEXT_FLAG),ContactInfo),1),
- ('submissionContactInfo',None,asn1.TYPE(asn1.IMPLICIT(28,cls=asn1.CONTEXT_FLAG),ContactInfo),1),
- ('accessInfo',None,asn1.TYPE(asn1.IMPLICIT(29,cls=asn1.CONTEXT_FLAG),AccessInfo),1)], seq_name = 'DatabaseInfo')
-Explain_Record=asn1.CHOICE ([('targetInfo',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),TargetInfo)),
- ('databaseInfo',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DatabaseInfo)),
- ('schemaInfo',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),SchemaInfo)),
- ('tagSetInfo',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),TagSetInfo)),
- ('recordSyntaxInfo',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),RecordSyntaxInfo)),
- ('attributeSetInfo',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),AttributeSetInfo)),
- ('termListInfo',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),TermListInfo)),
- ('extendedServicesInfo',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),ExtendedServicesInfo)),
- ('attributeDetails',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),AttributeDetails)),
- ('termListDetails',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),TermListDetails)),
- ('elementSetDetails',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),ElementSetDetails)),
- ('retrievalRecordDetails',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),RetrievalRecordDetails)),
- ('sortDetails',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),SortDetails)),
- ('processing',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG),ProcessingInformation)),
- ('variants',None,asn1.TYPE(asn1.IMPLICIT(14,cls=asn1.CONTEXT_FLAG),VariantSetInfo)),
- ('units',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG),UnitInfo)),
- ('categoryList',None,asn1.TYPE(asn1.IMPLICIT(100,cls=asn1.CONTEXT_FLAG),CategoryList))])
-ElementDataType['structured'] = ('structured', 1, asn1.SEQUENCE_OF(ElementInfo))
-
-
-#module RecordSyntax_SUTRS None
-SutrsRecord=InternationalString
-
-
-#module RecordSyntax_generic None
-Usage=asn1.SEQUENCE ([('type',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('redistributable',1),('restricted',2),('licensePointer',3)],None,None)),0),
- ('restriction',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'Usage')
-TagPath=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('tagType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('tagValue',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('tagOccurrence',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = None))
-ElementData=asn1.CHOICE ([('octets',None,asn1.OCTSTRING),
- ('numeric',None,asn1.INTEGER_class ([],None,None)),
- ('date',None,asn1.GeneralizedTime),
- ('ext',None,asn1.EXTERNAL),
- ('string',None,InternationalString),
- ('trueOrFalse',None,asn1.BOOLEAN),
- ('oid',None,asn1.OBJECT_IDENTIFIER),
- ('intUnit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),IntUnit)),
- ('elementNotThere',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('elementEmpty',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('noDataRequested',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('diagnostic',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('subtree',None,asn1.TYPE(asn1.EXPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.NULL)))])
-Variant=asn1.SEQUENCE ([('globalVariantSetId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('triples',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('variantSetId',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('class',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('type',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('value',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('int',None,asn1.INTEGER_class ([],None,None)),
- ('str',None,InternationalString),
- ('oct',None,asn1.OCTSTRING),
- ('oid',None,asn1.OBJECT_IDENTIFIER),
- ('bool',None,asn1.BOOLEAN),
- ('nul',None,asn1.NULL),
- ('unit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Unit)),
- ('valueAndUnit',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),IntUnit))])),0)], seq_name = None))),0)], seq_name = 'Variant')
-Order=asn1.SEQUENCE ([('ascending',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('order',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = 'Order')
-HitVector=asn1.SEQUENCE ([('satisfier',None,Term,1),
- ('offsetIntoElement',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('length',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('hitRank',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('serverToken',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1)], seq_name = 'HitVector')
-ElementMetaData=asn1.SEQUENCE ([('seriesOrder',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Order),1),
- ('usageRight',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Usage),1),
- ('hits',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (HitVector)),1),
- ('displayName',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('supportedVariants',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Variant)),1),
- ('message',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('elementDescriptor',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1),
- ('surrogateFor',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),TagPath),1),
- ('surrogateElement',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),TagPath),1),
- ('other',None,asn1.TYPE(asn1.IMPLICIT(99,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1)], seq_name = 'ElementMetaData')
-TaggedElement=asn1.SEQUENCE ([('tagType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('tagValue',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('tagOccurrence',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('content',None,asn1.TYPE(asn1.EXPLICIT(4,cls=asn1.CONTEXT_FLAG),ElementData),0),
- ('metaData',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),ElementMetaData),1),
- ('appliedVariant',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),Variant),1)], seq_name = 'TaggedElement')
-GenericRecord=asn1.SEQUENCE_OF (TaggedElement)
-ElementData['subtree'] = ('subtree', asn1.EXPLICIT(6), asn1.SEQUENCE_OF(TaggedElement))
-
-
-#module RecordSyntax_ESTaskPackage None
-TaskPackage=asn1.SEQUENCE ([('packageType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('packageName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('userId',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('retentionTime',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('permissions',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),Permissions),1),
- ('description',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('serverReference',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1),
- ('creationDateTime',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('taskStatus',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('pending',0),('active',1),('complete',2),('aborted',3)],None,None)),0),
- ('packageDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1),
- ('taskSpecificParameters',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),0)], seq_name = 'TaskPackage')
-
-
-#module ResourceReport_Format_Resource_2 None
-Estimate=asn1.SEQUENCE ([('type',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('value',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),IntUnit),0)], seq_name = 'Estimate')
-ResourceReport_2=asn1.SEQUENCE ([('estimates',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Estimate)),1),
- ('message',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'ResourceReport_2')
-
-
-#module AccessControlFormat_prompt_1 None
-PromptId=asn1.CHOICE ([('enummeratedPrompt',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('type',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('groupId',0),('userId',1),('password',2),('newPassword',3),('copyright',4),('sessionId',5)],None,None)),0),
- ('suggestedString',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None))),
- ('nonEnumeratedPrompt',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString))])
-Encryption=asn1.SEQUENCE ([('cryptType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1),
- ('credential',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1),
- ('data',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),0)], seq_name = 'Encryption')
-Challenge=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('promptId',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),PromptId),0),
- ('defaultResponse',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('promptInfo',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('character',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('encrypted',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Encryption))])),1),
- ('regExpr',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('responseRequired',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.NULL),1),
- ('allowedValues',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('shouldSave',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.NULL),1),
- ('dataType',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('integer',1),('date',2),('float',3),('alphaNumeric',4),('url_urn',5),('boolean',6)],None,None)),1),
- ('diagnostic',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1)], seq_name = None))
-Response=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('promptId',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),PromptId),0),
- ('promptResponse',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('string',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('accept',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN)),
- ('acknowledge',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('diagnostic',None,asn1.TYPE(asn1.EXPLICIT(4,cls=asn1.CONTEXT_FLAG),DiagRec)),
- ('encrypted',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),Encryption))])),0)], seq_name = None))
-PromptObject=asn1.CHOICE ([('challenge',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Challenge)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Response))])
-
-
-#module AccessControlFormat_des_1 None
-DRNType=asn1.SEQUENCE ([('userId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1),
- ('salt',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),1),
- ('randomNumber',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),0)], seq_name = 'DRNType')
-DES_RN_Object=asn1.CHOICE ([('challenge',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),DRNType)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),DRNType))])
-
-
-#module AccessControlFormat_krb_1 None
-KRBRequest=asn1.SEQUENCE ([('service',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('instance',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('realm',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'KRBRequest')
-KRBResponse=asn1.SEQUENCE ([('userid',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('ticket',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),0)], seq_name = 'KRBResponse')
-KRBObject=asn1.CHOICE ([('challenge',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),KRBRequest)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),KRBResponse))])
-
-
-#module ESFormat_PersistentResultSet None
-ClientPartNotToKeep_prs=asn1.SEQUENCE ([('clientSuppliedResultSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('replaceOrAppend',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('replace',1),('append',2)],None,None)),1)], seq_name = 'ClientPartNotToKeep_prs')
-ServerPart_prs=asn1.SEQUENCE ([('serverSuppliedResultSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('numberOfRecords',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'ServerPart_prs')
-PersistentResultSet=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL),0),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ClientPartNotToKeep_prs),1)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL),0),
- ('serverPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ServerPart_prs),1)], seq_name = None)))])
-
-
-#module ESFormat_PersistentQuery None
-ClientPartToKeep_pq=asn1.SEQUENCE ([('dbNames',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('additionalSearchInfo',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),OtherInformation),1)], seq_name = 'ClientPartToKeep_pq')
-ServerPart_pq=Query
-ClientPartNotToKeep_pq=asn1.CHOICE ([('package',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('query',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),Query))])
-PersistentQuery=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_pq),1),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ClientPartNotToKeep_pq),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_pq),1),
- ('serverPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ServerPart_pq),0)], seq_name = None)))])
-
-
-#module ESFormat_ExportSpecification None
-Destination=asn1.CHOICE ([('phoneNumber',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('faxNumber',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('x400address',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('emailAddress',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('pagerNumber',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('ftpAddress',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('ftamAddress',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('printerAddress',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('other',None,asn1.TYPE(asn1.IMPLICIT(100,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('vehicle',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('destination',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0)], seq_name = None)))])
-ClientPartToKeep_es=asn1.SEQUENCE ([('composition',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),CompSpec),0),
- ('exportDestination',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),Destination),0)], seq_name = 'ClientPartToKeep_es')
-ExportSpecification=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_es),0),
- ('notToKeep',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_es),0),
- ('serverPart',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL),0)], seq_name = None)))])
-
-
-#module ESFormat_PeriodicQuerySchedule None
-Period=asn1.CHOICE ([('unit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),IntUnit)),
- ('businessDaily',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('continuous',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('other',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString))])
-ClientPartToKeep_pqs=asn1.SEQUENCE ([('activeFlag',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('databaseNames',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('resultSetDisposition',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('replace',1),('append',2),('createNew',3)],None,None)),1),
- ('alertDestination',None,asn1.TYPE(asn1.EXPLICIT(4,cls=asn1.CONTEXT_FLAG),Destination),1),
- ('exportParameters',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('packageName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('exportPackage',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ExportSpecification))])),1)], seq_name = 'ClientPartToKeep_pqs')
-ServerPart_pqs=asn1.SEQUENCE ([('databaseNames',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('actualQuery',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Query),0),
- ('serverStatedPeriod',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),Period),0),
- ('expiration',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('resultSetPackage',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('lastQueryTime',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('lastResultNumber',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('numberSinceModify',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('additionalSearchInfo',None,asn1.TYPE(asn1.EXPLICIT(8,cls=asn1.CONTEXT_FLAG),OtherInformation),1)], seq_name = 'ServerPart_pqs')
-ClientPartNotToKeep_pqs=asn1.SEQUENCE ([('databaseNames',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('querySpec',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('actualQuery',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Query)),
- ('packageName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString))])),1),
- ('clientSuggestedPeriod',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),Period),1),
- ('expiration',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime),1),
- ('resultSetPackage',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('additionalSearchInfo',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG),OtherInformation),1)], seq_name = 'ClientPartNotToKeep_pqs')
-PeriodicQuerySchedule=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_pqs),0),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ClientPartNotToKeep_pqs),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_pqs),0),
- ('serverPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ServerPart_pqs),0)], seq_name = None)))])
-
-
-#module ESFormat_ItemOrder None
-ClientPartNotToKeep_io=asn1.SEQUENCE ([('resultSetItem',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('resultSetId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('item',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None)),1),
- ('itemRequest',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1)], seq_name = 'ClientPartNotToKeep_io')
-ServerPart_io=asn1.SEQUENCE ([('itemRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('statusOrErrorReport',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('auxiliaryStatus',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('notReceived',1),('loanQueue',2),('forwarded',3),('unfilledCopyright',4),('filledCopyright',5)],None,None)),1)], seq_name = 'ServerPart_io')
-CreditCardInfo=asn1.SEQUENCE ([('nameOnCard',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('expirationDate',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('cardNumber',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),0)], seq_name = 'CreditCardInfo')
-ClientPartToKeep_io=asn1.SEQUENCE ([('supplDescription',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('contact',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('phone',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('email',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None)),1),
- ('addlBilling',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('paymentMethod',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('billInvoice',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('prepay',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('depositAccount',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('creditCard',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),CreditCardInfo)),
- ('cardInfoPreviouslySupplied',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('privateKnown',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('privateNotKnown',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))])),0),
- ('customerReference',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('customerPONumber',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None)),1)], seq_name = 'ClientPartToKeep_io')
-ItemOrder=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_io),1),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ClientPartNotToKeep_io),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_io),1),
- ('serverPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ServerPart_io),0)], seq_name = None)))])
-
-
-#module ESFormat_Update None
-ClientPartToKeep_upd=asn1.SEQUENCE ([('action',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('recordInsert',1),('recordReplace',2),('recordDelete',3),('elementUpdate',4)],None,None)),0),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('schema',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('elementSetName',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'ClientPartToKeep_upd')
-CorrelationInfo=asn1.SEQUENCE ([('note',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('id',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'CorrelationInfo')
-SuppliedRecords=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('recordId',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('number',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('string',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('opaque',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING))])),1),
- ('supplementalId',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('timeStamp',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime)),
- ('versionNumber',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('previousVersion',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))])),1),
- ('correlationInfo',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),CorrelationInfo),1),
- ('record',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),0)], seq_name = None))
-ClientPartNotToKeep_upd=SuppliedRecords
-TaskPackageRecordStructure=asn1.SEQUENCE ([('recordOrSurDiag',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('record',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('diagnostic',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),DiagRec))])),1),
- ('correlationInfo',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),CorrelationInfo),1),
- ('recordStatus',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',1),('queued',2),('inProcess',3),('failure',4)],None,None)),0)], seq_name = 'TaskPackageRecordStructure')
-ServerPart_upd=asn1.SEQUENCE ([('updateStatus',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',1),('partial',2),('failure',3)],None,None)),0),
- ('globalDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1),
- ('taskPackageRecords',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (TaskPackageRecordStructure)),0)], seq_name = 'ServerPart_upd')
-Update=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_upd),0),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ClientPartNotToKeep_upd),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_upd),0),
- ('serverPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ServerPart_upd),0)], seq_name = None)))])
-
-
-#module ESFormat_ExportInvocation None
-ClientPartToKeep_ei=asn1.SEQUENCE ([('exportSpec',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('packageName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('packageSpec',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ExportSpecification))])),0),
- ('numberOfCopies',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = 'ClientPartToKeep_ei')
-ClientPartNotToKeep_ei=asn1.SEQUENCE ([('resultSetId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('records',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('all',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('ranges',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('start',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('count',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = None))))])),0)], seq_name = 'ClientPartNotToKeep_ei')
-ServerPart_ei=asn1.SEQUENCE ([('estimatedQuantity',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('quantitySoFar',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('estimatedCost',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('costSoFar',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),IntUnit),1)], seq_name = 'ServerPart_ei')
-ExportInvocation=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_ei),0),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ClientPartNotToKeep_ei),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('clientPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),ClientPartToKeep_ei),0),
- ('serverPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),ServerPart_ei),1)], seq_name = None)))])
-
-
-#module UserInfoFormat_searchResult_1 None
-ResultsByDB=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('databases',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('all',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('list',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DatabaseName)))])),0),
- ('count',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('resultSetName',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None))
-QueryExpression=asn1.CHOICE ([('term',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('queryTerm',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Term),0),
- ('termComment',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None))),
- ('query',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),Query))])
-SearchInfoReport=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('subqueryId',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('fullQuery',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('subqueryExpression',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),QueryExpression),1),
- ('subqueryInterpretation',None,asn1.TYPE(asn1.EXPLICIT(4,cls=asn1.CONTEXT_FLAG),QueryExpression),1),
- ('subqueryRecommendation',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG),QueryExpression),1),
- ('subqueryCount',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('subqueryWeight',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),IntUnit),1),
- ('resultsByDB',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),ResultsByDB),1)], seq_name = None))
-
-
-#module UserInfoFormat_userInfo_1 None
-UserInfo_1=OtherInformation
-
-
-#module ESpec_2 None
-Occurrences=asn1.CHOICE ([('all',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('last',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('values',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('start',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('howMany',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = None)))])
-Espec_2_TagPath=asn1.SEQUENCE_OF (asn1.CHOICE ([('specificTag',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('schemaId',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('tagType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('tagValue',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),StringOrNumeric),0),
- ('occurrence',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),Occurrences),1)], seq_name = None))),
- ('wildThing',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),Occurrences)),
- ('wildPath',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL))]))
-SimpleElement=asn1.SEQUENCE ([('path',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Espec_2_TagPath),0),
- ('variantRequest',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Variant),1)], seq_name = 'SimpleElement')
-ElementRequest=asn1.CHOICE ([('simpleElement',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SimpleElement)),
- ('compositeElement',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('elementList',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('primitives',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString))),
- ('specs',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SimpleElement)))])),0),
- ('deliveryTag',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Espec_2_TagPath),0),
- ('variantRequest',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),Variant),1)], seq_name = None)))])
-Espec_2=asn1.SEQUENCE ([('elementSetNames',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),1),
- ('defaultVariantSetId',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('defaultVariantRequest',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),Variant),1),
- ('defaultTagType',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('elements',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (ElementRequest)),1)], seq_name = 'Espec_2')
-
-
-#module ESpec_q None
-Espec_q_RPNStructure=asn1.CHOICE ([('op',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),AttributesPlusTerm)),
- ('rpnRpnOp',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-Espec_q_RpnRpnOp=asn1.SEQUENCE ([('rpn1',None,Espec_q_RPNStructure,0),
- ('rpn2',None,Espec_q_RPNStructure,0),
- ('op',None,asn1.TYPE(asn1.EXPLICIT(46,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('and',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('or',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('and_not',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),0)], seq_name = 'Espec_q_RpnRpnOp')
-Espec_q_AttributesPlusTerm=asn1.TYPE(asn1.IMPLICIT(102,cls=asn1.CONTEXT_FLAG),asn1.SEQUENCE ([('attributes',None,AttributeList,0),
- ('term',None,Term,0)], seq_name = 'Espec_q_AttributesPlusTerm'))
-ValueRestrictor=asn1.SEQUENCE ([('attributeSetId',None,asn1.OBJECT_IDENTIFIER,0),
- ('nodeSelectionCriteria',None,Espec_q_RPNStructure,0)], seq_name = 'ValueRestrictor')
-Espec_q=asn1.SEQUENCE ([('valueRestrictor',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),ValueRestrictor),0),
- ('elementSelector',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1)], seq_name = 'Espec_q')
-Espec_q_RPNStructure['rpnRpnOp'] = ('rpnRpnOp', 1, Espec_q_RpnRpnOp)
-
-
-#!/usr/bin/env python
-# Auto-generated from auth_file_info.asn at Wed, 02 Jun 2004 15:30:48 +0000
-from PyZ3950 import asn1
-#module UserInfoFormat_authorityFileInfo None
-AuthorityFileInfo=asn1.SEQUENCE ([('name',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),HumanString),0),
- ('database',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('exclusive',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL),1)], seq_name = 'AuthorityFileInfo')
-
-
-#!/usr/bin/env python
-# Auto-generated from charset_1.asn at Wed, 02 Jun 2004 15:30:48 +0000
-from PyZ3950 import asn1
-#module UserInfoFormat_charSetandLanguageNegotiation_1 None
-Environment=asn1.CHOICE ([('sevenBit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('eightBit',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-Iso10646=asn1.SEQUENCE ([('collections',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('encodingLevel',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0)], seq_name = 'Iso10646')
-LeftAndRight=asn1.SEQUENCE ([('gLeft',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('g0',0),('g1',1),('g2',2),('g3',3)],None,None)),0),
- ('gRight',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('g1',1),('g2',2),('g3',3)],None,None)),0)], seq_name = 'LeftAndRight')
-LanguageCode1=asn1.GeneralString
-PrivateCharacterSet=asn1.CHOICE ([('viaOid',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER))),
- ('externallySpecified',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('previouslyAgreedUpon',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-InitialSet=asn1.SEQUENCE ([('g0',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('g1',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('g2',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('g3',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('c0',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('c1',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = 'InitialSet')
-Iso2022=asn1.CHOICE ([('originProposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('proposedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment),1),
- ('proposedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),0),
- ('proposedInitialSets',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InitialSet)),0),
- ('proposedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight),0)], seq_name = None))),
- ('targetResponse',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('selectedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment),0),
- ('selectedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),0),
- ('selectedinitialSet',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InitialSet),0),
- ('selectedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight),0)], seq_name = None)))])
-OriginProposal=asn1.SEQUENCE ([('proposedCharSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet))]))),1),
- ('proposedlanguages',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (LanguageCode1)),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'OriginProposal')
-TargetResponse=asn1.SEQUENCE ([('selectedCharSets',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet)),
- ('none',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),1),
- ('selectedLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),LanguageCode1),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'TargetResponse')
-CharSetandLanguageNegotiation=asn1.CHOICE ([('proposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),OriginProposal)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),TargetResponse))])
-
-
-#!/usr/bin/env python
-# Auto-generated from charset_2.asn at Wed, 02 Jun 2004 15:30:48 +0000
-from PyZ3950 import asn1
-#module NegotiationRecordDefinition_charSetandLanguageNegotiation_2 None
-InitialSet_2=asn1.SEQUENCE ([('g0',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('g1',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('g2',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('g3',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('c0',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('c1',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'InitialSet_2')
-LanguageCode2=asn1.GeneralString
-LeftAndRight_2=asn1.SEQUENCE ([('gLeft',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('g0',0),('g1',1),('g2',2),('g3',3)],None,None)),0),
- ('gRight',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('g1',1),('g2',2),('g3',3)],None,None)),1)], seq_name = 'LeftAndRight_2')
-PrivateCharacterSet2=asn1.CHOICE ([('viaOid',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER))),
- ('externallySpecified',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('previouslyAgreedUpon',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-Iso10646_2=asn1.SEQUENCE ([('collections',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0),
- ('encodingLevel',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0)], seq_name = 'Iso10646_2')
-Environment_2=asn1.CHOICE ([('sevenBit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('eightBit',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-Iso2022_2=asn1.CHOICE ([('originProposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('proposedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment_2),1),
- ('proposedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),0),
- ('proposedInitialSets',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InitialSet_2)),0),
- ('proposedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight_2),0)], seq_name = None))),
- ('targetResponse',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('selectedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment_2),0),
- ('selectedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),0),
- ('selectedinitialSet',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InitialSet_2),0),
- ('selectedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight_2),0)], seq_name = None)))])
-TargetResponse2=asn1.SEQUENCE ([('selectedCharSets',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022_2)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646_2)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet2)),
- ('none',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),1),
- ('selectedLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),LanguageCode2),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'TargetResponse2')
-OriginProposal2=asn1.SEQUENCE ([('proposedCharSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022_2)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646_2)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet2))]))),1),
- ('proposedlanguages',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (LanguageCode2)),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'OriginProposal2')
-CharSetandLanguageNegotiation2=asn1.CHOICE ([('proposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),OriginProposal2)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),TargetResponse2))])
-
-
-#!/usr/bin/env python
-# Auto-generated from charset_3.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module NegotiationRecordDefinition_charSetandLanguageNegotiation_3 None
-Environment_3=asn1.CHOICE ([('sevenBit',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('eightBit',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-LeftAndRight_3=asn1.SEQUENCE ([('gLeft',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('g0',0),('g1',1),('g2',2),('g3',3)],None,None)),0),
- ('gRight',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('g1',1),('g2',2),('g3',3)],None,None)),1)], seq_name = 'LeftAndRight_3')
-InitialSet_3=asn1.SEQUENCE ([('g0',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('g1',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('g2',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('g3',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('c0',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('c1',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'InitialSet_3')
-LanguageCode3=asn1.GeneralString
-PrivateCharacterSet_3=asn1.CHOICE ([('viaOid',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.OBJECT_IDENTIFIER))),
- ('externallySpecified',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('previouslyAgreedUpon',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL))])
-Iso2022_3=asn1.CHOICE ([('originProposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('proposedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment_3),1),
- ('proposedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),0),
- ('proposedInitialSets',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InitialSet_3)),0),
- ('proposedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight_3),0)], seq_name = None))),
- ('targetResponse',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('selectedEnvironment',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),Environment_3),0),
- ('selectedSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.INTEGER_class ([],None,None))),0),
- ('selectedinitialSet',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InitialSet_3),0),
- ('selectedLeftAndRight',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),LeftAndRight_3),0)], seq_name = None)))])
-Iso10646_3=asn1.SEQUENCE ([('collections',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('encodingLevel',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),0)], seq_name = 'Iso10646_3')
-TargetResponse_3=asn1.SEQUENCE ([('selectedCharSets',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022_3)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646_3)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet_3)),
- ('none',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),1),
- ('selectedLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),LanguageCode3),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'TargetResponse_3')
-OriginProposal_3=asn1.SEQUENCE ([('proposedCharSets',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022_3)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646_3)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet_3))]))),1),
- ('proposedlanguages',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (LanguageCode3)),1),
- ('recordsInSelectedCharSets',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'OriginProposal_3')
-CharSetandLanguageNegotiation_3=asn1.CHOICE ([('proposal',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),OriginProposal_3)),
- ('response',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),TargetResponse_3))])
-
-
-#!/usr/bin/env python
-# Auto-generated from edit_replace_qual.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module ERAQ None
-EditReplaceActionQualifier=asn1.SEQUENCE ([('persistentResultSetPackageName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('numberOfRecords',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('creationDateTime',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),0),
- ('reviewCode',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('reviewNote',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('changeDataInfo',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('fieldIdentifier',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('oldValue',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('oldValueTruncationAttribute',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('conditionalField',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('conditionalValue',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('conditionalTruncationAttribute',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('newValue',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('editReplaceType',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('fieldInsert',0),('fieldDelete',1),('fieldReplace',2),('subfieldInsert',3),('subfieldDelete',4),('subfieldReplace',5),('subfieldMerge',6),('indicatorChange',7),('dataStringChange',8)],None,None)),0),
- ('case',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = None))),0)], seq_name = 'EditReplaceActionQualifier')
-
-
-#!/usr/bin/env python
-# Auto-generated from frag.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module FragmentSyntax None
-Fragment=asn1.SEQUENCE ([('realSyntax',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('remainingOctets',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('fragment',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING),0)], seq_name = 'Fragment')
-
-
-#!/usr/bin/env python
-# Auto-generated from ins_qualifier.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module RIAQ None
-RecordInsertActionQualifier=asn1.SEQUENCE ([('idsOrCode',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('nonDupRecordIds',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString))),
- ('recordReviewCode',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString))])),0),
- ('recordReviewNote',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'RecordInsertActionQualifier')
-
-
-#!/usr/bin/env python
-# Auto-generated from multiple_search_term_1.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module UserInfoFormat_multipleSearchTerms_1 None
-MultipleSearchTerms_1=asn1.SEQUENCE_OF (Term)
-
-
-#!/usr/bin/env python
-# Auto-generated from multiple_search_term_2.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module UserInfoFormat_multipleSearchTerms_2 None
-MultipleSearchTerms_2=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('term',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),Term),0),
- ('flag',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = None))
-
-
-#!/usr/bin/env python
-# Auto-generated from negot_es_size.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module NegotiationRecordDefinition_NegotiateEsSizes None
-NegotiateEsSizes=asn1.SEQUENCE ([('maxMsgSize',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxTaskPackageSize',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('maxRecordSize',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'NegotiateEsSizes')
-
-
-#!/usr/bin/env python
-# Auto-generated from oclc.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module UserInfoFormat_OCLC_Info None
-DBName=asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.VisibleString)
-OCLC_UserInformation=asn1.SEQUENCE ([('motd',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.VisibleString),1),
- ('dblist',None, asn1.SEQUENCE_OF (DBName),1),
- ('failReason',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('text',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.VisibleString),1)], seq_name = 'OCLC_UserInformation')
-
-
-#!/usr/bin/env python
-# Auto-generated from opac.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module RecordSyntax_opac None
-Volume=asn1.SEQUENCE ([('enumeration',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('chronology',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('enumAndChron',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'Volume')
-CircRecord=asn1.SEQUENCE ([('availableNow',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('availablityDate',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('availableThru',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('restrictions',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('itemId',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('renewable',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('onHold',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('enumAndChron',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('midspine',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('temporaryLocation',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'CircRecord')
-HoldingsAndCircData=asn1.SEQUENCE ([('typeOfRecord',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('encodingLevel',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('format',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('receiptAcqStatus',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('generalRetention',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('completeness',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('dateOfReport',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('nucCode',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('localLocation',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('shelvingLocation',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('callNumber',None,asn1.TYPE(asn1.IMPLICIT(11,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('shelvingData',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('copyNumber',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('publicNote',None,asn1.TYPE(asn1.IMPLICIT(14,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('reproductionNote',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('termsUseRepro',None,asn1.TYPE(asn1.IMPLICIT(16,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('enumAndChron',None,asn1.TYPE(asn1.IMPLICIT(17,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('volumes',None,asn1.TYPE(asn1.IMPLICIT(18,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (Volume)),1),
- ('circulationData',None,asn1.TYPE(asn1.IMPLICIT(19,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (CircRecord)),1)], seq_name = 'HoldingsAndCircData')
-HoldingsRecord=asn1.CHOICE ([('marcHoldingsRecord',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('holdingsAndCirc',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),HoldingsAndCircData))])
-OPACRecord=asn1.SEQUENCE ([('bibliographicRecord',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1),
- ('holdingsData',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (HoldingsRecord)),1)], seq_name = 'OPACRecord')
-
-
-#!/usr/bin/env python
-# Auto-generated from update_es_rev1.asn at Wed, 02 Jun 2004 15:30:49 +0000
-from PyZ3950 import asn1
-#module ESFormat_Update None
-CorrelationInfo_updrev1=asn1.SEQUENCE ([('note',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('id',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = 'CorrelationInfo_updrev1')
-OriginPartToKeep_updrev1=asn1.SEQUENCE ([('action',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('recordInsert',1),('recordReplace',2),('recordDelete',3),('elementUpdate',4),('specialUpdate',5)],None,None)),0),
- ('databaseName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('schema',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OBJECT_IDENTIFIER),1),
- ('elementSetName',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('actionQualifier',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),1)], seq_name = 'OriginPartToKeep_updrev1')
-TargetPart_updrev1=asn1.SEQUENCE ([('updateStatus',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',1),('partial',2),('failure',3)],None,None)),0),
- ('globalDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1),
- ('taskPackageRecords',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (TaskPackageRecordStructure)),0)], seq_name = 'TargetPart_updrev1')
-TaskPackageRecordStructure_updrev1=asn1.SEQUENCE ([('recordOrSurDiag',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('record',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL)),
- ('surrogateDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)))])),1),
- ('correlationInfo',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),CorrelationInfo_updrev1),1),
- ('recordStatus',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('success',1),('queued',2),('inProcess',3),('failure',4)],None,None)),0),
- ('supplementalDiagnostics',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (DiagRec)),1)], seq_name = 'TaskPackageRecordStructure_updrev1')
-SuppliedRecords_updrev1=asn1.SEQUENCE_OF (asn1.SEQUENCE ([('recordId',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('number',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('string',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('opaque',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING))])),1),
- ('supplementalId',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('timeStamp',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.GeneralizedTime)),
- ('versionNumber',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('previousVersion',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL))])),1),
- ('correlationInfo',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),CorrelationInfo_updrev1),1),
- ('record',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.EXTERNAL),0)], seq_name = None))
-OriginPartNotToKeep_updrev1=SuppliedRecords_updrev1
-Update_updrev1=asn1.CHOICE ([('esRequest',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('toKeep',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),OriginPartToKeep_updrev1),0),
- ('notToKeep',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),OriginPartNotToKeep_updrev1),0)], seq_name = None))),
- ('taskPackage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('originPart',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),OriginPartToKeep_updrev1),0),
- ('targetPart',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),TargetPart_updrev1),0)], seq_name = None)))])
-
-
-#!/usr/bin/env python
-# Auto-generated from zsql.asn at Wed, 02 Jun 2004 15:30:50 +0000
-from PyZ3950 import asn1
-#module Z39_50_EXTERNALS_SQL_RS None
-SQLCharacterSetClause=asn1.SEQUENCE ([('characterSetCatalog',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('characterSetSchema',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('characterSetName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'SQLCharacterSetClause')
-SQLUniqueConstraint=asn1.INTEGER_class ([('unique',1),('primaryKey',2)],None,None)
-SQLTransformDescriptor=asn1.SEQUENCE ([('groupName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('fromSQLFunctionName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('toSQLFunctionName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'SQLTransformDescriptor')
-Z3950CharacterSetLanguageClause=asn1.SEQUENCE ([('characterSet',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('iso2022',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),Iso2022)),
- ('iso10646',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Iso10646)),
- ('private',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),PrivateCharacterSet)),
- ('none',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),1),
- ('language',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),LanguageCode),1)], seq_name = 'Z3950CharacterSetLanguageClause')
-SQLOrderingDescriptor=asn1.SEQUENCE ([('orderingForm',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('equals',1),('full',2),('none',3)],None,None)),0),
- ('orderingCategory',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('relativeRoutineName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('hashRoutineName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('stateRoutineName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString))])),0)], seq_name = 'SQLOrderingDescriptor')
-SQLQuery=asn1.SEQUENCE ([('abstractDatabaseFlag',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('queryExpression',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),0)], seq_name = 'SQLQuery')
-SQLException=asn1.SEQUENCE ([('sqlState',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('sqlCode',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('sqlErrorText',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'SQLException')
-SQLCollationClause=asn1.SEQUENCE ([('collationCatalog',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('collationSchema',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('collationName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = 'SQLCollationClause')
-SQLMethodSpecDescriptor=asn1.SEQUENCE ([('routineName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('parameterList',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('parameterName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('mode',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('in',1),('out',2),('inout',3)],None,None)),1),
- ('type',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL),0)], seq_name = None))),0),
- ('languageName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('parameterStyle',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('sql',1),('general',2)],None,None)),1),
- ('returnsDataDescriptor',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.NULL),1),
- ('methodSpecType',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('originalSelfAsResult',1),('originalSelfAsLocator',2),('overriding',3)],None,None)),1),
- ('methodType',None,asn1.TYPE(asn1.IMPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('instance',1),('static',2)],None,None)),1),
- ('deterministic',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1),
- ('possibleMethodFunction',None,asn1.TYPE(asn1.IMPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('noSQL',1),('containsSQL',2),('readsSQLData',3),('writesSQLData',4)],None,None)),1),
- ('invokableWhenNull',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),1)], seq_name = 'SQLMethodSpecDescriptor')
-SQLAttributeDescriptor=asn1.SEQUENCE ([('attributeName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('dataDescriptor',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL),1),
- ('collation',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),SQLCollationClause),1)], seq_name = 'SQLAttributeDescriptor')
-SQLValue=asn1.SEQUENCE ([('dataItem',None, asn1.CHOICE ([('characterItem',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('numericItem',None,asn1.TYPE(asn1.EXPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('decimalItem',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('integerItem',None,asn1.TYPE(asn1.EXPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('smallIntItem',None,asn1.TYPE(asn1.EXPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None))),
- ('floatItem',None,asn1.TYPE(asn1.EXPLICIT(6,cls=asn1.CONTEXT_FLAG),asn1.REAL)),
- ('realItem',None,asn1.TYPE(asn1.EXPLICIT(7,cls=asn1.CONTEXT_FLAG),asn1.REAL)),
- ('doublePrecisionItem',None,asn1.TYPE(asn1.EXPLICIT(8,cls=asn1.CONTEXT_FLAG),asn1.REAL)),
- ('dateTimeItem',None,asn1.TYPE(asn1.EXPLICIT(9,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('intervalItem',None,asn1.TYPE(asn1.EXPLICIT(10,cls=asn1.CONTEXT_FLAG),InternationalString)),
- ('varcharItem',None,asn1.TYPE(asn1.EXPLICIT(12,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('booleanItem',None,asn1.TYPE(asn1.EXPLICIT(13,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN)),
- ('bitItem',None,asn1.TYPE(asn1.EXPLICIT(14,cls=asn1.CONTEXT_FLAG),asn1.BITSTRING_class ([],None,None))),
- ('bitVarItem',None,asn1.TYPE(asn1.EXPLICIT(15,cls=asn1.CONTEXT_FLAG),asn1.BITSTRING_class ([],None,None))),
- ('udtItem',None,asn1.TYPE(asn1.EXPLICIT(17,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.NULL))),
- ('udtLocator',None,asn1.TYPE(asn1.EXPLICIT(18,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('rowItem',None,asn1.TYPE(asn1.EXPLICIT(19,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.NULL))),
- ('refItem',None,asn1.TYPE(asn1.EXPLICIT(20,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('collectionItem',None,asn1.TYPE(asn1.EXPLICIT(21,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.NULL))),
- ('collectionLocator',None,asn1.TYPE(asn1.EXPLICIT(22,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('bLOBItem',None,asn1.TYPE(asn1.EXPLICIT(30,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('bLOBLocator',None,asn1.TYPE(asn1.EXPLICIT(31,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('cLOBItem',None,asn1.TYPE(asn1.EXPLICIT(40,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('cLOBLocator',None,asn1.TYPE(asn1.EXPLICIT(41,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING)),
- ('resultSetItem',None,asn1.TYPE(asn1.EXPLICIT(50,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (asn1.NULL))),
- ('resultSetLocator',None,asn1.TYPE(asn1.EXPLICIT(51,cls=asn1.CONTEXT_FLAG),asn1.OCTSTRING))]),1),
- ('indicator',None,asn1.TYPE(asn1.IMPLICIT(50,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('sqlnull',1),('sqlempty',2),('sqldefault',3)],None,None)),1)], seq_name = 'SQLValue')
-SQLDataDescriptor=asn1.CHOICE ([('characterType',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('sqlCharacterSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLCharacterSetClause),1),
- ('zCharacterSetLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Z3950CharacterSetLanguageClause),1),
- ('collation',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),SQLCollationClause),1)], seq_name = None))),
- ('numericType',None,asn1.TYPE(asn1.EXPLICIT(6,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('precision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('scale',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('decimalType',None,asn1.TYPE(asn1.EXPLICIT(7,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('precision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('scale',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('integerType',None,asn1.TYPE(asn1.EXPLICIT(8,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('precision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('precisionBase',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('binary',0),('decimal',1)],None,None)),0)], seq_name = None))),
- ('smallIntType',None,asn1.TYPE(asn1.EXPLICIT(9,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('precision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('precisionBase',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('binary',0),('decimal',1)],None,None)),0)], seq_name = None))),
- ('floatType',None,asn1.TYPE(asn1.EXPLICIT(10,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('mantissaPrecision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('maxExponent',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('realType',None,asn1.TYPE(asn1.EXPLICIT(11,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('mantissaPrecision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('maxExponent',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('doublePrecisionType',None,asn1.TYPE(asn1.EXPLICIT(12,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('mantissaPrecision',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('maxExponent',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('dateTimeType',None,asn1.TYPE(asn1.IMPLICIT(9,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('dateTimeQualifier',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('date',1),('time',2),('timeStamp',3),('timeWithTimeZone',4),('timeStampWithTimeZone',5)],None,None)),0),
- ('fractionalSecondsPrecision',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = None))),
- ('intervalType',None,asn1.TYPE(asn1.IMPLICIT(10,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('intervalQualifier',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('year',1),('month',2),('day',3),('hour',4),('minute',5),('second',6),('yearToMonth',7),('dayToHour',8),('dayToMinute',9),('dayToSecond',10),('hourToMinute',11),('hourToSecond',12),('minuteToSecond',13)],None,None)),0),
- ('leadingFieldPrecision',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1),
- ('fractionalSecondsPrecision',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),1)], seq_name = None))),
- ('varcharType',None,asn1.TYPE(asn1.IMPLICIT(12,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('characterSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLCharacterSetClause),1),
- ('zCharacterSetLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Z3950CharacterSetLanguageClause),1),
- ('collation',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),SQLCollationClause),1)], seq_name = None))),
- ('booleanType',None,asn1.TYPE(asn1.IMPLICIT(13,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('bitType',None,asn1.TYPE(asn1.IMPLICIT(14,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('bitVarType',None,asn1.TYPE(asn1.IMPLICIT(15,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('sQLUserDefinedType',None,asn1.TYPE(asn1.IMPLICIT(17,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('udtName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('ordering',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLOrderingDescriptor),1),
- ('superTypeName',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('representation',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG), asn1.CHOICE ([('distinct',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.NULL)),
- ('structured',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SQLAttributeDescriptor))),
- ('system_generated',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.NULL))])),0),
- ('instantiable',None,asn1.TYPE(asn1.IMPLICIT(4,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('final',None,asn1.TYPE(asn1.IMPLICIT(5,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('transformDesc',None,asn1.TYPE(asn1.IMPLICIT(7,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SQLTransformDescriptor)),1)], seq_name = None))),
- ('sQLUserDefinedTypeLocatorType',None,asn1.TYPE(asn1.IMPLICIT(18,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('sQLRowType',None,asn1.TYPE(asn1.IMPLICIT(19,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('fieldName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('dataType',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL),0)], seq_name = None)))),
- ('sQLReferenceType',None,asn1.TYPE(asn1.EXPLICIT(20,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('scopeTableName',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None))),
- ('sQLCollectionType',None,asn1.TYPE(asn1.EXPLICIT(21,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('dataType',None,asn1.TYPE(asn1.EXPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.NULL),0),
- ('collectionTypeConstructor',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('size',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('type',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('array',1),('set',2)],None,None)),0)], seq_name = None)),0),
- ('sQLCollectionLocatorType',None,asn1.TYPE(asn1.IMPLICIT(22,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None)),0),
- ('bLOBType',None,asn1.TYPE(asn1.IMPLICIT(30,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None)),0),
- ('bLOBLocatorType',None,asn1.TYPE(asn1.IMPLICIT(31,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None)),0),
- ('cLOBType',None,asn1.TYPE(asn1.EXPLICIT(40,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('sqlCharacterSet',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLCharacterSetClause),1),
- ('zCharacterSetLanguage',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),Z3950CharacterSetLanguageClause),1),
- ('collation',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),SQLCollationClause),1)], seq_name = None)),0)], seq_name = None))),
- ('cLOBLocatorType',None,asn1.TYPE(asn1.IMPLICIT(41,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None))),
- ('sQLResultSetType',None,asn1.TYPE(asn1.IMPLICIT(50,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('resultSetName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('size',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0),
- ('listOfSQLDataDescriptors',None,asn1.TYPE(asn1.IMPLICIT(3,cls=asn1.CONTEXT_FLAG),asn1.NULL),0)], seq_name = None)))),
- ('sQLResultSetLocatorType',None,asn1.TYPE(asn1.IMPLICIT(51,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE ([('length',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([],None,None)),0)], seq_name = None)))])
-SQLFieldValue=asn1.SEQUENCE ([('sqlException',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),SQLException),1),
- ('resultValue',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLValue),1)], seq_name = 'SQLFieldValue')
-SQLRowValue=asn1.SEQUENCE_OF (SQLFieldValue)
-SQLDefaultOption=asn1.CHOICE ([('sqlValue',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),SQLValue)),
- ('other',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.INTEGER_class ([('user',1),('currentuser',2),('sessionuser',3),('systemuser',4),('currentpath',5),('sqlnull',6),('sqlempty',7)],None,None)))])
-SQLColumnDescriptor=asn1.SEQUENCE ([('columnName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),0),
- ('dataType',None,asn1.TYPE(asn1.EXPLICIT(1,cls=asn1.CONTEXT_FLAG),asn1.NULL),0),
- ('columnConstraint',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('nullable',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),asn1.BOOLEAN),0),
- ('uniqueConstraint',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLUniqueConstraint),1),
- ('check',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None))),0),
- ('sqlDefault',None,asn1.TYPE(asn1.EXPLICIT(3,cls=asn1.CONTEXT_FLAG),SQLDefaultOption),1)], seq_name = 'SQLColumnDescriptor')
-SQLTableDescriptor=asn1.SEQUENCE ([('tableName',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),InternationalString),1),
- ('listOfColumnDescriptors',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SQLColumnDescriptor)),0),
- ('tableConstraint',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF ( asn1.SEQUENCE ([('listOfColumnNames',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (InternationalString)),0),
- ('uniqueContraint',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG),SQLUniqueConstraint),1),
- ('check',None,asn1.TYPE(asn1.IMPLICIT(2,cls=asn1.CONTEXT_FLAG),InternationalString),1)], seq_name = None))),0)], seq_name = 'SQLTableDescriptor')
-SQL_Result=asn1.SEQUENCE ([('tableDescriptor',None,asn1.TYPE(asn1.IMPLICIT(0,cls=asn1.CONTEXT_FLAG),SQLTableDescriptor),1),
- ('listOfResultValues',None,asn1.TYPE(asn1.IMPLICIT(1,cls=asn1.CONTEXT_FLAG), asn1.SEQUENCE_OF (SQLRowValue)),1)], seq_name = 'SQL_Result')
-SQLColumnDescriptor['dataType'] = ('dataType', asn1.EXPLICIT(1), SQLDataDescriptor)
-
-SQLDataDescriptor['sQLResultSetType'][0]['listOfSQLDataDescriptors'] = ('listOfSQLDataDescriptors', 3, asn1.SEQUENCE_OF (SQLDataDescriptor))
-
-SQLDataDescriptor ['sQLUserDefinedType']['representation']['distinct'] = ('distinct', 0, SQLDataDescriptor)
-
-SQLDataDescriptor['sQLRowType'][0]['dataType'] = ('dataType', asn1.EXPLICIT(1), SQLDataDescriptor)
-
-SQLAttributeDescriptor['dataDescriptor'] = ('dataDescriptor', 1, SQLDataDescriptor)
-
-SQLValue['dataItem']['udtItem'] = ('udtItem', 17, asn1.SEQUENCE_OF(SQLValue))
-
-SQLValue['dataItem']['rowItem'] = ('rowItem', 19, asn1.SEQUENCE_OF(SQLValue))
-
-SQLValue['dataItem']['collectionItem'] = ('collectionItem', 21, asn1.SEQUENCE_OF(SQLValue))
-
-SQLValue['dataItem']['resultSetItem'] = ('resultSetItem', 50, asn1.SEQUENCE_OF(SQLValue))
-
-
diff --git a/python/PyZ3950/zdefs.py b/python/PyZ3950/zdefs.py
deleted file mode 100644
index 9734db3..0000000
--- a/python/PyZ3950/zdefs.py
+++ /dev/null
@@ -1,340 +0,0 @@
-#!/usr/bin/env python
-
-import codecs
-
-from PyZ3950.z3950_2001 import *
-from PyZ3950.oids import *
-
-
-asn1.register_oid (Z3950_RECSYN_GRS1, GenericRecord)
-asn1.register_oid (Z3950_RECSYN_SUTRS, asn1.GeneralString)
-asn1.register_oid (Z3950_RECSYN_EXPLAIN, Explain_Record)
-asn1.register_oid (Z3950_RECSYN_OPAC, OPACRecord)
-
-asn1.register_oid (Z3950_ES_PERSISTRS, PersistentResultSet)
-asn1.register_oid (Z3950_ES_PERSISTQRY, PersistentQuery)
-asn1.register_oid (Z3950_ES_PERIODQRY, PeriodicQuerySchedule)
-asn1.register_oid (Z3950_ES_ITEMORDER, ItemOrder)
-asn1.register_oid (Z3950_ES_DBUPDATE, Update)
-asn1.register_oid (Z3950_ES_DBUPDATE_REV_1, Update_updrev1)
-asn1.register_oid (Z3950_ES_EXPORTSPEC, ExportSpecification)
-asn1.register_oid (Z3950_ES_EXPORTINV, ExportInvocation)
-
-
-asn1.register_oid (Z3950_USR_SEARCHRES1, SearchInfoReport)
-asn1.register_oid (Z3950_USR_INFO1, OtherInformation)
-asn1.register_oid (Z3950_NEG_CHARSET3, CharSetandLanguageNegotiation_3)
-asn1.register_oid (Z3950_USR_PRIVATE_OCLC_INFO, OCLC_UserInformation)
-
-# below here is subject to change without notice, as I try to
-# figure out the appropriate balance between convenience and flexibility
-
-trace_charset = 0
-
-impl_vers = "1.0 beta" # XXX
-impl_id = 'PyZ39.50 - contact asl2@pobox.com' # haven't been assigned an official id, apply XXX
-implementationId = impl_id
-
-def make_attr(set=None, atype=None, val=None, valType=None):
- ae = AttributeElement()
- if (set <> None):
- ae.attributeSet = set
- ae.attributeType = atype
- if (valType == 'numeric' or (valType == None and isinstance(val, int))):
- ae.attributeValue = ('numeric', val)
- else:
- cattr = AttributeElement['attributeValue']['complex']()
- if (valType == None):
- valType = 'string'
- cattr.list = [(valType, val)]
- ae.attributeValue = ('complex', cattr)
- return ae
-
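-# Example (illustrative): a Bib-1 "Use" attribute (attribute type 1) with
-# value 4, i.e. title.  Since val is an int and no valType is given,
-# make_attr takes the 'numeric' branch above; an attribute-set OID can be
-# supplied through the 'set' argument.
-#     title_attr = make_attr (atype = 1, val = 4)
-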
-# This list is needed to support recordsInSelectedCharSets == 0 when
-# character set negotiation is in effect. The reason we don't
-# just iterate over Z3950_RECSYN is that many of those are carried
-# in OCTET STRINGs, and thus immune to negotiation; but maybe we should
-# anyway.
-
-retrievalRecord_oids = [
- Z3950_RECSYN_EXPLAIN_ov,
- Z3950_RECSYN_SUTRS_ov,
- Z3950_RECSYN_OPAC_ov,
- Z3950_RECSYN_SUMMARY_ov,
- Z3950_RECSYN_GRS1_ov,
- Z3950_RECSYN_ES_ov,
- Z3950_RECSYN_FRAGMENT_ov,
- Z3950_RECSYN_SQL_ov]
-
-
-def register_retrieval_record_oids (ctx, new_codec_name = 'ascii'):
- new_codec = codecs.lookup (new_codec_name)
- def switch_codec ():
- ctx.push_codec ()
- ctx.set_codec (asn1.GeneralString, new_codec)
- for oid in retrievalRecord_oids:
- ctx.register_charset_switcher (oid, switch_codec)
-
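-# Usage sketch (illustrative): given a decode context 'ctx' that provides the
-# push_codec / set_codec / register_charset_switcher methods used above,
-#     register_retrieval_record_oids (ctx, 'utf-8')
-# registers a codec switch so that GeneralStrings inside any of the retrieval
-# record syntaxes listed above are decoded as UTF-8.
-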
-iso_10646_oid_to_name = {
- UNICODE_PART1_XFERSYN_UCS2_ov : 'utf-16', # XXX ucs-2 should differ from utf-16, in that ucs-2 forbids any characters not in the BMP, whereas utf-16 is a 16-bit encoding which encodes those characters into multiple 16-bit units
-
-# UNICODE_PART1_XFERSYN_UCS4_ov : 'ucs-4', # XXX no python support for this encoding?
- UNICODE_PART1_XFERSYN_UTF16_ov : 'utf-16',
- UNICODE_PART1_XFERSYN_UTF8_ov : 'utf-8'
- }
-
-def try_get_iso10646_oid (charset_name):
- for k,v in iso_10646_oid_to_name.iteritems ():
- if charset_name == v:
- return k
- # XXX note that we don't know which of {UCS2, UTF16} oids we'll
- # get from this.
-
-def asn_charset_to_name (charset_tup):
- if trace_charset:
- print "asn_charset_to_name", charset_tup
- charset_name = None
- (typ, charset) = charset_tup
- if typ == 'iso10646':
- charset_name = iso_10646_oid_to_name.get (charset.encodingLevel,
- None)
- elif typ == 'private':
- (spectyp, val) = charset
- if spectyp == 'externallySpecified':
- oid = getattr (val, 'direct_reference', None)
- if oid == Z3950_NEG_PRIVATE_INDEXDATA_CHARSETNAME_ov:
- enctyp, encval = val.encoding
- if enctyp == 'octet-aligned':
- charset_name = encval
- if trace_charset:
- print "returning charset", charset_name
- return charset_name
-
-
-def charset_to_asn (charset_name):
- oid = try_get_iso10646_oid (charset_name)
- if oid <> None:
- iso10646 = Iso10646_3 ()
- iso10646.encodingLevel = oid
- return ('iso10646', iso10646)
- else:
- ext = asn1.EXTERNAL ()
- ext.direct_reference = Z3950_NEG_PRIVATE_INDEXDATA_CHARSETNAME_ov
- ext.encoding = ('octet-aligned', charset_name)
- return ('private', ('externallySpecified', ext))
-
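-# Round-trip sketch (illustrative): charset_to_asn ('utf-8') returns
-# ('iso10646', <Iso10646_3 with the UTF-8 encodingLevel oid>), and feeding
-# that tuple back through asn_charset_to_name () gives 'utf-8' again.  A name
-# with no ISO 10646 oid, e.g. 'iso8859-1', is wrapped in the Index Data
-# private form ('private', ('externallySpecified', <EXTERNAL holding the name>)).
-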
-class CharsetNegotReq:
- def __init__ (self, charset_list = None, lang_list = None,
- records_in_charsets = None):
- """charset_list is a list of character set names, either ISO10646
-(UTF-8 or UTF-16), or private. We support Index Data's semantics
-for private character sets (see
-http://www.indexdata.dk/pipermail/yazlist/2003-March/000504.html), so
-you can pass any character set name for which Python has a codec installed
-(but please don't use rot13 in production). Note that there should be
-at most one of each of (ISO10646, private). (No, I don't know why, but
-it says so in the ASN.1 definition comments.)
-
-lang_list is a list of language codes, as defined in ANSI Z39.53-1994
-(see, e.g., http://xml.coverpages.org/nisoLang3-1994.html).
-
-records_in_charsets governs whether charset negotiation applies to
-retrieved records as well.
-
-Any of these parameters can be None, since the corresponding
-elements in the ASN.1 are OPTIONAL.
-"""
- self.charset_list = charset_list
- self.lang_list = lang_list
- self.records_in_charsets = records_in_charsets
- def __str__ (self):
- return "Charset negot request %s %s %s" % (
- str (self.charset_list), str (self.lang_list),
- str (self.records_in_charsets))
- def pack_proposal (self):
- origin_prop = OriginProposal_3 ()
- if self.charset_list <> None:
- proposedCharSets = []
- for charset_name in self.charset_list:
- proposedCharSets.append (charset_to_asn (charset_name))
-
- origin_prop.proposedCharSets = proposedCharSets
- if self.lang_list <> None:
- origin_prop.proposedlanguages = self.lang_list
- if self.records_in_charsets <> None:
- origin_prop.recordsInSelectedCharSets = (
- self.records_in_charsets)
- return ('proposal', origin_prop)
- def unpack_proposal (self, csn):
- (tag, proposal) = csn
- assert (tag == 'proposal')
- pcs = getattr (proposal, 'proposedCharSets', None)
- if pcs <> None:
- if trace_charset:
- print "pcs", pcs
- self.charset_list = []
-
- for charset in pcs:
- charset_name = asn_charset_to_name (charset)
- if charset_name <> None:
- self.charset_list.append (charset_name)
-
- lang = getattr (proposal, 'proposedlanguages', None)
- if lang <> None:
- self.lang_list = lang
- self.records_in_charsets = getattr (proposal,
- 'recordsInSelectedCharSets', None)
-
-
-class CharsetNegotResp:
- def __init__ (self, charset = None, lang = None,
- records_in_charsets = None):
- self.charset = charset
- self.lang = lang
- self.records_in_charsets = records_in_charsets
- def __str__ (self):
- return "CharsetNegotResp: %s %s %s" % (
- str (self.charset), str (self.lang),
- str (self.records_in_charsets))
- def unpack_negot_resp (self, neg_resp):
- typ, val = neg_resp
- assert (typ == 'response')
- self.charset = None
- scs = getattr (val, 'selectedCharSets', None)
- if scs <> None:
- self.charset = asn_charset_to_name (scs)
- self.lang = getattr (val, 'selectedLanguage', None)
- self.records_in_charsets = getattr (
- val, 'recordsInSelectedCharSets', None)
- def pack_negot_resp (self):
- resp = TargetResponse_3 ()
- if self.charset <> None:
- resp.selectedCharSets = charset_to_asn (self.charset)
- if self.lang <> None:
- resp.selectedLanguage = self.lang
- if self.records_in_charsets <> None:
- resp.recordsInSelectedCharSets = self.records_in_charsets
- return ('response', resp)
-
-
-def get_charset_negot (init): # can be passed either InitializeRequest or InitializeResponse
- if trace_charset:
- print init
- if not init.options ['negotiation']:
- return None
- otherInfo = []
- if hasattr (init, 'otherInfo'):
- otherInfo = init.otherInfo
- elif hasattr (init, 'userInformationField'):
- ui = init.userInformationField
- if ui.direct_reference == Z3950_USR_INFO1_ov:
- (enctype, otherInfo) = ui.encoding
-
- for oi in otherInfo:
- if trace_charset:
- print oi
- (typ, val) = oi.information
- if typ == 'externallyDefinedInfo':
- if val.direct_reference == Z3950_NEG_CHARSET3_ov:
- (typ, val) = val.encoding
- if typ == 'single-ASN1-type':
- return val
-
- return None
-
-
-def set_charset_negot (init, val, v3_flag):
- # again, can be passed either InitializeRequest or Response
- negot = asn1.EXTERNAL ()
- negot.direct_reference = Z3950_NEG_CHARSET3_ov
- negot.encoding= ('single-ASN1-type', val)
- OtherInfoElt = OtherInformation[0]
- oi_elt = OtherInfoElt ()
- oi_elt.information = ('externallyDefinedInfo', negot)
- other_info = [oi_elt]
- if trace_charset:
- print v3_flag, oi_elt
-
- if v3_flag:
- init.otherInfo = other_info
- else:
- ui = asn1.EXTERNAL ()
-
- ui.direct_reference = Z3950_USR_INFO1_ov
- ui.encoding = ('single-ASN1-type', other_info) # XXX test this
- # see http://lcweb.loc.gov/z3950/agency/defns/user-1.html
- init.userInformationField = ui
-
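-# Receiving-side sketch (illustrative): pull the negotiation record back out
-# of an InitializeResponse 'resp' and unpack it with the classes above:
-#     csn = get_charset_negot (resp)
-#     if csn <> None:
-#         negot_resp = CharsetNegotResp ()
-#         negot_resp.unpack_negot_resp (csn)
-#         # negot_resp.charset / .lang / .records_in_charsets now hold the
-#         # server's choices (any of them may be None)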
-
-def_msg_size = 0x10000
-
-# rethink optionslist. Maybe we should just turn on all the
-# bits the underlying code supports? We do need to be able to
-# turn off multiple result sets for testing (see tests/test2.py),
-# but that doesn't have to be the default.
-def make_initreq (optionslist = None, authentication = None, v3 = 0,
- negotiate_charset = 0, preferredMessageSize = 0x100000,
- maximumRecordSize = 0x100000, implementationId = "",
- implementationName = "", implementationVersion = ""):
-
- # see http://lcweb.loc.gov/z3950/agency/wisdom/unicode.html
- InitReq = InitializeRequest ()
- InitReq.protocolVersion = ProtocolVersion ()
- InitReq.protocolVersion ['version_1'] = 1
- InitReq.protocolVersion ['version_2'] = 1
- InitReq.protocolVersion ['version_3'] = v3
- InitReq.options = Options ()
- if optionslist <> None:
- for o in optionslist:
- InitReq.options[o] = 1
- InitReq.options ['search'] = 1
- InitReq.options ['present'] = 1
- InitReq.options ['delSet'] = 1
- InitReq.options ['scan'] = 1
- InitReq.options ['sort'] = 1
- InitReq.options ['extendedServices'] = 1
- InitReq.options ['dedup'] = 1
- InitReq.options ['negotiation'] = negotiate_charset # XXX can negotiate other stuff, too
-
-# Preferred and Exceptional msg sizes are pretty arbitrary --
-# we dynamically allocate no matter what
- InitReq.preferredMessageSize = preferredMessageSize
- InitReq.exceptionalRecordSize = maximumRecordSize
-
- if (implementationId):
- InitReq.implementationId = implementationId
- else:
- InitReq.implementationId = impl_id
- if (implementationName):
- InitReq.implementationName = implementationName
- else:
- InitReq.implementationName = 'PyZ3950'
- if (implementationVersion):
- InitReq.implementationVersion = implementationVersion
- else:
- InitReq.implementationVersion = impl_vers
-
- if authentication <> None:
- class UP: pass
- up = UP ()
- upAttrList = ['userId', 'password', 'groupId']
- for val, attr in zip (authentication, upAttrList): # silently truncate
- if val <> None:
- setattr (up, attr, val)
- InitReq.idAuthentication = ('idPass', up)
-
- return InitReq
-
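-# Example (illustrative sketch, not part of the public API): build an
-# InitializeRequest that proposes UTF-8 charset negotiation, with records
-# converted as well, using only names defined in this module.
-def _example_initreq_with_utf8 ():
-    init_req = make_initreq (v3 = 1, negotiate_charset = 1)
-    proposal = CharsetNegotReq (['utf-8'], None, 1).pack_proposal ()
-    set_charset_negot (init_req, proposal, 1)
-    return init_req
-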
-def make_sreq (query, dbnames, rsn, **kw):
- sreq = SearchRequest ()
- sreq.smallSetUpperBound = 0
- sreq.largeSetLowerBound = 1
- sreq.mediumSetPresentNumber = 0
-# as per http://lcweb.loc.gov/z3950/lcserver.html, Jun 07 2001,
-# to work around Endeavor bugs in 1.13
- sreq.replaceIndicator = 1
- sreq.resultSetName = rsn
- sreq.databaseNames = dbnames
- sreq.query = query
- for (key, val) in kw.items ():
- setattr (sreq, key, val)
- return sreq
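-
-# Example (illustrative): a minimal search request against the 'Default'
-# database with result-set name 'rs1'; 'query' is assumed to be an already
-# encoded Query CHOICE such as ('type_1', <RPNQuery>).
-#     sreq = make_sreq (query, ['Default'], 'rs1')
-# Any extra keyword arguments (e.g. preferredRecordSyntax) are set verbatim
-# on the SearchRequest by the loop above.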
diff --git a/python/PyZ3950/zmarc.py b/python/PyZ3950/zmarc.py
deleted file mode 100644
index d7a5044..0000000
--- a/python/PyZ3950/zmarc.py
+++ /dev/null
@@ -1,1252 +0,0 @@
-#!/usr/bin/env python
-
-"""Parses MARC-format data. The MARC class has a constructor
-which takes binary MARC data.
-"""
-
-# This file should be available from
-# http://www.pobox.com/~asl2/software/PyZ3950/
-# and is licensed under the X Consortium license:
-# Copyright (c) 2001, Aaron S. Lav, asl2@pobox.com
-# All rights reserved.
-
-# Permission is hereby granted, free of charge, to any person obtaining a
-# copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, and/or sell copies of the Software, and to permit persons
-# to whom the Software is furnished to do so, provided that the above
-# copyright notice(s) and this permission notice appear in all copies of
-# the Software and that both the above copyright notice(s) and this
-# permission notice appear in supporting documentation.
-
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
-# OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
-# HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL
-# INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-# Except as contained in this notice, the name of a copyright holder
-# shall not be used in advertising or otherwise to promote the sale, use
-# or other dealings in this Software without prior written authorization
-# of the copyright holder.
-
-import sys
-import string
-
-from xml.sax.saxutils import escape
-
-class MarcError (Exception):
- pass
-
-def is_fixed (num):
- return num < 10
-
-fieldsep = '\x1e'
-sep = '\x1f' # XXX or 1D for pseudo-marc output from z3950.c
-recsep = '\x1d'
-
-
-# Attributes for SGML DTD (!!!) If not present, then I1 I2
-attrHash = { 22 : ['ISDSLvl', 'I2'],
- 24 : ['StdNum', 'DiffInd'], 28 : ['PubNmTyp', 'NteAdEnty'],
- 33 : ['DateType', 'EventTyp'], 34 : ['ScapeTyp', 'I2'],
- 41 : ['TransInd', 'I2'], 45 : ['TimePrd', 'I2'],
- 50 : ['InLofC', 'CNSrc'], 55 : ['InNLC', 'CNCLSSrc'],
- 60 : ['InNLM', 'CNSrc'], 70 : ['InNAL', 'I2'],
- 72 : ['I1', 'CodeSrc'], 82 : ['Edition', 'CNSrc'],
- 86 : ['NumbrSrc', 'I2'], 100 : ['NameType', 'I2'],
- 110: ['NameType', 'I2'], 111 : ['NameType', 'I2'],
- 130: ['NFChars', 'I2'], 150 : ['I1', 'NFChars'],
- 151: ['I1', 'NFChars'], 210 : ['AddEnty', 'I2'],
- 211: ['AddEnty', 'NFChars'], 212 : ['AddEnty', 'I2'],
- 214: ['AddEnty', 'NFChars'], 222 : ['I1', 'NFChars'],
- 240: ['PrntDisp', 'NFChars'], 242 : ['AddEnty', 'NFChars'],
- 243: ['PrntDisp', 'NFChars'], 245 : ['AddEnty', 'NFChars'],
- 246: ['NCAddEty', 'TitleTyp'],247 : ['AddEnty', 'NoteCntl'],
- 270: ['Level', 'AddrType'], 355 : ['CntlElmt', 'I2'],
- 362: ['DTFormat', 'I2'], 400 : ['NameType', 'Pronoun'],
- 410: ['NameType', 'Pronoun'], 411 : ['NameType', 'Pronoun'],
- 430: ['I1', 'NFChars'], 440 : ['I1', 'NFChars'],
- 450: ['I1', 'NFChars'], 451 : ['I1', 'NFChars'],
- 490: ['Traced', 'I2'], 505 : ['DCC', 'CDLevel'],
- 510: ['CoverLoc', 'I2'], 511 : ['DCC', 'I2'],
- 516: ['DCC', 'I2'], 521 : ['DCC', 'I2'],
- 520: ['DCC', 'I2'], 522 : ['DCC', 'I2'],
- 524: ['DCC', 'I2'], 535 : ['Holds', 'I2'],
- 537: ['DCC', 'I2'], 551 : ['I1', 'NFChars'],
- 555: ['DCC', 'I2'], 556 : ['DCC', 'I2'],
- 565: ['DCC', 'I2'], 567 : ['DCC', 'I2'],
- 581: ['DCC', 'I2'], 582 : ['DCC', 'I2'],
- 586: ['DCC', 'I2'], 600 : ['NameType', 'SubjSys'],
- 610: ['NameType', 'SubjSys'], 611 : ['NameType', 'SubjSys'],
- 630: ['NFChars', 'SubjSys'], 650 : ['SubjLvl', 'SubjSys'],
- 651: ['I1', 'SubjSys'], 653 : ['IndexLvl', 'I2'],
- 654: ['IndexLvl', 'I2'], 655 : ['Type', 'Source'],
- 656: ['I1', 'Source'], 657 : ['I1', 'Source'],
- 700: ['NameType','EntryType'],710 : ['NameType','EntryType'],
- 711: ['NameType','EntryType'],730 : ['NFChars','EntryType'],
- 740: ['NFChars','EntryType'], 760 : ['NoteCntl', 'I2'],
- 762: ['NoteCntl', 'I2'], 765 : ['NoteCntl', 'I2'],
- 767: ['NoteCntl', 'I2'], 772 : ['NoteCntl', 'I2'],
- 773: ['NoteCntl', 'I2'], 775 : ['NoteCntl', 'I2'],
- 776: ['NoteCntl', 'I2'], 777 : ['NoteCntl', 'I2'],
- 780: ['NoteCntl', 'RelType'], 785 : ['NoteCntl', 'RelType'],
- 787: ['NoteCntl', 'I2'], 800 : ['NameType', 'I2'],
- 810: ['NameType', 'I2'], 811 : ['NameType', 'I2'],
- 830: ['I1', 'NFChars'], 852 : ['Scheme', 'Order'],
- 853: ['CmprsExpnd', 'Eval'], 854 : ['CmprsExpnd', 'Eval'],
- 856: ['AccsMeth', 'I2'], 863 : ['EncLevel', 'HoldForm'],
- 864: ['EncLevel','HoldForm'], 865 : ['EncLevel', 'HoldForm'],
- 866: ['EncLevel','Notation'], 867 : ['EncLevel', 'Notation'],
- 868: ['EncLevel','Notation'], 886 : ['FldType', 'I2']}
-
-subfieldHash = {'1' : "one", '2' : "two", '3' : "three", '4' : "four", '5' : "five",
- '6' : "six", '7' : "seven", '8' : "eight", '9' : "nine", '0' : "zero"}
-
-
-# takes text, turns it into tuple of (ind1, ind2, list of (subfield, val))
-# where subfield may repeat within the list.
-# We need a structure like this in order to correctly parse both records:
-# 650 0 $aWorld War, 1939-1945$xCampaigns$zTunisia
-# 650 0 $aReal property$zMississippi$zTippah County$xMaps
-# (taken from _USMARC Format for Bibliographic Data_, Prepared by Network
-# Development and MARC Standards Office, Cataloging Distribution Service,
-# Library of Congress, section 650 p. 5, page printed Dec 1991, looseleaf
-# binder issued in 1988.)
-
-def parse_sub (field):
- if len (field) < 4:
- if field == ' ':
- # Is this legit? I've seen it, so handle correctly.
- # specifically for au=Johansen, Arnold S from z3950.bibsys.no:2100
- return (' ', ' ', [])
- return None
-
- if field [2] <> sep:
- print "Bad field [2]", repr (field[2])
- return None
- ind1 = field[0]
- ind2 = field[1]
- sublist = []
- splitlist = string.split (field[2:], sep)
- for sub in splitlist:
- if (sub == ''): # we begin w/ sep, so there's an empty prefix
- continue
- sublist.append ((sub[0], string.strip(sub[1:])))
- return (ind1, ind2, sublist)
-
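-# Worked example (illustrative): a subject field body such as
-#     '0 ' + sep + 'aWorld War, 1939-1945' + sep + 'xCampaigns' + sep + 'zTunisia'
-# is returned by parse_sub as
-#     ('0', ' ', [('a', 'World War, 1939-1945'),
-#                 ('x', 'Campaigns'),
-#                 ('z', 'Tunisia')])
-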
-class MARC:
- """Parses data into 'fields' attribute, indexed by field number.
- Each value is a list. For fixed fields, it's a list of the string data
- (one string for each occurrence of the field in the original data). For
- other fields, each list element is a tuple of (indicator 1, indicator 2,
- subdata), where subdata is a list of tuples of (subfield indicator,
- subfield data). Yes, this is kinda lame and I really should have
- used structures, but this was some of the first Python code I ever
- wrote.
- """
- hdrbits = [5,6,7,8,17,18,19]
- # Status, Type, Bib. Level, Type of Ctrl., Enc. Level,
- # Descr. Cat. Form, Linked Rcd Reqt are all part of pseudoentry 0
-
- def __init__(self, MARC = None, strict = 1):
- """Parses MARC data. According to Bill Oldroyd (Bill.Oldroyd at
- bl.uk), some servers don't set the character set and/or other
- bits of the MARC header properly, so it's useful to set strict=0
- when dealing with such servers."""
- self.fields = {}
- self.ok = 0
- self.marc = MARC
- if MARC == None:
- return # we'll write to it later
- reclen = self.extract_int (0,4)
- self.reclen = reclen
- baseaddr = self.extract_int (12, 16)
- zerostr = ""
- for ind in self.hdrbits: zerostr = zerostr + self.marc[ind]
- self.fields [0] = [zerostr]
- if strict:
- assert (self.marc[9] == ' ') # 'a' would be UCS/Unicode
- assert (self.marc[10] == '2' and self.marc[11] == '2')
- assert (self.marc[20:22] == '45')
- pos = 24
- lastpos = baseaddr
- while pos < baseaddr:
- tag = self.marc[pos:pos+3]
- if tag [0] == '\035' or tag [0] == '\036':
- break
- fieldlen = self.extract_int (pos + 3, pos + 6)
- startpos = self.extract_int (pos + 7, pos + 11)
- pos = pos + 12
- start = baseaddr + startpos
- end = start + fieldlen
- line = self.marc[start:end]
- lastpos = startpos
- if line [-1] == '\x1E':
- line = line[:-1]
- else: print "Weird, no hex 1E for", tag, repr(line)
- field = string.atoi (tag)
- if is_fixed (field):
- self.fields[field] = [line]
- # 1-elt list for orthogonality of processing
- else:
- ps = parse_sub (line)
- if ps == None:
- raise MarcError (line)
- self.fields.setdefault (field, []).append (ps)
- self.ok = 1
- # XXX should do more error-checking
- def __str__ (self):
- k = self.fields.keys ()
- k.sort ()
- lst = []
- for field in k:
- lst.append (self.stringify_field (field))
- return "MARC: \n" + "\n".join (lst)
- def stringify_field (self, k):
- f = self.fields [k]
- if is_fixed (k):
- return str (k) + " " + f[0]
- else:
- str_l = []
- for l in f:
- def fmt (x):
- return '$%s%s' % (x[0], x[1])
- sl = map (fmt, l[2])
- str_l.append (str(k) + " " + l[0] + l[1] + " ".join (sl))
- return "\n".join (str_l)
- def extract_int (self, start, end):
- return string.atoi (self.marc[start:end+1])
- def get_MARC (self):
- hdrlist = [' '] * 24
- zerostr = self.fields [0][0]
- for i in range (len (zerostr)):
- hdrlist [self.hdrbits [i]] = zerostr [i]
- hdrlist [10] = '2' # replace these with data map, assert on read
- hdrlist [11] = '2'
- hdrlist [20] = '4'
- hdrlist [21] = '5'
- hdrlist [22] = '0'
- hdrlist [23] = '0'
- # later - 0-4 log. record length, 12-16 base addr of data
- # directory: 3 of tag, 4 of field len, 5 of starting pos (rel.
- # to base address of data, 12-16
- fields = self.fields.keys ()
- data = ''
- directory = ''
- for field in fields:
- if field == 0: # pseudofield
- continue
- for fielddat in self.fields [field]:
- start = len (data)
- if is_fixed (field):
- data += fielddat
- else:
- sublist = (fielddat [0] + fielddat [1] +
- "".join (map (lambda s: sep + s[0] + s[1],
- fielddat[2])))
- data += sublist
- data += fieldsep # XXX is this right?
-
- length = len (data) - start
- directory += "%.03d%.04d%.05d" % (field, length, start)
- def id (x): return x
- data += fieldsep + recsep
- hdrlist [0:5] = map (id, "%.05d" % (len (hdrlist) + len (directory) +
- len (data),))
- hdrlist [12:17] = map (id,"%.05d" % (len (hdrlist) + len (directory),))
- return "".join (hdrlist) + directory + data
-
- def toMARCXML(self):
- " Convert record to MarcXML Schema "
- keys = self.fields.keys()
- keys.sort()
-
-
- xmllist = ["\n", " %s\n" % (self.get_MARC()[:24])]
-
- for key in keys:
- if key == 0:
- # XXX Skip?? What are these??
- pass
- elif key < 10:
- xmllist.append(" %s\n" % (key, self.fields[key][0]))
- else:
- for instance in self.fields[key]:
- if key < 100:
- keystr = "0" + str(key)
- else:
- keystr = str(key)
- xmllist.append(" \n" % (keystr, instance[0], instance[1]))
- for sub in instance[2]:
- xmllist.append(" %s\n" % (sub[0], escape(sub[1])))
- xmllist.append(" \n")
-
- xmllist.append("")
- xml = ''.join(xmllist)
- return xml
-
- def toOAIMARC(self):
- """Convert record to OAI MARC XML Schema.
- Note well that OAI-PMH 2.0 recommends using MARCXML instead."""
-
- keys = self.fields.keys()
- keys.sort()
- marc = self.get_MARC()
-
- # What should these attributes really be?
- xmllist = ['\n' % (marc[6], marc[7])]
-
- for key in keys:
- if key == 0:
- # Skip?? What are these?
- pass
- elif key < 10:
- xmllist.append(" %s\n" % (key, self.fields[key][0]))
- else:
- for instance in self.fields[key]:
- xmllist.append(" \n" % (key, instance[0], instance[1]))
- for sub in instance[2]:
- xmllist.append(" %s\n" % (sub[0], escape(sub[1])))
- xmllist.append(" \n")
-
- xmllist.append("")
- xml = ''.join(xmllist)
- return xml
-
- def sgml_processCode(self, k):
- if attrHash.has_key(k):
- i1 = attrHash[k][0]
- i2 = attrHash[k][1]
- else:
- i1 = "I1"
- i2 = "I2"
- if k < 100:
- keystr = "0%d" % (k)
- else:
- keystr = str(k)
-
- sgmllist = []
- for instance in self.fields[k]:
- sgmllist.append(' \n' % (keystr, i1, instance[0], i2, instance[1]))
- for sub in instance[2]:
- stag = sub[0]
- if subfieldHash.has_key(stag):
- stag = subfieldHash[stag]
- sgmllist.append(' <%s>%s%s>\n' % (stag, escape(sub[1]), stag))
- sgmllist.append(' \n' % (keystr))
- sgml = ''.join(sgmllist)
- return sgml
-
-
- def toSGML(self):
- """ Convert record to USMARC SGML """
-
- keys = self.fields.keys()
- keys.sort()
-
- # Extract field ranges
- cflds = []
- numbcode = []
- mainenty = []
- titles = []
- edimprnt = []
- physdesc = []
- series = []
- notes = []
- subjaccs = []
- addenty = []
- linkenty = []
- saddenty = []
- holdaltg = []
- fld9xx = []
- # Ugly
- for k in keys:
- if k == 0:
- pass
- elif k < 10:
- cflds.append(k)
- elif k < 100:
- numbcode.append(k)
- elif k < 200:
- mainenty.append(k)
- elif k < 250:
- titles.append(k)
- elif k < 300:
- edimprnt.append(k)
- elif k < 400:
- physdesc.append(k)
- elif k < 500:
- series.append(k)
- elif k < 600:
- notes.append(k)
- elif k < 700:
- subjaccs.append(k)
- elif k < 760:
- addenty.append(k)
- elif k < 800:
- linkenty.append(k)
- elif k < 840:
- saddenty.append(k)
- elif k < 900:
- holdaltg.append(k)
- else:
- fld9xx.append(k)
-
-
-
- marc = self.get_MARC()
-
- sgml = ["\n"]
- sgml.append(" \n")
- sgml.append(" %s\n" % (marc[:5]))
- sgml.append(" %s\n" % (marc[5]))
- sgml.append(" %s\n" % (marc[6]))
- sgml.append(" %s\n" % (marc[7]))
- sgml.append(" %s\n" % (marc[8:10]))
- sgml.append(" %s\n" % (marc[10]))
- sgml.append(" %s\n" % (marc[11]))
- sgml.append(" %s\n" % (marc[12:17]))
- sgml.append(" %s\n" % (marc[17]))
- sgml.append(" %s\n" % (marc[18]))
- sgml.append(" %s\n" % (marc[19]))
- sgml.append(" \n")
- sgml.append(" %s\n" % (marc[20]))
- sgml.append(" %s\n" % (marc[21]))
- sgml.append(" %s\n" % (marc[22]))
- sgml.append(" %s\n" % (marc[23]))
- sgml.append(" \n")
- sgml.append(" \n")
- sgml.append(" \n")
-
- sgml.append(" \n")
- sgml.append(" \n")
- for k in cflds:
- sgml.append(" %s\n" % (k, self.fields[k][0], k))
- sgml.append(" \n")
- sgml.append(" \n")
- sgml.append(" \n")
- for k in numbcode:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
-
- if mainenty:
- sgml.append(" \n")
- for k in mainenty:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if titles:
- sgml.append(" \n")
- for k in titles:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if edimprnt:
- sgml.append(" \n")
- for k in edimprnt:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if physdesc:
- sgml.append(" \n")
- for k in physdesc:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if series:
- sgml.append(" \n")
- for k in series:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if notes:
- sgml.append(" \n")
- for k in notes:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if subjaccs:
- sgml.append(" \n")
- for k in subjaccs:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if addenty:
- sgml.append(" \n")
- for k in addenty:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if linkenty:
- sgml.append(" \n")
- for k in linkenty:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if saddenty:
- sgml.append(" \n")
- for k in saddenty:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if holdaltg:
- sgml.append(" \n")
- for k in holdaltg:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- if fld9xx:
- sgml.append(" \n")
- for k in fld9xx:
- sgml.append(self.sgml_processCode(k))
- sgml.append(" \n")
- sgml.append(" \n")
- sgml.append(" \n")
- sgml.append("")
- return ''.join(sgml)
-
-
- def toSimpleDC(self):
- """ Convert Marc into DC according to LC Crosswalk """
- xml = ['\n']
-
- # Title -> 245
- if self.fields.has_key(245):
- instance = self.fields[245][0][2]
- a = ''
- b = ''
- for sub in instance:
- if sub[0] == 'a':
- a = sub[1]
- elif sub[0] == 'b':
- b = sub[1]
- if a and b and a[-1] in [',', '.', ';', ':']:
- a += " " + b
- elif a and b:
- a += "; " + b
- elif b and not a:
- a = b
- xml.append(" %s\n" % (a))
-
- # Creator -> 100,110,111,700,710,711
- authorKeys = [100, 110, 111, 700, 710, 711]
- for k in authorKeys:
- if self.fields.has_key(k):
- for instance in self.fields[k]:
- a = ''
- h = ''
- d = ''
- for sub in instance[2]:
- if sub[0] == 'a':
- a = sub[1]
- elif sub[0] == 'h':
- h = sub[1]
- elif sub[0] == 'd':
- d = sub[1]
- if h:
- a += ", " + h
- if d:
- a += " (" + d + ")"
- xml.append(" %s\n" % (a))
-
- # Subject -> 600,610, 611, 630, 650, 653
- # Just dump in directly...
- subjectList = [600, 610, 611, 630, 650, 653]
- for s in subjectList:
- if self.fields.has_key(s):
- for instance in self.fields[s]:
- subject = ''
- for sub in instance[2]:
- subject += sub[1] + " -- "
- subject = subject[:-4]
- xml.append(" %s\n" % (subject))
-
-
- # Publisher -> 260$a$b
- if self.fields.has_key(260):
- for instance in self.fields[260]:
- a = b = ''
- for sub in instance[2]:
- if sub[0] == 'a':
- a = sub[1]
- elif sub[0] == 'b':
- b = sub[1]
- if b[-1] in [',', ';', ':']:
- b = b[:-1]
- elif sub[0] == 'c':
- d = sub[1]
- if d[-1] == '.':
- d = d[:-1]
- xml.append(" %s\n" % (d))
- if b:
- a += " " + b
- if a:
- xml.append(" %s\n" % (a))
-
- # Type -> 655
- if self.fields.has_key(655):
- for instance in self.fields[655]:
- gf = ''
- for sub in instance[2]:
- gf += sub[1] + " -- "
- gf = gf[:-4]
- xml.append(" %s\n" % (gf))
-
- # Non Standard: Identifier -> ISSN/ISBN
- for k in [20,22]:
- if self.fields.has_key(k):
- for instance in self.fields[k]:
- for sub in instance[2]:
- if sub[0] == 'a':
- xml.append(" %s\n" % (sub[1]))
-
- # Non Standard: Description -> 300
- if self.fields.has_key(300):
- for instance in self.fields[300]:
- desc = ''
- for sub in instance[2]:
- desc += sub[1] + " "
- desc = desc[:-1]
- xml.append(" %s\n" % (desc))
-
- xml.append("")
- return ''.join(xml)
-
-
- def toMODS(self):
- """ Tranform MARC record into MODS according to CrossWalk """
- xml = ["\n"]
-
- # --- TitleInfo Fields ---
- if self.fields.has_key(245):
- instance = self.fields[245][0][2]
- xml.append(" \n ")
- insubtitle = 0
- for sub in instance:
- if (sub[0] in ['a', 'f', 'g', 'k']):
- xml.append(escape(sub[1]))
- xml.append(' ')
- elif (sub[0] == 'b'):
- xml.append("\n %s " % (escape(sub[1])))
- insubtitle = 1
- if (insubtitle):
- xml.append("\n \n")
- else:
- xml.append("\n \n")
-
- if self.fields.has_key(210):
- instance = self.fields[210][0][2]
- subf = {}
- for sub in instance:
- subf[sub[0]] = escape(sub[1])
- xml.append(' \n %s\n' % (subf['a']))
- if (subf.has_key('b')):
- xml.append(' %s\n' % (subf['b']))
- xml.append(' \n')
-
- if self.fields.has_key(242):
- instance = self.fields[242][0][2]
- subf = {}
- for sub in instance:
- subf[sub[0]] = escape(sub[1])
- if (subf.has_key('i')):
- label = ' displayLabel="%s"' % (subf['i'])
- else:
- label = ''
- xml.append(' \n %s\n' % (label, subf['a']))
- if (subf.has_key('b')):
- xml.append(' %s\n' % (subf['b']))
- if (subf.has_key('n')):
- xml.append(' %s\n' % (subf['n']))
- if (subf.has_key('p')):
- xml.append(' %s\n' % (subf['p']))
- xml.append(' \n')
-
-
- if self.fields.has_key(246):
- full = self.fields[246][0]
- subfield2 = full[1]
- instance = full[2]
- subf = {}
- for sub in instance:
- subf[sub[0]] = escape(sub[1])
- if (subfield2 == '1'):
- xml.append(' \n %s\n' % (subf['a']))
- else:
- xml.append(' \n %s\n' % (subf['a']))
-
- if (subf.has_key('b')):
- xml.append(' %s\n' % (subf['b']))
- if (subf.has_key('n')):
- xml.append(' %s\n' % (subf['n']))
- if (subf.has_key('p')):
- xml.append(' %s\n' % (subf['p']))
- xml.append(' \n')
-
- if self.fields.has_key(130):
- uniform = self.fields[130][0][2]
- elif self.fields.has_key(240):
- uniform = self.fields[240][0][2]
- else:
- uniform = []
- if (uniform):
- subf = {}
- for sub in uniform:
- subf[sub[0]] = escape(sub[1])
- xml.append(' \n %s\n' % (subf['a']))
- if (subf.has_key('n')):
- xml.append(' %s\n' % (subf['n']))
- if (subf.has_key('p')):
- xml.append(' %s\n' % (subf['p']))
- xml.append(' \n')
-
-
- # --- Name Fields ---
- # Creator -> 100,110,111, 700,710,711
- authorKeyTypes = {100 : 'personal', 110 : 'corporate', 111 : 'conference', 700 : 'personal', 710 : 'corporate', 711 : 'conference'}
-
- for k in authorKeyTypes.keys():
- if self.fields.has_key(k):
- for instance in self.fields[k]:
- subf = {}
- for sub in instance[2]:
- subf[sub[0]] = escape(sub[1])
- xml.append(' \n' % (k))
- xml.append(' \n' % (authorKeyTypes[k]))
- xml.append(' creator\n')
- xml.append(' %s\n' % (subf['a']))
- if (subf.has_key('d')):
- xml.append(' %s\n' % (subf['d']))
- if (subf.has_key('b')):
- if (k in [100,700]):
- xml.append(' %s\n' % (subf['b']))
- else:
- xml.append(' %s\n' % (subf['b']))
- if (subf.has_key('e')):
- xml.append(' %s\n' % (subf['e']))
- if (subf.has_key('4')):
- xml.append(' %s\n' % (subf['4']))
- xml.append(' \n')
-
- ldr = self.fields[0][0]
- type = ldr[1]
- types = {'a' : 'text', 't' : 'text', 'e' : 'cartographic', 'f' : 'cartographic', 'c' : 'notated music', 'd' : 'notated music', 'i' : 'sound recording - nonmusical', 'j' : 'sound recording - musical', 'k' : 'still image', 'g' : 'moving image', 'r' : 'three dimensional object', 'm' : 'software, multimedia', 'p' : 'mixed material'}
- if (types.has_key(type)):
- xml.append(' %s\n' % (types[type]))
-
-
- if (self.fields.has_key(8)):
- instance = self.fields[8][0]
- # XXX LONG set of checks for type and various 008 positions :(
- if (len(instance) > 33 and instance[33] == '0'):
- xml.append(' non fiction\n')
-
- if self.fields.has_key(655):
- for instance in self.fields[655]:
- gf = ''
- for sub in instance[2]:
- gf += escape(sub[1]) + " -- "
- gf = gf[:-4]
- xml.append(" %s\n" % (gf))
-
- # PublicationInfo from 260
- f260 = self.fields.get(260, [])
- f44 = self.fields.get(44, [])
- f46 = self.fields.get(46, [])
- f250 = self.fields.get(250, [])
- f310 = self.fields.get(310, [])
- f321 = self.fields.get(321, [])
- f8 = self.fields.get(8, [])
-
- if f260 or f46 or f250 or f310 or f321:
- xml.append(' \n')
-
- if (f8 and len(f8[0]) > 18 ):
- loc = f8[0][15:18]
- if (loc <> '   ' and loc <> '|||'):
- xml.append(' %s\n' % (loc))
-
- if (f44):
- for s in f44[0][2]:
- if (s[0] == 'c'):
- xml.append(' %s\n' % (escape(s[1])))
- if (f260):
- instance = self.fields[260][0][2]
- subf260 = {}
- for sub in instance:
- subf260[sub[0]] = escape(sub[1])
- if (subf260.has_key('a')):
- xml.append(' %s\n' % (subf260['a']))
- if (subf260.has_key('b')):
- xml.append(' %s\n' % (subf260['b']))
- if (subf260.has_key('c')):
- xml.append(' %s\n' % (subf260['c']))
-
- if (f8 and len(f8[0]) > 6):
- f8type = f8[0][6]
- if (f8type in ['e', 'p', 'r', 's', 't']):
- date = f8[0][7:11]
- if (date <> '    '):
- xml.append(' %s\n' % (date))
- if (f8type in ['c', 'd', 'i', 'k', 'm', 'u', 'q']):
- if (f8type == 'q'):
- attrib = ' qualifier="questionable"'
- else:
- attrib = ""
- start = f8[0][7:11]
- if (start <> '    '):
- xml.append(' %s\n' % (attrib, start))
- end = f8[0][11:15]
- if (end <> '    '):
- xml.append(' %s\n' % (attrib, end))
-
- if (f260):
- if subf260.has_key('g'):
- xml.append(' %s\n' % (escape(subf260['g'])))
-
- if (f46):
- instance = f46[0][2]
- subf46 = {}
- for s in instance:
- subf46[s[0]] = escape(s[1])
- if (subf46.has_key('k')):
- xml.append(' %s\n' % (subf46['k']))
- if (subf46.has_key('l')):
- xml.append(' %s\n' % (subf46['l']))
- if (subf46.has_key('m')):
- xml.append(' %s\n' % (subf46['m']))
- if (subf46.has_key('n')):
- xml.append(' %s\n' % (subf46['n']))
- if (subf46.has_key('j')):
- xml.append(' %s\n' % (subf46['j']))
-
- if (f250):
- for s in f250[0][2]:
- if (s[0] == 'a'):
- xml.append(' %s\n' % (escape(s[1])))
- break
-
- if (self.fields.has_key(0) and len(self.fields[0][0]) > 2):
- f0type = self.fields[0][0][2]
- if (f0type in ['b', 'i', 's']):
- xml.append(' continuing\n')
- elif (f0type in ['a', 'c', 'd', 'm']):
- xml.append(' monographic\n')
-
- if (f310):
- subf310 = {'a' : '', 'b' : ''}
- for s in f310[0][2]:
- subf310[s[0]] = escape(s[1])
- xml.append(' %s %s\n' % (subf310['a'], subf310['b']))
- if (f321):
- subf321 = {'a' : '', 'b' : ''}
- for s in f321[0][2]:
- subf321[s[0]] = escape(s[1])
- xml.append(' %s %s\n' % (subf321['a'], subf321['b']))
- xml.append(' \n')
-
-
- # --- Language ---
- if (f8 and len(f8[0]) > 38):
- lang = f8[0][35:38]
- if (lang <> '   '):
- xml.append(' %s\n' % (lang))
- if self.fields.has_key(41):
- a = two = ''
- for sub in self.fields[41][0][2]:
- if sub[0] == 'a':
- a = sub[1]
- elif sub[0] == '2':
- two = sub[1]
- elif sub[0] == 'd' and not a:
- a = sub[1]
- elif sub[0] == 'e' and not a:
- a = sub[1]
-
- if a and not two:
- xml.append(' %s\n' % (escape(a)))
- elif a:
- xml.append(' %s\n' % (escape(two), escape(a)))
-
- # --- Physical Description ---
- # XXX: Better field 008, 242,245,246$h, 256$a
- f300 = self.fields.get(300, [])
- if (f8 and len(f8[0]) > 23):
- f8_23 = self.fields[8][0][23]
- else:
- f8_23 = ' '
- if (f300 or f8_23 == ' '):
- xml.append(" \n")
- if (f8_23 == ' '):
- xml.append(' \n')
- if f300:
- desclist = []
- for s in f300[0][2]:
- desclist.append(escape(s[1]))
- desc = ' '.join(desclist)
- xml.append(" %s\n" % (desc))
- xml.append(" \n")
-
- # Abstract
- if self.fields.has_key(520):
- xml.append(' ')
- for sub in self.fields[520][0][2]:
- if sub[0] == 'a' or sub[0] == 'b':
- xml.append(escape(sub[1]))
- xml.append("\n")
-
- # --- Table of Contents ---
- if (self.fields.has_key(505)):
- desclist = []
- for s in self.fields[505][0][2]:
- if (s[0] in ['a', 'g', 'r', 't']):
- desclist.append(escape(s[1]))
- toc = ' '.join(desclist)
- xml.append(' %s\n' % (toc))
-
- # XXX TargetAudience (field 8 again)
-
- # --- Note ---
- if (self.fields.has_key(500)):
- for n in (self.fields[500]):
- xml.append(' ');
- for s in n[2]:
- if (s[0] == 'a'):
- xml.append(escape(s[1]))
- xml.append('\n')
-
- # --- Subject ---
- subjectList = [600, 610, 611, 630, 650, 651, 653]
- for s in subjectList:
- if self.fields.has_key(s):
- for instance in self.fields[s]:
- xml.append(" \n")
-
- if (s in [600, 610, 611]):
- stype = {600 : 'personal', 610 : 'corporate', 611 : 'conference'}[s]
- xml.append(' \n' % (stype))
- for sub in instance[2]:
- val = escape(sub[1])
- if (sub[0] == 'a'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'b'):
- attrib = ''
- if (s == 600):
- attrib = ' type="termsOfAddress"'
- xml.append(' %s\n' % (attrib, val))
- elif (sub[0] == 'd'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'e'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == '4'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'u'):
- xml.append(' %s\n' % (val))
- elif sub[0] in ['v', 'x']:
- xml.append(' %s\n' % (val))
- elif sub[0] == 'y':
- xml.append(' %s\n' % (val))
- elif sub[0] == 'z':
- xml.append(' %s\n' % (val))
- xml.append(' \n')
- elif (s == 630):
- for sub in instance[2]:
- val = escape(sub[1])
- if (sub[0] == 'a'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'p'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'n'):
- xml.append(' %s\n' % (val))
- elif sub[0] in ['v', 'x']:
- xml.append(' %s\n' % (val))
- elif sub[0] == 'y':
- xml.append(' %s\n' % (val))
- elif sub[0] == 'z':
- xml.append(' %s\n' % (val))
- elif (s in [650, 653]):
- for sub in instance[2]:
- val = escape(sub[1])
- if (sub[0] == 'a'):
- xml.append(' %s\n' % (val))
- elif sub[0] in ['v', 'x']:
- xml.append(' %s\n' % (val))
- elif sub[0] == 'y':
- xml.append(' %s\n' % (val))
- elif sub[0] == 'z':
- xml.append(' %s\n' % (val))
- elif (s == 651):
- for sub in instance[2]:
- val = escape(sub[1])
- if (sub[0] == 'a'):
- xml.append(' %s\n' % (val))
- elif sub[0] in ['v', 'x']:
- xml.append(' %s\n' % (val))
- elif sub[0] == 'y':
- xml.append(' %s\n' % (val))
- elif sub[0] == 'z':
- xml.append(' %s\n' % (val))
-
- xml.append(" \n")
- if (self.fields.has_key(45)):
- full = self.fields[45][0]
- if (full[0] in ['0', '1']):
- for x in full[2]:
- if (x[0] == 'b'):
- xml.append(' %s\n' % (escape(x[1])))
-
- if (self.fields.has_key(43)):
- for sub in self.fields[43][0][2]:
- if (sub[0] == 'a'):
- xml.append(' %s\n' % (escape(sub[1])))
- elif (sub[0] == 'a'):
- xml.append(' %s\n' % (escape(sub[1])))
-
- if (self.fields.has_key(752)):
- xml.append(' \n')
- for sub in self.fields[752][0][2]:
- val = escape(sub[1])
- if (sub[0] == 'a'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'b'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'c'):
- xml.append(' %s\n' % (val))
- elif (sub[0] == 'd'):
- xml.append(' %s\n' % (val))
- xml.append(' ')
-
-
- if (self.fields.has_key(255)):
- subf = {}
- xml.append(' \n')
- for s in self.fields[255][0][2]:
- subf[s[0]] = escape(s[1])
- if (subf.has_key('c')):
- xml.append(' %s\n' % (subf['c']))
- if (subf.has_key('a')):
- xml.append(' %s\n' % (subf['a']))
- if (subf.has_key('b')):
- xml.append(' %s\n' % (subf['b']))
- xml.append(' \n')
-
- if (self.fields.has_key(656)):
- for s in self.fields[656][0][2]:
- if (s[0] == 'a'):
- xml.append(' %s\n' % (escape(s[1])))
-
- # XXX: 34
-
- # XXX: Classification, 84
-
- cfields = {50 : 'lcc', 82 : 'ddc', 80 : 'udc', 60 : 'nlm'}
- for k in cfields:
- if (self.fields.has_key(k)):
- for sub in self.fields[k][0][2]:
- stuff = []
- if (sub[0] == 'a'):
- stuff.append(escape(sub[1]))
- elif (sub[0] == 'b'):
- stuff.append(escape(sub[1]))
- txt = ' '.join(stuff)
- xml.append(' %s\n' % (cfields[k], txt))
-
- if (self.fields.has_key(86)):
- full = self.fields[86][0]
- ind1 = full[0]
- if (ind1 == '0'):
- auth = 'sudocs'
- elif (ind1 == '1'):
- auth = 'candocs'
- else:
- auth = ''
- if (auth):
- for s in full[2]:
- if (s[0] == 'a'):
- xml.append(' %s\n' % (auth, escape(s[1])))
-
-
- # XXX: relatedItem, 7XX
-
- # --- Identifier ---
- if self.fields.has_key(20):
- for instance in self.fields[20]:
- for sub in instance[2]:
- if sub[0] == 'a':
- xml.append(' %s\n' % (escape(sub[1])))
- if self.fields.has_key(22):
- for instance in self.fields[22]:
- for sub in instance[2]:
- if sub[0] == 'a':
- xml.append(' %s\n' % (escape(sub[1])))
- if self.fields.has_key(24):
- for instance in self.fields[24]:
- for sub in instance[2]:
- if sub[0] == 'a':
- xml.append(' %s\n' % (escape(sub[1])))
- if self.fields.has_key(28):
- for instance in self.fields[28]:
- for sub in instance[2]:
- if sub[0] == 'a':
- xml.append(' %s\n' % (escape(sub[1])))
-
- # XXX: location, accessCondition
-
- # --- recordInformation ---
- xml.append(' \n')
- if (self.fields.has_key(40)):
- for instance in self.fields[40]:
- for sub in instance[2]:
- if sub[0] == 'a':
- xml.append(' %s\n' % (escape(sub[1])))
- if (self.fields.has_key(8)):
- date = self.fields[8][0][0:6]
- if (date <> '      '):
- xml.append(' %s\n' % (date))
-
- if (self.fields.has_key(1)):
- xml.append(' %s\n' % (self.fields[1][0]))
- if (self.fields.has_key(40)):
- instance = self.fields[40][0][2]
- for s in instance:
- if (s[0] == 'b'):
- xml.append(' %s\n' % (escape(s[1])))
-
- xml.append(' \n')
- xml.append("")
- txt = ''.join(xml)
- return txt
-
-from PyZ3950 import marc_to_unicode
-
-# see http://www.loc.gov/marc/specifications/speccharmarc8.html
-
-import unicodedata
-
-class MARC8_to_Unicode:
- """Converts MARC-8 to Unicode. Note that currently, unicode strings
- aren't normalized, and some codecs (e.g. iso8859-1) will fail on
- such strings. When I can require python 2.3, this will go away.
-
- Warning: MARC-8 EACC (East Asian characters) makes some
- distinctions which aren't captured in Unicode. The LC tables give
- the option of mapping such characters either to a Unicode private
- use area, or a substitute character which (usually) gives the
- sense. I've picked the second, so this means that the MARC data
- should be treated as primary and the Unicode data used for display
- purposes only. (If you know either of fonts designed for use
- with LC's private-use Unicode assignments, or of attempts to
- standardize Unicode characters to allow round-trips from EACC,
- or if you need the private-use Unicode character translations,
- please inform me, asl2@pobox.com.)"""
-
-
-
- basic_latin = 0x42
- ansel = 0x45
- def __init__ (self, G0 = basic_latin, G1 = ansel):
- self.g0 = G0
- self.g1 = G1
-
- def is_multibyte (self, charset):
- return charset == 0x31
-
- def translate (self, s):
- uni_list = []
- combinings = []
- pos = 0
- while pos < len (s):
- if s[pos] == '\x1b':
- if (s[pos +1] == s[pos+2] and
- (s[pos +1] == '$' or s[pos+1] == '(')):
- self.g0 = ord (s[pos+3])
- pos = pos + 4
- continue
- mb_flag = self.is_multibyte (self.g0)
-
- if mb_flag:
- d = (ord (s[pos]) * 65536 +
- ord (s[pos+1]) * 256 +
- ord (s[pos+2]))
- pos += 3
- else:
- d = ord (s[pos])
- pos += 1
-
- if (d < 0x20 or
- (d > 0x80 and d < 0xa0)):
- uni_list.append (unichr (d))
- continue
-
- if d > 0x80 and not mb_flag:
- (uni, cflag) = marc_to_unicode.codesets [self.g1] [d]
- else:
- (uni, cflag) = marc_to_unicode.codesets [self.g0] [d]
-
- if cflag:
- combinings.append (unichr (uni))
- else:
- uni_list.append (unichr (uni))
- if len (combinings) > 0:
- uni_list += combinings
- combinings = []
- # what to do if combining chars left over?
- uni_str = u"".join (uni_list)
-
- # unicodedata.normalize not available until Python 2.3
- if hasattr (unicodedata, 'normalize'):
- uni_str = unicodedata.normalize ('NFC', uni_str)
-
- return uni_str
-
-def test_convert (s, enc):
- conv = MARC8_to_Unicode ()
- converted = conv.translate (s)
- converted = unicodedata.normalize ('NFC', converted)
- print converted.encode (enc)
-
- print repr (converted)
-
-
-
-if __name__ == '__main__':
- # My console is usually set to iso-8859-1. Sorry if yours is different.
- test_convert('''The oldest cuisine in the world : cooking in
- Mesopotamia / Jean Bott\xe2ero ; translated by Teresa Lavender Fagan.''',
- 'iso-8859-1')
-
- test_convert (
- """$6 245-02/$1$a \x1b$$1!M>!`o!#!KPa!\\O!#!\x1b((B/$c \x1b$$1!1?!R_!#!-bb!#!!Gm!>`!#!\x1b((B; \x1b$$1!RY!YF!#!9Z6!#!!J(!Yi!#!\x1b((B;\x1b$$1!#!!BX!O>!#!!4`!4)!#!!\\e!#!!Hk!:M!#!\x1b((B... [et al.] ; \x1b$$1!Iq!MH!#!!9%!];!#!!KG!#!\x1b((B= Great garnishes / author, Huang Su-Huei ; translator, Yen-Jen Lai ; collaborators, Cheng-Tzu Chiu ... [et al.] ; photographers, Aki Ohno.""",
- 'utf-8')
-
-
- for f in sys.argv[1:]:
- marc_file = open(f, 'rb')
- marc_text = marc_file.read ()
- while 1:
- marc_data1 = MARC(marc_text)
- print str (marc_data1)
- new = marc_data1.get_MARC ()
- marc_data2 = MARC (marc_text)
- k1 = marc_data1.fields.keys ()
- k2 = marc_data2.fields.keys ()
- assert (k1 == k2)
- for field in k1:
- same = (marc_data1.fields [field] ==
- marc_data2.fields [field])
- assert (same)
- marc_text = marc_text[marc_data1.reclen:]
- if len (marc_text) == 0:
- break
- marc_file.close ()
-
-
diff --git a/python/PyZ3950/zoom.py b/python/PyZ3950/zoom.py
deleted file mode 100644
index ff07a92..0000000
--- a/python/PyZ3950/zoom.py
+++ /dev/null
@@ -1,965 +0,0 @@
-#!/usr/bin/env python
-
-"""Implements the ZOOM 1.4 API (http://zoom.z3950.org/api)
-for Z39.50.
-
-Some global notes on the binding (these will only make sense when read
-after the API document):
-
-Get/Set Option is implemented as member attribute access or
-assignment. Implementations are encouraged to throw an AttributeError
-for unsupported (or, possibly, mistyped) attributes. (Production
-applications are encouraged to catch such errors.)
-
-All errors are reported as exceptions deriving from ZoomError (or, at
-least, it's a bug if they aren't). Bib1Err is defined as part of the
-binding; all the rest are specific to this implementation.
-
-ResultSet provides a sequence interface, with standard Python
-iteration, indexing, and slicing. So if rs is a ResultSet, use len
-(rs) for Get_Size and rs[i] for Get_Record, or iterate with for r in
-rs: foo(r). Any attempt to access a record for which the server
-returned a surrogate diagnostic will raise the appropriate Bib1Err
-exception.
-
-For Record, Render_Record is implemented as Python __str__. The
-'syntax' member contains the string-format record syntax, and the
-'data' member contains the raw data.
-
-The following query types are supported:
-- "CCL", ISO 8777, (http://www.indexdata.dk/yaz/doc/tools.tkl#CCL)
-- "S-CCL", the same, but interpreted on the server side
-- "CQL", the Common Query Language, (http://www.loc.gov/z3950/agency/zing/cql/)
-- "S-CQL", the same, but interpreted on the server side
-- "PQF", Index Data's Prefix Query Format, (http://www.indexdata.dk/yaz/doc/tools.tkl#PQF)
-- "C2", Cheshire II query syntax, (http://cheshire.berkeley.edu/cheshire2.html#zfind)
-- "ZSQL", Z-SQL, see (http://archive.dstc.edu.au/DDU/projects/Z3950/Z+SQL/)
-- "CQL-TREE", a general-purpose escape allowing any object with a toRPN method to be used, e.g. the CQL tree objects
-
-ScanSet, like ResultSet, has a sequence interface. The i-th element
-is a dictionary. See the ScanSet documentation for supported keys.
-
-Sample usage:
- from PyZ3950 import zoom
- conn = zoom.Connection ('z3950.loc.gov', 7090)
- conn.databaseName = 'VOYAGER'
- conn.preferredRecordSyntax = 'USMARC'
- query = zoom.Query ('CCL', 'ti="1066 and all that"')
- res = conn.search (query)
- for r in res:
- print str(r)
- conn.close ()
-I hope everything else is clear from the docstrings and the abstract
-API: let me know if that's wrong, and I'll try to do better.
-
-For some purposes (I think the only one is writing Z39.50 servers),
-you may want to use the functions in the z3950 module instead. """
-
-from __future__ import nested_scopes
-
-__author__ = 'Aaron Lav (asl2@pobox.com)'
-__version__ = '1.0' # XXX
-
-import getopt
-import sys
-
-# TODO:
-# finish lang/charset (requires charset normalization, confer w/ Adam)
-# implement piggyback
-# implement schema (not useful)
-# implement setname (Impossible?)
-
-from PyZ3950 import z3950
-from PyZ3950 import ccl
-from PyZ3950 import asn1
-from PyZ3950 import zmarc
-from PyZ3950 import bib1msg
-from PyZ3950 import grs1
-from PyZ3950 import oids
-
-# Azaroth 2003-12-04:
-from PyZ3950 import CQLParser, SRWDiagnostics, pqf
-from PyZ3950 import c2query as c2
-asn1.register_oid (oids.Z3950_QUERY_SQL, z3950.SQLQuery)
-
-
-def my_enumerate (l): # replace w/ enumerate when we go to Python 2.3
- return zip (range (len (l)), l)
-
-trace_extract = 0
-"""trace extracting records from search/present reqs"""
-
-class ZoomError (Exception):
- """Base class for all errors reported from this module"""
- pass
-
-class ConnectionError(ZoomError):
- """Exception for TCP error"""
- pass
-
-class ClientNotImplError (ZoomError):
- """Exception for ZOOM client-side functionality not implemented (bug
- author)"""
- pass
-
-class ServerNotImplError (ZoomError):
- """Exception for function not implemented on server"""
- pass
-
-class QuerySyntaxError (ZoomError):
- """Exception for query not parsable by client"""
- pass
-
-class ProtocolError (ZoomError):
- """Exception for malformatted server response"""
- pass
-
-class UnexpectedCloseError (ProtocolError):
- """Exception for unexpected (z3950, not tcp) close from server"""
- pass
-
-class UnknownRecSyn (ZoomError):
- """Exception for unknown record syntax returned from server"""
- pass
-
-class Bib1Err (ZoomError):
- """Exception for BIB-1 error"""
- def __init__ (self, condition, message, addtlInfo):
- self.condition = condition
- self.message = message
- self.addtlInfo = addtlInfo
- ZoomError.__init__ (self)
- def __str__ (self):
- return "Bib1Err: %d %s %s" % (self.condition, self.message, self.addtlInfo)
-
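-# Usage sketch (illustrative): accessing a record for which the server
-# returned a surrogate diagnostic raises Bib1Err, so callers typically write
-#     try:
-#         rec = resultset[i]
-#     except Bib1Err, err:
-#         print "Bib-1 condition", err.condition, err.message, err.addtlInfo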
-
-class _ErrHdlr:
- """Error-handling services"""
- err_attrslist = ['errCode','errMsg', 'addtlInfo']
- def err (self, condition, addtlInfo, oid):
- """Translate condition + oid to message, save, and raise exception"""
- self.errCode = condition
- self.errMsg = bib1msg.lookup_errmsg (condition, oid)
- self.addtlInfo = addtlInfo
- raise Bib1Err (self.errCode, self.errMsg, self.addtlInfo)
- def err_diagrec (self, diagrec):
- (typ, data) = diagrec
- if typ == 'externallyDefined':
- raise ClientNotImplError ("Unknown external diagnostic: " + str (data))
- addinfo = data.addinfo [1] # don't care about v2 vs v3
- self.err (data.condition, addinfo, data.diagnosticSetId)
-
-
-_record_type_dict = {}
-"""Map oid to renderer, field-counter, and field-getter functions"""
-
-def _oid_to_key (oid):
- for (k,v) in _record_type_dict.items ():
- if v.oid == oid:
- return k
- raise UnknownRecSyn (oid)
-
-def _extract_attrs (obj, attrlist):
- kw = {}
- for key in attrlist:
- if hasattr (obj, key):
- kw[key] = getattr (obj, key)
- return kw
-
-class _AttrCheck:
- """Prevent typos"""
- attrlist = []
- not_implement_attrs = []
- def __setattr__ (self, attr, val):
- """Ensure attr is in attrlist (list of allowed attributes), or
- private (begins w/ '_'), or begins with 'X-' (reserved for users)"""
- if attr[0] == '_' or attr in self.attrlist or attr[0:2] == 'X-':
- self.__dict__[attr] = val
- elif (attr in self.not_implement_attrs):
- raise ClientNotImplError(attr)
- else:
- raise AttributeError (attr, val)
-
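-# Illustrative behaviour of _AttrCheck (names invented): in a subclass with
-# attrlist = ['databaseName'] and not_implement_attrs = ['proxy'],
-#     obj.databaseName = 'Default'   # allowed: listed in attrlist
-#     obj._scratch = 1               # allowed: private attribute
-#     obj.proxy = 'localhost'        # raises ClientNotImplError
-#     obj.databaseNam = 'oops'       # typo, raises AttributeError
-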
-class Connection(_AttrCheck, _ErrHdlr):
- """Connection object"""
-
- not_implement_attrs = ['piggyback',
- 'schema',
- 'proxy',
- 'async']
- search_attrs = ['smallSetUpperBound',
- 'largeSetLowerBound',
- 'mediumSetPresentNumber',
- 'smallSetElementSetNames',
- 'mediumSetElementSetNames']
- init_attrs = ['user',
- 'password',
- 'group',
- 'maximumRecordSize',
- 'preferredMessageSize',
- 'lang',
- 'charset',
- 'implementationId',
- 'implementationName',
- 'implementationVersion'
- ]
- scan_zoom_to_z3950 = {
- # translate names from ZOOM spec to Z39.50 spec names
- 'stepSize' : 'stepSize',
- 'numberOfEntries' : 'numberOfTermsRequested',
- 'responsePosition' : 'preferredPositionInResponse'
- }
-
- attrlist = search_attrs + init_attrs + scan_zoom_to_z3950.keys () + [
- 'databaseName',
- 'namedResultSets',
- 'preferredRecordSyntax', # these three inheritable by RecordSet
- 'elementSetName',
- 'presentChunk',
- 'targetImplementationId',
- 'targetImplementationName',
- 'targetImplementationVersion',
- 'host',
- 'port',
-
- ] + _ErrHdlr.err_attrslist
-
- _queryTypes = ['S-CQL', 'S-CCL', 'RPN', 'ZSQL']
- _cli = None
- host = ""
- port = 0
-
- # and now, some defaults
- namedResultSets = 1
- elementSetName = 'F'
- preferredRecordSyntax = 'USMARC'
- preferredMessageSize = 0x100000
- maximumRecordSize = 0x100000
- stepSize = 0
- numberOfEntries = 20 # for SCAN
- responsePosition = 1
- databaseName = 'Default'
- implementationId = 'PyZ3950'
- implementationName = 'PyZ3950 1.0/ZOOM v1.4'
- implementationVersion = '1.0'
- lang = None
- charset = None
- user = None
- password = None
- group = None
- presentChunk = 20 # for result sets
-
- def __init__(self, host, port, connect=1, **kw):
- """Establish connection to hostname:port. kw contains initial
- values for options, and is useful for options which affect
- the InitializeRequest. Currently supported values:
-
- user Username for authentication
- password Password for authentication
- group Group for authentication
- maximumRecordSize Maximum size in bytes of one record
- preferredMessageSize Maximum size in bytes for response
- lang 3 letter language code
- charset Character set
- implementationId Id for client implementation
- implementationName Name for client implementation
- implementationVersion Version of client implementation
-
- """
-
- self.host = host
- self.port = port
- self._resultSetCtr = 0
- for (k,v) in kw.items ():
- setattr (self, k, v)
- if (connect):
- self.connect()
-
- def connect(self):
- self._resultSetCtr += 1
- self._lastConnectCtr = self._resultSetCtr
-
- # Bump counters first, since even if we didn't reconnect
- # this time, we could have, and so any use of old connections
- # is an error. (Old cached-and-accessed data is OK to use:
- # cached but not-yet-accessed data is probably an error, but
- # a not-yet-caught error.)
-
- if self._cli <> None and self._cli.sock <> None:
- return
-
- initkw = {}
- for attr in self.init_attrs:
- initkw[attr] = getattr(self, attr)
- if (self.namedResultSets):
- options = ['namedResultSets']
- else:
- options = []
- initkw ['ConnectionError'] = ConnectionError
- initkw ['ProtocolError'] = ProtocolError
- initkw ['UnexpectedCloseError'] = UnexpectedCloseError
- self._cli = z3950.Client (self.host, self.port,
- optionslist = options, **initkw)
- self.namedResultSets = self._cli.get_option ('namedResultSets')
- self.targetImplementationId = getattr (self._cli.initresp, 'implementationId', None)
- self.targetImplementationName = getattr (self._cli.initresp, 'implementationName', None)
- self.targetImplementationVersion = getattr (self._cli.initresp, 'implementationVersion', None)
- if (hasattr (self._cli.initresp, 'userInformationField')):
- # weird. U of Chicago returns an EXTERNAL with nothing
- # but 'encoding', ('octet-aligned', '2545') filled in.
- if (hasattr (self._cli.initresp.userInformationField,
- 'direct_reference') and
- self._cli.initresp.userInformationField.direct_reference ==
- oids.Z3950_USR_PRIVATE_OCLC_INFO_ov):
-# see http://www.oclc.org/support/documentation/firstsearch/z3950/fs_z39_config_guide/ for docs
- oclc_info = self._cli.initresp.userInformationField.encoding [1]
- # the docs are a little unclear, but I presume we're
- # supposed to report failure whenever a failReason is given.
-
- if hasattr (oclc_info, 'failReason'):
- raise UnexpectedCloseError ('OCLC_Info ',
- oclc_info.failReason,
- getattr (oclc_info, 'text',
- ' no text given '))
-
-
-
- def search (self, query):
- """Search, taking Query object, returning ResultSet"""
- if (not self._cli):
- self.connect()
- assert (query.typ in self._queryTypes)
- dbnames = self.databaseName.split ('+')
- self._cli.set_dbnames (dbnames)
- cur_rsn = self._make_rsn ()
- recv = self._cli.search_2 (query.query,
- rsn = cur_rsn,
- **_extract_attrs (self, self.search_attrs))
- self._resultSetCtr += 1
- rs = ResultSet (self, recv, cur_rsn, self._resultSetCtr)
- return rs
- # and 'Error Code', 'Error Message', and 'Addt'l Info' methods still
-    # needed
- def scan (self, query):
- if (not self._cli):
- self.connect()
- self._cli.set_dbnames ([self.databaseName])
- kw = {}
- for k, xl in self.scan_zoom_to_z3950.items ():
- if hasattr (self, k):
- kw [xl] = getattr (self, k)
- return ScanSet (self._cli.scan (query.query, **kw))
- def _make_rsn (self):
- """Return result set name"""
- if self.namedResultSets:
- return "rs%d" % self._resultSetCtr
- else:
- return z3950.default_resultSetName
- def close (self):
- """Close connection"""
- self._cli.close ()
-
- def sort (self, sets, keys):
- """ Sort sets by keys, return resultset interface """
- if (not self._cli):
- self.connect()
-
- # XXX This should probably be shuffled down into z3950.py
- sortrelations = ['ascending', 'descending', 'ascendingByFrequency', 'descendingByFrequency']
-
- req = z3950.SortRequest()
- req.inputResultSetNames = []
- for s in sets:
- s._check_stale ()
- req.inputResultSetNames.append(s._resultSetName)
- cur_rsn = self._make_rsn()
- req.sortedResultSetName = cur_rsn
-
- zkeys = []
- for k in keys:
- zk = z3950.SortKeySpec()
- zk.sortRelation = sortrelations.index(k.relation)
- zk.caseSensitivity = k.caseInsensitive
- if (k.missingValueAction):
- zk.missingValueAction = (k.missingValueAction, None)
- if (k.missingValueData):
- zk.missingValueAction = ('missingValueData', k.missingValueData)
- value = k.sequence
- if (k.type == 'accessPoint'):
- if (value.typ <> 'RPN'):
- raise ValueError # XXX
- l = z3950.SortKey['sortAttributes']()
- l.id = value.query[1].attributeSet
- l.list = value.query[1].rpn[1][1].attributes
- seq = ('sortAttributes', l)
- elif (k.type == 'private'):
- seq = ('privateSortKey', value)
- elif (k.type == 'elementSetName'):
- spec = z3950.Specification()
- spec.elementSpec = ('elementSetName', value)
- seq = ('elementSpec', spec)
- else:
- raise ValueError # XXX
- spec = ('generic', seq)
- zk.sortElement = spec
- zkeys.append(zk)
- req.sortSequence = zkeys
- recv = self._cli.transact(('sortRequest', req), 'sortResponse')
-
- self._resultSetCtr += 1
- if (hasattr(recv, 'diagnostics')):
- diag = recv.diagnostics[0][1]
- self.err(diag.condition, diag.addinfo, diag.diagnosticSetId)
-
- if (not hasattr(recv, 'resultCount')):
- # First guess: sum of all input sets
- recv.resultCount = 0
- for set in sets:
- recv.resultCount += len(set)
- # Check for addInfo to override
- try:
- val = recv.otherInfo[0].information[1]
- if (val[:14] == 'Result-count: '):
- recv.resultCount = int(val[14:])
- except:
- pass
-
- rs = ResultSet (self, recv, cur_rsn, self._resultSetCtr)
- return rs
-
-
-class SortKey(_AttrCheck):
- attrlist = ['relation', 'caseInsensitive', 'missingValueAction', 'missingValueData', 'type', 'sequence']
- relation = "ascending"
- caseInsensitive = 1
- missingValueAction = ""
- missingValueData = ""
- type = "accessPoint"
- sequence = ""
-
- def __init__ (self, **kw):
- for k in kw.keys():
- setattr(self, k, kw[k])
-
-class Query:
- def __init__ (self, typ, query):
- """Creates Query object.
-Supported query types: CCL, S-CCL, CQL, S-CQL, PQF, C2, ZSQL, CQL-TREE
-"""
- typ = typ.upper()
-# XXX maybe replace if ... elif ... with dict mapping querytype to func
- if typ == 'CCL':
- self.typ = 'RPN'
- try:
- self.query = ccl.mk_rpn_query (query)
- except ccl.QuerySyntaxError, err:
- print "zoom raising", str (err), " for", query
- raise QuerySyntaxError (str(err))
- elif typ == 'S-CCL': # server-side ccl
- self.typ = typ
- self.query = ('type-2', query)
- elif typ == 'S-CQL': # server-side cql
- self.typ = typ
- xq = asn1.EXTERNAL()
- xq.direct_reference = oids.Z3950_QUERY_CQL_ov
- xq.encoding = ('single-ASN1-type', query)
- self.query = ('type_104', xq)
- elif typ == 'CQL': # CQL to RPN transformation
- self.typ = 'RPN'
- try:
- q = CQLParser.parse(query)
- rpnq = z3950.RPNQuery()
- # XXX Allow Attribute Architecture somehow?
- rpnq.attributeSet = oids.Z3950_ATTRS_BIB1_ov
- rpnq.rpn = q.toRPN()
- self.query = ('type_1', rpnq)
- except SRWDiagnostics.SRWDiagnostic, err:
- raise err
- except:
- raise QuerySyntaxError
- elif typ == 'PQF': # PQF to RPN transformation
- self.typ = 'RPN'
- try:
- self.query = pqf.parse(query)
- except:
- raise QuerySyntaxError
-
- elif typ == 'C2': # Cheshire2 Syntax
- self.typ = 'RPN'
- try:
- q = c2.parse(query)
- self.query = q[0]
- except:
- raise QuerySyntaxError
- elif typ == 'ZSQL': # External SQL
- self.typ = typ
- xq = asn1.EXTERNAL()
- xq.direct_reference = oids.Z3950_QUERY_SQL_ov
- q = z3950.SQLQuery()
- q.queryExpression = query
- xq.encoding = ('single-ASN1-type', q)
- self.query = ('type_104', xq)
- elif typ == 'CQL-TREE': # Tree to RPN
- self.typ = 'RPN'
- try:
- rpnq = z3950.RPNQuery()
- # XXX Allow Attribute Architecture
- rpnq.attributeSet = oids.Z3950_ATTRS_BIB1_ov
- rpnq.rpn = query.toRPN()
- self.query = ('type_1', rpnq)
- except SRWDiagnostics.SRWDiagnostic, err:
- raise err
- except:
- raise QuerySyntaxError
- else:
- raise ClientNotImplError ('%s queries not supported' % typ)
-
-
-class ResultSet(_AttrCheck, _ErrHdlr):
- """Cache results, presenting read-only sequence interface. If
- a surrogate diagnostic is returned for the i-th record, an
- appropriate exception will be raised on access to the i-th
- element (either access by itself or as part of a slice)."""
-
- inherited_elts = ['elementSetName', 'preferredRecordSyntax',
- 'presentChunk']
- attrlist = inherited_elts + _ErrHdlr.err_attrslist
- not_implement_attrs = ['piggyback',
- 'schema']
-
- def __init__ (self, conn, searchResult, resultSetName, ctr):
- """Only for creation by Connection object"""
- self._conn = conn # needed for 'option inheritance', see ZOOM spec
- self._searchResult = searchResult
- self._resultSetName = resultSetName
- self._records = {}
- self._ctr = ctr
- # _records is a dict indexed by preferredRecordSyntax of
- # dicts indexed by elementSetName of lists of records
- self._ensure_recs ()
-
- # whether there are any records or not, there may be
- # nonsurrogate diagnostics. _extract_recs will get them.
- if hasattr (self._searchResult, 'records'):
- self._extract_recs (self._searchResult.records, 0)
- def __getattr__ (self, key):
- """Forward attribute access to Connection if appropriate"""
- if self.__dict__.has_key (key):
- return self.__dict__[key]
- if key in self.inherited_elts:
- return getattr (self._conn, key) # may raise AttributeError
- raise AttributeError (key)
- def _make_keywords (self):
- """Set up dict of parms for present request"""
- kw = {}
- # need for translation here from preferredRecordSyntax to recsyn
- # is kinda pointless
- if hasattr (self, 'preferredRecordSyntax'):
- try:
- kw['recsyn'] = _record_type_dict [
- self.preferredRecordSyntax].oid
- except KeyError, err:
- raise ClientNotImplError ('Unknown record syntax ' +
- self.preferredRecordSyntax)
- if hasattr (self, 'elementSetName'):
- kw['esn'] = ('genericElementSetName', self.elementSetName)
- return kw
- def __len__ (self):
- """Get number of records"""
- return self._searchResult.resultCount
- def _pin (self, i):
- """Handle negative indices"""
- if i < 0:
- return i + len (self)
- return i
- def _ensure_recs (self):
- if not self._records.has_key (self.preferredRecordSyntax):
- self._records [self.preferredRecordSyntax] = {}
- self._records [self.preferredRecordSyntax][
- self.elementSetName] = [None] * len (self)
- if not self._records[self.preferredRecordSyntax].has_key (
- self.elementSetName):
- self._records [self.preferredRecordSyntax][
- self.elementSetName] = [None] * len (self)
-
- def _get_rec (self, i):
- return self._records [self.preferredRecordSyntax][
- self.elementSetName][i]
-
- def _check_stale (self):
- if self._ctr < self._conn._lastConnectCtr:
- raise ConnectionError ('Stale result set used')
- # XXX is this right?
- if (not self._conn.namedResultSets) and \
- self._ctr <> self._conn._resultSetCtr:
- raise ServerNotImplError ('Multiple Result Sets')
- # XXX or this?
-
- def _ensure_present (self, i):
- self._ensure_recs ()
- if self._get_rec (i) == None:
- self._check_stale ()
- maxreq = self.presentChunk
- if maxreq == 0: # get everything at once
- lbound = i
- count = len (self) - lbound
- else:
- lbound = (i / maxreq) * maxreq
- count = min (maxreq, len (self) - lbound)
- kw = self._make_keywords ()
- if self._get_rec (lbound) == None:
- presentResp = self._conn._cli.present (
- start = lbound + 1, # + 1 b/c 1-based
- count = count,
- rsn = self._resultSetName,
- **kw)
- if not hasattr (presentResp, 'records'):
- raise ProtocolError (str (presentResp))
- self._extract_recs (presentResp.records, lbound)
- # Maybe there was too much data to fit into
- # range (lbound, lbound + count). If so, try
- # retrieving just one record. XXX could try
- # retrieving more, up to next cache bdary.
- if i <> lbound and self._get_rec (i) == None:
- presentResp = self._conn._cli.present (
- start = i + 1,
- count = 1,
- rsn = self._resultSetName,
- **kw)
- self._extract_recs (presentResp.records, i)
- rec = self._records [self.preferredRecordSyntax][
- self.elementSetName][i]
- if rec <> None and rec.is_surrogate_diag ():
- rec.raise_exn ()
- def __getitem__ (self, i):
- """Ensure item is present, and return a Record"""
- i = self._pin (i)
- if i >= len (self):
- raise IndexError
- self._ensure_present (i)
- return self._records [self.preferredRecordSyntax][
- self.elementSetName][i]
- def __getslice__(self, i, j):
- i = self._pin (i)
- j = self._pin (j)
- if j > len (self):
- j = len (self)
- for k in range (i, j):
- self._ensure_present (k)
- if len (self._records) == 0: # XXX is this right?
- return []
- return self._records[self.preferredRecordSyntax][
- self.elementSetName] [i:j]
- def _extract_recs (self, records, lbound):
- (typ, recs) = records
- if trace_extract:
- print "Extracting", len (recs), "starting at", lbound
- if typ == 'nonSurrogateDiagnostic':
- self.err (recs.condition, "", recs.diagnosticSetId)
- elif typ == 'multipleNonSurDiagnostics':
- # see Zoom mailing list discussion of 2002/7/24 to justify
- # ignoring all but first error.
- diagRec = recs [0]
- self.err_diagrec (diagRec)
- if (typ <> 'responseRecords'):
- raise ProtocolError ("Bad records typ " + str (typ) + str (recs))
- for i,r in my_enumerate (recs):
- r = recs [i]
- dbname = getattr (r, 'name', '')
- (typ, data) = r.record
- if (typ == 'surrogateDiagnostic'):
- rec = SurrogateDiagnostic (data)
-
- elif typ == 'retrievalRecord':
- oid = data.direct_reference
- dat = data.encoding
- (typ, dat) = dat
- if (oid == oids.Z3950_RECSYN_USMARC_ov):
- if typ <> 'octet-aligned':
- raise ProtocolError (
- "Weird record EXTERNAL MARC type: " + typ)
- rec = Record (oid, dat, dbname)
- else:
- raise ProtocolError ("Bad typ %s data %s" %
- (str (typ), str(data)))
- self._records[self.preferredRecordSyntax][
- self.elementSetName][lbound + i] = rec
- def delete (self): # XXX or can I handle this w/ a __del__ method?
- """Delete result set"""
- res = self._conn._cli.delete (self._resultSetName)
- if res == None: return # server doesn't support Delete
- # XXX should I throw an exn for delete errors? Probably.
-
- # and 'Error Code', 'Error Message', and 'Addt'l Info' methods
-
- def sort(self, keys):
- return self._conn.sort([self], keys)
-
-
-class SurrogateDiagnostic(_ErrHdlr):
- """Represent surrogate diagnostic. Raise appropriate exception
- on access to syntax or data, or when raise_exn method is called.
- Currently, RecordSet relies on the return from is_surrogate_diag (),
- and calls raise_exn based on that."""
- def __init__ (self, diagrec):
- self.diagrec = diagrec
- def is_surrogate_diag (self):
- return 1
- def raise_exn (self):
- self.err_diagrec (self.diagrec)
- def __getattr__ (self, attr):
- if attr == 'data' or attr == 'syntax':
- self.raise_exn ()
- return _ErrHdlr.__getattr (self, attr)
-
-class Record:
- """Represent retrieved record. 'syntax' attribute is a string,
- 'data' attribute is the data, which is:
-
- USMARC -- raw MARC data
- SUTRS -- a string (possibly in the future unicode)
- XML -- ditto
- GRS-1 -- a tree (see grs1.py for details)
- EXPLAIN -- a hard-to-describe format (contact me if you're actually \
-using this)
- OPAC -- ditto
-
- Other representations are not yet defined."""
- def __init__ (self, oid, data, dbname):
- """Only for use by ResultSet"""
- self.syntax = _oid_to_key (oid)
- self._rt = _record_type_dict [self.syntax]
- self.data = self._rt.preproc (data)
- self.databaseName = dbname
- def is_surrogate_diag (self):
- return 0
- def get_fieldcount (self):
- """Get number of fields"""
- return self._rt.fieldcount (self.data)
- def get_field (self,spec):
- """Get field"""
- return self._rt.field (self.data, spec)
- def __str__ (self):
- """Render printably"""
- s = self._rt.renderer (self.data)
- return 'Rec: ' + str (self.syntax) + " " + s
-
-class _RecordType:
- """Map syntax string to OID and per-syntax utility functions"""
- def __init__ (self, name, oid, renderer = lambda v:v,
- fieldcount = lambda v:1, field = None, preproc = lambda v:v):
- """Register syntax"""
- self.oid = oid
- self.renderer = renderer
- self.fieldcount = fieldcount
- self.field = field
- self.preproc = preproc
- _record_type_dict [name] = self
-
-# XXX do I want an OPAC class? Probably, and render_OPAC should be
-# a member function.
-
-
-def render_OPAC (opac_data):
- s_list = []
- biblio_oid = opac_data.bibliographicRecord.direct_reference
- if (biblio_oid == z3950.Z3950_RECSYN_USMARC_ov):
- bib_marc = zmarc.MARC (opac_data.bibliographicRecord.encoding [1])
- s_list.append ("Bibliographic %s\n" % (str (bib_marc),) )
- else:
- s_list.append ("Unknown bibliographicRecord OID: " + str(biblio_oid))
- for i, hd in my_enumerate (opac_data.holdingsData):
- typ, data = hd
- s_list.append ('Holdings %d:' % (i,))
- if typ == 'holdingsAndCirc':
- def render (item, level = 1):
- s_list = []
- if isinstance (item, asn1.StructBase):
- for attr, val in item.__dict__.items ():
- if attr [0] <> '_':
- s_list.append ("%s%s: %s" % (
- "\t" * level, attr, "\n".join(render (val, level + 1))))
- elif (isinstance (item, type ([])) and len (item) > 0
- and isinstance (item [0], asn1.StructBase)):
- s_list.append ("") # generate newline
- for i, v in my_enumerate (item):
- s_list.append ("\t" * (level + 1) + str (i))
- s_list += render (v, level + 1)
- else:
- s_list.append (repr (item))
- return s_list
- s_list.append ("\n".join (render (data)))
- elif typ == 'marcHoldingsRecord':
- hold_oid = data.direct_reference
- if hold_oid == z3950.Z3950_RECSYN_USMARC_ov:
- holdings_marc = zmarc.MARC (data.encoding [1])
- s_list.append ("Holdings %s\n" % (str (holdings_marc),))
- else:
- s_list.append ("Unknown holdings OID: " + str (hold_oid))
- else:
- s_list.append ("Unknown holdings type: " + typ)
- # shouldn't happen unless z39.50 definition is extended
- return "\n".join (s_list)
-
-_RecordType ('USMARC', z3950.Z3950_RECSYN_USMARC_ov,
- renderer = lambda v: str(zmarc.MARC(v)))
-_RecordType ('UKMARC', z3950.Z3950_RECSYN_UKMARC_ov,
- renderer = lambda v: str(zmarc.MARC(v)))
-_RecordType ('SUTRS', z3950.Z3950_RECSYN_SUTRS_ov)
-_RecordType ('XML', z3950.Z3950_RECSYN_MIME_XML_ov)
-_RecordType ('SGML', z3950.Z3950_RECSYN_MIME_SGML_ov)
-_RecordType ('GRS-1', z3950.Z3950_RECSYN_GRS1_ov,
- renderer = lambda v: str (v),
- preproc = grs1.preproc)
-_RecordType ('OPAC', z3950.Z3950_RECSYN_OPAC_ov, renderer = render_OPAC)
-_RecordType ('EXPLAIN', z3950.Z3950_RECSYN_EXPLAIN_ov,
- renderer = lambda v: str (v))
-
-class ScanSet (_AttrCheck, _ErrHdlr):
- """Hold result of scan.
- """
- zoom_to_z3950 = { # XXX need to provide more processing for attrs, alt
- 'freq' : 'globalOccurrences',
- 'display': 'displayTerm',
- 'attrs' : 'suggestedAttributes',
- 'alt' : 'alternativeTerm',
- 'other' : 'otherTermInfo'}
- attrlist = _ErrHdlr.err_attrslist
-
- def __init__ (self, scanresp):
- """For internal use only!"""
- self._scanresp = scanresp
- if hasattr (scanresp.entries, 'nonsurrogateDiagnostics'):
- self.err_diagrec (scanresp.entries.nonsurrogateDiagnostics[0])
- # Note that specification says that both entries and
- # nonsurrogate diags can be present. This code will always
- # raise the exn, and will need to be changed if both are needed.
-
- def __len__ (self):
- """Return number of entries"""
- return self._scanresp.numberOfEntriesReturned
- def _get_rec (self, i):
- if (not hasattr(self._scanresp.entries, 'entries')):
- raise IndexError
- t = self._scanresp.entries.entries[i]
- if t[0] == 'termInfo':
- return t[1]
- else:
- # Only way asserts can fail here is if someone changes
- # the Z39.50 ASN.1 definitions.
- assert (t[0] == 'surrogateDiagnostic')
- diagRec = t[1]
- if diagRec [0] == 'externallyDefined':
- raise ClientNotImplError (
- 'Scan unknown surrogate diagnostic type: ' +
- str (diagRec))
- assert (diagRec[0] == 'defaultFormat')
- defDiagFmt = diagRec [1]
- self.err (defDiagFmt.condition, defDiagFmt.addinfo,
- defDiagFmt.diagnosticSetId)
- def get_term (self, i):
- """Return term. Note that get_{term,field,fields} can throw an
- exception if the i'th term is a surrogate diagnostic."""
- return self._get_rec (i).term
- def get_field (self, field, i):
- """Returns value of field:
- term: term
- freq: integer
- display: string
- attrs: currently z3950 structure, should be string of attributes
- alt: currently z3950 structure, should be [string of attrs, term]
- other: currently z3950 structure, dunno what the best Python representation would be
- """
- f = self.zoom_to_z3950 [field]
- r = self._get_rec (i)
- return r.__dict__[f]
- def get_fields (self, i):
- """Return a dictionary mapping ZOOM's field names to values
- present in the response. (Like get_field, but for all fields.)"""
- r = self._get_rec (i)
- d = {}
- for k,v in self.zoom_to_z3950.items ():
- val = getattr (r, v, None)
- if val <> None:
- d[k] = val
- d["term"] = self.get_term (i)
- return d
- def _pin (self, i):
- if i < 0:
- return i + len (self)
- return i
- def __getitem__ (self, i):
- return self.get_fields (self._pin (i))
- def __getslice__ (self, i, j):
- i = self._pin (i)
- j = self._pin (j)
- if j > len (self):
- j = len (self)
- return [self.get_fields (k) for k in range (i,j)]
-
-
-
-if __name__ == '__main__':
- optlist, args = getopt.getopt (sys.argv[1:], 'h:q:t:f:a:e:v:')
- host = 'LC'
- query = ''
- qtype = 'CCL'
- fmts = ['USMARC']
- esns = ['F']
- validation = None
- for (opt, val) in optlist:
- if opt == '-h':
- host = val
- elif opt == '-q':
- query = val
- elif opt == '-t':
- qtype = val
- elif opt == '-f':
- fmts = val.split (',')
- elif opt == '-e':
- esns = val.split (',')
- elif opt == '-v':
- validation = val.split (',')
-
- rv = z3950.host_dict.get (host)
- if rv == None:
- (name, port, dbname) = host.split (':')
- port = int (port)
- else:
- (name, port, dbname) = rv
-
- conn = Connection (name, port)
- conn.databaseName = dbname
-
- conn.preferredRecordSyntax = fmts [0]
- def run_one (q):
- try:
- query = Query (qtype, q)
- res = conn.search (query)
- for esn in esns:
- for syn in fmts:
- print "Syntax", syn, "Esn", esn
- res.preferredRecordSyntax = syn
- if esn <> 'NONE':
- res.elementSetName = esn
- try:
- for r in res:
- print str(r)
- except ZoomError, err:
- print "Zoom exception", err.__class__, err
-# res.delete ()
-# Looks as if Oxford will close the connection if a delete is sent,
-# despite claiming delete support (verified with yaz client, too).
- except ZoomError, err:
- print "Zoom exception", err.__class__, err
-
-
-
- if query == '':
- while 1:
- q_str = raw_input ('CCL query: ')
- if q_str == '': break
- run_one (q_str)
- else:
- run_one (query)
- conn.close ()
diff --git a/python/atom/__init__.py b/python/atom/__init__.py
new file mode 100644
index 0000000..6aa96c1
--- /dev/null
+++ b/python/atom/__init__.py
@@ -0,0 +1,1484 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains classes representing Atom elements.
+
+ Module objective: provide data classes for Atom constructs. These classes hide
+ the XML-ness of Atom and provide a set of native Python classes to interact
+ with.
+
+ Conversions to and from XML should only be necessary when the Atom classes
+ "touch the wire" and are sent over HTTP. For this reason this module
+ provides methods and functions to convert Atom classes to and from strings.
+
+ For more information on the Atom data model, see RFC 4287
+ (http://www.ietf.org/rfc/rfc4287.txt)
+
+ AtomBase: A foundation class on which Atom classes are built. It
+ handles the parsing of attributes and children which are common to all
+ Atom classes. By default, the AtomBase class translates all XML child
+ nodes into ExtensionElements.
+
+  ExtensionElement: Atom allows Atom objects to contain XML which is not part
+    of the Atom specification; these are called extension elements. If a
+    class's parser encounters an unexpected XML construct, it is translated
+    into an ExtensionElement instance. ExtensionElement is designed to fully
+ capture the information in the XML. Child nodes in an XML extension are
+ turned into ExtensionElements as well.
+"""
+
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import warnings
+
+
+# XML namespaces which are often used in Atom entities.
+ATOM_NAMESPACE = 'http://www.w3.org/2005/Atom'
+ELEMENT_TEMPLATE = '{http://www.w3.org/2005/Atom}%s'
+APP_NAMESPACE = 'http://purl.org/atom/app#'
+APP_TEMPLATE = '{http://purl.org/atom/app#}%s'
+
+# This encoding is used for converting strings before translating the XML
+# into an object.
+XML_STRING_ENCODING = 'utf-8'
+# The desired string encoding for object members. Set or monkey-patch to
+# unicode if you want object members to be Python unicode strings instead of
+# encoded byte strings.
+MEMBER_STRING_ENCODING = 'utf-8'
+#MEMBER_STRING_ENCODING = unicode
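+
+# A minimal sketch of the monkey-patch described above (illustrative only):
+# switch the flag before any XML is parsed and non-ASCII text then comes back
+# as unicode objects instead of UTF-8 byte strings.
+#
+#   >>> import atom
+#   >>> atom.MEMBER_STRING_ENCODING = unicode
+#   >>> name = atom.NameFromString(
+#   ...     '<name xmlns="http://www.w3.org/2005/Atom">caf\xc3\xa9</name>')
+#   >>> name.text
+#   u'caf\xe9'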
+
+# If True, all methods which are exclusive to v1 will raise a
+# DeprecationWarning
+ENABLE_V1_WARNINGS = False
+
+
+def v1_deprecated(warning=None):
+ """Shows a warning if ENABLE_V1_WARNINGS is True.
+
+ Function decorator used to mark methods used in v1 classes which
+ may be removed in future versions of the library.
+ """
+ warning = warning or ''
+ # This closure is what is returned from the deprecated function.
+ def mark_deprecated(f):
+ # The deprecated_function wraps the actual call to f.
+ def optional_warn_function(*args, **kwargs):
+ if ENABLE_V1_WARNINGS:
+ warnings.warn(warning, DeprecationWarning, stacklevel=2)
+ return f(*args, **kwargs)
+    # Preserve the original name to avoid masking all decorated functions as
+    # 'optional_warn_function'.
+ try:
+ optional_warn_function.func_name = f.func_name
+ except TypeError:
+ pass # In Python2.3 we can't set the func_name
+ return optional_warn_function
+ return mark_deprecated
+
+
+def CreateClassFromXMLString(target_class, xml_string, string_encoding=None):
+ """Creates an instance of the target class from the string contents.
+
+ Args:
+ target_class: class The class which will be instantiated and populated
+ with the contents of the XML. This class must have a _tag and a
+ _namespace class variable.
+ xml_string: str A string which contains valid XML. The root element
+ of the XML string should match the tag and namespace of the desired
+ class.
+ string_encoding: str The character encoding which the xml_string should
+ be converted to before it is interpreted and translated into
+ objects. The default is None in which case the string encoding
+ is not changed.
+
+ Returns:
+ An instance of the target class with members assigned according to the
+ contents of the XML - or None if the root XML tag and namespace did not
+ match those of the target class.
+ """
+ encoding = string_encoding or XML_STRING_ENCODING
+ if encoding and isinstance(xml_string, unicode):
+ xml_string = xml_string.encode(encoding)
+ tree = ElementTree.fromstring(xml_string)
+ return _CreateClassFromElementTree(target_class, tree)
+
+
+CreateClassFromXMLString = v1_deprecated(
+ 'Please use atom.core.parse with atom.data classes instead.')(
+ CreateClassFromXMLString)
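+
+# Minimal usage sketch (illustrative only): CreateClassFromXMLString returns
+# None when the root element does not match the target class's _tag and
+# _namespace, so it can be used to probe a fragment. Name and Link are
+# defined later in this module.
+#
+#   >>> xml = '<name xmlns="http://www.w3.org/2005/Atom">Jo</name>'
+#   >>> CreateClassFromXMLString(Name, xml).text
+#   'Jo'
+#   >>> print CreateClassFromXMLString(Link, xml)
+#   None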
+
+
+def _CreateClassFromElementTree(target_class, tree, namespace=None, tag=None):
+ """Instantiates the class and populates members according to the tree.
+
+ Note: Only use this function with classes that have _namespace and _tag
+ class members.
+
+ Args:
+ target_class: class The class which will be instantiated and populated
+ with the contents of the XML.
+ tree: ElementTree An element tree whose contents will be converted into
+ members of the new target_class instance.
+ namespace: str (optional) The namespace which the XML tree's root node must
+ match. If omitted, the namespace defaults to the _namespace of the
+ target class.
+ tag: str (optional) The tag which the XML tree's root node must match. If
+ omitted, the tag defaults to the _tag class member of the target
+ class.
+
+ Returns:
+ An instance of the target class - or None if the tag and namespace of
+ the XML tree's root node did not match the desired namespace and tag.
+ """
+ if namespace is None:
+ namespace = target_class._namespace
+ if tag is None:
+ tag = target_class._tag
+ if tree.tag == '{%s}%s' % (namespace, tag):
+ target = target_class()
+ target._HarvestElementTree(tree)
+ return target
+ else:
+ return None
+
+
+class ExtensionContainer(object):
+
+ def __init__(self, extension_elements=None, extension_attributes=None,
+ text=None):
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+ __init__ = v1_deprecated(
+ 'Please use data model classes in atom.data instead.')(
+ __init__)
+
+ # Three methods to create an object from an ElementTree
+ def _HarvestElementTree(self, tree):
+ # Fill in the instance members from the contents of the XML tree.
+ for child in tree:
+ self._ConvertElementTreeToMember(child)
+ for attribute, value in tree.attrib.iteritems():
+ self._ConvertElementAttributeToMember(attribute, value)
+    # Encode the text string according to the desired encoding (default UTF-8).
+ if tree.text:
+ if MEMBER_STRING_ENCODING is unicode:
+ self.text = tree.text
+ else:
+ self.text = tree.text.encode(MEMBER_STRING_ENCODING)
+
+ def _ConvertElementTreeToMember(self, child_tree, current_class=None):
+ self.extension_elements.append(_ExtensionElementFromElementTree(
+ child_tree))
+
+ def _ConvertElementAttributeToMember(self, attribute, value):
+    # Encode the attribute value's string with the desired encoding (default UTF-8).
+ if value:
+ if MEMBER_STRING_ENCODING is unicode:
+ self.extension_attributes[attribute] = value
+ else:
+ self.extension_attributes[attribute] = value.encode(
+ MEMBER_STRING_ENCODING)
+
+ # One method to create an ElementTree from an object
+ def _AddMembersToElementTree(self, tree):
+ for child in self.extension_elements:
+ child._BecomeChildElement(tree)
+ for attribute, value in self.extension_attributes.iteritems():
+ if value:
+ if isinstance(value, unicode) or MEMBER_STRING_ENCODING is unicode:
+ tree.attrib[attribute] = value
+ else:
+ # Decode the value from the desired encoding (default UTF-8).
+ tree.attrib[attribute] = value.decode(MEMBER_STRING_ENCODING)
+ if self.text:
+ if isinstance(self.text, unicode) or MEMBER_STRING_ENCODING is unicode:
+ tree.text = self.text
+ else:
+ tree.text = self.text.decode(MEMBER_STRING_ENCODING)
+
+ def FindExtensions(self, tag=None, namespace=None):
+ """Searches extension elements for child nodes with the desired name.
+
+ Returns a list of extension elements within this object whose tag
+ and/or namespace match those passed in. To find all extensions in
+ a particular namespace, specify the namespace but not the tag name.
+ If you specify only the tag, the result list may contain extension
+ elements in multiple namespaces.
+
+ Args:
+ tag: str (optional) The desired tag
+ namespace: str (optional) The desired namespace
+
+ Returns:
+      A list of elements whose tag and/or namespace match the parameter
+      values.
+ """
+
+ results = []
+
+ if tag and namespace:
+ for element in self.extension_elements:
+ if element.tag == tag and element.namespace == namespace:
+ results.append(element)
+ elif tag and not namespace:
+ for element in self.extension_elements:
+ if element.tag == tag:
+ results.append(element)
+ elif namespace and not tag:
+ for element in self.extension_elements:
+ if element.namespace == namespace:
+ results.append(element)
+ else:
+ for element in self.extension_elements:
+ results.append(element)
+
+ return results
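+
+  # Minimal sketch of FindExtensions (illustrative only), assuming a fragment
+  # with a non-Atom child element; the child is kept as an ExtensionElement
+  # with its tag, namespace, and text. Name is defined later in this module,
+  # and http://example.com/ns is a placeholder namespace.
+  #
+  #   >>> name = NameFromString(
+  #   ...     '<name xmlns="http://www.w3.org/2005/Atom">Jo'
+  #   ...     '<x:nick xmlns:x="http://example.com/ns">jojo</x:nick></name>')
+  #   >>> [e.text for e in name.FindExtensions(namespace='http://example.com/ns')]
+  #   ['jojo']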
+
+
+class AtomBase(ExtensionContainer):
+
+ _children = {}
+ _attributes = {}
+
+ def __init__(self, extension_elements=None, extension_attributes=None,
+ text=None):
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+ __init__ = v1_deprecated(
+ 'Please use data model classes in atom.data instead.')(
+ __init__)
+
+ def _ConvertElementTreeToMember(self, child_tree):
+ # Find the element's tag in this class's list of child members
+ if self.__class__._children.has_key(child_tree.tag):
+ member_name = self.__class__._children[child_tree.tag][0]
+ member_class = self.__class__._children[child_tree.tag][1]
+ # If the class member is supposed to contain a list, make sure the
+ # matching member is set to a list, then append the new member
+ # instance to the list.
+ if isinstance(member_class, list):
+ if getattr(self, member_name) is None:
+ setattr(self, member_name, [])
+ getattr(self, member_name).append(_CreateClassFromElementTree(
+ member_class[0], child_tree))
+ else:
+ setattr(self, member_name,
+ _CreateClassFromElementTree(member_class, child_tree))
+ else:
+ ExtensionContainer._ConvertElementTreeToMember(self, child_tree)
+
+ def _ConvertElementAttributeToMember(self, attribute, value):
+ # Find the attribute in this class's list of attributes.
+ if self.__class__._attributes.has_key(attribute):
+ # Find the member of this class which corresponds to the XML attribute
+ # (lookup in current_class._attributes) and set this member to the
+ # desired value (using self.__dict__).
+ if value:
+ # Encode the string to capture non-ascii characters (default UTF-8)
+ if MEMBER_STRING_ENCODING is unicode:
+ setattr(self, self.__class__._attributes[attribute], value)
+ else:
+ setattr(self, self.__class__._attributes[attribute],
+ value.encode(MEMBER_STRING_ENCODING))
+ else:
+ ExtensionContainer._ConvertElementAttributeToMember(
+ self, attribute, value)
+
+ # Three methods to create an ElementTree from an object
+ def _AddMembersToElementTree(self, tree):
+ # Convert the members of this class which are XML child nodes.
+ # This uses the class's _children dictionary to find the members which
+ # should become XML child nodes.
+ member_node_names = [values[0] for tag, values in
+ self.__class__._children.iteritems()]
+ for member_name in member_node_names:
+ member = getattr(self, member_name)
+ if member is None:
+ pass
+ elif isinstance(member, list):
+ for instance in member:
+ instance._BecomeChildElement(tree)
+ else:
+ member._BecomeChildElement(tree)
+ # Convert the members of this class which are XML attributes.
+ for xml_attribute, member_name in self.__class__._attributes.iteritems():
+ member = getattr(self, member_name)
+ if member is not None:
+ if isinstance(member, unicode) or MEMBER_STRING_ENCODING is unicode:
+ tree.attrib[xml_attribute] = member
+ else:
+ tree.attrib[xml_attribute] = member.decode(MEMBER_STRING_ENCODING)
+ # Lastly, call the ExtensionContainers's _AddMembersToElementTree to
+ # convert any extension attributes.
+ ExtensionContainer._AddMembersToElementTree(self, tree)
+
+
+ def _BecomeChildElement(self, tree):
+ """
+
+ Note: Only for use with classes that have a _tag and _namespace class
+ member. It is in AtomBase so that it can be inherited but it should
+ not be called on instances of AtomBase.
+
+ """
+ new_child = ElementTree.Element('')
+ tree.append(new_child)
+ new_child.tag = '{%s}%s' % (self.__class__._namespace,
+ self.__class__._tag)
+ self._AddMembersToElementTree(new_child)
+
+ def _ToElementTree(self):
+ """
+
+ Note, this method is designed to be used only with classes that have a
+ _tag and _namespace. It is placed in AtomBase for inheritance but should
+ not be called on this class.
+
+ """
+ new_tree = ElementTree.Element('{%s}%s' % (self.__class__._namespace,
+ self.__class__._tag))
+ self._AddMembersToElementTree(new_tree)
+ return new_tree
+
+ def ToString(self, string_encoding='UTF-8'):
+ """Converts the Atom object to a string containing XML."""
+ return ElementTree.tostring(self._ToElementTree(), encoding=string_encoding)
+
+ def __str__(self):
+ return self.ToString()
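+
+  # Minimal sketch of serializing a hand-built element (illustrative only),
+  # using the Title class defined later in this module:
+  #
+  #   >>> t = Title(title_type='text', text='My Feed')
+  #   >>> xml = t.ToString()   # a complete XML document string (UTF-8)
+  #   >>> 'My Feed' in xml
+  #   True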
+
+
+class Name(AtomBase):
+ """The atom:name element"""
+
+ _tag = 'name'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Name
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def NameFromString(xml_string):
+ return CreateClassFromXMLString(Name, xml_string)
+
+
+class Email(AtomBase):
+ """The atom:email element"""
+
+ _tag = 'email'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Email
+
+ Args:
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+      text: str The text data in this element
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def EmailFromString(xml_string):
+ return CreateClassFromXMLString(Email, xml_string)
+
+
+class Uri(AtomBase):
+ """The atom:uri element"""
+
+ _tag = 'uri'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Uri
+
+ Args:
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+      text: str The text data in this element
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def UriFromString(xml_string):
+ return CreateClassFromXMLString(Uri, xml_string)
+
+
+class Person(AtomBase):
+ """A foundation class from which atom:author and atom:contributor extend.
+
+ A person contains information like name, email address, and web page URI for
+ an author or contributor to an Atom feed.
+ """
+
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _children['{%s}name' % (ATOM_NAMESPACE)] = ('name', Name)
+ _children['{%s}email' % (ATOM_NAMESPACE)] = ('email', Email)
+ _children['{%s}uri' % (ATOM_NAMESPACE)] = ('uri', Uri)
+
+ def __init__(self, name=None, email=None, uri=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ """Foundation from which author and contributor are derived.
+
+ The constructor is provided for illustrative purposes, you should not
+ need to instantiate a Person.
+
+ Args:
+ name: Name The person's name
+ email: Email The person's email address
+ uri: Uri The URI of the person's webpage
+ extension_elements: list A list of ExtensionElement instances which are
+ children of this element.
+ extension_attributes: dict A dictionary of strings which are the values
+ for additional XML attributes of this element.
+ text: String The text contents of the element. This is the contents
+ of the Entry's XML text node. (Example: This is the text)
+ """
+
+ self.name = name
+ self.email = email
+ self.uri = uri
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+
+class Author(Person):
+ """The atom:author element
+
+ An author is a required element in Feed.
+ """
+
+ _tag = 'author'
+ _namespace = ATOM_NAMESPACE
+ _children = Person._children.copy()
+ _attributes = Person._attributes.copy()
+ #_children = {}
+ #_attributes = {}
+
+ def __init__(self, name=None, email=None, uri=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ """Constructor for Author
+
+ Args:
+ name: Name
+ email: Email
+ uri: Uri
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+      text: str The text data in this element
+ """
+
+ self.name = name
+ self.email = email
+ self.uri = uri
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+
+def AuthorFromString(xml_string):
+ return CreateClassFromXMLString(Author, xml_string)
+
+
+class Contributor(Person):
+ """The atom:contributor element"""
+
+ _tag = 'contributor'
+ _namespace = ATOM_NAMESPACE
+ _children = Person._children.copy()
+ _attributes = Person._attributes.copy()
+
+ def __init__(self, name=None, email=None, uri=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ """Constructor for Contributor
+
+ Args:
+ name: Name
+ email: Email
+ uri: Uri
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+      text: str The text data in this element
+ """
+
+ self.name = name
+ self.email = email
+ self.uri = uri
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+
+def ContributorFromString(xml_string):
+ return CreateClassFromXMLString(Contributor, xml_string)
+
+
+class Link(AtomBase):
+ """The atom:link element"""
+
+ _tag = 'link'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _attributes['rel'] = 'rel'
+ _attributes['href'] = 'href'
+ _attributes['type'] = 'type'
+ _attributes['title'] = 'title'
+ _attributes['length'] = 'length'
+ _attributes['hreflang'] = 'hreflang'
+
+ def __init__(self, href=None, rel=None, link_type=None, hreflang=None,
+ title=None, length=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Link
+
+ Args:
+ href: string The href attribute of the link
+ rel: string
+      link_type: string
+ hreflang: string The language for the href
+ title: string
+ length: string The length of the href's destination
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+      text: str The text data in this element
+ """
+
+ self.href = href
+ self.rel = rel
+ self.type = link_type
+ self.hreflang = hreflang
+ self.title = title
+ self.length = length
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def LinkFromString(xml_string):
+ return CreateClassFromXMLString(Link, xml_string)
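+
+# Minimal usage sketch (illustrative only, placeholder URL): the XML
+# attributes of atom:link land on the like-named members declared in
+# Link._attributes.
+#
+#   >>> link = LinkFromString('<link xmlns="http://www.w3.org/2005/Atom" '
+#   ...                       'rel="self" href="http://www.example.com/feed"/>')
+#   >>> link.rel, link.href
+#   ('self', 'http://www.example.com/feed')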
+
+
+class Generator(AtomBase):
+ """The atom:generator element"""
+
+ _tag = 'generator'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _attributes['uri'] = 'uri'
+ _attributes['version'] = 'version'
+
+ def __init__(self, uri=None, version=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ """Constructor for Generator
+
+ Args:
+ uri: string
+ version: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.uri = uri
+ self.version = version
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+def GeneratorFromString(xml_string):
+ return CreateClassFromXMLString(Generator, xml_string)
+
+
+class Text(AtomBase):
+ """A foundation class from which atom:title, summary, etc. extend.
+
+ This class should never be instantiated.
+ """
+
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _attributes['type'] = 'type'
+
+ def __init__(self, text_type=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Text
+
+ Args:
+ text_type: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.type = text_type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Title(Text):
+ """The atom:title element"""
+
+ _tag = 'title'
+ _namespace = ATOM_NAMESPACE
+ _children = Text._children.copy()
+ _attributes = Text._attributes.copy()
+
+ def __init__(self, title_type=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Title
+
+ Args:
+ title_type: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.type = title_type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def TitleFromString(xml_string):
+ return CreateClassFromXMLString(Title, xml_string)
+
+
+class Subtitle(Text):
+ """The atom:subtitle element"""
+
+ _tag = 'subtitle'
+ _namespace = ATOM_NAMESPACE
+ _children = Text._children.copy()
+ _attributes = Text._attributes.copy()
+
+ def __init__(self, subtitle_type=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Subtitle
+
+ Args:
+ subtitle_type: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.type = subtitle_type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SubtitleFromString(xml_string):
+ return CreateClassFromXMLString(Subtitle, xml_string)
+
+
+class Rights(Text):
+ """The atom:rights element"""
+
+ _tag = 'rights'
+ _namespace = ATOM_NAMESPACE
+ _children = Text._children.copy()
+ _attributes = Text._attributes.copy()
+
+ def __init__(self, rights_type=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Rights
+
+ Args:
+ rights_type: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.type = rights_type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def RightsFromString(xml_string):
+ return CreateClassFromXMLString(Rights, xml_string)
+
+
+class Summary(Text):
+ """The atom:summary element"""
+
+ _tag = 'summary'
+ _namespace = ATOM_NAMESPACE
+ _children = Text._children.copy()
+ _attributes = Text._attributes.copy()
+
+ def __init__(self, summary_type=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Summary
+
+ Args:
+ summary_type: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.type = summary_type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SummaryFromString(xml_string):
+ return CreateClassFromXMLString(Summary, xml_string)
+
+
+class Content(Text):
+ """The atom:content element"""
+
+ _tag = 'content'
+ _namespace = ATOM_NAMESPACE
+ _children = Text._children.copy()
+ _attributes = Text._attributes.copy()
+ _attributes['src'] = 'src'
+
+ def __init__(self, content_type=None, src=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ """Constructor for Content
+
+ Args:
+ content_type: string
+ src: string
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.type = content_type
+ self.src = src
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+def ContentFromString(xml_string):
+ return CreateClassFromXMLString(Content, xml_string)
+
+
+class Category(AtomBase):
+ """The atom:category element"""
+
+ _tag = 'category'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _attributes['term'] = 'term'
+ _attributes['scheme'] = 'scheme'
+ _attributes['label'] = 'label'
+
+ def __init__(self, term=None, scheme=None, label=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ """Constructor for Category
+
+ Args:
+ term: str
+ scheme: str
+ label: str
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.term = term
+ self.scheme = scheme
+ self.label = label
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def CategoryFromString(xml_string):
+ return CreateClassFromXMLString(Category, xml_string)
+
+
+class Id(AtomBase):
+ """The atom:id element."""
+
+ _tag = 'id'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Id
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def IdFromString(xml_string):
+ return CreateClassFromXMLString(Id, xml_string)
+
+
+class Icon(AtomBase):
+ """The atom:icon element."""
+
+ _tag = 'icon'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Icon
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def IconFromString(xml_string):
+ return CreateClassFromXMLString(Icon, xml_string)
+
+
+class Logo(AtomBase):
+ """The atom:logo element."""
+
+ _tag = 'logo'
+ _namespace = ATOM_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Logo
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def LogoFromString(xml_string):
+ return CreateClassFromXMLString(Logo, xml_string)
+
+
+class Draft(AtomBase):
+ """The app:draft element which indicates if this entry should be public."""
+
+ _tag = 'draft'
+ _namespace = APP_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for app:draft
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def DraftFromString(xml_string):
+ return CreateClassFromXMLString(Draft, xml_string)
+
+
+class Control(AtomBase):
+ """The app:control element indicating restrictions on publication.
+
+ The APP control element may contain a draft element indicating whether or
+ not this entry should be publicly available.
+ """
+
+ _tag = 'control'
+ _namespace = APP_NAMESPACE
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _children['{%s}draft' % APP_NAMESPACE] = ('draft', Draft)
+
+ def __init__(self, draft=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for app:control"""
+
+ self.draft = draft
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def ControlFromString(xml_string):
+ return CreateClassFromXMLString(Control, xml_string)
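+
+# Minimal usage sketch (illustrative only): the nested app:draft element
+# described above becomes the Control instance's 'draft' member.
+#
+#   >>> ctrl = ControlFromString(
+#   ...     '<control xmlns="http://purl.org/atom/app#">'
+#   ...     '<draft>yes</draft></control>')
+#   >>> ctrl.draft.text
+#   'yes'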
+
+
+class Date(AtomBase):
+ """A parent class for atom:updated, published, etc."""
+
+ #TODO Add text to and from time conversion methods to allow users to set
+ # the contents of a Date to a python DateTime object.
+
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Updated(Date):
+ """The atom:updated element."""
+
+ _tag = 'updated'
+ _namespace = ATOM_NAMESPACE
+ _children = Date._children.copy()
+ _attributes = Date._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Updated
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def UpdatedFromString(xml_string):
+ return CreateClassFromXMLString(Updated, xml_string)
+
+
+class Published(Date):
+ """The atom:published element."""
+
+ _tag = 'published'
+ _namespace = ATOM_NAMESPACE
+ _children = Date._children.copy()
+ _attributes = Date._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Published
+
+ Args:
+      text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def PublishedFromString(xml_string):
+ return CreateClassFromXMLString(Published, xml_string)
+
+
+class LinkFinder(object):
+ """An "interface" providing methods to find link elements
+
+ Entry elements often contain multiple links which differ in the rel
+ attribute or content type. Often, developers are interested in a specific
+ type of link so this class provides methods to find specific classes of
+ links.
+
+ This class is used as a mixin in Atom entries and feeds.
+ """
+
+ def GetSelfLink(self):
+ """Find the first link with rel set to 'self'
+
+ Returns:
+      An atom.Link, or None if none of the links had rel equal to 'self'.
+ """
+
+ for a_link in self.link:
+ if a_link.rel == 'self':
+ return a_link
+ return None
+
+ def GetEditLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'edit':
+ return a_link
+ return None
+
+ def GetEditMediaLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'edit-media':
+ return a_link
+ return None
+
+ def GetNextLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'next':
+ return a_link
+ return None
+
+ def GetLicenseLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'license':
+ return a_link
+ return None
+
+ def GetAlternateLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'alternate':
+ return a_link
+ return None
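+
+  # Minimal sketch of the mixin in use (illustrative only, placeholder URLs),
+  # assuming the Entry class defined later in this module:
+  #
+  #   >>> links = [Link(rel='alternate', href='http://www.example.com/'),
+  #   ...          Link(rel='self', href='http://www.example.com/entry/1')]
+  #   >>> entry = Entry(link=links)
+  #   >>> entry.GetSelfLink().href
+  #   'http://www.example.com/entry/1'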
+
+
+class FeedEntryParent(AtomBase, LinkFinder):
+ """A super class for atom:feed and entry, contains shared attributes"""
+
+ _children = AtomBase._children.copy()
+ _attributes = AtomBase._attributes.copy()
+ _children['{%s}author' % ATOM_NAMESPACE] = ('author', [Author])
+ _children['{%s}category' % ATOM_NAMESPACE] = ('category', [Category])
+ _children['{%s}contributor' % ATOM_NAMESPACE] = ('contributor', [Contributor])
+ _children['{%s}id' % ATOM_NAMESPACE] = ('id', Id)
+ _children['{%s}link' % ATOM_NAMESPACE] = ('link', [Link])
+ _children['{%s}rights' % ATOM_NAMESPACE] = ('rights', Rights)
+ _children['{%s}title' % ATOM_NAMESPACE] = ('title', Title)
+ _children['{%s}updated' % ATOM_NAMESPACE] = ('updated', Updated)
+
+ def __init__(self, author=None, category=None, contributor=None,
+ atom_id=None, link=None, rights=None, title=None, updated=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.rights = rights
+ self.title = title
+ self.updated = updated
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Source(FeedEntryParent):
+ """The atom:source element"""
+
+ _tag = 'source'
+ _namespace = ATOM_NAMESPACE
+ _children = FeedEntryParent._children.copy()
+ _attributes = FeedEntryParent._attributes.copy()
+ _children['{%s}generator' % ATOM_NAMESPACE] = ('generator', Generator)
+ _children['{%s}icon' % ATOM_NAMESPACE] = ('icon', Icon)
+ _children['{%s}logo' % ATOM_NAMESPACE] = ('logo', Logo)
+ _children['{%s}subtitle' % ATOM_NAMESPACE] = ('subtitle', Subtitle)
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ """Constructor for Source
+
+ Args:
+ author: list (optional) A list of Author instances which belong to this
+ class.
+ category: list (optional) A list of Category instances
+      contributor: list (optional) A list of Contributor instances
+ generator: Generator (optional)
+ icon: Icon (optional)
+ id: Id (optional) The entry's Id element
+ link: list (optional) A list of Link instances
+ logo: Logo (optional)
+ rights: Rights (optional) The entry's Rights element
+ subtitle: Subtitle (optional) The entry's subtitle element
+ title: Title (optional) the entry's title element
+ updated: Updated (optional) the entry's updated element
+ text: String (optional) The text contents of the element. This is the
+ contents of the Entry's XML text node.
+          (Example: <foo>This is the text</foo>)
+ extension_elements: list (optional) A list of ExtensionElement instances
+ which are children of this element.
+ extension_attributes: dict (optional) A dictionary of strings which are
+ the values for additional XML attributes of this element.
+ """
+
+ self.author = author or []
+ self.category = category or []
+ self.contributor = contributor or []
+ self.generator = generator
+ self.icon = icon
+ self.id = atom_id
+ self.link = link or []
+ self.logo = logo
+ self.rights = rights
+ self.subtitle = subtitle
+ self.title = title
+ self.updated = updated
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SourceFromString(xml_string):
+ return CreateClassFromXMLString(Source, xml_string)
+
+
+class Entry(FeedEntryParent):
+ """The atom:entry element"""
+
+ _tag = 'entry'
+ _namespace = ATOM_NAMESPACE
+ _children = FeedEntryParent._children.copy()
+ _attributes = FeedEntryParent._attributes.copy()
+ _children['{%s}content' % ATOM_NAMESPACE] = ('content', Content)
+ _children['{%s}published' % ATOM_NAMESPACE] = ('published', Published)
+ _children['{%s}source' % ATOM_NAMESPACE] = ('source', Source)
+ _children['{%s}summary' % ATOM_NAMESPACE] = ('summary', Summary)
+ _children['{%s}control' % APP_NAMESPACE] = ('control', Control)
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, control=None, title=None, updated=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ """Constructor for atom:entry
+
+ Args:
+ author: list A list of Author instances which belong to this class.
+ category: list A list of Category instances
+ content: Content The entry's Content
+      contributor: list A list of Contributor instances
+ id: Id The entry's Id element
+ link: list A list of Link instances
+ published: Published The entry's Published element
+ rights: Rights The entry's Rights element
+ source: Source the entry's source element
+ summary: Summary the entry's summary element
+ title: Title the entry's title element
+ updated: Updated the entry's updated element
+ control: The entry's app:control element which can be used to mark an
+ entry as a draft which should not be publicly viewable.
+ text: String The text contents of the element. This is the contents
+          of the Entry's XML text node. (Example: <foo>This is the text</foo>)
+ extension_elements: list A list of ExtensionElement instances which are
+ children of this element.
+ extension_attributes: dict A dictionary of strings which are the values
+ for additional XML attributes of this element.
+ """
+
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.title = title
+ self.updated = updated
+ self.control = control
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ __init__ = v1_deprecated('Please use atom.data.Entry instead.')(__init__)
+
+
+def EntryFromString(xml_string):
+ return CreateClassFromXMLString(Entry, xml_string)
+
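+# A minimal, illustrative sketch (not part of the library) showing the
+# LinkFinder methods on a parsed entry. The XML and URLs below are made up.
+#
+#   ENTRY_XML = ('<entry xmlns="http://www.w3.org/2005/Atom">'
+#                '<link rel="self" href="http://example.com/feed/1"/>'
+#                '<link rel="edit" href="http://example.com/feed/1/edit"/>'
+#                '</entry>')
+#   entry = EntryFromString(ENTRY_XML)
+#   entry.GetSelfLink().href   # 'http://example.com/feed/1'
+#   entry.GetEditLink().href   # 'http://example.com/feed/1/edit'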
+
+class Feed(Source):
+ """The atom:feed element"""
+
+ _tag = 'feed'
+ _namespace = ATOM_NAMESPACE
+ _children = Source._children.copy()
+ _attributes = Source._attributes.copy()
+ _children['{%s}entry' % ATOM_NAMESPACE] = ('entry', [Entry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None, entry=None,
+ text=None, extension_elements=None, extension_attributes=None):
+    """Constructor for Feed
+
+ Args:
+ author: list (optional) A list of Author instances which belong to this
+ class.
+ category: list (optional) A list of Category instances
+      contributor: list (optional) A list of Contributor instances
+ generator: Generator (optional)
+ icon: Icon (optional)
+ id: Id (optional) The entry's Id element
+ link: list (optional) A list of Link instances
+ logo: Logo (optional)
+ rights: Rights (optional) The entry's Rights element
+ subtitle: Subtitle (optional) The entry's subtitle element
+ title: Title (optional) the entry's title element
+ updated: Updated (optional) the entry's updated element
+ entry: list (optional) A list of the Entry instances contained in the
+ feed.
+ text: String (optional) The text contents of the element. This is the
+ contents of the Entry's XML text node.
+          (Example: <foo>This is the text</foo>)
+ extension_elements: list (optional) A list of ExtensionElement instances
+ which are children of this element.
+ extension_attributes: dict (optional) A dictionary of strings which are
+ the values for additional XML attributes of this element.
+ """
+
+ self.author = author or []
+ self.category = category or []
+ self.contributor = contributor or []
+ self.generator = generator
+ self.icon = icon
+ self.id = atom_id
+ self.link = link or []
+ self.logo = logo
+ self.rights = rights
+ self.subtitle = subtitle
+ self.title = title
+ self.updated = updated
+ self.entry = entry or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ __init__ = v1_deprecated('Please use atom.data.Feed instead.')(__init__)
+
+
+def FeedFromString(xml_string):
+ return CreateClassFromXMLString(Feed, xml_string)
+
+
+class ExtensionElement(object):
+ """Represents extra XML elements contained in Atom classes."""
+
+ def __init__(self, tag, namespace=None, attributes=None,
+ children=None, text=None):
+    """Constructor for ExtensionElement
+
+    Args:
+      namespace: string (optional) The XML namespace for this element.
+      tag: string The tag (without the namespace qualifier) for this element.
+          To reconstruct the full qualified name of the element, combine this
+          tag with the namespace.
+      attributes: dict (optional) The attribute value string pairs for the XML
+          attributes of this element.
+      children: list (optional) A list of ExtensionElements which represent
+          the XML child nodes of this element.
+      text: string (optional) The character data for this element.
+    """
+
+ self.namespace = namespace
+ self.tag = tag
+ self.attributes = attributes or {}
+ self.children = children or []
+ self.text = text
+
+ def ToString(self):
+ element_tree = self._TransferToElementTree(ElementTree.Element(''))
+ return ElementTree.tostring(element_tree, encoding="UTF-8")
+
+ def _TransferToElementTree(self, element_tree):
+ if self.tag is None:
+ return None
+
+ if self.namespace is not None:
+ element_tree.tag = '{%s}%s' % (self.namespace, self.tag)
+ else:
+ element_tree.tag = self.tag
+
+ for key, value in self.attributes.iteritems():
+ element_tree.attrib[key] = value
+
+ for child in self.children:
+ child._BecomeChildElement(element_tree)
+
+ element_tree.text = self.text
+
+ return element_tree
+
+ def _BecomeChildElement(self, element_tree):
+ """Converts this object into an etree element and adds it as a child node.
+
+ Adds self to the ElementTree. This method is required to avoid verbose XML
+ which constantly redefines the namespace.
+
+ Args:
+ element_tree: ElementTree._Element The element to which this object's XML
+ will be added.
+ """
+ new_element = ElementTree.Element('')
+ element_tree.append(new_element)
+ self._TransferToElementTree(new_element)
+
+ def FindChildren(self, tag=None, namespace=None):
+ """Searches child nodes for objects with the desired tag/namespace.
+
+ Returns a list of extension elements within this object whose tag
+ and/or namespace match those passed in. To find all children in
+ a particular namespace, specify the namespace but not the tag name.
+ If you specify only the tag, the result list may contain extension
+ elements in multiple namespaces.
+
+ Args:
+ tag: str (optional) The desired tag
+ namespace: str (optional) The desired namespace
+
+ Returns:
+ A list of elements whose tag and/or namespace match the parameters
+ values
+ """
+
+ results = []
+
+ if tag and namespace:
+ for element in self.children:
+ if element.tag == tag and element.namespace == namespace:
+ results.append(element)
+ elif tag and not namespace:
+ for element in self.children:
+ if element.tag == tag:
+ results.append(element)
+ elif namespace and not tag:
+ for element in self.children:
+ if element.namespace == namespace:
+ results.append(element)
+ else:
+ for element in self.children:
+ results.append(element)
+
+ return results
+
+
+def ExtensionElementFromString(xml_string):
+ element_tree = ElementTree.fromstring(xml_string)
+ return _ExtensionElementFromElementTree(element_tree)
+
+
+def _ExtensionElementFromElementTree(element_tree):
+ element_tag = element_tree.tag
+ if '}' in element_tag:
+ namespace = element_tag[1:element_tag.index('}')]
+ tag = element_tag[element_tag.index('}')+1:]
+ else:
+ namespace = None
+ tag = element_tag
+ extension = ExtensionElement(namespace=namespace, tag=tag)
+ for key, value in element_tree.attrib.iteritems():
+ extension.attributes[key] = value
+ for child in element_tree:
+ extension.children.append(_ExtensionElementFromElementTree(child))
+ extension.text = element_tree.text
+ return extension
+
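+# A minimal, illustrative sketch (not part of the library); the XML and
+# namespace below are made up.
+#
+#   ext = ExtensionElementFromString(
+#       '<x:outer xmlns:x="http://example.com/x">'
+#       '<x:inner>1</x:inner></x:outer>')
+#   matches = ext.FindChildren(tag='inner', namespace='http://example.com/x')
+#   matches[0].text   # '1'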
+
+def deprecated(warning=None):
+ """Decorator to raise warning each time the function is called.
+
+ Args:
+    warning: The warning message to be displayed as a string (optional).
+ """
+ warning = warning or ''
+ # This closure is what is returned from the deprecated function.
+ def mark_deprecated(f):
+ # The deprecated_function wraps the actual call to f.
+ def deprecated_function(*args, **kwargs):
+ warnings.warn(warning, DeprecationWarning, stacklevel=2)
+ return f(*args, **kwargs)
+ # Preserve the original name to avoid masking all decorated functions as
+ # 'deprecated_function'
+ try:
+ deprecated_function.func_name = f.func_name
+ except TypeError:
+ # Setting the func_name is not allowed in Python2.3.
+ pass
+ return deprecated_function
+ return mark_deprecated
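+
+
+# An illustrative sketch of how the decorator above is applied, using the
+# pre-Python-2.4 style seen elsewhere in this module (the function names
+# below are made up):
+#
+#   def OldFunction():
+#     pass
+#   OldFunction = deprecated('Use NewFunction instead.')(OldFunction)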
diff --git a/python/atom/auth.py b/python/atom/auth.py
new file mode 100644
index 0000000..1d84175
--- /dev/null
+++ b/python/atom/auth.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import base64
+
+
+class BasicAuth(object):
+ """Sets the Authorization header as defined in RFC1945"""
+
+ def __init__(self, user_id, password):
+ self.basic_cookie = base64.encodestring(
+ '%s:%s' % (user_id, password)).strip()
+
+ def modify_request(self, http_request):
+ http_request.headers['Authorization'] = 'Basic %s' % self.basic_cookie
+
+ ModifyRequest = modify_request
+
+
+class NoAuth(object):
+
+ def modify_request(self, http_request):
+ pass
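+
+
+# A minimal, illustrative sketch (the credentials below are made up), assuming
+# the atom.http_core.HttpRequest class added elsewhere in this change:
+#
+#   import atom.http_core
+#   request = atom.http_core.HttpRequest()
+#   BasicAuth('user@example.com', 'secret').modify_request(request)
+#   # request.headers['Authorization'] == 'Basic ' + base64 of 'user@example.com:secret'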
diff --git a/python/atom/client.py b/python/atom/client.py
new file mode 100644
index 0000000..ee0c18a
--- /dev/null
+++ b/python/atom/client.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""AtomPubClient provides CRUD ops. in line with the Atom Publishing Protocol."""
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.http_core
+
+
+class Error(Exception):
+ pass
+
+
+class MissingHost(Error):
+ pass
+
+
+class AtomPubClient(object):
+ host = None
+ auth_token = None
+ ssl = False # Whether to force all requests over https
+
+ def __init__(self, http_client=None, host=None,
+ auth_token=None, source=None, **kwargs):
+ """Creates a new AtomPubClient instance.
+
+ Args:
+ source: The name of your application.
+ http_client: An object capable of performing HTTP requests through a
+ request method. This object is used to perform the request
+ when the AtomPubClient's request method is called. Used to
+ allow HTTP requests to be directed to a mock server, or use
+ an alternate library instead of the default of httplib to
+ make HTTP requests.
+ host: str The default host name to use if a host is not specified in the
+ requested URI.
+ auth_token: An object which sets the HTTP Authorization header when its
+ modify_request method is called.
+ """
+ self.http_client = http_client or atom.http_core.ProxiedHttpClient()
+ if host is not None:
+ self.host = host
+ if auth_token is not None:
+ self.auth_token = auth_token
+ self.source = source
+
+ def request(self, method=None, uri=None, auth_token=None,
+ http_request=None, **kwargs):
+ """Performs an HTTP request to the server indicated.
+
+ Uses the http_client instance to make the request.
+
+ Args:
+ method: The HTTP method as a string, usually one of 'GET', 'POST',
+ 'PUT', or 'DELETE'
+ uri: The URI desired as a string or atom.http_core.Uri.
+      http_request: (optional) An atom.http_core.HttpRequest to use for this
+        request. If None, a new HttpRequest is created in modify_request.
+ auth_token: An authorization token object whose modify_request method
+ sets the HTTP Authorization header.
+
+ Returns:
+ The results of calling self.http_client.request. With the default
+ http_client, this is an HTTP response object.
+ """
+ # Modify the request based on the AtomPubClient settings and parameters
+ # passed in to the request.
+ http_request = self.modify_request(http_request)
+ if isinstance(uri, (str, unicode)):
+ uri = atom.http_core.Uri.parse_uri(uri)
+ if uri is not None:
+ uri.modify_request(http_request)
+ if isinstance(method, (str, unicode)):
+ http_request.method = method
+ # Any unrecognized arguments are assumed to be capable of modifying the
+ # HTTP request.
+ for name, value in kwargs.iteritems():
+ if value is not None:
+ value.modify_request(http_request)
+ # Default to an http request if the protocol scheme is not set.
+ if http_request.uri.scheme is None:
+ http_request.uri.scheme = 'http'
+ # Override scheme. Force requests over https.
+ if self.ssl:
+ http_request.uri.scheme = 'https'
+ if http_request.uri.path is None:
+ http_request.uri.path = '/'
+ # Add the Authorization header at the very end. The Authorization header
+ # value may need to be calculated using information in the request.
+ if auth_token:
+ auth_token.modify_request(http_request)
+ elif self.auth_token:
+ self.auth_token.modify_request(http_request)
+ # Check to make sure there is a host in the http_request.
+ if http_request.uri.host is None:
+ raise MissingHost('No host provided in request %s %s' % (
+ http_request.method, str(http_request.uri)))
+ # Perform the fully specified request using the http_client instance.
+ # Sends the request to the server and returns the server's response.
+ return self.http_client.request(http_request)
+
+ Request = request
+
+ def get(self, uri=None, auth_token=None, http_request=None, **kwargs):
+ """Performs a request using the GET method, returns an HTTP response."""
+ return self.request(method='GET', uri=uri, auth_token=auth_token,
+ http_request=http_request, **kwargs)
+
+ Get = get
+
+ def post(self, uri=None, data=None, auth_token=None, http_request=None,
+ **kwargs):
+ """Sends data using the POST method, returns an HTTP response."""
+ return self.request(method='POST', uri=uri, auth_token=auth_token,
+ http_request=http_request, data=data, **kwargs)
+
+ Post = post
+
+ def put(self, uri=None, data=None, auth_token=None, http_request=None,
+ **kwargs):
+ """Sends data using the PUT method, returns an HTTP response."""
+ return self.request(method='PUT', uri=uri, auth_token=auth_token,
+ http_request=http_request, data=data, **kwargs)
+
+ Put = put
+
+ def delete(self, uri=None, auth_token=None, http_request=None, **kwargs):
+ """Performs a request using the DELETE method, returns an HTTP response."""
+ return self.request(method='DELETE', uri=uri, auth_token=auth_token,
+ http_request=http_request, **kwargs)
+
+ Delete = delete
+
+ def modify_request(self, http_request):
+ """Changes the HTTP request before sending it to the server.
+
+ Sets the User-Agent HTTP header and fills in the HTTP host portion
+ of the URL if one was not included in the request (for this it uses
+ the self.host member if one is set). This method is called in
+ self.request.
+
+ Args:
+ http_request: An atom.http_core.HttpRequest() (optional) If one is
+ not provided, a new HttpRequest is instantiated.
+
+ Returns:
+ An atom.http_core.HttpRequest() with the User-Agent header set and
+ if this client has a value in its host member, the host in the request
+ URL is set.
+ """
+ if http_request is None:
+ http_request = atom.http_core.HttpRequest()
+
+ if self.host is not None and http_request.uri.host is None:
+ http_request.uri.host = self.host
+
+ # Set the user agent header for logging purposes.
+ if self.source:
+ http_request.headers['User-Agent'] = '%s gdata-py/2.0.12' % self.source
+ else:
+ http_request.headers['User-Agent'] = 'gdata-py/2.0.12'
+
+ return http_request
+
+ ModifyRequest = modify_request
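+
+
+# A minimal, illustrative sketch (the host and path below are made up):
+#
+#   client = AtomPubClient(host='example.com', source='my-sample-app')
+#   response = client.get('/feeds/some/collection')
+#   body = response.read()   # the default http_client returns an httplib response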
diff --git a/python/atom/core.py b/python/atom/core.py
new file mode 100644
index 0000000..4fc93bb
--- /dev/null
+++ b/python/atom/core.py
@@ -0,0 +1,545 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import inspect
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+
+
+try:
+ from xml.dom.minidom import parseString as xmlString
+except ImportError:
+ xmlString = None
+
+STRING_ENCODING = 'utf-8'
+
+
+class XmlElement(object):
+ """Represents an element node in an XML document.
+
+ The text member is a UTF-8 encoded str or unicode.
+ """
+ _qname = None
+ _other_elements = None
+ _other_attributes = None
+ # The rule set contains mappings for XML qnames to child members and the
+ # appropriate member classes.
+ _rule_set = None
+ _members = None
+ text = None
+
+ def __init__(self, text=None, *args, **kwargs):
+ if ('_members' not in self.__class__.__dict__
+ or self.__class__._members is None):
+ self.__class__._members = tuple(self.__class__._list_xml_members())
+ for member_name, member_type in self.__class__._members:
+ if member_name in kwargs:
+ setattr(self, member_name, kwargs[member_name])
+ else:
+ if isinstance(member_type, list):
+ setattr(self, member_name, [])
+ else:
+ setattr(self, member_name, None)
+ self._other_elements = []
+ self._other_attributes = {}
+ if text is not None:
+ self.text = text
+
+ def _list_xml_members(cls):
+ """Generator listing all members which are XML elements or attributes.
+
+ The following members would be considered XML members:
+ foo = 'abc' - indicates an XML attribute with the qname abc
+ foo = SomeElement - indicates an XML child element
+ foo = [AnElement] - indicates a repeating XML child element, each instance
+ will be stored in a list in this member
+ foo = ('att1', '{http://example.com/namespace}att2') - indicates an XML
+ attribute which has different parsing rules in different versions of
+ the protocol. Version 1 of the XML parsing rules will look for an
+      attribute with the qname 'att1' but version 2 of the parsing rules will
+ look for a namespaced attribute with the local name of 'att2' and an
+ XML namespace of 'http://example.com/namespace'.
+ """
+ members = []
+ for pair in inspect.getmembers(cls):
+ if not pair[0].startswith('_') and pair[0] != 'text':
+ member_type = pair[1]
+ if (isinstance(member_type, tuple) or isinstance(member_type, list)
+ or isinstance(member_type, (str, unicode))
+ or (inspect.isclass(member_type)
+ and issubclass(member_type, XmlElement))):
+ members.append(pair)
+ return members
+
+ _list_xml_members = classmethod(_list_xml_members)
+
+ def _get_rules(cls, version):
+ """Initializes the _rule_set for the class which is used when parsing XML.
+
+ This method is used internally for parsing and generating XML for an
+ XmlElement. It is not recommended that you call this method directly.
+
+ Returns:
+ A tuple containing the XML parsing rules for the appropriate version.
+
+ The tuple looks like:
+ (qname, {sub_element_qname: (member_name, member_class, repeating), ..},
+ {attribute_qname: member_name})
+
+    To give a couple of concrete examples, the atom.data.Control _get_rules
+    with a version of 2 will return:
+    ('{http://www.w3.org/2007/app}control',
+     {'{http://www.w3.org/2007/app}draft': ('draft',
+                                            <class 'atom.data.Draft'>,
+                                            False)},
+     {})
+    Calling _get_rules with version 1 on gdata.data.FeedLink will produce:
+    ('{http://schemas.google.com/g/2005}feedLink',
+     {'{http://www.w3.org/2005/Atom}feed': ('feed',
+                                            <class 'gdata.data.GDFeed'>,
+                                            False)},
+     {'href': 'href', 'readOnly': 'read_only', 'countHint': 'count_hint',
+      'rel': 'rel'})
+ """
+ # Initialize the _rule_set to make sure there is a slot available to store
+ # the parsing rules for this version of the XML schema.
+ # Look for rule set in the class __dict__ proxy so that only the
+ # _rule_set for this class will be found. By using the dict proxy
+ # we avoid finding rule_sets defined in superclasses.
+ # The four lines below provide support for any number of versions, but it
+    # runs a bit slower than hard coding slots for two versions, so I'm using
+    # the two lines below.
+ #if '_rule_set' not in cls.__dict__ or cls._rule_set is None:
+ # cls._rule_set = []
+ #while len(cls.__dict__['_rule_set']) < version:
+ # cls._rule_set.append(None)
+ # If there is no rule set cache in the class, provide slots for two XML
+ # versions. If and when there is a version 3, this list will need to be
+ # expanded.
+ if '_rule_set' not in cls.__dict__ or cls._rule_set is None:
+ cls._rule_set = [None, None]
+ # If a version higher than 2 is requested, fall back to version 2 because
+ # 2 is currently the highest supported version.
+ if version > 2:
+ return cls._get_rules(2)
+ # Check the dict proxy for the rule set to avoid finding any rule sets
+ # which belong to the superclass. We only want rule sets for this class.
+ if cls._rule_set[version-1] is None:
+ # The rule set for each version consists of the qname for this element
+ # ('{namespace}tag'), a dictionary (elements) for looking up the
+ # corresponding class member when given a child element's qname, and a
+ # dictionary (attributes) for looking up the corresponding class member
+ # when given an XML attribute's qname.
+ elements = {}
+ attributes = {}
+ if ('_members' not in cls.__dict__ or cls._members is None):
+ cls._members = tuple(cls._list_xml_members())
+ for member_name, target in cls._members:
+ if isinstance(target, list):
+ # This member points to a repeating element.
+ elements[_get_qname(target[0], version)] = (member_name, target[0],
+ True)
+ elif isinstance(target, tuple):
+ # This member points to a versioned XML attribute.
+ if version <= len(target):
+ attributes[target[version-1]] = member_name
+ else:
+ attributes[target[-1]] = member_name
+ elif isinstance(target, (str, unicode)):
+ # This member points to an XML attribute.
+ attributes[target] = member_name
+ elif issubclass(target, XmlElement):
+        # This member points to a single occurrence element.
+ elements[_get_qname(target, version)] = (member_name, target, False)
+ version_rules = (_get_qname(cls, version), elements, attributes)
+ cls._rule_set[version-1] = version_rules
+ return version_rules
+ else:
+ return cls._rule_set[version-1]
+
+ _get_rules = classmethod(_get_rules)
+
+ def get_elements(self, tag=None, namespace=None, version=1):
+ """Find all sub elements which match the tag and namespace.
+
+ To find all elements in this object, call get_elements with the tag and
+ namespace both set to None (the default). This method searches through
+ the object's members and the elements stored in _other_elements which
+ did not match any of the XML parsing rules for this class.
+
+ Args:
+ tag: str
+ namespace: str
+ version: int Specifies the version of the XML rules to be used when
+ searching for matching elements.
+
+ Returns:
+ A list of the matching XmlElements.
+ """
+ matches = []
+ ignored1, elements, ignored2 = self.__class__._get_rules(version)
+ if elements:
+ for qname, element_def in elements.iteritems():
+ member = getattr(self, element_def[0])
+ if member:
+ if _qname_matches(tag, namespace, qname):
+ if element_def[2]:
+ # If this is a repeating element, copy all instances into the
+ # result list.
+ matches.extend(member)
+ else:
+ matches.append(member)
+ for element in self._other_elements:
+ if _qname_matches(tag, namespace, element._qname):
+ matches.append(element)
+ return matches
+
+ GetElements = get_elements
+ # FindExtensions and FindChildren are provided for backwards compatibility
+ # to the atom.AtomBase class.
+ # However, FindExtensions may return more results than the v1 atom.AtomBase
+ # method does, because get_elements searches both the expected children
+ # and the unexpected "other elements". The old AtomBase.FindExtensions
+ # method searched only "other elements" AKA extension_elements.
+ FindExtensions = get_elements
+ FindChildren = get_elements
+
+ def get_attributes(self, tag=None, namespace=None, version=1):
+ """Find all attributes which match the tag and namespace.
+
+ To find all attributes in this object, call get_attributes with the tag
+ and namespace both set to None (the default). This method searches
+ through the object's members and the attributes stored in
+ _other_attributes which did not fit any of the XML parsing rules for this
+ class.
+
+ Args:
+ tag: str
+ namespace: str
+ version: int Specifies the version of the XML rules to be used when
+ searching for matching attributes.
+
+ Returns:
+ A list of XmlAttribute objects for the matching attributes.
+ """
+ matches = []
+ ignored1, ignored2, attributes = self.__class__._get_rules(version)
+ if attributes:
+ for qname, attribute_def in attributes.iteritems():
+ if isinstance(attribute_def, (list, tuple)):
+ attribute_def = attribute_def[0]
+ member = getattr(self, attribute_def)
+ # TODO: ensure this hasn't broken existing behavior.
+ #member = getattr(self, attribute_def[0])
+ if member:
+ if _qname_matches(tag, namespace, qname):
+ matches.append(XmlAttribute(qname, member))
+ for qname, value in self._other_attributes.iteritems():
+ if _qname_matches(tag, namespace, qname):
+ matches.append(XmlAttribute(qname, value))
+ return matches
+
+ GetAttributes = get_attributes
+
+ def _harvest_tree(self, tree, version=1):
+ """Populates object members from the data in the tree Element."""
+ qname, elements, attributes = self.__class__._get_rules(version)
+ for element in tree:
+ if elements and element.tag in elements:
+ definition = elements[element.tag]
+ # If this is a repeating element, make sure the member is set to a
+ # list.
+ if definition[2]:
+ if getattr(self, definition[0]) is None:
+ setattr(self, definition[0], [])
+ getattr(self, definition[0]).append(_xml_element_from_tree(element,
+ definition[1], version))
+ else:
+ setattr(self, definition[0], _xml_element_from_tree(element,
+ definition[1], version))
+ else:
+ self._other_elements.append(_xml_element_from_tree(element, XmlElement,
+ version))
+ for attrib, value in tree.attrib.iteritems():
+ if attributes and attrib in attributes:
+ setattr(self, attributes[attrib], value)
+ else:
+ self._other_attributes[attrib] = value
+ if tree.text:
+ self.text = tree.text
+
+ def _to_tree(self, version=1, encoding=None):
+ new_tree = ElementTree.Element(_get_qname(self, version))
+ self._attach_members(new_tree, version, encoding)
+ return new_tree
+
+ def _attach_members(self, tree, version=1, encoding=None):
+ """Convert members to XML elements/attributes and add them to the tree.
+
+ Args:
+ tree: An ElementTree.Element which will be modified. The members of
+ this object will be added as child elements or attributes
+ according to the rules described in _expected_elements and
+ _expected_attributes. The elements and attributes stored in
+          other_attributes and other_elements are also added as children
+          of this tree.
+      version: int Ignored in this method but used by VersionedElement.
+ encoding: str (optional)
+ """
+ qname, elements, attributes = self.__class__._get_rules(version)
+ encoding = encoding or STRING_ENCODING
+ # Add the expected elements and attributes to the tree.
+ if elements:
+ for tag, element_def in elements.iteritems():
+ member = getattr(self, element_def[0])
+ # If this is a repeating element and there are members in the list.
+ if member and element_def[2]:
+ for instance in member:
+ instance._become_child(tree, version)
+ elif member:
+ member._become_child(tree, version)
+ if attributes:
+ for attribute_tag, member_name in attributes.iteritems():
+ value = getattr(self, member_name)
+ if value:
+ tree.attrib[attribute_tag] = value
+ # Add the unexpected (other) elements and attributes to the tree.
+ for element in self._other_elements:
+ element._become_child(tree, version)
+ for key, value in self._other_attributes.iteritems():
+ # I'm not sure if unicode can be used in the attribute name, so for now
+ # we assume the encoding is correct for the attribute name.
+ if not isinstance(value, unicode):
+ value = value.decode(encoding)
+ tree.attrib[key] = value
+ if self.text:
+ if isinstance(self.text, unicode):
+ tree.text = self.text
+ else:
+ tree.text = self.text.decode(encoding)
+
+ def to_string(self, version=1, encoding=None, pretty_print=None):
+ """Converts this object to XML."""
+
+ tree_string = ElementTree.tostring(self._to_tree(version, encoding))
+
+ if pretty_print and xmlString is not None:
+ return xmlString(tree_string).toprettyxml()
+
+ return tree_string
+
+ ToString = to_string
+
+ def __str__(self):
+ return self.to_string()
+
+ def _become_child(self, tree, version=1):
+ """Adds a child element to tree with the XML data in self."""
+ new_child = ElementTree.Element('')
+ tree.append(new_child)
+ new_child.tag = _get_qname(self, version)
+ self._attach_members(new_child, version)
+
+ def __get_extension_elements(self):
+ return self._other_elements
+
+ def __set_extension_elements(self, elements):
+ self._other_elements = elements
+
+ extension_elements = property(__get_extension_elements,
+ __set_extension_elements,
+ """Provides backwards compatibility for v1 atom.AtomBase classes.""")
+
+ def __get_extension_attributes(self):
+ return self._other_attributes
+
+ def __set_extension_attributes(self, attributes):
+ self._other_attributes = attributes
+
+ extension_attributes = property(__get_extension_attributes,
+ __set_extension_attributes,
+ """Provides backwards compatibility for v1 atom.AtomBase classes.""")
+
+ def _get_tag(self, version=1):
+ qname = _get_qname(self, version)
+ return qname[qname.find('}')+1:]
+
+ def _get_namespace(self, version=1):
+ qname = _get_qname(self, version)
+ if qname.startswith('{'):
+ return qname[1:qname.find('}')]
+ else:
+ return None
+
+ def _set_tag(self, tag):
+ if isinstance(self._qname, tuple):
+ self._qname = self._qname.copy()
+ if self._qname[0].startswith('{'):
+ self._qname[0] = '{%s}%s' % (self._get_namespace(1), tag)
+ else:
+ self._qname[0] = tag
+ else:
+ if self._qname.startswith('{'):
+ self._qname = '{%s}%s' % (self._get_namespace(), tag)
+ else:
+ self._qname = tag
+
+ def _set_namespace(self, namespace):
+ if isinstance(self._qname, tuple):
+ self._qname = self._qname.copy()
+ if namespace:
+ self._qname[0] = '{%s}%s' % (namespace, self._get_tag(1))
+ else:
+ self._qname[0] = self._get_tag(1)
+ else:
+ if namespace:
+ self._qname = '{%s}%s' % (namespace, self._get_tag(1))
+ else:
+ self._qname = self._get_tag(1)
+
+ tag = property(_get_tag, _set_tag,
+ """Provides backwards compatibility for v1 atom.AtomBase classes.""")
+
+ namespace = property(_get_namespace, _set_namespace,
+ """Provides backwards compatibility for v1 atom.AtomBase classes.""")
+
+ # Provided for backwards compatibility to atom.ExtensionElement
+ children = extension_elements
+ attributes = extension_attributes
+
+
+def _get_qname(element, version):
+ if isinstance(element._qname, tuple):
+ if version <= len(element._qname):
+ return element._qname[version-1]
+ else:
+ return element._qname[-1]
+ else:
+ return element._qname
+
+
+def _qname_matches(tag, namespace, qname):
+  """Determines whether a QName matches the desired local tag and namespace.
+
+ This is used in XmlElement.get_elements and XmlElement.get_attributes to
+ find matches in the element's members (among all expected-and-unexpected
+ elements-and-attributes).
+
+ Args:
+    tag: string The expected local tag name, or None to match any tag.
+    namespace: string The expected namespace, or None to match any namespace.
+ qname: string in the form '{xml_namespace}localtag' or 'tag' if there is
+ no namespace.
+
+ Returns:
+ boolean True if the member's tag and namespace fit the expected tag and
+ namespace.
+ """
+  # If the member has no qname, it has no tag or namespace to compare against.
+ if qname is None:
+ member_tag = None
+ member_namespace = None
+ else:
+ if qname.startswith('{'):
+ member_namespace = qname[1:qname.index('}')]
+ member_tag = qname[qname.index('}') + 1:]
+ else:
+ member_namespace = None
+ member_tag = qname
+ return ((tag is None and namespace is None)
+ # If there is a tag, but no namespace, see if the local tag matches.
+ or (namespace is None and member_tag == tag)
+ # There was no tag, but there was a namespace so see if the namespaces
+ # match.
+ or (tag is None and member_namespace == namespace)
+ # There was no tag, and the desired elements have no namespace, so check
+ # to see that the member's namespace is None.
+ or (tag is None and namespace == ''
+ and member_namespace is None)
+ # The tag and the namespace both match.
+ or (tag == member_tag
+ and namespace == member_namespace)
+ # The tag matches, and the expected namespace is the empty namespace,
+ # check to make sure the member's namespace is None.
+ or (tag == member_tag and namespace == ''
+ and member_namespace is None))
+
+
+def parse(xml_string, target_class=None, version=1, encoding=None):
+ """Parses the XML string according to the rules for the target_class.
+
+ Args:
+ xml_string: str or unicode
+ target_class: XmlElement or a subclass. If None is specified, the
+ XmlElement class is used.
+ version: int (optional) The version of the schema which should be used when
+ converting the XML into an object. The default is 1.
+ encoding: str (optional) The character encoding of the bytes in the
+ xml_string. Default is 'UTF-8'.
+ """
+ if target_class is None:
+ target_class = XmlElement
+ if isinstance(xml_string, unicode):
+ if encoding is None:
+ xml_string = xml_string.encode(STRING_ENCODING)
+ else:
+ xml_string = xml_string.encode(encoding)
+ tree = ElementTree.fromstring(xml_string)
+ return _xml_element_from_tree(tree, target_class, version)
+
+
+Parse = parse
+xml_element_from_string = parse
+XmlElementFromString = xml_element_from_string
+
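+# A minimal, illustrative sketch (not part of the library) of how parsing
+# rules are declared and used; the 'Player' classes, namespace, and XML
+# below are made up.
+#
+#   class Score(XmlElement):
+#     _qname = '{http://example.com/game}score'
+#
+#   class Player(XmlElement):
+#     _qname = '{http://example.com/game}player'
+#     name = 'name'       # maps to the 'name' XML attribute
+#     score = [Score]     # repeating child element
+#
+#   player = parse('<p:player xmlns:p="http://example.com/game" name="ada">'
+#                  '<p:score>3</p:score><p:score>7</p:score></p:player>',
+#                  Player)
+#   # player.name == 'ada'
+#   # [s.text for s in player.score] == ['3', '7']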
+
+def _xml_element_from_tree(tree, target_class, version=1):
+ if target_class._qname is None:
+ instance = target_class()
+ instance._qname = tree.tag
+ instance._harvest_tree(tree, version)
+ return instance
+ # TODO handle the namespace-only case
+ # Namespace only will be used with Google Spreadsheets rows and
+ # Google Base item attributes.
+ elif tree.tag == _get_qname(target_class, version):
+ instance = target_class()
+ instance._harvest_tree(tree, version)
+ return instance
+ return None
+
+
+class XmlAttribute(object):
+
+ def __init__(self, qname, value):
+ self._qname = qname
+ self.value = value
+
diff --git a/python/atom/data.py b/python/atom/data.py
new file mode 100644
index 0000000..5a3d257
--- /dev/null
+++ b/python/atom/data.py
@@ -0,0 +1,340 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+
+
+XML_TEMPLATE = '{http://www.w3.org/XML/1998/namespace}%s'
+ATOM_TEMPLATE = '{http://www.w3.org/2005/Atom}%s'
+APP_TEMPLATE_V1 = '{http://purl.org/atom/app#}%s'
+APP_TEMPLATE_V2 = '{http://www.w3.org/2007/app}%s'
+
+
+class Name(atom.core.XmlElement):
+ """The atom:name element."""
+ _qname = ATOM_TEMPLATE % 'name'
+
+
+class Email(atom.core.XmlElement):
+ """The atom:email element."""
+ _qname = ATOM_TEMPLATE % 'email'
+
+
+class Uri(atom.core.XmlElement):
+ """The atom:uri element."""
+ _qname = ATOM_TEMPLATE % 'uri'
+
+
+class Person(atom.core.XmlElement):
+ """A foundation class which atom:author and atom:contributor extend.
+
+ A person contains information like name, email address, and web page URI for
+ an author or contributor to an Atom feed.
+ """
+ name = Name
+ email = Email
+ uri = Uri
+
+
+class Author(Person):
+ """The atom:author element.
+
+ An author is a required element in Feed unless each Entry contains an Author.
+ """
+ _qname = ATOM_TEMPLATE % 'author'
+
+
+class Contributor(Person):
+ """The atom:contributor element."""
+ _qname = ATOM_TEMPLATE % 'contributor'
+
+
+class Link(atom.core.XmlElement):
+ """The atom:link element."""
+ _qname = ATOM_TEMPLATE % 'link'
+ href = 'href'
+ rel = 'rel'
+ type = 'type'
+ hreflang = 'hreflang'
+ title = 'title'
+ length = 'length'
+
+
+class Generator(atom.core.XmlElement):
+ """The atom:generator element."""
+ _qname = ATOM_TEMPLATE % 'generator'
+ uri = 'uri'
+ version = 'version'
+
+
+class Text(atom.core.XmlElement):
+ """A foundation class from which atom:title, summary, etc. extend.
+
+ This class should never be instantiated.
+ """
+ type = 'type'
+
+
+class Title(Text):
+ """The atom:title element."""
+ _qname = ATOM_TEMPLATE % 'title'
+
+
+class Subtitle(Text):
+ """The atom:subtitle element."""
+ _qname = ATOM_TEMPLATE % 'subtitle'
+
+
+class Rights(Text):
+ """The atom:rights element."""
+ _qname = ATOM_TEMPLATE % 'rights'
+
+
+class Summary(Text):
+ """The atom:summary element."""
+ _qname = ATOM_TEMPLATE % 'summary'
+
+
+class Content(Text):
+ """The atom:content element."""
+ _qname = ATOM_TEMPLATE % 'content'
+ src = 'src'
+
+
+class Category(atom.core.XmlElement):
+ """The atom:category element."""
+ _qname = ATOM_TEMPLATE % 'category'
+ term = 'term'
+ scheme = 'scheme'
+ label = 'label'
+
+
+class Id(atom.core.XmlElement):
+ """The atom:id element."""
+ _qname = ATOM_TEMPLATE % 'id'
+
+
+class Icon(atom.core.XmlElement):
+ """The atom:icon element."""
+ _qname = ATOM_TEMPLATE % 'icon'
+
+
+class Logo(atom.core.XmlElement):
+ """The atom:logo element."""
+ _qname = ATOM_TEMPLATE % 'logo'
+
+
+class Draft(atom.core.XmlElement):
+ """The app:draft element which indicates if this entry should be public."""
+ _qname = (APP_TEMPLATE_V1 % 'draft', APP_TEMPLATE_V2 % 'draft')
+
+
+class Control(atom.core.XmlElement):
+ """The app:control element indicating restrictions on publication.
+
+ The APP control element may contain a draft element indicating whether or
+ not this entry should be publicly available.
+ """
+ _qname = (APP_TEMPLATE_V1 % 'control', APP_TEMPLATE_V2 % 'control')
+ draft = Draft
+
+
+class Date(atom.core.XmlElement):
+ """A parent class for atom:updated, published, etc."""
+
+
+class Updated(Date):
+ """The atom:updated element."""
+ _qname = ATOM_TEMPLATE % 'updated'
+
+
+class Published(Date):
+ """The atom:published element."""
+ _qname = ATOM_TEMPLATE % 'published'
+
+
+class LinkFinder(object):
+ """An "interface" providing methods to find link elements
+
+ Entry elements often contain multiple links which differ in the rel
+ attribute or content type. Often, developers are interested in a specific
+ type of link so this class provides methods to find specific classes of
+ links.
+
+ This class is used as a mixin in Atom entries and feeds.
+ """
+
+ def find_url(self, rel):
+ """Returns the URL in a link with the desired rel value."""
+ for link in self.link:
+ if link.rel == rel and link.href:
+ return link.href
+ return None
+
+ FindUrl = find_url
+
+ def get_link(self, rel):
+ """Returns a link object which has the desired rel value.
+
+ If you are interested in the URL instead of the link object,
+ consider using find_url instead.
+ """
+ for link in self.link:
+ if link.rel == rel and link.href:
+ return link
+ return None
+
+ GetLink = get_link
+
+ def find_self_link(self):
+ """Find the first link with rel set to 'self'
+
+ Returns:
+ A str containing the link's href or None if none of the links had rel
+ equal to 'self'
+ """
+ return self.find_url('self')
+
+ FindSelfLink = find_self_link
+
+ def get_self_link(self):
+ return self.get_link('self')
+
+ GetSelfLink = get_self_link
+
+ def find_edit_link(self):
+ return self.find_url('edit')
+
+ FindEditLink = find_edit_link
+
+ def get_edit_link(self):
+ return self.get_link('edit')
+
+ GetEditLink = get_edit_link
+
+ def find_edit_media_link(self):
+ link = self.find_url('edit-media')
+ # Search for media-edit as well since Picasa API used media-edit instead.
+ if link is None:
+ return self.find_url('media-edit')
+ return link
+
+ FindEditMediaLink = find_edit_media_link
+
+ def get_edit_media_link(self):
+ link = self.get_link('edit-media')
+ if link is None:
+ return self.get_link('media-edit')
+ return link
+
+ GetEditMediaLink = get_edit_media_link
+
+ def find_next_link(self):
+ return self.find_url('next')
+
+ FindNextLink = find_next_link
+
+ def get_next_link(self):
+ return self.get_link('next')
+
+ GetNextLink = get_next_link
+
+ def find_license_link(self):
+ return self.find_url('license')
+
+ FindLicenseLink = find_license_link
+
+ def get_license_link(self):
+ return self.get_link('license')
+
+ GetLicenseLink = get_license_link
+
+ def find_alternate_link(self):
+ return self.find_url('alternate')
+
+ FindAlternateLink = find_alternate_link
+
+ def get_alternate_link(self):
+ return self.get_link('alternate')
+
+ GetAlternateLink = get_alternate_link
+
+
+class FeedEntryParent(atom.core.XmlElement, LinkFinder):
+ """A super class for atom:feed and entry, contains shared attributes"""
+ author = [Author]
+ category = [Category]
+ contributor = [Contributor]
+ id = Id
+ link = [Link]
+ rights = Rights
+ title = Title
+ updated = Updated
+
+ def __init__(self, atom_id=None, text=None, *args, **kwargs):
+ if atom_id is not None:
+ self.id = atom_id
+ atom.core.XmlElement.__init__(self, text=text, *args, **kwargs)
+
+
+class Source(FeedEntryParent):
+ """The atom:source element."""
+ _qname = ATOM_TEMPLATE % 'source'
+ generator = Generator
+ icon = Icon
+ logo = Logo
+ subtitle = Subtitle
+
+
+class Entry(FeedEntryParent):
+ """The atom:entry element."""
+ _qname = ATOM_TEMPLATE % 'entry'
+ content = Content
+ published = Published
+ source = Source
+ summary = Summary
+ control = Control
+
+
+class Feed(Source):
+ """The atom:feed element which contains entries."""
+ _qname = ATOM_TEMPLATE % 'feed'
+ entry = [Entry]
+
+
+class ExtensionElement(atom.core.XmlElement):
+ """Provided for backwards compatibility to the v1 atom.ExtensionElement."""
+
+ def __init__(self, tag=None, namespace=None, attributes=None,
+ children=None, text=None, *args, **kwargs):
+ if namespace:
+ self._qname = '{%s}%s' % (namespace, tag)
+ else:
+ self._qname = tag
+ self.children = children or []
+ self.attributes = attributes or {}
+ self.text = text
+
+ _BecomeChildElement = atom.core.XmlElement._become_child
+
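+# A minimal, illustrative sketch (the title and URL below are made up):
+#
+#   entry = Entry(title=Title(text='Hello'),
+#                 link=[Link(rel='alternate', href='http://example.com/1')])
+#   entry.find_alternate_link()   # 'http://example.com/1'
+#   xml = entry.to_string(version=2)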
+
diff --git a/python/atom/http.py b/python/atom/http.py
new file mode 100644
index 0000000..df36693
--- /dev/null
+++ b/python/atom/http.py
@@ -0,0 +1,318 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""HttpClients in this module use httplib to make HTTP requests.
+
+This module makes HTTP requests based on httplib, but there are environments
+in which an httplib-based approach will not work (if running in Google App
+Engine for example). In those cases, higher level classes (like AtomService
+and GDataService) can swap out the HttpClient to transparently use a
+different mechanism for making HTTP requests.
+
+ HttpClient: Contains a request method which performs an HTTP call to the
+ server.
+
+ ProxiedHttpClient: Contains a request method which connects to a proxy using
+ settings stored in operating system environment variables then
+ performs an HTTP call to the endpoint server.
+"""
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import types
+import os
+import httplib
+import atom.url
+import atom.http_interface
+import socket
+import base64
+import atom.http_core
+ssl_imported = False
+ssl = None
+try:
+ import ssl
+ ssl_imported = True
+except ImportError:
+ pass
+
+
+
+class ProxyError(atom.http_interface.Error):
+ pass
+
+
+class TestConfigurationError(Exception):
+ pass
+
+
+DEFAULT_CONTENT_TYPE = 'application/atom+xml'
+
+
+class HttpClient(atom.http_interface.GenericHttpClient):
+ # Added to allow old v1 HttpClient objects to use the new
+ # http_code.HttpClient. Used in unit tests to inject a mock client.
+ v2_http_client = None
+
+ def __init__(self, headers=None):
+ self.debug = False
+ self.headers = headers or {}
+
+ def request(self, operation, url, data=None, headers=None):
+ """Performs an HTTP call to the server, supports GET, POST, PUT, and
+ DELETE.
+
+    Usage example, perform an HTTP GET on http://www.google.com/:
+ import atom.http
+ client = atom.http.HttpClient()
+ http_response = client.request('GET', 'http://www.google.com/')
+
+ Args:
+ operation: str The HTTP operation to be performed. This is usually one
+ of 'GET', 'POST', 'PUT', or 'DELETE'
+ data: filestream, list of parts, or other object which can be converted
+ to a string. Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, this method will
+ read a chunk of 100K bytes at a time and send them.
+ If the data is a list of parts to be sent, each part will be
+ evaluated and sent.
+ url: The full URL to which the request should be sent. Can be a string
+ or atom.url.Url.
+ headers: dict of strings. HTTP headers which should be sent
+ in the request.
+ """
+ all_headers = self.headers.copy()
+ if headers:
+ all_headers.update(headers)
+
+ # If the list of headers does not include a Content-Length, attempt to
+ # calculate it based on the data object.
+ if data and 'Content-Length' not in all_headers:
+ if isinstance(data, types.StringTypes):
+ all_headers['Content-Length'] = str(len(data))
+ else:
+ raise atom.http_interface.ContentLengthRequired('Unable to calculate '
+ 'the length of the data parameter. Specify a value for '
+ 'Content-Length')
+
+ # Set the content type to the default value if none was set.
+ if 'Content-Type' not in all_headers:
+ all_headers['Content-Type'] = DEFAULT_CONTENT_TYPE
+
+ if self.v2_http_client is not None:
+ http_request = atom.http_core.HttpRequest(method=operation)
+ atom.http_core.Uri.parse_uri(str(url)).modify_request(http_request)
+ http_request.headers = all_headers
+ if data:
+ http_request._body_parts.append(data)
+ return self.v2_http_client.request(http_request=http_request)
+
+ if not isinstance(url, atom.url.Url):
+ if isinstance(url, types.StringTypes):
+ url = atom.url.parse_url(url)
+ else:
+ raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
+ 'parameter because it was not a string or atom.url.Url')
+
+ connection = self._prepare_connection(url, all_headers)
+
+ if self.debug:
+ connection.debuglevel = 1
+
+ connection.putrequest(operation, self._get_access_url(url),
+ skip_host=True)
+ if url.port is not None:
+ connection.putheader('Host', '%s:%s' % (url.host, url.port))
+ else:
+ connection.putheader('Host', url.host)
+
+ # Overcome a bug in Python 2.4 and 2.5
+ # httplib.HTTPConnection.putrequest adding
+ # HTTP request header 'Host: www.google.com:443' instead of
+    # 'Host: www.google.com', and thus resulting in the error message
+ # 'Token invalid - AuthSub token has wrong scope' in the HTTP response.
+ if (url.protocol == 'https' and int(url.port or 443) == 443 and
+ hasattr(connection, '_buffer') and
+ isinstance(connection._buffer, list)):
+ header_line = 'Host: %s:443' % url.host
+ replacement_header_line = 'Host: %s' % url.host
+ try:
+ connection._buffer[connection._buffer.index(header_line)] = (
+ replacement_header_line)
+ except ValueError: # header_line missing from connection._buffer
+ pass
+
+ # Send the HTTP headers.
+ for header_name in all_headers:
+ connection.putheader(header_name, all_headers[header_name])
+ connection.endheaders()
+
+ # If there is data, send it in the request.
+ if data:
+ if isinstance(data, list):
+ for data_part in data:
+ _send_data_part(data_part, connection)
+ else:
+ _send_data_part(data, connection)
+
+ # Return the HTTP Response from the server.
+ return connection.getresponse()
+
+ def _prepare_connection(self, url, headers):
+ if not isinstance(url, atom.url.Url):
+ if isinstance(url, types.StringTypes):
+ url = atom.url.parse_url(url)
+ else:
+ raise atom.http_interface.UnparsableUrlObject('Unable to parse url '
+ 'parameter because it was not a string or atom.url.Url')
+ if url.protocol == 'https':
+ if not url.port:
+ return httplib.HTTPSConnection(url.host)
+ return httplib.HTTPSConnection(url.host, int(url.port))
+ else:
+ if not url.port:
+ return httplib.HTTPConnection(url.host)
+ return httplib.HTTPConnection(url.host, int(url.port))
+
+ def _get_access_url(self, url):
+ return url.to_string()
+
+
+class ProxiedHttpClient(HttpClient):
+ """Performs an HTTP request through a proxy.
+
+  The proxy settings are obtained from environment variables. The URL of the
+  proxy server is assumed to be stored in the 'https_proxy' environment
+  variable for HTTPS requests and in 'http_proxy' for HTTP requests. If the
+  proxy server requires
+ a Basic Auth authorization header, the username and password are expected to
+ be in the 'proxy-username' or 'proxy_username' variable and the
+ 'proxy-password' or 'proxy_password' variable.
+
+ After connecting to the proxy server, the request is completed as in
+ HttpClient.request.
+ """
+ def _prepare_connection(self, url, headers):
+ proxy_auth = _get_proxy_auth()
+ if url.protocol == 'https':
+ # destination is https
+ proxy = os.environ.get('https_proxy')
+ if proxy:
+ # Set any proxy auth headers
+ if proxy_auth:
+ proxy_auth = 'Proxy-authorization: %s' % proxy_auth
+
+ # Construct the proxy connect command.
+ port = url.port
+ if not port:
+ port = '443'
+ proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (url.host, port)
+
+ # Set the user agent to send to the proxy
+ if headers and 'User-Agent' in headers:
+ user_agent = 'User-Agent: %s\r\n' % (headers['User-Agent'])
+ else:
+ user_agent = ''
+
+ proxy_pieces = '%s%s%s\r\n' % (proxy_connect, proxy_auth, user_agent)
+
+        # Find the proxy host and port.
+        proxy_url = atom.url.parse_url(proxy)
+        if not proxy_url.port:
+          proxy_url.port = '80'
+
+        # Connect to the proxy server, very simple recv and error checking.
+        p_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        p_sock.connect((proxy_url.host, int(proxy_url.port)))
+        p_sock.sendall(proxy_pieces)
+        response = ''
+
+        # Wait for the full response.
+        while response.find("\r\n\r\n") == -1:
+          response += p_sock.recv(8192)
+
+        p_status = response.split()[1]
+        if p_status != str(200):
+          raise ProxyError('Error status=%s' % str(p_status))
+
+        # Trivial setup for ssl socket.
+        sslobj = None
+        if ssl_imported:
+          sslobj = ssl.wrap_socket(p_sock, None, None)
+        else:
+          sock_ssl = socket.ssl(p_sock, None, None)
+          sslobj = httplib.FakeSocket(p_sock, sock_ssl)
+
+        # Initialize httplib and replace its socket with the proxy socket.
+        connection = httplib.HTTPConnection(proxy_url.host)
+        connection.sock = sslobj
+        return connection
+      else:
+        # The request was HTTPS, but there was no https_proxy set.
+        return HttpClient._prepare_connection(self, url, headers)
+ else:
+ proxy = os.environ.get('http_proxy')
+ if proxy:
+ # Find the proxy host and port.
+ proxy_url = atom.url.parse_url(proxy)
+ if not proxy_url.port:
+ proxy_url.port = '80'
+
+ if proxy_auth:
+ headers['Proxy-Authorization'] = proxy_auth.strip()
+
+ return httplib.HTTPConnection(proxy_url.host, int(proxy_url.port))
+ else:
+ # The request was HTTP, but there was no http_proxy set.
+ return HttpClient._prepare_connection(self, url, headers)
+
+ def _get_access_url(self, url):
+ return url.to_string()
+
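+# A minimal usage sketch (illustrative only, not part of this module): route a
+# request through ProxiedHttpClient by exporting the proxy environment
+# variables described above. The proxy host, credentials, and User-Agent value
+# below are placeholders, and the request signature is assumed to match the
+# one described by atom.http_interface.GenericHttpClient.
+#
+#   import os
+#   import atom.http
+#
+#   os.environ['https_proxy'] = 'http://proxy.example.com:3128'
+#   os.environ['proxy_username'] = 'someuser'
+#   os.environ['proxy_password'] = 'somepass'
+#   client = atom.http.ProxiedHttpClient()
+#   response = client.request('GET', 'https://www.google.com/',
+#                             headers={'User-Agent': 'proxy-example'})
+#   print response.status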
+
+def _get_proxy_auth():
+ proxy_username = os.environ.get('proxy-username')
+ if not proxy_username:
+ proxy_username = os.environ.get('proxy_username')
+ proxy_password = os.environ.get('proxy-password')
+ if not proxy_password:
+ proxy_password = os.environ.get('proxy_password')
+ if proxy_username:
+ user_auth = base64.encodestring('%s:%s' % (proxy_username,
+ proxy_password))
+ return 'Basic %s\r\n' % (user_auth.strip())
+ else:
+ return ''
+
+
+def _send_data_part(data, connection):
+ if isinstance(data, types.StringTypes):
+ connection.send(data)
+ return
+ # Check to see if data is a file-like object that has a read method.
+ elif hasattr(data, 'read'):
+ # Read the file and send it a chunk at a time.
+ while 1:
+ binarydata = data.read(100000)
+ if binarydata == '': break
+ connection.send(binarydata)
+ return
+ else:
+ # The data object was not a file.
+ # Try to convert to a string and send the data.
+ connection.send(str(data))
+ return
diff --git a/python/atom/http_core.py b/python/atom/http_core.py
new file mode 100644
index 0000000..b9d6fb1
--- /dev/null
+++ b/python/atom/http_core.py
@@ -0,0 +1,597 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+# TODO: add proxy handling.
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import os
+import StringIO
+import urlparse
+import urllib
+import httplib
+ssl = None
+try:
+ import ssl
+except ImportError:
+ pass
+
+
+
+class Error(Exception):
+ pass
+
+
+class UnknownSize(Error):
+ pass
+
+
+class ProxyError(Error):
+ pass
+
+
+MIME_BOUNDARY = 'END_OF_PART'
+
+
+def get_headers(http_response):
+ """Retrieves all HTTP headers from an HTTP response from the server.
+
+  This function is provided for backwards compatibility with Python 2.2 and
+  2.3. The httplib.HTTPResponse object in those versions does not have a
+  getheaders method, so this function uses getheaders when it is available
+  and otherwise retrieves the common headers listed below using getheader.
+ """
+ if hasattr(http_response, 'getheaders'):
+ return http_response.getheaders()
+ else:
+ headers = []
+ for header in (
+ 'location', 'content-type', 'content-length', 'age', 'allow',
+ 'cache-control', 'content-location', 'content-encoding', 'date',
+ 'etag', 'expires', 'last-modified', 'pragma', 'server',
+ 'set-cookie', 'transfer-encoding', 'vary', 'via', 'warning',
+ 'www-authenticate', 'gdata-version'):
+ value = http_response.getheader(header, None)
+ if value is not None:
+ headers.append((header, value))
+ return headers
+
+
+class HttpRequest(object):
+ """Contains all of the parameters for an HTTP 1.1 request.
+
+ The HTTP headers are represented by a dictionary, and it is the
+ responsibility of the user to ensure that duplicate field names are combined
+ into one header value according to the rules in section 4.2 of RFC 2616.
+ """
+ method = None
+ uri = None
+
+ def __init__(self, uri=None, method=None, headers=None):
+ """Construct an HTTP request.
+
+ Args:
+ uri: The full path or partial path as a Uri object or a string.
+ method: The HTTP method for the request, examples include 'GET', 'POST',
+ etc.
+ headers: dict of strings The HTTP headers to include in the request.
+ """
+ self.headers = headers or {}
+ self._body_parts = []
+ if method is not None:
+ self.method = method
+ if isinstance(uri, (str, unicode)):
+ uri = Uri.parse_uri(uri)
+ self.uri = uri or Uri()
+
+
+ def add_body_part(self, data, mime_type, size=None):
+ """Adds data to the HTTP request body.
+
+ If more than one part is added, this is assumed to be a mime-multipart
+ request. This method is designed to create MIME 1.0 requests as specified
+ in RFC 1341.
+
+ Args:
+ data: str or a file-like object containing a part of the request body.
+ mime_type: str The MIME type describing the data
+      size: int Required if the data is a file-like object. If the data is a
+          string, the size is calculated automatically and this parameter is
+          ignored.
+ """
+ if isinstance(data, str):
+ size = len(data)
+ if size is None:
+ # TODO: support chunked transfer if some of the body is of unknown size.
+ raise UnknownSize('Each part of the body must have a known size.')
+ if 'Content-Length' in self.headers:
+ content_length = int(self.headers['Content-Length'])
+ else:
+ content_length = 0
+ # If this is the first part added to the body, then this is not a multipart
+ # request.
+ if len(self._body_parts) == 0:
+ self.headers['Content-Type'] = mime_type
+ content_length = size
+ self._body_parts.append(data)
+ elif len(self._body_parts) == 1:
+ # This is the first member in a mime-multipart request, so change the
+ # _body_parts list to indicate a multipart payload.
+ self._body_parts.insert(0, 'Media multipart posting')
+ boundary_string = '\r\n--%s\r\n' % (MIME_BOUNDARY,)
+ content_length += len(boundary_string) + size
+ self._body_parts.insert(1, boundary_string)
+ content_length += len('Media multipart posting')
+ # Put the content type of the first part of the body into the multipart
+ # payload.
+ original_type_string = 'Content-Type: %s\r\n\r\n' % (
+ self.headers['Content-Type'],)
+ self._body_parts.insert(2, original_type_string)
+ content_length += len(original_type_string)
+ boundary_string = '\r\n--%s\r\n' % (MIME_BOUNDARY,)
+ self._body_parts.append(boundary_string)
+ content_length += len(boundary_string)
+ # Change the headers to indicate this is now a mime multipart request.
+ self.headers['Content-Type'] = 'multipart/related; boundary="%s"' % (
+ MIME_BOUNDARY,)
+ self.headers['MIME-version'] = '1.0'
+ # Include the mime type of this part.
+ type_string = 'Content-Type: %s\r\n\r\n' % (mime_type)
+ self._body_parts.append(type_string)
+ content_length += len(type_string)
+ self._body_parts.append(data)
+ ending_boundary_string = '\r\n--%s--' % (MIME_BOUNDARY,)
+ self._body_parts.append(ending_boundary_string)
+ content_length += len(ending_boundary_string)
+ else:
+ # This is a mime multipart request.
+ boundary_string = '\r\n--%s\r\n' % (MIME_BOUNDARY,)
+ self._body_parts.insert(-1, boundary_string)
+ content_length += len(boundary_string) + size
+ # Include the mime type of this part.
+ type_string = 'Content-Type: %s\r\n\r\n' % (mime_type)
+ self._body_parts.insert(-1, type_string)
+ content_length += len(type_string)
+ self._body_parts.insert(-1, data)
+ self.headers['Content-Length'] = str(content_length)
+ # I could add an "append_to_body_part" method as well.
+
+ AddBodyPart = add_body_part
+
+ def add_form_inputs(self, form_data,
+ mime_type='application/x-www-form-urlencoded'):
+ """Form-encodes and adds data to the request body.
+
+ Args:
+      form_data: dict or sequence of two-member tuples containing the form
+          keys and values.
+ mime_type: str The MIME type of the form data being sent. Defaults
+ to 'application/x-www-form-urlencoded'.
+ """
+ body = urllib.urlencode(form_data)
+ self.add_body_part(body, mime_type)
+
+ AddFormInputs = add_form_inputs
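+
+  # A short illustrative sketch (not part of this module): building a
+  # mime-multipart body with add_body_part and a form-encoded body with
+  # add_form_inputs. The URLs, file name, and form values are placeholders.
+  #
+  #   upload = HttpRequest(uri='http://www.example.com/upload', method='POST')
+  #   upload.add_body_part('<entry/>', 'application/atom+xml')
+  #   upload.add_body_part(open('photo.jpg', 'rb'), 'image/jpeg',
+  #                        size=os.path.getsize('photo.jpg'))
+  #
+  #   login = HttpRequest(uri='http://www.example.com/login', method='POST')
+  #   login.add_form_inputs({'user': 'someone', 'password': 'secret'})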
+
+ def _copy(self):
+ """Creates a deep copy of this request."""
+ copied_uri = Uri(self.uri.scheme, self.uri.host, self.uri.port,
+ self.uri.path, self.uri.query.copy())
+ new_request = HttpRequest(uri=copied_uri, method=self.method,
+ headers=self.headers.copy())
+ new_request._body_parts = self._body_parts[:]
+ return new_request
+
+ def _dump(self):
+ """Converts to a printable string for debugging purposes.
+
+ In order to preserve the request, it does not read from file-like objects
+ in the body.
+ """
+ output = 'HTTP Request\n method: %s\n url: %s\n headers:\n' % (
+ self.method, str(self.uri))
+ for header, value in self.headers.iteritems():
+ output += ' %s: %s\n' % (header, value)
+ output += ' body sections:\n'
+ i = 0
+ for part in self._body_parts:
+ if isinstance(part, (str, unicode)):
+ output += ' %s: %s\n' % (i, part)
+ else:
+        output += '   %s: <file like object>\n' % i
+ i += 1
+ return output
+
+
+def _apply_defaults(http_request):
+ if http_request.uri.scheme is None:
+ if http_request.uri.port == 443:
+ http_request.uri.scheme = 'https'
+ else:
+ http_request.uri.scheme = 'http'
+
+
+class Uri(object):
+ """A URI as used in HTTP 1.1"""
+ scheme = None
+ host = None
+ port = None
+ path = None
+
+ def __init__(self, scheme=None, host=None, port=None, path=None, query=None):
+ """Constructor for a URI.
+
+ Args:
+ scheme: str This is usually 'http' or 'https'.
+ host: str The host name or IP address of the desired server.
+      port: int The server's port number.
+ path: str The path of the resource following the host. This begins with
+ a /, example: '/calendar/feeds/default/allcalendars/full'
+ query: dict of strings The URL query parameters. The keys and values are
+ both escaped so this dict should contain the unescaped values.
+ For example {'my key': 'val', 'second': '!!!'} will become
+ '?my+key=val&second=%21%21%21' which is appended to the path.
+ """
+ self.query = query or {}
+ if scheme is not None:
+ self.scheme = scheme
+ if host is not None:
+ self.host = host
+ if port is not None:
+ self.port = port
+ if path:
+ self.path = path
+
+ def _get_query_string(self):
+ param_pairs = []
+ for key, value in self.query.iteritems():
+ param_pairs.append('='.join((urllib.quote_plus(key),
+ urllib.quote_plus(str(value)))))
+ return '&'.join(param_pairs)
+
+ def _get_relative_path(self):
+ """Returns the path with the query parameters escaped and appended."""
+ param_string = self._get_query_string()
+ if self.path is None:
+ path = '/'
+ else:
+ path = self.path
+ if param_string:
+ return '?'.join([path, param_string])
+ else:
+ return path
+
+ def _to_string(self):
+ if self.scheme is None and self.port == 443:
+ scheme = 'https'
+ elif self.scheme is None:
+ scheme = 'http'
+ else:
+ scheme = self.scheme
+ if self.path is None:
+ path = '/'
+ else:
+ path = self.path
+ if self.port is None:
+ return '%s://%s%s' % (scheme, self.host, self._get_relative_path())
+ else:
+ return '%s://%s:%s%s' % (scheme, self.host, str(self.port),
+ self._get_relative_path())
+
+ def __str__(self):
+ return self._to_string()
+
+ def modify_request(self, http_request=None):
+ """Sets HTTP request components based on the URI."""
+ if http_request is None:
+ http_request = HttpRequest()
+ if http_request.uri is None:
+ http_request.uri = Uri()
+ # Determine the correct scheme.
+ if self.scheme:
+ http_request.uri.scheme = self.scheme
+ if self.port:
+ http_request.uri.port = self.port
+ if self.host:
+ http_request.uri.host = self.host
+ # Set the relative uri path
+ if self.path:
+ http_request.uri.path = self.path
+ if self.query:
+ http_request.uri.query = self.query.copy()
+ return http_request
+
+ ModifyRequest = modify_request
+
+ def parse_uri(uri_string):
+ """Creates a Uri object which corresponds to the URI string.
+
+ This method can accept partial URIs, but it will leave missing
+ members of the Uri unset.
+ """
+ parts = urlparse.urlparse(uri_string)
+ uri = Uri()
+ if parts[0]:
+ uri.scheme = parts[0]
+ if parts[1]:
+ host_parts = parts[1].split(':')
+ if host_parts[0]:
+ uri.host = host_parts[0]
+ if len(host_parts) > 1:
+ uri.port = int(host_parts[1])
+ if parts[2]:
+ uri.path = parts[2]
+ if parts[4]:
+ param_pairs = parts[4].split('&')
+ for pair in param_pairs:
+ pair_parts = pair.split('=')
+ if len(pair_parts) > 1:
+ uri.query[urllib.unquote_plus(pair_parts[0])] = (
+ urllib.unquote_plus(pair_parts[1]))
+ elif len(pair_parts) == 1:
+ uri.query[urllib.unquote_plus(pair_parts[0])] = None
+ return uri
+
+ parse_uri = staticmethod(parse_uri)
+
+ ParseUri = parse_uri
+
+
+parse_uri = Uri.parse_uri
+
+
+ParseUri = Uri.parse_uri
+
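+# A brief illustrative sketch (not part of this module) of how parse_uri
+# decomposes a URL string; the example URL is a placeholder.
+#
+#   uri = parse_uri('http://www.example.com:8080/feed?alt=json')
+#   uri.scheme   # 'http'
+#   uri.host     # 'www.example.com'
+#   uri.port     # 8080
+#   uri.path     # '/feed'
+#   uri.query    # {'alt': 'json'}
+#   str(uri)     # 'http://www.example.com:8080/feed?alt=json'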
+
+class HttpResponse(object):
+ status = None
+ reason = None
+ _body = None
+
+ def __init__(self, status=None, reason=None, headers=None, body=None):
+ self._headers = headers or {}
+ if status is not None:
+ self.status = status
+ if reason is not None:
+ self.reason = reason
+ if body is not None:
+ if hasattr(body, 'read'):
+ self._body = body
+ else:
+ self._body = StringIO.StringIO(body)
+
+ def getheader(self, name, default=None):
+ if name in self._headers:
+ return self._headers[name]
+ else:
+ return default
+
+ def getheaders(self):
+ return self._headers
+
+ def read(self, amt=None):
+ if self._body is None:
+ return None
+ if not amt:
+ return self._body.read()
+ else:
+ return self._body.read(amt)
+
+
+def _dump_response(http_response):
+ """Converts to a string for printing debug messages.
+
+ Does not read the body since that may consume the content.
+ """
+ output = 'HttpResponse\n status: %s\n reason: %s\n headers:' % (
+ http_response.status, http_response.reason)
+ headers = get_headers(http_response)
+ if isinstance(headers, dict):
+ for header, value in headers.iteritems():
+ output += ' %s: %s\n' % (header, value)
+ else:
+ for pair in headers:
+ output += ' %s: %s\n' % (pair[0], pair[1])
+ return output
+
+
+class HttpClient(object):
+ """Performs HTTP requests using httplib."""
+ debug = None
+
+ def request(self, http_request):
+ return self._http_request(http_request.method, http_request.uri,
+ http_request.headers, http_request._body_parts)
+
+ Request = request
+
+ def _get_connection(self, uri, headers=None):
+ """Opens a socket connection to the server to set up an HTTP request.
+
+ Args:
+ uri: The full URL for the request as a Uri object.
+ headers: A dict of string pairs containing the HTTP headers for the
+ request.
+ """
+ connection = None
+ if uri.scheme == 'https':
+ if not uri.port:
+ connection = httplib.HTTPSConnection(uri.host)
+ else:
+ connection = httplib.HTTPSConnection(uri.host, int(uri.port))
+ else:
+ if not uri.port:
+ connection = httplib.HTTPConnection(uri.host)
+ else:
+ connection = httplib.HTTPConnection(uri.host, int(uri.port))
+ return connection
+
+ def _http_request(self, method, uri, headers=None, body_parts=None):
+ """Makes an HTTP request using httplib.
+
+ Args:
+ method: str example: 'GET', 'POST', 'PUT', 'DELETE', etc.
+ uri: str or atom.http_core.Uri
+ headers: dict of strings mapping to strings which will be sent as HTTP
+ headers in the request.
+ body_parts: list of strings, objects with a read method, or objects
+ which can be converted to strings using str. Each of these
+ will be sent in order as the body of the HTTP request.
+ """
+ if isinstance(uri, (str, unicode)):
+ uri = Uri.parse_uri(uri)
+
+ connection = self._get_connection(uri, headers=headers)
+
+ if self.debug:
+ connection.debuglevel = 1
+
+ if connection.host != uri.host:
+ connection.putrequest(method, str(uri))
+ else:
+ connection.putrequest(method, uri._get_relative_path())
+
+    # Work around a bug in Python 2.4 and 2.5 where
+    # httplib.HTTPConnection.putrequest adds the HTTP request header
+    # 'Host: www.google.com:443' instead of 'Host: www.google.com', which
+    # results in the error message
+    # 'Token invalid - AuthSub token has wrong scope' in the HTTP response.
+ if (uri.scheme == 'https' and int(uri.port or 443) == 443 and
+ hasattr(connection, '_buffer') and
+ isinstance(connection._buffer, list)):
+ header_line = 'Host: %s:443' % uri.host
+ replacement_header_line = 'Host: %s' % uri.host
+ try:
+ connection._buffer[connection._buffer.index(header_line)] = (
+ replacement_header_line)
+ except ValueError: # header_line missing from connection._buffer
+ pass
+
+ # Send the HTTP headers.
+ for header_name, value in headers.iteritems():
+ connection.putheader(header_name, value)
+ connection.endheaders()
+
+ # If there is data, send it in the request.
+ if body_parts:
+ for part in body_parts:
+ _send_data_part(part, connection)
+
+ # Return the HTTP Response from the server.
+ return connection.getresponse()
+
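+# A minimal usage sketch (illustrative only): sending a request with the
+# HttpClient above. The feed URL and User-Agent value are placeholders.
+#
+#   client = HttpClient()
+#   request = HttpRequest(uri='http://www.example.com/feed', method='GET')
+#   request.headers['User-Agent'] = 'http-core-example'
+#   response = client.request(request)
+#   print response.status, response.reason
+#   body = response.read()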
+
+def _send_data_part(data, connection):
+ if isinstance(data, (str, unicode)):
+ # I might want to just allow str, not unicode.
+ connection.send(data)
+ return
+ # Check to see if data is a file-like object that has a read method.
+ elif hasattr(data, 'read'):
+ # Read the file and send it a chunk at a time.
+ while 1:
+ binarydata = data.read(100000)
+ if binarydata == '': break
+ connection.send(binarydata)
+ return
+ else:
+ # The data object was not a file.
+ # Try to convert to a string and send the data.
+ connection.send(str(data))
+ return
+
+
+class ProxiedHttpClient(HttpClient):
+
+ def _get_connection(self, uri, headers=None):
+ # Check to see if there are proxy settings required for this request.
+ proxy = None
+ if uri.scheme == 'https':
+ proxy = os.environ.get('https_proxy')
+ elif uri.scheme == 'http':
+ proxy = os.environ.get('http_proxy')
+ if not proxy:
+ return HttpClient._get_connection(self, uri, headers=headers)
+ # Now we have the URL of the appropriate proxy server.
+ # Get a username and password for the proxy if required.
+ proxy_auth = _get_proxy_auth()
+ if uri.scheme == 'https':
+ import socket
+ if proxy_auth:
+ proxy_auth = 'Proxy-authorization: %s' % proxy_auth
+ # Construct the proxy connect command.
+ port = uri.port
+ if not port:
+ port = 443
+ proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (uri.host, port)
+ # Set the user agent to send to the proxy
+ user_agent = ''
+ if headers and 'User-Agent' in headers:
+ user_agent = 'User-Agent: %s\r\n' % (headers['User-Agent'])
+ proxy_pieces = '%s%s%s\r\n' % (proxy_connect, proxy_auth, user_agent)
+ # Find the proxy host and port.
+ proxy_uri = Uri.parse_uri(proxy)
+ if not proxy_uri.port:
+ proxy_uri.port = '80'
+      # Connect to the proxy server using a simple recv loop and minimal
+      # error checking.
+ p_sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
+ p_sock.connect((proxy_uri.host, int(proxy_uri.port)))
+ p_sock.sendall(proxy_pieces)
+ response = ''
+ # Wait for the full response.
+ while response.find("\r\n\r\n") == -1:
+ response += p_sock.recv(8192)
+ p_status = response.split()[1]
+ if p_status != str(200):
+ raise ProxyError('Error status=%s' % str(p_status))
+ # Trivial setup for ssl socket.
+ sslobj = None
+ if ssl is not None:
+ sslobj = ssl.wrap_socket(p_sock, None, None)
+ else:
+        sock_ssl = socket.ssl(p_sock, None, None)
+        sslobj = httplib.FakeSocket(p_sock, sock_ssl)
+      # Initialize httplib and replace its socket with the proxy socket.
+ connection = httplib.HTTPConnection(proxy_uri.host)
+ connection.sock = sslobj
+ return connection
+ elif uri.scheme == 'http':
+ proxy_uri = Uri.parse_uri(proxy)
+ if not proxy_uri.port:
+ proxy_uri.port = '80'
+ if proxy_auth:
+ headers['Proxy-Authorization'] = proxy_auth.strip()
+ return httplib.HTTPConnection(proxy_uri.host, int(proxy_uri.port))
+ return None
+
+
+def _get_proxy_auth():
+ import base64
+ proxy_username = os.environ.get('proxy-username')
+ if not proxy_username:
+ proxy_username = os.environ.get('proxy_username')
+ proxy_password = os.environ.get('proxy-password')
+ if not proxy_password:
+ proxy_password = os.environ.get('proxy_password')
+ if proxy_username:
+ user_auth = base64.b64encode('%s:%s' % (proxy_username,
+ proxy_password))
+ return 'Basic %s\r\n' % (user_auth.strip())
+ else:
+ return ''
diff --git a/python/atom/http_interface.py b/python/atom/http_interface.py
new file mode 100644
index 0000000..275f5a6
--- /dev/null
+++ b/python/atom/http_interface.py
@@ -0,0 +1,156 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides a common interface for all HTTP requests.
+
+ HttpResponse: Represents the server's response to an HTTP request. Provides
+ an interface identical to httplib.HTTPResponse which is the response
+ expected from higher level classes which use HttpClient.request.
+
+ GenericHttpClient: Provides an interface (superclass) for an object
+ responsible for making HTTP requests. Subclasses of this object are
+ used in AtomService and GDataService to make requests to the server. By
+ changing the http_client member object, the AtomService is able to make
+ HTTP requests using different logic (for example, when running on
+ Google App Engine, the http_client makes requests using the App Engine
+ urlfetch API).
+"""
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import StringIO
+
+
+USER_AGENT = '%s GData-Python/2.0.12'
+
+
+class Error(Exception):
+ pass
+
+
+class UnparsableUrlObject(Error):
+ pass
+
+
+class ContentLengthRequired(Error):
+ pass
+
+
+class HttpResponse(object):
+ def __init__(self, body=None, status=None, reason=None, headers=None):
+ """Constructor for an HttpResponse object.
+
+ HttpResponse represents the server's response to an HTTP request from
+ the client. The HttpClient.request method returns a httplib.HTTPResponse
+ object and this HttpResponse class is designed to mirror the interface
+ exposed by httplib.HTTPResponse.
+
+ Args:
+ body: A file like object, with a read() method. The body could also
+ be a string, and the constructor will wrap it so that
+ HttpResponse.read(self) will return the full string.
+ status: The HTTP status code as an int. Example: 200, 201, 404.
+ reason: The HTTP status message which follows the code. Example:
+ OK, Created, Not Found
+ headers: A dictionary containing the HTTP headers in the server's
+ response. A common header in the response is Content-Length.
+ """
+ if body:
+ if hasattr(body, 'read'):
+ self._body = body
+ else:
+ self._body = StringIO.StringIO(body)
+ else:
+ self._body = None
+ if status is not None:
+ self.status = int(status)
+ else:
+ self.status = None
+ self.reason = reason
+ self._headers = headers or {}
+
+ def getheader(self, name, default=None):
+ if name in self._headers:
+ return self._headers[name]
+ else:
+ return default
+
+ def read(self, amt=None):
+ if not amt:
+ return self._body.read()
+ else:
+ return self._body.read(amt)
+
+
+class GenericHttpClient(object):
+ debug = False
+
+ def __init__(self, http_client, headers=None):
+    """Wraps an HTTP client and stores headers to include in every request.
+
+ Args:
+ http_client: An object which provides a request method to make an HTTP
+ request. The request method in GenericHttpClient performs a
+ call-through to the contained HTTP client object.
+ headers: A dictionary containing HTTP headers which should be included
+ in every HTTP request. Common persistent headers include
+ 'User-Agent'.
+ """
+ self.http_client = http_client
+ self.headers = headers or {}
+
+ def request(self, operation, url, data=None, headers=None):
+ all_headers = self.headers.copy()
+ if headers:
+ all_headers.update(headers)
+ return self.http_client.request(operation, url, data=data,
+ headers=all_headers)
+
+ def get(self, url, headers=None):
+ return self.request('GET', url, headers=headers)
+
+ def post(self, url, data, headers=None):
+ return self.request('POST', url, data=data, headers=headers)
+
+ def put(self, url, data, headers=None):
+ return self.request('PUT', url, data=data, headers=headers)
+
+ def delete(self, url, headers=None):
+ return self.request('DELETE', url, headers=headers)
+
+
+class GenericToken(object):
+ """Represents an Authorization token to be added to HTTP requests.
+
+  Some Authorization headers include calculated fields (digital
+  signatures, for example) which are based on the parameters of the HTTP
+ request. Therefore the token is responsible for signing the request
+ and adding the Authorization header.
+ """
+ def perform_request(self, http_client, operation, url, data=None,
+ headers=None):
+ """For the GenericToken, no Authorization token is set."""
+ return http_client.request(operation, url, data=data, headers=headers)
+
+ def valid_for_scope(self, url):
+ """Tells the caller if the token authorizes access to the desired URL.
+
+ Since the generic token doesn't add an auth header, it is not valid for
+ any scope.
+ """
+ return False
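+
+
+# A sketch (illustrative only) of a token subclass that adds a static
+# Authorization header; the class name and header value are placeholders.
+#
+#   class StaticToken(GenericToken):
+#
+#     def __init__(self, auth_header):
+#       self.auth_header = auth_header
+#
+#     def perform_request(self, http_client, operation, url, data=None,
+#                         headers=None):
+#       headers = headers or {}
+#       headers['Authorization'] = self.auth_header
+#       return http_client.request(operation, url, data=data, headers=headers)
+#
+#     def valid_for_scope(self, url):
+#       return True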
diff --git a/python/atom/mock_http.py b/python/atom/mock_http.py
new file mode 100644
index 0000000..c420f37
--- /dev/null
+++ b/python/atom/mock_http.py
@@ -0,0 +1,132 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import atom.http_interface
+import atom.url
+
+
+class Error(Exception):
+ pass
+
+
+class NoRecordingFound(Error):
+ pass
+
+
+class MockRequest(object):
+ """Holds parameters of an HTTP request for matching against future requests.
+ """
+ def __init__(self, operation, url, data=None, headers=None):
+ self.operation = operation
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ self.url = url
+ self.data = data
+ self.headers = headers
+
+
+class MockResponse(atom.http_interface.HttpResponse):
+ """Simulates an httplib.HTTPResponse object."""
+ def __init__(self, body=None, status=None, reason=None, headers=None):
+ if body and hasattr(body, 'read'):
+ self.body = body.read()
+ else:
+ self.body = body
+ if status is not None:
+ self.status = int(status)
+ else:
+ self.status = None
+ self.reason = reason
+ self._headers = headers or {}
+
+ def read(self):
+ return self.body
+
+
+class MockHttpClient(atom.http_interface.GenericHttpClient):
+ def __init__(self, headers=None, recordings=None, real_client=None):
+    """An HttpClient which responds to requests with stored data.
+
+ The request-response pairs are stored as tuples in a member list named
+ recordings.
+
+ The MockHttpClient can be switched from replay mode to record mode by
+ setting the real_client member to an instance of an HttpClient which will
+    make real HTTP requests and store the server's responses in the list of
+    recordings.
+
+ Args:
+ headers: dict containing HTTP headers which should be included in all
+ HTTP requests.
+ recordings: The initial recordings to be used for responses. This list
+ contains tuples in the form: (MockRequest, MockResponse)
+ real_client: An HttpClient which will make a real HTTP request. The
+ response will be converted into a MockResponse and stored in
+ recordings.
+ """
+ self.recordings = recordings or []
+ self.real_client = real_client
+ self.headers = headers or {}
+
+ def add_response(self, response, operation, url, data=None, headers=None):
+ """Adds a request-response pair to the recordings list.
+
+ After the recording is added, future matching requests will receive the
+ response.
+
+ Args:
+ response: MockResponse
+ operation: str
+ url: str
+ data: str, Currently the data is ignored when looking for matching
+ requests.
+ headers: dict of strings: Currently the headers are ignored when
+ looking for matching requests.
+ """
+ request = MockRequest(operation, url, data=data, headers=headers)
+ self.recordings.append((request, response))
+
+ def request(self, operation, url, data=None, headers=None):
+ """Returns a matching MockResponse from the recordings.
+
+ If the real_client is set, the request will be passed along and the
+ server's response will be added to the recordings and also returned.
+
+ If there is no match, a NoRecordingFound error will be raised.
+ """
+ if self.real_client is None:
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ for recording in self.recordings:
+ if recording[0].operation == operation and recording[0].url == url:
+ return recording[1]
+      raise NoRecordingFound('No recordings found for %s %s' % (
+ operation, url))
+ else:
+ # There is a real HTTP client, so make the request, and record the
+ # response.
+ response = self.real_client.request(operation, url, data=data,
+ headers=headers)
+ # TODO: copy the headers
+ stored_response = MockResponse(body=response, status=response.status,
+ reason=response.reason)
+ self.add_response(stored_response, operation, url, data=data,
+ headers=headers)
+ return stored_response
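+
+
+# An illustrative record/replay sketch (not part of this module); the URL and
+# canned body are placeholders, and matching assumes atom.url.Url instances
+# compare equal for identical URLs.
+#
+#   client = MockHttpClient()
+#   canned = MockResponse(body='<feed/>', status=200, reason='OK')
+#   client.add_response(canned, 'GET', 'http://www.example.com/feed')
+#   response = client.request('GET', 'http://www.example.com/feed')
+#   assert response.status == 200
+#   assert response.read() == '<feed/>'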
diff --git a/python/atom/mock_http_core.py b/python/atom/mock_http_core.py
new file mode 100644
index 0000000..f55cdc5
--- /dev/null
+++ b/python/atom/mock_http_core.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import StringIO
+import pickle
+import os.path
+import tempfile
+import atom.http_core
+
+
+class Error(Exception):
+ pass
+
+
+class NoRecordingFound(Error):
+ pass
+
+
+class MockHttpClient(object):
+ debug = None
+ real_client = None
+ last_request_was_live = False
+
+ # The following members are used to construct the session cache temp file
+ # name.
+ # These are combined to form the file name
+ # /tmp/cache_prefix.cache_case_name.cache_test_name
+ cache_name_prefix = 'gdata_live_test'
+ cache_case_name = ''
+ cache_test_name = ''
+
+ def __init__(self, recordings=None, real_client=None):
+ self._recordings = recordings or []
+ if real_client is not None:
+ self.real_client = real_client
+
+ def add_response(self, http_request, status, reason, headers=None,
+ body=None):
+ response = MockHttpResponse(status, reason, headers, body)
+ # TODO Scrub the request and the response.
+ self._recordings.append((http_request._copy(), response))
+
+ AddResponse = add_response
+
+ def request(self, http_request):
+ """Provide a recorded response, or record a response for replay.
+
+ If the real_client is set, the request will be made using the
+ real_client, and the response from the server will be recorded.
+ If the real_client is None (the default), this method will examine
+ the recordings and find the first which matches.
+ """
+ request = http_request._copy()
+ _scrub_request(request)
+ if self.real_client is None:
+ self.last_request_was_live = False
+ for recording in self._recordings:
+ if _match_request(recording[0], request):
+ return recording[1]
+ else:
+ # Pass along the debug settings to the real client.
+ self.real_client.debug = self.debug
+ # Make an actual request since we can use the real HTTP client.
+ self.last_request_was_live = True
+ response = self.real_client.request(http_request)
+ scrubbed_response = _scrub_response(response)
+ self.add_response(request, scrubbed_response.status,
+ scrubbed_response.reason,
+ dict(atom.http_core.get_headers(scrubbed_response)),
+ scrubbed_response.read())
+ # Return the recording which we just added.
+ return self._recordings[-1][1]
+    raise NoRecordingFound('No recording was found for request: %s %s' % (
+ request.method, str(request.uri)))
+
+ Request = request
+
+ def _save_recordings(self, filename):
+ recording_file = open(os.path.join(tempfile.gettempdir(), filename),
+ 'wb')
+ pickle.dump(self._recordings, recording_file)
+ recording_file.close()
+
+ def _load_recordings(self, filename):
+ recording_file = open(os.path.join(tempfile.gettempdir(), filename),
+ 'rb')
+ self._recordings = pickle.load(recording_file)
+ recording_file.close()
+
+ def _delete_recordings(self, filename):
+ full_path = os.path.join(tempfile.gettempdir(), filename)
+ if os.path.exists(full_path):
+ os.remove(full_path)
+
+ def _load_or_use_client(self, filename, http_client):
+ if os.path.exists(os.path.join(tempfile.gettempdir(), filename)):
+ self._load_recordings(filename)
+ else:
+ self.real_client = http_client
+
+ def use_cached_session(self, name=None, real_http_client=None):
+ """Attempts to load recordings from a previous live request.
+
+ If a temp file with the recordings exists, then it is used to fulfill
+ requests. If the file does not exist, then a real client is used to
+ actually make the desired HTTP requests. Requests and responses are
+ recorded and will be written to the desired temprary cache file when
+    recorded and will be written to the desired temporary cache file when
+
+ Args:
+ name: str (optional) The file name of session file to be used. The file
+ is loaded from the temporary directory of this machine. If no name
+ is passed in, a default name will be constructed using the
+ cache_name_prefix, cache_case_name, and cache_test_name of this
+ object.
+ real_http_client: atom.http_core.HttpClient the real client to be used
+ if the cached recordings are not found. If the default
+ value is used, this will be an
+ atom.http_core.HttpClient.
+ """
+ if real_http_client is None:
+ real_http_client = atom.http_core.HttpClient()
+ if name is None:
+ self._recordings_cache_name = self.get_cache_file_name()
+ else:
+ self._recordings_cache_name = name
+ self._load_or_use_client(self._recordings_cache_name, real_http_client)
+
+ def close_session(self):
+ """Saves recordings in the temporary file named in use_cached_session."""
+ if self.real_client is not None:
+ self._save_recordings(self._recordings_cache_name)
+
+ def delete_session(self, name=None):
+ """Removes recordings from a previous live request."""
+ if name is None:
+ self._delete_recordings(self._recordings_cache_name)
+ else:
+ self._delete_recordings(name)
+
+ def get_cache_file_name(self):
+ return '%s.%s.%s' % (self.cache_name_prefix, self.cache_case_name,
+ self.cache_test_name)
+
+ def _dump(self):
+ """Provides debug information in a string."""
+ output = 'MockHttpClient\n real_client: %s\n cache file name: %s\n' % (
+ self.real_client, self.get_cache_file_name())
+ output += ' recordings:\n'
+ i = 0
+ for recording in self._recordings:
+ output += ' recording %i is for: %s %s\n' % (
+ i, recording[0].method, str(recording[0].uri))
+ i += 1
+ return output
+
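+# A cached-session sketch for tests (illustrative only); the case and test
+# names and the feed URL are placeholders.
+#
+#   client = MockHttpClient()
+#   client.cache_case_name = 'ExampleTest'
+#   client.cache_test_name = 'test_get_feed'
+#   client.use_cached_session()   # replays from the temp cache file if present,
+#                                 # otherwise records live traffic
+#   response = client.request(atom.http_core.HttpRequest(
+#       uri='http://www.example.com/feed', method='GET'))
+#   client.close_session()        # writes new recordings to the cache file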
+
+def _match_request(http_request, stored_request):
+ """Determines whether a request is similar enough to a stored request
+ to cause the stored response to be returned."""
+ # Check to see if the host names match.
+ if (http_request.uri.host is not None
+ and http_request.uri.host != stored_request.uri.host):
+ return False
+ # Check the request path in the URL (/feeds/private/full/x)
+ elif http_request.uri.path != stored_request.uri.path:
+ return False
+ # Check the method used in the request (GET, POST, etc.)
+ elif http_request.method != stored_request.method:
+ return False
+ # If there is a gsession ID in either request, make sure that it is matched
+ # exactly.
+ elif ('gsessionid' in http_request.uri.query
+ or 'gsessionid' in stored_request.uri.query):
+ if 'gsessionid' not in stored_request.uri.query:
+ return False
+ elif 'gsessionid' not in http_request.uri.query:
+ return False
+ elif (http_request.uri.query['gsessionid']
+ != stored_request.uri.query['gsessionid']):
+ return False
+ # Ignores differences in the query params (?start-index=5&max-results=20),
+ # the body of the request, the port number, HTTP headers, just to name a
+ # few.
+ return True
+
+
+def _scrub_request(http_request):
+  """Removes the email address and password from a client login request.
+
+  Since the mock server saves the request and response in plaintext, sensitive
+ information like the password should be removed before saving the
+ recordings. At the moment only requests sent to a ClientLogin url are
+ scrubbed.
+ """
+ if (http_request and http_request.uri and http_request.uri.path and
+ http_request.uri.path.endswith('ClientLogin')):
+ # Remove the email and password from a ClientLogin request.
+ http_request._body_parts = []
+ http_request.add_form_inputs(
+ {'form_data': 'client login request has been scrubbed'})
+ else:
+ # We can remove the body of the post from the recorded request, since
+ # the request body is not used when finding a matching recording.
+ http_request._body_parts = []
+ return http_request
+
+
+def _scrub_response(http_response):
+ return http_response
+
+
+class EchoHttpClient(object):
+ """Sends the request data back in the response.
+
+ Used to check the formatting of the request as it was sent. Always responds
+ with a 200 OK, and some information from the HTTP request is returned in
+ special Echo-X headers in the response. The following headers are added
+ in the response:
+ 'Echo-Host': The host name and port number to which the HTTP connection is
+ made. If no port was passed in, the header will contain
+ host:None.
+ 'Echo-Uri': The path portion of the URL being requested. /example?x=1&y=2
+ 'Echo-Scheme': The beginning of the URL, usually 'http' or 'https'
+ 'Echo-Method': The HTTP method being used, 'GET', 'POST', 'PUT', etc.
+ """
+
+ def request(self, http_request):
+ return self._http_request(http_request.uri, http_request.method,
+ http_request.headers, http_request._body_parts)
+
+ def _http_request(self, uri, method, headers=None, body_parts=None):
+ body = StringIO.StringIO()
+ response = atom.http_core.HttpResponse(status=200, reason='OK', body=body)
+ if headers is None:
+ response._headers = {}
+ else:
+ # Copy headers from the request to the response but convert values to
+ # strings. Server response headers always come in as strings, so an int
+ # should be converted to a corresponding string when echoing.
+ for header, value in headers.iteritems():
+ response._headers[header] = str(value)
+ response._headers['Echo-Host'] = '%s:%s' % (uri.host, str(uri.port))
+ response._headers['Echo-Uri'] = uri._get_relative_path()
+ response._headers['Echo-Scheme'] = uri.scheme
+ response._headers['Echo-Method'] = method
+ for part in body_parts:
+ if isinstance(part, str):
+ body.write(part)
+ elif hasattr(part, 'read'):
+ body.write(part.read())
+ body.seek(0)
+ return response
+
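+# An EchoHttpClient sketch (illustrative only) for checking how a request was
+# formatted; the URL and body are placeholders.
+#
+#   client = EchoHttpClient()
+#   request = atom.http_core.HttpRequest(
+#       uri='http://www.example.com/feed?alt=json', method='POST')
+#   request.add_body_part('hello', 'text/plain')
+#   response = client.request(request)
+#   response.getheader('Echo-Method')   # 'POST'
+#   response.getheader('Echo-Uri')      # '/feed?alt=json'
+#   response.getheader('Echo-Scheme')   # 'http'
+#   response.read()                     # 'hello'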
+
+class SettableHttpClient(object):
+ """An HTTP Client which responds with the data given in set_response."""
+
+ def __init__(self, status, reason, body, headers):
+ """Configures the response for the server.
+
+ See set_response for details on the arguments to the constructor.
+ """
+ self.set_response(status, reason, body, headers)
+ self.last_request = None
+
+ def set_response(self, status, reason, body, headers):
+ """Determines the response which will be sent for each request.
+
+ Args:
+ status: An int for the HTTP status code, example: 200, 404, etc.
+ reason: String for the HTTP reason, example: OK, NOT FOUND, etc.
+ body: The body of the HTTP response as a string or a file-like
+ object (something with a read method).
+ headers: dict of strings containing the HTTP headers in the response.
+ """
+ self.response = atom.http_core.HttpResponse(status=status, reason=reason,
+ body=body)
+ self.response._headers = headers.copy()
+
+ def request(self, http_request):
+ self.last_request = http_request
+ return self.response
+
+
+class MockHttpResponse(atom.http_core.HttpResponse):
+
+ def __init__(self, status=None, reason=None, headers=None, body=None):
+ self._headers = headers or {}
+ if status is not None:
+ self.status = status
+ if reason is not None:
+ self.reason = reason
+ if body is not None:
+ # Instead of using a file-like object for the body, store as a string
+ # so that reads can be repeated.
+ if hasattr(body, 'read'):
+ self._body = body.read()
+ else:
+ self._body = body
+
+ def read(self):
+ return self._body
diff --git a/python/atom/mock_service.py b/python/atom/mock_service.py
new file mode 100644
index 0000000..601b68a
--- /dev/null
+++ b/python/atom/mock_service.py
@@ -0,0 +1,243 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""MockService provides CRUD ops. for mocking calls to AtomPub services.
+
+ MockService: Exposes the publicly used methods of AtomService to provide
+ a mock interface which can be used in unit tests.
+"""
+
+import atom.service
+import pickle
+
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+
+# Recordings holds pairs of MockRequest and MockHttpResponse objects.
+recordings = []
+# If set, the mock service's HTTP requests are actually made through this object.
+real_request_handler = None
+
+def ConcealValueWithSha(source):
+ import sha
+ return sha.new(source[:-5]).hexdigest()
+
+def DumpRecordings(conceal_func=ConcealValueWithSha):
+ if conceal_func:
+ for recording_pair in recordings:
+ recording_pair[0].ConcealSecrets(conceal_func)
+ return pickle.dumps(recordings)
+
+def LoadRecordings(recordings_file_or_string):
+ if isinstance(recordings_file_or_string, str):
+ atom.mock_service.recordings = pickle.loads(recordings_file_or_string)
+ elif hasattr(recordings_file_or_string, 'read'):
+ atom.mock_service.recordings = pickle.loads(
+ recordings_file_or_string.read())
+
+def HttpRequest(service, operation, data, uri, extra_headers=None,
+ url_params=None, escape_params=True, content_type='application/atom+xml'):
+  """Simulates an HTTP call to the server, or makes an actual HTTP request
+  when real_request_handler is set.
+
+ This function operates in two different modes depending on if
+ real_request_handler is set or not. If real_request_handler is not set,
+ HttpRequest will look in this module's recordings list to find a response
+ which matches the parameters in the function call. If real_request_handler
+ is set, this function will call real_request_handler.HttpRequest, add the
+ response to the recordings list, and respond with the actual response.
+
+ Args:
+ service: atom.AtomService object which contains some of the parameters
+ needed to make the request. The following members are used to
+ construct the HTTP call: server (str), additional_headers (dict),
+ port (int), and ssl (bool).
+ operation: str The HTTP operation to be performed. This is usually one of
+ 'GET', 'POST', 'PUT', or 'DELETE'
+ data: ElementTree, filestream, list of parts, or other object which can be
+ converted to a string.
+        Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, this method will read
+ a chunk of 100K bytes at a time and send them.
+ If the data is a list of parts to be sent, each part will be evaluated
+ and sent.
+ uri: The beginning of the URL to which the request should be sent.
+ Examples: '/', '/base/feeds/snippets',
+ '/m8/feeds/contacts/default/base'
+ extra_headers: dict of strings. HTTP headers which should be sent
+ in the request. These headers are in addition to those stored in
+ service.additional_headers.
+ url_params: dict of strings. Key value pairs to be added to the URL as
+ URL parameters. For example {'foo':'bar', 'test':'param'} will
+ become ?foo=bar&test=param.
+ escape_params: bool default True. If true, the keys and values in
+ url_params will be URL escaped when the form is constructed
+ (Special characters converted to %XX form.)
+ content_type: str The MIME type for the data being sent. Defaults to
+ 'application/atom+xml', this is only used if data is set.
+ """
+ full_uri = atom.service.BuildUri(uri, url_params, escape_params)
+ (server, port, ssl, uri) = atom.service.ProcessUrl(service, uri)
+ current_request = MockRequest(operation, full_uri, host=server, ssl=ssl,
+ data=data, extra_headers=extra_headers, url_params=url_params,
+ escape_params=escape_params, content_type=content_type)
+ # If the request handler is set, we should actually make the request using
+ # the request handler and record the response to replay later.
+ if real_request_handler:
+ response = real_request_handler.HttpRequest(service, operation, data, uri,
+ extra_headers=extra_headers, url_params=url_params,
+ escape_params=escape_params, content_type=content_type)
+ # TODO: need to copy the HTTP headers from the real response into the
+ # recorded_response.
+ recorded_response = MockHttpResponse(body=response.read(),
+ status=response.status, reason=response.reason)
+ # Insert a tuple which maps the request to the response object returned
+ # when making an HTTP call using the real_request_handler.
+ recordings.append((current_request, recorded_response))
+ return recorded_response
+ else:
+ # Look through available recordings to see if one matches the current
+ # request.
+ for request_response_pair in recordings:
+ if request_response_pair[0].IsMatch(current_request):
+ return request_response_pair[1]
+ return None
+
+
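+# A record-mode sketch (illustrative only): route requests through the real
+# atom.service module and capture them for replay. The server name and uri are
+# placeholders; extra_headers is passed as an empty dict so that ConcealSecrets
+# has a dictionary to inspect when the recordings are dumped.
+#
+#   import atom.mock_service
+#   import atom.service
+#
+#   atom.mock_service.real_request_handler = atom.service
+#   service = atom.service.AtomService(server='www.example.com')
+#   atom.mock_service.HttpRequest(service, 'GET', None, '/feed',
+#                                 extra_headers={})
+#   saved = atom.mock_service.DumpRecordings()
+#   atom.mock_service.real_request_handler = None   # switch back to replay
+#   atom.mock_service.LoadRecordings(saved)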
+class MockRequest(object):
+ """Represents a request made to an AtomPub server.
+
+ These objects are used to determine if a client request matches a recorded
+ HTTP request to determine what the mock server's response will be.
+ """
+
+ def __init__(self, operation, uri, host=None, ssl=False, port=None,
+ data=None, extra_headers=None, url_params=None, escape_params=True,
+ content_type='application/atom+xml'):
+ """Constructor for a MockRequest
+
+ Args:
+ operation: str One of 'GET', 'POST', 'PUT', or 'DELETE' this is the
+ HTTP operation requested on the resource.
+ uri: str The URL describing the resource to be modified or feed to be
+ retrieved. This should include the protocol (http/https) and the host
+        (aka domain). For example, these are some valid full URIs:
+ 'http://example.com', 'https://www.google.com/accounts/ClientLogin'
+ host: str (optional) The server name which will be placed at the
+ beginning of the URL if the uri parameter does not begin with 'http'.
+ Examples include 'example.com', 'www.google.com', 'www.blogger.com'.
+ ssl: boolean (optional) If true, the request URL will begin with https
+ instead of http.
+ data: ElementTree, filestream, list of parts, or other object which can be
+ converted to a string. (optional)
+          Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, the constructor
+ will read the entire file into memory. If the data is a list of
+ parts to be sent, each part will be evaluated and stored.
+ extra_headers: dict (optional) HTTP headers included in the request.
+ url_params: dict (optional) Key value pairs which should be added to
+ the URL as URL parameters in the request. For example uri='/',
+ url_parameters={'foo':'1','bar':'2'} could become '/?foo=1&bar=2'.
+ escape_params: boolean (optional) Perform URL escaping on the keys and
+ values specified in url_params. Defaults to True.
+ content_type: str (optional) Provides the MIME type of the data being
+ sent.
+ """
+ self.operation = operation
+ self.uri = _ConstructFullUrlBase(uri, host=host, ssl=ssl)
+ self.data = data
+ self.extra_headers = extra_headers
+ self.url_params = url_params or {}
+ self.escape_params = escape_params
+ self.content_type = content_type
+
+ def ConcealSecrets(self, conceal_func):
+ """Conceal secret data in this request."""
+ if self.extra_headers.has_key('Authorization'):
+ self.extra_headers['Authorization'] = conceal_func(
+ self.extra_headers['Authorization'])
+
+ def IsMatch(self, other_request):
+ """Check to see if the other_request is equivalent to this request.
+
+ Used to determine if a recording matches an incoming request so that a
+ recorded response should be sent to the client.
+
+ The matching is not exact, only the operation and URL are examined
+ currently.
+
+ Args:
+ other_request: MockRequest The request which we want to check this
+ (self) MockRequest against to see if they are equivalent.
+ """
+ # More accurate matching logic will likely be required.
+ return (self.operation == other_request.operation and self.uri ==
+ other_request.uri)
+
+
+def _ConstructFullUrlBase(uri, host=None, ssl=False):
+  """Puts URL components into the form http(s)://full.host.string/uri/path
+
+ Used to construct a roughly canonical URL so that URLs which begin with
+ 'http://example.com/' can be compared to a uri of '/' when the host is
+ set to 'example.com'
+
+ If the uri contains 'http://host' already, the host and ssl parameters
+ are ignored.
+
+ Args:
+ uri: str The path component of the URL, examples include '/'
+ host: str (optional) The host name which should prepend the URL. Example:
+ 'example.com'
+ ssl: boolean (optional) If true, the returned URL will begin with https
+ instead of http.
+
+ Returns:
+ String which has the form http(s)://example.com/uri/string/contents
+ """
+ if uri.startswith('http'):
+ return uri
+ if ssl:
+ return 'https://%s%s' % (host, uri)
+ else:
+ return 'http://%s%s' % (host, uri)
+
+
+class MockHttpResponse(object):
+ """Returned from MockService crud methods as the server's response."""
+
+ def __init__(self, body=None, status=None, reason=None, headers=None):
+ """Construct a mock HTTPResponse and set members.
+
+ Args:
+ body: str (optional) The HTTP body of the server's response.
+ status: int (optional)
+ reason: str (optional)
+ headers: dict (optional)
+ """
+ self.body = body
+ self.status = status
+ self.reason = reason
+ self.headers = headers or {}
+
+ def read(self):
+ return self.body
+
+ def getheader(self, header_name):
+ return self.headers[header_name]
+
diff --git a/python/atom/service.py b/python/atom/service.py
new file mode 100644
index 0000000..6310c1c
--- /dev/null
+++ b/python/atom/service.py
@@ -0,0 +1,740 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006, 2007, 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""AtomService provides CRUD ops. in line with the Atom Publishing Protocol.
+
+ AtomService: Encapsulates the ability to perform insert, update and delete
+ operations with the Atom Publishing Protocol on which GData is
+ based. An instance can perform query, insertion, deletion, and
+ update.
+
+ HttpRequest: Function that performs a GET, POST, PUT, or DELETE HTTP request
+ to the specified end point. An AtomService object or a subclass can be
+ used to specify information about the request.
+"""
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import atom.http_interface
+import atom.url
+import atom.http
+import atom.token_store
+
+import os
+import httplib
+import urllib
+import re
+import base64
+import socket
+import warnings
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom
+
+
+class AtomService(object):
+ """Performs Atom Publishing Protocol CRUD operations.
+
+ The AtomService contains methods to perform HTTP CRUD operations.
+ """
+
+ # Default values for members
+ port = 80
+ ssl = False
+ # Set the current_token to force the AtomService to use this token
+ # instead of searching for an appropriate token in the token_store.
+ current_token = None
+ auto_store_tokens = True
+ auto_set_current_token = True
+
+ def _get_override_token(self):
+ return self.current_token
+
+ def _set_override_token(self, token):
+ self.current_token = token
+
+ override_token = property(_get_override_token, _set_override_token)
+
+ #@atom.v1_deprecated('Please use atom.client.AtomPubClient instead.')
+ def __init__(self, server=None, additional_headers=None,
+ application_name='', http_client=None, token_store=None):
+ """Creates a new AtomService client.
+
+ Args:
+ server: string (optional) The start of a URL for the server
+ to which all operations should be directed. Example:
+ 'www.google.com'
+ additional_headers: dict (optional) Any additional HTTP headers which
+ should be included with CRUD operations.
+ http_client: An object responsible for making HTTP requests using a
+ request method. If none is provided, a new instance of
+ atom.http.ProxiedHttpClient will be used.
+ token_store: Keeps a collection of authorization tokens which can be
+ applied to requests for a specific URLs. Critical methods are
+ find_token based on a URL (atom.url.Url or a string), add_token,
+ and remove_token.
+ """
+ self.http_client = http_client or atom.http.ProxiedHttpClient()
+ self.token_store = token_store or atom.token_store.TokenStore()
+ self.server = server
+ self.additional_headers = additional_headers or {}
+ self.additional_headers['User-Agent'] = atom.http_interface.USER_AGENT % (
+ application_name,)
+ # If debug is True, the HTTPConnection will display debug information
+ self._set_debug(False)
+
+ __init__ = atom.v1_deprecated(
+ 'Please use atom.client.AtomPubClient instead.')(
+ __init__)
+
+ def _get_debug(self):
+ return self.http_client.debug
+
+ def _set_debug(self, value):
+ self.http_client.debug = value
+
+ debug = property(_get_debug, _set_debug,
+ doc='If True, HTTP debug information is printed.')
+
+ def use_basic_auth(self, username, password, scopes=None):
+ if username is not None and password is not None:
+ if scopes is None:
+ scopes = [atom.token_store.SCOPE_ALL]
+ base_64_string = base64.encodestring('%s:%s' % (username, password))
+ token = BasicAuthToken('Basic %s' % base_64_string.strip(),
+ scopes=[atom.token_store.SCOPE_ALL])
+ if self.auto_set_current_token:
+ self.current_token = token
+ if self.auto_store_tokens:
+ return self.token_store.add_token(token)
+ return True
+ return False
+
+ def UseBasicAuth(self, username, password, for_proxy=False):
+    """Sets an Authorization: Basic HTTP header containing plaintext credentials.
+
+ Deprecated, use use_basic_auth instead.
+
+ The username and password are base64 encoded and added to an HTTP header
+ which will be included in each request. Note that your username and
+ password are sent in plaintext.
+
+ Args:
+ username: str
+ password: str
+ """
+ self.use_basic_auth(username, password)
+
+ #@atom.v1_deprecated('Please use atom.client.AtomPubClient for requests.')
+ def request(self, operation, url, data=None, headers=None,
+ url_params=None):
+ if isinstance(url, (str, unicode)):
+ if url.startswith('http:') and self.ssl:
+ # Force all requests to be https if self.ssl is True.
+ url = atom.url.parse_url('https:' + url[5:])
+ elif not url.startswith('http') and self.ssl:
+ url = atom.url.parse_url('https://%s%s' % (self.server, url))
+ elif not url.startswith('http'):
+ url = atom.url.parse_url('http://%s%s' % (self.server, url))
+ else:
+ url = atom.url.parse_url(url)
+
+ if url_params:
+ for name, value in url_params.iteritems():
+ url.params[name] = value
+
+ all_headers = self.additional_headers.copy()
+ if headers:
+ all_headers.update(headers)
+
+ # If the list of headers does not include a Content-Length, attempt to
+ # calculate it based on the data object.
+ if data and 'Content-Length' not in all_headers:
+ content_length = CalculateDataLength(data)
+ if content_length:
+ all_headers['Content-Length'] = str(content_length)
+
+ # Find an Authorization token for this URL if one is available.
+ if self.override_token:
+ auth_token = self.override_token
+ else:
+ auth_token = self.token_store.find_token(url)
+ return auth_token.perform_request(self.http_client, operation, url,
+ data=data, headers=all_headers)
+
+ request = atom.v1_deprecated(
+ 'Please use atom.client.AtomPubClient for requests.')(
+ request)
+
+ # CRUD operations
+ def Get(self, uri, extra_headers=None, url_params=None, escape_params=True):
+ """Query the APP server with the given URI
+
+ The uri is the portion of the URI after the server value
+ (server example: 'www.google.com').
+
+ Example use:
+ To perform a query against Google Base, set the server to
+ 'base.google.com' and set the uri to '/base/feeds/...', where ... is
+ your query. For example, to find snippets for all digital cameras uri
+ should be set to: '/base/feeds/snippets?bq=digital+camera'
+
+ Args:
+ uri: string The query in the form of a URI. Example:
+ '/base/feeds/snippets?bq=digital+camera'.
+      extra_headers: dict (optional) Extra HTTP headers to be included
+ in the GET request. These headers are in addition to
+ those stored in the client's additional_headers property.
+ The client automatically sets the Content-Type and
+ Authorization headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the query. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+
+ Returns:
+ httplib.HTTPResponse The server's response to the GET request.
+ """
+ return self.request('GET', uri, data=None, headers=extra_headers,
+ url_params=url_params)
+
+ def Post(self, data, uri, extra_headers=None, url_params=None,
+ escape_params=True, content_type='application/atom+xml'):
+ """Insert data into an APP server at the given URI.
+
+ Args:
+ data: string, ElementTree._Element, or something with a __str__ method
+ The XML to be sent to the uri.
+ uri: string The location (feed) to which the data should be inserted.
+ Example: '/base/feeds/items'.
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type,
+ Authorization, and Content-Length headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+
+ Returns:
+ httplib.HTTPResponse Server's response to the POST request.
+ """
+ if extra_headers is None:
+ extra_headers = {}
+ if content_type:
+ extra_headers['Content-Type'] = content_type
+ return self.request('POST', uri, data=data, headers=extra_headers,
+ url_params=url_params)
+
+ def Put(self, data, uri, extra_headers=None, url_params=None,
+ escape_params=True, content_type='application/atom+xml'):
+ """Updates an entry at the given URI.
+
+ Args:
+ data: string, ElementTree._Element, or xml_wrapper.ElementWrapper The
+ XML containing the updated data.
+ uri: string A URI indicating entry to which the update will be applied.
+ Example: '/base/feeds/items/ITEM-ID'
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type,
+ Authorization, and Content-Length headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+
+ Returns:
+ httplib.HTTPResponse Server's response to the PUT request.
+ """
+ if extra_headers is None:
+ extra_headers = {}
+ if content_type:
+ extra_headers['Content-Type'] = content_type
+ return self.request('PUT', uri, data=data, headers=extra_headers,
+ url_params=url_params)
+
+ def Delete(self, uri, extra_headers=None, url_params=None,
+ escape_params=True):
+ """Deletes the entry at the given URI.
+
+ Args:
+ uri: string The URI of the entry to be deleted. Example:
+ '/base/feeds/items/ITEM-ID'
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type and
+ Authorization headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+
+ Returns:
+ httplib.HTTPResponse Server's response to the DELETE request.
+ """
+ return self.request('DELETE', uri, data=None, headers=extra_headers,
+ url_params=url_params)
+
+
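+# Illustrative usage sketch (hypothetical helper; the server, credentials,
+# feed path, and entry XML are placeholders, and a reachable server is
+# assumed). It shows the intended flow of the deprecated CRUD methods above.
+def _example_atom_service_usage():
+  client = AtomService(server='example.com')
+  client.use_basic_auth('username', 'password')
+  # GET a feed; the return value is an httplib.HTTPResponse.
+  response = client.Get('/feeds/entries?max-results=5')
+  body = response.read()
+  # POST an Atom entry as a string; Content-Type defaults to
+  # 'application/atom+xml'.
+  entry_xml = '<entry xmlns="http://www.w3.org/2005/Atom"></entry>'
+  post_response = client.Post(entry_xml, '/feeds/entries')
+  return body, post_response.status
+
+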
+class BasicAuthToken(atom.http_interface.GenericToken):
+ def __init__(self, auth_header, scopes=None):
+ """Creates a token used to add Basic Auth headers to HTTP requests.
+
+ Args:
+ auth_header: str The value for the Authorization header.
+ scopes: list of str or atom.url.Url specifying the beginnings of URLs
+ for which this token can be used. For example, if scopes contains
+ 'http://example.com/foo', then this token can be used for a request to
+ 'http://example.com/foo/bar' but it cannot be used for a request to
+ 'http://example.com/baz'
+ """
+ self.auth_header = auth_header
+ self.scopes = scopes or []
+
+ def perform_request(self, http_client, operation, url, data=None,
+ headers=None):
+ """Sets the Authorization header to the basic auth string."""
+ if headers is None:
+ headers = {'Authorization':self.auth_header}
+ else:
+ headers['Authorization'] = self.auth_header
+ return http_client.request(operation, url, data=data, headers=headers)
+
+ def __str__(self):
+ return self.auth_header
+
+ def valid_for_scope(self, url):
+ """Tells the caller if the token authorizes access to the desired URL.
+ """
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ for scope in self.scopes:
+ if scope == atom.token_store.SCOPE_ALL:
+ return True
+ if isinstance(scope, (str, unicode)):
+ scope = atom.url.parse_url(scope)
+ if scope == url:
+ return True
+ # Check the host and the path, but ignore the port and protocol.
+ elif scope.host == url.host and not scope.path:
+ return True
+ elif scope.host == url.host and scope.path and not url.path:
+ continue
+ elif scope.host == url.host and url.path.startswith(scope.path):
+ return True
+ return False
+
+
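+# Illustrative sketch (hypothetical helper; the URLs and the base64 string
+# are placeholders): a token scoped to 'http://example.com/foo' authorizes
+# requests under that path but not requests to sibling paths.
+def _example_basic_auth_token_scopes():
+  token = BasicAuthToken('Basic dXNlcjpwYXNz',
+                         scopes=['http://example.com/foo'])
+  assert token.valid_for_scope('http://example.com/foo/bar')
+  assert not token.valid_for_scope('http://example.com/baz')
+  return token
+
+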
+def PrepareConnection(service, full_uri):
+ """Opens a connection to the server based on the full URI.
+
+ This method is deprecated, instead use atom.http.HttpClient.request.
+
+ Examines the target URI and the proxy settings, which are set as
+ environment variables, to open a connection with the server. This
+ connection is used to make an HTTP request.
+
+ Args:
+ service: atom.AtomService or a subclass. It must have a server string which
+ represents the server host to which the request should be made. It may also
+ have a dictionary of additional_headers to send in the HTTP request.
+    full_uri: str The target URL to be opened, either relative (lacking
+        protocol and host) or absolute. Example:
+ 'https://www.google.com/accounts/ClientLogin' or
+ 'base/feeds/snippets' where the server is set to www.google.com.
+
+ Returns:
+ A tuple containing the httplib.HTTPConnection and the full_uri for the
+ request.
+ """
+ deprecation('calling deprecated function PrepareConnection')
+ (server, port, ssl, partial_uri) = ProcessUrl(service, full_uri)
+ if ssl:
+ # destination is https
+ proxy = os.environ.get('https_proxy')
+ if proxy:
+ (p_server, p_port, p_ssl, p_uri) = ProcessUrl(service, proxy, True)
+ proxy_username = os.environ.get('proxy-username')
+ if not proxy_username:
+ proxy_username = os.environ.get('proxy_username')
+ proxy_password = os.environ.get('proxy-password')
+ if not proxy_password:
+ proxy_password = os.environ.get('proxy_password')
+ if proxy_username:
+ user_auth = base64.encodestring('%s:%s' % (proxy_username,
+ proxy_password))
+ proxy_authorization = ('Proxy-authorization: Basic %s\r\n' % (
+ user_auth.strip()))
+ else:
+ proxy_authorization = ''
+ proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (server, port)
+ user_agent = 'User-Agent: %s\r\n' % (
+ service.additional_headers['User-Agent'])
+ proxy_pieces = (proxy_connect + proxy_authorization + user_agent
+ + '\r\n')
+
+ #now connect, very simple recv and error checking
+ p_sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
+ p_sock.connect((p_server,p_port))
+ p_sock.sendall(proxy_pieces)
+ response = ''
+
+ # Wait for the full response.
+ while response.find("\r\n\r\n") == -1:
+ response += p_sock.recv(8192)
+
+ p_status=response.split()[1]
+ if p_status!=str(200):
+        raise Exception('Error status=%s' % (p_status,))
+
+ # Trivial setup for ssl socket.
+ ssl = socket.ssl(p_sock, None, None)
+ fake_sock = httplib.FakeSocket(p_sock, ssl)
+
+      # Initialize httplib and replace its socket with the proxy socket.
+ connection = httplib.HTTPConnection(server)
+ connection.sock=fake_sock
+ full_uri = partial_uri
+
+ else:
+ connection = httplib.HTTPSConnection(server, port)
+ full_uri = partial_uri
+
+ else:
+ # destination is http
+ proxy = os.environ.get('http_proxy')
+ if proxy:
+ (p_server, p_port, p_ssl, p_uri) = ProcessUrl(service.server, proxy, True)
+ proxy_username = os.environ.get('proxy-username')
+ if not proxy_username:
+ proxy_username = os.environ.get('proxy_username')
+ proxy_password = os.environ.get('proxy-password')
+ if not proxy_password:
+ proxy_password = os.environ.get('proxy_password')
+ if proxy_username:
+ UseBasicAuth(service, proxy_username, proxy_password, True)
+ connection = httplib.HTTPConnection(p_server, p_port)
+ if not full_uri.startswith("http://"):
+ if full_uri.startswith("/"):
+ full_uri = "http://%s%s" % (service.server, full_uri)
+ else:
+ full_uri = "http://%s/%s" % (service.server, full_uri)
+ else:
+ connection = httplib.HTTPConnection(server, port)
+ full_uri = partial_uri
+
+ return (connection, full_uri)
+
+
+def UseBasicAuth(service, username, password, for_proxy=False):
+  """Sets an Authorization: Basic HTTP header containing plaintext.
+
+  Deprecated, use AtomService.use_basic_auth instead.
+
+ The username and password are base64 encoded and added to an HTTP header
+ which will be included in each request. Note that your username and
+ password are sent in plaintext. The auth header is added to the
+ additional_headers dictionary in the service object.
+
+ Args:
+ service: atom.AtomService or a subclass which has an
+ additional_headers dict as a member.
+ username: str
+ password: str
+ """
+ deprecation('calling deprecated function UseBasicAuth')
+ base_64_string = base64.encodestring('%s:%s' % (username, password))
+ base_64_string = base_64_string.strip()
+ if for_proxy:
+ header_name = 'Proxy-Authorization'
+ else:
+ header_name = 'Authorization'
+ service.additional_headers[header_name] = 'Basic %s' % (base_64_string,)
+
+
+def ProcessUrl(service, url, for_proxy=False):
+  """Processes a passed URL. If the URL does not begin with http(s), then the
+  service's default server value is used.
+
+ This method is deprecated, use atom.url.parse_url instead.
+ """
+ if not isinstance(url, atom.url.Url):
+ url = atom.url.parse_url(url)
+
+ server = url.host
+ ssl = False
+ port = 80
+
+ if not server:
+ if hasattr(service, 'server'):
+ server = service.server
+ else:
+ server = service
+ if not url.protocol and hasattr(service, 'ssl'):
+ ssl = service.ssl
+ if hasattr(service, 'port'):
+ port = service.port
+ else:
+ if url.protocol == 'https':
+ ssl = True
+ elif url.protocol == 'http':
+ ssl = False
+ if url.port:
+ port = int(url.port)
+ elif port == 80 and ssl:
+ port = 443
+
+ return (server, port, ssl, url.get_request_uri())
+
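+# Illustrative sketch (hypothetical helper; placeholder host): ProcessUrl
+# returns a (server, port, ssl, request_uri) tuple and falls back to the
+# service's defaults when the URL lacks a protocol or host.
+def _example_process_url():
+  service = AtomService(server='example.com')
+  # An absolute https URL: ssl is inferred and the port defaults to 443.
+  server, port, ssl, uri = ProcessUrl(service, 'https://example.com/feeds')
+  assert (server, port, ssl, uri) == ('example.com', 443, True, '/feeds')
+  return server, port, ssl, uri
+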
+def DictionaryToParamList(url_parameters, escape_params=True):
+ """Convert a dictionary of URL arguments into a URL parameter string.
+
+  This function is deprecated, use atom.url.Url instead.
+
+ Args:
+    url_parameters: The dictionary of key-value pairs which will be converted
+ into URL parameters. For example,
+ {'dry-run': 'true', 'foo': 'bar'}
+ will become ['dry-run=true', 'foo=bar'].
+
+ Returns:
+ A list which contains a string for each key-value pair. The strings are
+ ready to be incorporated into a URL by using '&'.join([] + parameter_list)
+ """
+ # Choose which function to use when modifying the query and parameters.
+ # Use quote_plus when escape_params is true.
+ transform_op = [str, urllib.quote_plus][bool(escape_params)]
+ # Create a list of tuples containing the escaped version of the
+ # parameter-value pairs.
+ parameter_tuples = [(transform_op(param), transform_op(value))
+ for param, value in (url_parameters or {}).items()]
+ # Turn parameter-value tuples into a list of strings in the form
+ # 'PARAMETER=VALUE'.
+ return ['='.join(x) for x in parameter_tuples]
+
+
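+# Illustrative sketch (hypothetical helper): keys and values are escaped with
+# quote_plus when escape_params is True, and the result is ready for
+# '&'.join().
+def _example_dictionary_to_param_list():
+  params = DictionaryToParamList({'dry-run': 'true', 'q': 'digital camera'})
+  # dict iteration order is not guaranteed, so sort for a stable comparison.
+  params.sort()
+  assert params == ['dry-run=true', 'q=digital+camera']
+  return params
+
+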
+def BuildUri(uri, url_params=None, escape_params=True):
+ """Converts a uri string and a collection of parameters into a URI.
+
+  This function is deprecated, use atom.url.Url instead.
+
+ Args:
+    uri: string The start of the desired URI. This string can already contain
+         URL parameters. Examples: '/base/feeds/snippets',
+         '/base/feeds/snippets?bq=digital+camera'
+    url_params: dict (optional) Additional URL parameters to be included
+ in the query. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+
+ Returns:
+ string The URI consisting of the escaped URL parameters appended to the
+ initial uri string.
+ """
+ # Prepare URL parameters for inclusion into the GET request.
+ parameter_list = DictionaryToParamList(url_params, escape_params)
+
+ # Append the URL parameters to the URL.
+ if parameter_list:
+ if uri.find('?') != -1:
+ # If there are already URL parameters in the uri string, add the
+ # parameters after a new & character.
+ full_uri = '&'.join([uri] + parameter_list)
+ else:
+ # The uri string did not have any URL parameters (no ? character)
+ # so put a ? between the uri and URL parameters.
+ full_uri = '%s%s' % (uri, '?%s' % ('&'.join([] + parameter_list)))
+ else:
+ full_uri = uri
+
+ return full_uri
+
+
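+# Illustrative sketch (hypothetical helper): BuildUri appends escaped URL
+# parameters, using '?' or '&' depending on whether the uri already carries a
+# query string.
+def _example_build_uri():
+  assert BuildUri('/base/feeds/snippets',
+                  url_params={'max-results': '250'}) == (
+      '/base/feeds/snippets?max-results=250')
+  assert BuildUri('/base/feeds/snippets?bq=digital+camera',
+                  url_params={'max-results': '250'}) == (
+      '/base/feeds/snippets?bq=digital+camera&max-results=250')
+  return True
+
+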
+def HttpRequest(service, operation, data, uri, extra_headers=None,
+ url_params=None, escape_params=True, content_type='application/atom+xml'):
+ """Performs an HTTP call to the server, supports GET, POST, PUT, and DELETE.
+
+ This method is deprecated, use atom.http.HttpClient.request instead.
+
+  Usage example, to perform an HTTP GET on http://www.google.com/:
+ import atom.service
+ client = atom.service.AtomService()
+ http_response = client.Get('http://www.google.com/')
+ or you could set the client.server to 'www.google.com' and use the
+ following:
+ client.server = 'www.google.com'
+ http_response = client.Get('/')
+
+ Args:
+ service: atom.AtomService object which contains some of the parameters
+ needed to make the request. The following members are used to
+ construct the HTTP call: server (str), additional_headers (dict),
+ port (int), and ssl (bool).
+ operation: str The HTTP operation to be performed. This is usually one of
+ 'GET', 'POST', 'PUT', or 'DELETE'
+ data: ElementTree, filestream, list of parts, or other object which can be
+ converted to a string.
+          Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, this method will read
+ a chunk of 100K bytes at a time and send them.
+ If the data is a list of parts to be sent, each part will be evaluated
+ and sent.
+ uri: The beginning of the URL to which the request should be sent.
+ Examples: '/', '/base/feeds/snippets',
+ '/m8/feeds/contacts/default/base'
+ extra_headers: dict of strings. HTTP headers which should be sent
+ in the request. These headers are in addition to those stored in
+ service.additional_headers.
+ url_params: dict of strings. Key value pairs to be added to the URL as
+ URL parameters. For example {'foo':'bar', 'test':'param'} will
+ become ?foo=bar&test=param.
+ escape_params: bool default True. If true, the keys and values in
+ url_params will be URL escaped when the form is constructed
+ (Special characters converted to %XX form.)
+ content_type: str The MIME type for the data being sent. Defaults to
+ 'application/atom+xml', this is only used if data is set.
+ """
+ deprecation('call to deprecated function HttpRequest')
+ full_uri = BuildUri(uri, url_params, escape_params)
+ (connection, full_uri) = PrepareConnection(service, full_uri)
+
+ if extra_headers is None:
+ extra_headers = {}
+
+ # Turn on debug mode if the debug member is set.
+ if service.debug:
+ connection.debuglevel = 1
+
+ connection.putrequest(operation, full_uri)
+
+ # If the list of headers does not include a Content-Length, attempt to
+ # calculate it based on the data object.
+ if (data and not service.additional_headers.has_key('Content-Length') and
+ not extra_headers.has_key('Content-Length')):
+ content_length = CalculateDataLength(data)
+ if content_length:
+ extra_headers['Content-Length'] = str(content_length)
+
+ if content_type:
+ extra_headers['Content-Type'] = content_type
+
+ # Send the HTTP headers.
+ if isinstance(service.additional_headers, dict):
+ for header in service.additional_headers:
+ connection.putheader(header, service.additional_headers[header])
+ if isinstance(extra_headers, dict):
+ for header in extra_headers:
+ connection.putheader(header, extra_headers[header])
+ connection.endheaders()
+
+ # If there is data, send it in the request.
+ if data:
+ if isinstance(data, list):
+ for data_part in data:
+ __SendDataPart(data_part, connection)
+ else:
+ __SendDataPart(data, connection)
+
+ # Return the HTTP Response from the server.
+ return connection.getresponse()
+
+
+def __SendDataPart(data, connection):
+ """This method is deprecated, use atom.http._send_data_part"""
+  deprecation('call to deprecated function __SendDataPart')
+ if isinstance(data, str):
+ #TODO add handling for unicode.
+ connection.send(data)
+ return
+ elif ElementTree.iselement(data):
+ connection.send(ElementTree.tostring(data))
+ return
+ # Check to see if data is a file-like object that has a read method.
+ elif hasattr(data, 'read'):
+ # Read the file and send it a chunk at a time.
+ while 1:
+ binarydata = data.read(100000)
+ if binarydata == '': break
+ connection.send(binarydata)
+ return
+ else:
+ # The data object was not a file.
+ # Try to convert to a string and send the data.
+ connection.send(str(data))
+ return
+
+
+def CalculateDataLength(data):
+ """Attempts to determine the length of the data to send.
+
+ This method will respond with a length only if the data is a string or
+  an ElementTree element.
+
+ Args:
+    data: object If this is not a string or ElementTree element this function
+ will return None.
+ """
+ if isinstance(data, str):
+ return len(data)
+ elif isinstance(data, list):
+ return None
+ elif ElementTree.iselement(data):
+ return len(ElementTree.tostring(data))
+ elif hasattr(data, 'read'):
+ # If this is a file-like object, don't try to guess the length.
+ return None
+ else:
+ return len(str(data))
+
+
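+# Illustrative sketch (hypothetical helper): only strings and ElementTree
+# elements yield a length; lists and file-like objects return None so no
+# Content-Length header is guessed for them.
+def _example_calculate_data_length():
+  assert CalculateDataLength('<entry/>') == 8
+  assert CalculateDataLength(['part one', 'part two']) is None
+  class _FileLike(object):
+    def read(self):
+      return ''
+  assert CalculateDataLength(_FileLike()) is None
+  return True
+
+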
+def deprecation(message):
+ warnings.warn(message, DeprecationWarning, stacklevel=2)
diff --git a/python/atom/token_store.py b/python/atom/token_store.py
new file mode 100644
index 0000000..d618965
--- /dev/null
+++ b/python/atom/token_store.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides a TokenStore class which is designed to manage
+auth tokens required for different services.
+
+Each token is valid for a set of scopes, each being the start of a URL. An HTTP
+client will use a token store to find a valid Authorization header to send
+in requests to the specified URL. If the HTTP client determines that a token
+has expired or been revoked, it can remove the token from the store so that
+it will not be used in future requests.
+"""
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import atom.http_interface
+import atom.url
+
+
+SCOPE_ALL = 'http'
+
+
+class TokenStore(object):
+ """Manages Authorization tokens which will be sent in HTTP headers."""
+ def __init__(self, scoped_tokens=None):
+ self._tokens = scoped_tokens or {}
+
+ def add_token(self, token):
+ """Adds a new token to the store (replaces tokens with the same scope).
+
+ Args:
+ token: A subclass of http_interface.GenericToken. The token object is
+ responsible for adding the Authorization header to the HTTP request.
+ The scopes defined in the token are used to determine if the token
+ is valid for a requested scope when find_token is called.
+
+ Returns:
+      True if the token was added, False if the token was not added because
+ no scopes were provided.
+ """
+ if not hasattr(token, 'scopes') or not token.scopes:
+ return False
+
+ for scope in token.scopes:
+ self._tokens[str(scope)] = token
+ return True
+
+ def find_token(self, url):
+ """Selects an Authorization header token which can be used for the URL.
+
+ Args:
+ url: str or atom.url.Url or a list containing the same.
+          The URL which is going to be requested. All tokens are examined to
+          see if any of their scopes match the beginning of the URL. The
+          first match found is returned.
+
+ Returns:
+ The token object which should execute the HTTP request. If there was
+ no token for the url (the url did not begin with any of the token
+ scopes available), then the atom.http_interface.GenericToken will be
+ returned because the GenericToken calls through to the http client
+ without adding an Authorization header.
+ """
+ if url is None:
+ return None
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ if url in self._tokens:
+ token = self._tokens[url]
+ if token.valid_for_scope(url):
+ return token
+ else:
+ del self._tokens[url]
+ for scope, token in self._tokens.iteritems():
+ if token.valid_for_scope(url):
+ return token
+ return atom.http_interface.GenericToken()
+
+ def remove_token(self, token):
+ """Removes the token from the token_store.
+
+ This method is used when a token is determined to be invalid. If the
+ token was found by find_token, but resulted in a 401 or 403 error stating
+    that the token was invalid, then the token should be removed to prevent
+ future use.
+
+ Returns:
+ True if a token was found and then removed from the token
+ store. False if the token was not in the TokenStore.
+ """
+ token_found = False
+ scopes_to_delete = []
+ for scope, stored_token in self._tokens.iteritems():
+ if stored_token == token:
+ scopes_to_delete.append(scope)
+ token_found = True
+ for scope in scopes_to_delete:
+ del self._tokens[scope]
+ return token_found
+
+ def remove_all_tokens(self):
+ self._tokens = {}
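+
+
+# Illustrative sketch (hypothetical helper; the URLs and the _FakeToken class
+# are placeholders): any object exposing a scopes list and a valid_for_scope
+# method can live in the store, and URLs outside every scope fall back to a
+# no-op GenericToken.
+def _example_token_store_usage():
+  class _FakeToken(atom.http_interface.GenericToken):
+    def __init__(self, scopes):
+      self.scopes = scopes
+    def valid_for_scope(self, url):
+      return str(url).startswith(str(self.scopes[0]))
+  store = TokenStore()
+  token = _FakeToken(scopes=['http://example.com/feeds'])
+  assert store.add_token(token)
+  assert store.find_token('http://example.com/feeds/entry1') is token
+  # A URL outside every stored scope gets a plain GenericToken instead.
+  fallback = store.find_token('http://other.example.org/')
+  assert isinstance(fallback, atom.http_interface.GenericToken)
+  assert store.remove_token(token)
+  return store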
diff --git a/python/atom/url.py b/python/atom/url.py
new file mode 100644
index 0000000..0effa10
--- /dev/null
+++ b/python/atom/url.py
@@ -0,0 +1,139 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import urlparse
+import urllib
+
+
+DEFAULT_PROTOCOL = 'http'
+DEFAULT_PORT = 80
+
+
+def parse_url(url_string):
+ """Creates a Url object which corresponds to the URL string.
+
+ This method can accept partial URLs, but it will leave missing
+ members of the Url unset.
+ """
+ parts = urlparse.urlparse(url_string)
+ url = Url()
+ if parts[0]:
+ url.protocol = parts[0]
+ if parts[1]:
+ host_parts = parts[1].split(':')
+ if host_parts[0]:
+ url.host = host_parts[0]
+ if len(host_parts) > 1:
+ url.port = host_parts[1]
+ if parts[2]:
+ url.path = parts[2]
+ if parts[4]:
+ param_pairs = parts[4].split('&')
+ for pair in param_pairs:
+ pair_parts = pair.split('=')
+ if len(pair_parts) > 1:
+ url.params[urllib.unquote_plus(pair_parts[0])] = (
+ urllib.unquote_plus(pair_parts[1]))
+ elif len(pair_parts) == 1:
+ url.params[urllib.unquote_plus(pair_parts[0])] = None
+ return url
+
+class Url(object):
+ """Represents a URL and implements comparison logic.
+
+ URL strings which are not identical can still be equivalent, so this object
+ provides a better interface for comparing and manipulating URLs than
+ strings. URL parameters are represented as a dictionary of strings, and
+ defaults are used for the protocol (http) and port (80) if not provided.
+ """
+ def __init__(self, protocol=None, host=None, port=None, path=None,
+ params=None):
+ self.protocol = protocol
+ self.host = host
+ self.port = port
+ self.path = path
+ self.params = params or {}
+
+ def to_string(self):
+ url_parts = ['', '', '', '', '', '']
+ if self.protocol:
+ url_parts[0] = self.protocol
+ if self.host:
+ if self.port:
+ url_parts[1] = ':'.join((self.host, str(self.port)))
+ else:
+ url_parts[1] = self.host
+ if self.path:
+ url_parts[2] = self.path
+ if self.params:
+ url_parts[4] = self.get_param_string()
+ return urlparse.urlunparse(url_parts)
+
+ def get_param_string(self):
+ param_pairs = []
+ for key, value in self.params.iteritems():
+ param_pairs.append('='.join((urllib.quote_plus(key),
+ urllib.quote_plus(str(value)))))
+ return '&'.join(param_pairs)
+
+ def get_request_uri(self):
+ """Returns the path with the parameters escaped and appended."""
+ param_string = self.get_param_string()
+ if param_string:
+ return '?'.join([self.path, param_string])
+ else:
+ return self.path
+
+ def __cmp__(self, other):
+ if not isinstance(other, Url):
+ return cmp(self.to_string(), str(other))
+ difference = 0
+ # Compare the protocol
+ if self.protocol and other.protocol:
+ difference = cmp(self.protocol, other.protocol)
+ elif self.protocol and not other.protocol:
+ difference = cmp(self.protocol, DEFAULT_PROTOCOL)
+ elif not self.protocol and other.protocol:
+ difference = cmp(DEFAULT_PROTOCOL, other.protocol)
+ if difference != 0:
+ return difference
+ # Compare the host
+ difference = cmp(self.host, other.host)
+ if difference != 0:
+ return difference
+ # Compare the port
+ if self.port and other.port:
+ difference = cmp(self.port, other.port)
+ elif self.port and not other.port:
+ difference = cmp(self.port, DEFAULT_PORT)
+ elif not self.port and other.port:
+ difference = cmp(DEFAULT_PORT, other.port)
+ if difference != 0:
+ return difference
+ # Compare the path
+ difference = cmp(self.path, other.path)
+ if difference != 0:
+ return difference
+ # Compare the parameters
+ return cmp(self.params, other.params)
+
+ def __str__(self):
+ return self.to_string()
+
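+
+# Illustrative sketch (hypothetical helper; placeholder URLs): parsed and
+# hand-built Url objects compare equal when they describe the same location,
+# and the default protocol (http) is assumed for a side that omits it.
+def _example_url_comparison():
+  parsed = parse_url('https://example.com/feeds?max-results=25')
+  built = Url(protocol='https', host='example.com', path='/feeds',
+              params={'max-results': '25'})
+  assert parsed == built
+  assert parsed.get_request_uri() == '/feeds?max-results=25'
+  assert parse_url('//example.com/feeds') == parse_url('http://example.com/feeds')
+  return parsed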
diff --git a/python/gdata/Crypto/Cipher/__init__.py b/python/gdata/Crypto/Cipher/__init__.py
new file mode 100644
index 0000000..3b2f855
--- /dev/null
+++ b/python/gdata/Crypto/Cipher/__init__.py
@@ -0,0 +1,33 @@
+"""Secret-key encryption algorithms.
+
+Secret-key encryption algorithms transform plaintext in some way that
+is dependent on a key, producing ciphertext. This transformation can
+easily be reversed, if (and, hopefully, only if) one knows the key.
+
+The encryption modules here all support the interface described in PEP
+272, "API for Block Encryption Algorithms".
+
+If you don't know which algorithm to choose, use AES because it's
+standard and has undergone a fair bit of examination.
+
+Crypto.Cipher.AES Advanced Encryption Standard
+Crypto.Cipher.ARC2 Alleged RC2
+Crypto.Cipher.ARC4 Alleged RC4
+Crypto.Cipher.Blowfish
+Crypto.Cipher.CAST
+Crypto.Cipher.DES The Data Encryption Standard. Very commonly used
+ in the past, but today its 56-bit keys are too small.
+Crypto.Cipher.DES3 Triple DES.
+Crypto.Cipher.IDEA
+Crypto.Cipher.RC5
+Crypto.Cipher.XOR The simple XOR cipher.
+"""
+
+__all__ = ['AES', 'ARC2', 'ARC4',
+ 'Blowfish', 'CAST', 'DES', 'DES3', 'IDEA', 'RC5',
+ 'XOR'
+ ]
+
+__revision__ = "$Id: __init__.py,v 1.7 2003/02/28 15:28:35 akuchling Exp $"
+
+
diff --git a/python/gdata/Crypto/Hash/HMAC.py b/python/gdata/Crypto/Hash/HMAC.py
new file mode 100644
index 0000000..eeb5782
--- /dev/null
+++ b/python/gdata/Crypto/Hash/HMAC.py
@@ -0,0 +1,108 @@
+"""HMAC (Keyed-Hashing for Message Authentication) Python module.
+
+Implements the HMAC algorithm as described by RFC 2104.
+
+This is just a copy of the Python 2.2 HMAC module, modified to work when
+used on versions of Python before 2.2.
+"""
+
+__revision__ = "$Id: HMAC.py,v 1.5 2002/07/25 17:19:02 z3p Exp $"
+
+import string
+
+def _strxor(s1, s2):
+ """Utility method. XOR the two strings s1 and s2 (must have same length).
+ """
+ return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2))
+
+# The size of the digests returned by HMAC depends on the underlying
+# hashing module used.
+digest_size = None
+
+class HMAC:
+ """RFC2104 HMAC class.
+
+ This supports the API for Cryptographic Hash Functions (PEP 247).
+ """
+
+ def __init__(self, key, msg = None, digestmod = None):
+ """Create a new HMAC object.
+
+ key: key for the keyed hash object.
+ msg: Initial input for the hash, if provided.
+ digestmod: A module supporting PEP 247. Defaults to the md5 module.
+ """
+ if digestmod == None:
+ import md5
+ digestmod = md5
+
+ self.digestmod = digestmod
+ self.outer = digestmod.new()
+ self.inner = digestmod.new()
+ try:
+ self.digest_size = digestmod.digest_size
+ except AttributeError:
+ self.digest_size = len(self.outer.digest())
+
+ blocksize = 64
+ ipad = "\x36" * blocksize
+ opad = "\x5C" * blocksize
+
+ if len(key) > blocksize:
+ key = digestmod.new(key).digest()
+
+ key = key + chr(0) * (blocksize - len(key))
+ self.outer.update(_strxor(key, opad))
+ self.inner.update(_strxor(key, ipad))
+ if (msg):
+ self.update(msg)
+
+## def clear(self):
+## raise NotImplementedError, "clear() method not available in HMAC."
+
+ def update(self, msg):
+ """Update this hashing object with the string msg.
+ """
+ self.inner.update(msg)
+
+ def copy(self):
+ """Return a separate copy of this hashing object.
+
+ An update to this copy won't affect the original object.
+ """
+ other = HMAC("")
+ other.digestmod = self.digestmod
+ other.inner = self.inner.copy()
+ other.outer = self.outer.copy()
+ return other
+
+ def digest(self):
+ """Return the hash value of this hashing object.
+
+ This returns a string containing 8-bit data. The object is
+ not altered in any way by this function; you can continue
+ updating the object after calling this function.
+ """
+ h = self.outer.copy()
+ h.update(self.inner.digest())
+ return h.digest()
+
+ def hexdigest(self):
+ """Like digest(), but returns a string of hexadecimal digits instead.
+ """
+ return "".join([string.zfill(hex(ord(x))[2:], 2)
+ for x in tuple(self.digest())])
+
+def new(key, msg = None, digestmod = None):
+ """Create a new hashing object and return it.
+
+ key: The starting key for the hash.
+ msg: if available, will immediately be hashed into the object's starting
+ state.
+
+ You can now feed arbitrary strings into the object using its update()
+ method, and can ask for the hash value at any time by calling its digest()
+ method.
+ """
+ return HMAC(key, msg, digestmod)
+
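+
+# Illustrative sketch (hypothetical helper; the key and messages are
+# placeholders): PEP 247 style usage of this module, with the standard sha
+# module as the underlying hash.
+def _example_hmac_usage():
+    import sha
+    mac = new('secret-key', 'message to authenticate', digestmod=sha)
+    hex_mac = mac.hexdigest()
+    # Feeding more data and copying state work as with a plain hash object.
+    mac.update(' and some more data')
+    copied = mac.copy()
+    assert copied.digest() == mac.digest()
+    return hex_mac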
diff --git a/python/gdata/Crypto/Hash/MD5.py b/python/gdata/Crypto/Hash/MD5.py
new file mode 100644
index 0000000..b0eba39
--- /dev/null
+++ b/python/gdata/Crypto/Hash/MD5.py
@@ -0,0 +1,13 @@
+
+# Just use the MD5 module from the Python standard library
+
+__revision__ = "$Id: MD5.py,v 1.4 2002/07/11 14:31:19 akuchling Exp $"
+
+from md5 import *
+
+import md5
+if hasattr(md5, 'digestsize'):
+ digest_size = digestsize
+ del digestsize
+del md5
+
diff --git a/python/gdata/Crypto/Hash/SHA.py b/python/gdata/Crypto/Hash/SHA.py
new file mode 100644
index 0000000..ea3c6a3
--- /dev/null
+++ b/python/gdata/Crypto/Hash/SHA.py
@@ -0,0 +1,11 @@
+
+# Just use the SHA module from the Python standard library
+
+__revision__ = "$Id: SHA.py,v 1.4 2002/07/11 14:31:19 akuchling Exp $"
+
+from sha import *
+import sha
+if hasattr(sha, 'digestsize'):
+ digest_size = digestsize
+ del digestsize
+del sha
diff --git a/python/gdata/Crypto/Hash/__init__.py b/python/gdata/Crypto/Hash/__init__.py
new file mode 100644
index 0000000..920fe74
--- /dev/null
+++ b/python/gdata/Crypto/Hash/__init__.py
@@ -0,0 +1,24 @@
+"""Hashing algorithms
+
+Hash functions take arbitrary strings as input, and produce an output
+of fixed size that is dependent on the input; it should never be
+possible to derive the input data given only the hash function's
+output. Hash functions can be used simply as a checksum, or, in
+association with a public-key algorithm, can be used to implement
+digital signatures.
+
+The hashing modules here all support the interface described in PEP
+247, "API for Cryptographic Hash Functions".
+
+Submodules:
+Crypto.Hash.HMAC RFC 2104: Keyed-Hashing for Message Authentication
+Crypto.Hash.MD2
+Crypto.Hash.MD4
+Crypto.Hash.MD5
+Crypto.Hash.RIPEMD
+Crypto.Hash.SHA
+"""
+
+__all__ = ['HMAC', 'MD2', 'MD4', 'MD5', 'RIPEMD', 'SHA', 'SHA256']
+__revision__ = "$Id: __init__.py,v 1.6 2003/12/19 14:24:25 akuchling Exp $"
+
diff --git a/python/gdata/Crypto/Protocol/AllOrNothing.py b/python/gdata/Crypto/Protocol/AllOrNothing.py
new file mode 100644
index 0000000..6f3505d
--- /dev/null
+++ b/python/gdata/Crypto/Protocol/AllOrNothing.py
@@ -0,0 +1,295 @@
+"""This file implements all-or-nothing package transformations.
+
+An all-or-nothing package transformation is one in which some text is
+transformed into message blocks, such that all blocks must be obtained before
+the reverse transformation can be applied. Thus, if any blocks are corrupted
+or lost, the original message cannot be reproduced.
+
+An all-or-nothing package transformation is not encryption, although a block
+cipher algorithm is used. The encryption key is randomly generated and is
+extractable from the message blocks.
+
+This class implements the All-Or-Nothing package transformation algorithm
+described in:
+
+Ronald L. Rivest. "All-Or-Nothing Encryption and The Package Transform"
+http://theory.lcs.mit.edu/~rivest/fusion.pdf
+
+"""
+
+__revision__ = "$Id: AllOrNothing.py,v 1.8 2003/02/28 15:23:20 akuchling Exp $"
+
+import operator
+import string
+from Crypto.Util.number import bytes_to_long, long_to_bytes
+
+
+
+class AllOrNothing:
+ """Class implementing the All-or-Nothing package transform.
+
+ Methods for subclassing:
+
+ _inventkey(key_size):
+ Returns a randomly generated key. Subclasses can use this to
+ implement better random key generating algorithms. The default
+ algorithm is probably not very cryptographically secure.
+
+ """
+
+ def __init__(self, ciphermodule, mode=None, IV=None):
+ """AllOrNothing(ciphermodule, mode=None, IV=None)
+
+ ciphermodule is a module implementing the cipher algorithm to
+ use. It must provide the PEP272 interface.
+
+ Note that the encryption key is randomly generated
+ automatically when needed. Optional arguments mode and IV are
+ passed directly through to the ciphermodule.new() method; they
+ are the feedback mode and initialization vector to use. All
+ three arguments must be the same for the object used to create
+ the digest, and to undigest'ify the message blocks.
+ """
+
+ self.__ciphermodule = ciphermodule
+ self.__mode = mode
+ self.__IV = IV
+ self.__key_size = ciphermodule.key_size
+ if self.__key_size == 0:
+ self.__key_size = 16
+
+ __K0digit = chr(0x69)
+
+ def digest(self, text):
+ """digest(text:string) : [string]
+
+ Perform the All-or-Nothing package transform on the given
+ string. Output is a list of message blocks describing the
+ transformed text, where each block is a string of bit length equal
+ to the ciphermodule's block_size.
+ """
+
+ # generate a random session key and K0, the key used to encrypt the
+        # hash blocks. Rivest calls this a fixed, publicly-known encryption
+ # key, but says nothing about the security implications of this key or
+ # how to choose it.
+ key = self._inventkey(self.__key_size)
+ K0 = self.__K0digit * self.__key_size
+
+ # we need two cipher objects here, one that is used to encrypt the
+ # message blocks and one that is used to encrypt the hashes. The
+ # former uses the randomly generated key, while the latter uses the
+ # well-known key.
+ mcipher = self.__newcipher(key)
+ hcipher = self.__newcipher(K0)
+
+ # Pad the text so that its length is a multiple of the cipher's
+ # block_size. Pad with trailing spaces, which will be eliminated in
+ # the undigest() step.
+ block_size = self.__ciphermodule.block_size
+ padbytes = block_size - (len(text) % block_size)
+ text = text + ' ' * padbytes
+
+ # Run through the algorithm:
+ # s: number of message blocks (size of text / block_size)
+ # input sequence: m1, m2, ... ms
+ # random key K' (`key' in the code)
+ # Compute output sequence: m'1, m'2, ... m's' for s' = s + 1
+ # Let m'i = mi ^ E(K', i) for i = 1, 2, 3, ..., s
+ # Let m's' = K' ^ h1 ^ h2 ^ ... hs
+ # where hi = E(K0, m'i ^ i) for i = 1, 2, ... s
+ #
+ # The one complication I add is that the last message block is hard
+ # coded to the number of padbytes added, so that these can be stripped
+ # during the undigest() step
+ s = len(text) / block_size
+ blocks = []
+ hashes = []
+ for i in range(1, s+1):
+ start = (i-1) * block_size
+ end = start + block_size
+ mi = text[start:end]
+ assert len(mi) == block_size
+ cipherblock = mcipher.encrypt(long_to_bytes(i, block_size))
+ mticki = bytes_to_long(mi) ^ bytes_to_long(cipherblock)
+ blocks.append(mticki)
+ # calculate the hash block for this block
+ hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size))
+ hashes.append(bytes_to_long(hi))
+
+ # Add the padbytes length as a message block
+ i = i + 1
+ cipherblock = mcipher.encrypt(long_to_bytes(i, block_size))
+ mticki = padbytes ^ bytes_to_long(cipherblock)
+ blocks.append(mticki)
+
+ # calculate this block's hash
+ hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size))
+ hashes.append(bytes_to_long(hi))
+
+ # Now calculate the last message block of the sequence 1..s'. This
+ # will contain the random session key XOR'd with all the hash blocks,
+ # so that for undigest(), once all the hash blocks are calculated, the
+ # session key can be trivially extracted. Calculating all the hash
+ # blocks requires that all the message blocks be received, thus the
+ # All-or-Nothing algorithm succeeds.
+ mtick_stick = bytes_to_long(key) ^ reduce(operator.xor, hashes)
+ blocks.append(mtick_stick)
+
+ # we convert the blocks to strings since in Python, byte sequences are
+ # always represented as strings. This is more consistent with the
+ # model that encryption and hash algorithms always operate on strings.
+ return map(long_to_bytes, blocks)
+
+
+ def undigest(self, blocks):
+ """undigest(blocks : [string]) : string
+
+ Perform the reverse package transformation on a list of message
+ blocks. Note that the ciphermodule used for both transformations
+ must be the same. blocks is a list of strings of bit length
+ equal to the ciphermodule's block_size.
+ """
+
+ # better have at least 2 blocks, for the padbytes package and the hash
+ # block accumulator
+ if len(blocks) < 2:
+ raise ValueError, "List must be at least length 2."
+
+ # blocks is a list of strings. We need to deal with them as long
+ # integers
+ blocks = map(bytes_to_long, blocks)
+
+ # Calculate the well-known key, to which the hash blocks are
+ # encrypted, and create the hash cipher.
+ K0 = self.__K0digit * self.__key_size
+ hcipher = self.__newcipher(K0)
+
+ # Since we have all the blocks (or this method would have been called
+        # prematurely), we can calculate all the hash blocks.
+ hashes = []
+ for i in range(1, len(blocks)):
+ mticki = blocks[i-1] ^ i
+ hi = hcipher.encrypt(long_to_bytes(mticki))
+ hashes.append(bytes_to_long(hi))
+
+ # now we can calculate K' (key). remember the last block contains
+ # m's' which we don't include here
+ key = blocks[-1] ^ reduce(operator.xor, hashes)
+
+ # and now we can create the cipher object
+ mcipher = self.__newcipher(long_to_bytes(key))
+ block_size = self.__ciphermodule.block_size
+
+ # And we can now decode the original message blocks
+ parts = []
+ for i in range(1, len(blocks)):
+ cipherblock = mcipher.encrypt(long_to_bytes(i, block_size))
+ mi = blocks[i-1] ^ bytes_to_long(cipherblock)
+ parts.append(mi)
+
+ # The last message block contains the number of pad bytes appended to
+ # the original text string, such that its length was an even multiple
+ # of the cipher's block_size. This number should be small enough that
+ # the conversion from long integer to integer should never overflow
+ padbytes = int(parts[-1])
+ text = string.join(map(long_to_bytes, parts[:-1]), '')
+ return text[:-padbytes]
+
+ def _inventkey(self, key_size):
+ # TBD: Not a very secure algorithm. Eventually, I'd like to use JHy's
+ # kernelrand module
+ import time
+ from Crypto.Util import randpool
+ # TBD: key_size * 2 to work around possible bug in RandomPool?
+ pool = randpool.RandomPool(key_size * 2)
+ while key_size > pool.entropy:
+ pool.add_event()
+
+ # we now have enough entropy in the pool to get a key_size'd key
+ return pool.get_bytes(key_size)
+
+ def __newcipher(self, key):
+ if self.__mode is None and self.__IV is None:
+ return self.__ciphermodule.new(key)
+ elif self.__IV is None:
+ return self.__ciphermodule.new(key, self.__mode)
+ else:
+ return self.__ciphermodule.new(key, self.__mode, self.__IV)
+
+
+
+if __name__ == '__main__':
+ import sys
+ import getopt
+ import base64
+
+ usagemsg = '''\
+Test module usage: %(program)s [-c cipher] [-l] [-h]
+
+Where:
+ --cipher module
+ -c module
+ Cipher module to use. Default: %(ciphermodule)s
+
+ --aslong
+ -l
+ Print the encoded message blocks as long integers instead of base64
+ encoded strings
+
+ --help
+ -h
+ Print this help message
+'''
+
+ ciphermodule = 'AES'
+ aslong = 0
+
+ def usage(code, msg=None):
+ if msg:
+ print msg
+ print usagemsg % {'program': sys.argv[0],
+ 'ciphermodule': ciphermodule}
+ sys.exit(code)
+
+ try:
+ opts, args = getopt.getopt(sys.argv[1:],
+ 'c:l', ['cipher=', 'aslong'])
+ except getopt.error, msg:
+ usage(1, msg)
+
+ if args:
+ usage(1, 'Too many arguments')
+
+ for opt, arg in opts:
+ if opt in ('-h', '--help'):
+ usage(0)
+ elif opt in ('-c', '--cipher'):
+ ciphermodule = arg
+ elif opt in ('-l', '--aslong'):
+ aslong = 1
+
+ # ugly hack to force __import__ to give us the end-path module
+ module = __import__('Crypto.Cipher.'+ciphermodule, None, None, ['new'])
+
+ a = AllOrNothing(module)
+ print 'Original text:\n=========='
+ print __doc__
+ print '=========='
+ msgblocks = a.digest(__doc__)
+ print 'message blocks:'
+ for i, blk in map(None, range(len(msgblocks)), msgblocks):
+ # base64 adds a trailing newline
+ print ' %3d' % i,
+ if aslong:
+ print bytes_to_long(blk)
+ else:
+ print base64.encodestring(blk)[:-1]
+ #
+ # get a new undigest-only object so there's no leakage
+ b = AllOrNothing(module)
+ text = b.undigest(msgblocks)
+ if text == __doc__:
+ print 'They match!'
+ else:
+ print 'They differ!'
diff --git a/python/gdata/Crypto/Protocol/Chaffing.py b/python/gdata/Crypto/Protocol/Chaffing.py
new file mode 100644
index 0000000..fdfb82d
--- /dev/null
+++ b/python/gdata/Crypto/Protocol/Chaffing.py
@@ -0,0 +1,229 @@
+"""This file implements the chaffing algorithm.
+
+Winnowing and chaffing is a technique for enhancing privacy without requiring
+strong encryption. In short, the technique takes a set of authenticated
+message blocks (the wheat) and adds a number of chaff blocks which have
+randomly chosen data and MAC fields. This means that to an adversary, the
+chaff blocks look as valid as the wheat blocks, and so the authentication
+would have to be performed on every block. By tailoring the number of chaff
+blocks added to the message, the sender can make breaking the message
+computationally infeasible. There are many other interesting properties of
+the winnow/chaff technique.
+
+For example, say Alice is sending a message to Bob. She packetizes the
+message and performs an all-or-nothing transformation on the packets. Then
+she authenticates each packet with a message authentication code (MAC). The
+MAC is a hash of the data packet, and there is a secret key which she must
+share with Bob (key distribution is an exercise left to the reader). She then
+adds a serial number to each packet, and sends the packets to Bob.
+
+Bob receives the packets, and using the shared secret authentication key,
+authenticates the MACs for each packet. Those packets that have bad MACs are
+simply discarded. The remainder are sorted by serial number, and passed
+through the reverse all-or-nothing transform. The transform means that an
+eavesdropper (say Eve) must acquire all the packets before any of the data can
+be read. If even one packet is missing, the data is useless.
+
+There's one twist: by adding chaff packets, Alice and Bob can make Eve's job
+much harder, since Eve now has to break the shared secret key, or try every
+combination of wheat and chaff packet to read any of the message. The cool
+thing is that Bob doesn't need to add any additional code; the chaff packets
+are already filtered out because their MACs don't match (in all likelihood --
+since the data and MACs for the chaff packets are randomly chosen it is
+possible, but very unlikely that a chaff MAC will match the chaff data). And
+Alice need not even be the party adding the chaff! She could be completely
+unaware that a third party, say Charles, is adding chaff packets to her
+messages as they are transmitted.
+
+For more information on winnowing and chaffing see this paper:
+
+Ronald L. Rivest, "Chaffing and Winnowing: Confidentiality without Encryption"
+http://theory.lcs.mit.edu/~rivest/chaffing.txt
+
+"""
+
+__revision__ = "$Id: Chaffing.py,v 1.7 2003/02/28 15:23:21 akuchling Exp $"
+
+from Crypto.Util.number import bytes_to_long
+
+class Chaff:
+ """Class implementing the chaff adding algorithm.
+
+ Methods for subclasses:
+
+ _randnum(size):
+ Returns a randomly generated number with a byte-length equal
+ to size. Subclasses can use this to implement better random
+ data and MAC generating algorithms. The default algorithm is
+ probably not very cryptographically secure. It is most
+ important that the chaff data does not contain any patterns
+ that can be used to discern it from wheat data without running
+ the MAC.
+
+ """
+
+ def __init__(self, factor=1.0, blocksper=1):
+ """Chaff(factor:float, blocksper:int)
+
+ factor is the number of message blocks to add chaff to,
+ expressed as a percentage between 0.0 and 1.0. blocksper is
+ the number of chaff blocks to include for each block being
+ chaffed. Thus the defaults add one chaff block to every
+ message block. By changing the defaults, you can adjust how
+ computationally difficult it could be for an adversary to
+ brute-force crack the message. The difficulty is expressed
+ as:
+
+ pow(blocksper, int(factor * number-of-blocks))
+
+ For ease of implementation, when factor < 1.0, only the first
+ int(factor*number-of-blocks) message blocks are chaffed.
+ """
+
+ if not (0.0<=factor<=1.0):
+ raise ValueError, "'factor' must be between 0.0 and 1.0"
+ if blocksper < 0:
+ raise ValueError, "'blocksper' must be zero or more"
+
+ self.__factor = factor
+ self.__blocksper = blocksper
+
+
+ def chaff(self, blocks):
+ """chaff( [(serial-number:int, data:string, MAC:string)] )
+ : [(int, string, string)]
+
+ Add chaff to message blocks. blocks is a list of 3-tuples of the
+ form (serial-number, data, MAC).
+
+ Chaff is created by choosing a random number of the same
+ byte-length as data, and another random number of the same
+ byte-length as MAC. The message block's serial number is
+ placed on the chaff block and all the packet's chaff blocks
+ are randomly interspersed with the single wheat block. This
+ method then returns a list of 3-tuples of the same form.
+ Chaffed blocks will contain multiple instances of 3-tuples
+ with the same serial number, but the only way to figure out
+ which blocks are wheat and which are chaff is to perform the
+ MAC hash and compare values.
+ """
+
+ chaffedblocks = []
+
+ # count is the number of blocks to add chaff to. blocksper is the
+ # number of chaff blocks to add per message block that is being
+ # chaffed.
+ count = len(blocks) * self.__factor
+ blocksper = range(self.__blocksper)
+ for i, wheat in map(None, range(len(blocks)), blocks):
+ # it shouldn't matter which of the n blocks we add chaff to, so for
+ # ease of implementation, we'll just add them to the first count
+ # blocks
+ if i < count:
+ serial, data, mac = wheat
+ datasize = len(data)
+ macsize = len(mac)
+ addwheat = 1
+ # add chaff to this block
+ for j in blocksper:
+ import sys
+ chaffdata = self._randnum(datasize)
+ chaffmac = self._randnum(macsize)
+ chaff = (serial, chaffdata, chaffmac)
+ # mix up the order, if the 5th bit is on then put the
+ # wheat on the list
+ if addwheat and bytes_to_long(self._randnum(16)) & 0x40:
+ chaffedblocks.append(wheat)
+ addwheat = 0
+ chaffedblocks.append(chaff)
+ if addwheat:
+ chaffedblocks.append(wheat)
+ else:
+ # just add the wheat
+ chaffedblocks.append(wheat)
+ return chaffedblocks
+
+ def _randnum(self, size):
+ # TBD: Not a very secure algorithm.
+ # TBD: size * 2 to work around possible bug in RandomPool
+ from Crypto.Util import randpool
+ import time
+ pool = randpool.RandomPool(size * 2)
+ while size > pool.entropy:
+            pool.add_event()
+
+ # we now have enough entropy in the pool to get size bytes of random
+ # data... well, probably
+ return pool.get_bytes(size)
+
+
+
+if __name__ == '__main__':
+ text = """\
+We hold these truths to be self-evident, that all men are created equal, that
+they are endowed by their Creator with certain unalienable Rights, that among
+these are Life, Liberty, and the pursuit of Happiness. That to secure these
+rights, Governments are instituted among Men, deriving their just powers from
+the consent of the governed. That whenever any Form of Government becomes
+destructive of these ends, it is the Right of the People to alter or to
+abolish it, and to institute new Government, laying its foundation on such
+principles and organizing its powers in such form, as to them shall seem most
+likely to effect their Safety and Happiness.
+"""
+ print 'Original text:\n=========='
+ print text
+ print '=========='
+
+ # first transform the text into packets
+ blocks = [] ; size = 40
+ for i in range(0, len(text), size):
+ blocks.append( text[i:i+size] )
+
+ # now get MACs for all the text blocks. The key is obvious...
+ print 'Calculating MACs...'
+ from Crypto.Hash import HMAC, SHA
+ key = 'Jefferson'
+ macs = [HMAC.new(key, block, digestmod=SHA).digest()
+ for block in blocks]
+
+ assert len(blocks) == len(macs)
+
+ # put these into a form acceptable as input to the chaffing procedure
+ source = []
+ m = map(None, range(len(blocks)), blocks, macs)
+ print m
+ for i, data, mac in m:
+ source.append((i, data, mac))
+
+ # now chaff these
+ print 'Adding chaff...'
+ c = Chaff(factor=0.5, blocksper=2)
+ chaffed = c.chaff(source)
+
+ from base64 import encodestring
+
+ # print the chaffed message blocks. meanwhile, separate the wheat from
+ # the chaff
+
+ wheat = []
+ print 'chaffed message blocks:'
+ for i, data, mac in chaffed:
+ # do the authentication
+ h = HMAC.new(key, data, digestmod=SHA)
+ pmac = h.digest()
+ if pmac == mac:
+ tag = '-->'
+ wheat.append(data)
+ else:
+ tag = ' '
+ # base64 adds a trailing newline
+ print tag, '%3d' % i, \
+ repr(data), encodestring(mac)[:-1]
+
+ # now decode the message packets and check it against the original text
+ print 'Undigesting wheat...'
+ newtext = "".join(wheat)
+ if newtext == text:
+ print 'They match!'
+ else:
+ print 'They differ!'
diff --git a/python/gdata/Crypto/Protocol/__init__.py b/python/gdata/Crypto/Protocol/__init__.py
new file mode 100644
index 0000000..a6d68bc
--- /dev/null
+++ b/python/gdata/Crypto/Protocol/__init__.py
@@ -0,0 +1,17 @@
+
+"""Cryptographic protocols
+
+Implements various cryptographic protocols. (Don't expect to find
+network protocols here.)
+
+Crypto.Protocol.AllOrNothing Transforms a message into a set of message
+ blocks, such that the blocks can be
+ recombined to get the message back.
+
+Crypto.Protocol.Chaffing Takes a set of authenticated message blocks
+ (the wheat) and adds a number of
+ randomly generated blocks (the chaff).
+"""
+
+__all__ = ['AllOrNothing', 'Chaffing']
+__revision__ = "$Id: __init__.py,v 1.4 2003/02/28 15:23:21 akuchling Exp $"
diff --git a/python/gdata/Crypto/PublicKey/DSA.py b/python/gdata/Crypto/PublicKey/DSA.py
new file mode 100644
index 0000000..7947b6f
--- /dev/null
+++ b/python/gdata/Crypto/PublicKey/DSA.py
@@ -0,0 +1,238 @@
+
+#
+# DSA.py : Digital Signature Algorithm
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: DSA.py,v 1.16 2004/05/06 12:52:54 akuchling Exp $"
+
+from Crypto.PublicKey.pubkey import *
+from Crypto.Util import number
+from Crypto.Util.number import bytes_to_long, long_to_bytes
+from Crypto.Hash import SHA
+
+try:
+ from Crypto.PublicKey import _fastmath
+except ImportError:
+ _fastmath = None
+
+class error (Exception):
+ pass
+
+def generateQ(randfunc):
+ S=randfunc(20)
+ hash1=SHA.new(S).digest()
+ hash2=SHA.new(long_to_bytes(bytes_to_long(S)+1)).digest()
+ q = bignum(0)
+ for i in range(0,20):
+ c=ord(hash1[i])^ord(hash2[i])
+ if i==0:
+ c=c | 128
+ if i==19:
+ c= c | 1
+ q=q*256+c
+ while (not isPrime(q)):
+ q=q+2
+ if pow(2,159L) < q < pow(2,160L):
+ return S, q
+ raise error, 'Bad q value generated'
+
+def generate(bits, randfunc, progress_func=None):
+ """generate(bits:int, randfunc:callable, progress_func:callable)
+
+ Generate a DSA key of length 'bits', using 'randfunc' to get
+ random data and 'progress_func', if present, to display
+ the progress of the key generation.
+ """
+
+ if bits<160:
+ raise error, 'Key length <160 bits'
+ obj=DSAobj()
+ # Generate string S and prime q
+ if progress_func:
+ progress_func('p,q\n')
+ while (1):
+ S, obj.q = generateQ(randfunc)
+ n=(bits-1)/160
+ C, N, V = 0, 2, {}
+ b=(obj.q >> 5) & 15
+ powb=pow(bignum(2), b)
+ powL1=pow(bignum(2), bits-1)
+ while C<4096:
+ for k in range(0, n+1):
+ V[k]=bytes_to_long(SHA.new(S+str(N)+str(k)).digest())
+ W=V[n] % powb
+ for k in range(n-1, -1, -1):
+ W=(W<<160L)+V[k]
+ X=W+powL1
+ p=X-(X%(2*obj.q)-1)
+ if powL1<=p and isPrime(p):
+ break
+ C, N = C+1, N+n+1
+ if C<4096:
+ break
+ if progress_func:
+ progress_func('4096 multiples failed\n')
+
+ obj.p = p
+ power=(p-1)/obj.q
+ if progress_func:
+ progress_func('h,g\n')
+ while (1):
+ h=bytes_to_long(randfunc(bits)) % (p-1)
+ g=pow(h, power, p)
+        if 1<h<p-1 and g>1:
+ break
+ obj.g=g
+ if progress_func:
+ progress_func('x,y\n')
+ while (1):
+ x=bytes_to_long(randfunc(20))
+ if 0 < x < obj.q:
+ break
+ obj.x, obj.y = x, pow(g, x, p)
+ return obj
+
+def construct(tuple):
+ """construct(tuple:(long,long,long,long)|(long,long,long,long,long)):DSAobj
+ Construct a DSA object from a 4- or 5-tuple of numbers.
+ """
+ obj=DSAobj()
+ if len(tuple) not in [4,5]:
+ raise error, 'argument for construct() wrong length'
+ for i in range(len(tuple)):
+ field = obj.keydata[i]
+ setattr(obj, field, tuple[i])
+ return obj
+
+class DSAobj(pubkey):
+ keydata=['y', 'g', 'p', 'q', 'x']
+
+ def _encrypt(self, s, Kstr):
+ raise error, 'DSA algorithm cannot encrypt data'
+
+ def _decrypt(self, s):
+ raise error, 'DSA algorithm cannot decrypt data'
+
+ def _sign(self, M, K):
+ if (K<2 or self.q<=K):
+ raise error, 'K is not between 2 and q'
+ r=pow(self.g, K, self.p) % self.q
+ s=(inverse(K, self.q)*(M+self.x*r)) % self.q
+ return (r,s)
+
+ def _verify(self, M, sig):
+ r, s = sig
+ if r<=0 or r>=self.q or s<=0 or s>=self.q:
+ return 0
+ w=inverse(s, self.q)
+ u1, u2 = (M*w) % self.q, (r*w) % self.q
+ v1 = pow(self.g, u1, self.p)
+ v2 = pow(self.y, u2, self.p)
+ v = ((v1*v2) % self.p)
+ v = v % self.q
+ if v==r:
+ return 1
+ return 0
+
+ def size(self):
+ "Return the maximum number of bits that can be handled by this key."
+ return number.size(self.p) - 1
+
+ def has_private(self):
+ """Return a Boolean denoting whether the object contains
+ private components."""
+ if hasattr(self, 'x'):
+ return 1
+ else:
+ return 0
+
+ def can_sign(self):
+ """Return a Boolean value recording whether this algorithm can generate signatures."""
+ return 1
+
+ def can_encrypt(self):
+ """Return a Boolean value recording whether this algorithm can encrypt data."""
+ return 0
+
+ def publickey(self):
+ """Return a new key object containing only the public information."""
+ return construct((self.y, self.g, self.p, self.q))
+
+object=DSAobj
+
+generate_py = generate
+construct_py = construct
+
+class DSAobj_c(pubkey):
+ keydata = ['y', 'g', 'p', 'q', 'x']
+
+ def __init__(self, key):
+ self.key = key
+
+ def __getattr__(self, attr):
+ if attr in self.keydata:
+ return getattr(self.key, attr)
+ else:
+ if self.__dict__.has_key(attr):
+                return self.__dict__[attr]
+ else:
+ raise AttributeError, '%s instance has no attribute %s' % (self.__class__, attr)
+
+ def __getstate__(self):
+ d = {}
+ for k in self.keydata:
+ if hasattr(self.key, k):
+ d[k]=getattr(self.key, k)
+ return d
+
+ def __setstate__(self, state):
+ y,g,p,q = state['y'], state['g'], state['p'], state['q']
+ if not state.has_key('x'):
+ self.key = _fastmath.dsa_construct(y,g,p,q)
+ else:
+ x = state['x']
+ self.key = _fastmath.dsa_construct(y,g,p,q,x)
+
+ def _sign(self, M, K):
+ return self.key._sign(M, K)
+
+ def _verify(self, M, (r, s)):
+ return self.key._verify(M, r, s)
+
+ def size(self):
+ return self.key.size()
+
+ def has_private(self):
+ return self.key.has_private()
+
+ def publickey(self):
+ return construct_c((self.key.y, self.key.g, self.key.p, self.key.q))
+
+ def can_sign(self):
+ return 1
+
+ def can_encrypt(self):
+ return 0
+
+def generate_c(bits, randfunc, progress_func=None):
+ obj = generate_py(bits, randfunc, progress_func)
+ y,g,p,q,x = obj.y, obj.g, obj.p, obj.q, obj.x
+ return construct_c((y,g,p,q,x))
+
+def construct_c(tuple):
+ key = apply(_fastmath.dsa_construct, tuple)
+ return DSAobj_c(key)
+
+if _fastmath:
+ #print "using C version of DSA"
+ generate = generate_c
+ construct = construct_c
+ error = _fastmath.error
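+
+# A minimal usage sketch, not from the original module: generate a toy-sized
+# key with the package's RandomPool as randfunc, sign a SHA digest with a
+# random K below q, and verify it.  The 512-bit modulus and 128-bit K are
+# arbitrary example parameters.
+if __name__ == '__main__':
+    from Crypto.Util import randpool
+
+    pool = randpool.RandomPool()
+    key = generate(512, pool.get_bytes)               # toy size, for illustration
+    digest = SHA.new('the message').digest()
+    K = number.getRandomNumber(128, pool.get_bytes)   # satisfies 2 <= K < q
+    signature = key.sign(digest, K)
+    assert key.verify(digest, signature)
+    assert not key.publickey().has_private()          # public half carries no x
+    print 'DSA sign/verify round trip OK'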
diff --git a/python/gdata/Crypto/PublicKey/ElGamal.py b/python/gdata/Crypto/PublicKey/ElGamal.py
new file mode 100644
index 0000000..026881c
--- /dev/null
+++ b/python/gdata/Crypto/PublicKey/ElGamal.py
@@ -0,0 +1,132 @@
+#
+# ElGamal.py : ElGamal encryption/decryption and signatures
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: ElGamal.py,v 1.9 2003/04/04 19:44:26 akuchling Exp $"
+
+from Crypto.PublicKey.pubkey import *
+from Crypto.Util import number
+
+class error (Exception):
+ pass
+
+# Generate an ElGamal key with N bits
+def generate(bits, randfunc, progress_func=None):
+ """generate(bits:int, randfunc:callable, progress_func:callable)
+
+ Generate an ElGamal key of length 'bits', using 'randfunc' to get
+ random data and 'progress_func', if present, to display
+ the progress of the key generation.
+ """
+ obj=ElGamalobj()
+ # Generate prime p
+ if progress_func:
+ progress_func('p\n')
+ obj.p=bignum(getPrime(bits, randfunc))
+ # Generate random number g
+ if progress_func:
+ progress_func('g\n')
+ size=bits-1-(ord(randfunc(1)) & 63) # g will be from 1--64 bits smaller than p
+ if size<1:
+ size=bits-1
+ while (1):
+ obj.g=bignum(getPrime(size, randfunc))
+ if obj.g < obj.p:
+ break
+ size=(size+1) % bits
+ if size==0:
+ size=4
+ # Generate random number x
+ if progress_func:
+ progress_func('x\n')
+ while (1):
+ size=bits-1-ord(randfunc(1)) # x will be from 1 to 256 bits smaller than p
+ if size>2:
+ break
+ while (1):
+ obj.x=bignum(getPrime(size, randfunc))
+ if obj.x < obj.p:
+ break
+ size = (size+1) % bits
+ if size==0:
+ size=4
+ if progress_func:
+ progress_func('y\n')
+ obj.y = pow(obj.g, obj.x, obj.p)
+ return obj
+
+def construct(tuple):
+ """construct(tuple:(long,long,long,long)|(long,long,long,long,long)))
+ : ElGamalobj
+ Construct an ElGamal key from a 3- or 4-tuple of numbers.
+ """
+
+ obj=ElGamalobj()
+ if len(tuple) not in [3,4]:
+ raise error, 'argument for construct() wrong length'
+ for i in range(len(tuple)):
+ field = obj.keydata[i]
+ setattr(obj, field, tuple[i])
+ return obj
+
+class ElGamalobj(pubkey):
+ keydata=['p', 'g', 'y', 'x']
+
+ def _encrypt(self, M, K):
+ a=pow(self.g, K, self.p)
+ b=( M*pow(self.y, K, self.p) ) % self.p
+ return ( a,b )
+
+ def _decrypt(self, M):
+ if (not hasattr(self, 'x')):
+ raise error, 'Private key not available in this object'
+ ax=pow(M[0], self.x, self.p)
+ plaintext=(M[1] * inverse(ax, self.p ) ) % self.p
+ return plaintext
+
+ def _sign(self, M, K):
+ if (not hasattr(self, 'x')):
+ raise error, 'Private key not available in this object'
+ p1=self.p-1
+ if (GCD(K, p1)!=1):
+ raise error, 'Bad K value: GCD(K,p-1)!=1'
+ a=pow(self.g, K, self.p)
+ t=(M-self.x*a) % p1
+ while t<0: t=t+p1
+ b=(t*inverse(K, p1)) % p1
+ return (a, b)
+
+ def _verify(self, M, sig):
+ v1=pow(self.y, sig[0], self.p)
+ v1=(v1*pow(sig[0], sig[1], self.p)) % self.p
+ v2=pow(self.g, M, self.p)
+ if v1==v2:
+ return 1
+ return 0
+
+ def size(self):
+ "Return the maximum number of bits that can be handled by this key."
+ return number.size(self.p) - 1
+
+ def has_private(self):
+ """Return a Boolean denoting whether the object contains
+ private components."""
+ if hasattr(self, 'x'):
+ return 1
+ else:
+ return 0
+
+ def publickey(self):
+ """Return a new key object containing only the public information."""
+ return construct((self.p, self.g, self.y))
+
+
+object=ElGamalobj
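+
+# A minimal usage sketch, not from the original module: ElGamal encryption and
+# decryption of a short string.  The 256-bit modulus and 128-bit per-message
+# value K are arbitrary toy choices so the pure-Python key generation stays
+# quick; real keys should be much larger.
+if __name__ == '__main__':
+    from Crypto.Util import randpool
+    from Crypto.Util.number import getRandomNumber
+
+    pool = randpool.RandomPool()
+    key = generate(256, pool.get_bytes)
+    K = getRandomNumber(128, pool.get_bytes)    # fresh random value per message
+    ciphertext = key.encrypt('secret', K)       # (a, b) pair, as byte strings
+    assert key.decrypt(ciphertext) == 'secret'
+    assert key.publickey().can_encrypt()
+    print 'ElGamal encrypt/decrypt round trip OK'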
diff --git a/python/gdata/Crypto/PublicKey/RSA.py b/python/gdata/Crypto/PublicKey/RSA.py
new file mode 100644
index 0000000..e0e877e
--- /dev/null
+++ b/python/gdata/Crypto/PublicKey/RSA.py
@@ -0,0 +1,256 @@
+#
+# RSA.py : RSA encryption/decryption
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: RSA.py,v 1.20 2004/05/06 12:52:54 akuchling Exp $"
+
+from Crypto.PublicKey import pubkey
+from Crypto.Util import number
+
+try:
+ from Crypto.PublicKey import _fastmath
+except ImportError:
+ _fastmath = None
+
+class error (Exception):
+ pass
+
+def generate(bits, randfunc, progress_func=None):
+ """generate(bits:int, randfunc:callable, progress_func:callable)
+
+ Generate an RSA key of length 'bits', using 'randfunc' to get
+ random data and 'progress_func', if present, to display
+ the progress of the key generation.
+ """
+ obj=RSAobj()
+
+ # Generate the prime factors of n
+ if progress_func:
+ progress_func('p,q\n')
+ p = q = 1L
+ while number.size(p*q) < bits:
+ p = pubkey.getPrime(bits/2, randfunc)
+ q = pubkey.getPrime(bits/2, randfunc)
+
+ # p shall be smaller than q (for calc of u)
+ if p > q:
+ (p, q)=(q, p)
+ obj.p = p
+ obj.q = q
+
+ if progress_func:
+ progress_func('u\n')
+ obj.u = pubkey.inverse(obj.p, obj.q)
+ obj.n = obj.p*obj.q
+
+ obj.e = 65537L
+ if progress_func:
+ progress_func('d\n')
+ obj.d=pubkey.inverse(obj.e, (obj.p-1)*(obj.q-1))
+
+ assert bits <= 1+obj.size(), "Generated key is too small"
+
+ return obj
+
+def construct(tuple):
+ """construct(tuple:(long,) : RSAobj
+ Construct an RSA object from a 2-, 3-, 5-, or 6-tuple of numbers.
+ """
+
+ obj=RSAobj()
+ if len(tuple) not in [2,3,5,6]:
+ raise error, 'argument for construct() wrong length'
+ for i in range(len(tuple)):
+ field = obj.keydata[i]
+ setattr(obj, field, tuple[i])
+ if len(tuple) >= 5:
+ # Ensure p is smaller than q
+ if obj.p>obj.q:
+ (obj.p, obj.q)=(obj.q, obj.p)
+
+ if len(tuple) == 5:
+ # u not supplied, so we're going to have to compute it.
+ obj.u=pubkey.inverse(obj.p, obj.q)
+
+ return obj
+
+class RSAobj(pubkey.pubkey):
+ keydata = ['n', 'e', 'd', 'p', 'q', 'u']
+ def _encrypt(self, plaintext, K=''):
+ if self.n<=plaintext:
+ raise error, 'Plaintext too large'
+ return (pow(plaintext, self.e, self.n),)
+
+ def _decrypt(self, ciphertext):
+ if (not hasattr(self, 'd')):
+ raise error, 'Private key not available in this object'
+ if self.n<=ciphertext[0]:
+ raise error, 'Ciphertext too large'
+ return pow(ciphertext[0], self.d, self.n)
+
+ def _sign(self, M, K=''):
+ return (self._decrypt((M,)),)
+
+ def _verify(self, M, sig):
+ m2=self._encrypt(sig[0])
+ if m2[0]==M:
+ return 1
+ else: return 0
+
+ def _blind(self, M, B):
+ tmp = pow(B, self.e, self.n)
+ return (M * tmp) % self.n
+
+ def _unblind(self, M, B):
+ tmp = pubkey.inverse(B, self.n)
+ return (M * tmp) % self.n
+
+ def can_blind (self):
+ """can_blind() : bool
+ Return a Boolean value recording whether this algorithm can
+ blind data. (This does not imply that this
+ particular key object has the private information required to
+        blind a message.)
+ """
+ return 1
+
+ def size(self):
+ """size() : int
+ Return the maximum number of bits that can be handled by this key.
+ """
+ return number.size(self.n) - 1
+
+ def has_private(self):
+ """has_private() : bool
+ Return a Boolean denoting whether the object contains
+ private components.
+ """
+ if hasattr(self, 'd'):
+ return 1
+ else: return 0
+
+ def publickey(self):
+ """publickey(): RSAobj
+ Return a new key object containing only the public key information.
+ """
+ return construct((self.n, self.e))
+
+class RSAobj_c(pubkey.pubkey):
+ keydata = ['n', 'e', 'd', 'p', 'q', 'u']
+
+ def __init__(self, key):
+ self.key = key
+
+ def __getattr__(self, attr):
+ if attr in self.keydata:
+ return getattr(self.key, attr)
+ else:
+ if self.__dict__.has_key(attr):
+                return self.__dict__[attr]
+ else:
+ raise AttributeError, '%s instance has no attribute %s' % (self.__class__, attr)
+
+ def __getstate__(self):
+ d = {}
+ for k in self.keydata:
+ if hasattr(self.key, k):
+ d[k]=getattr(self.key, k)
+ return d
+
+ def __setstate__(self, state):
+ n,e = state['n'], state['e']
+ if not state.has_key('d'):
+ self.key = _fastmath.rsa_construct(n,e)
+ else:
+ d = state['d']
+ if not state.has_key('q'):
+ self.key = _fastmath.rsa_construct(n,e,d)
+ else:
+ p, q, u = state['p'], state['q'], state['u']
+ self.key = _fastmath.rsa_construct(n,e,d,p,q,u)
+
+ def _encrypt(self, plain, K):
+ return (self.key._encrypt(plain),)
+
+ def _decrypt(self, cipher):
+ return self.key._decrypt(cipher[0])
+
+ def _sign(self, M, K):
+ return (self.key._sign(M),)
+
+ def _verify(self, M, sig):
+ return self.key._verify(M, sig[0])
+
+ def _blind(self, M, B):
+ return self.key._blind(M, B)
+
+ def _unblind(self, M, B):
+ return self.key._unblind(M, B)
+
+ def can_blind (self):
+ return 1
+
+ def size(self):
+ return self.key.size()
+
+ def has_private(self):
+ return self.key.has_private()
+
+ def publickey(self):
+ return construct_c((self.key.n, self.key.e))
+
+def generate_c(bits, randfunc, progress_func = None):
+ # Generate the prime factors of n
+ if progress_func:
+ progress_func('p,q\n')
+
+ p = q = 1L
+ while number.size(p*q) < bits:
+ p = pubkey.getPrime(bits/2, randfunc)
+ q = pubkey.getPrime(bits/2, randfunc)
+
+ # p shall be smaller than q (for calc of u)
+ if p > q:
+ (p, q)=(q, p)
+ if progress_func:
+ progress_func('u\n')
+ u=pubkey.inverse(p, q)
+ n=p*q
+
+ e = 65537L
+ if progress_func:
+ progress_func('d\n')
+ d=pubkey.inverse(e, (p-1)*(q-1))
+ key = _fastmath.rsa_construct(n,e,d,p,q,u)
+ obj = RSAobj_c(key)
+
+## print p
+## print q
+## print number.size(p), number.size(q), number.size(q*p),
+## print obj.size(), bits
+ assert bits <= 1+obj.size(), "Generated key is too small"
+ return obj
+
+
+def construct_c(tuple):
+ key = apply(_fastmath.rsa_construct, tuple)
+ return RSAobj_c(key)
+
+object = RSAobj
+
+generate_py = generate
+construct_py = construct
+
+if _fastmath:
+ #print "using C version of RSA"
+ generate = generate_c
+ construct = construct_c
+ error = _fastmath.error
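+
+# A minimal usage sketch, not from the original module: key generation,
+# public-key encryption and private-key signing through the pubkey interface.
+# The 512-bit modulus is a toy size chosen so the example runs quickly; in
+# real use you would sign a hash of the message, not the message itself.
+if __name__ == '__main__':
+    from Crypto.Util import randpool
+
+    pool = randpool.RandomPool()
+    key = generate(512, pool.get_bytes)
+    pub = key.publickey()                            # carries only n and e
+    ciphertext = pub.encrypt('attack at dawn', '')   # RSA ignores the K argument
+    assert key.decrypt(ciphertext) == 'attack at dawn'
+    signature = key.sign('attack at dawn', '')
+    assert pub.verify('attack at dawn', signature)
+    print 'RSA encrypt/decrypt and sign/verify OK'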
diff --git a/python/gdata/Crypto/PublicKey/__init__.py b/python/gdata/Crypto/PublicKey/__init__.py
new file mode 100644
index 0000000..ad1c80c
--- /dev/null
+++ b/python/gdata/Crypto/PublicKey/__init__.py
@@ -0,0 +1,17 @@
+"""Public-key encryption and signature algorithms.
+
+Public-key encryption uses two different keys, one for encryption and
+one for decryption. The encryption key can be made public, and the
+decryption key is kept private. Many public-key algorithms can also
+be used to sign messages, and some can *only* be used for signatures.
+
+Crypto.PublicKey.DSA Digital Signature Algorithm. (Signature only)
+Crypto.PublicKey.ElGamal (Signing and encryption)
+Crypto.PublicKey.RSA (Signing, encryption, and blinding)
+Crypto.PublicKey.qNEW (Signature only)
+
+"""
+
+__all__ = ['RSA', 'DSA', 'ElGamal', 'qNEW']
+__revision__ = "$Id: __init__.py,v 1.4 2003/04/03 20:27:13 akuchling Exp $"
+
diff --git a/python/gdata/Crypto/PublicKey/pubkey.py b/python/gdata/Crypto/PublicKey/pubkey.py
new file mode 100644
index 0000000..5c75c3e
--- /dev/null
+++ b/python/gdata/Crypto/PublicKey/pubkey.py
@@ -0,0 +1,172 @@
+#
+# pubkey.py : Internal functions for public key operations
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: pubkey.py,v 1.11 2003/04/03 20:36:14 akuchling Exp $"
+
+import types, warnings
+from Crypto.Util.number import *
+
+# Basic public key class
+class pubkey:
+ def __init__(self):
+ pass
+
+ def __getstate__(self):
+ """To keep key objects platform-independent, the key data is
+ converted to standard Python long integers before being
+ written out. It will then be reconverted as necessary on
+ restoration."""
+ d=self.__dict__
+ for key in self.keydata:
+ if d.has_key(key): d[key]=long(d[key])
+ return d
+
+ def __setstate__(self, d):
+ """On unpickling a key object, the key data is converted to the big
+number representation being used, whether that is Python long
+integers, MPZ objects, or whatever."""
+ for key in self.keydata:
+ if d.has_key(key): self.__dict__[key]=bignum(d[key])
+
+ def encrypt(self, plaintext, K):
+ """encrypt(plaintext:string|long, K:string|long) : tuple
+ Encrypt the string or integer plaintext. K is a random
+ parameter required by some algorithms.
+ """
+ wasString=0
+ if isinstance(plaintext, types.StringType):
+ plaintext=bytes_to_long(plaintext) ; wasString=1
+ if isinstance(K, types.StringType):
+ K=bytes_to_long(K)
+ ciphertext=self._encrypt(plaintext, K)
+ if wasString: return tuple(map(long_to_bytes, ciphertext))
+ else: return ciphertext
+
+ def decrypt(self, ciphertext):
+ """decrypt(ciphertext:tuple|string|long): string
+ Decrypt 'ciphertext' using this key.
+ """
+ wasString=0
+ if not isinstance(ciphertext, types.TupleType):
+ ciphertext=(ciphertext,)
+ if isinstance(ciphertext[0], types.StringType):
+ ciphertext=tuple(map(bytes_to_long, ciphertext)) ; wasString=1
+ plaintext=self._decrypt(ciphertext)
+ if wasString: return long_to_bytes(plaintext)
+ else: return plaintext
+
+ def sign(self, M, K):
+ """sign(M : string|long, K:string|long) : tuple
+ Return a tuple containing the signature for the message M.
+ K is a random parameter required by some algorithms.
+ """
+ if (not self.has_private()):
+ raise error, 'Private key not available in this object'
+ if isinstance(M, types.StringType): M=bytes_to_long(M)
+ if isinstance(K, types.StringType): K=bytes_to_long(K)
+ return self._sign(M, K)
+
+ def verify (self, M, signature):
+ """verify(M:string|long, signature:tuple) : bool
+ Verify that the signature is valid for the message M;
+ returns true if the signature checks out.
+ """
+ if isinstance(M, types.StringType): M=bytes_to_long(M)
+ return self._verify(M, signature)
+
+ # alias to compensate for the old validate() name
+ def validate (self, M, signature):
+ warnings.warn("validate() method name is obsolete; use verify()",
+                      DeprecationWarning)
+        return self.verify(M, signature)
+
+ def blind(self, M, B):
+ """blind(M : string|long, B : string|long) : string|long
+ Blind message M using blinding factor B.
+ """
+ wasString=0
+ if isinstance(M, types.StringType):
+ M=bytes_to_long(M) ; wasString=1
+ if isinstance(B, types.StringType): B=bytes_to_long(B)
+ blindedmessage=self._blind(M, B)
+ if wasString: return long_to_bytes(blindedmessage)
+ else: return blindedmessage
+
+ def unblind(self, M, B):
+ """unblind(M : string|long, B : string|long) : string|long
+ Unblind message M using blinding factor B.
+ """
+ wasString=0
+ if isinstance(M, types.StringType):
+ M=bytes_to_long(M) ; wasString=1
+ if isinstance(B, types.StringType): B=bytes_to_long(B)
+ unblindedmessage=self._unblind(M, B)
+ if wasString: return long_to_bytes(unblindedmessage)
+ else: return unblindedmessage
+
+
+ # The following methods will usually be left alone, except for
+ # signature-only algorithms. They both return Boolean values
+ # recording whether this key's algorithm can sign and encrypt.
+ def can_sign (self):
+ """can_sign() : bool
+ Return a Boolean value recording whether this algorithm can
+ generate signatures. (This does not imply that this
+ particular key object has the private information required to
+        generate a signature.)
+ """
+ return 1
+
+ def can_encrypt (self):
+ """can_encrypt() : bool
+ Return a Boolean value recording whether this algorithm can
+ encrypt data. (This does not imply that this
+ particular key object has the private information required to
+        decrypt a message.)
+ """
+ return 1
+
+ def can_blind (self):
+ """can_blind() : bool
+ Return a Boolean value recording whether this algorithm can
+ blind data. (This does not imply that this
+ particular key object has the private information required to
+        blind a message.)
+ """
+ return 0
+
+ # The following methods will certainly be overridden by
+ # subclasses.
+
+ def size (self):
+ """size() : int
+ Return the maximum number of bits that can be handled by this key.
+ """
+ return 0
+
+ def has_private (self):
+ """has_private() : bool
+ Return a Boolean denoting whether the object contains
+ private components.
+ """
+ return 0
+
+ def publickey (self):
+ """publickey(): object
+ Return a new key object containing only the public information.
+ """
+ return self
+
+ def __eq__ (self, other):
+ """__eq__(other): 0, 1
+ Compare us to other for equality.
+ """
+ return self.__getstate__() == other.__getstate__()
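+
+# A small illustrative sketch, not from the original module: the blind() and
+# unblind() hooks above used for an RSA blind signature.  The signer only ever
+# operates on the blinded value, yet the unblinded signature verifies against
+# the real message M.  The 512-bit key and the particular M and B below are
+# arbitrary example values.
+if __name__ == '__main__':
+    from Crypto.PublicKey import RSA
+    from Crypto.Util import randpool
+
+    pool = randpool.RandomPool()
+    key = RSA.generate(512, pool.get_bytes)
+    M = bytes_to_long('blind me')               # the message, as a long below n
+    B = getRandomNumber(128, pool.get_bytes)    # blinding factor
+    blinded = key.blind(M, B)
+    blind_sig = key.sign(blinded, '')[0]        # signer sees only the blinded value
+    signature = (key.unblind(blind_sig, B),)
+    assert key.verify(M, signature)
+    print 'blind signature verifies against the original message'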
diff --git a/python/gdata/Crypto/PublicKey/qNEW.py b/python/gdata/Crypto/PublicKey/qNEW.py
new file mode 100644
index 0000000..65f8ae3
--- /dev/null
+++ b/python/gdata/Crypto/PublicKey/qNEW.py
@@ -0,0 +1,170 @@
+#
+# qNEW.py : The q-NEW signature algorithm.
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: qNEW.py,v 1.8 2003/04/04 15:13:35 akuchling Exp $"
+
+from Crypto.PublicKey import pubkey
+from Crypto.Util.number import *
+from Crypto.Hash import SHA
+
+class error (Exception):
+ pass
+
+HASHBITS = 160 # Size of SHA digests
+
+def generate(bits, randfunc, progress_func=None):
+ """generate(bits:int, randfunc:callable, progress_func:callable)
+
+ Generate a qNEW key of length 'bits', using 'randfunc' to get
+ random data and 'progress_func', if present, to display
+ the progress of the key generation.
+ """
+ obj=qNEWobj()
+
+ # Generate prime numbers p and q. q is a 160-bit prime
+ # number. p is another prime number (the modulus) whose bit
+ # size is chosen by the caller, and is generated so that p-1
+ # is a multiple of q.
+ #
+ # Note that only a single seed is used to
+ # generate p and q; if someone generates a key for you, you can
+ # use the seed to duplicate the key generation. This can
+ # protect you from someone generating values of p,q that have
+ # some special form that's easy to break.
+ if progress_func:
+ progress_func('p,q\n')
+ while (1):
+ obj.q = getPrime(160, randfunc)
+        #           assert pow(2, 159L) < obj.q < pow(2, 160L)
+        obj.seed = S = long_to_bytes(obj.q)
+        C, N, V = 0, 2, {}
+        # Compute b and n such that bits-1 = b + n*HASHBITS
+        n = (bits-1) / HASHBITS
+        b = (bits-1) % HASHBITS ; powb = 2L << b
+        powL1 = pow(bignum(2), bits-1)
+        while C < 4096:
+            # The V array will contain (bits-1) bits of random
+            # data, that are assembled to produce a candidate
+            # value for p.
+            for k in range(0, n+1):
+                V[k] = bytes_to_long(SHA.new(S+str(N)+str(k)).digest())
+            p = V[n] % powb
+            for k in range(n-1, -1, -1):
+                p = (p << long(HASHBITS)) + V[k]
+            p = p + powL1               # Ensure the high bit is set
+            # Ensure that p-1 is a multiple of q
+            p = p - (p % (2*obj.q) - 1)
+            # If p is still the right size, and it's prime, we're done
+            if powL1 <= p and isPrime(p):
+                break
+            # Otherwise, increment the counter and try again
+            C, N = C+1, N+n+1
+        if C < 4096:
+            break               # Ended early, so exit the while loop
+        if progress_func:
+            progress_func('4096 multiples failed\n')
+
+    obj.p = p
+    power = (p-1) / obj.q
+
+    # Next parameter: g = h**((p-1)/q) mod p, such that h is any
+    # number < p-1, and g > 1.  g is kept; h can be discarded.
+ if progress_func:
+ progress_func('h,g\n')
+ while (1):
+ h=bytes_to_long(randfunc(bits)) % (p-1)
+ g=pow(h, power, p)
+        if 1<h<p-1 and g>1:
+ break
+ obj.g=g
+
+ # x is the private key information, and is
+ # just a random number between 0 and q.
+ # y=g**x mod p, and is part of the public information.
+ if progress_func:
+ progress_func('x,y\n')
+ while (1):
+ x=bytes_to_long(randfunc(20))
+ if 0 < x < obj.q:
+ break
+ obj.x, obj.y=x, pow(g, x, p)
+
+ return obj
+
+# Construct a qNEW object
+def construct(tuple):
+ """construct(tuple:(long,long,long,long)|(long,long,long,long,long)
+ Construct a qNEW object from a 4- or 5-tuple of numbers.
+ """
+ obj=qNEWobj()
+ if len(tuple) not in [4,5]:
+ raise error, 'argument for construct() wrong length'
+ for i in range(len(tuple)):
+ field = obj.keydata[i]
+ setattr(obj, field, tuple[i])
+ return obj
+
+class qNEWobj(pubkey.pubkey):
+ keydata=['p', 'q', 'g', 'y', 'x']
+
+ def _sign(self, M, K=''):
+ if (self.q<=K):
+ raise error, 'K is greater than q'
+ if M<0:
+ raise error, 'Illegal value of M (<0)'
+ if M>=pow(2,161L):
+ raise error, 'Illegal value of M (too large)'
+ r=pow(self.g, K, self.p) % self.q
+ s=(K- (r*M*self.x % self.q)) % self.q
+ return (r,s)
+ def _verify(self, M, sig):
+ r, s = sig
+ if r<=0 or r>=self.q or s<=0 or s>=self.q:
+ return 0
+ if M<0:
+ raise error, 'Illegal value of M (<0)'
+ if M<=0 or M>=pow(2,161L):
+ return 0
+ v1 = pow(self.g, s, self.p)
+ v2 = pow(self.y, M*r, self.p)
+ v = ((v1*v2) % self.p)
+ v = v % self.q
+ if v==r:
+ return 1
+ return 0
+
+ def size(self):
+ "Return the maximum number of bits that can be handled by this key."
+ return 160
+
+ def has_private(self):
+ """Return a Boolean denoting whether the object contains
+ private components."""
+ return hasattr(self, 'x')
+
+ def can_sign(self):
+ """Return a Boolean value recording whether this algorithm can generate signatures."""
+ return 1
+
+ def can_encrypt(self):
+ """Return a Boolean value recording whether this algorithm can encrypt data."""
+ return 0
+
+ def publickey(self):
+ """Return a new key object containing only the public information."""
+ return construct((self.p, self.q, self.g, self.y))
+
+object = qNEWobj
+
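+# A minimal usage sketch, not from the original module: qNEW signs and
+# verifies through the same pubkey interface as DSA.  The 512-bit modulus and
+# 128-bit K are arbitrary toy parameters.
+if __name__ == '__main__':
+    from Crypto.Util import randpool
+
+    pool = randpool.RandomPool()
+    key = generate(512, pool.get_bytes)
+    M = bytes_to_long(SHA.new('the message').digest())   # must stay below 2**161
+    K = getRandomNumber(128, pool.get_bytes)             # random value below q
+    signature = key.sign(M, K)
+    assert key.verify(M, signature)
+    print 'qNEW sign/verify round trip OK'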
diff --git a/python/gdata/Crypto/Util/RFC1751.py b/python/gdata/Crypto/Util/RFC1751.py
new file mode 100644
index 0000000..0a47952
--- /dev/null
+++ b/python/gdata/Crypto/Util/RFC1751.py
@@ -0,0 +1,342 @@
+#!/usr/local/bin/python
+# rfc1751.py : Converts between 128-bit strings and a human-readable
+# sequence of words, as defined in RFC1751: "A Convention for
+# Human-Readable 128-bit Keys", by Daniel L. McDonald.
+
+__revision__ = "$Id: RFC1751.py,v 1.6 2003/04/04 15:15:10 akuchling Exp $"
+
+
+import string, binascii
+
+binary={0:'0000', 1:'0001', 2:'0010', 3:'0011', 4:'0100', 5:'0101',
+ 6:'0110', 7:'0111', 8:'1000', 9:'1001', 10:'1010', 11:'1011',
+ 12:'1100', 13:'1101', 14:'1110', 15:'1111'}
+
+def _key2bin(s):
+ "Convert a key into a string of binary digits"
+ kl=map(lambda x: ord(x), s)
+ kl=map(lambda x: binary[x/16]+binary[x&15], kl)
+ return ''.join(kl)
+
+def _extract(key, start, length):
+ """Extract a bitstring from a string of binary digits, and return its
+ numeric value."""
+ k=key[start:start+length]
+ return reduce(lambda x,y: x*2+ord(y)-48, k, 0)
+
+def key_to_english (key):
+ """key_to_english(key:string) : string
+ Transform an arbitrary key into a string containing English words.
+ The key length must be a multiple of 8.
+ """
+ english=''
+ for index in range(0, len(key), 8): # Loop over 8-byte subkeys
+ subkey=key[index:index+8]
+ # Compute the parity of the key
+ skbin=_key2bin(subkey) ; p=0
+ for i in range(0, 64, 2): p=p+_extract(skbin, i, 2)
+ # Append parity bits to the subkey
+ skbin=_key2bin(subkey+chr((p<<6) & 255))
+ for i in range(0, 64, 11):
+ english=english+wordlist[_extract(skbin, i, 11)]+' '
+
+ return english[:-1] # Remove the trailing space
+
+def english_to_key (str):
+ """english_to_key(string):string
+ Transform a string into a corresponding key.
+ The string must contain words separated by whitespace; the number
+ of words must be a multiple of 6.
+ """
+
+ L=string.split(string.upper(str)) ; key=''
+ for index in range(0, len(L), 6):
+ sublist=L[index:index+6] ; char=9*[0] ; bits=0
+ for i in sublist:
+ index = wordlist.index(i)
+ shift = (8-(bits+11)%8) %8
+ y = index << shift
+ cl, cc, cr = (y>>16), (y>>8)&0xff, y & 0xff
+ if (shift>5):
+ char[bits/8] = char[bits/8] | cl
+ char[bits/8+1] = char[bits/8+1] | cc
+ char[bits/8+2] = char[bits/8+2] | cr
+ elif shift>-3:
+ char[bits/8] = char[bits/8] | cc
+ char[bits/8+1] = char[bits/8+1] | cr
+ else: char[bits/8] = char[bits/8] | cr
+ bits=bits+11
+ subkey=reduce(lambda x,y:x+chr(y), char, '')
+
+ # Check the parity of the resulting key
+ skbin=_key2bin(subkey)
+ p=0
+ for i in range(0, 64, 2): p=p+_extract(skbin, i, 2)
+ if (p&3) != _extract(skbin, 64, 2):
+ raise ValueError, "Parity error in resulting key"
+ key=key+subkey[0:8]
+ return key
+
+wordlist=[ "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD",
+ "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA",
+ "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK",
+ "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE",
+ "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM",
+ "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET",
+ "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO",
+ "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT",
+ "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT",
+ "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY",
+ "CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN",
+ "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG",
+ "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB",
+ "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO",
+ "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE",
+ "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW",
+ "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR",
+ "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP",
+ "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO",
+ "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD",
+ "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM",
+ "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT",
+ "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE",
+ "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL",
+ "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT",
+ "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET",
+ "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT",
+ "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB",
+ "LAC", "LAD", "LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE",
+ "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT",
+ "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG",
+ "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW",
+ "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT",
+ "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG",
+ "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED",
+ "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD",
+ "NON", "NOR", "NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF",
+ "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL",
+ "OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT",
+ "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD",
+ "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG",
+ "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT",
+ "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB",
+ "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT",
+ "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM",
+ "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB",
+ "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM",
+ "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET",
+ "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY",
+ "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY",
+ "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN",
+ "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE",
+ "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP",
+ "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP",
+ "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS",
+ "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT",
+ "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE",
+ "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT",
+ "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS",
+ "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE",
+ "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA",
+ "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN",
+ "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW",
+ "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA",
+ "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM",
+ "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW",
+ "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL",
+ "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM",
+ "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK",
+ "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH",
+ "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT",
+ "BEAU", "BECK", "BEEF", "BEEN", "BEER",
+ "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN",
+ "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE",
+ "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE",
+ "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT",
+ "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK",
+ "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT",
+ "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK",
+ "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS",
+ "BOTH", "BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN",
+ "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD",
+ "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG",
+ "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST",
+ "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF",
+ "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL",
+ "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL",
+ "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF",
+ "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG",
+ "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY",
+ "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA",
+ "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN",
+ "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK",
+ "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST",
+ "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB",
+ "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY",
+ "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE",
+ "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN",
+ "DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS",
+ "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED",
+ "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK",
+ "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT",
+ "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES",
+ "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA",
+ "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG",
+ "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK",
+ "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK",
+ "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST",
+ "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT",
+ "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT",
+ "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED",
+ "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL",
+ "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT",
+ "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST",
+ "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE",
+ "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE",
+ "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW",
+ "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM",
+ "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL",
+ "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL",
+ "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY",
+ "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY",
+ "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA",
+ "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH",
+ "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE",
+ "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT",
+ "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN",
+ "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD",
+ "GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG",
+ "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB",
+ "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN",
+ "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH",
+ "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR",
+ "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK",
+ "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE",
+ "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR",
+ "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL",
+ "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN",
+ "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT",
+ "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE",
+ "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK",
+ "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL",
+ "HOYT", "HUCK", "HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK",
+ "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE",
+ "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH",
+ "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE",
+ "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE",
+ "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL",
+ "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN",
+ "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY",
+ "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST",
+ "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL",
+ "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL",
+ "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW",
+ "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD",
+ "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN",
+ "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD",
+ "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS",
+ "LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER",
+ "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST",
+ "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU",
+ "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB",
+ "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST",
+ "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE",
+ "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD",
+ "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK",
+ "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE",
+ "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE",
+ "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI",
+ "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK",
+ "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE",
+ "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK",
+ "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH",
+ "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT",
+ "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS",
+ "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD",
+ "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON",
+ "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH",
+ "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK",
+ "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL",
+ "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR",
+ "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS",
+ "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA",
+ "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON",
+ "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB",
+ "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY",
+ "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE",
+ "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", "OSLO", "OTIS",
+ "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY",
+ "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT",
+ "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE",
+ "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR",
+ "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA",
+ "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT",
+ "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD",
+ "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME",
+ "ROOD", "ROOF", "ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS",
+ "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY",
+ "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE",
+ "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE",
+ "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE",
+ "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR",
+ "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK",
+ "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS",
+ "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN",
+ "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE",
+ "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE",
+ "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW",
+ "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY",
+ "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT",
+ "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB",
+ "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA",
+ "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE",
+ "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR",
+ "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH",
+ "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF",
+ "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM",
+ "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK",
+ "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM",
+ "TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS",
+ "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN",
+ "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER",
+ "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY",
+ "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG",
+ "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR",
+ "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG",
+ "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE",
+ "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK",
+ "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER",
+ "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST",
+ "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY",
+ "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE",
+ "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK",
+ "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM",
+ "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY",
+ "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR",
+ "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM",
+ "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE",
+ "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE",
+ "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD",
+ "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE",
+ "YANG", "YANK", "YARD", "YARN", "YAWL", "YAWN", "YEAH", "YEAR",
+ "YELL", "YOGA", "YOKE" ]
+
+if __name__=='__main__':
+ data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'),
+ ('CCAC2AED591056BE4F90FD441C534766',
+ 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'),
+ ('EFF81F9BFBC65350920CDD7416DE8009',
+ 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL')
+ ]
+
+ for key, words in data:
+ print 'Trying key', key
+ key=binascii.a2b_hex(key)
+ w2=key_to_english(key)
+ if w2!=words:
+ print 'key_to_english fails on key', repr(key), ', producing', str(w2)
+ k2=english_to_key(words)
+ if k2!=key:
+ print 'english_to_key fails on key', repr(key), ', producing', repr(k2)
+
+
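+# A short round-trip sketch, not part of the original self-test above: a
+# freshly generated 128-bit key survives conversion to words and back.
+# os.urandom is assumed here as the source of the 16 random bytes.
+if __name__=='__main__':
+    import os
+    k = os.urandom(16)
+    words = key_to_english(k)
+    assert english_to_key(words) == k
+    print 'random key round trip OK:', words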
diff --git a/python/gdata/Crypto/Util/__init__.py b/python/gdata/Crypto/Util/__init__.py
new file mode 100644
index 0000000..0d14768
--- /dev/null
+++ b/python/gdata/Crypto/Util/__init__.py
@@ -0,0 +1,16 @@
+"""Miscellaneous modules
+
+Contains useful modules that don't belong into any of the
+other Crypto.* subpackages.
+
+Crypto.Util.number Number-theoretic functions (primality testing, etc.)
+Crypto.Util.randpool Random number generation
+Crypto.Util.RFC1751 Converts between 128-bit keys and human-readable
+ strings of words.
+
+"""
+
+__all__ = ['randpool', 'RFC1751', 'number']
+
+__revision__ = "$Id: __init__.py,v 1.4 2003/02/28 15:26:00 akuchling Exp $"
+
diff --git a/python/gdata/Crypto/Util/number.py b/python/gdata/Crypto/Util/number.py
new file mode 100644
index 0000000..9d50563
--- /dev/null
+++ b/python/gdata/Crypto/Util/number.py
@@ -0,0 +1,201 @@
+#
+# number.py : Number-theoretic functions
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: number.py,v 1.13 2003/04/04 18:21:07 akuchling Exp $"
+
+bignum = long
+try:
+ from Crypto.PublicKey import _fastmath
+except ImportError:
+ _fastmath = None
+
+# Commented out and replaced with faster versions below
+## def long2str(n):
+## s=''
+## while n>0:
+## s=chr(n & 255)+s
+## n=n>>8
+## return s
+
+## import types
+## def str2long(s):
+## if type(s)!=types.StringType: return s # Integers will be left alone
+## return reduce(lambda x,y : x*256+ord(y), s, 0L)
+
+def size (N):
+ """size(N:long) : int
+ Returns the size of the number N in bits.
+ """
+ bits, power = 0,1L
+ while N >= power:
+ bits += 1
+ power = power << 1
+ return bits
+
+def getRandomNumber(N, randfunc):
+ """getRandomNumber(N:int, randfunc:callable):long
+ Return an N-bit random number."""
+
+ S = randfunc(N/8)
+ odd_bits = N % 8
+ if odd_bits != 0:
+ char = ord(randfunc(1)) >> (8-odd_bits)
+ S = chr(char) + S
+ value = bytes_to_long(S)
+ value |= 2L ** (N-1) # Ensure high bit is set
+ assert size(value) >= N
+ return value
+
+def GCD(x,y):
+ """GCD(x:long, y:long): long
+ Return the GCD of x and y.
+ """
+ x = abs(x) ; y = abs(y)
+ while x > 0:
+ x, y = y % x, x
+ return y
+
+def inverse(u, v):
+ """inverse(u:long, u:long):long
+ Return the inverse of u mod v.
+ """
+ u3, v3 = long(u), long(v)
+ u1, v1 = 1L, 0L
+ while v3 > 0:
+ q=u3 / v3
+ u1, v1 = v1, u1 - v1*q
+ u3, v3 = v3, u3 - v3*q
+ while u1<0:
+ u1 = u1 + v
+ return u1
+
+# Given a number of bits to generate and a random generation function,
+# find a prime number of the appropriate size.
+
+def getPrime(N, randfunc):
+ """getPrime(N:int, randfunc:callable):long
+ Return a random N-bit prime number.
+ """
+
+ number=getRandomNumber(N, randfunc) | 1
+ while (not isPrime(number)):
+ number=number+2
+ return number
+
+def isPrime(N):
+ """isPrime(N:long):bool
+ Return true if N is prime.
+ """
+ if N == 1:
+ return 0
+ if N in sieve:
+ return 1
+ for i in sieve:
+ if (N % i)==0:
+ return 0
+
+ # Use the accelerator if available
+ if _fastmath is not None:
+ return _fastmath.isPrime(N)
+
+ # Compute the highest bit that's set in N
+ N1 = N - 1L
+ n = 1L
+    while (n<N1):
+        n=n<<1L
+    n = n >> 1L
+
+ # Rabin-Miller test
+ for c in sieve[:7]:
+ a=long(c) ; d=1L ; t=n
+ while (t): # Iterate over the bits in N1
+ x=(d*d) % N
+ if x==1L and d!=1L and d!=N1:
+ return 0 # Square root of 1 found
+ if N1 & t:
+ d=(x*a) % N
+ else:
+ d=x
+ t = t >> 1L
+ if d!=1L:
+ return 0
+ return 1
+
+# Small primes used for checking primality; these are all the primes
+# less than 256. This should be enough to eliminate most of the odd
+# numbers before needing to do a Rabin-Miller test at all.
+
+sieve=[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59,
+ 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127,
+ 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193,
+ 197, 199, 211, 223, 227, 229, 233, 239, 241, 251]
+
+# Improved conversion functions contributed by Barry Warsaw, after
+# careful benchmarking
+
+import struct
+
+def long_to_bytes(n, blocksize=0):
+ """long_to_bytes(n:long, blocksize:int) : string
+ Convert a long integer to a byte string.
+
+ If optional blocksize is given and greater than zero, pad the front of the
+ byte string with binary zeros so that the length is a multiple of
+ blocksize.
+ """
+ # after much testing, this algorithm was deemed to be the fastest
+ s = ''
+ n = long(n)
+ pack = struct.pack
+ while n > 0:
+ s = pack('>I', n & 0xffffffffL) + s
+ n = n >> 32
+ # strip off leading zeros
+ for i in range(len(s)):
+ if s[i] != '\000':
+ break
+ else:
+ # only happens when n == 0
+ s = '\000'
+ i = 0
+ s = s[i:]
+ # add back some pad bytes. this could be done more efficiently w.r.t. the
+ # de-padding being done above, but sigh...
+ if blocksize > 0 and len(s) % blocksize:
+ s = (blocksize - len(s) % blocksize) * '\000' + s
+ return s
+
+def bytes_to_long(s):
+ """bytes_to_long(string) : long
+ Convert a byte string to a long integer.
+
+ This is (essentially) the inverse of long_to_bytes().
+ """
+ acc = 0L
+ unpack = struct.unpack
+ length = len(s)
+ if length % 4:
+ extra = (4 - length % 4)
+ s = '\000' * extra + s
+ length = length + extra
+ for i in range(0, length, 4):
+ acc = (acc << 32) + unpack('>I', s[i:i+4])[0]
+ return acc
+
+# For backwards compatibility...
+import warnings
+def long2str(n, blocksize=0):
+ warnings.warn("long2str() has been replaced by long_to_bytes()")
+ return long_to_bytes(n, blocksize)
+def str2long(s):
+ warnings.warn("str2long() has been replaced by bytes_to_long()")
+ return bytes_to_long(s)
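+
+# A short self-check sketch, not from the original module: exercises
+# getPrime(), inverse() and the byte/long conversions defined above.
+# os.urandom stands in for a caller-supplied randfunc here.
+if __name__ == '__main__':
+    import os
+
+    p = getPrime(128, os.urandom)
+    assert isPrime(p) and size(p) >= 128
+    a = getRandomNumber(64, os.urandom)
+    assert (a * inverse(a, p)) % p == 1          # a * a**-1 == 1 (mod p)
+    assert bytes_to_long(long_to_bytes(a)) == a  # byte/long round trip
+    print 'number.py self-check passed'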
diff --git a/python/gdata/Crypto/Util/randpool.py b/python/gdata/Crypto/Util/randpool.py
new file mode 100644
index 0000000..467501c
--- /dev/null
+++ b/python/gdata/Crypto/Util/randpool.py
@@ -0,0 +1,421 @@
+#
+# randpool.py : Cryptographically strong random number generation
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: randpool.py,v 1.14 2004/05/06 12:56:54 akuchling Exp $"
+
+import time, array, types, warnings, os.path
+from Crypto.Util.number import long_to_bytes
+try:
+ import Crypto.Util.winrandom as winrandom
+except:
+ winrandom = None
+
+STIRNUM = 3
+
+class RandomPool:
+ """randpool.py : Cryptographically strong random number generation.
+
+ The implementation here is similar to the one in PGP. To be
+ cryptographically strong, it must be difficult to determine the RNG's
+ output, whether in the future or the past. This is done by using
+ a cryptographic hash function to "stir" the random data.
+
+ Entropy is gathered in the same fashion as PGP; the highest-resolution
+ clock around is read and the data is added to the random number pool.
+ A conservative estimate of the entropy is then kept.
+
+ If a cryptographically secure random source is available (/dev/urandom
+ on many Unixes, Windows CryptGenRandom on most Windows), then use
+ it.
+
+ Instance Attributes:
+ bits : int
+ Maximum size of pool in bits
+ bytes : int
+ Maximum size of pool in bytes
+ entropy : int
+ Number of bits of entropy in this pool.
+
+ Methods:
+ add_event([s]) : add some entropy to the pool
+ get_bytes(int) : get N bytes of random data
+ randomize([N]) : get N bytes of randomness from external source
+ """
+
+
+ def __init__(self, numbytes = 160, cipher=None, hash=None):
+ if hash is None:
+ from Crypto.Hash import SHA as hash
+
+ # The cipher argument is vestigial; it was removed from
+ # version 1.1 so RandomPool would work even in the limited
+ # exportable subset of the code
+ if cipher is not None:
+ warnings.warn("'cipher' parameter is no longer used")
+
+ if isinstance(hash, types.StringType):
+ # ugly hack to force __import__ to give us the end-path module
+ hash = __import__('Crypto.Hash.'+hash,
+ None, None, ['new'])
+ warnings.warn("'hash' parameter should now be a hashing module")
+
+ self.bytes = numbytes
+ self.bits = self.bytes*8
+ self.entropy = 0
+ self._hash = hash
+
+ # Construct an array to hold the random pool,
+ # initializing it to 0.
+ self._randpool = array.array('B', [0]*self.bytes)
+
+ self._event1 = self._event2 = 0
+ self._addPos = 0
+ self._getPos = hash.digest_size
+ self._lastcounter=time.time()
+ self.__counter = 0
+
+ self._measureTickSize() # Estimate timer resolution
+ self._randomize()
+
+ def _updateEntropyEstimate(self, nbits):
+ self.entropy += nbits
+ if self.entropy < 0:
+ self.entropy = 0
+ elif self.entropy > self.bits:
+ self.entropy = self.bits
+
+ def _randomize(self, N = 0, devname = '/dev/urandom'):
+ """_randomize(N, DEVNAME:device-filepath)
+ collects N bits of randomness from some entropy source (e.g.,
+ /dev/urandom on Unixes that have it, Windows CryptoAPI
+ CryptGenRandom, etc)
+ DEVNAME is optional, defaults to /dev/urandom. You can change it
+ to /dev/random if you want to block till you get enough
+ entropy.
+ """
+ data = ''
+ if N <= 0:
+ nbytes = int((self.bits - self.entropy)/8+0.5)
+ else:
+ nbytes = int(N/8+0.5)
+ if winrandom:
+ # Windows CryptGenRandom provides random data.
+ data = winrandom.new().get_bytes(nbytes)
+ elif os.path.exists(devname):
+ # Many OSes support a /dev/urandom device
+ try:
+ f=open(devname)
+ data=f.read(nbytes)
+ f.close()
+ except IOError, (num, msg):
+ if num!=2: raise IOError, (num, msg)
+ # If the file wasn't found, ignore the error
+ if data:
+ self._addBytes(data)
+ # Entropy estimate: The number of bits of
+ # data obtained from the random source.
+ self._updateEntropyEstimate(8*len(data))
+ self.stir_n() # Wash the random pool
+
+ def randomize(self, N=0):
+ """randomize(N:int)
+ use the class entropy source to get some entropy data.
+ This is overridden by KeyboardRandomize().
+ """
+ return self._randomize(N)
+
+ def stir_n(self, N = STIRNUM):
+ """stir_n(N)
+ stirs the random pool N times
+ """
+ for i in xrange(N):
+ self.stir()
+
+ def stir (self, s = ''):
+ """stir(s:string)
+ Mix up the randomness pool. This will call add_event() twice,
+ but out of paranoia the entropy attribute will not be
+ increased. The optional 's' parameter is a string that will
+ be hashed with the randomness pool.
+ """
+
+ entropy=self.entropy # Save inital entropy value
+ self.add_event()
+
+ # Loop over the randomness pool: hash its contents
+ # along with a counter, and add the resulting digest
+ # back into the pool.
+ for i in range(self.bytes / self._hash.digest_size):
+ h = self._hash.new(self._randpool)
+ h.update(str(self.__counter) + str(i) + str(self._addPos) + s)
+ self._addBytes( h.digest() )
+ self.__counter = (self.__counter + 1) & 0xFFFFffffL
+
+ self._addPos, self._getPos = 0, self._hash.digest_size
+ self.add_event()
+
+ # Restore the old value of the entropy.
+ self.entropy=entropy
+
+
+ def get_bytes (self, N):
+ """get_bytes(N:int) : string
+ Return N bytes of random data.
+ """
+
+ s=''
+ i, pool = self._getPos, self._randpool
+ h=self._hash.new()
+ dsize = self._hash.digest_size
+ num = N
+ while num > 0:
+ h.update( self._randpool[i:i+dsize] )
+ s = s + h.digest()
+ num = num - dsize
+ i = (i + dsize) % self.bytes
+            if i < dsize:
+                # Check for roll-over.  Stir the pool if necessary.
+                self.stir()
+                i = self._getPos
+        self._getPos = i
+        self._updateEntropyEstimate(-8*N)
+        return s[:N]
+
+    def add_event(self, s=''):
+        """add_event(s:string)
+        Add an event to the random pool.  The current time is stored
+        between calls and used to estimate the entropy.  The optional
+        's' parameter is a string that will also be XORed into the pool.
+        Returns the estimated number of additional bits of entropy gain.
+        """
+        event = time.time()*1000
+        delta = self._noise()
+        s = (s + long_to_bytes(event) +
+             4*chr(0xaa) + long_to_bytes(delta) )
+        self._addBytes(s)
+        if event == self._event1 and event == self._event2:
+            # If events are coming too closely together, assume there's
+            # no effective entropy being added
+            bits = 0
+        else:
+            # Count the number of bits in delta, and assume that's the entropy
+            bits = 0
+            while delta:
+                delta, bits = delta>>1, bits+1
+ if bits>8: bits=8
+
+ self._event1, self._event2 = event, self._event1
+
+ self._updateEntropyEstimate(bits)
+ return bits
+
+ # Private functions
+ def _noise(self):
+ # Adds a bit of noise to the random pool, by adding in the
+ # current time and CPU usage of this process.
+ # The difference from the previous call to _noise() is taken
+ # in an effort to estimate the entropy.
+ t=time.time()
+ delta = (t - self._lastcounter)/self._ticksize*1e6
+ self._lastcounter = t
+ self._addBytes(long_to_bytes(long(1000*time.time())))
+ self._addBytes(long_to_bytes(long(1000*time.clock())))
+ self._addBytes(long_to_bytes(long(1000*time.time())))
+ self._addBytes(long_to_bytes(long(delta)))
+
+ # Reduce delta to a maximum of 8 bits so we don't add too much
+ # entropy as a result of this call.
+ delta=delta % 0xff
+ return int(delta)
+
+
+ def _measureTickSize(self):
+ # _measureTickSize() tries to estimate a rough average of the
+ # resolution of time that you can see from Python. It does
+ # this by measuring the time 100 times, computing the delay
+ # between measurements, and taking the median of the resulting
+ # list. (We also hash all the times and add them to the pool)
+ interval = [None] * 100
+ h = self._hash.new(`(id(self),id(interval))`)
+
+ # Compute 100 differences
+ t=time.time()
+ h.update(`t`)
+ i = 0
+ j = 0
+ while i < 100:
+ t2=time.time()
+ h.update(`(i,j,t2)`)
+ j += 1
+ delta=int((t2-t)*1e6)
+ if delta:
+ interval[i] = delta
+ i += 1
+ t=t2
+
+ # Take the median of the array of intervals
+ interval.sort()
+ self._ticksize=interval[len(interval)/2]
+ h.update(`(interval,self._ticksize)`)
+ # mix in the measurement times and wash the random pool
+ self.stir(h.digest())
+
+ def _addBytes(self, s):
+ "XOR the contents of the string S into the random pool"
+ i, pool = self._addPos, self._randpool
+ for j in range(0, len(s)):
+ pool[i]=pool[i] ^ ord(s[j])
+ i=(i+1) % self.bytes
+ self._addPos = i
+
+ # Deprecated method names: remove in PCT 2.1 or later.
+ def getBytes(self, N):
+ warnings.warn("getBytes() method replaced by get_bytes()",
+ DeprecationWarning)
+ return self.get_bytes(N)
+
+ def addEvent (self, event, s=""):
+ warnings.warn("addEvent() method replaced by add_event()",
+ DeprecationWarning)
+ return self.add_event(s + str(event))
+
+class PersistentRandomPool (RandomPool):
+ def __init__ (self, filename=None, *args, **kwargs):
+ RandomPool.__init__(self, *args, **kwargs)
+ self.filename = filename
+ if filename:
+ try:
+ # the time taken to open and read the file might have
+ # a little disk variability, modulo disk/kernel caching...
+ f=open(filename, 'rb')
+ self.add_event()
+ data = f.read()
+ self.add_event()
+ # mix in the data from the file and wash the random pool
+ self.stir(data)
+ f.close()
+ except IOError:
+ # Oh, well; the file doesn't exist or is unreadable, so
+ # we'll just ignore it.
+ pass
+
+ def save(self):
+ if self.filename == "":
+ raise ValueError, "No filename set for this object"
+ # wash the random pool before save, provides some forward secrecy for
+ # old values of the pool.
+ self.stir_n()
+ f=open(self.filename, 'wb')
+ self.add_event()
+ f.write(self._randpool.tostring())
+ f.close()
+ self.add_event()
+ # wash the pool again, provide some protection for future values
+ self.stir()
+
+# non-echoing Windows keyboard entry
+_kb = 0
+if not _kb:
+ try:
+ import msvcrt
+ class KeyboardEntry:
+ def getch(self):
+ c = msvcrt.getch()
+ if c in ('\000', '\xe0'):
+ # function key
+ c += msvcrt.getch()
+ return c
+ def close(self, delay = 0):
+ if delay:
+ time.sleep(delay)
+ while msvcrt.kbhit():
+ msvcrt.getch()
+ _kb = 1
+ except:
+ pass
+
+# non-echoing Posix keyboard entry
+if not _kb:
+ try:
+ import termios
+ class KeyboardEntry:
+ def __init__(self, fd = 0):
+ self._fd = fd
+ self._old = termios.tcgetattr(fd)
+ new = termios.tcgetattr(fd)
+ new[3]=new[3] & ~termios.ICANON & ~termios.ECHO
+ termios.tcsetattr(fd, termios.TCSANOW, new)
+ def getch(self):
+                termios.tcflush(self._fd, termios.TCIFLUSH) # XXX Leave this in?
+ return os.read(self._fd, 1)
+ def close(self, delay = 0):
+ if delay:
+ time.sleep(delay)
+ termios.tcflush(self._fd, termios.TCIFLUSH)
+ termios.tcsetattr(self._fd, termios.TCSAFLUSH, self._old)
+ _kb = 1
+ except:
+ pass
+
+class KeyboardRandomPool (PersistentRandomPool):
+ def __init__(self, *args, **kwargs):
+ PersistentRandomPool.__init__(self, *args, **kwargs)
+
+ def randomize(self, N = 0):
+ "Adds N bits of entropy to random pool. If N is 0, fill up pool."
+ import os, string, time
+ if N <= 0:
+ bits = self.bits - self.entropy
+ else:
+ bits = N*8
+ if bits == 0:
+ return
+ print bits,'bits of entropy are now required. Please type on the keyboard'
+ print 'until enough randomness has been accumulated.'
+ kb = KeyboardEntry()
+ s='' # We'll save the characters typed and add them to the pool.
+ hash = self._hash
+ e = 0
+ try:
+ while e < bits:
+ temp=str(bits-e).rjust(6)
+ os.write(1, temp)
+ s=s+kb.getch()
+ e += self.add_event(s)
+ os.write(1, 6*chr(8))
+ self.add_event(s+hash.new(s).digest() )
+ finally:
+ kb.close()
+ print '\n\007 Enough. Please wait a moment.\n'
+ self.stir_n() # wash the random pool.
+ kb.close(4)
+
+if __name__ == '__main__':
+ pool = RandomPool()
+ print 'random pool entropy', pool.entropy, 'bits'
+ pool.add_event('something')
+ print `pool.get_bytes(100)`
+ import tempfile, os
+ fname = tempfile.mktemp()
+ pool = KeyboardRandomPool(filename=fname)
+ print 'keyboard random pool entropy', pool.entropy, 'bits'
+ pool.randomize()
+ print 'keyboard random pool entropy', pool.entropy, 'bits'
+ pool.randomize(128)
+ pool.save()
+ saved = open(fname, 'rb').read()
+ print 'saved', `saved`
+ print 'pool ', `pool._randpool.tostring()`
+ newpool = PersistentRandomPool(fname)
+ print 'persistent random pool entropy', pool.entropy, 'bits'
+ os.remove(fname)
diff --git a/python/gdata/Crypto/Util/test.py b/python/gdata/Crypto/Util/test.py
new file mode 100644
index 0000000..7b23e9f
--- /dev/null
+++ b/python/gdata/Crypto/Util/test.py
@@ -0,0 +1,453 @@
+#
+# test.py : Functions used for testing the modules
+#
+# Part of the Python Cryptography Toolkit
+#
+# Distribute and use freely; there are no restrictions on further
+# dissemination and usage except those imposed by the laws of your
+# country of residence. This software is provided "as is" without
+# warranty of fitness for use or suitability for any purpose, express
+# or implied. Use at your own risk or not at all.
+#
+
+__revision__ = "$Id: test.py,v 1.16 2004/08/13 22:24:18 akuchling Exp $"
+
+import binascii
+import string
+import testdata
+
+from Crypto.Cipher import *
+
+def die(string):
+ import sys
+ print '***ERROR: ', string
+# sys.exit(0) # Will default to continuing onward...
+
+def print_timing (size, delta, verbose):
+ if verbose:
+ if delta == 0:
+ print 'Unable to measure time -- elapsed time too small'
+ else:
+ print '%.2f K/sec' % (size/delta)
+
+def exerciseBlockCipher(cipher, verbose):
+ import string, time
+ try:
+ ciph = eval(cipher)
+ except NameError:
+ print cipher, 'module not available'
+ return None
+ print cipher+ ':'
+ str='1' # Build 128K of test data
+ for i in xrange(0, 17):
+ str=str+str
+ if ciph.key_size==0: ciph.key_size=16
+ password = 'password12345678Extra text for password'[0:ciph.key_size]
+ IV = 'Test IV Test IV Test IV Test'[0:ciph.block_size]
+
+ if verbose: print ' ECB mode:',
+ obj=ciph.new(password, ciph.MODE_ECB)
+ if obj.block_size != ciph.block_size:
+ die("Module and cipher object block_size don't match")
+
+ text='1234567812345678'[0:ciph.block_size]
+ c=obj.encrypt(text)
+ if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
+ text='KuchlingKuchling'[0:ciph.block_size]
+ c=obj.encrypt(text)
+ if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
+ text='NotTodayNotEver!'[0:ciph.block_size]
+ c=obj.encrypt(text)
+ if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
+
+ start=time.time()
+ s=obj.encrypt(str)
+ s2=obj.decrypt(s)
+ end=time.time()
+ if (str!=s2):
+ die('Error in resulting plaintext from ECB mode')
+ print_timing(256, end-start, verbose)
+ del obj
+
+ if verbose: print ' CFB mode:',
+ obj1=ciph.new(password, ciph.MODE_CFB, IV)
+ obj2=ciph.new(password, ciph.MODE_CFB, IV)
+ start=time.time()
+ ciphertext=obj1.encrypt(str[0:65536])
+ plaintext=obj2.decrypt(ciphertext)
+ end=time.time()
+ if (plaintext!=str[0:65536]):
+ die('Error in resulting plaintext from CFB mode')
+ print_timing(64, end-start, verbose)
+ del obj1, obj2
+
+ if verbose: print ' CBC mode:',
+ obj1=ciph.new(password, ciph.MODE_CBC, IV)
+ obj2=ciph.new(password, ciph.MODE_CBC, IV)
+ start=time.time()
+ ciphertext=obj1.encrypt(str)
+ plaintext=obj2.decrypt(ciphertext)
+ end=time.time()
+ if (plaintext!=str):
+ die('Error in resulting plaintext from CBC mode')
+ print_timing(256, end-start, verbose)
+ del obj1, obj2
+
+ if verbose: print ' PGP mode:',
+ obj1=ciph.new(password, ciph.MODE_PGP, IV)
+ obj2=ciph.new(password, ciph.MODE_PGP, IV)
+ start=time.time()
+ ciphertext=obj1.encrypt(str)
+ plaintext=obj2.decrypt(ciphertext)
+ end=time.time()
+ if (plaintext!=str):
+ die('Error in resulting plaintext from PGP mode')
+ print_timing(256, end-start, verbose)
+ del obj1, obj2
+
+ if verbose: print ' OFB mode:',
+ obj1=ciph.new(password, ciph.MODE_OFB, IV)
+ obj2=ciph.new(password, ciph.MODE_OFB, IV)
+ start=time.time()
+ ciphertext=obj1.encrypt(str)
+ plaintext=obj2.decrypt(ciphertext)
+ end=time.time()
+ if (plaintext!=str):
+ die('Error in resulting plaintext from OFB mode')
+ print_timing(256, end-start, verbose)
+ del obj1, obj2
+
+ def counter(length=ciph.block_size):
+ return length * 'a'
+
+ if verbose: print ' CTR mode:',
+ obj1=ciph.new(password, ciph.MODE_CTR, counter=counter)
+ obj2=ciph.new(password, ciph.MODE_CTR, counter=counter)
+ start=time.time()
+ ciphertext=obj1.encrypt(str)
+ plaintext=obj2.decrypt(ciphertext)
+ end=time.time()
+ if (plaintext!=str):
+ die('Error in resulting plaintext from CTR mode')
+ print_timing(256, end-start, verbose)
+ del obj1, obj2
+
+ # Test the IV handling
+ if verbose: print ' Testing IV handling'
+ obj1=ciph.new(password, ciph.MODE_CBC, IV)
+ plaintext='Test'*(ciph.block_size/4)*3
+ ciphertext1=obj1.encrypt(plaintext)
+ obj1.IV=IV
+ ciphertext2=obj1.encrypt(plaintext)
+ if ciphertext1!=ciphertext2:
+ die('Error in setting IV')
+
+ # Test keyword arguments
+ obj1=ciph.new(key=password)
+ obj1=ciph.new(password, mode=ciph.MODE_CBC)
+ obj1=ciph.new(mode=ciph.MODE_CBC, key=password)
+ obj1=ciph.new(IV=IV, mode=ciph.MODE_CBC, key=password)
+
+ return ciph
+
+def exerciseStreamCipher(cipher, verbose):
+ import string, time
+ try:
+ ciph = eval(cipher)
+ except (NameError):
+ print cipher, 'module not available'
+ return None
+ print cipher + ':',
+ str='1' # Build 128K of test data
+ for i in xrange(0, 17):
+ str=str+str
+ key_size = ciph.key_size or 16
+ password = 'password12345678Extra text for password'[0:key_size]
+
+ obj1=ciph.new(password)
+ obj2=ciph.new(password)
+ if obj1.block_size != ciph.block_size:
+ die("Module and cipher object block_size don't match")
+ if obj1.key_size != ciph.key_size:
+ die("Module and cipher object key_size don't match")
+
+ text='1234567812345678Python'
+ c=obj1.encrypt(text)
+ if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
+ text='B1FF I2 A R3A11Y |<00L D00D!!!!!'
+ c=obj1.encrypt(text)
+ if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
+ text='SpamSpamSpamSpamSpamSpamSpamSpamSpam'
+ c=obj1.encrypt(text)
+ if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
+
+ start=time.time()
+ s=obj1.encrypt(str)
+ str=obj2.decrypt(s)
+ end=time.time()
+ print_timing(256, end-start, verbose)
+ del obj1, obj2
+
+ return ciph
+
+def TestStreamModules(args=['arc4', 'XOR'], verbose=1):
+ import sys, string
+ args=map(string.lower, args)
+
+ if 'arc4' in args:
+ # Test ARC4 stream cipher
+ arc4=exerciseStreamCipher('ARC4', verbose)
+ if (arc4!=None):
+ for entry in testdata.arc4:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=arc4.new(key)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('ARC4 failed on entry '+`entry`)
+
+ if 'xor' in args:
+ # Test XOR stream cipher
+ XOR=exerciseStreamCipher('XOR', verbose)
+ if (XOR!=None):
+ for entry in testdata.xor:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=XOR.new(key)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('XOR failed on entry '+`entry`)
+
+
+def TestBlockModules(args=['aes', 'arc2', 'des', 'blowfish', 'cast', 'des3',
+ 'idea', 'rc5'],
+ verbose=1):
+ import string
+ args=map(string.lower, args)
+ if 'aes' in args:
+ ciph=exerciseBlockCipher('AES', verbose) # AES
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.aes:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, ciph.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('AES failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+
+ for entry in testdata.aes_modes:
+ mode, key, plain, cipher, kw = entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, mode, **kw)
+ obj2=ciph.new(key, mode, **kw)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('AES encrypt failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+
+ plain2=obj2.decrypt(ciphertext)
+ if plain2!=plain:
+ die('AES decrypt failed on entry '+`entry`)
+ for i in plain2:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+
+
+ if 'arc2' in args:
+ ciph=exerciseBlockCipher('ARC2', verbose) # Alleged RC2
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.arc2:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, ciph.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('ARC2 failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ print
+
+ if 'blowfish' in args:
+ ciph=exerciseBlockCipher('Blowfish',verbose)# Bruce Schneier's Blowfish cipher
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.blowfish:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, ciph.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('Blowfish failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+
+ if 'cast' in args:
+ ciph=exerciseBlockCipher('CAST', verbose) # CAST-128
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.cast:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, ciph.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('CAST failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+
+ if 0:
+ # The full-maintenance test; it requires 4 million encryptions,
+ # and correspondingly is quite time-consuming. I've disabled
+ # it; it's faster to compile block/cast.c with -DTEST and run
+ # the resulting program.
+ a = b = '\x01\x23\x45\x67\x12\x34\x56\x78\x23\x45\x67\x89\x34\x56\x78\x9A'
+
+ for i in range(0, 1000000):
+ obj = cast.new(b, cast.MODE_ECB)
+ a = obj.encrypt(a[:8]) + obj.encrypt(a[-8:])
+ obj = cast.new(a, cast.MODE_ECB)
+ b = obj.encrypt(b[:8]) + obj.encrypt(b[-8:])
+
+ if a!="\xEE\xA9\xD0\xA2\x49\xFD\x3B\xA6\xB3\x43\x6F\xB8\x9D\x6D\xCA\x92":
+ if verbose: print 'CAST test failed: value of "a" doesn\'t match'
+ if b!="\xB2\xC9\x5E\xB0\x0C\x31\xAD\x71\x80\xAC\x05\xB8\xE8\x3D\x69\x6E":
+ if verbose: print 'CAST test failed: value of "b" doesn\'t match'
+
+ if 'des' in args:
+ # Test/benchmark DES block cipher
+ des=exerciseBlockCipher('DES', verbose)
+ if (des!=None):
+ # Various tests taken from the DES library packaged with Kerberos V4
+ obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_ECB)
+ s=obj.encrypt('Now is t')
+ if (s!=binascii.a2b_hex('3fa40e8a984d4815')):
+ die('DES fails test 1')
+ obj=des.new(binascii.a2b_hex('08192a3b4c5d6e7f'), des.MODE_ECB)
+ s=obj.encrypt('\000\000\000\000\000\000\000\000')
+ if (s!=binascii.a2b_hex('25ddac3e96176467')):
+ die('DES fails test 2')
+ obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC,
+ binascii.a2b_hex('1234567890abcdef'))
+ s=obj.encrypt("Now is the time for all ")
+ if (s!=binascii.a2b_hex('e5c7cdde872bf27c43e934008c389c0f683788499a7c05f6')):
+ die('DES fails test 3')
+ obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC,
+ binascii.a2b_hex('fedcba9876543210'))
+ s=obj.encrypt("7654321 Now is the time for \000\000\000\000")
+ if (s!=binascii.a2b_hex("ccd173ffab2039f4acd8aefddfd8a1eb468e91157888ba681d269397f7fe62b4")):
+ die('DES fails test 4')
+ del obj,s
+
+ # R. Rivest's test: see http://theory.lcs.mit.edu/~rivest/destest.txt
+ x=binascii.a2b_hex('9474B8E8C73BCA7D')
+ for i in range(0, 16):
+ obj=des.new(x, des.MODE_ECB)
+ if (i & 1): x=obj.decrypt(x)
+ else: x=obj.encrypt(x)
+ if x!=binascii.a2b_hex('1B1A2DDB4C642438'):
+ die("DES fails Rivest's test")
+
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.des:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=des.new(key, des.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('DES failed on entry '+`entry`)
+ for entry in testdata.des_cbc:
+ key, iv, plain, cipher=entry
+ key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher)
+ obj1=des.new(key, des.MODE_CBC, iv)
+ obj2=des.new(key, des.MODE_CBC, iv)
+ ciphertext=obj1.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('DES CBC mode failed on entry '+`entry`)
+
+ if 'des3' in args:
+ ciph=exerciseBlockCipher('DES3', verbose) # Triple DES
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.des3:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, ciph.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('DES3 failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+ for entry in testdata.des3_cbc:
+ key, iv, plain, cipher=entry
+ key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher)
+ obj1=ciph.new(key, ciph.MODE_CBC, iv)
+ obj2=ciph.new(key, ciph.MODE_CBC, iv)
+ ciphertext=obj1.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('DES3 CBC mode failed on entry '+`entry`)
+
+ if 'idea' in args:
+ ciph=exerciseBlockCipher('IDEA', verbose) # IDEA block cipher
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.idea:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key, ciph.MODE_ECB)
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('IDEA failed on entry '+`entry`)
+
+ if 'rc5' in args:
+ # Ronald Rivest's RC5 algorithm
+ ciph=exerciseBlockCipher('RC5', verbose)
+ if (ciph!=None):
+ if verbose: print ' Verifying against test suite...'
+ for entry in testdata.rc5:
+ key,plain,cipher=entry
+ key=binascii.a2b_hex(key)
+ plain=binascii.a2b_hex(plain)
+ cipher=binascii.a2b_hex(cipher)
+ obj=ciph.new(key[4:], ciph.MODE_ECB,
+ version =ord(key[0]),
+ word_size=ord(key[1]),
+ rounds =ord(key[2]) )
+ ciphertext=obj.encrypt(plain)
+ if (ciphertext!=cipher):
+ die('RC5 failed on entry '+`entry`)
+ for i in ciphertext:
+ if verbose: print hex(ord(i)),
+ if verbose: print
+
+
+
diff --git a/python/gdata/Crypto/__init__.py b/python/gdata/Crypto/__init__.py
new file mode 100644
index 0000000..2324ae8
--- /dev/null
+++ b/python/gdata/Crypto/__init__.py
@@ -0,0 +1,25 @@
+
+"""Python Cryptography Toolkit
+
+A collection of cryptographic modules implementing various algorithms
+and protocols.
+
+Subpackages:
+Crypto.Cipher Secret-key encryption algorithms (AES, DES, ARC4)
+Crypto.Hash Hashing algorithms (MD5, SHA, HMAC)
+Crypto.Protocol Cryptographic protocols (Chaffing, all-or-nothing
+ transform). This package does not contain any
+ network protocols.
+Crypto.PublicKey Public-key encryption and signature algorithms
+ (RSA, DSA)
+Crypto.Util Various useful modules and functions (long-to-string
+ conversion, random number generation, number
+ theoretic functions)
+"""
+
+__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util']
+
+__version__ = '2.0.1'
+__revision__ = "$Id: __init__.py,v 1.12 2005/06/14 01:20:22 akuchling Exp $"
+
+
diff --git a/python/gdata/Crypto/test.py b/python/gdata/Crypto/test.py
new file mode 100644
index 0000000..c5ed061
--- /dev/null
+++ b/python/gdata/Crypto/test.py
@@ -0,0 +1,38 @@
+#
+# Test script for the Python Cryptography Toolkit.
+#
+
+__revision__ = "$Id: test.py,v 1.7 2002/07/11 14:31:19 akuchling Exp $"
+
+import os, sys
+
+
+# Add the build directory to the front of sys.path
+from distutils.util import get_platform
+s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+s = os.path.join(os.getcwd(), s)
+sys.path.insert(0, s)
+s = os.path.join(os.getcwd(), 'test')
+sys.path.insert(0, s)
+
+from Crypto.Util import test
+
+args = sys.argv[1:]
+quiet = "--quiet" in args
+if quiet: args.remove('--quiet')
+
+if not quiet:
+ print '\nStream Ciphers:'
+ print '==============='
+
+if args: test.TestStreamModules(args, verbose= not quiet)
+else: test.TestStreamModules(verbose= not quiet)
+
+if not quiet:
+ print '\nBlock Ciphers:'
+ print '=============='
+
+if args: test.TestBlockModules(args, verbose= not quiet)
+else: test.TestBlockModules(verbose= not quiet)
+
+
diff --git a/python/gdata/__init__.py b/python/gdata/__init__.py
new file mode 100644
index 0000000..634889b
--- /dev/null
+++ b/python/gdata/__init__.py
@@ -0,0 +1,835 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains classes representing Google Data elements.
+
+ Extends Atom classes to add Google Data specific elements.
+"""
+
+
+__author__ = 'j.s@google.com (Jeffrey Scudder)'
+
+import os
+import atom
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+
+
+# XML namespaces which are often used in GData entities.
+GDATA_NAMESPACE = 'http://schemas.google.com/g/2005'
+GDATA_TEMPLATE = '{http://schemas.google.com/g/2005}%s'
+OPENSEARCH_NAMESPACE = 'http://a9.com/-/spec/opensearchrss/1.0/'
+OPENSEARCH_TEMPLATE = '{http://a9.com/-/spec/opensearchrss/1.0/}%s'
+BATCH_NAMESPACE = 'http://schemas.google.com/gdata/batch'
+GACL_NAMESPACE = 'http://schemas.google.com/acl/2007'
+GACL_TEMPLATE = '{http://schemas.google.com/acl/2007}%s'
+
+
+# Labels used in batch request entries to specify the desired CRUD operation.
+BATCH_INSERT = 'insert'
+BATCH_UPDATE = 'update'
+BATCH_DELETE = 'delete'
+BATCH_QUERY = 'query'
+
+class Error(Exception):
+ pass
+
+
+class MissingRequiredParameters(Error):
+ pass
+
+
+class MediaSource(object):
+ """GData Entries can refer to media sources, so this class provides a
+ place to store references to these objects along with some metadata.
+ """
+
+ def __init__(self, file_handle=None, content_type=None, content_length=None,
+ file_path=None, file_name=None):
+ """Creates an object of type MediaSource.
+
+ Args:
+ file_handle: A file handle pointing to the file to be encapsulated in the
+ MediaSource
+ content_type: string The MIME type of the file. Required if a file_handle
+ is given.
+ content_length: int The size of the file. Required if a file_handle is
+ given.
+ file_path: string (optional) A full path name to the file. Used in
+ place of a file_handle.
+ file_name: string The name of the file without any path information.
+ Required if a file_handle is given.
+ """
+ self.file_handle = file_handle
+ self.content_type = content_type
+ self.content_length = content_length
+ self.file_name = file_name
+
+ if (file_handle is None and content_type is not None and
+ file_path is not None):
+ self.setFile(file_path, content_type)
+
+ def setFile(self, file_name, content_type):
+ """A helper function which can create a file handle from a given filename
+ and set the content type and length all at once.
+
+ Args:
+ file_name: string The path and file name to the file containing the media
+ content_type: string A MIME type representing the type of the media
+ """
+
+ self.file_handle = open(file_name, 'rb')
+ self.content_type = content_type
+ self.content_length = os.path.getsize(file_name)
+ self.file_name = os.path.basename(file_name)
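+
+# Example (sketch): passing file_path together with content_type lets the
+# constructor call setFile() and fill in the handle, length, and name for you;
+# the path below is only a placeholder.
+#
+#   ms = MediaSource(file_path='/tmp/photo.jpg', content_type='image/jpeg')
+#   ms.content_length   # populated via os.path.getsize()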
+
+
+class LinkFinder(atom.LinkFinder):
+ """An "interface" providing methods to find link elements
+
+ GData Entry elements often contain multiple links which differ in the rel
+ attribute or content type. Often, developers are interested in a specific
+ type of link so this class provides methods to find specific classes of
+ links.
+
+ This class is used as a mixin in GData entries.
+ """
+
+ def GetSelfLink(self):
+ """Find the first link with rel set to 'self'
+
+ Returns:
+      An atom.Link or None if none of the links had rel equal to 'self'
+ """
+
+ for a_link in self.link:
+ if a_link.rel == 'self':
+ return a_link
+ return None
+
+ def GetEditLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'edit':
+ return a_link
+ return None
+
+ def GetEditMediaLink(self):
+ """The Picasa API mistakenly returns media-edit rather than edit-media, but
+ this may change soon.
+ """
+ for a_link in self.link:
+ if a_link.rel == 'edit-media':
+ return a_link
+ if a_link.rel == 'media-edit':
+ return a_link
+ return None
+
+ def GetHtmlLink(self):
+ """Find the first link with rel of alternate and type of text/html
+
+ Returns:
+ An atom.Link or None if no links matched
+ """
+ for a_link in self.link:
+ if a_link.rel == 'alternate' and a_link.type == 'text/html':
+ return a_link
+ return None
+
+ def GetPostLink(self):
+ """Get a link containing the POST target URL.
+
+ The POST target URL is used to insert new entries.
+
+ Returns:
+ A link object with a rel matching the POST type.
+ """
+ for a_link in self.link:
+ if a_link.rel == 'http://schemas.google.com/g/2005#post':
+ return a_link
+ return None
+
+ def GetAclLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'http://schemas.google.com/acl/2007#accessControlList':
+ return a_link
+ return None
+
+ def GetFeedLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'http://schemas.google.com/g/2005#feed':
+ return a_link
+ return None
+
+ def GetNextLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'next':
+ return a_link
+ return None
+
+ def GetPrevLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'previous':
+ return a_link
+ return None
+
+
+class TotalResults(atom.AtomBase):
+ """opensearch:TotalResults for a GData feed"""
+
+ _tag = 'totalResults'
+ _namespace = OPENSEARCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def TotalResultsFromString(xml_string):
+ return atom.CreateClassFromXMLString(TotalResults, xml_string)
+
+
+class StartIndex(atom.AtomBase):
+ """The opensearch:startIndex element in GData feed"""
+
+ _tag = 'startIndex'
+ _namespace = OPENSEARCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def StartIndexFromString(xml_string):
+ return atom.CreateClassFromXMLString(StartIndex, xml_string)
+
+
+class ItemsPerPage(atom.AtomBase):
+ """The opensearch:itemsPerPage element in GData feed"""
+
+ _tag = 'itemsPerPage'
+ _namespace = OPENSEARCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def ItemsPerPageFromString(xml_string):
+ return atom.CreateClassFromXMLString(ItemsPerPage, xml_string)
+
+
+class ExtendedProperty(atom.AtomBase):
+ """The Google Data extendedProperty element.
+
+ Used to store arbitrary key-value information specific to your
+ application. The value can either be a text string stored as an XML
+ attribute (.value), or an XML node (XmlBlob) as a child element.
+
+ This element is used in the Google Calendar data API and the Google
+ Contacts data API.
+ """
+
+ _tag = 'extendedProperty'
+ _namespace = GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+ _attributes['value'] = 'value'
+
+ def __init__(self, name=None, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ def GetXmlBlobExtensionElement(self):
+ """Returns the XML blob as an atom.ExtensionElement.
+
+ Returns:
+ An atom.ExtensionElement representing the blob's XML, or None if no
+ blob was set.
+ """
+ if len(self.extension_elements) < 1:
+ return None
+ else:
+ return self.extension_elements[0]
+
+ def GetXmlBlobString(self):
+ """Returns the XML blob as a string.
+
+ Returns:
+ A string containing the blob's XML, or None if no blob was set.
+ """
+ blob = self.GetXmlBlobExtensionElement()
+ if blob:
+ return blob.ToString()
+ return None
+
+ def SetXmlBlob(self, blob):
+ """Sets the contents of the extendedProperty to XML as a child node.
+
+ Since the extendedProperty is only allowed one child element as an XML
+ blob, setting the XML blob will erase any preexisting extension elements
+ in this object.
+
+ Args:
+ blob: str, ElementTree Element or atom.ExtensionElement representing
+ the XML blob stored in the extendedProperty.
+ """
+ # Erase any existing extension_elements, clears the child nodes from the
+ # extendedProperty.
+ self.extension_elements = []
+ if isinstance(blob, atom.ExtensionElement):
+ self.extension_elements.append(blob)
+ elif ElementTree.iselement(blob):
+ self.extension_elements.append(atom._ExtensionElementFromElementTree(
+ blob))
+ else:
+ self.extension_elements.append(atom.ExtensionElementFromString(blob))
+
+
+def ExtendedPropertyFromString(xml_string):
+ return atom.CreateClassFromXMLString(ExtendedProperty, xml_string)
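+
+# Example (sketch): an extendedProperty may hold a simple name/value pair or
+# an arbitrary XML blob as its single child; the names and XML below are
+# placeholders.
+#
+#   prop = ExtendedProperty(name='my-key', value='my-value')
+#   blob_prop = ExtendedProperty(name='my-xml')
+#   blob_prop.SetXmlBlob('<mydata xmlns="http://example.com/ns">42</mydata>')
+#   blob_prop.GetXmlBlobString()   # serialized form of the stored child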
+
+
+class GDataEntry(atom.Entry, LinkFinder):
+ """Extends Atom Entry to provide data processing"""
+
+ _tag = atom.Entry._tag
+ _namespace = atom.Entry._namespace
+ _children = atom.Entry._children.copy()
+ _attributes = atom.Entry._attributes.copy()
+
+ def __GetId(self):
+ return self.__id
+
+ # This method was created to strip the unwanted whitespace from the id's
+ # text node.
+ def __SetId(self, id):
+ self.__id = id
+ if id is not None and id.text is not None:
+ self.__id.text = id.text.strip()
+
+ id = property(__GetId, __SetId)
+
+ def IsMedia(self):
+ """Determines whether or not an entry is a GData Media entry.
+ """
+ if (self.GetEditMediaLink()):
+ return True
+ else:
+ return False
+
+ def GetMediaURL(self):
+ """Returns the URL to the media content, if the entry is a media entry.
+ Otherwise returns None.
+ """
+ if not self.IsMedia():
+ return None
+ else:
+ return self.content.src
+
+
+def GDataEntryFromString(xml_string):
+ """Creates a new GDataEntry instance given a string of XML."""
+ return atom.CreateClassFromXMLString(GDataEntry, xml_string)
+
+
+class GDataFeed(atom.Feed, LinkFinder):
+ """A Feed from a GData service"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = atom.Feed._children.copy()
+ _attributes = atom.Feed._attributes.copy()
+ _children['{%s}totalResults' % OPENSEARCH_NAMESPACE] = ('total_results',
+ TotalResults)
+ _children['{%s}startIndex' % OPENSEARCH_NAMESPACE] = ('start_index',
+ StartIndex)
+ _children['{%s}itemsPerPage' % OPENSEARCH_NAMESPACE] = ('items_per_page',
+ ItemsPerPage)
+ # Add a conversion rule for atom:entry to make it into a GData
+ # Entry.
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [GDataEntry])
+
+ def __GetId(self):
+ return self.__id
+
+ def __SetId(self, id):
+ self.__id = id
+ if id is not None and id.text is not None:
+ self.__id.text = id.text.strip()
+
+ id = property(__GetId, __SetId)
+
+ def __GetGenerator(self):
+ return self.__generator
+
+ def __SetGenerator(self, generator):
+ self.__generator = generator
+ if generator is not None:
+ self.__generator.text = generator.text.strip()
+
+ generator = property(__GetGenerator, __SetGenerator)
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None, entry=None,
+ total_results=None, start_index=None, items_per_page=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ """Constructor for Source
+
+ Args:
+ author: list (optional) A list of Author instances which belong to this
+ class.
+ category: list (optional) A list of Category instances
+      contributor: list (optional) A list of Contributor instances
+ generator: Generator (optional)
+ icon: Icon (optional)
+ id: Id (optional) The entry's Id element
+ link: list (optional) A list of Link instances
+ logo: Logo (optional)
+ rights: Rights (optional) The entry's Rights element
+ subtitle: Subtitle (optional) The entry's subtitle element
+ title: Title (optional) the entry's title element
+ updated: Updated (optional) the entry's updated element
+ entry: list (optional) A list of the Entry instances contained in the
+ feed.
+ text: String (optional) The text contents of the element. This is the
+ contents of the Entry's XML text node.
+          (Example: <foo>This is the text</foo>)
+ extension_elements: list (optional) A list of ExtensionElement instances
+ which are children of this element.
+ extension_attributes: dict (optional) A dictionary of strings which are
+ the values for additional XML attributes of this element.
+ """
+
+ self.author = author or []
+ self.category = category or []
+ self.contributor = contributor or []
+ self.generator = generator
+ self.icon = icon
+ self.id = atom_id
+ self.link = link or []
+ self.logo = logo
+ self.rights = rights
+ self.subtitle = subtitle
+ self.title = title
+ self.updated = updated
+ self.entry = entry or []
+ self.total_results = total_results
+ self.start_index = start_index
+ self.items_per_page = items_per_page
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def GDataFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GDataFeed, xml_string)
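+
+# Example (sketch): parsing a feed and following links through the LinkFinder
+# mix-in; xml_string is assumed to contain a GData feed document.
+#
+#   feed = GDataFeedFromString(xml_string)
+#   print feed.total_results.text
+#   next_link = feed.GetNextLink()
+#   if next_link:
+#     print next_link.href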
+
+
+class BatchId(atom.AtomBase):
+ _tag = 'id'
+ _namespace = BATCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+
+def BatchIdFromString(xml_string):
+ return atom.CreateClassFromXMLString(BatchId, xml_string)
+
+
+class BatchOperation(atom.AtomBase):
+ _tag = 'operation'
+ _namespace = BATCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['type'] = 'type'
+
+ def __init__(self, op_type=None, extension_elements=None,
+ extension_attributes=None,
+ text=None):
+ self.type = op_type
+ atom.AtomBase.__init__(self,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def BatchOperationFromString(xml_string):
+ return atom.CreateClassFromXMLString(BatchOperation, xml_string)
+
+
+class BatchStatus(atom.AtomBase):
+ """The batch:status element present in a batch response entry.
+
+ A status element contains the code (HTTP response code) and
+ reason as elements. In a single request these fields would
+ be part of the HTTP response, but in a batch request each
+ Entry operation has a corresponding Entry in the response
+ feed which includes status information.
+
+ See http://code.google.com/apis/gdata/batch.html#Handling_Errors
+ """
+
+ _tag = 'status'
+ _namespace = BATCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['code'] = 'code'
+ _attributes['reason'] = 'reason'
+ _attributes['content-type'] = 'content_type'
+
+ def __init__(self, code=None, reason=None, content_type=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.code = code
+ self.reason = reason
+ self.content_type = content_type
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def BatchStatusFromString(xml_string):
+ return atom.CreateClassFromXMLString(BatchStatus, xml_string)
+
+
+class BatchEntry(GDataEntry):
+ """An atom:entry for use in batch requests.
+
+ The BatchEntry contains additional members to specify the operation to be
+ performed on this entry and a batch ID so that the server can reference
+ individual operations in the response feed. For more information, see:
+ http://code.google.com/apis/gdata/batch.html
+ """
+
+ _tag = GDataEntry._tag
+ _namespace = GDataEntry._namespace
+ _children = GDataEntry._children.copy()
+ _children['{%s}operation' % BATCH_NAMESPACE] = ('batch_operation', BatchOperation)
+ _children['{%s}id' % BATCH_NAMESPACE] = ('batch_id', BatchId)
+ _children['{%s}status' % BATCH_NAMESPACE] = ('batch_status', BatchStatus)
+ _attributes = GDataEntry._attributes.copy()
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, control=None, title=None, updated=None,
+ batch_operation=None, batch_id=None, batch_status=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.batch_operation = batch_operation
+ self.batch_id = batch_id
+ self.batch_status = batch_status
+ GDataEntry.__init__(self, author=author, category=category,
+ content=content, contributor=contributor, atom_id=atom_id, link=link,
+ published=published, rights=rights, source=source, summary=summary,
+ control=control, title=title, updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+def BatchEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(BatchEntry, xml_string)
+
+
+class BatchInterrupted(atom.AtomBase):
+ """The batch:interrupted element sent if batch request was interrupted.
+
+ Only appears in a feed if some of the batch entries could not be processed.
+ See: http://code.google.com/apis/gdata/batch.html#Handling_Errors
+ """
+
+ _tag = 'interrupted'
+ _namespace = BATCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['reason'] = 'reason'
+ _attributes['success'] = 'success'
+ _attributes['failures'] = 'failures'
+ _attributes['parsed'] = 'parsed'
+
+ def __init__(self, reason=None, success=None, failures=None, parsed=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.reason = reason
+ self.success = success
+ self.failures = failures
+ self.parsed = parsed
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def BatchInterruptedFromString(xml_string):
+ return atom.CreateClassFromXMLString(BatchInterrupted, xml_string)
+
+
+class BatchFeed(GDataFeed):
+ """A feed containing a list of batch request entries."""
+
+ _tag = GDataFeed._tag
+ _namespace = GDataFeed._namespace
+ _children = GDataFeed._children.copy()
+ _attributes = GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BatchEntry])
+ _children['{%s}interrupted' % BATCH_NAMESPACE] = ('interrupted', BatchInterrupted)
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None, entry=None,
+ total_results=None, start_index=None, items_per_page=None,
+ interrupted=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.interrupted = interrupted
+ GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results, start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+ def AddBatchEntry(self, entry=None, id_url_string=None,
+ batch_id_string=None, operation_string=None):
+ """Logic for populating members of a BatchEntry and adding to the feed.
+
+
+ If the entry is not a BatchEntry, it is converted to a BatchEntry so
+ that the batch specific members will be present.
+
+ The id_url_string can be used in place of an entry if the batch operation
+    applies to a URL. For example, query and delete operations require just
+    the URL of an entry; no body is sent in the HTTP request. If an
+ id_url_string is sent instead of an entry, a BatchEntry is created and
+ added to the feed.
+
+ This method also assigns the desired batch id to the entry so that it
+ can be referenced in the server's response. If the batch_id_string is
+ None, this method will assign a batch_id to be the index at which this
+ entry will be in the feed's entry list.
+
+ Args:
+ entry: BatchEntry, atom.Entry, or another Entry flavor (optional) The
+ entry which will be sent to the server as part of the batch request.
+ The item must have a valid atom id so that the server knows which
+ entry this request references.
+ id_url_string: str (optional) The URL of the entry to be acted on. You
+ can find this URL in the text member of the atom id for an entry.
+ If an entry is not sent, this id will be used to construct a new
+ BatchEntry which will be added to the request feed.
+ batch_id_string: str (optional) The batch ID to be used to reference
+ this batch operation in the results feed. If this parameter is None,
+ the current length of the feed's entry array will be used as a
+ count. Note that batch_ids should either always be specified or
+          never; mixing could potentially result in duplicate batch ids.
+ operation_string: str (optional) The desired batch operation which will
+ set the batch_operation.type member of the entry. Options are
+ 'insert', 'update', 'delete', and 'query'
+
+ Raises:
+      MissingRequiredParameters: Raised if neither an id_url_string nor an
+ entry are provided in the request.
+
+ Returns:
+ The added entry.
+ """
+ if entry is None and id_url_string is None:
+ raise MissingRequiredParameters('supply either an entry or URL string')
+ if entry is None and id_url_string is not None:
+ entry = BatchEntry(atom_id=atom.Id(text=id_url_string))
+ # TODO: handle cases in which the entry lacks batch_... members.
+ #if not isinstance(entry, BatchEntry):
+ # Convert the entry to a batch entry.
+ if batch_id_string is not None:
+ entry.batch_id = BatchId(text=batch_id_string)
+ elif entry.batch_id is None or entry.batch_id.text is None:
+ entry.batch_id = BatchId(text=str(len(self.entry)))
+ if operation_string is not None:
+ entry.batch_operation = BatchOperation(op_type=operation_string)
+ self.entry.append(entry)
+ return entry
+
+ def AddInsert(self, entry, batch_id_string=None):
+ """Add an insert request to the operations in this batch request feed.
+
+ If the entry doesn't yet have an operation or a batch id, these will
+ be set to the insert operation and a batch_id specified as a parameter.
+
+ Args:
+ entry: BatchEntry The entry which will be sent in the batch feed as an
+ insert request.
+ batch_id_string: str (optional) The batch ID to be used to reference
+ this batch operation in the results feed. If this parameter is None,
+ the current length of the feed's entry array will be used as a
+ count. Note that batch_ids should either always be specified or
+          never; mixing could potentially result in duplicate batch ids.
+ """
+ entry = self.AddBatchEntry(entry=entry, batch_id_string=batch_id_string,
+ operation_string=BATCH_INSERT)
+
+ def AddUpdate(self, entry, batch_id_string=None):
+ """Add an update request to the list of batch operations in this feed.
+
+    Sets the operation type of the entry to update if it is not already set
+ and assigns the desired batch id to the entry so that it can be
+ referenced in the server's response.
+
+ Args:
+ entry: BatchEntry The entry which will be sent to the server as an
+ update (HTTP PUT) request. The item must have a valid atom id
+ so that the server knows which entry to replace.
+ batch_id_string: str (optional) The batch ID to be used to reference
+ this batch operation in the results feed. If this parameter is None,
+ the current length of the feed's entry array will be used as a
+ count. See also comments for AddInsert.
+ """
+ entry = self.AddBatchEntry(entry=entry, batch_id_string=batch_id_string,
+ operation_string=BATCH_UPDATE)
+
+ def AddDelete(self, url_string=None, entry=None, batch_id_string=None):
+ """Adds a delete request to the batch request feed.
+
+ This method takes either the url_string which is the atom id of the item
+ to be deleted, or the entry itself. The atom id of the entry must be
+ present so that the server knows which entry should be deleted.
+
+ Args:
+ url_string: str (optional) The URL of the entry to be deleted. You can
+ find this URL in the text member of the atom id for an entry.
+ entry: BatchEntry (optional) The entry to be deleted.
+ batch_id_string: str (optional)
+
+ Raises:
+ MissingRequiredParameters: Raised if neither a url_string nor an entry
+ are provided in the request.
+ """
+ entry = self.AddBatchEntry(entry=entry, id_url_string=url_string,
+ batch_id_string=batch_id_string,
+ operation_string=BATCH_DELETE)
+
+ def AddQuery(self, url_string=None, entry=None, batch_id_string=None):
+ """Adds a query request to the batch request feed.
+
+    This method takes either a url_string, which is the query URL whose
+    results will be added to the result feed, or a BatchEntry that already
+    encapsulates the query URL. If a url_string is sent, it is wrapped in a
+    new BatchEntry which is added to the feed.
+
+ Args:
+ url_string: str (optional)
+ entry: BatchEntry (optional)
+ batch_id_string: str (optional)
+
+ Raises:
+ MissingRequiredParameters
+ """
+ entry = self.AddBatchEntry(entry=entry, id_url_string=url_string,
+ batch_id_string=batch_id_string,
+ operation_string=BATCH_QUERY)
+
+ def GetBatchLink(self):
+ for link in self.link:
+ if link.rel == 'http://schemas.google.com/g/2005#batch':
+ return link
+ return None
+
+
+def BatchFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(BatchFeed, xml_string)
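+
+# Example (sketch): assembling a batch request feed; new_entry and the URLs
+# below are placeholders, and batch ids default to each entry's position in
+# the feed when not supplied.
+#
+#   request_feed = BatchFeed()
+#   request_feed.AddInsert(entry=new_entry)
+#   request_feed.AddDelete(url_string='http://example.com/feeds/entries/1')
+#   request_feed.AddQuery(url_string='http://example.com/feeds/entries/2')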
+
+
+class EntryLink(atom.AtomBase):
+ """The gd:entryLink element"""
+
+ _tag = 'entryLink'
+ _namespace = GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ # The entry used to be an atom.Entry, now it is a GDataEntry.
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', GDataEntry)
+ _attributes['rel'] = 'rel'
+ _attributes['readOnly'] = 'read_only'
+ _attributes['href'] = 'href'
+
+ def __init__(self, href=None, read_only=None, rel=None,
+ entry=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.href = href
+ self.read_only = read_only
+ self.rel = rel
+ self.entry = entry
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def EntryLinkFromString(xml_string):
+ return atom.CreateClassFromXMLString(EntryLink, xml_string)
+
+
+class FeedLink(atom.AtomBase):
+ """The gd:feedLink element"""
+
+ _tag = 'feedLink'
+ _namespace = GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}feed' % atom.ATOM_NAMESPACE] = ('feed', GDataFeed)
+ _attributes['rel'] = 'rel'
+ _attributes['readOnly'] = 'read_only'
+ _attributes['countHint'] = 'count_hint'
+ _attributes['href'] = 'href'
+
+ def __init__(self, count_hint=None, href=None, read_only=None, rel=None,
+ feed=None, extension_elements=None, extension_attributes=None,
+ text=None):
+ self.count_hint = count_hint
+ self.href = href
+ self.read_only = read_only
+ self.rel = rel
+ self.feed = feed
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def FeedLinkFromString(xml_string):
+ return atom.CreateClassFromXMLString(FeedLink, xml_string)
diff --git a/python/gdata/acl/__init__.py b/python/gdata/acl/__init__.py
new file mode 100644
index 0000000..22071f7
--- /dev/null
+++ b/python/gdata/acl/__init__.py
@@ -0,0 +1,15 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/python/gdata/acl/data.py b/python/gdata/acl/data.py
new file mode 100644
index 0000000..c65359c
--- /dev/null
+++ b/python/gdata/acl/data.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the data classes of the Google Access Control List (ACL) Extension"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.data
+import gdata.opensearch.data
+
+
+GACL_TEMPLATE = '{http://schemas.google.com/acl/2007}%s'
+
+
+class AclRole(atom.core.XmlElement):
+ """Describes the role of an entry in an access control list."""
+ _qname = GACL_TEMPLATE % 'role'
+ value = 'value'
+
+
+class AclScope(atom.core.XmlElement):
+ """Describes the scope of an entry in an access control list."""
+ _qname = GACL_TEMPLATE % 'scope'
+ type = 'type'
+ value = 'value'
+
+
+class AclEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of an access control list (ACL)."""
+ scope = AclScope
+ role = AclRole
+
+
+class AclFeed(gdata.data.GDFeed):
+ """Describes a feed of an access control list (ACL)."""
+ entry = [AclEntry]
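+
+# Example (sketch): an ACL feed document can be turned into these classes,
+# assuming the atom.core.parse helper from this library version is available:
+#
+#   feed = atom.core.parse(acl_xml_string, AclFeed)
+#   for acl_entry in feed.entry:
+#     print acl_entry.scope.type, acl_entry.scope.value, acl_entry.role.value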
+
+
diff --git a/python/gdata/alt/__init__.py b/python/gdata/alt/__init__.py
new file mode 100644
index 0000000..742980e
--- /dev/null
+++ b/python/gdata/alt/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""This package's modules adapt the gdata library to run in other environments
+
+The first example is the appengine module which contains functions and
+classes which modify a GDataService object to run on Google App Engine.
+"""
diff --git a/python/gdata/alt/app_engine.py b/python/gdata/alt/app_engine.py
new file mode 100644
index 0000000..afa412d
--- /dev/null
+++ b/python/gdata/alt/app_engine.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Provides functions to persist serialized auth tokens in the datastore.
+
+The get_token and set_token functions should be used in conjunction with
+gdata.gauth's token_from_blob and token_to_blob to allow auth token objects
+to be reused across requests. It is up to your own code to ensure that the
+ token keys are unique.
+"""
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+from google.appengine.ext import db
+from google.appengine.api import memcache
+
+
+class Token(db.Model):
+ """Datastore Model which stores a serialized auth token."""
+ t = db.BlobProperty()
+
+
+def get_token(unique_key):
+ """Searches for a stored token with the desired key.
+
+ Checks memcache and then the datastore if required.
+
+ Args:
+ unique_key: str which uniquely identifies the desired auth token.
+
+ Returns:
+ A string encoding the auth token data. Use gdata.gauth.token_from_blob to
+ convert back into a usable token object. None if the token was not found
+ in memcache or the datastore.
+ """
+ token_string = memcache.get(unique_key)
+ if token_string is None:
+ # The token wasn't in memcache, so look in the datastore.
+ token = Token.get_by_key_name(unique_key)
+ if token is None:
+ return None
+ return token.t
+ return token_string
+
+
+def set_token(unique_key, token_str):
+ """Saves the serialized auth token in the datastore.
+
+ The token is also stored in memcache to speed up retrieval on a cache hit.
+
+ Args:
+ unique_key: The unique name for this token as a string. It is up to your
+ code to ensure that this token value is unique in your application.
+        Previous values will be silently overwritten.
+ token_str: A serialized auth token as a string. I expect that this string
+ will be generated by gdata.gauth.token_to_blob.
+
+ Returns:
+    True if the token was stored successfully, False if the token could not be
+ safely cached (if an old value could not be cleared). If the token was
+ set in memcache, but not in the datastore, this function will return None.
+ However, in that situation an exception will likely be raised.
+
+ Raises:
+ Datastore exceptions may be raised from the App Engine SDK in the event of
+ failure.
+ """
+ # First try to save in memcache.
+ result = memcache.set(unique_key, token_str)
+ # If memcache fails to save the value, clear the cached value.
+ if not result:
+ result = memcache.delete(unique_key)
+ # If we could not clear the cached value for this token, refuse to save.
+ if result == 0:
+ return False
+ # Save to the datastore.
+ if Token(key_name=unique_key, t=token_str).put():
+ return True
+ return None
+
+
+def delete_token(unique_key):
+ # Clear from memcache.
+ memcache.delete(unique_key)
+ # Clear from the datastore.
+ Token(key_name=unique_key).delete()
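+
+# Example (sketch): round-tripping a token through the datastore with the
+# gdata.gauth helpers mentioned above; 'user-123' is a placeholder key.
+#
+#   import gdata.gauth
+#   set_token('user-123', gdata.gauth.token_to_blob(token))
+#   blob = get_token('user-123')
+#   if blob:
+#     token = gdata.gauth.token_from_blob(blob)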
diff --git a/python/gdata/alt/appengine.py b/python/gdata/alt/appengine.py
new file mode 100644
index 0000000..2251621
--- /dev/null
+++ b/python/gdata/alt/appengine.py
@@ -0,0 +1,321 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Provides HTTP functions for gdata.service to use on Google App Engine
+
+AppEngineHttpClient: Provides an HTTP request method which uses App Engine's
+ urlfetch API. Set the http_client member of a GDataService object to an
+ instance of an AppEngineHttpClient to allow the gdata library to run on
+ Google App Engine.
+
+run_on_appengine: Function which will modify an existing GDataService object
+ to allow it to run on App Engine. It works by creating a new instance of
+ the AppEngineHttpClient and replacing the GDataService object's
+ http_client.
+"""
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import StringIO
+import pickle
+import atom.http_interface
+import atom.token_store
+from google.appengine.api import urlfetch
+from google.appengine.ext import db
+from google.appengine.api import users
+from google.appengine.api import memcache
+
+
+def run_on_appengine(gdata_service, store_tokens=True,
+ single_user_mode=False, deadline=None):
+ """Modifies a GDataService object to allow it to run on App Engine.
+
+ Args:
+ gdata_service: An instance of AtomService, GDataService, or any
+ of their subclasses which has an http_client member and a
+ token_store member.
+ store_tokens: Boolean, defaults to True. If True, the gdata_service
+                will attempt to add each token to its token_store when
+ SetClientLoginToken or SetAuthSubToken is called. If False
+ the tokens will not automatically be added to the
+ token_store.
+ single_user_mode: Boolean, defaults to False. If True, the current_token
+ member of gdata_service will be set when
+                     SetClientLoginToken or SetAuthSubToken is called. If set
+ to True, the current_token is set in the gdata_service
+ and anyone who accesses the object will use the same
+ token.
+
+ Note: If store_tokens is set to False and
+ single_user_mode is set to False, all tokens will be
+                     ignored, since the library assumes the tokens should not
+ be stored in the datastore and they should not be stored
+ in the gdata_service object. This will make it
+ impossible to make requests which require authorization.
+ deadline: int (optional) The number of seconds to wait for a response
+ before timing out on the HTTP request. If no deadline is
+                 specified, the default deadline for HTTP requests from App
+ Engine is used. The maximum is currently 10 (for 10 seconds).
+ The default deadline for App Engine is 5 seconds.
+ """
+ gdata_service.http_client = AppEngineHttpClient(deadline=deadline)
+ gdata_service.token_store = AppEngineTokenStore()
+ gdata_service.auto_store_tokens = store_tokens
+ gdata_service.auto_set_current_token = single_user_mode
+ return gdata_service
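+
+
+# A short usage sketch (illustrative, not part of the original module); the
+# service class and source name below are assumptions, and any AtomService or
+# GDataService subclass with an http_client and token_store works the same.
+def _example_run_on_appengine():
+  import gdata.service
+  client = gdata.service.GDataService(source='example-app')
+  # Swap in the urlfetch-based HTTP client and the datastore token store so
+  # the library can make requests from inside the App Engine sandbox.
+  run_on_appengine(client, store_tokens=True, single_user_mode=False,
+                   deadline=10)
+  return client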
+
+
+class AppEngineHttpClient(atom.http_interface.GenericHttpClient):
+ def __init__(self, headers=None, deadline=None):
+ self.debug = False
+ self.headers = headers or {}
+ self.deadline = deadline
+
+ def request(self, operation, url, data=None, headers=None):
+ """Performs an HTTP call to the server, supports GET, POST, PUT, and
+ DELETE.
+
+    Usage example, performing an HTTP GET on http://www.google.com/:
+ import atom.http
+ client = atom.http.HttpClient()
+ http_response = client.request('GET', 'http://www.google.com/')
+
+ Args:
+ operation: str The HTTP operation to be performed. This is usually one
+ of 'GET', 'POST', 'PUT', or 'DELETE'
+ data: filestream, list of parts, or other object which can be converted
+ to a string. Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, this method will
+ read a chunk of 100K bytes at a time and send them.
+ If the data is a list of parts to be sent, each part will be
+ evaluated and sent.
+ url: The full URL to which the request should be sent. Can be a string
+ or atom.url.Url.
+ headers: dict of strings. HTTP headers which should be sent
+ in the request.
+ """
+ all_headers = self.headers.copy()
+ if headers:
+ all_headers.update(headers)
+
+ # Construct the full payload.
+ # Assume that data is None or a string.
+ data_str = data
+ if data:
+ if isinstance(data, list):
+ # If data is a list of different objects, convert them all to strings
+ # and join them together.
+ converted_parts = [_convert_data_part(x) for x in data]
+ data_str = ''.join(converted_parts)
+ else:
+ data_str = _convert_data_part(data)
+
+ # If the list of headers does not include a Content-Length, attempt to
+ # calculate it based on the data object.
+ if data and 'Content-Length' not in all_headers:
+ all_headers['Content-Length'] = str(len(data_str))
+
+ # Set the content type to the default value if none was set.
+ if 'Content-Type' not in all_headers:
+ all_headers['Content-Type'] = 'application/atom+xml'
+
+ # Lookup the urlfetch operation which corresponds to the desired HTTP verb.
+ if operation == 'GET':
+ method = urlfetch.GET
+ elif operation == 'POST':
+ method = urlfetch.POST
+ elif operation == 'PUT':
+ method = urlfetch.PUT
+ elif operation == 'DELETE':
+ method = urlfetch.DELETE
+ else:
+ method = None
+ if self.deadline is None:
+ return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
+ method=method, headers=all_headers, follow_redirects=False))
+ return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
+ method=method, headers=all_headers, follow_redirects=False,
+ deadline=self.deadline))
+
+
+def _convert_data_part(data):
+ if not data or isinstance(data, str):
+ return data
+ elif hasattr(data, 'read'):
+ # data is a file like object, so read it completely.
+ return data.read()
+ # The data object was not a file.
+ # Try to convert to a string and send the data.
+ return str(data)
+
+
+class HttpResponse(object):
+ """Translates a urlfetch resoinse to look like an hhtplib resoinse.
+
+ Used to allow the resoinse from HttpRequest to be usable by gdata.service
+ methods.
+ """
+
+ def __init__(self, urlfetch_response):
+ self.body = StringIO.StringIO(urlfetch_response.content)
+ self.headers = urlfetch_response.headers
+ self.status = urlfetch_response.status_code
+ self.reason = ''
+
+ def read(self, length=None):
+ if not length:
+ return self.body.read()
+ else:
+ return self.body.read(length)
+
+ def getheader(self, name):
+ if not self.headers.has_key(name):
+ return self.headers[name.lower()]
+ return self.headers[name]
+
+
+class TokenCollection(db.Model):
+ """Datastore Model which associates auth tokens with the current user."""
+ user = db.UserProperty()
+ pickled_tokens = db.BlobProperty()
+
+
+class AppEngineTokenStore(atom.token_store.TokenStore):
+ """Stores the user's auth tokens in the App Engine datastore.
+
+ Tokens are only written to the datastore if a user is signed in (if
+ users.get_current_user() returns a user object).
+ """
+ def __init__(self):
+ self.user = None
+
+ def add_token(self, token):
+ """Associates the token with the current user and stores it.
+
+ If there is no current user, the token will not be stored.
+
+ Returns:
+ False if the token was not stored.
+ """
+ tokens = load_auth_tokens(self.user)
+ if not hasattr(token, 'scopes') or not token.scopes:
+ return False
+ for scope in token.scopes:
+ tokens[str(scope)] = token
+ key = save_auth_tokens(tokens, self.user)
+ if key:
+ return True
+ return False
+
+ def find_token(self, url):
+ """Searches the current user's collection of token for a token which can
+ be used for a request to the url.
+
+ Returns:
+ The stored token which belongs to the current user and is valid for the
+ desired URL. If there is no current user, or there is no valid user
+      token in the datastore, an atom.http_interface.GenericToken is returned.
+ """
+ if url is None:
+ return None
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ tokens = load_auth_tokens(self.user)
+ if url in tokens:
+ token = tokens[url]
+ if token.valid_for_scope(url):
+ return token
+ else:
+ del tokens[url]
+ save_auth_tokens(tokens, self.user)
+ for scope, token in tokens.iteritems():
+ if token.valid_for_scope(url):
+ return token
+ return atom.http_interface.GenericToken()
+
+ def remove_token(self, token):
+ """Removes the token from the current user's collection in the datastore.
+
+ Returns:
+ False if the token was not removed, this could be because the token was
+ not in the datastore, or because there is no current user.
+ """
+ token_found = False
+ scopes_to_delete = []
+ tokens = load_auth_tokens(self.user)
+ for scope, stored_token in tokens.iteritems():
+ if stored_token == token:
+ scopes_to_delete.append(scope)
+ token_found = True
+ for scope in scopes_to_delete:
+ del tokens[scope]
+ if token_found:
+ save_auth_tokens(tokens, self.user)
+ return token_found
+
+ def remove_all_tokens(self):
+ """Removes all of the current user's tokens from the datastore."""
+ save_auth_tokens({}, self.user)
+
+
+def save_auth_tokens(token_dict, user=None):
+ """Associates the tokens with the current user and writes to the datastore.
+
+  If there is no current user, the tokens are not written and this function
+ returns None.
+
+ Returns:
+ The key of the datastore entity containing the user's tokens, or None if
+ there was no current user.
+ """
+ if user is None:
+ user = users.get_current_user()
+ if user is None:
+ return None
+ memcache.set('gdata_pickled_tokens:%s' % user, pickle.dumps(token_dict))
+ user_tokens = TokenCollection.all().filter('user =', user).get()
+ if user_tokens:
+ user_tokens.pickled_tokens = pickle.dumps(token_dict)
+ return user_tokens.put()
+ else:
+ user_tokens = TokenCollection(
+ user=user,
+ pickled_tokens=pickle.dumps(token_dict))
+ return user_tokens.put()
+
+
+def load_auth_tokens(user=None):
+ """Reads a dictionary of the current user's tokens from the datastore.
+
+ If there is no current user (a user is not signed in to the app) or the user
+ does not have any tokens, an empty dictionary is returned.
+ """
+ if user is None:
+ user = users.get_current_user()
+ if user is None:
+ return {}
+ pickled_tokens = memcache.get('gdata_pickled_tokens:%s' % user)
+ if pickled_tokens:
+ return pickle.loads(pickled_tokens)
+ user_tokens = TokenCollection.all().filter('user =', user).get()
+ if user_tokens:
+ memcache.set('gdata_pickled_tokens:%s' % user, user_tokens.pickled_tokens)
+ return pickle.loads(user_tokens.pickled_tokens)
+ return {}
+
diff --git a/python/gdata/analytics/__init__.py b/python/gdata/analytics/__init__.py
new file mode 100644
index 0000000..8dfa20b
--- /dev/null
+++ b/python/gdata/analytics/__init__.py
@@ -0,0 +1,223 @@
+#!/usr/bin/python
+#
+# Original Copyright (C) 2006 Google Inc.
+# Refactored in 2009 to work for Google Analytics by Sal Uryasev at Juice Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Note that this module will not function without specifically adding
+# 'analytics': [ #Google Analytics
+# 'https://www.google.com/analytics/feeds/'],
+# to CLIENT_LOGIN_SCOPES in the gdata/service.py file
+
+"""Contains extensions to Atom objects used with Google Analytics."""
+
+__author__ = 'api.suryasev (Sal Uryasev)'
+
+import atom
+import gdata
+
+GAN_NAMESPACE = 'http://schemas.google.com/analytics/2009'
+
+class TableId(gdata.GDataEntry):
+ """tableId element."""
+ _tag = 'tableId'
+ _namespace = GAN_NAMESPACE
+
+class Property(gdata.GDataEntry):
+ _tag = 'property'
+ _namespace = GAN_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ _attributes['name'] = 'name'
+ _attributes['value'] = 'value'
+
+ def __init__(self, name=None, value=None, *args, **kwargs):
+ self.name = name
+ self.value = value
+ super(Property, self).__init__(*args, **kwargs)
+
+ def __str__(self):
+ return self.value
+
+ def __repr__(self):
+ return self.value
+
+class AccountListEntry(gdata.GDataEntry):
+ """The Google Documents version of an Atom Entry"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}tableId' % GAN_NAMESPACE] = ('tableId',
+ [TableId])
+ _children['{%s}property' % GAN_NAMESPACE] = ('property',
+ [Property])
+
+ def __init__(self, tableId=None, property=None,
+ *args, **kwargs):
+ self.tableId = tableId
+ self.property = property
+ super(AccountListEntry, self).__init__(*args, **kwargs)
+
+
+def AccountListEntryFromString(xml_string):
+ """Converts an XML string into an AccountListEntry object.
+
+ Args:
+    xml_string: string The XML describing an Account List feed entry.
+
+ Returns:
+    An AccountListEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(AccountListEntry, xml_string)
+
+
+class AccountListFeed(gdata.GDataFeed):
+ """A feed containing a list of Google Documents Items"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [AccountListEntry])
+
+
+def AccountListFeedFromString(xml_string):
+ """Converts an XML string into an AccountListFeed object.
+
+ Args:
+ xml_string: string The XML describing an AccountList feed.
+
+ Returns:
+ An AccountListFeed object corresponding to the given XML.
+    Each property is also exposed as a direct attribute on each entry
+    object for convenience. (e.g. entry.AccountName)
+ """
+ feed = atom.CreateClassFromXMLString(AccountListFeed, xml_string)
+ for entry in feed.entry:
+ for pro in entry.property:
+ entry.__dict__[pro.name.replace('ga:','')] = pro
+ for td in entry.tableId:
+ td.__dict__['value'] = td.text
+ return feed
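+
+
+# An illustrative sketch (not part of the original module) of the convenience
+# references added above; the 'ga:accountName' property name is an assumption
+# about what the Account List feed returns.
+def _example_account_names(xml_string):
+  feed = AccountListFeedFromString(xml_string)
+  # Each ga: property is reachable directly on the entry, minus the prefix.
+  return [(entry.title.text, entry.accountName.value)
+          for entry in feed.entry]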
+
+class Dimension(gdata.GDataEntry):
+ _tag = 'dimension'
+ _namespace = GAN_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ _attributes['name'] = 'name'
+ _attributes['value'] = 'value'
+ _attributes['type'] = 'type'
+ _attributes['confidenceInterval'] = 'confidence_interval'
+
+ def __init__(self, name=None, value=None, type=None,
+ confidence_interval = None, *args, **kwargs):
+ self.name = name
+ self.value = value
+ self.type = type
+ self.confidence_interval = confidence_interval
+ super(Dimension, self).__init__(*args, **kwargs)
+
+ def __str__(self):
+ return self.value
+
+ def __repr__(self):
+ return self.value
+
+class Metric(gdata.GDataEntry):
+ _tag = 'metric'
+ _namespace = GAN_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ _attributes['name'] = 'name'
+ _attributes['value'] = 'value'
+ _attributes['type'] = 'type'
+ _attributes['confidenceInterval'] = 'confidence_interval'
+
+ def __init__(self, name=None, value=None, type=None,
+ confidence_interval = None, *args, **kwargs):
+ self.name = name
+ self.value = value
+ self.type = type
+ self.confidence_interval = confidence_interval
+ super(Metric, self).__init__(*args, **kwargs)
+
+ def __str__(self):
+ return self.value
+
+ def __repr__(self):
+ return self.value
+
+class AnalyticsDataEntry(gdata.GDataEntry):
+ """The Google Analytics version of an Atom Entry"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ _children['{%s}dimension' % GAN_NAMESPACE] = ('dimension',
+ [Dimension])
+
+ _children['{%s}metric' % GAN_NAMESPACE] = ('metric',
+ [Metric])
+
+ def __init__(self, dimension=None, metric=None, *args, **kwargs):
+ self.dimension = dimension
+ self.metric = metric
+
+ super(AnalyticsDataEntry, self).__init__(*args, **kwargs)
+
+class AnalyticsDataFeed(gdata.GDataFeed):
+ """A feed containing a list of Google Analytics Data Feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [AnalyticsDataEntry])
+
+
+"""
+Data Feed
+"""
+
+def AnalyticsDataFeedFromString(xml_string):
+ """Converts an XML string into an AccountListFeed object.
+
+ Args:
+ xml_string: string The XML describing an AccountList feed.
+
+ Returns:
+ An AccountListFeed object corresponding to the given XML.
+ Each metric and dimension is also referenced directly from
+ the entry for easier access. (e.g. entry.keyword.value)
+ """
+ feed = atom.CreateClassFromXMLString(AnalyticsDataFeed, xml_string)
+ if feed.entry:
+ for entry in feed.entry:
+ for met in entry.metric:
+ entry.__dict__[met.name.replace('ga:','')] = met
+ if entry.dimension is not None:
+ for dim in entry.dimension:
+ entry.__dict__[dim.name.replace('ga:','')] = dim
+
+ return feed
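+
+
+# An illustrative sketch (not part of the original module): it assumes the
+# data feed was requested with the dimension 'ga:keyword' and the metric
+# 'ga:visits', mirroring the entry.keyword.value convenience described above.
+def _example_keyword_visits(xml_string):
+  feed = AnalyticsDataFeedFromString(xml_string)
+  rows = []
+  for entry in (feed.entry or []):
+    # keyword is a Dimension and visits is a Metric; both expose .value.
+    rows.append((entry.keyword.value, entry.visits.value))
+  return rows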
diff --git a/python/gdata/analytics/client.py b/python/gdata/analytics/client.py
new file mode 100644
index 0000000..3c040ad
--- /dev/null
+++ b/python/gdata/analytics/client.py
@@ -0,0 +1,313 @@
+#!/usr/bin/python
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Streamlines requests to the Google Analytics APIs."""
+
+__author__ = 'api.nickm@google.com (Nick Mihailovski)'
+
+
+import atom.data
+import gdata.client
+import gdata.analytics.data
+import gdata.gauth
+
+
+class AnalyticsClient(gdata.client.GDClient):
+ """Client extension for the Google Analytics API service."""
+
+ api_version = '2'
+ auth_service = 'analytics'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['analytics']
+ account_type = 'GOOGLE'
+
+ def __init__(self, auth_token=None, **kwargs):
+ """Initializes a new client for the Google Analytics Data Export API.
+
+ Args:
+ auth_token: gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken (optional) Authorizes this client to edit the user's data.
+ kwargs: The other parameters to pass to gdata.client.GDClient
+ constructor.
+ """
+
+ gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
+
+ def get_account_feed(self, feed_uri, auth_token=None, **kwargs):
+ """Makes a request to the Analytics API Account Feed.
+
+ Args:
+ feed_uri: str or gdata.analytics.AccountFeedQuery The Analytics Account
+ Feed uri to define what data to retrieve from the API. Can also be
+ used with a gdata.analytics.AccountFeedQuery object.
+ """
+
+ return self.get_feed(feed_uri,
+ desired_class=gdata.analytics.data.AccountFeed,
+ auth_token=auth_token,
+ **kwargs)
+
+ GetAccountFeed = get_account_feed
+
+ def get_data_feed(self, feed_uri, auth_token=None, **kwargs):
+ """Makes a request to the Analytics API Data Feed.
+
+ Args:
+      feed_uri: str or DataFeedQuery The Analytics Data
+          Feed uri to define what data to retrieve from the API. Can also be
+          used with a gdata.analytics.client.DataFeedQuery object.
+ """
+
+ return self.get_feed(feed_uri,
+ desired_class=gdata.analytics.data.DataFeed,
+ auth_token=auth_token,
+ **kwargs)
+
+ GetDataFeed = get_data_feed
+
+ def get_management_feed(self, feed_uri, auth_token=None, **kwargs):
+ """Makes a request to the Google Analytics Management API.
+
+ The Management API provides read-only access to configuration data for
+    Google Analytics and supersedes the Data Export API Account Feed.
+ The Management API supports 5 feeds: account, web property, profile,
+ goal, advanced segment.
+
+ You can access each feed through the respective management query class
+ below. All requests return the same data object.
+
+ Args:
+ feed_uri: str or AccountQuery, WebPropertyQuery,
+ ProfileQuery, GoalQuery, MgmtAdvSegFeedQuery
+ The Management API Feed uri to define which feed to retrieve.
+ Either use a string or one of the wrapper classes.
+ """
+
+ return self.get_feed(feed_uri,
+ desired_class=gdata.analytics.data.ManagementFeed,
+ auth_token=auth_token,
+ **kwargs)
+
+ GetMgmtFeed = GetManagementFeed = get_management_feed
+
+
+class AnalyticsBaseQuery(gdata.client.GDQuery):
+ """Abstracts common configuration across all query objects.
+
+ Attributes:
+ scheme: string The default scheme. Should always be https.
+ host: string The default host.
+ """
+
+ scheme = 'https'
+ host = 'www.google.com'
+
+
+class AccountFeedQuery(AnalyticsBaseQuery):
+ """Account Feed query class to simplify constructing Account Feed Urls.
+
+ To use this class, you can either pass a dict in the constructor that has
+  all the account feed query parameters as keys:
+ queryUrl = AccountFeedQuery({'max-results': '10000'})
+
+ Alternatively you can add new parameters directly to the query object:
+ queryUrl = AccountFeedQuery()
+ queryUrl.query['max-results'] = '10000'
+
+ Args:
+ query: dict (optional) Contains all the GA Data Feed query parameters
+ as keys.
+ """
+
+ path = '/analytics/feeds/accounts/default'
+
+ def __init__(self, query={}, **kwargs):
+ self.query = query
+ gdata.client.GDQuery(self, **kwargs)
+
+
+class DataFeedQuery(AnalyticsBaseQuery):
+ """Data Feed query class to simplify constructing Data Feed Urls.
+
+ To use this class, you can either pass a dict in the constructor that has
+ all the data feed query parameters as keys:
+ queryUrl = DataFeedQuery({'start-date': '2008-10-01'})
+
+ Alternatively you can add new parameters directly to the query object:
+ queryUrl = DataFeedQuery()
+ queryUrl.query['start-date'] = '2008-10-01'
+
+ Args:
+ query: dict (optional) Contains all the GA Data Feed query parameters
+ as keys.
+ """
+
+ path = '/analytics/feeds/data'
+
+ def __init__(self, query={}, **kwargs):
+ self.query = query
+ gdata.client.GDQuery(self, **kwargs)
+
+
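+# A brief usage sketch (illustrative, not part of the original module): the
+# table ID, date range, and metric are placeholder assumptions, and `client`
+# is assumed to be an AnalyticsClient that already holds a valid auth_token.
+def _example_data_feed(client):
+  query = DataFeedQuery({'ids': 'ga:1234',
+                         'start-date': '2008-10-01',
+                         'end-date': '2008-10-31',
+                         'metrics': 'ga:visits'})
+  # The query object can be passed directly as the feed URI (see
+  # AnalyticsClient.get_data_feed above).
+  return client.GetDataFeed(query)
+
+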
+class AccountQuery(AnalyticsBaseQuery):
+ """Management API Account Feed query class.
+
+ Example Usage:
+ queryUrl = AccountQuery()
+ queryUrl = AccountQuery({'max-results': 100})
+
+ queryUrl2 = AccountQuery()
+ queryUrl2.query['max-results'] = 100
+
+ Args:
+ query: dict (optional) A dictionary of query parameters.
+ """
+
+ path = '/analytics/feeds/datasources/ga/accounts'
+
+ def __init__(self, query={}, **kwargs):
+ self.query = query
+ gdata.client.GDQuery(self, **kwargs)
+
+class WebPropertyQuery(AnalyticsBaseQuery):
+ """Management API Web Property Feed query class.
+
+ Example Usage:
+ queryUrl = WebPropertyQuery()
+ queryUrl = WebPropertyQuery('123', {'max-results': 100})
+ queryUrl = WebPropertyQuery(acct_id='123',
+ query={'max-results': 100})
+
+ queryUrl2 = WebPropertyQuery()
+ queryUrl2.acct_id = '1234'
+ queryUrl2.query['max-results'] = 100
+
+ Args:
+ acct_id: string (optional) The account ID to filter results.
+ Default is ~all.
+ query: dict (optional) A dictionary of query parameters.
+ """
+
+ def __init__(self, acct_id='~all', query={}, **kwargs):
+ self.acct_id = acct_id
+ self.query = query
+ gdata.client.GDQuery(self, **kwargs)
+
+ @property
+ def path(self):
+ """Wrapper for path attribute."""
+ return ('/analytics/feeds/datasources/ga/accounts/%s/webproperties' %
+ self.acct_id)
+
+
+class ProfileQuery(AnalyticsBaseQuery):
+ """Management API Profile Feed query class.
+
+ Example Usage:
+ queryUrl = ProfileQuery()
+ queryUrl = ProfileQuery('123', 'UA-123-1', {'max-results': 100})
+ queryUrl = ProfileQuery(acct_id='123',
+ web_prop_id='UA-123-1',
+ query={'max-results': 100})
+
+ queryUrl2 = ProfileQuery()
+ queryUrl2.acct_id = '123'
+ queryUrl2.web_prop_id = 'UA-123-1'
+ queryUrl2.query['max-results'] = 100
+
+ Args:
+ acct_id: string (optional) The account ID to filter results.
+ Default is ~all.
+ web_prop_id: string (optional) The web property ID to filter results.
+ Default is ~all.
+ query: dict (optional) A dictionary of query parameters.
+ """
+
+ def __init__(self, acct_id='~all', web_prop_id='~all', query={}, **kwargs):
+ self.acct_id = acct_id
+ self.web_prop_id = web_prop_id
+ self.query = query
+ gdata.client.GDQuery(self, **kwargs)
+
+ @property
+ def path(self):
+ """Wrapper for path attribute."""
+ return ('/analytics/feeds/datasources/ga/accounts/%s/webproperties'
+ '/%s/profiles' % (self.acct_id, self.web_prop_id))
+
+
+class GoalQuery(AnalyticsBaseQuery):
+ """Management API Goal Feed query class.
+
+ Example Usage:
+ queryUrl = GoalQuery()
+ queryUrl = GoalQuery('123', 'UA-123-1', '555',
+ {'max-results': 100})
+ queryUrl = GoalQuery(acct_id='123',
+ web_prop_id='UA-123-1',
+ profile_id='555',
+ query={'max-results': 100})
+
+ queryUrl2 = GoalQuery()
+ queryUrl2.acct_id = '123'
+ queryUrl2.web_prop_id = 'UA-123-1'
+ queryUrl2.query['max-results'] = 100
+
+ Args:
+ acct_id: string (optional) The account ID to filter results.
+ Default is ~all.
+ web_prop_id: string (optional) The web property ID to filter results.
+ Default is ~all.
+ profile_id: string (optional) The profile ID to filter results.
+ Default is ~all.
+ query: dict (optional) A dictionary of query parameters.
+ """
+
+ def __init__(self, acct_id='~all', web_prop_id='~all', profile_id='~all',
+ query={}, **kwargs):
+ self.acct_id = acct_id
+ self.web_prop_id = web_prop_id
+ self.profile_id = profile_id
+ self.query = query or {}
+ gdata.client.GDQuery(self, **kwargs)
+
+ @property
+ def path(self):
+ """Wrapper for path attribute."""
+ return ('/analytics/feeds/datasources/ga/accounts/%s/webproperties'
+ '/%s/profiles/%s/goals' % (self.acct_id, self.web_prop_id,
+ self.profile_id))
+
+
+class AdvSegQuery(AnalyticsBaseQuery):
+ """Management API Goal Feed query class.
+
+ Example Usage:
+ queryUrl = AdvSegQuery()
+ queryUrl = AdvSegQuery({'max-results': 100})
+
+ queryUrl1 = AdvSegQuery()
+ queryUrl1.query['max-results'] = 100
+
+ Args:
+ query: dict (optional) A dictionary of query parameters.
+ """
+
+ path = '/analytics/feeds/datasources/ga/segments'
+
+ def __init__(self, query={}, **kwargs):
+ self.query = query
+ gdata.client.GDQuery(self, **kwargs)
+
diff --git a/python/gdata/analytics/data.py b/python/gdata/analytics/data.py
new file mode 100644
index 0000000..b628855
--- /dev/null
+++ b/python/gdata/analytics/data.py
@@ -0,0 +1,365 @@
+#!/usr/bin/python
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model classes for parsing and generating XML for both the
+Google Analytics Data Export and Management APIs. Although both APIs
+operate on different parts of Google Analytics, they share common XML
+elements and are released in the same module.
+
+The Management API supports 5 feeds all using the same ManagementFeed
+data class.
+"""
+
+__author__ = 'api.nickm@google.com (Nick Mihailovski)'
+
+
+import gdata.data
+import atom.core
+import atom.data
+
+
+# XML Namespace used in Google Analytics API entities.
+DXP_NS = '{http://schemas.google.com/analytics/2009}%s'
+GA_NS = '{http://schemas.google.com/ga/2009}%s'
+GD_NS = '{http://schemas.google.com/g/2005}%s'
+
+
+class GetProperty(object):
+ """Utility class to simplify retrieving Property objects."""
+
+ def get_property(self, name):
+ """Helper method to return a propery object by its name attribute.
+
+ Args:
+ name: string The name of the element to retrieve.
+
+ Returns:
+ A property object corresponding to the matching element.
+      If no property is found, None is returned.
+ """
+
+ for prop in self.property:
+ if prop.name == name:
+ return prop
+
+ return None
+
+ GetProperty = get_property
+
+
+class GetMetric(object):
+ """Utility class to simplify retrieving Metric objects."""
+
+ def get_metric(self, name):
+ """Helper method to return a propery value by its name attribute
+
+ Args:
+ name: string The name of the element to retrieve.
+
+ Returns:
+      A metric object corresponding to the matching element.
+      If no metric is found, None is returned.
+ """
+
+ for met in self.metric:
+ if met.name == name:
+ return met
+
+ return None
+
+ GetMetric = get_metric
+
+
+class GetDimension(object):
+ """Utility class to simplify retrieving Dimension objects."""
+
+ def get_dimension(self, name):
+ """Helper method to return a dimention object by its name attribute
+
+ Args:
+ name: string The name of the element to retrieve.
+
+ Returns:
+ A dimension object corresponding to the matching element.
+      If no dimension is found, None is returned.
+ """
+
+ for dim in self.dimension:
+ if dim.name == name:
+ return dim
+
+ return None
+
+ GetDimension = get_dimension
+
+
+class GaLinkFinder(object):
+ """Utility class to return specific links in Google Analytics feeds."""
+
+ def get_parent_links(self):
+ """Returns a list of all the parent links in an entry."""
+
+ links = []
+ for link in self.link:
+ if link.rel == link.parent():
+ links.append(link)
+
+ return links
+
+ GetParentLinks = get_parent_links
+
+ def get_child_links(self):
+ """Returns a list of all the child links in an entry."""
+
+ links = []
+ for link in self.link:
+ if link.rel == link.child():
+ links.append(link)
+
+ return links
+
+ GetChildLinks = get_child_links
+
+ def get_child_link(self, target_kind):
+ """Utility method to return one child link.
+
+ Returns:
+ A child link with the given target_kind. None if the target_kind was
+ not found.
+ """
+
+ for link in self.link:
+ if link.rel == link.child() and link.target_kind == target_kind:
+ return link
+
+ return None
+
+ GetChildLink = get_child_link
+
+
+class StartDate(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'startDate'
+
+
+class EndDate(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'endDate'
+
+
+class Metric(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'metric'
+ name = 'name'
+ type = 'type'
+ value = 'value'
+ confidence_interval = 'confidenceInterval'
+
+
+class Aggregates(atom.core.XmlElement, GetMetric):
+ """Analytics Data Feed """
+ _qname = DXP_NS % 'aggregates'
+ metric = [Metric]
+
+
+class TableId(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'tableId'
+
+
+class TableName(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'tableName'
+
+
+class Property(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'property'
+ name = 'name'
+ value = 'value'
+
+
+class Definition(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'definition'
+
+
+class Segment(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'segment'
+ id = 'id'
+ name = 'name'
+ definition = Definition
+
+
+class Engagement(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = GA_NS % 'engagement'
+ type = 'type'
+ comparison = 'comparison'
+ threshold_value = 'thresholdValue'
+
+
+class Step(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = GA_NS % 'step'
+ number = 'number'
+ name = 'name'
+ path = 'path'
+
+
+class Destination(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = GA_NS % 'destination'
+ step = [Step]
+ expression = 'expression'
+ case_sensitive = 'caseSensitive'
+ match_type = 'matchType'
+ step1_required = 'step1Required'
+
+
+class Goal(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = GA_NS % 'goal'
+ destination = Destination
+ engagement = Engagement
+ number = 'number'
+ name = 'name'
+ value = 'value'
+ active = 'active'
+
+
+class CustomVariable(atom.core.XmlElement):
+ """Analytics Data Feed """
+ _qname = GA_NS % 'customVariable'
+ index = 'index'
+ name = 'name'
+ scope = 'scope'
+
+
+class DataSource(atom.core.XmlElement, GetProperty):
+ """Analytics Data Feed """
+ _qname = DXP_NS % 'dataSource'
+ table_id = TableId
+ table_name = TableName
+ property = [Property]
+
+
+class Dimension(atom.core.XmlElement):
+ """Analytics Feed """
+ _qname = DXP_NS % 'dimension'
+ name = 'name'
+ value = 'value'
+
+
+class AnalyticsLink(atom.data.Link):
+ """Subclass of link """
+ target_kind = GD_NS % 'targetKind'
+
+ @classmethod
+ def parent(cls):
+ """Parent target_kind"""
+ return '%s#parent' % GA_NS[1:-3]
+
+ @classmethod
+ def child(cls):
+ """Child target_kind"""
+ return '%s#child' % GA_NS[1:-3]
+
+
+# Account Feed.
+class AccountEntry(gdata.data.GDEntry, GetProperty):
+ """Analytics Account Feed """
+ _qname = atom.data.ATOM_TEMPLATE % 'entry'
+ table_id = TableId
+ property = [Property]
+ goal = [Goal]
+ custom_variable = [CustomVariable]
+
+
+class AccountFeed(gdata.data.GDFeed):
+ """Analytics Account Feed """
+ _qname = atom.data.ATOM_TEMPLATE % 'feed'
+ segment = [Segment]
+ entry = [AccountEntry]
+
+
+# Data Feed.
+class DataEntry(gdata.data.GDEntry, GetMetric, GetDimension):
+ """Analytics Data Feed """
+ _qname = atom.data.ATOM_TEMPLATE % 'entry'
+ dimension = [Dimension]
+ metric = [Metric]
+
+ def get_object(self, name):
+ """Returns either a Dimension or Metric object with the same name as the
+ name parameter.
+
+ Args:
+ name: string The name of the object to retrieve.
+
+ Returns:
+      Either a Dimension or Metric object that has the same name as the
+      name parameter.
+ """
+
+ output = self.GetDimension(name)
+ if not output:
+ output = self.GetMetric(name)
+
+ return output
+
+ GetObject = get_object
+
+
+class DataFeed(gdata.data.GDFeed):
+ """Analytics Data Feed .
+
+ Although there is only one datasource, it is stored in an array to replicate
+ the design of the Java client library and ensure backwards compatibility if
+ new data sources are added in the future.
+ """
+
+ _qname = atom.data.ATOM_TEMPLATE % 'feed'
+ start_date = StartDate
+ end_date = EndDate
+ aggregates = Aggregates
+ data_source = [DataSource]
+ entry = [DataEntry]
+ segment = Segment
+
+
+# Management Feed.
+class ManagementEntry(gdata.data.GDEntry, GetProperty, GaLinkFinder):
+ """Analytics Managememt Entry ."""
+
+ _qname = atom.data.ATOM_TEMPLATE % 'entry'
+ kind = GD_NS % 'kind'
+ property = [Property]
+ goal = Goal
+ segment = Segment
+ link = [AnalyticsLink]
+
+
+class ManagementFeed(gdata.data.GDFeed):
+ """Analytics Management Feed .
+
+ This class holds the data for all 5 Management API feeds: Account,
+ Web Property, Profile, Goal, and Advanced Segment Feeds.
+ """
+
+ _qname = atom.data.ATOM_TEMPLATE % 'feed'
+ entry = [ManagementEntry]
+ kind = GD_NS % 'kind'
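+
+
+# A small illustrative sketch (not part of the original module): it assumes a
+# Data Feed response that was queried with the metric 'ga:visits', and uses
+# atom.core.parse to build the typed objects defined above.
+def _example_total_and_rows(xml_string):
+  feed = atom.core.parse(xml_string, DataFeed)
+  # The feed-wide total for the metric lives in the aggregates element.
+  total = feed.aggregates.get_metric('ga:visits').value
+  # Each entry exposes its dimensions and metrics through get_object().
+  rows = [entry.get_object('ga:visits').value for entry in feed.entry]
+  return total, rows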
diff --git a/python/gdata/analytics/service.py b/python/gdata/analytics/service.py
new file mode 100644
index 0000000..0638b48
--- /dev/null
+++ b/python/gdata/analytics/service.py
@@ -0,0 +1,331 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+# Refactored in 2009 to work for Google Analytics by Sal Uryasev at Juice Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+ AccountsService extends the GDataService to streamline Google Analytics
+ account information operations.
+
+ AnalyticsDataService: Provides methods to query google analytics data feeds.
+ Extends GDataService.
+
+ DataQuery: Queries a Google Analytics Data list feed.
+
+ AccountQuery: Queries a Google Analytics Account list feed.
+"""
+
+
+__author__ = 'api.suryasev (Sal Uryasev)'
+
+
+import urllib
+import atom
+import gdata.service
+import gdata.analytics
+
+
+class AccountsService(gdata.service.GDataService):
+
+ """Client extension for the Google Analytics Account List feed."""
+
+ def __init__(self, email="", password=None, source=None,
+ server='www.google.com/analytics', additional_headers=None,
+ **kwargs):
+ """Creates a client for the Google Analytics service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='analytics',
+ source=source, server=server, additional_headers=additional_headers,
+ **kwargs)
+
+ def QueryAccountListFeed(self, uri):
+ """Retrieves an AccountListFeed by retrieving a URI based off the Document
+ List feed, including any query parameters. An AccountListFeed object
+ can be used to construct these parameters.
+
+ Args:
+ uri: string The URI of the feed being retrieved possibly with query
+ parameters.
+
+ Returns:
+ An AccountListFeed object representing the feed returned by the server.
+ """
+ return self.Get(uri, converter=gdata.analytics.AccountListFeedFromString)
+
+ def GetAccountListEntry(self, uri):
+ """Retrieves a particular AccountListEntry by its unique URI.
+
+ Args:
+ uri: string The unique URI of an entry in an Account List feed.
+
+ Returns:
+      An AccountListEntry object representing the retrieved entry.
+ """
+ return self.Get(uri, converter=gdata.analytics.AccountListEntryFromString)
+
+ def GetAccountList(self, max_results=1000, text_query=None,
+ params=None, categories=None):
+ """Retrieves a feed containing all of a user's accounts and profiles."""
+ q = gdata.analytics.service.AccountQuery(max_results=max_results,
+ text_query=text_query,
+ params=params,
+ categories=categories);
+ return self.QueryAccountListFeed(q.ToUri())
+
+
+
+
+class AnalyticsDataService(gdata.service.GDataService):
+
+ """Client extension for the Google Analytics service Data List feed."""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='www.google.com/analytics', additional_headers=None,
+ **kwargs):
+ """Creates a client for the Google Analytics service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+          will be opened. Default value: 'www.google.com/analytics'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+
+ gdata.service.GDataService.__init__(self,
+ email=email, password=password, service='analytics', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+
+ def GetData(self, ids='', dimensions='', metrics='',
+ sort='', filters='', start_date='',
+ end_date='', start_index='',
+ max_results=''):
+ """Retrieves a feed containing a user's data
+
+ ids: comma-separated string of analytics accounts.
+ dimensions: comma-separated string of dimensions.
+ metrics: comma-separated string of metrics.
+ sort: comma-separated string of dimensions and metrics for sorting.
+            This may be prefixed with a minus to sort in reverse order.
+ (e.g. '-ga:keyword')
+            If omitted, the first dimension passed in will be used.
+ filters: comma-separated string of filter parameters.
+ (e.g. 'ga:keyword==google')
+ start_date: start date for data pull.
+ end_date: end date for data pull.
+ start_index: used in combination with max_results to pull more than 1000
+ entries. This defaults to 1.
+ max_results: maximum results that the pull will return. This defaults
+ to, and maxes out at 1000.
+ """
+ q = gdata.analytics.service.DataQuery(ids=ids,
+ dimensions=dimensions,
+ metrics=metrics,
+ filters=filters,
+ sort=sort,
+ start_date=start_date,
+ end_date=end_date,
+ start_index=start_index,
+ max_results=max_results);
+ return self.AnalyticsDataFeed(q.ToUri())
+
+ def AnalyticsDataFeed(self, uri):
+ """Retrieves an AnalyticsListFeed by retrieving a URI based off the
+ Document List feed, including any query parameters. An
+ AnalyticsListFeed object can be used to construct these parameters.
+
+ Args:
+ uri: string The URI of the feed being retrieved possibly with query
+ parameters.
+
+ Returns:
+      An AnalyticsDataFeed object representing the feed returned by the
+ server.
+ """
+ return self.Get(uri,
+ converter=gdata.analytics.AnalyticsDataFeedFromString)
+
+ """
+ Account Fetching
+ """
+
+ def QueryAccountListFeed(self, uri):
+ """Retrieves an Account ListFeed by retrieving a URI based off the Account
+ List feed, including any query parameters. A AccountQuery object can
+ be used to construct these parameters.
+
+ Args:
+ uri: string The URI of the feed being retrieved possibly with query
+ parameters.
+
+ Returns:
+ An AccountListFeed object representing the feed returned by the server.
+ """
+ return self.Get(uri, converter=gdata.analytics.AccountListFeedFromString)
+
+ def GetAccountListEntry(self, uri):
+ """Retrieves a particular AccountListEntry by its unique URI.
+
+ Args:
+ uri: string The unique URI of an entry in an Account List feed.
+
+ Returns:
+ An AccountListEntry object representing the retrieved entry.
+ """
+ return self.Get(uri, converter=gdata.analytics.AccountListEntryFromString)
+
+ def GetAccountList(self, username="default", max_results=1000,
+ start_index=1):
+ """Retrieves a feed containing all of a user's accounts and profiles.
+ The username parameter is soon to be deprecated, with 'default'
+ becoming the only allowed parameter.
+ """
+ if not username:
+ raise Exception("username is a required parameter")
+ q = gdata.analytics.service.AccountQuery(username=username,
+ max_results=max_results,
+ start_index=start_index);
+ return self.QueryAccountListFeed(q.ToUri())
+
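+
+# A condensed usage sketch (illustrative, not part of the original module):
+# the credentials, table ID, and date range below are placeholder values.
+def _example_get_data():
+  client = AnalyticsDataService(email='user@example.com', password='password',
+                                source='example-app')
+  client.ProgrammaticLogin()
+  # Pull keyword/visit pairs for October 2008 from the assumed profile.
+  return client.GetData(ids='ga:1234',
+                        dimensions='ga:keyword',
+                        metrics='ga:visits',
+                        start_date='2008-10-01',
+                        end_date='2008-10-31')
+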
+class DataQuery(gdata.service.Query):
+ """Object used to construct a URI to a data feed"""
+ def __init__(self, feed='/feeds/data', text_query=None,
+ params=None, categories=None, ids="",
+ dimensions="", metrics="", sort="", filters="",
+ start_date="", end_date="", start_index="",
+ max_results=""):
+ """Constructor for Analytics List Query
+
+ Args:
+ feed: string (optional) The path for the feed. (e.g. '/feeds/data')
+
+ text_query: string (optional) The contents of the q query parameter.
+ This string is URL escaped upon conversion to a URI.
+ params: dict (optional) Parameter value string pairs which become URL
+ params when translated to a URI. These parameters are added to
+ the query's items.
+ categories: list (optional) List of category strings which should be
+ included as query categories. See gdata.service.Query for
+ additional documentation.
+ ids: comma-separated string of analytics accounts.
+ dimensions: comma-separated string of dimensions.
+ metrics: comma-separated string of metrics.
+ sort: comma-separated string of dimensions and metrics.
+           This may be prefixed with a minus to sort in reverse order
+ (e.g. '-ga:keyword').
+           If omitted, the first dimension passed in will be used.
+ filters: comma-separated string of filter parameters
+ (e.g. 'ga:keyword==google').
+ start_date: start date for data pull.
+ end_date: end date for data pull.
+ start_index: used in combination with max_results to pull more than 1000
+ entries. This defaults to 1.
+ max_results: maximum results that the pull will return. This defaults
+ to, and maxes out at 1000.
+
+ Yields:
+      A DataQuery object used to construct a URI based on the Data
+      feed.
+ """
+ self.elements = {'ids': ids,
+ 'dimensions': dimensions,
+ 'metrics': metrics,
+ 'sort': sort,
+ 'filters': filters,
+ 'start-date': start_date,
+ 'end-date': end_date,
+ 'start-index': start_index,
+ 'max-results': max_results}
+
+ gdata.service.Query.__init__(self, feed, text_query, params, categories)
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI used to retrieve entries from the Analytics
+ List feed.
+ """
+ old_feed = self.feed
+ self.feed = '/'.join([old_feed]) + '?' + \
+ urllib.urlencode(dict([(key, value) for key, value in \
+ self.elements.iteritems() if value]))
+ new_feed = gdata.service.Query.ToUri(self)
+ self.feed = old_feed
+ return new_feed
+
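+
+# For illustration only (not part of the original module): a DataQuery built
+# with the placeholder values below renders its parameters into the feed URI;
+# empty elements are dropped, so only populated parameters appear.
+def _example_data_query_uri():
+  query = DataQuery(ids='ga:1234',
+                    dimensions='ga:keyword',
+                    metrics='ga:visits',
+                    start_date='2008-10-01',
+                    end_date='2008-10-31')
+  return query.ToUri()
+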
+
+class AccountQuery(gdata.service.Query):
+ """Object used to construct a URI to query the Google Account List feed"""
+ def __init__(self, feed='/feeds/accounts', start_index=1,
+ max_results=1000, username='default', text_query=None,
+ params=None, categories=None):
+ """Constructor for Account List Query
+
+ Args:
+      feed: string (optional) The path for the feed. (e.g. '/feeds/accounts')
+ text_query: string (optional) The contents of the q query parameter.
+ This string is URL escaped upon conversion to a URI.
+ params: dict (optional) Parameter value string pairs which become URL
+ params when translated to a URI. These parameters are added to
+ the query's items.
+ categories: list (optional) List of category strings which should be
+ included as query categories. See gdata.service.Query for
+ additional documentation.
+ username: string (deprecated) This value should now always be passed as
+ 'default'.
+
+ Yields:
+      An AccountQuery object used to construct a URI based on the Account
+      List feed.
+ """
+ self.max_results = max_results
+ self.start_index = start_index
+ self.username = username
+ gdata.service.Query.__init__(self, feed, text_query, params, categories)
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI used to retrieve entries from the Account
+ List feed.
+ """
+ old_feed = self.feed
+ self.feed = '/'.join([old_feed, self.username]) + '?' + \
+ '&'.join(['max-results=' + str(self.max_results),
+ 'start-index=' + str(self.start_index)])
+ new_feed = self.feed
+ self.feed = old_feed
+ return new_feed
diff --git a/python/gdata/apps/__init__.py b/python/gdata/apps/__init__.py
new file mode 100644
index 0000000..ebdf98e
--- /dev/null
+++ b/python/gdata/apps/__init__.py
@@ -0,0 +1,526 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007 SIOS Technology, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains objects used with Google Apps."""
+
+__author__ = 'tmatsuo@sios.com (Takashi MATSUO)'
+
+
+import atom
+import gdata
+
+
+# XML namespaces which are often used in Google Apps entity.
+APPS_NAMESPACE = 'http://schemas.google.com/apps/2006'
+APPS_TEMPLATE = '{http://schemas.google.com/apps/2006}%s'
+
+
+class EmailList(atom.AtomBase):
+ """The Google Apps EmailList element"""
+
+ _tag = 'emailList'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+
+ def __init__(self, name=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+def EmailListFromString(xml_string):
+ return atom.CreateClassFromXMLString(EmailList, xml_string)
+
+
+class Who(atom.AtomBase):
+ """The Google Apps Who element"""
+
+ _tag = 'who'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['rel'] = 'rel'
+ _attributes['email'] = 'email'
+
+ def __init__(self, rel=None, email=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.rel = rel
+ self.email = email
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+def WhoFromString(xml_string):
+ return atom.CreateClassFromXMLString(Who, xml_string)
+
+
+class Login(atom.AtomBase):
+ """The Google Apps Login element"""
+
+ _tag = 'login'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['userName'] = 'user_name'
+ _attributes['password'] = 'password'
+ _attributes['suspended'] = 'suspended'
+ _attributes['admin'] = 'admin'
+ _attributes['changePasswordAtNextLogin'] = 'change_password'
+ _attributes['agreedToTerms'] = 'agreed_to_terms'
+ _attributes['ipWhitelisted'] = 'ip_whitelisted'
+ _attributes['hashFunctionName'] = 'hash_function_name'
+
+ def __init__(self, user_name=None, password=None, suspended=None,
+ ip_whitelisted=None, hash_function_name=None,
+ admin=None, change_password=None, agreed_to_terms=None,
+ extension_elements=None, extension_attributes=None,
+ text=None):
+ self.user_name = user_name
+ self.password = password
+ self.suspended = suspended
+ self.admin = admin
+ self.change_password = change_password
+ self.agreed_to_terms = agreed_to_terms
+ self.ip_whitelisted = ip_whitelisted
+ self.hash_function_name = hash_function_name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def LoginFromString(xml_string):
+ return atom.CreateClassFromXMLString(Login, xml_string)
+
+
+class Quota(atom.AtomBase):
+ """The Google Apps Quota element"""
+
+ _tag = 'quota'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['limit'] = 'limit'
+
+ def __init__(self, limit=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.limit = limit
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def QuotaFromString(xml_string):
+ return atom.CreateClassFromXMLString(Quota, xml_string)
+
+
+class Name(atom.AtomBase):
+ """The Google Apps Name element"""
+
+ _tag = 'name'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['familyName'] = 'family_name'
+ _attributes['givenName'] = 'given_name'
+
+ def __init__(self, family_name=None, given_name=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.family_name = family_name
+ self.given_name = given_name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def NameFromString(xml_string):
+ return atom.CreateClassFromXMLString(Name, xml_string)
+
+
+class Nickname(atom.AtomBase):
+ """The Google Apps Nickname element"""
+
+ _tag = 'nickname'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+
+ def __init__(self, name=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.name = name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def NicknameFromString(xml_string):
+ return atom.CreateClassFromXMLString(Nickname, xml_string)
+
+
+class NicknameEntry(gdata.GDataEntry):
+ """A Google Apps flavor of an Atom Entry for Nickname"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}login' % APPS_NAMESPACE] = ('login', Login)
+ _children['{%s}nickname' % APPS_NAMESPACE] = ('nickname', Nickname)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ login=None, nickname=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated)
+ self.login = login
+ self.nickname = nickname
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def NicknameEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(NicknameEntry, xml_string)
+
+
+class NicknameFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Apps Nickname feed flavor of an Atom Feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [NicknameEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def NicknameFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(NicknameFeed, xml_string)
+
+
+class UserEntry(gdata.GDataEntry):
+ """A Google Apps flavor of an Atom Entry"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}login' % APPS_NAMESPACE] = ('login', Login)
+ _children['{%s}name' % APPS_NAMESPACE] = ('name', Name)
+ _children['{%s}quota' % APPS_NAMESPACE] = ('quota', Quota)
+ # This child may already be defined in GDataEntry, confirm before removing.
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+ _children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', Who)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ login=None, name=None, quota=None, who=None, feed_link=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated)
+ self.login = login
+ self.name = name
+ self.quota = quota
+ self.who = who
+ self.feed_link = feed_link or []
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def UserEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(UserEntry, xml_string)
+
+
+class UserFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Apps User feed flavor of an Atom Feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [UserEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def UserFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(UserFeed, xml_string)
+
+
+class EmailListEntry(gdata.GDataEntry):
+ """A Google Apps EmailList flavor of an Atom Entry"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}emailList' % APPS_NAMESPACE] = ('email_list', EmailList)
+ # Might be able to remove this _children entry.
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ email_list=None, feed_link=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated)
+ self.email_list = email_list
+ self.feed_link = feed_link or []
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def EmailListEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(EmailListEntry, xml_string)
+
+
+class EmailListFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Apps EmailList feed flavor of an Atom Feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [EmailListEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def EmailListFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(EmailListFeed, xml_string)
+
+
+class EmailListRecipientEntry(gdata.GDataEntry):
+ """A Google Apps EmailListRecipient flavor of an Atom Entry"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', Who)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ who=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated)
+ self.who = who
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def EmailListRecipientEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(EmailListRecipientEntry, xml_string)
+
+
+class EmailListRecipientFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Apps EmailListRecipient feed flavor of an Atom Feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [EmailListRecipientEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def EmailListRecipientFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(EmailListRecipientFeed, xml_string)
+
+
+class Property(atom.AtomBase):
+ """The Google Apps Property element"""
+
+ _tag = 'property'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+ _attributes['value'] = 'value'
+
+ def __init__(self, name=None, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def PropertyFromString(xml_string):
+ return atom.CreateClassFromXMLString(Property, xml_string)
+
+
+class PropertyEntry(gdata.GDataEntry):
+ """A Google Apps Property flavor of an Atom Entry"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}property' % APPS_NAMESPACE] = ('property', [Property])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ property=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated)
+ self.property = property
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def PropertyEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(PropertyEntry, xml_string)
+
+class PropertyFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Apps Property feed flavor of an Atom Feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PropertyEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+def PropertyFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(PropertyFeed, xml_string)
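
A minimal, illustrative sketch (not part of the patch) of how the *FromString helpers above are used: it parses a single apps:property element wrapped in an Atom entry. The apps namespace URI shown is an assumption about the module-level APPS_NAMESPACE constant referenced throughout this file.

    # Sketch only: parse an Atom entry carrying one apps:property element.
    # The apps namespace URI is assumed to match APPS_NAMESPACE.
    xml = ('<entry xmlns="http://www.w3.org/2005/Atom" '
           'xmlns:apps="http://schemas.google.com/apps/2006">'
           '<apps:property name="organizationName" value="Example Inc."/>'
           '</entry>')
    entry = PropertyEntryFromString(xml)
    for prop in entry.property:
      print prop.name, prop.value  # prints: organizationName Example Inc.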
diff --git a/python/gdata/apps/adminsettings/__init__.py b/python/gdata/apps/adminsettings/__init__.py
new file mode 100644
index 0000000..d284c7c
--- /dev/null
+++ b/python/gdata/apps/adminsettings/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/python/gdata/apps/adminsettings/service.py b/python/gdata/apps/adminsettings/service.py
new file mode 100644
index 0000000..c69fa36
--- /dev/null
+++ b/python/gdata/apps/adminsettings/service.py
@@ -0,0 +1,471 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow Google Apps domain administrators to set domain admin settings.
+
+ AdminSettingsService: Set admin settings."""
+
+__author__ = 'jlee@pbu.edu'
+
+
+import gdata.apps
+import gdata.apps.service
+import gdata.service
+
+
+API_VER='2.0'
+
+class AdminSettingsService(gdata.apps.service.PropertyService):
+ """Client for the Google Apps Admin Settings service."""
+
+ def _serviceUrl(self, setting_id, domain=None):
+ if domain is None:
+ domain = self.domain
+ return '/a/feeds/domain/%s/%s/%s' % (API_VER, domain, setting_id)
+
+ def genericGet(self, location):
+ """Generic HTTP Get Wrapper
+
+ Args:
+ location: relative uri to Get
+
+ Returns:
+ A dict containing the result of the get operation."""
+
+ uri = self._serviceUrl(location)
+ try:
+ return self._GetProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def GetDefaultLanguage(self):
+ """Gets Domain Default Language
+
+ Args:
+ None
+
+ Returns:
+ Default Language as a string. All possible values are listed at:
+ http://code.google.com/apis/apps/email_settings/developers_guide_protocol.html#GA_email_language_tags"""
+
+ result = self.genericGet('general/defaultLanguage')
+ return result['defaultLanguage']
+
+ def UpdateDefaultLanguage(self, defaultLanguage):
+ """Updates Domain Default Language
+
+ Args:
+ defaultLanguage: Domain Language to set
+ possible values are at:
+ http://code.google.com/apis/apps/email_settings/developers_guide_protocol.html#GA_email_language_tags
+
+ Returns:
+ A dict containing the result of the put operation"""
+
+ uri = self._serviceUrl('general/defaultLanguage')
+ properties = {'defaultLanguage': defaultLanguage}
+ return self._PutProperties(uri, properties)
+
+ def GetOrganizationName(self):
+    """Gets Organization Name
+
+ Args:
+ None
+
+ Returns:
+ Organization Name as a string."""
+
+ result = self.genericGet('general/organizationName')
+ return result['organizationName']
+
+
+ def UpdateOrganizationName(self, organizationName):
+ """Updates Organization Name
+
+ Args:
+ organizationName: Name of organization
+
+ Returns:
+ A dict containing the result of the put operation"""
+
+ uri = self._serviceUrl('general/organizationName')
+ properties = {'organizationName': organizationName}
+ return self._PutProperties(uri, properties)
+
+ def GetMaximumNumberOfUsers(self):
+ """Gets Maximum Number of Users Allowed
+
+ Args:
+ None
+
+ Returns: An integer, the maximum number of users"""
+
+ result = self.genericGet('general/maximumNumberOfUsers')
+ return int(result['maximumNumberOfUsers'])
+
+ def GetCurrentNumberOfUsers(self):
+ """Gets Current Number of Users
+
+ Args:
+ None
+
+ Returns: An integer, the current number of users"""
+
+ result = self.genericGet('general/currentNumberOfUsers')
+ return int(result['currentNumberOfUsers'])
+
+ def IsDomainVerified(self):
+ """Is the domain verified
+
+ Args:
+ None
+
+ Returns: Boolean, is domain verified"""
+
+ result = self.genericGet('accountInformation/isVerified')
+ if result['isVerified'] == 'true':
+ return True
+ else:
+ return False
+
+ def GetSupportPIN(self):
+ """Gets Support PIN
+
+ Args:
+ None
+
+ Returns: A string, the Support PIN"""
+
+ result = self.genericGet('accountInformation/supportPIN')
+ return result['supportPIN']
+
+ def GetEdition(self):
+ """Gets Google Apps Domain Edition
+
+ Args:
+ None
+
+ Returns: A string, the domain's edition (premier, education, partner)"""
+
+ result = self.genericGet('accountInformation/edition')
+ return result['edition']
+
+ def GetCustomerPIN(self):
+ """Gets Customer PIN
+
+ Args:
+ None
+
+ Returns: A string, the customer PIN"""
+
+ result = self.genericGet('accountInformation/customerPIN')
+ return result['customerPIN']
+
+ def GetCreationTime(self):
+ """Gets Domain Creation Time
+
+ Args:
+ None
+
+ Returns: A string, the domain's creation time"""
+
+ result = self.genericGet('accountInformation/creationTime')
+ return result['creationTime']
+
+ def GetCountryCode(self):
+ """Gets Domain Country Code
+
+ Args:
+ None
+
+ Returns: A string, the domain's country code. Possible values at:
+ http://www.iso.org/iso/country_codes/iso_3166_code_lists/english_country_names_and_code_elements.htm"""
+
+ result = self.genericGet('accountInformation/countryCode')
+ return result['countryCode']
+
+ def GetAdminSecondaryEmail(self):
+ """Gets Domain Admin Secondary Email Address
+
+ Args:
+ None
+
+ Returns: A string, the secondary email address for domain admin"""
+
+ result = self.genericGet('accountInformation/adminSecondaryEmail')
+ return result['adminSecondaryEmail']
+
+ def UpdateAdminSecondaryEmail(self, adminSecondaryEmail):
+    """Updates Domain Admin Secondary Email Address
+
+ Args:
+ adminSecondaryEmail: string, secondary email address of admin
+
+ Returns: A dict containing the result of the put operation"""
+
+ uri = self._serviceUrl('accountInformation/adminSecondaryEmail')
+ properties = {'adminSecondaryEmail': adminSecondaryEmail}
+ return self._PutProperties(uri, properties)
+
+ def GetDomainLogo(self):
+ """Gets Domain Logo
+
+    This function does not use the Google Apps Admin Settings API;
+    it performs an HTTP GET of a URL specific to the Google Apps domain.
+    It is included for completeness' sake.
+
+ Args:
+ None
+
+ Returns: binary image file"""
+
+ import urllib
+ url = 'http://www.google.com/a/cpanel/'+self.domain+'/images/logo.gif'
+ response = urllib.urlopen(url)
+ return response.read()
+
+ def UpdateDomainLogo(self, logoImage):
+ """Update Domain's Custom Logo
+
+ Args:
+ logoImage: binary image data
+
+ Returns: A dict containing the result of the put operation"""
+
+    from base64 import b64encode
+    uri = self._serviceUrl('appearance/customLogo')
+    properties = {'logoImage': b64encode(logoImage)}
+ return self._PutProperties(uri, properties)
+
+ def GetCNAMEVerificationStatus(self):
+ """Gets Domain CNAME Verification Status
+
+ Args:
+ None
+
+ Returns: A dict {recordName, verified, verifiedMethod}"""
+
+ return self.genericGet('verification/cname')
+
+ def UpdateCNAMEVerificationStatus(self, verified):
+ """Updates CNAME Verification Status
+
+ Args:
+ verified: boolean, True will retry verification process
+
+ Returns: A dict containing the result of the put operation"""
+
+ uri = self._serviceUrl('verification/cname')
+ properties = self.GetCNAMEVerificationStatus()
+ properties['verified'] = verified
+ return self._PutProperties(uri, properties)
+
+ def GetMXVerificationStatus(self):
+ """Gets Domain MX Verification Status
+
+ Args:
+ None
+
+ Returns: A dict {verified, verifiedMethod}"""
+
+ return self.genericGet('verification/mx')
+
+ def UpdateMXVerificationStatus(self, verified):
+ """Updates MX Verification Status
+
+ Args:
+ verified: boolean, True will retry verification process
+
+ Returns: A dict containing the result of the put operation"""
+
+ uri = self._serviceUrl('verification/mx')
+ properties = self.GetMXVerificationStatus()
+ properties['verified'] = verified
+ return self._PutProperties(uri, properties)
+
+ def GetSSOSettings(self):
+ """Gets Domain Single Sign-On Settings
+
+ Args:
+ None
+
+ Returns: A dict {samlSignonUri, samlLogoutUri, changePasswordUri, enableSSO, ssoWhitelist, useDomainSpecificIssuer}"""
+
+ return self.genericGet('sso/general')
+
+ def UpdateSSOSettings(self, enableSSO=None, samlSignonUri=None,
+ samlLogoutUri=None, changePasswordUri=None,
+ ssoWhitelist=None, useDomainSpecificIssuer=None):
+ """Update SSO Settings.
+
+ Args:
+ enableSSO: boolean, SSO Master on/off switch
+ samlSignonUri: string, SSO Login Page
+ samlLogoutUri: string, SSO Logout Page
+      changePasswordUri: string, SSO Password Change Page
+ ssoWhitelist: string, Range of IP Addresses which will see SSO
+ useDomainSpecificIssuer: boolean, Include Google Apps Domain in Issuer
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('sso/general')
+
+ #Get current settings, replace Nones with ''
+ properties = self.GetSSOSettings()
+ if properties['samlSignonUri'] == None:
+ properties['samlSignonUri'] = ''
+ if properties['samlLogoutUri'] == None:
+ properties['samlLogoutUri'] = ''
+ if properties['changePasswordUri'] == None:
+ properties['changePasswordUri'] = ''
+ if properties['ssoWhitelist'] == None:
+ properties['ssoWhitelist'] = ''
+
+ #update only the values we were passed
+ if enableSSO != None:
+ properties['enableSSO'] = gdata.apps.service._bool2str(enableSSO)
+ if samlSignonUri != None:
+ properties['samlSignonUri'] = samlSignonUri
+ if samlLogoutUri != None:
+ properties['samlLogoutUri'] = samlLogoutUri
+ if changePasswordUri != None:
+ properties['changePasswordUri'] = changePasswordUri
+ if ssoWhitelist != None:
+ properties['ssoWhitelist'] = ssoWhitelist
+ if useDomainSpecificIssuer != None:
+ properties['useDomainSpecificIssuer'] = gdata.apps.service._bool2str(useDomainSpecificIssuer)
+
+ return self._PutProperties(uri, properties)
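
A minimal usage sketch of the read-modify-write pattern above (not part of the patch), assuming `service` is an already authenticated AdminSettingsService and using a made-up sign-on URL:

    # Sketch only: replace the SAML sign-on page; the remaining SSO properties
    # are re-read via GetSSOSettings() inside UpdateSSOSettings() and written
    # back unchanged.  'sso.example.com' is a placeholder value.
    before = service.GetSSOSettings()
    service.UpdateSSOSettings(samlSignonUri='https://sso.example.com/login')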
+
+ def GetSSOKey(self):
+ """Gets Domain Single Sign-On Signing Key
+
+ Args:
+ None
+
+ Returns: A dict {modulus, exponent, algorithm, format}"""
+
+ return self.genericGet('sso/signingkey')
+
+ def UpdateSSOKey(self, signingKey):
+ """Update SSO Settings.
+
+ Args:
+ signingKey: string, public key to be uploaded
+
+ Returns:
+ A dict containing the result of the update operation."""
+
+ uri = self._serviceUrl('sso/signingkey')
+ properties = {'signingKey': signingKey}
+ return self._PutProperties(uri, properties)
+
+ def IsUserMigrationEnabled(self):
+ """Is User Migration Enabled
+
+ Args:
+ None
+
+ Returns:
+ boolean, is user migration enabled"""
+
+ result = self.genericGet('email/migration')
+ if result['enableUserMigration'] == 'true':
+ return True
+ else:
+ return False
+
+ def UpdateUserMigrationStatus(self, enableUserMigration):
+ """Update User Migration Status
+
+ Args:
+ enableUserMigration: boolean, user migration enable/disable
+
+ Returns:
+ A dict containing the result of the update operation."""
+
+ uri = self._serviceUrl('email/migration')
+ properties = {'enableUserMigration': enableUserMigration}
+ return self._PutProperties(uri, properties)
+
+ def GetOutboundGatewaySettings(self):
+ """Get Outbound Gateway Settings
+
+ Args:
+ None
+
+ Returns:
+ A dict {smartHost, smtpMode}"""
+
+ uri = self._serviceUrl('email/gateway')
+ try:
+ return self._GetProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+ except TypeError:
+ #if no outbound gateway is set, we get a TypeError,
+ #catch it and return nothing...
+ return {'smartHost': None, 'smtpMode': None}
+
+ def UpdateOutboundGatewaySettings(self, smartHost=None, smtpMode=None):
+ """Update Outbound Gateway Settings
+
+ Args:
+ smartHost: string, ip address or hostname of outbound gateway
+ smtpMode: string, SMTP or SMTP_TLS
+
+ Returns:
+ A dict containing the result of the update operation."""
+
+ uri = self._serviceUrl('email/gateway')
+
+ #Get current settings, replace Nones with ''
+    properties = self.GetOutboundGatewaySettings()
+ if properties['smartHost'] == None:
+ properties['smartHost'] = ''
+ if properties['smtpMode'] == None:
+ properties['smtpMode'] = ''
+
+ #If we were passed new values for smartHost or smtpMode, update them
+ if smartHost != None:
+ properties['smartHost'] = smartHost
+ if smtpMode != None:
+ properties['smtpMode'] = smtpMode
+
+ return self._PutProperties(uri, properties)
+
+ def AddEmailRoute(self, routeDestination, routeRewriteTo, routeEnabled, bounceNotifications, accountHandling):
+ """Adds Domain Email Route
+
+ Args:
+ routeDestination: string, destination ip address or hostname
+      routeRewriteTo: boolean, rewrite SMTP envelope To:
+      routeEnabled: boolean, enable/disable email routing
+      bounceNotifications: boolean, send bounce notifications to sender
+ accountHandling: string, which to route, "allAccounts", "provisionedAccounts", "unknownAccounts"
+
+ Returns:
+ A dict containing the result of the update operation."""
+
+ uri = self._serviceUrl('emailrouting')
+ properties = {}
+ properties['routeDestination'] = routeDestination
+ properties['routeRewriteTo'] = gdata.apps.service._bool2str(routeRewriteTo)
+ properties['routeEnabled'] = gdata.apps.service._bool2str(routeEnabled)
+ properties['bounceNotifications'] = gdata.apps.service._bool2str(bounceNotifications)
+ properties['accountHandling'] = accountHandling
+ return self._PostProperties(uri, properties)
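
A short end-to-end sketch of AdminSettingsService (not part of the patch). The domain and credentials are placeholders, and the email/password/domain constructor plus ProgrammaticLogin() are assumptions about what this class inherits from gdata.apps.service.PropertyService:

    # Sketch only: read a few settings and push one update.
    service = AdminSettingsService(email='admin@example.com',
                                   password='secret',
                                   domain='example.com')
    service.ProgrammaticLogin()
    print service.GetOrganizationName()
    print service.GetCurrentNumberOfUsers()
    service.UpdateOutboundGatewaySettings(smartHost='smtp.example.com',
                                          smtpMode='SMTP')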
diff --git a/python/gdata/apps/audit/__init__.py b/python/gdata/apps/audit/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/python/gdata/apps/audit/__init__.py
@@ -0,0 +1 @@
+
diff --git a/python/gdata/apps/audit/service.py b/python/gdata/apps/audit/service.py
new file mode 100644
index 0000000..d8cf72c
--- /dev/null
+++ b/python/gdata/apps/audit/service.py
@@ -0,0 +1,277 @@
+# Copyright (C) 2008 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow Google Apps domain administrators to audit user data.
+
+ AuditService: Set auditing."""
+
+__author__ = 'jlee@pbu.edu'
+
+from base64 import b64encode
+
+import gdata.apps
+import gdata.apps.service
+import gdata.service
+
+class AuditService(gdata.apps.service.PropertyService):
+ """Client for the Google Apps Audit service."""
+
+ def _serviceUrl(self, setting_id, domain=None, user=None):
+ if domain is None:
+ domain = self.domain
+ if user is None:
+ return '/a/feeds/compliance/audit/%s/%s' % (setting_id, domain)
+ else:
+ return '/a/feeds/compliance/audit/%s/%s/%s' % (setting_id, domain, user)
+
+ def updatePGPKey(self, pgpkey):
+ """Updates Public PGP Key Google uses to encrypt audit data
+
+ Args:
+ pgpkey: string, ASCII text of PGP Public Key to be used
+
+ Returns:
+ A dict containing the result of the POST operation."""
+
+ uri = self._serviceUrl('publickey')
+ b64pgpkey = b64encode(pgpkey)
+ properties = {}
+ properties['publicKey'] = b64pgpkey
+ return self._PostProperties(uri, properties)
+
+ def createEmailMonitor(self, source_user, destination_user, end_date,
+ begin_date=None, incoming_headers_only=False,
+ outgoing_headers_only=False, drafts=False,
+ drafts_headers_only=False, chats=False,
+ chats_headers_only=False):
+    """Creates an email monitor, forwarding the source_user's emails/chats
+
+ Args:
+ source_user: string, the user whose email will be audited
+ destination_user: string, the user to receive the audited email
+ end_date: string, the date the audit will end in
+ "yyyy-MM-dd HH:mm" format, required
+ begin_date: string, the date the audit will start in
+ "yyyy-MM-dd HH:mm" format, leave blank to use current time
+ incoming_headers_only: boolean, whether to audit only the headers of
+ mail delivered to source user
+ outgoing_headers_only: boolean, whether to audit only the headers of
+ mail sent from the source user
+ drafts: boolean, whether to audit draft messages of the source user
+ drafts_headers_only: boolean, whether to audit only the headers of
+ mail drafts saved by the user
+ chats: boolean, whether to audit archived chats of the source user
+ chats_headers_only: boolean, whether to audit only the headers of
+ archived chats of the source user
+
+ Returns:
+ A dict containing the result of the POST operation."""
+
+ uri = self._serviceUrl('mail/monitor', user=source_user)
+ properties = {}
+ properties['destUserName'] = destination_user
+ if begin_date is not None:
+ properties['beginDate'] = begin_date
+ properties['endDate'] = end_date
+ if incoming_headers_only:
+ properties['incomingEmailMonitorLevel'] = 'HEADER_ONLY'
+ else:
+ properties['incomingEmailMonitorLevel'] = 'FULL_MESSAGE'
+ if outgoing_headers_only:
+ properties['outgoingEmailMonitorLevel'] = 'HEADER_ONLY'
+ else:
+ properties['outgoingEmailMonitorLevel'] = 'FULL_MESSAGE'
+ if drafts:
+ if drafts_headers_only:
+ properties['draftMonitorLevel'] = 'HEADER_ONLY'
+ else:
+ properties['draftMonitorLevel'] = 'FULL_MESSAGE'
+ if chats:
+ if chats_headers_only:
+ properties['chatMonitorLevel'] = 'HEADER_ONLY'
+ else:
+ properties['chatMonitorLevel'] = 'FULL_MESSAGE'
+ return self._PostProperties(uri, properties)
+
+ def getEmailMonitors(self, user):
+    """Gets the email monitors for the given user
+
+ Args:
+ user: string, the user to retrieve email monitors for
+
+ Returns:
+      A list of dicts, the results of the GET operation
+
+ """
+ uri = self._serviceUrl('mail/monitor', user=user)
+ return self._GetPropertiesList(uri)
+
+ def deleteEmailMonitor(self, source_user, destination_user):
+ """Deletes the email monitor for the given user
+
+ Args:
+ source_user: string, the user who is being monitored
+      destination_user: string, the user who receives the monitored emails
+
+ Returns:
+ Nothing
+ """
+
+ uri = self._serviceUrl('mail/monitor', user=source_user+'/'+destination_user)
+ try:
+ return self._DeleteProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def createAccountInformationRequest(self, user):
+ """Creates a request for account auditing details
+
+ Args:
+ user: string, the user to request account information for
+
+ Returns:
+ A dict containing the result of the post operation."""
+
+ uri = self._serviceUrl('account', user=user)
+ properties = {}
+ #XML Body is left empty
+ try:
+ return self._PostProperties(uri, properties)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def getAccountInformationRequestStatus(self, user, request_id):
+ """Gets the status of an account auditing request
+
+ Args:
+ user: string, the user whose account auditing details were requested
+ request_id: string, the request_id
+
+ Returns:
+ A dict containing the result of the get operation."""
+
+ uri = self._serviceUrl('account', user=user+'/'+request_id)
+ try:
+ return self._GetProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def getAllAccountInformationRequestsStatus(self):
+ """Gets the status of all account auditing requests for the domain
+
+ Args:
+ None
+
+ Returns:
+      A list of dicts, the results of the GET operation
+ """
+
+ uri = self._serviceUrl('account')
+ return self._GetPropertiesList(uri)
+
+
+ def deleteAccountInformationRequest(self, user, request_id):
+ """Deletes the request for account auditing information
+
+ Args:
+ user: string, the user whose account auditing details were requested
+ request_id: string, the request_id
+
+ Returns:
+ Nothing
+ """
+
+ uri = self._serviceUrl('account', user=user+'/'+request_id)
+ try:
+ return self._DeleteProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def createMailboxExportRequest(self, user, begin_date=None, end_date=None, include_deleted=False, search_query=None, headers_only=False):
+ """Creates a mailbox export request
+
+ Args:
+ user: string, the user whose mailbox export is being requested
+ begin_date: string, date of earliest emails to export, optional, defaults to date of account creation
+ format is 'yyyy-MM-dd HH:mm'
+ end_date: string, date of latest emails to export, optional, defaults to current date
+ format is 'yyyy-MM-dd HH:mm'
+ include_deleted: boolean, whether to include deleted emails in export, mutually exclusive with search_query
+      search_query: string, gmail style search query, matched emails will be exported, mutually exclusive with include_deleted
+      headers_only: boolean, whether to export only the headers of the matched emails
+
+ Returns:
+ A dict containing the result of the post operation."""
+
+ uri = self._serviceUrl('mail/export', user=user)
+ properties = {}
+ if begin_date is not None:
+ properties['beginDate'] = begin_date
+ if end_date is not None:
+ properties['endDate'] = end_date
+ if include_deleted is not None:
+ properties['includeDeleted'] = gdata.apps.service._bool2str(include_deleted)
+ if search_query is not None:
+ properties['searchQuery'] = search_query
+ if headers_only is True:
+ properties['packageContent'] = 'HEADER_ONLY'
+ else:
+ properties['packageContent'] = 'FULL_MESSAGE'
+ return self._PostProperties(uri, properties)
+
+ def getMailboxExportRequestStatus(self, user, request_id):
+    """Gets the status of a mailbox export request
+
+ Args:
+      user: string, the user whose mailbox export was requested
+ request_id: string, the request_id
+
+ Returns:
+ A dict containing the result of the get operation."""
+
+ uri = self._serviceUrl('mail/export', user=user+'/'+request_id)
+ try:
+ return self._GetProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def getAllMailboxExportRequestsStatus(self):
+ """Gets the status of all mailbox export requests for the domain
+
+ Args:
+ None
+
+ Returns:
+      A list of dicts, the results of the GET operation
+ """
+
+ uri = self._serviceUrl('mail/export')
+ return self._GetPropertiesList(uri)
+
+
+ def deleteMailboxExportRequest(self, user, request_id):
+ """Deletes the request for mailbox export
+
+ Args:
+      user: string, the user whose mailbox export was requested
+ request_id: string, the request_id
+
+ Returns:
+ Nothing
+ """
+
+ uri = self._serviceUrl('mail/export', user=user+'/'+request_id)
+ try:
+ return self._DeleteProperties(uri)
+ except gdata.service.RequestError, e:
+      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
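
A hedged sketch of the mailbox-export flow above (not part of the patch). The domain, user, and credentials are placeholders; the constructor/login calls and the 'requestId'/'status' response keys are assumptions, not confirmed by this patch:

    # Sketch only: request a mailbox export, then poll its status.
    service = AuditService(email='admin@example.com', password='secret',
                           domain='example.com')
    service.ProgrammaticLogin()
    request = service.createMailboxExportRequest('jdoe', include_deleted=True)
    # 'requestId' and 'status' are assumed property names in the response.
    status = service.getMailboxExportRequestStatus('jdoe', request['requestId'])
    print status['status']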
diff --git a/python/gdata/apps/emailsettings/__init__.py b/python/gdata/apps/emailsettings/__init__.py
new file mode 100644
index 0000000..275c6a0
--- /dev/null
+++ b/python/gdata/apps/emailsettings/__init__.py
@@ -0,0 +1,15 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/python/gdata/apps/emailsettings/client.py b/python/gdata/apps/emailsettings/client.py
new file mode 100644
index 0000000..ffab889
--- /dev/null
+++ b/python/gdata/apps/emailsettings/client.py
@@ -0,0 +1,400 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""EmailSettingsClient simplifies Email Settings API calls.
+
+EmailSettingsClient extends gdata.client.GDClient to ease interaction with
+the Google Apps Email Settings API. These interactions include the ability
+to create labels, filters, aliases, and update web-clip, forwarding, POP,
+IMAP, vacation-responder, signature, language, and general settings.
+"""
+
+
+__author__ = 'Claudio Cherubino '
+
+
+import gdata.apps.emailsettings.data
+import gdata.client
+
+
+# Email Settings URI template
+# The strings in this template are eventually replaced with the API version,
+# Google Apps domain name, username, and settingID, respectively.
+EMAIL_SETTINGS_URI_TEMPLATE = '/a/feeds/emailsettings/%s/%s/%s/%s'
+
+
+# The settingID value for the label requests
+SETTING_ID_LABEL = 'label'
+# The settingID value for the filter requests
+SETTING_ID_FILTER = 'filter'
+# The settingID value for the send-as requests
+SETTING_ID_SENDAS = 'sendas'
+# The settingID value for the webclip requests
+SETTING_ID_WEBCLIP = 'webclip'
+# The settingID value for the forwarding requests
+SETTING_ID_FORWARDING = 'forwarding'
+# The settingID value for the POP requests
+SETTING_ID_POP = 'pop'
+# The settingID value for the IMAP requests
+SETTING_ID_IMAP = 'imap'
+# The settingID value for the vacation responder requests
+SETTING_ID_VACATION_RESPONDER = 'vacation'
+# The settingID value for the signature requests
+SETTING_ID_SIGNATURE = 'signature'
+# The settingID value for the language requests
+SETTING_ID_LANGUAGE = 'language'
+# The settingID value for the general requests
+SETTING_ID_GENERAL = 'general'
+
+# The KEEP action for the email settings
+ACTION_KEEP = 'KEEP'
+# The ARCHIVE action for the email settings
+ACTION_ARCHIVE = 'ARCHIVE'
+# The DELETE action for the email settings
+ACTION_DELETE = 'DELETE'
+
+# The ALL_MAIL setting for POP enable_for property
+POP_ENABLE_FOR_ALL_MAIL = 'ALL_MAIL'
+# The MAIL_FROM_NOW_ON setting for POP enable_for property
+POP_ENABLE_FOR_MAIL_FROM_NOW_ON = 'MAIL_FROM_NOW_ON'
+
+
+class EmailSettingsClient(gdata.client.GDClient):
+ """Client extension for the Google Email Settings API service.
+
+ Attributes:
+ host: string The hostname for the Email Settings API service.
+ api_version: string The version of the Email Settings API.
+ """
+
+ host = 'apps-apis.google.com'
+ api_version = '2.0'
+ auth_service = 'apps'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
+ ssl = True
+
+ def __init__(self, domain, auth_token=None, **kwargs):
+ """Constructs a new client for the Email Settings API.
+
+ Args:
+ domain: string The Google Apps domain with Email Settings.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the email settings.
+ kwargs: The other parameters to pass to the gdata.client.GDClient
+ constructor.
+ """
+ gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
+ self.domain = domain
+
+ def make_email_settings_uri(self, username, setting_id):
+ """Creates the URI for the Email Settings API call.
+
+    Using this client's Google Apps domain, create the URI to set up
+    email settings for the given user in that domain.
+
+ Args:
+ username: string The name of the user affected by this setting.
+ setting_id: string The key of the setting to be configured.
+
+ Returns:
+ A string giving the URI for Email Settings API calls for this client's
+ Google Apps domain.
+ """
+ uri = EMAIL_SETTINGS_URI_TEMPLATE % (self.api_version, self.domain,
+ username, setting_id)
+ return uri
+
+ MakeEmailSettingsUri = make_email_settings_uri
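
For orientation, a tiny check (not part of the patch) of the URI this helper produces; 'example.com' and 'jdoe' are made-up values, and constructing the client without an auth token is assumed to be acceptable for this offline use:

    # Sketch only: with api_version '2.0', the template expands to
    # '/a/feeds/emailsettings/2.0/<domain>/<username>/<setting_id>'.
    client = EmailSettingsClient(domain='example.com')
    assert (client.make_email_settings_uri('jdoe', SETTING_ID_IMAP) ==
            '/a/feeds/emailsettings/2.0/example.com/jdoe/imap')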
+
+ def create_label(self, username, name, **kwargs):
+ """Creates a label with the given properties.
+
+ Args:
+ username: string The name of the user.
+ name: string The name of the label.
+ kwargs: The other parameters to pass to gdata.client.GDClient.post().
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsLabel of the new resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_LABEL)
+ new_label = gdata.apps.emailsettings.data.EmailSettingsLabel(
+ uri=uri, name=name)
+ return self.post(new_label, uri, **kwargs)
+
+ CreateLabel = create_label
+
+ def create_filter(self, username, from_address=None,
+ to_address=None, subject=None, has_the_word=None,
+ does_not_have_the_word=None, has_attachments=None,
+ label=None, mark_as_read=None, archive=None, **kwargs):
+ """Creates a filter with the given properties.
+
+ Args:
+ username: string The name of the user.
+ from_address: string The source email address for the filter.
+ to_address: string (optional) The destination email address for
+ the filter.
+ subject: string (optional) The value the email must have in its
+ subject to be filtered.
+ has_the_word: string (optional) The value the email must have
+ in its subject or body to be filtered.
+ does_not_have_the_word: string (optional) The value the email
+ cannot have in its subject or body to be filtered.
+ has_attachments: string (optional) A boolean string representing
+ whether the email must have an attachment to be filtered.
+ label: string (optional) The name of the label to apply to
+ messages matching the filter criteria.
+ mark_as_read: Boolean (optional) Whether or not to mark
+ messages matching the filter criteria as read.
+      archive: Boolean (optional) Whether or not to archive messages
+          matching the filter criteria.
+ kwargs: The other parameters to pass to gdata.client.GDClient.post().
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsFilter of the new resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_FILTER)
+ new_filter = gdata.apps.emailsettings.data.EmailSettingsFilter(
+ uri=uri, from_address=from_address,
+ to_address=to_address, subject=subject,
+ has_the_word=has_the_word,
+ does_not_have_the_word=does_not_have_the_word,
+ has_attachments=has_attachments, label=label,
+ mark_as_read=mark_as_read, archive=archive)
+ return self.post(new_filter, uri, **kwargs)
+
+ CreateFilter = create_filter
+
+ def create_send_as(self, username, name, address, reply_to=None,
+ make_default=None, **kwargs):
+ """Creates a send-as alias with the given properties.
+
+ Args:
+ username: string The name of the user.
+ name: string The name that will appear in the "From" field.
+ address: string The email address that appears as the
+ origination address for emails sent by this user.
+ reply_to: string (optional) The address to be used as the reply-to
+ address in email sent using the alias.
+ make_default: Boolean (optional) Whether or not this alias should
+ become the default alias for this user.
+ kwargs: The other parameters to pass to gdata.client.GDClient.post().
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsSendAsAlias of the
+ new resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_SENDAS)
+ new_alias = gdata.apps.emailsettings.data.EmailSettingsSendAsAlias(
+ uri=uri, name=name, address=address,
+ reply_to=reply_to, make_default=make_default)
+ return self.post(new_alias, uri, **kwargs)
+
+ CreateSendAs = create_send_as
+
+ def update_webclip(self, username, enable, **kwargs):
+ """Enable/Disable Google Mail web clip.
+
+ Args:
+ username: string The name of the user.
+ enable: Boolean Whether to enable showing Web clips.
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsWebClip of the
+ updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_WEBCLIP)
+ new_webclip = gdata.apps.emailsettings.data.EmailSettingsWebClip(
+ uri=uri, enable=enable)
+ return self.update(new_webclip, **kwargs)
+
+ UpdateWebclip = update_webclip
+
+ def update_forwarding(self, username, enable, forward_to=None,
+ action=None, **kwargs):
+ """Update Google Mail Forwarding settings.
+
+ Args:
+ username: string The name of the user.
+ enable: Boolean Whether to enable incoming email forwarding.
+ forward_to: (optional) string The address email will be forwarded to.
+ action: string (optional) The action to perform after forwarding
+ an email (ACTION_KEEP, ACTION_ARCHIVE, ACTION_DELETE).
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsForwarding of the
+ updated resource
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_FORWARDING)
+ new_forwarding = gdata.apps.emailsettings.data.EmailSettingsForwarding(
+ uri=uri, enable=enable, forward_to=forward_to, action=action)
+ return self.update(new_forwarding, **kwargs)
+
+ UpdateForwarding = update_forwarding
+
+ def update_pop(self, username, enable, enable_for=None, action=None,
+ **kwargs):
+ """Update Google Mail POP settings.
+
+ Args:
+ username: string The name of the user.
+ enable: Boolean Whether to enable incoming POP3 access.
+ enable_for: string (optional) Whether to enable POP3 for all mail
+ (POP_ENABLE_FOR_ALL_MAIL), or mail from now on
+ (POP_ENABLE_FOR_MAIL_FROM_NOW_ON).
+ action: string (optional) What Google Mail should do with its copy
+ of the email after it is retrieved using POP (ACTION_KEEP,
+ ACTION_ARCHIVE, ACTION_DELETE).
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsPop of the updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_POP)
+ new_pop = gdata.apps.emailsettings.data.EmailSettingsPop(
+ uri=uri, enable=enable,
+ enable_for=enable_for, action=action)
+ return self.update(new_pop, **kwargs)
+
+ UpdatePop = update_pop
+
+ def update_imap(self, username, enable, **kwargs):
+ """Update Google Mail IMAP settings.
+
+ Args:
+ username: string The name of the user.
+      enable: Boolean Whether to enable IMAP access.
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsImap of the updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_IMAP)
+ new_imap = gdata.apps.emailsettings.data.EmailSettingsImap(
+ uri=uri, enable=enable)
+ return self.update(new_imap, **kwargs)
+
+ UpdateImap = update_imap
+
+ def update_vacation(self, username, enable, subject=None, message=None,
+ contacts_only=None, **kwargs):
+ """Update Google Mail vacation-responder settings.
+
+ Args:
+ username: string The name of the user.
+ enable: Boolean Whether to enable the vacation responder.
+ subject: string (optional) The subject line of the vacation responder
+ autoresponse.
+ message: string (optional) The message body of the vacation responder
+ autoresponse.
+ contacts_only: Boolean (optional) Whether to only send autoresponses
+ to known contacts.
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsVacationResponder of the
+ updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_VACATION_RESPONDER)
+ new_vacation = gdata.apps.emailsettings.data.EmailSettingsVacationResponder(
+ uri=uri, enable=enable, subject=subject,
+ message=message, contacts_only=contacts_only)
+ return self.update(new_vacation, **kwargs)
+
+ UpdateVacation = update_vacation
+
+ def update_signature(self, username, signature, **kwargs):
+ """Update Google Mail signature.
+
+ Args:
+ username: string The name of the user.
+ signature: string The signature to be appended to outgoing messages.
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsSignature of the
+ updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_SIGNATURE)
+ new_signature = gdata.apps.emailsettings.data.EmailSettingsSignature(
+ uri=uri, signature=signature)
+ return self.update(new_signature, **kwargs)
+
+ UpdateSignature = update_signature
+
+ def update_language(self, username, language, **kwargs):
+ """Update Google Mail language settings.
+
+ Args:
+ username: string The name of the user.
+ language: string The language tag for Google Mail's display language.
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsLanguage of the
+ updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_LANGUAGE)
+ new_language = gdata.apps.emailsettings.data.EmailSettingsLanguage(
+ uri=uri, language=language)
+ return self.update(new_language, **kwargs)
+
+ UpdateLanguage = update_language
+
+ def update_general_settings(self, username, page_size=None, shortcuts=None,
+ arrows=None, snippets=None, use_unicode=None,
+ **kwargs):
+ """Update Google Mail general settings.
+
+ Args:
+ username: string The name of the user.
+ page_size: int (optional) The number of conversations to be shown per
+ page.
+ shortcuts: Boolean (optional) Whether to enable keyboard shortcuts.
+ arrows: Boolean (optional) Whether to display arrow-shaped personal
+ indicators next to email sent specifically to the user.
+ snippets: Boolean (optional) Whether to display snippets of the messages
+ in the inbox and when searching.
+ use_unicode: Boolean (optional) Whether to use UTF-8 (unicode) encoding
+ for all outgoing messages.
+ kwargs: The other parameters to pass to the update method.
+
+ Returns:
+ gdata.apps.emailsettings.data.EmailSettingsGeneral of the
+ updated resource.
+ """
+ uri = self.MakeEmailSettingsUri(username=username,
+ setting_id=SETTING_ID_GENERAL)
+ new_general = gdata.apps.emailsettings.data.EmailSettingsGeneral(
+ uri=uri, page_size=page_size, shortcuts=shortcuts,
+ arrows=arrows, snippets=snippets, use_unicode=use_unicode)
+ return self.update(new_general, **kwargs)
+
+ UpdateGeneralSettings = update_general_settings
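
A hedged usage sketch for EmailSettingsClient (not part of the patch). The domain, credentials, and source string are placeholders, and the client_login() call assumes the standard ClientLogin helper provided by gdata.client.GDClient:

    # Sketch only: authenticate, add a label, and enable IMAP for one user.
    client = EmailSettingsClient(domain='example.com')
    client.client_login('admin@example.com', 'secret',
                        source='example-emailSettings-v1', service='apps')
    client.CreateLabel('jdoe', 'Receipts')
    client.UpdateImap('jdoe', enable=True)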
diff --git a/python/gdata/apps/emailsettings/data.py b/python/gdata/apps/emailsettings/data.py
new file mode 100644
index 0000000..fa8a53c
--- /dev/null
+++ b/python/gdata/apps/emailsettings/data.py
@@ -0,0 +1,1130 @@
+#!/usr/bin/python
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model classes for the Email Settings API."""
+
+
+__author__ = 'Claudio Cherubino '
+
+
+import atom.data
+import gdata.apps
+import gdata.apps_property
+import gdata.data
+
+
+# This is required to work around a naming conflict between the Google
+# Spreadsheets API and Python's built-in property function
+pyproperty = property
+
+
+# The apps:property label of the label property
+LABEL_NAME = 'label'
+
+# The apps:property from of the filter property
+FILTER_FROM_NAME = 'from'
+# The apps:property to of the filter property
+FILTER_TO_NAME = 'to'
+# The apps:property subject of the filter property
+FILTER_SUBJECT_NAME = 'subject'
+# The apps:property hasTheWord of the filter property
+FILTER_HAS_THE_WORD_NAME = 'hasTheWord'
+# The apps:property doesNotHaveTheWord of the filter property
+FILTER_DOES_NOT_HAVE_THE_WORD_NAME = 'doesNotHaveTheWord'
+# The apps:property hasAttachment of the filter property
+FILTER_HAS_ATTACHMENTS_NAME = 'hasAttachment'
+# The apps:property label of the filter action property
+FILTER_LABEL = 'label'
+# The apps:property shouldMarkAsRead of the filter action property
+FILTER_MARK_AS_READ = 'shouldMarkAsRead'
+# The apps:property shouldArchive of the filter action property
+FILTER_ARCHIVE = 'shouldArchive'
+
+# The apps:property name of the send-as alias property
+SENDAS_ALIAS_NAME = 'name'
+# The apps:property address of the send-as alias property
+SENDAS_ALIAS_ADDRESS = 'address'
+# The apps:property replyTo of the send-as alias property
+SENDAS_ALIAS_REPLY_TO = 'replyTo'
+# The apps:property makeDefault of the send-as alias property
+SENDAS_ALIAS_MAKE_DEFAULT = 'makeDefault'
+
+# The apps:property enable of the webclip property
+WEBCLIP_ENABLE = 'enable'
+
+# The apps:property enable of the forwarding property
+FORWARDING_ENABLE = 'enable'
+# The apps:property forwardTo of the forwarding property
+FORWARDING_TO = 'forwardTo'
+# The apps:property action of the forwarding property
+FORWARDING_ACTION = 'action'
+
+# The apps:property enable of the POP property
+POP_ENABLE = 'enable'
+# The apps:property enableFor of the POP property
+POP_ENABLE_FOR = 'enableFor'
+# The apps:property action of the POP property
+POP_ACTION = 'action'
+
+# The apps:property enable of the IMAP property
+IMAP_ENABLE = 'enable'
+
+# The apps:property enable of the vacation responder property
+VACATION_RESPONDER_ENABLE = 'enable'
+# The apps:property subject of the vacation responder property
+VACATION_RESPONDER_SUBJECT = 'subject'
+# The apps:property message of the vacation responder property
+VACATION_RESPONDER_MESSAGE = 'message'
+# The apps:property contactsOnly of the vacation responder property
+VACATION_RESPONDER_CONTACTS_ONLY = 'contactsOnly'
+
+# The apps:property signature of the signature property
+SIGNATURE_VALUE = 'signature'
+
+# The apps:property language of the language property
+LANGUAGE_TAG = 'language'
+
+# The apps:property pageSize of the general settings property
+GENERAL_PAGE_SIZE = 'pageSize'
+# The apps:property shortcuts of the general settings property
+GENERAL_SHORTCUTS = 'shortcuts'
+# The apps:property arrows of the general settings property
+GENERAL_ARROWS = 'arrows'
+# The apps:property snippets of the general settings property
+GENERAL_SNIPPETS = 'snippets'
+# The apps:property unicode of the general settings property
+GENERAL_UNICODE = 'unicode'
+
+
+class EmailSettingsEntry(gdata.data.GDEntry):
+ """Represents an Email Settings entry in object form."""
+
+ property = [gdata.apps_property.AppsProperty]
+
+ def _GetProperty(self, name):
+ """Get the apps:property value with the given name.
+
+ Args:
+ name: string Name of the apps:property value to get.
+
+ Returns:
+ The apps:property value with the given name, or None if the name was
+ invalid.
+ """
+
+ value = None
+ for p in self.property:
+ if p.name == name:
+ value = p.value
+ break
+ return value
+
+ def _SetProperty(self, name, value):
+ """Set the apps:property value with the given name to the given value.
+
+ Args:
+ name: string Name of the apps:property value to set.
+ value: string Value to give the apps:property value with the given name.
+ """
+ found = False
+ for i in range(len(self.property)):
+ if self.property[i].name == name:
+ self.property[i].value = value
+ found = True
+ break
+ if not found:
+ self.property.append(gdata.apps_property.AppsProperty(name=name, value=value))
+
+ def find_edit_link(self):
+ return self.uri
+
+
+class EmailSettingsLabel(EmailSettingsEntry):
+ """Represents a Label in object form."""
+
+ def GetName(self):
+ """Get the name of the Label object.
+
+ Returns:
+ The name of this Label object as a string or None.
+ """
+
+ return self._GetProperty(LABEL_NAME)
+
+ def SetName(self, value):
+ """Set the name of this Label object.
+
+ Args:
+ value: string The new label name to give this object.
+ """
+
+ self._SetProperty(LABEL_NAME, value)
+
+ name = pyproperty(GetName, SetName)
+
+ def __init__(self, uri=None, name=None, *args, **kwargs):
+ """Constructs a new EmailSettingsLabel object with the given arguments.
+
+ Args:
+ uri: string (optional) The uri of of this object for HTTP requests.
+ name: string (optional) The name to give this new object.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsLabel, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if name:
+ self.name = name
+
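
A small sketch (not part of the patch) of how the `name` attribute above rides on the _GetProperty/_SetProperty machinery of EmailSettingsEntry; it assumes the usual atom/gdata behavior of initializing the repeated `property` member to an empty list:

    # Sketch only: the value is stored as an apps:property element whose
    # name is LABEL_NAME ('label').
    label = EmailSettingsLabel(name='Receipts')
    assert label._GetProperty(LABEL_NAME) == 'Receipts'
    label.name = 'Archive2010'
    assert label.name == 'Archive2010'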
+
+class EmailSettingsFilter(EmailSettingsEntry):
+ """Represents an Email Settings Filter in object form."""
+
+ def GetFrom(self):
+ """Get the From value of the Filter object.
+
+ Returns:
+ The From value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_FROM_NAME)
+
+ def SetFrom(self, value):
+ """Set the From value of this Filter object.
+
+ Args:
+ value: string The new From value to give this object.
+ """
+
+ self._SetProperty(FILTER_FROM_NAME, value)
+
+ from_address = pyproperty(GetFrom, SetFrom)
+
+ def GetTo(self):
+ """Get the To value of the Filter object.
+
+ Returns:
+ The To value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_TO_NAME)
+
+ def SetTo(self, value):
+ """Set the To value of this Filter object.
+
+ Args:
+ value: string The new To value to give this object.
+ """
+
+ self._SetProperty(FILTER_TO_NAME, value)
+
+ to_address = pyproperty(GetTo, SetTo)
+
+ def GetSubject(self):
+ """Get the Subject value of the Filter object.
+
+ Returns:
+ The Subject value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_SUBJECT_NAME)
+
+ def SetSubject(self, value):
+ """Set the Subject value of this Filter object.
+
+ Args:
+ value: string The new Subject value to give this object.
+ """
+
+ self._SetProperty(FILTER_SUBJECT_NAME, value)
+
+ subject = pyproperty(GetSubject, SetSubject)
+
+ def GetHasTheWord(self):
+ """Get the HasTheWord value of the Filter object.
+
+ Returns:
+ The HasTheWord value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_HAS_THE_WORD_NAME)
+
+ def SetHasTheWord(self, value):
+ """Set the HasTheWord value of this Filter object.
+
+ Args:
+ value: string The new HasTheWord value to give this object.
+ """
+
+ self._SetProperty(FILTER_HAS_THE_WORD_NAME, value)
+
+ has_the_word = pyproperty(GetHasTheWord, SetHasTheWord)
+
+ def GetDoesNotHaveTheWord(self):
+ """Get the DoesNotHaveTheWord value of the Filter object.
+
+ Returns:
+ The DoesNotHaveTheWord value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_DOES_NOT_HAVE_THE_WORD_NAME)
+
+ def SetDoesNotHaveTheWord(self, value):
+ """Set the DoesNotHaveTheWord value of this Filter object.
+
+ Args:
+ value: string The new DoesNotHaveTheWord value to give this object.
+ """
+
+ self._SetProperty(FILTER_DOES_NOT_HAVE_THE_WORD_NAME, value)
+
+ does_not_have_the_word = pyproperty(GetDoesNotHaveTheWord,
+ SetDoesNotHaveTheWord)
+
+ def GetHasAttachments(self):
+ """Get the HasAttachments value of the Filter object.
+
+ Returns:
+ The HasAttachments value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_HAS_ATTACHMENTS_NAME)
+
+ def SetHasAttachments(self, value):
+ """Set the HasAttachments value of this Filter object.
+
+ Args:
+ value: string The new HasAttachments value to give this object.
+ """
+
+ self._SetProperty(FILTER_HAS_ATTACHMENTS_NAME, value)
+
+ has_attachments = pyproperty(GetHasAttachments,
+ SetHasAttachments)
+
+ def GetLabel(self):
+ """Get the Label value of the Filter object.
+
+ Returns:
+ The Label value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_LABEL)
+
+ def SetLabel(self, value):
+ """Set the Label value of this Filter object.
+
+ Args:
+ value: string The new Label value to give this object.
+ """
+
+ self._SetProperty(FILTER_LABEL, value)
+
+ label = pyproperty(GetLabel, SetLabel)
+
+ def GetMarkAsRead(self):
+ """Get the MarkAsRead value of the Filter object.
+
+ Returns:
+ The MarkAsRead value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_MARK_AS_READ)
+
+ def SetMarkAsRead(self, value):
+ """Set the MarkAsRead value of this Filter object.
+
+ Args:
+ value: string The new MarkAsRead value to give this object.
+ """
+
+ self._SetProperty(FILTER_MARK_AS_READ, value)
+
+ mark_as_read = pyproperty(GetMarkAsRead, SetMarkAsRead)
+
+ def GetArchive(self):
+ """Get the Archive value of the Filter object.
+
+ Returns:
+ The Archive value of this Filter object as a string or None.
+ """
+
+ return self._GetProperty(FILTER_ARCHIVE)
+
+ def SetArchive(self, value):
+ """Set the Archive value of this Filter object.
+
+ Args:
+ value: string The new Archive value to give this object.
+ """
+
+ self._SetProperty(FILTER_ARCHIVE, value)
+
+ archive = pyproperty(GetArchive, SetArchive)
+
+ def __init__(self, uri=None, from_address=None, to_address=None,
+ subject=None, has_the_word=None, does_not_have_the_word=None,
+ has_attachments=None, label=None, mark_as_read=None,
+ archive=None, *args, **kwargs):
+ """Constructs a new EmailSettingsFilter object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ from_address: string (optional) The source email address for the filter.
+ to_address: string (optional) The destination email address for
+ the filter.
+ subject: string (optional) The value the email must have in its
+ subject to be filtered.
+ has_the_word: string (optional) The value the email must have in its
+ subject or body to be filtered.
+ does_not_have_the_word: string (optional) The value the email cannot
+ have in its subject or body to be filtered.
+ has_attachments: Boolean (optional) Whether or not the email must
+ have an attachment to be filtered.
+ label: string (optional) The name of the label to apply to
+ messages matching the filter criteria.
+ mark_as_read: Boolean (optional) Whether or not to mark messages
+ matching the filter criteria as read.
+      archive: Boolean (optional) Whether or not to move messages matching
+        the filter criteria to the Archived state.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsFilter, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if from_address:
+ self.from_address = from_address
+ if to_address:
+ self.to_address = to_address
+ if subject:
+ self.subject = subject
+ if has_the_word:
+ self.has_the_word = has_the_word
+ if does_not_have_the_word:
+ self.does_not_have_the_word = does_not_have_the_word
+ if has_attachments is not None:
+ self.has_attachments = str(has_attachments)
+ if label:
+ self.label = label
+ if mark_as_read is not None:
+ self.mark_as_read = str(mark_as_read)
+ if archive is not None:
+ self.archive = str(archive)
+
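For a concrete sense of how these entry classes are used, here is a minimal sketch that builds a filter object. It assumes this file is importable as gdata.apps.emailsettings.data (the module path is not visible in this hunk) and uses placeholder values throughout.

import gdata.apps.emailsettings.data  # assumed module path for this file

# Booleans passed to __init__ are stored as the strings 'True'/'False',
# matching the str(...) conversions in the constructor above.
newsletter_filter = gdata.apps.emailsettings.data.EmailSettingsFilter(
    from_address='news@example.com',  # placeholder address
    has_the_word='unsubscribe',
    label='Newsletters',
    mark_as_read=True,
    archive=True)
# newsletter_filter.mark_as_read is now the string 'True'.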
+
+class EmailSettingsSendAsAlias(EmailSettingsEntry):
+ """Represents an Email Settings send-as Alias in object form."""
+
+ def GetName(self):
+ """Get the Name of the send-as Alias object.
+
+ Returns:
+ The Name of this send-as Alias object as a string or None.
+ """
+
+ return self._GetProperty(SENDAS_ALIAS_NAME)
+
+ def SetName(self, value):
+ """Set the Name of this send-as Alias object.
+
+ Args:
+ value: string The new Name to give this object.
+ """
+
+ self._SetProperty(SENDAS_ALIAS_NAME, value)
+
+ name = pyproperty(GetName, SetName)
+
+ def GetAddress(self):
+ """Get the Address of the send-as Alias object.
+
+ Returns:
+ The Address of this send-as Alias object as a string or None.
+ """
+
+ return self._GetProperty(SENDAS_ALIAS_ADDRESS)
+
+ def SetAddress(self, value):
+ """Set the Address of this send-as Alias object.
+
+ Args:
+ value: string The new Address to give this object.
+ """
+
+ self._SetProperty(SENDAS_ALIAS_ADDRESS, value)
+
+ address = pyproperty(GetAddress, SetAddress)
+
+ def GetReplyTo(self):
+ """Get the ReplyTo address of the send-as Alias object.
+
+ Returns:
+ The ReplyTo address of this send-as Alias object as a string or None.
+ """
+
+ return self._GetProperty(SENDAS_ALIAS_REPLY_TO)
+
+ def SetReplyTo(self, value):
+ """Set the ReplyTo address of this send-as Alias object.
+
+ Args:
+ value: string The new ReplyTo address to give this object.
+ """
+
+ self._SetProperty(SENDAS_ALIAS_REPLY_TO, value)
+
+ reply_to = pyproperty(GetReplyTo, SetReplyTo)
+
+ def GetMakeDefault(self):
+ """Get the MakeDefault value of the send-as Alias object.
+
+ Returns:
+ The MakeDefault value of this send-as Alias object as a string or None.
+ """
+
+ return self._GetProperty(SENDAS_ALIAS_MAKE_DEFAULT)
+
+ def SetMakeDefault(self, value):
+ """Set the MakeDefault value of this send-as Alias object.
+
+ Args:
+      value: string The new MakeDefault value to give this object.
+ """
+
+ self._SetProperty(SENDAS_ALIAS_MAKE_DEFAULT, value)
+
+ make_default = pyproperty(GetMakeDefault, SetMakeDefault)
+
+ def __init__(self, uri=None, name=None, address=None, reply_to=None,
+ make_default=None, *args, **kwargs):
+ """Constructs a new EmailSettingsSendAsAlias object with the given
+ arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ name: string (optional) The name that will appear in the "From" field
+ for this user.
+ address: string (optional) The email address that appears as the
+ origination address for emails sent by this user.
+ reply_to: string (optional) The address to be used as the reply-to
+ address in email sent using the alias.
+ make_default: Boolean (optional) Whether or not this alias should
+ become the default alias for this user.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsSendAsAlias, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if name:
+ self.name = name
+ if address:
+ self.address = address
+ if reply_to:
+ self.reply_to = reply_to
+ if make_default is not None:
+ self.make_default = str(make_default)
+
+
+class EmailSettingsWebClip(EmailSettingsEntry):
+ """Represents a WebClip in object form."""
+
+ def GetEnable(self):
+ """Get the Enable value of the WebClip object.
+
+ Returns:
+ The Enable value of this WebClip object as a string or None.
+ """
+
+ return self._GetProperty(WEBCLIP_ENABLE)
+
+ def SetEnable(self, value):
+ """Set the Enable value of this WebClip object.
+
+ Args:
+ value: string The new Enable value to give this object.
+ """
+
+ self._SetProperty(WEBCLIP_ENABLE, value)
+
+ enable = pyproperty(GetEnable, SetEnable)
+
+ def __init__(self, uri=None, enable=None, *args, **kwargs):
+ """Constructs a new EmailSettingsWebClip object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ enable: Boolean (optional) Whether to enable showing Web clips.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsWebClip, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if enable is not None:
+ self.enable = str(enable)
+
+
+class EmailSettingsForwarding(EmailSettingsEntry):
+ """Represents Forwarding settings in object form."""
+
+ def GetEnable(self):
+ """Get the Enable value of the Forwarding object.
+
+ Returns:
+ The Enable value of this Forwarding object as a string or None.
+ """
+
+ return self._GetProperty(FORWARDING_ENABLE)
+
+ def SetEnable(self, value):
+ """Set the Enable value of this Forwarding object.
+
+ Args:
+ value: string The new Enable value to give this object.
+ """
+
+ self._SetProperty(FORWARDING_ENABLE, value)
+
+ enable = pyproperty(GetEnable, SetEnable)
+
+ def GetForwardTo(self):
+ """Get the ForwardTo value of the Forwarding object.
+
+ Returns:
+ The ForwardTo value of this Forwarding object as a string or None.
+ """
+
+ return self._GetProperty(FORWARDING_TO)
+
+ def SetForwardTo(self, value):
+ """Set the ForwardTo value of this Forwarding object.
+
+ Args:
+ value: string The new ForwardTo value to give this object.
+ """
+
+ self._SetProperty(FORWARDING_TO, value)
+
+ forward_to = pyproperty(GetForwardTo, SetForwardTo)
+
+ def GetAction(self):
+ """Get the Action value of the Forwarding object.
+
+ Returns:
+ The Action value of this Forwarding object as a string or None.
+ """
+
+ return self._GetProperty(FORWARDING_ACTION)
+
+ def SetAction(self, value):
+ """Set the Action value of this Forwarding object.
+
+ Args:
+ value: string The new Action value to give this object.
+ """
+
+ self._SetProperty(FORWARDING_ACTION, value)
+
+ action = pyproperty(GetAction, SetAction)
+
+ def __init__(self, uri=None, enable=None, forward_to=None, action=None,
+ *args, **kwargs):
+ """Constructs a new EmailSettingsForwarding object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ enable: Boolean (optional) Whether to enable incoming email forwarding.
+ forward_to: string (optional) The address email will be forwarded to.
+ action: string (optional) The action to perform after forwarding an
+ email ("KEEP", "ARCHIVE", "DELETE").
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsForwarding, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if enable is not None:
+ self.enable = str(enable)
+ if forward_to:
+ self.forward_to = forward_to
+ if action:
+ self.action = action
+
+
+class EmailSettingsPop(EmailSettingsEntry):
+ """Represents POP settings in object form."""
+
+ def GetEnable(self):
+ """Get the Enable value of the POP object.
+
+ Returns:
+ The Enable value of this POP object as a string or None.
+ """
+
+ return self._GetProperty(POP_ENABLE)
+
+ def SetEnable(self, value):
+ """Set the Enable value of this POP object.
+
+ Args:
+ value: string The new Enable value to give this object.
+ """
+
+ self._SetProperty(POP_ENABLE, value)
+
+ enable = pyproperty(GetEnable, SetEnable)
+
+ def GetEnableFor(self):
+ """Get the EnableFor value of the POP object.
+
+ Returns:
+ The EnableFor value of this POP object as a string or None.
+ """
+
+ return self._GetProperty(POP_ENABLE_FOR)
+
+ def SetEnableFor(self, value):
+ """Set the EnableFor value of this POP object.
+
+ Args:
+ value: string The new EnableFor value to give this object.
+ """
+
+ self._SetProperty(POP_ENABLE_FOR, value)
+
+ enable_for = pyproperty(GetEnableFor, SetEnableFor)
+
+ def GetPopAction(self):
+ """Get the Action value of the POP object.
+
+ Returns:
+ The Action value of this POP object as a string or None.
+ """
+
+ return self._GetProperty(POP_ACTION)
+
+ def SetPopAction(self, value):
+ """Set the Action value of this POP object.
+
+ Args:
+ value: string The new Action value to give this object.
+ """
+
+ self._SetProperty(POP_ACTION, value)
+
+ action = pyproperty(GetPopAction, SetPopAction)
+
+ def __init__(self, uri=None, enable=None, enable_for=None,
+ action=None, *args, **kwargs):
+    """Constructs a new EmailSettingsPop object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ enable: Boolean (optional) Whether to enable incoming POP3 access.
+ enable_for: string (optional) Whether to enable POP3 for all mail
+ ("ALL_MAIL"), or mail from now on ("MAIL_FROM_NOW_ON").
+ action: string (optional) What Google Mail should do with its copy
+ of the email after it is retrieved using POP
+ ("KEEP", "ARCHIVE", or "DELETE").
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsPop, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if enable is not None:
+ self.enable = str(enable)
+ if enable_for:
+ self.enable_for = enable_for
+ if action:
+ self.action = action
+
+
+class EmailSettingsImap(EmailSettingsEntry):
+ """Represents IMAP settings in object form."""
+
+ def GetEnable(self):
+ """Get the Enable value of the IMAP object.
+
+ Returns:
+ The Enable value of this IMAP object as a string or None.
+ """
+
+ return self._GetProperty(IMAP_ENABLE)
+
+ def SetEnable(self, value):
+ """Set the Enable value of this IMAP object.
+
+ Args:
+ value: string The new Enable value to give this object.
+ """
+
+ self._SetProperty(IMAP_ENABLE, value)
+
+ enable = pyproperty(GetEnable, SetEnable)
+
+ def __init__(self, uri=None, enable=None, *args, **kwargs):
+ """Constructs a new EmailSettingsImap object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ enable: Boolean (optional) Whether to enable IMAP access.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsImap, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if enable is not None:
+ self.enable = str(enable)
+
+
+class EmailSettingsVacationResponder(EmailSettingsEntry):
+ """Represents Vacation Responder settings in object form."""
+
+ def GetEnable(self):
+ """Get the Enable value of the Vacation Responder object.
+
+ Returns:
+ The Enable value of this Vacation Responder object as a string or None.
+ """
+
+ return self._GetProperty(VACATION_RESPONDER_ENABLE)
+
+ def SetEnable(self, value):
+ """Set the Enable value of this Vacation Responder object.
+
+ Args:
+ value: string The new Enable value to give this object.
+ """
+
+ self._SetProperty(VACATION_RESPONDER_ENABLE, value)
+
+ enable = pyproperty(GetEnable, SetEnable)
+
+ def GetSubject(self):
+ """Get the Subject value of the Vacation Responder object.
+
+ Returns:
+ The Subject value of this Vacation Responder object as a string or None.
+ """
+
+ return self._GetProperty(VACATION_RESPONDER_SUBJECT)
+
+ def SetSubject(self, value):
+ """Set the Subject value of this Vacation Responder object.
+
+ Args:
+ value: string The new Subject value to give this object.
+ """
+
+ self._SetProperty(VACATION_RESPONDER_SUBJECT, value)
+
+ subject = pyproperty(GetSubject, SetSubject)
+
+ def GetMessage(self):
+ """Get the Message value of the Vacation Responder object.
+
+ Returns:
+ The Message value of this Vacation Responder object as a string or None.
+ """
+
+ return self._GetProperty(VACATION_RESPONDER_MESSAGE)
+
+ def SetMessage(self, value):
+ """Set the Message value of this Vacation Responder object.
+
+ Args:
+ value: string The new Message value to give this object.
+ """
+
+ self._SetProperty(VACATION_RESPONDER_MESSAGE, value)
+
+ message = pyproperty(GetMessage, SetMessage)
+
+ def GetContactsOnly(self):
+ """Get the ContactsOnly value of the Vacation Responder object.
+
+ Returns:
+ The ContactsOnly value of this Vacation Responder object as a
+ string or None.
+ """
+
+ return self._GetProperty(VACATION_RESPONDER_CONTACTS_ONLY)
+
+ def SetContactsOnly(self, value):
+ """Set the ContactsOnly value of this Vacation Responder object.
+
+ Args:
+ value: string The new ContactsOnly value to give this object.
+ """
+
+ self._SetProperty(VACATION_RESPONDER_CONTACTS_ONLY, value)
+
+ contacts_only = pyproperty(GetContactsOnly, SetContactsOnly)
+
+ def __init__(self, uri=None, enable=None, subject=None,
+ message=None, contacts_only=None, *args, **kwargs):
+ """Constructs a new EmailSettingsVacationResponder object with the
+ given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ enable: Boolean (optional) Whether to enable the vacation responder.
+ subject: string (optional) The subject line of the vacation responder
+ autoresponse.
+ message: string (optional) The message body of the vacation responder
+ autoresponse.
+ contacts_only: Boolean (optional) Whether to only send autoresponses
+ to known contacts.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsVacationResponder, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if enable is not None:
+ self.enable = str(enable)
+ if subject:
+ self.subject = subject
+ if message:
+ self.message = message
+ if contacts_only is not None:
+ self.contacts_only = str(contacts_only)
+
+
+class EmailSettingsSignature(EmailSettingsEntry):
+ """Represents a Signature in object form."""
+
+ def GetValue(self):
+ """Get the value of the Signature object.
+
+ Returns:
+ The value of this Signature object as a string or None.
+ """
+
+ value = self._GetProperty(SIGNATURE_VALUE)
+ if value == ' ': # hack to support empty signature
+ return ''
+ else:
+ return value
+
+ def SetValue(self, value):
+ """Set the name of this Signature object.
+
+ Args:
+ value: string The new signature value to give this object.
+ """
+
+ if value == '': # hack to support empty signature
+ value = ' '
+ self._SetProperty(SIGNATURE_VALUE, value)
+
+ signature_value = pyproperty(GetValue, SetValue)
+
+ def __init__(self, uri=None, signature=None, *args, **kwargs):
+ """Constructs a new EmailSettingsSignature object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ signature: string (optional) The signature to be appended to outgoing
+ messages.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsSignature, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if signature is not None:
+ self.signature_value = signature
+
+
+class EmailSettingsLanguage(EmailSettingsEntry):
+ """Represents Language Settings in object form."""
+
+ def GetLanguage(self):
+ """Get the tag of the Language object.
+
+ Returns:
+ The tag of this Language object as a string or None.
+ """
+
+ return self._GetProperty(LANGUAGE_TAG)
+
+ def SetLanguage(self, value):
+ """Set the tag of this Language object.
+
+ Args:
+ value: string The new tag value to give this object.
+ """
+
+ self._SetProperty(LANGUAGE_TAG, value)
+
+ language_tag = pyproperty(GetLanguage, SetLanguage)
+
+ def __init__(self, uri=None, language=None, *args, **kwargs):
+ """Constructs a new EmailSettingsLanguage object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ language: string (optional) The language tag for Google Mail's display
+ language.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsLanguage, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if language:
+ self.language_tag = language
+
+
+class EmailSettingsGeneral(EmailSettingsEntry):
+ """Represents General Settings in object form."""
+
+ def GetPageSize(self):
+ """Get the Page Size value of the General Settings object.
+
+ Returns:
+ The Page Size value of this General Settings object as a string or None.
+ """
+
+ return self._GetProperty(GENERAL_PAGE_SIZE)
+
+ def SetPageSize(self, value):
+ """Set the Page Size value of this General Settings object.
+
+ Args:
+ value: string The new Page Size value to give this object.
+ """
+
+ self._SetProperty(GENERAL_PAGE_SIZE, value)
+
+ page_size = pyproperty(GetPageSize, SetPageSize)
+
+ def GetShortcuts(self):
+ """Get the Shortcuts value of the General Settings object.
+
+ Returns:
+ The Shortcuts value of this General Settings object as a string or None.
+ """
+
+ return self._GetProperty(GENERAL_SHORTCUTS)
+
+ def SetShortcuts(self, value):
+ """Set the Shortcuts value of this General Settings object.
+
+ Args:
+ value: string The new Shortcuts value to give this object.
+ """
+
+ self._SetProperty(GENERAL_SHORTCUTS, value)
+
+ shortcuts = pyproperty(GetShortcuts, SetShortcuts)
+
+ def GetArrows(self):
+ """Get the Arrows value of the General Settings object.
+
+ Returns:
+ The Arrows value of this General Settings object as a string or None.
+ """
+
+ return self._GetProperty(GENERAL_ARROWS)
+
+ def SetArrows(self, value):
+ """Set the Arrows value of this General Settings object.
+
+ Args:
+ value: string The new Arrows value to give this object.
+ """
+
+ self._SetProperty(GENERAL_ARROWS, value)
+
+ arrows = pyproperty(GetArrows, SetArrows)
+
+ def GetSnippets(self):
+ """Get the Snippets value of the General Settings object.
+
+ Returns:
+ The Snippets value of this General Settings object as a string or None.
+ """
+
+ return self._GetProperty(GENERAL_SNIPPETS)
+
+ def SetSnippets(self, value):
+ """Set the Snippets value of this General Settings object.
+
+ Args:
+ value: string The new Snippets value to give this object.
+ """
+
+ self._SetProperty(GENERAL_SNIPPETS, value)
+
+ snippets = pyproperty(GetSnippets, SetSnippets)
+
+ def GetUnicode(self):
+ """Get the Unicode value of the General Settings object.
+
+ Returns:
+ The Unicode value of this General Settings object as a string or None.
+ """
+
+ return self._GetProperty(GENERAL_UNICODE)
+
+ def SetUnicode(self, value):
+ """Set the Unicode value of this General Settings object.
+
+ Args:
+ value: string The new Unicode value to give this object.
+ """
+
+ self._SetProperty(GENERAL_UNICODE, value)
+
+ use_unicode = pyproperty(GetUnicode, SetUnicode)
+
+ def __init__(self, uri=None, page_size=None, shortcuts=None,
+ arrows=None, snippets=None, use_unicode=None, *args, **kwargs):
+ """Constructs a new EmailSettingsGeneral object with the given arguments.
+
+ Args:
+      uri: string (optional) The uri of this object for HTTP requests.
+ page_size: int (optional) The number of conversations to be shown per page.
+ shortcuts: Boolean (optional) Whether to enable keyboard shortcuts.
+ arrows: Boolean (optional) Whether to display arrow-shaped personal
+ indicators next to email sent specifically to the user.
+ snippets: Boolean (optional) Whether to display snippets of the messages
+ in the inbox and when searching.
+ use_unicode: Boolean (optional) Whether to use UTF-8 (unicode) encoding
+ for all outgoing messages.
+ args: The other parameters to pass to gdata.entry.GDEntry constructor.
+ kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
+ """
+ super(EmailSettingsGeneral, self).__init__(*args, **kwargs)
+ if uri:
+ self.uri = uri
+ if page_size is not None:
+ self.page_size = str(page_size)
+ if shortcuts is not None:
+ self.shortcuts = str(shortcuts)
+ if arrows is not None:
+ self.arrows = str(arrows)
+ if snippets is not None:
+ self.snippets = str(snippets)
+ if use_unicode is not None:
+ self.use_unicode = str(use_unicode)
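One detail worth noting before the service module below: EmailSettingsSignature converts an empty signature to a single space when writing and back to the empty string when reading, per the 'hack' comments in GetValue/SetValue. A small sketch, again assuming the gdata.apps.emailsettings.data module path:

from gdata.apps.emailsettings.data import EmailSettingsSignature  # assumed path

sig = EmailSettingsSignature(signature='')
# Internally the property is stored as ' ', but it reads back as ''.
round_trips_empty = (sig.signature_value == '')  # True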
diff --git a/python/gdata/apps/emailsettings/service.py b/python/gdata/apps/emailsettings/service.py
new file mode 100644
index 0000000..cab61ea
--- /dev/null
+++ b/python/gdata/apps/emailsettings/service.py
@@ -0,0 +1,264 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow Google Apps domain administrators to set users' email settings.
+
+ EmailSettingsService: Set various email settings.
+"""
+
+__author__ = 'google-apps-apis@googlegroups.com'
+
+
+import gdata.apps
+import gdata.apps.service
+import gdata.service
+
+
+API_VER='2.0'
+# Forwarding and POP3 options
+KEEP='KEEP'
+ARCHIVE='ARCHIVE'
+DELETE='DELETE'
+ALL_MAIL='ALL_MAIL'
+MAIL_FROM_NOW_ON='MAIL_FROM_NOW_ON'
+
+
+class EmailSettingsService(gdata.apps.service.PropertyService):
+ """Client for the Google Apps Email Settings service."""
+
+ def _serviceUrl(self, setting_id, username, domain=None):
+ if domain is None:
+ domain = self.domain
+ return '/a/feeds/emailsettings/%s/%s/%s/%s' % (API_VER, domain, username,
+ setting_id)
+
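For reference, this helper expands to the standard emailsettings feed layout. A sketch with placeholder values (the inherited constructor keywords are assumptions, since the base PropertyService is not part of this hunk):

from gdata.apps.emailsettings.service import EmailSettingsService

service = EmailSettingsService(domain='example.com')  # placeholder domain
label_uri = service._serviceUrl('label', 'liz')
# label_uri == '/a/feeds/emailsettings/2.0/example.com/liz/label'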
+ def CreateLabel(self, username, label):
+ """Create a label.
+
+ Args:
+ username: User to create label for.
+ label: Label to create.
+
+ Returns:
+ A dict containing the result of the create operation.
+ """
+ uri = self._serviceUrl('label', username)
+ properties = {'label': label}
+ return self._PostProperties(uri, properties)
+
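An end-to-end sketch of calling this method; the email/domain/password keywords and ProgrammaticLogin are inherited from the base gdata service classes and are not shown in this hunk, so treat them as assumptions along with the placeholder credentials.

import gdata.apps.emailsettings.service

service = gdata.apps.emailsettings.service.EmailSettingsService(
    email='admin@example.com', domain='example.com', password='secret')
service.ProgrammaticLogin()  # assumed inherited ClientLogin flow

result = service.CreateLabel('liz', 'Receipts')
# result is the dict of properties echoed back by the API.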
+ def CreateFilter(self, username, from_=None, to=None, subject=None,
+ has_the_word=None, does_not_have_the_word=None,
+ has_attachment=None, label=None, should_mark_as_read=None,
+ should_archive=None):
+ """Create a filter.
+
+ Args:
+ username: User to create filter for.
+ from_: Filter from string.
+ to: Filter to string.
+ subject: Filter subject.
+ has_the_word: Words to filter in.
+ does_not_have_the_word: Words to filter out.
+ has_attachment: Boolean for message having attachment.
+ label: Label to apply.
+ should_mark_as_read: Boolean for marking message as read.
+ should_archive: Boolean for archiving message.
+
+ Returns:
+ A dict containing the result of the create operation.
+ """
+ uri = self._serviceUrl('filter', username)
+ properties = {}
+ properties['from'] = from_
+ properties['to'] = to
+ properties['subject'] = subject
+ properties['hasTheWord'] = has_the_word
+ properties['doesNotHaveTheWord'] = does_not_have_the_word
+ properties['hasAttachment'] = gdata.apps.service._bool2str(has_attachment)
+ properties['label'] = label
+ properties['shouldMarkAsRead'] = gdata.apps.service._bool2str(should_mark_as_read)
+ properties['shouldArchive'] = gdata.apps.service._bool2str(should_archive)
+ return self._PostProperties(uri, properties)
+
+ def CreateSendAsAlias(self, username, name, address, reply_to=None,
+ make_default=None):
+ """Create alias to send mail as.
+
+ Args:
+ username: User to create alias for.
+ name: Name of alias.
+ address: Email address to send from.
+ reply_to: Email address to reply to.
+ make_default: Boolean for whether this is the new default sending alias.
+
+ Returns:
+ A dict containing the result of the create operation.
+ """
+ uri = self._serviceUrl('sendas', username)
+ properties = {}
+ properties['name'] = name
+ properties['address'] = address
+ properties['replyTo'] = reply_to
+ properties['makeDefault'] = gdata.apps.service._bool2str(make_default)
+ return self._PostProperties(uri, properties)
+
+ def UpdateWebClipSettings(self, username, enable):
+    """Update WebClip settings.
+
+    Args:
+      username: User to update WebClip settings for.
+      enable: Boolean whether to enable Web Clip.
+
+    Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('webclip', username)
+ properties = {}
+ properties['enable'] = gdata.apps.service._bool2str(enable)
+ return self._PutProperties(uri, properties)
+
+ def UpdateForwarding(self, username, enable, forward_to=None, action=None):
+ """Update forwarding settings.
+
+ Args:
+ username: User to update forwarding for.
+ enable: Boolean whether to enable this forwarding rule.
+ forward_to: Email address to forward to.
+ action: Action to take after forwarding.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('forwarding', username)
+ properties = {}
+ properties['enable'] = gdata.apps.service._bool2str(enable)
+ if enable is True:
+ properties['forwardTo'] = forward_to
+ properties['action'] = action
+ return self._PutProperties(uri, properties)
+
+ def UpdatePop(self, username, enable, enable_for=None, action=None):
+ """Update POP3 settings.
+
+ Args:
+ username: User to update POP3 settings for.
+ enable: Boolean whether to enable POP3.
+ enable_for: Which messages to make available via POP3.
+ action: Action to take after user retrieves email via POP3.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('pop', username)
+ properties = {}
+ properties['enable'] = gdata.apps.service._bool2str(enable)
+ if enable is True:
+ properties['enableFor'] = enable_for
+ properties['action'] = action
+ return self._PutProperties(uri, properties)
+
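The module-level constants defined near the top of this file appear intended to be passed straight through as the enable_for/action values; continuing the sketch above:

from gdata.apps.emailsettings.service import ALL_MAIL, KEEP, DELETE

# Enable POP for all existing mail and keep Google Mail's copy afterwards.
service.UpdatePop('liz', enable=True, enable_for=ALL_MAIL, action=KEEP)

# Forward new mail elsewhere and delete the local copy; note that forwardTo
# and action are only sent when enable is True (see UpdateForwarding above).
service.UpdateForwarding('liz', enable=True,
                         forward_to='liz.backup@example.com', action=DELETE)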
+ def UpdateImap(self, username, enable):
+ """Update IMAP settings.
+
+ Args:
+ username: User to update IMAP settings for.
+ enable: Boolean whether to enable IMAP.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('imap', username)
+ properties = {'enable': gdata.apps.service._bool2str(enable)}
+ return self._PutProperties(uri, properties)
+
+ def UpdateVacation(self, username, enable, subject=None, message=None,
+ contacts_only=None):
+ """Update vacation settings.
+
+ Args:
+ username: User to update vacation settings for.
+ enable: Boolean whether to enable vacation responses.
+ subject: Vacation message subject.
+ message: Vacation message body.
+ contacts_only: Boolean whether to send message only to contacts.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('vacation', username)
+ properties = {}
+ properties['enable'] = gdata.apps.service._bool2str(enable)
+ if enable is True:
+ properties['subject'] = subject
+ properties['message'] = message
+ properties['contactsOnly'] = gdata.apps.service._bool2str(contacts_only)
+ return self._PutProperties(uri, properties)
+
+ def UpdateSignature(self, username, signature):
+ """Update signature.
+
+ Args:
+ username: User to update signature for.
+ signature: Signature string.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('signature', username)
+ properties = {'signature': signature}
+ return self._PutProperties(uri, properties)
+
+ def UpdateLanguage(self, username, language):
+ """Update user interface language.
+
+ Args:
+ username: User to update language for.
+ language: Language code.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('language', username)
+ properties = {'language': language}
+ return self._PutProperties(uri, properties)
+
+ def UpdateGeneral(self, username, page_size=None, shortcuts=None, arrows=None,
+ snippets=None, unicode=None):
+ """Update general settings.
+
+ Args:
+ username: User to update general settings for.
+ page_size: Number of messages to show.
+ shortcuts: Boolean whether shortcuts are enabled.
+ arrows: Boolean whether arrows are enabled.
+ snippets: Boolean whether snippets are enabled.
+      unicode: Boolean whether unicode is enabled.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._serviceUrl('general', username)
+ properties = {}
+    if page_size is not None:
+      properties['pageSize'] = str(page_size)
+    if shortcuts is not None:
+      properties['shortcuts'] = gdata.apps.service._bool2str(shortcuts)
+    if arrows is not None:
+      properties['arrows'] = gdata.apps.service._bool2str(arrows)
+    if snippets is not None:
+      properties['snippets'] = gdata.apps.service._bool2str(snippets)
+    if unicode is not None:
+      properties['unicode'] = gdata.apps.service._bool2str(unicode)
+ return self._PutProperties(uri, properties)
diff --git a/python/gdata/apps/groups/__init__.py b/python/gdata/apps/groups/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/gdata/apps/groups/service.py b/python/gdata/apps/groups/service.py
new file mode 100644
index 0000000..80df417
--- /dev/null
+++ b/python/gdata/apps/groups/service.py
@@ -0,0 +1,387 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow Google Apps domain administrators to manage groups, group members and group owners.
+
+ GroupsService: Provides methods to manage groups, members and owners.
+"""
+
+__author__ = 'google-apps-apis@googlegroups.com'
+
+
+import urllib
+import gdata.apps
+import gdata.apps.service
+import gdata.service
+
+
+API_VER = '2.0'
+BASE_URL = '/a/feeds/group/' + API_VER + '/%s'
+GROUP_MEMBER_URL = BASE_URL + '?member=%s'
+GROUP_MEMBER_DIRECT_URL = GROUP_MEMBER_URL + '&directOnly=%s'
+GROUP_ID_URL = BASE_URL + '/%s'
+MEMBER_URL = BASE_URL + '/%s/member'
+MEMBER_WITH_SUSPENDED_URL = MEMBER_URL + '?includeSuspendedUsers=%s'
+MEMBER_ID_URL = MEMBER_URL + '/%s'
+OWNER_URL = BASE_URL + '/%s/owner'
+OWNER_WITH_SUSPENDED_URL = OWNER_URL + '?includeSuspendedUsers=%s'
+OWNER_ID_URL = OWNER_URL + '/%s'
+
+PERMISSION_OWNER = 'Owner'
+PERMISSION_MEMBER = 'Member'
+PERMISSION_DOMAIN = 'Domain'
+PERMISSION_ANYONE = 'Anyone'
+
+
+class GroupsService(gdata.apps.service.PropertyService):
+ """Client for the Google Apps Groups service."""
+
+ def _ServiceUrl(self, service_type, is_existed, group_id, member_id, owner_email,
+ direct_only=False, domain=None, suspended_users=False):
+ if domain is None:
+ domain = self.domain
+
+ if service_type == 'group':
+ if group_id != '' and is_existed:
+ return GROUP_ID_URL % (domain, group_id)
+ elif member_id != '':
+ if direct_only:
+ return GROUP_MEMBER_DIRECT_URL % (domain, urllib.quote_plus(member_id),
+ self._Bool2Str(direct_only))
+ else:
+ return GROUP_MEMBER_URL % (domain, urllib.quote_plus(member_id))
+ else:
+ return BASE_URL % (domain)
+
+ if service_type == 'member':
+ if member_id != '' and is_existed:
+ return MEMBER_ID_URL % (domain, group_id, urllib.quote_plus(member_id))
+ elif suspended_users:
+ return MEMBER_WITH_SUSPENDED_URL % (domain, group_id,
+ self._Bool2Str(suspended_users))
+ else:
+ return MEMBER_URL % (domain, group_id)
+
+ if service_type == 'owner':
+ if owner_email != '' and is_existed:
+ return OWNER_ID_URL % (domain, group_id, urllib.quote_plus(owner_email))
+ elif suspended_users:
+ return OWNER_WITH_SUSPENDED_URL % (domain, group_id,
+ self._Bool2Str(suspended_users))
+ else:
+ return OWNER_URL % (domain, group_id)
+
+ def _Bool2Str(self, b):
+ if b is None:
+ return None
+ return str(b is True).lower()
+
+ def _IsExisted(self, uri):
+ try:
+ self._GetProperties(uri)
+ return True
+ except gdata.apps.service.AppsForYourDomainException, e:
+ if e.error_code == gdata.apps.service.ENTITY_DOES_NOT_EXIST:
+ return False
+ else:
+ raise e
+
+ def CreateGroup(self, group_id, group_name, description, email_permission):
+ """Create a group.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+ group_name: The name of the group.
+      description: A description of the group.
+ email_permission: The subscription permission of the group.
+
+ Returns:
+ A dict containing the result of the create operation.
+ """
+ uri = self._ServiceUrl('group', False, group_id, '', '')
+ properties = {}
+ properties['groupId'] = group_id
+ properties['groupName'] = group_name
+ properties['description'] = description
+ properties['emailPermission'] = email_permission
+ return self._PostProperties(uri, properties)
+
+ def UpdateGroup(self, group_id, group_name, description, email_permission):
+ """Update a group's name, description and/or permission.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+ group_name: The name of the group.
+      description: A description of the group.
+ email_permission: The subscription permission of the group.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+ uri = self._ServiceUrl('group', True, group_id, '', '')
+ properties = {}
+ properties['groupId'] = group_id
+ properties['groupName'] = group_name
+ properties['description'] = description
+ properties['emailPermission'] = email_permission
+ return self._PutProperties(uri, properties)
+
+ def RetrieveGroup(self, group_id):
+ """Retrieve a group based on its ID.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('group', True, group_id, '', '')
+ return self._GetProperties(uri)
+
+ def RetrieveAllGroups(self):
+ """Retrieve all groups in the domain.
+
+ Args:
+ None
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('group', True, '', '', '')
+ return self._GetPropertiesList(uri)
+
+ def RetrievePageOfGroups(self, start_group=None):
+ """Retrieve one page of groups in the domain.
+
+ Args:
+ start_group: The key to continue for pagination through all groups.
+
+ Returns:
+ A feed object containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('group', True, '', '', '')
+ if start_group is not None:
+ uri += "?start="+start_group
+ property_feed = self._GetPropertyFeed(uri)
+ return property_feed
+
+ def RetrieveGroups(self, member_id, direct_only=False):
+ """Retrieve all groups that belong to the given member_id.
+
+ Args:
+ member_id: The member's email address (e.g. member@example.com).
+      direct_only: Boolean whether to return only groups that this member directly belongs to.
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('group', True, '', member_id, '', direct_only=direct_only)
+ return self._GetPropertiesList(uri)
+
+ def DeleteGroup(self, group_id):
+ """Delete a group based on its ID.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the delete operation.
+ """
+ uri = self._ServiceUrl('group', True, group_id, '', '')
+ return self._DeleteProperties(uri)
+
+ def AddMemberToGroup(self, member_id, group_id):
+ """Add a member to a group.
+
+ Args:
+ member_id: The member's email address (e.g. member@example.com).
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the add operation.
+ """
+ uri = self._ServiceUrl('member', False, group_id, member_id, '')
+ properties = {}
+ properties['memberId'] = member_id
+ return self._PostProperties(uri, properties)
+
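A hedged usage sketch for the group-management calls; the constructor keywords and ProgrammaticLogin are inherited from the base service classes (not shown in this hunk), and the PERMISSION_* constants above appear intended for the email_permission argument.

import gdata.apps.groups.service

groups = gdata.apps.groups.service.GroupsService(
    email='admin@example.com', domain='example.com', password='secret')
groups.ProgrammaticLogin()  # assumed inherited login, as elsewhere in gdata

groups.CreateGroup('us-sales', 'US Sales', 'Sales staff in the US',
                   gdata.apps.groups.service.PERMISSION_MEMBER)
groups.AddMemberToGroup('liz@example.com', 'us-sales')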
+ def IsMember(self, member_id, group_id):
+ """Check whether the given member already exists in the given group.
+
+ Args:
+ member_id: The member's email address (e.g. member@example.com).
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ True if the member exists in the group. False otherwise.
+ """
+ uri = self._ServiceUrl('member', True, group_id, member_id, '')
+ return self._IsExisted(uri)
+
+ def RetrieveMember(self, member_id, group_id):
+ """Retrieve the given member in the given group.
+
+ Args:
+ member_id: The member's email address (e.g. member@example.com).
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('member', True, group_id, member_id, '')
+ return self._GetProperties(uri)
+
+ def RetrieveAllMembers(self, group_id, suspended_users=False):
+ """Retrieve all members in the given group.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+ suspended_users: A boolean; should we include any suspended users in
+ the membership list returned?
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('member', True, group_id, '', '',
+ suspended_users=suspended_users)
+ return self._GetPropertiesList(uri)
+
+ def RetrievePageOfMembers(self, group_id, suspended_users=False, start=None):
+ """Retrieve one page of members of a given group.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+ suspended_users: A boolean; should we include any suspended users in
+ the membership list returned?
+ start: The key to continue for pagination through all members.
+
+ Returns:
+ A feed object containing the result of the retrieve operation.
+ """
+
+ uri = self._ServiceUrl('member', True, group_id, '', '',
+ suspended_users=suspended_users)
+ if start is not None:
+ if suspended_users:
+ uri += "&start="+start
+ else:
+ uri += "?start="+start
+ property_feed = self._GetPropertyFeed(uri)
+ return property_feed
+
+ def RemoveMemberFromGroup(self, member_id, group_id):
+ """Remove the given member from the given group.
+
+ Args:
+ member_id: The member's email address (e.g. member@example.com).
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the remove operation.
+ """
+ uri = self._ServiceUrl('member', True, group_id, member_id, '')
+ return self._DeleteProperties(uri)
+
+ def AddOwnerToGroup(self, owner_email, group_id):
+ """Add an owner to a group.
+
+ Args:
+ owner_email: The email address of a group owner.
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the add operation.
+ """
+ uri = self._ServiceUrl('owner', False, group_id, '', owner_email)
+ properties = {}
+ properties['email'] = owner_email
+ return self._PostProperties(uri, properties)
+
+ def IsOwner(self, owner_email, group_id):
+    """Check whether the given member is an owner of the given group.
+
+ Args:
+ owner_email: The email address of a group owner.
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ True if the member is an owner of the given group. False otherwise.
+ """
+ uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
+ return self._IsExisted(uri)
+
+ def RetrieveOwner(self, owner_email, group_id):
+ """Retrieve the given owner in the given group.
+
+ Args:
+ owner_email: The email address of a group owner.
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
+ return self._GetProperties(uri)
+
+ def RetrieveAllOwners(self, group_id, suspended_users=False):
+ """Retrieve all owners of the given group.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+ suspended_users: A boolean; should we include any suspended users in
+ the ownership list returned?
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('owner', True, group_id, '', '',
+ suspended_users=suspended_users)
+ return self._GetPropertiesList(uri)
+
+ def RetrievePageOfOwners(self, group_id, suspended_users=False, start=None):
+ """Retrieve one page of owners of the given group.
+
+ Args:
+ group_id: The ID of the group (e.g. us-sales).
+ suspended_users: A boolean; should we include any suspended users in
+ the ownership list returned?
+ start: The key to continue for pagination through all owners.
+
+ Returns:
+ A feed object containing the result of the retrieve operation.
+ """
+ uri = self._ServiceUrl('owner', True, group_id, '', '',
+ suspended_users=suspended_users)
+ if start is not None:
+ if suspended_users:
+ uri += "&start="+start
+ else:
+ uri += "?start="+start
+ property_feed = self._GetPropertyFeed(uri)
+ return property_feed
+
+ def RemoveOwnerFromGroup(self, owner_email, group_id):
+ """Remove the given owner from the given group.
+
+ Args:
+ owner_email: The email address of a group owner.
+ group_id: The ID of the group (e.g. us-sales).
+
+ Returns:
+ A dict containing the result of the remove operation.
+ """
+ uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
+ return self._DeleteProperties(uri)
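Continuing the sketch above: IsMember and IsOwner wrap _IsExisted, so they return booleans instead of raising for entities that do not exist, which makes idempotent scripts straightforward.

if not groups.IsMember('liz@example.com', 'us-sales'):
  groups.AddMemberToGroup('liz@example.com', 'us-sales')

groups.AddOwnerToGroup('admin@example.com', 'us-sales')

for member in groups.RetrieveAllMembers('us-sales'):
  print member['memberId']  # key name assumed to mirror AddMemberToGroup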
diff --git a/python/gdata/apps/migration/__init__.py b/python/gdata/apps/migration/__init__.py
new file mode 100644
index 0000000..9892671
--- /dev/null
+++ b/python/gdata/apps/migration/__init__.py
@@ -0,0 +1,212 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains objects used with Google Apps."""
+
+__author__ = 'google-apps-apis@googlegroups.com'
+
+
+import atom
+import gdata
+
+
+# XML namespaces which are often used in Google Apps entity.
+APPS_NAMESPACE = 'http://schemas.google.com/apps/2006'
+APPS_TEMPLATE = '{http://schemas.google.com/apps/2006}%s'
+
+
+class Rfc822Msg(atom.AtomBase):
+ """The Migration rfc822Msg element."""
+
+ _tag = 'rfc822Msg'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['encoding'] = 'encoding'
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.encoding = 'base64'
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def Rfc822MsgFromString(xml_string):
+  """Parse in the Rfc822Msg from the XML definition."""
+
+ return atom.CreateClassFromXMLString(Rfc822Msg, xml_string)
+
+
+class MailItemProperty(atom.AtomBase):
+ """The Migration mailItemProperty element."""
+
+ _tag = 'mailItemProperty'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def MailItemPropertyFromString(xml_string):
+  """Parse in the MailItemProperty from the XML definition."""
+
+ return atom.CreateClassFromXMLString(MailItemProperty, xml_string)
+
+
+class Label(atom.AtomBase):
+ """The Migration label element."""
+
+ _tag = 'label'
+ _namespace = APPS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['labelName'] = 'label_name'
+
+ def __init__(self, label_name=None,
+ extension_elements=None, extension_attributes=None,
+ text=None):
+ self.label_name = label_name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def LabelFromString(xml_string):
+  """Parse in the Label from the XML definition."""
+
+ return atom.CreateClassFromXMLString(Label, xml_string)
+
+
+class MailEntry(gdata.GDataEntry):
+ """A Google Migration flavor of an Atom Entry."""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}rfc822Msg' % APPS_NAMESPACE] = ('rfc822_msg', Rfc822Msg)
+ _children['{%s}mailItemProperty' % APPS_NAMESPACE] = ('mail_item_property',
+ [MailItemProperty])
+ _children['{%s}label' % APPS_NAMESPACE] = ('label', [Label])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ rfc822_msg=None, mail_item_property=None, label=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated)
+ self.rfc822_msg = rfc822_msg
+ self.mail_item_property = mail_item_property
+ self.label = label
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def MailEntryFromString(xml_string):
+ """Parse in the MailEntry from the XML definition."""
+
+ return atom.CreateClassFromXMLString(MailEntry, xml_string)
+
+
+class BatchMailEntry(gdata.BatchEntry):
+ """A Google Migration flavor of an Atom Entry."""
+
+ _tag = gdata.BatchEntry._tag
+ _namespace = gdata.BatchEntry._namespace
+ _children = gdata.BatchEntry._children.copy()
+ _attributes = gdata.BatchEntry._attributes.copy()
+ _children['{%s}rfc822Msg' % APPS_NAMESPACE] = ('rfc822_msg', Rfc822Msg)
+ _children['{%s}mailItemProperty' % APPS_NAMESPACE] = ('mail_item_property',
+ [MailItemProperty])
+ _children['{%s}label' % APPS_NAMESPACE] = ('label', [Label])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ rfc822_msg=None, mail_item_property=None, label=None,
+ batch_operation=None, batch_id=None, batch_status=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.BatchEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ batch_operation=batch_operation,
+ batch_id=batch_id, batch_status=batch_status,
+ title=title, updated=updated)
+ self.rfc822_msg = rfc822_msg or None
+ self.mail_item_property = mail_item_property or []
+ self.label = label or []
+ self.extended_property = extended_property or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def BatchMailEntryFromString(xml_string):
+ """Parse in the BatchMailEntry from the XML definition."""
+
+ return atom.CreateClassFromXMLString(BatchMailEntry, xml_string)
+
+
+class BatchMailEventFeed(gdata.BatchFeed):
+ """A Migration event feed flavor of an Atom Feed."""
+
+ _tag = gdata.BatchFeed._tag
+ _namespace = gdata.BatchFeed._namespace
+ _children = gdata.BatchFeed._children.copy()
+ _attributes = gdata.BatchFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BatchMailEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, interrupted=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.BatchFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ interrupted=interrupted,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def BatchMailEventFeedFromString(xml_string):
+ """Parse in the BatchMailEventFeed from the XML definition."""
+
+ return atom.CreateClassFromXMLString(BatchMailEventFeed, xml_string)
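A short sketch of how these classes compose into a single migration entry; the base64 encoding mirrors what the service module in the next file does, and the property/label values are placeholders.

import base64
from gdata.apps import migration

raw_message = ('From: liz@example.com\r\n'
               'Subject: hello\r\n'
               '\r\n'
               'Message body.')  # placeholder RFC822 text

entry = migration.MailEntry()
entry.rfc822_msg = migration.Rfc822Msg(text=base64.b64encode(raw_message))
entry.mail_item_property = [migration.MailItemProperty(value='IS_UNREAD')]
entry.label = [migration.Label(label_name='Imported')]
# entry.ToString() should yield the Atom XML expected by the migration feed.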
diff --git a/python/gdata/apps/migration/service.py b/python/gdata/apps/migration/service.py
new file mode 100644
index 0000000..6319995
--- /dev/null
+++ b/python/gdata/apps/migration/service.py
@@ -0,0 +1,129 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the methods to import mail via Google Apps Email Migration API.
+
+ MigrationService: Provides methids to import mail.
+"""
+
+__author__ = 'google-apps-apis@googlegroups.com'
+
+
+import base64
+import gdata
+import gdata.apps.service
+import gdata.service
+from gdata.apps import migration
+
+
+API_VER = '2.0'
+
+
+class MigrationService(gdata.apps.service.AppsService):
+ """Client for the EMAPI migration service. Use either ImportMail to import
+ one message at a time, or AddBatchEntry and SubmitBatch to import a batch of
+ messages at a time.
+ """
+ def __init__(self, email=None, password=None, domain=None, source=None,
+ server='apps-apis.google.com', additional_headers=None):
+ gdata.apps.service.AppsService.__init__(
+ self, email=email, password=password, domain=domain, source=source,
+ server=server, additional_headers=additional_headers)
+ self.mail_batch = migration.BatchMailEventFeed()
+
+ def _BaseURL(self):
+ return '/a/feeds/migration/%s/%s' % (API_VER, self.domain)
+
+ def ImportMail(self, user_name, mail_message, mail_item_properties,
+ mail_labels):
+ """Import a single mail message.
+
+ Args:
+ user_name: The username to import messages to.
+ mail_message: An RFC822 format email message.
+ mail_item_properties: A list of Gmail properties to apply to the message.
+ mail_labels: A list of labels to apply to the message.
+
+ Returns:
+ A MailEntry representing the successfully imported message.
+
+ Raises:
+ AppsForYourDomainException: An error occurred importing the message.
+ """
+ uri = '%s/%s/mail' % (self._BaseURL(), user_name)
+
+ mail_entry = migration.MailEntry()
+ mail_entry.rfc822_msg = migration.Rfc822Msg(text=(base64.b64encode(
+ mail_message)))
+ mail_entry.rfc822_msg.encoding = 'base64'
+ mail_entry.mail_item_property = map(
+ lambda x: migration.MailItemProperty(value=x), mail_item_properties)
+ mail_entry.label = map(lambda x: migration.Label(label_name=x),
+ mail_labels)
+
+ try:
+ return migration.MailEntryFromString(str(self.Post(mail_entry, uri)))
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def AddBatchEntry(self, mail_message, mail_item_properties,
+ mail_labels):
+    """Add a message to the current batch that you will later submit.
+
+ Args:
+ mail_message: An RFC822 format email message.
+ mail_item_properties: A list of Gmail properties to apply to the message.
+ mail_labels: A list of labels to apply to the message.
+
+ Returns:
+ The length of the MailEntry representing the message.
+ """
+ mail_entry = migration.BatchMailEntry()
+ mail_entry.rfc822_msg = migration.Rfc822Msg(text=(base64.b64encode(
+ mail_message)))
+ mail_entry.rfc822_msg.encoding = 'base64'
+ mail_entry.mail_item_property = map(
+ lambda x: migration.MailItemProperty(value=x), mail_item_properties)
+ mail_entry.label = map(lambda x: migration.Label(label_name=x),
+ mail_labels)
+
+ self.mail_batch.AddBatchEntry(mail_entry)
+
+ return len(str(mail_entry))
+
+ def SubmitBatch(self, user_name):
+    """Send all the mail items you have added to the batch to the server.
+
+ Args:
+ user_name: The username to import messages to.
+
+ Returns:
+      An HTTPResponse from the web service call.
+
+ Raises:
+ AppsForYourDomainException: An error occurred importing the batch.
+ """
+ uri = '%s/%s/mail/batch' % (self._BaseURL(), user_name)
+
+ try:
+ self.result = self.Post(self.mail_batch, uri,
+ converter=migration.BatchMailEventFeedFromString)
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ self.mail_batch = migration.BatchMailEventFeed()
+
+ return self.result
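A batch-import sketch tying the pieces together; the constructor signature is taken from __init__ above, while ProgrammaticLogin and the credential/message values are assumptions and placeholders.

from gdata.apps.migration import service

migration_service = service.MigrationService(
    email='admin@example.com', password='secret', domain='example.com',
    source='example-migration-tool')
migration_service.ProgrammaticLogin()  # assumed inherited from AppsService

messages = ['From: a@example.com\r\n\r\nfirst message',
            'From: b@example.com\r\n\r\nsecond message']  # placeholders
for msg in messages:
  migration_service.AddBatchEntry(msg, ['IS_UNREAD'], ['Imported'])

result_feed = migration_service.SubmitBatch('liz')  # import into liz's mailbox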
diff --git a/python/gdata/apps/organization/__init__.py b/python/gdata/apps/organization/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/gdata/apps/organization/service.py b/python/gdata/apps/organization/service.py
new file mode 100644
index 0000000..763a6bc
--- /dev/null
+++ b/python/gdata/apps/organization/service.py
@@ -0,0 +1,297 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow Google Apps domain administrators to manage organization unit and organization user.
+
+ OrganizationService: Provides methods to manage organization unit and organization user.
+"""
+
+__author__ = 'Alexandre Vivien (alex@simplecode.fr)'
+
+
+import gdata.apps
+import gdata.apps.service
+import gdata.service
+
+
+API_VER = '2.0'
+CUSTOMER_BASE_URL = '/a/feeds/customer/2.0/customerId'
+BASE_UNIT_URL = '/a/feeds/orgunit/' + API_VER + '/%s'
+UNIT_URL = BASE_UNIT_URL + '/%s'
+UNIT_ALL_URL = BASE_UNIT_URL + '?get=all'
+UNIT_CHILD_URL = BASE_UNIT_URL + '?get=children&orgUnitPath=%s'
+BASE_USER_URL = '/a/feeds/orguser/' + API_VER + '/%s'
+USER_URL = BASE_USER_URL + '/%s'
+USER_ALL_URL = BASE_USER_URL + '?get=all'
+USER_CHILD_URL = BASE_USER_URL + '?get=children&orgUnitPath=%s'
+
+
+class OrganizationService(gdata.apps.service.PropertyService):
+ """Client for the Google Apps Organizations service."""
+
+ def _Bool2Str(self, b):
+ if b is None:
+ return None
+ return str(b is True).lower()
+
+ def RetrieveCustomerId(self):
+ """Retrieve the Customer ID for the account of the authenticated administrator making this request.
+
+ Args:
+ None.
+
+ Returns:
+ A dict containing the result of the retrieve operation.
+ """
+
+ uri = CUSTOMER_BASE_URL
+ return self._GetProperties(uri)
+
+ def CreateOrgUnit(self, customer_id, name, parent_org_unit_path='/', description='', block_inheritance=False):
+ """Create a Organization Unit.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ name: The simple organization unit text name, not the full path name.
+ parent_org_unit_path: The full path of the parental tree to this organization unit (default: '/').
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+ description: The human readable text description of the organization unit (optional).
+ block_inheritance: This parameter blocks policy setting inheritance
+ from organization units higher in the organization tree (default: False).
+
+ Returns:
+ A dict containing the result of the create operation.
+ """
+
+ uri = BASE_UNIT_URL % (customer_id)
+ properties = {}
+ properties['name'] = name
+ properties['parentOrgUnitPath'] = parent_org_unit_path
+ properties['description'] = description
+ properties['blockInheritance'] = self._Bool2Str(block_inheritance)
+ return self._PostProperties(uri, properties)
+
+ def UpdateOrgUnit(self, customer_id, org_unit_path, name=None, parent_org_unit_path=None,
+ description=None, block_inheritance=None):
+ """Update a Organization Unit.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+ name: The simple organization unit text name, not the full path name.
+ parent_org_unit_path: The full path of the parental tree to this organization unit.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+ description: The human readable text description of the organization unit.
+ block_inheritance: This parameter blocks policy setting inheritance
+ from organization units higher in the organization tree.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+
+ uri = UNIT_URL % (customer_id, org_unit_path)
+ properties = {}
+ if name:
+ properties['name'] = name
+ if parent_org_unit_path:
+ properties['parentOrgUnitPath'] = parent_org_unit_path
+ if description:
+ properties['description'] = description
+ if block_inheritance:
+ properties['blockInheritance'] = self._Bool2Str(block_inheritance)
+ return self._PutProperties(uri, properties)
+
+ def MoveUserToOrgUnit(self, customer_id, org_unit_path, users_to_move):
+ """Move a user to an Organization Unit.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+      users_to_move: A list of email addresses of the users to move. Note: You can move a maximum of 25 users at one time.
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+
+ uri = UNIT_URL % (customer_id, org_unit_path)
+ properties = {}
+ if users_to_move and isinstance(users_to_move, list):
+ properties['usersToMove'] = ', '.join(users_to_move)
+ return self._PutProperties(uri, properties)
+
+ def RetrieveOrgUnit(self, customer_id, org_unit_path):
+ """Retrieve a Orgunit based on its path.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+
+ Returns:
+ A dict containing the result of the retrieve operation.
+ """
+ uri = UNIT_URL % (customer_id, org_unit_path)
+ return self._GetProperties(uri)
+
+ def DeleteOrgUnit(self, customer_id, org_unit_path):
+ """Delete a Orgunit based on its path.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+
+ Returns:
+ A dict containing the result of the delete operation.
+ """
+ uri = UNIT_URL % (customer_id, org_unit_path)
+ return self._DeleteProperties(uri)
+
+ def RetrieveAllOrgUnits(self, customer_id):
+ """Retrieve all OrgUnits in the customer's domain.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = UNIT_ALL_URL % (customer_id)
+ return self._GetPropertiesList(uri)
+
+ def RetrievePageOfOrgUnits(self, customer_id, startKey=None):
+ """Retrieve one page of OrgUnits in the customer's domain.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ startKey: The key to continue for pagination through all OrgUnits.
+
+ Returns:
+ A feed object containing the result of the retrieve operation.
+ """
+ uri = UNIT_ALL_URL % (customer_id)
+ if startKey is not None:
+ uri += "&startKey=" + startKey
+ property_feed = self._GetPropertyFeed(uri)
+ return property_feed
+
+ def RetrieveSubOrgUnits(self, customer_id, org_unit_path):
+ """Retrieve all Sub-OrgUnits of the provided OrgUnit.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = UNIT_CHILD_URL % (customer_id, org_unit_path)
+ return self._GetPropertiesList(uri)
+
+ def RetrieveOrgUser(self, customer_id, user_email):
+ """Retrieve the OrgUnit of the user.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ user_email: The email address of the user.
+
+ Returns:
+ A dict containing the result of the retrieve operation.
+ """
+ uri = USER_URL % (customer_id, user_email)
+ return self._GetProperties(uri)
+
+ def UpdateOrgUser(self, customer_id, user_email, org_unit_path):
+ """Update the OrgUnit of a OrgUser.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ user_email: The email address of the user.
+ org_unit_path: The new organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+
+ Returns:
+ A dict containing the result of the update operation.
+ """
+
+ uri = USER_URL % (customer_id, user_email)
+ properties = {}
+ if org_unit_path:
+ properties['orgUnitPath'] = org_unit_path
+ return self._PutProperties(uri, properties)
+
+ def RetrieveAllOrgUsers(self, customer_id):
+ """Retrieve all OrgUsers in the customer's domain.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = USER_ALL_URL % (customer_id)
+ return self._GetPropertiesList(uri)
+
+ def RetrievePageOfOrgUsers(self, customer_id, startKey=None):
+ """Retrieve one page of OrgUsers in the customer's domain.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+      startKey: The key to continue for pagination through all OrgUsers.
+
+ Returns:
+ A feed object containing the result of the retrieve operation.
+ """
+ uri = USER_ALL_URL % (customer_id)
+ if startKey is not None:
+ uri += "&startKey=" + startKey
+ property_feed = self._GetPropertyFeed(uri)
+ return property_feed
+
+ def RetrieveOrgUnitUsers(self, customer_id, org_unit_path):
+ """Retrieve all OrgUsers of the provided OrgUnit.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+
+ Returns:
+ A list containing the result of the retrieve operation.
+ """
+ uri = USER_CHILD_URL % (customer_id, org_unit_path)
+ return self._GetPropertiesList(uri)
+
+ def RetrieveOrgUnitPageOfUsers(self, customer_id, org_unit_path, startKey=None):
+ """Retrieve one page of OrgUsers of the provided OrgUnit.
+
+ Args:
+ customer_id: The ID of the Google Apps customer.
+ org_unit_path: The organization's full path name.
+ Note: Each element of the path MUST be URL encoded (example: finance%2Forganization/suborganization)
+ startKey: The key to continue for pagination through all OrgUsers.
+
+ Returns:
+ A feed object containing the result of the retrieve operation.
+ """
+ uri = USER_CHILD_URL % (customer_id, org_unit_path)
+ if startKey is not None:
+ uri += "&startKey=" + startKey
+ property_feed = self._GetPropertyFeed(uri)
+ return property_feed
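+
+
+# --- Editorial usage sketch (not part of the original module) ---
+# Shows the typical call order for OrganizationService: fetch the customer ID
+# first, then reuse it for OrgUnit and OrgUser calls. The 'customerId' dict
+# key, the names and the addresses below are assumptions for illustration
+# only; remember that every path element must already be URL encoded.
+def _example_org_usage(service):
+  """Sketch: create an OrgUnit and move two users into it."""
+  customer_id = service.RetrieveCustomerId()['customerId']
+  service.CreateOrgUnit(customer_id, name='engineering',
+                        parent_org_unit_path='/',
+                        description='Engineering staff')
+  # MoveUserToOrgUnit accepts at most 25 addresses per call.
+  service.MoveUserToOrgUnit(customer_id, 'engineering',
+                            ['alice@example.com', 'bob@example.com'])
+  return service.RetrieveOrgUnitUsers(customer_id, 'engineering')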
diff --git a/python/gdata/apps/service.py b/python/gdata/apps/service.py
new file mode 100644
index 0000000..bc97484
--- /dev/null
+++ b/python/gdata/apps/service.py
@@ -0,0 +1,552 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007 SIOS Technology, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = 'tmatsuo@sios.com (Takashi MATSUO)'
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import urllib
+import gdata
+import atom.service
+import gdata.service
+import gdata.apps
+import atom
+
+API_VER="2.0"
+HTTP_OK=200
+
+UNKOWN_ERROR=1000
+USER_DELETED_RECENTLY=1100
+USER_SUSPENDED=1101
+DOMAIN_USER_LIMIT_EXCEEDED=1200
+DOMAIN_ALIAS_LIMIT_EXCEEDED=1201
+DOMAIN_SUSPENDED=1202
+DOMAIN_FEATURE_UNAVAILABLE=1203
+ENTITY_EXISTS=1300
+ENTITY_DOES_NOT_EXIST=1301
+ENTITY_NAME_IS_RESERVED=1302
+ENTITY_NAME_NOT_VALID=1303
+INVALID_GIVEN_NAME=1400
+INVALID_FAMILY_NAME=1401
+INVALID_PASSWORD=1402
+INVALID_USERNAME=1403
+INVALID_HASH_FUNCTION_NAME=1404
+INVALID_HASH_DIGGEST_LENGTH=1405
+INVALID_EMAIL_ADDRESS=1406
+INVALID_QUERY_PARAMETER_VALUE=1407
+TOO_MANY_RECIPIENTS_ON_EMAIL_LIST=1500
+
+DEFAULT_QUOTA_LIMIT='2048'
+
+
+class Error(Exception):
+ pass
+
+
+class AppsForYourDomainException(Error):
+
+ def __init__(self, response):
+
+ Error.__init__(self, response)
+ try:
+ self.element_tree = ElementTree.fromstring(response['body'])
+ self.error_code = int(self.element_tree[0].attrib['errorCode'])
+ self.reason = self.element_tree[0].attrib['reason']
+ self.invalidInput = self.element_tree[0].attrib['invalidInput']
+ except:
+ self.error_code = UNKOWN_ERROR
+
+
+class AppsService(gdata.service.GDataService):
+ """Client for the Google Apps Provisioning service."""
+
+ def __init__(self, email=None, password=None, domain=None, source=None,
+ server='apps-apis.google.com', additional_headers=None,
+ **kwargs):
+ """Creates a client for the Google Apps Provisioning service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ domain: string (optional) The Google Apps domain name.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'apps-apis.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='apps', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+ self.ssl = True
+ self.port = 443
+ self.domain = domain
+
+ def _baseURL(self):
+ return "/a/feeds/%s" % self.domain
+
+ def AddAllElementsFromAllPages(self, link_finder, func):
+ """retrieve all pages and add all elements"""
+ next = link_finder.GetNextLink()
+ while next is not None:
+ next_feed = self.Get(next.href, converter=func)
+ for a_entry in next_feed.entry:
+ link_finder.entry.append(a_entry)
+ next = next_feed.GetNextLink()
+ return link_finder
+
+ def RetrievePageOfEmailLists(self, start_email_list_name=None,
+ num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY,
+ backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve one page of email list"""
+ uri = "%s/emailList/%s" % (self._baseURL(), API_VER)
+ if start_email_list_name is not None:
+ uri += "?startEmailListName=%s" % start_email_list_name
+ try:
+ return gdata.apps.EmailListFeedFromString(str(self.GetWithRetries(
+ uri, num_retries=num_retries, delay=delay, backoff=backoff)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def GetGeneratorForAllEmailLists(
+ self, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY, backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve a generator for all emaillists in this domain."""
+ first_page = self.RetrievePageOfEmailLists(num_retries=num_retries,
+ delay=delay,
+ backoff=backoff)
+ return self.GetGeneratorFromLinkFinder(
+ first_page, gdata.apps.EmailListRecipientFeedFromString,
+ num_retries=num_retries, delay=delay, backoff=backoff)
+
+ def RetrieveAllEmailLists(self):
+ """Retrieve all email list of a domain."""
+
+ ret = self.RetrievePageOfEmailLists()
+ # pagination
+ return self.AddAllElementsFromAllPages(
+ ret, gdata.apps.EmailListFeedFromString)
+
+ def RetrieveEmailList(self, list_name):
+ """Retreive a single email list by the list's name."""
+
+ uri = "%s/emailList/%s/%s" % (
+ self._baseURL(), API_VER, list_name)
+ try:
+ return self.Get(uri, converter=gdata.apps.EmailListEntryFromString)
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def RetrieveEmailLists(self, recipient):
+ """Retrieve All Email List Subscriptions for an Email Address."""
+
+ uri = "%s/emailList/%s?recipient=%s" % (
+ self._baseURL(), API_VER, recipient)
+ try:
+ ret = gdata.apps.EmailListFeedFromString(str(self.Get(uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ # pagination
+ return self.AddAllElementsFromAllPages(
+ ret, gdata.apps.EmailListFeedFromString)
+
+ def RemoveRecipientFromEmailList(self, recipient, list_name):
+ """Remove recipient from email list."""
+
+ uri = "%s/emailList/%s/%s/recipient/%s" % (
+ self._baseURL(), API_VER, list_name, recipient)
+ try:
+ self.Delete(uri)
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def RetrievePageOfRecipients(self, list_name, start_recipient=None,
+ num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY,
+ backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve one page of recipient of an email list. """
+
+ uri = "%s/emailList/%s/%s/recipient" % (
+ self._baseURL(), API_VER, list_name)
+
+ if start_recipient is not None:
+ uri += "?startRecipient=%s" % start_recipient
+ try:
+ return gdata.apps.EmailListRecipientFeedFromString(str(
+ self.GetWithRetries(
+ uri, num_retries=num_retries, delay=delay, backoff=backoff)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def GetGeneratorForAllRecipients(
+ self, list_name, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY, backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve a generator for all recipients of a particular emaillist."""
+ first_page = self.RetrievePageOfRecipients(list_name,
+ num_retries=num_retries,
+ delay=delay,
+ backoff=backoff)
+ return self.GetGeneratorFromLinkFinder(
+ first_page, gdata.apps.EmailListRecipientFeedFromString,
+ num_retries=num_retries, delay=delay, backoff=backoff)
+
+ def RetrieveAllRecipients(self, list_name):
+ """Retrieve all recipient of an email list."""
+
+ ret = self.RetrievePageOfRecipients(list_name)
+ # pagination
+ return self.AddAllElementsFromAllPages(
+ ret, gdata.apps.EmailListRecipientFeedFromString)
+
+ def AddRecipientToEmailList(self, recipient, list_name):
+ """Add a recipient to a email list."""
+
+ uri = "%s/emailList/%s/%s/recipient" % (
+ self._baseURL(), API_VER, list_name)
+ recipient_entry = gdata.apps.EmailListRecipientEntry()
+ recipient_entry.who = gdata.apps.Who(email=recipient)
+
+ try:
+ return gdata.apps.EmailListRecipientEntryFromString(
+ str(self.Post(recipient_entry, uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def DeleteEmailList(self, list_name):
+ """Delete a email list"""
+
+ uri = "%s/emailList/%s/%s" % (self._baseURL(), API_VER, list_name)
+ try:
+ self.Delete(uri)
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def CreateEmailList(self, list_name):
+ """Create a email list. """
+
+ uri = "%s/emailList/%s" % (self._baseURL(), API_VER)
+ email_list_entry = gdata.apps.EmailListEntry()
+ email_list_entry.email_list = gdata.apps.EmailList(name=list_name)
+ try:
+ return gdata.apps.EmailListEntryFromString(
+ str(self.Post(email_list_entry, uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def DeleteNickname(self, nickname):
+ """Delete a nickname"""
+
+ uri = "%s/nickname/%s/%s" % (self._baseURL(), API_VER, nickname)
+ try:
+ self.Delete(uri)
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def RetrievePageOfNicknames(self, start_nickname=None,
+ num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY,
+ backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve one page of nicknames in the domain"""
+
+ uri = "%s/nickname/%s" % (self._baseURL(), API_VER)
+ if start_nickname is not None:
+ uri += "?startNickname=%s" % start_nickname
+ try:
+ return gdata.apps.NicknameFeedFromString(str(self.GetWithRetries(
+ uri, num_retries=num_retries, delay=delay, backoff=backoff)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def GetGeneratorForAllNicknames(
+ self, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY, backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve a generator for all nicknames in this domain."""
+ first_page = self.RetrievePageOfNicknames(num_retries=num_retries,
+ delay=delay,
+ backoff=backoff)
+ return self.GetGeneratorFromLinkFinder(
+ first_page, gdata.apps.NicknameFeedFromString, num_retries=num_retries,
+ delay=delay, backoff=backoff)
+
+ def RetrieveAllNicknames(self):
+ """Retrieve all nicknames in the domain"""
+
+ ret = self.RetrievePageOfNicknames()
+ # pagination
+ return self.AddAllElementsFromAllPages(
+ ret, gdata.apps.NicknameFeedFromString)
+
+ def GetGeneratorForAllNicknamesOfAUser(
+ self, user_name, num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY, backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve a generator for all nicknames of a particular user."""
+ uri = "%s/nickname/%s?username=%s" % (self._baseURL(), API_VER, user_name)
+ try:
+ first_page = gdata.apps.NicknameFeedFromString(str(self.GetWithRetries(
+ uri, num_retries=num_retries, delay=delay, backoff=backoff)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+ return self.GetGeneratorFromLinkFinder(
+ first_page, gdata.apps.NicknameFeedFromString, num_retries=num_retries,
+ delay=delay, backoff=backoff)
+
+ def RetrieveNicknames(self, user_name):
+ """Retrieve nicknames of the user"""
+
+ uri = "%s/nickname/%s?username=%s" % (self._baseURL(), API_VER, user_name)
+ try:
+ ret = gdata.apps.NicknameFeedFromString(str(self.Get(uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ # pagination
+ return self.AddAllElementsFromAllPages(
+ ret, gdata.apps.NicknameFeedFromString)
+
+ def RetrieveNickname(self, nickname):
+ """Retrieve a nickname.
+
+ Args:
+ nickname: string The nickname to retrieve
+
+ Returns:
+ gdata.apps.NicknameEntry
+ """
+
+ uri = "%s/nickname/%s/%s" % (self._baseURL(), API_VER, nickname)
+ try:
+ return gdata.apps.NicknameEntryFromString(str(self.Get(uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def CreateNickname(self, user_name, nickname):
+ """Create a nickname"""
+
+ uri = "%s/nickname/%s" % (self._baseURL(), API_VER)
+ nickname_entry = gdata.apps.NicknameEntry()
+ nickname_entry.login = gdata.apps.Login(user_name=user_name)
+ nickname_entry.nickname = gdata.apps.Nickname(name=nickname)
+
+ try:
+ return gdata.apps.NicknameEntryFromString(
+ str(self.Post(nickname_entry, uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def DeleteUser(self, user_name):
+ """Delete a user account"""
+
+ uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
+ try:
+ return self.Delete(uri)
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def UpdateUser(self, user_name, user_entry):
+ """Update a user account."""
+
+ uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
+ try:
+ return gdata.apps.UserEntryFromString(str(self.Put(user_entry, uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def CreateUser(self, user_name, family_name, given_name, password,
+ suspended='false', quota_limit=None,
+ password_hash_function=None,
+ change_password=None):
+ """Create a user account. """
+
+ uri = "%s/user/%s" % (self._baseURL(), API_VER)
+ user_entry = gdata.apps.UserEntry()
+ user_entry.login = gdata.apps.Login(
+ user_name=user_name, password=password, suspended=suspended,
+ hash_function_name=password_hash_function,
+ change_password=change_password)
+ user_entry.name = gdata.apps.Name(family_name=family_name,
+ given_name=given_name)
+ if quota_limit is not None:
+ user_entry.quota = gdata.apps.Quota(limit=str(quota_limit))
+
+ try:
+ return gdata.apps.UserEntryFromString(str(self.Post(user_entry, uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def SuspendUser(self, user_name):
+ user_entry = self.RetrieveUser(user_name)
+ if user_entry.login.suspended != 'true':
+ user_entry.login.suspended = 'true'
+ user_entry = self.UpdateUser(user_name, user_entry)
+ return user_entry
+
+ def RestoreUser(self, user_name):
+ user_entry = self.RetrieveUser(user_name)
+ if user_entry.login.suspended != 'false':
+ user_entry.login.suspended = 'false'
+ user_entry = self.UpdateUser(user_name, user_entry)
+ return user_entry
+
+ def RetrieveUser(self, user_name):
+ """Retrieve an user account.
+
+ Args:
+ user_name: string The user name to retrieve
+
+ Returns:
+ gdata.apps.UserEntry
+ """
+
+ uri = "%s/user/%s/%s" % (self._baseURL(), API_VER, user_name)
+ try:
+ return gdata.apps.UserEntryFromString(str(self.Get(uri)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def RetrievePageOfUsers(self, start_username=None,
+ num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY,
+ backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve one page of users in this domain."""
+
+ uri = "%s/user/%s" % (self._baseURL(), API_VER)
+ if start_username is not None:
+ uri += "?startUsername=%s" % start_username
+ try:
+ return gdata.apps.UserFeedFromString(str(self.GetWithRetries(
+ uri, num_retries=num_retries, delay=delay, backoff=backoff)))
+ except gdata.service.RequestError, e:
+ raise AppsForYourDomainException(e.args[0])
+
+ def GetGeneratorForAllUsers(self,
+ num_retries=gdata.service.DEFAULT_NUM_RETRIES,
+ delay=gdata.service.DEFAULT_DELAY,
+ backoff=gdata.service.DEFAULT_BACKOFF):
+ """Retrieve a generator for all users in this domain."""
+ first_page = self.RetrievePageOfUsers(num_retries=num_retries, delay=delay,
+ backoff=backoff)
+ return self.GetGeneratorFromLinkFinder(
+ first_page, gdata.apps.UserFeedFromString, num_retries=num_retries,
+ delay=delay, backoff=backoff)
+
+ def RetrieveAllUsers(self):
+ """Retrieve all users in this domain. OBSOLETE"""
+
+ ret = self.RetrievePageOfUsers()
+ # pagination
+ return self.AddAllElementsFromAllPages(
+ ret, gdata.apps.UserFeedFromString)
+
+
+class PropertyService(gdata.service.GDataService):
+ """Client for the Google Apps Property service."""
+
+ def __init__(self, email=None, password=None, domain=None, source=None,
+ server='apps-apis.google.com', additional_headers=None):
+ gdata.service.GDataService.__init__(self, email=email, password=password,
+ service='apps', source=source,
+ server=server,
+ additional_headers=additional_headers)
+ self.ssl = True
+ self.port = 443
+ self.domain = domain
+
+ def AddAllElementsFromAllPages(self, link_finder, func):
+ """retrieve all pages and add all elements"""
+ next = link_finder.GetNextLink()
+ while next is not None:
+ next_feed = self.Get(next.href, converter=func)
+ for a_entry in next_feed.entry:
+ link_finder.entry.append(a_entry)
+ next = next_feed.GetNextLink()
+ return link_finder
+
+ def _GetPropertyEntry(self, properties):
+ property_entry = gdata.apps.PropertyEntry()
+ property = []
+ for name, value in properties.iteritems():
+ if name is not None and value is not None:
+ property.append(gdata.apps.Property(name=name, value=value))
+ property_entry.property = property
+ return property_entry
+
+ def _PropertyEntry2Dict(self, property_entry):
+ properties = {}
+ for i, property in enumerate(property_entry.property):
+ properties[property.name] = property.value
+ return properties
+
+ def _GetPropertyFeed(self, uri):
+ try:
+ return gdata.apps.PropertyFeedFromString(str(self.Get(uri)))
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def _GetPropertiesList(self, uri):
+ property_feed = self._GetPropertyFeed(uri)
+ # pagination
+ property_feed = self.AddAllElementsFromAllPages(
+ property_feed, gdata.apps.PropertyFeedFromString)
+ properties_list = []
+ for property_entry in property_feed.entry:
+ properties_list.append(self._PropertyEntry2Dict(property_entry))
+ return properties_list
+
+ def _GetProperties(self, uri):
+ try:
+ return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
+ str(self.Get(uri))))
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def _PostProperties(self, uri, properties):
+ property_entry = self._GetPropertyEntry(properties)
+ try:
+ return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
+ str(self.Post(property_entry, uri))))
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def _PutProperties(self, uri, properties):
+ property_entry = self._GetPropertyEntry(properties)
+ try:
+ return self._PropertyEntry2Dict(gdata.apps.PropertyEntryFromString(
+ str(self.Put(property_entry, uri))))
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+ def _DeleteProperties(self, uri):
+ try:
+ self.Delete(uri)
+ except gdata.service.RequestError, e:
+ raise gdata.apps.service.AppsForYourDomainException(e.args[0])
+
+
+def _bool2str(b):
+ if b is None:
+ return None
+ return str(b is True).lower()
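+
+
+# --- Editorial usage sketch (not part of the original module) ---
+# A minimal example of provisioning a user with AppsService and then paging
+# through every user via the generator helper defined above. Names and the
+# password are placeholders.
+def _example_provisioning_usage(service):
+  """Sketch: create one account, then collect every login name."""
+  service.CreateUser(user_name='jdoe', family_name='Doe', given_name='Jane',
+                     password='new-password',
+                     quota_limit=DEFAULT_QUOTA_LIMIT)
+  user_names = []
+  # GetGeneratorForAllUsers yields one UserFeed per page of results.
+  for feed in service.GetGeneratorForAllUsers():
+    for entry in feed.entry:
+      user_names.append(entry.login.user_name)
+  return user_names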
diff --git a/python/gdata/apps_property.py b/python/gdata/apps_property.py
new file mode 100644
index 0000000..5afa1f3
--- /dev/null
+++ b/python/gdata/apps_property.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+"""Provides a base class to represent property elements in feeds.
+
+This module is used for version 2 of the Google Data APIs. The primary class
+in this module is AppsProperty.
+"""
+
+
+__author__ = 'Vic Fryzel '
+
+
+import atom.core
+import gdata.apps
+
+
+class AppsProperty(atom.core.XmlElement):
+ """Represents an element in a feed."""
+ _qname = gdata.apps.APPS_TEMPLATE % 'property'
+ name = 'name'
+ value = 'value'
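+
+
+# --- Editorial usage sketch (not part of the original module) ---
+# AppsProperty is a plain name/value element; handlers for version 2 feeds
+# typically just read the two attributes, for example:
+def _example_properties_to_dict(apps_properties):
+  """Sketch: turn a list of AppsProperty elements into a dict."""
+  return dict((prop.name, prop.value) for prop in apps_properties)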
diff --git a/python/gdata/auth.py b/python/gdata/auth.py
new file mode 100644
index 0000000..139c6cd
--- /dev/null
+++ b/python/gdata/auth.py
@@ -0,0 +1,952 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007 - 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import cgi
+import math
+import random
+import re
+import time
+import types
+import urllib
+import atom.http_interface
+import atom.token_store
+import atom.url
+import gdata.oauth as oauth
+import gdata.oauth.rsa as oauth_rsa
+import gdata.tlslite.utils.keyfactory as keyfactory
+import gdata.tlslite.utils.cryptomath as cryptomath
+
+import gdata.gauth
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+PROGRAMMATIC_AUTH_LABEL = 'GoogleLogin auth='
+AUTHSUB_AUTH_LABEL = 'AuthSub token='
+
+
+"""This module provides functions and objects used with Google authentication.
+
+Details on Google authorization mechanisms used with the Google Data APIs can
+be found here:
+http://code.google.com/apis/gdata/auth.html
+http://code.google.com/apis/accounts/
+
+The essential functions are the following.
+Related to ClientLogin:
+ generate_client_login_request_body: Constructs the body of an HTTP request to
+ obtain a ClientLogin token for a specific
+ service.
+ extract_client_login_token: Creates a ClientLoginToken with the token from a
+ success response to a ClientLogin request.
+ get_captcha_challenge: If the server responded to the ClientLogin request
+ with a CAPTCHA challenge, this method extracts the
+ CAPTCHA URL and identifying CAPTCHA token.
+
+Related to AuthSub:
+ generate_auth_sub_url: Constructs a full URL for an AuthSub request. The
+ user's browser must be sent to this Google Accounts
+ URL and redirected back to the app to obtain the
+ AuthSub token.
+ extract_auth_sub_token_from_url: Once the user's browser has been
+ redirected back to the web app, use this
+ function to create an AuthSubToken with
+ the correct authorization token and scope.
+ token_from_http_body: Extracts the AuthSubToken value string from the
+ server's response to an AuthSub session token upgrade
+ request.
+"""
+
+def generate_client_login_request_body(email, password, service, source,
+ account_type='HOSTED_OR_GOOGLE', captcha_token=None,
+ captcha_response=None):
+ """Creates the body of the autentication request
+
+ See http://code.google.com/apis/accounts/AuthForInstalledApps.html#Request
+ for more details.
+
+ Args:
+ email: str
+ password: str
+ service: str
+ source: str
+    account_type: str (optional) Default is 'HOSTED_OR_GOOGLE'; other valid
+ values are 'GOOGLE' and 'HOSTED'
+ captcha_token: str (optional)
+ captcha_response: str (optional)
+
+ Returns:
+ The HTTP body to send in a request for a client login token.
+ """
+ return gdata.gauth.generate_client_login_request_body(email, password,
+ service, source, account_type, captcha_token, captcha_response)
+
+
+GenerateClientLoginRequestBody = generate_client_login_request_body
+
+
+def GenerateClientLoginAuthToken(http_body):
+ """Returns the token value to use in Authorization headers.
+
+ Reads the token from the server's response to a Client Login request and
+ creates header value to use in requests.
+
+ Args:
+ http_body: str The body of the server's HTTP response to a Client Login
+ request
+
+ Returns:
+ The value half of an Authorization header.
+ """
+ token = get_client_login_token(http_body)
+ if token:
+ return 'GoogleLogin auth=%s' % token
+ return None
+
+
+def get_client_login_token(http_body):
+ """Returns the token value for a ClientLoginToken.
+
+ Reads the token from the server's response to a Client Login request and
+ creates the token value string to use in requests.
+
+ Args:
+ http_body: str The body of the server's HTTP response to a Client Login
+ request
+
+ Returns:
+ The token value string for a ClientLoginToken.
+ """
+ return gdata.gauth.get_client_login_token_string(http_body)
+
+
+def extract_client_login_token(http_body, scopes):
+ """Parses the server's response and returns a ClientLoginToken.
+
+ Args:
+ http_body: str The body of the server's HTTP response to a Client Login
+ request. It is assumed that the login request was successful.
+ scopes: list containing atom.url.Urls or strs. The scopes list contains
+ all of the partial URLs under which the client login token is
+ valid. For example, if scopes contains ['http://example.com/foo']
+ then the client login token would be valid for
+ http://example.com/foo/bar/baz
+
+ Returns:
+ A ClientLoginToken which is valid for the specified scopes.
+ """
+ token_string = get_client_login_token(http_body)
+ token = ClientLoginToken(scopes=scopes)
+ token.set_token_string(token_string)
+ return token
+
+
+def get_captcha_challenge(http_body,
+ captcha_base_url='http://www.google.com/accounts/'):
+ """Returns the URL and token for a CAPTCHA challenge issued by the server.
+
+ Args:
+ http_body: str The body of the HTTP response from the server which
+ contains the CAPTCHA challenge.
+ captcha_base_url: str This function returns a full URL for viewing the
+ challenge image which is built from the server's response. This
+ base_url is used as the beginning of the URL because the server
+ only provides the end of the URL. For example the server provides
+ 'Captcha?ctoken=Hi...N' and the URL for the image is
+ 'http://www.google.com/accounts/Captcha?ctoken=Hi...N'
+
+ Returns:
+    A dictionary containing the information needed to respond to the CAPTCHA
+ challenge, the image URL and the ID token of the challenge. The
+ dictionary is in the form:
+ {'token': string identifying the CAPTCHA image,
+ 'url': string containing the URL of the image}
+ Returns None if there was no CAPTCHA challenge in the response.
+ """
+ return gdata.gauth.get_captcha_challenge(http_body, captcha_base_url)
+
+
+GetCaptchaChallenge = get_captcha_challenge
+
+
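+# --- Editorial sketch (not part of the original module) ---
+# Ties the ClientLogin helpers above together. The application POSTs the
+# generated body to the ClientLogin endpoint itself (transport not shown);
+# the service name, source and scope below are placeholder values.
+def _example_client_login_body(email, password):
+  """Sketch: request body for a Google Apps ClientLogin call."""
+  return generate_client_login_request_body(
+      email, password, service='apps', source='example-app')
+
+
+def _example_client_login_response(response_body):
+  """Sketch: turn a ClientLogin response into a token or a CAPTCHA challenge."""
+  challenge = get_captcha_challenge(response_body)
+  if challenge:
+    # Caller must display challenge['url'] and retry with the CAPTCHA answer.
+    return challenge
+  return extract_client_login_token(
+      response_body, scopes=['https://apps-apis.google.com/a/feeds/'])
+
+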
+def GenerateOAuthRequestTokenUrl(
+ oauth_input_params, scopes,
+ request_token_url='https://www.google.com/accounts/OAuthGetRequestToken',
+ extra_parameters=None):
+ """Generate a URL at which a request for OAuth request token is to be sent.
+
+ Args:
+ oauth_input_params: OAuthInputParams OAuth input parameters.
+ scopes: list of strings The URLs of the services to be accessed.
+ request_token_url: string The beginning of the request token URL. This is
+ normally 'https://www.google.com/accounts/OAuthGetRequestToken' or
+ '/accounts/OAuthGetRequestToken'
+ extra_parameters: dict (optional) key-value pairs as any additional
+ parameters to be included in the URL and signature while making a
+ request for fetching an OAuth request token. All the OAuth parameters
+ are added by default. But if provided through this argument, any
+ default parameters will be overwritten. For e.g. a default parameter
+ oauth_version 1.0 can be overwritten if
+ extra_parameters = {'oauth_version': '2.0'}
+
+ Returns:
+ atom.url.Url OAuth request token URL.
+ """
+ scopes_string = ' '.join([str(scope) for scope in scopes])
+ parameters = {'scope': scopes_string}
+ if extra_parameters:
+ parameters.update(extra_parameters)
+ oauth_request = oauth.OAuthRequest.from_consumer_and_token(
+ oauth_input_params.GetConsumer(), http_url=request_token_url,
+ parameters=parameters)
+ oauth_request.sign_request(oauth_input_params.GetSignatureMethod(),
+ oauth_input_params.GetConsumer(), None)
+ return atom.url.parse_url(oauth_request.to_url())
+
+
+def GenerateOAuthAuthorizationUrl(
+ request_token,
+ authorization_url='https://www.google.com/accounts/OAuthAuthorizeToken',
+ callback_url=None, extra_params=None,
+ include_scopes_in_callback=False, scopes_param_prefix='oauth_token_scope'):
+ """Generates URL at which user will login to authorize the request token.
+
+ Args:
+ request_token: gdata.auth.OAuthToken OAuth request token.
+ authorization_url: string The beginning of the authorization URL. This is
+ normally 'https://www.google.com/accounts/OAuthAuthorizeToken' or
+ '/accounts/OAuthAuthorizeToken'
+ callback_url: string (optional) The URL user will be sent to after
+ logging in and granting access.
+ extra_params: dict (optional) Additional parameters to be sent.
+ include_scopes_in_callback: Boolean (default=False) if set to True, and
+ if 'callback_url' is present, the 'callback_url' will be modified to
+ include the scope(s) from the request token as a URL parameter. The
+ key for the 'callback' URL's scope parameter will be
+ OAUTH_SCOPE_URL_PARAM_NAME. The benefit of including the scope URL as
+ a parameter to the 'callback' URL, is that the page which receives
+ the OAuth token will be able to tell which URLs the token grants
+ access to.
+ scopes_param_prefix: string (default='oauth_token_scope') The URL
+ parameter key which maps to the list of valid scopes for the token.
+ This URL parameter will be included in the callback URL along with
+ the scopes of the token as value if include_scopes_in_callback=True.
+
+ Returns:
+ atom.url.Url OAuth authorization URL.
+ """
+ scopes = request_token.scopes
+ if isinstance(scopes, list):
+ scopes = ' '.join(scopes)
+ if include_scopes_in_callback and callback_url:
+ if callback_url.find('?') > -1:
+ callback_url += '&'
+ else:
+ callback_url += '?'
+ callback_url += urllib.urlencode({scopes_param_prefix:scopes})
+ oauth_token = oauth.OAuthToken(request_token.key, request_token.secret)
+ oauth_request = oauth.OAuthRequest.from_token_and_callback(
+ token=oauth_token, callback=callback_url,
+ http_url=authorization_url, parameters=extra_params)
+ return atom.url.parse_url(oauth_request.to_url())
+
+
+def GenerateOAuthAccessTokenUrl(
+ authorized_request_token,
+ oauth_input_params,
+ access_token_url='https://www.google.com/accounts/OAuthGetAccessToken',
+ oauth_version='1.0',
+ oauth_verifier=None):
+ """Generates URL at which user will login to authorize the request token.
+
+ Args:
+ authorized_request_token: gdata.auth.OAuthToken OAuth authorized request
+ token.
+ oauth_input_params: OAuthInputParams OAuth input parameters.
+ access_token_url: string The beginning of the authorization URL. This is
+ normally 'https://www.google.com/accounts/OAuthGetAccessToken' or
+ '/accounts/OAuthGetAccessToken'
+ oauth_version: str (default='1.0') oauth_version parameter.
+ oauth_verifier: str (optional) If present, it is assumed that the client
+ will use the OAuth v1.0a protocol which includes passing the
+ oauth_verifier (as returned by the SP) in the access token step.
+
+ Returns:
+ atom.url.Url OAuth access token URL.
+ """
+ oauth_token = oauth.OAuthToken(authorized_request_token.key,
+ authorized_request_token.secret)
+ parameters = {'oauth_version': oauth_version}
+ if oauth_verifier is not None:
+ parameters['oauth_verifier'] = oauth_verifier
+ oauth_request = oauth.OAuthRequest.from_consumer_and_token(
+ oauth_input_params.GetConsumer(), token=oauth_token,
+ http_url=access_token_url, parameters=parameters)
+ oauth_request.sign_request(oauth_input_params.GetSignatureMethod(),
+ oauth_input_params.GetConsumer(), oauth_token)
+ return atom.url.parse_url(oauth_request.to_url())
+
+
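+# --- Editorial sketch (not part of the original module) ---
+# Strings the three OAuth URL helpers above into the usual three-legged flow.
+# The callback URL and scope are placeholders, and the HTTP fetches of the
+# request and access tokens are left to the caller.
+def _example_three_legged_oauth_urls(oauth_input_params, request_token,
+                                     authorized_request_token):
+  """Sketch: request-token, authorization and access-token URLs."""
+  scopes = ['https://apps-apis.google.com/a/feeds/']
+  request_url = GenerateOAuthRequestTokenUrl(oauth_input_params, scopes)
+  authorize_url = GenerateOAuthAuthorizationUrl(
+      request_token, callback_url='http://www.example.com/oauth_callback')
+  access_url = GenerateOAuthAccessTokenUrl(authorized_request_token,
+                                           oauth_input_params)
+  return request_url, authorize_url, access_url
+
+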
+def GenerateAuthSubUrl(next, scope, secure=False, session=True,
+ request_url='https://www.google.com/accounts/AuthSubRequest',
+ domain='default'):
+ """Generate a URL at which the user will login and be redirected back.
+
+ Users enter their credentials on a Google login page and a token is sent
+ to the URL specified in next. See documentation for AuthSub login at:
+ http://code.google.com/apis/accounts/AuthForWebApps.html
+
+ Args:
+ request_url: str The beginning of the request URL. This is normally
+ 'http://www.google.com/accounts/AuthSubRequest' or
+ '/accounts/AuthSubRequest'
+ next: string The URL user will be sent to after logging in.
+ scope: string The URL of the service to be accessed.
+ secure: boolean (optional) Determines whether or not the issued token
+ is a secure token.
+ session: boolean (optional) Determines whether or not the issued token
+ can be upgraded to a session token.
+ domain: str (optional) The Google Apps domain for this account. If this
+ is not a Google Apps account, use 'default' which is the default
+ value.
+ """
+  # Translate True/False values for parameters into numeric values accepted
+ # by the AuthSub service.
+ if secure:
+ secure = 1
+ else:
+ secure = 0
+
+ if session:
+ session = 1
+ else:
+ session = 0
+
+ request_params = urllib.urlencode({'next': next, 'scope': scope,
+ 'secure': secure, 'session': session,
+ 'hd': domain})
+ if request_url.find('?') == -1:
+ return '%s?%s' % (request_url, request_params)
+ else:
+    # The request URL already contained URL parameters, so we should add
+    # the parameters using the & separator.
+ return '%s&%s' % (request_url, request_params)
+
+
+def generate_auth_sub_url(next, scopes, secure=False, session=True,
+ request_url='https://www.google.com/accounts/AuthSubRequest',
+ domain='default', scopes_param_prefix='auth_sub_scopes'):
+ """Constructs a URL string for requesting a multiscope AuthSub token.
+
+  The generated URL adds a parameter to the 'next' URL that passes along the
+  requested scopes. When the Google Accounts page
+  redirects the browser to the 'next' URL, it appends the single-use
+ AuthSub token value to the URL as a URL parameter with the key 'token'.
+ However, the information about which scopes were requested is not
+ included by Google Accounts. This method adds the scopes to the next
+ URL before making the request so that the redirect will be sent to
+ a page, and both the token value and the list of scopes can be
+ extracted from the request URL.
+
+ Args:
+ next: atom.url.URL or string The URL user will be sent to after
+ authorizing this web application to access their data.
+    scopes: list containing strings The URLs of the services to be accessed.
+ secure: boolean (optional) Determines whether or not the issued token
+ is a secure token.
+ session: boolean (optional) Determines whether or not the issued token
+ can be upgraded to a session token.
+ request_url: atom.url.Url or str The beginning of the request URL. This
+ is normally 'http://www.google.com/accounts/AuthSubRequest' or
+ '/accounts/AuthSubRequest'
+ domain: The domain which the account is part of. This is used for Google
+ Apps accounts, the default value is 'default' which means that the
+ requested account is a Google Account (@gmail.com for example)
+ scopes_param_prefix: str (optional) The requested scopes are added as a
+ URL parameter to the next URL so that the page at the 'next' URL can
+ extract the token value and the valid scopes from the URL. The key
+ for the URL parameter defaults to 'auth_sub_scopes'
+
+ Returns:
+ An atom.url.Url which the user's browser should be directed to in order
+ to authorize this application to access their information.
+ """
+ if isinstance(next, (str, unicode)):
+ next = atom.url.parse_url(next)
+ scopes_string = ' '.join([str(scope) for scope in scopes])
+ next.params[scopes_param_prefix] = scopes_string
+
+ if isinstance(request_url, (str, unicode)):
+ request_url = atom.url.parse_url(request_url)
+ request_url.params['next'] = str(next)
+ request_url.params['scope'] = scopes_string
+ if session:
+ request_url.params['session'] = 1
+ else:
+ request_url.params['session'] = 0
+ if secure:
+ request_url.params['secure'] = 1
+ else:
+ request_url.params['secure'] = 0
+ request_url.params['hd'] = domain
+ return request_url
+
+
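+# --- Editorial sketch (not part of the original module) ---
+# Builds a multiscope AuthSub request URL with generate_auth_sub_url above.
+# The 'next' URL and scopes are placeholders; the user's browser is sent to
+# the returned URL and redirected back with a single-use token appended.
+def _example_auth_sub_url():
+  """Sketch: AuthSub request URL (returned as an atom.url.Url)."""
+  return generate_auth_sub_url(
+      'http://www.example.com/welcome',
+      ['https://www.google.com/calendar/feeds/',
+       'https://www.google.com/m8/feeds/'])
+
+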
+def AuthSubTokenFromUrl(url):
+ """Extracts the AuthSub token from the URL.
+
+ Used after the AuthSub redirect has sent the user to the 'next' page and
+ appended the token to the URL. This function returns the value to be used
+ in the Authorization header.
+
+ Args:
+ url: str The URL of the current page which contains the AuthSub token as
+ a URL parameter.
+ """
+ token = TokenFromUrl(url)
+ if token:
+ return 'AuthSub token=%s' % token
+ return None
+
+
+def TokenFromUrl(url):
+ """Extracts the AuthSub token from the URL.
+
+ Returns the raw token value.
+
+ Args:
+ url: str The URL or the query portion of the URL string (after the ?) of
+ the current page which contains the AuthSub token as a URL parameter.
+ """
+ if url.find('?') > -1:
+ query_params = url.split('?')[1]
+ else:
+ query_params = url
+ for pair in query_params.split('&'):
+ if pair.startswith('token='):
+ return pair[6:]
+ return None
+
+
+def extract_auth_sub_token_from_url(url,
+ scopes_param_prefix='auth_sub_scopes', rsa_key=None):
+ """Creates an AuthSubToken and sets the token value and scopes from the URL.
+
+  After the Google Accounts AuthSub pages redirect the user's browser back to
+ the web application (using the 'next' URL from the request) the web app must
+ extract the token from the current page's URL. The token is provided as a
+ URL parameter named 'token' and if generate_auth_sub_url was used to create
+ the request, the token's valid scopes are included in a URL parameter whose
+ name is specified in scopes_param_prefix.
+
+ Args:
+ url: atom.url.Url or str representing the current URL. The token value
+ and valid scopes should be included as URL parameters.
+ scopes_param_prefix: str (optional) The URL parameter key which maps to
+ the list of valid scopes for the token.
+
+ Returns:
+ An AuthSubToken with the token value from the URL and set to be valid for
+ the scopes passed in on the URL. If no scopes were included in the URL,
+ the AuthSubToken defaults to being valid for no scopes. If there was no
+ 'token' parameter in the URL, this function returns None.
+ """
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ if 'token' not in url.params:
+ return None
+ scopes = []
+ if scopes_param_prefix in url.params:
+ scopes = url.params[scopes_param_prefix].split(' ')
+ token_value = url.params['token']
+ if rsa_key:
+ token = SecureAuthSubToken(rsa_key, scopes=scopes)
+ else:
+ token = AuthSubToken(scopes=scopes)
+ token.set_token_string(token_value)
+ return token
+
+
+def AuthSubTokenFromHttpBody(http_body):
+ """Extracts the AuthSub token from an HTTP body string.
+
+ Used to find the new session token after making a request to upgrade a
+ single use AuthSub token.
+
+ Args:
+    http_body: str The response from the server which contains the AuthSub
+ key. For example, this function would find the new session token
+ from the server's response to an upgrade token request.
+
+ Returns:
+ The header value to use for Authorization which contains the AuthSub
+ token.
+ """
+ token_value = token_from_http_body(http_body)
+ if token_value:
+ return '%s%s' % (AUTHSUB_AUTH_LABEL, token_value)
+ return None
+
+
+def token_from_http_body(http_body):
+ """Extracts the AuthSub token from an HTTP body string.
+
+ Used to find the new session token after making a request to upgrade a
+ single use AuthSub token.
+
+ Args:
+    http_body: str The response from the server which contains the AuthSub
+ key. For example, this function would find the new session token
+ from the server's response to an upgrade token request.
+
+ Returns:
+ The raw token value to use in an AuthSubToken object.
+ """
+ for response_line in http_body.splitlines():
+ if response_line.startswith('Token='):
+ # Strip off Token= and return the token value string.
+ return response_line[6:]
+ return None
+
+
+TokenFromHttpBody = token_from_http_body
+
+
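+# --- Editorial sketch (not part of the original module) ---
+# End-to-end AuthSub handling with the helpers above: pull the single-use
+# token off the redirect URL, then, after the application has made the
+# session-upgrade request itself (transport not shown), read the session
+# token out of the response body.
+def _example_auth_sub_round_trip(redirect_url, upgrade_response_body):
+  """Sketch: single-use token from the URL, session token from the body."""
+  token = extract_auth_sub_token_from_url(redirect_url)
+  if token is None:
+    return None
+  session_value = token_from_http_body(upgrade_response_body)
+  if session_value:
+    token.set_token_string(session_value)
+  return token
+
+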
+def OAuthTokenFromUrl(url, scopes_param_prefix='oauth_token_scope'):
+ """Creates an OAuthToken and sets token key and scopes (if present) from URL.
+
+  After the Google Accounts OAuth pages redirect the user's browser back to
+ the web application (using the 'callback' URL from the request) the web app
+  can extract the token from the current page's URL. The token is the same as
+  the request token, but it is either authorized (if the user grants access) or
+  unauthorized (if the user denies access). The token is provided as a
+  URL parameter named 'oauth_token' and, if GenerateOAuthAuthorizationUrl was
+  used with include_scopes_in_callback=True, the token's
+ valid scopes are included in a URL parameter whose name is specified in
+ scopes_param_prefix.
+
+ Args:
+ url: atom.url.Url or str representing the current URL. The token value
+ and valid scopes should be included as URL parameters.
+ scopes_param_prefix: str (optional) The URL parameter key which maps to
+ the list of valid scopes for the token.
+
+ Returns:
+ An OAuthToken with the token key from the URL and set to be valid for
+ the scopes passed in on the URL. If no scopes were included in the URL,
+ the OAuthToken defaults to being valid for no scopes. If there was no
+ 'oauth_token' parameter in the URL, this function returns None.
+ """
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ if 'oauth_token' not in url.params:
+ return None
+ scopes = []
+ if scopes_param_prefix in url.params:
+ scopes = url.params[scopes_param_prefix].split(' ')
+ token_key = url.params['oauth_token']
+ token = OAuthToken(key=token_key, scopes=scopes)
+ return token
+
+
+def OAuthTokenFromHttpBody(http_body):
+ """Parses the HTTP response body and returns an OAuth token.
+
+ The returned OAuth token will just have key and secret parameters set.
+ It won't have any knowledge about the scopes or oauth_input_params. It is
+ your responsibility to make it aware of the remaining parameters.
+
+ Returns:
+ OAuthToken OAuth token.
+ """
+ token = oauth.OAuthToken.from_string(http_body)
+ oauth_token = OAuthToken(key=token.key, secret=token.secret)
+ return oauth_token
+
+
+class OAuthSignatureMethod(object):
+ """Holds valid OAuth signature methods.
+
+ RSA_SHA1: Class to build signature according to RSA-SHA1 algorithm.
+ HMAC_SHA1: Class to build signature according to HMAC-SHA1 algorithm.
+ """
+
+ HMAC_SHA1 = oauth.OAuthSignatureMethod_HMAC_SHA1
+
+ class RSA_SHA1(oauth_rsa.OAuthSignatureMethod_RSA_SHA1):
+ """Provides implementation for abstract methods to return RSA certs."""
+
+ def __init__(self, private_key, public_cert):
+ self.private_key = private_key
+ self.public_cert = public_cert
+
+ def _fetch_public_cert(self, unused_oauth_request):
+ return self.public_cert
+
+ def _fetch_private_cert(self, unused_oauth_request):
+ return self.private_key
+
+
+class OAuthInputParams(object):
+ """Stores OAuth input parameters.
+
+ This class is a store for OAuth input parameters viz. consumer key and secret,
+ signature method and RSA key.
+ """
+
+ def __init__(self, signature_method, consumer_key, consumer_secret=None,
+ rsa_key=None, requestor_id=None):
+ """Initializes object with parameters required for using OAuth mechanism.
+
+ NOTE: Though consumer_secret and rsa_key are optional, either of the two
+ is required depending on the value of the signature_method.
+
+ Args:
+ signature_method: class which provides implementation for strategy class
+ oauth.oauth.OAuthSignatureMethod. Signature method to be used for
+ signing each request. Valid implementations are provided as the
+ constants defined by gdata.auth.OAuthSignatureMethod. Currently
+ they are gdata.auth.OAuthSignatureMethod.RSA_SHA1 and
+ gdata.auth.OAuthSignatureMethod.HMAC_SHA1. Instead of passing in
+ the strategy class, you may pass in a string for 'RSA_SHA1' or
+ 'HMAC_SHA1'. If you plan to use OAuth on App Engine (or another
+ WSGI environment) I recommend specifying signature method using a
+ string (the only options are 'RSA_SHA1' and 'HMAC_SHA1'). In these
+ environments there are sometimes issues with pickling an object in
+ which a member references a class or function. Storing a string to
+ refer to the signature method mitigates complications when
+ pickling.
+ consumer_key: string Domain identifying third_party web application.
+ consumer_secret: string (optional) Secret generated during registration.
+ Required only for HMAC_SHA1 signature method.
+ rsa_key: string (optional) Private key required for RSA_SHA1 signature
+ method.
+      requestor_id: string (optional) User email address to make requests on
+ their behalf. This parameter should only be set when performing
+ 2 legged OAuth requests.
+ """
+ if (signature_method == OAuthSignatureMethod.RSA_SHA1
+ or signature_method == 'RSA_SHA1'):
+ self.__signature_strategy = 'RSA_SHA1'
+ elif (signature_method == OAuthSignatureMethod.HMAC_SHA1
+ or signature_method == 'HMAC_SHA1'):
+ self.__signature_strategy = 'HMAC_SHA1'
+ else:
+ self.__signature_strategy = signature_method
+ self.rsa_key = rsa_key
+ self._consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
+ self.requestor_id = requestor_id
+
+ def __get_signature_method(self):
+ if self.__signature_strategy == 'RSA_SHA1':
+ return OAuthSignatureMethod.RSA_SHA1(self.rsa_key, None)
+ elif self.__signature_strategy == 'HMAC_SHA1':
+ return OAuthSignatureMethod.HMAC_SHA1()
+ else:
+ return self.__signature_strategy()
+
+ def __set_signature_method(self, signature_method):
+ if (signature_method == OAuthSignatureMethod.RSA_SHA1
+ or signature_method == 'RSA_SHA1'):
+ self.__signature_strategy = 'RSA_SHA1'
+ elif (signature_method == OAuthSignatureMethod.HMAC_SHA1
+ or signature_method == 'HMAC_SHA1'):
+ self.__signature_strategy = 'HMAC_SHA1'
+ else:
+ self.__signature_strategy = signature_method
+
+ _signature_method = property(__get_signature_method, __set_signature_method,
+ doc="""Returns object capable of signing the request using RSA of HMAC.
+
+ Replaces the _signature_method member to avoid pickle errors.""")
+
+ def GetSignatureMethod(self):
+ """Gets the OAuth signature method.
+
+ Returns:
+      An OAuthSignatureMethod implementation (RSA_SHA1 or HMAC_SHA1).
+ """
+ return self._signature_method
+
+ def GetConsumer(self):
+ """Gets the OAuth consumer.
+
+ Returns:
+      The oauth.OAuthConsumer created from the consumer key and secret.
+ """
+ return self._consumer
+
+
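+# --- Editorial sketch (not part of the original module) ---
+# A typical OAuthInputParams for HMAC-SHA1 signing; the consumer key and
+# secret below are placeholders issued at registration time.
+def _example_oauth_input_params():
+  """Sketch: input parameters for HMAC-SHA1 signed requests."""
+  return OAuthInputParams(OAuthSignatureMethod.HMAC_SHA1,
+                          consumer_key='example.com',
+                          consumer_secret='consumer-secret')
+
+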
+class ClientLoginToken(atom.http_interface.GenericToken):
+ """Stores the Authorization header in auth_header and adds to requests.
+
+  This token will add its Authorization header to an HTTP request
+  as it is made. This token class is simple, but
+ some Token classes must calculate portions of the Authorization header
+ based on the request being made, which is why the token is responsible
+ for making requests via an http_client parameter.
+
+ Args:
+ auth_header: str The value for the Authorization header.
+ scopes: list of str or atom.url.Url specifying the beginnings of URLs
+ for which this token can be used. For example, if scopes contains
+ 'http://example.com/foo', then this token can be used for a request to
+ 'http://example.com/foo/bar' but it cannot be used for a request to
+ 'http://example.com/baz'
+ """
+ def __init__(self, auth_header=None, scopes=None):
+ self.auth_header = auth_header
+ self.scopes = scopes or []
+
+ def __str__(self):
+ return self.auth_header
+
+ def perform_request(self, http_client, operation, url, data=None,
+ headers=None):
+ """Sets the Authorization header and makes the HTTP request."""
+ if headers is None:
+ headers = {'Authorization':self.auth_header}
+ else:
+ headers['Authorization'] = self.auth_header
+ return http_client.request(operation, url, data=data, headers=headers)
+
+ def get_token_string(self):
+ """Removes PROGRAMMATIC_AUTH_LABEL to give just the token value."""
+ return self.auth_header[len(PROGRAMMATIC_AUTH_LABEL):]
+
+ def set_token_string(self, token_string):
+ self.auth_header = '%s%s' % (PROGRAMMATIC_AUTH_LABEL, token_string)
+
+ def valid_for_scope(self, url):
+ """Tells the caller if the token authorizes access to the desired URL.
+ """
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ for scope in self.scopes:
+ if scope == atom.token_store.SCOPE_ALL:
+ return True
+ if isinstance(scope, (str, unicode)):
+ scope = atom.url.parse_url(scope)
+ if scope == url:
+ return True
+ # Check the host and the path, but ignore the port and protocol.
+ elif scope.host == url.host and not scope.path:
+ return True
+ elif scope.host == url.host and scope.path and not url.path:
+ continue
+ elif scope.host == url.host and url.path.startswith(scope.path):
+ return True
+ return False
+
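+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): 'SomeToken123' is a placeholder token value. set_token_string
+# prepends PROGRAMMATIC_AUTH_LABEL, and valid_for_scope only matches URLs
+# that fall under one of the listed scope prefixes.
+def _example_client_login_token():
+  token = ClientLoginToken(scopes=['http://www.google.com/base/feeds/'])
+  token.set_token_string('SomeToken123')
+  return token.valid_for_scope('http://www.google.com/base/feeds/items')
+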
+
+class AuthSubToken(ClientLoginToken):
+ def get_token_string(self):
+ """Removes AUTHSUB_AUTH_LABEL to give just the token value."""
+ return self.auth_header[len(AUTHSUB_AUTH_LABEL):]
+
+ def set_token_string(self, token_string):
+ self.auth_header = '%s%s' % (AUTHSUB_AUTH_LABEL, token_string)
+
+
+class OAuthToken(atom.http_interface.GenericToken):
+ """Stores the token key, token secret and scopes for which token is valid.
+
+ This token adds the authorization header to each request made. It
+  re-calculates the authorization header for every request since the OAuth
+ signature to be added to the authorization header is dependent on the
+ request parameters.
+
+ Attributes:
+ key: str The value for the OAuth token i.e. token key.
+ secret: str The value for the OAuth token secret.
+ scopes: list of str or atom.url.Url specifying the beginnings of URLs
+ for which this token can be used. For example, if scopes contains
+ 'http://example.com/foo', then this token can be used for a request to
+ 'http://example.com/foo/bar' but it cannot be used for a request to
+ 'http://example.com/baz'
+ oauth_input_params: OAuthInputParams OAuth input parameters.
+ """
+
+ def __init__(self, key=None, secret=None, scopes=None,
+ oauth_input_params=None):
+ self.key = key
+ self.secret = secret
+ self.scopes = scopes or []
+ self.oauth_input_params = oauth_input_params
+
+ def __str__(self):
+ return self.get_token_string()
+
+ def get_token_string(self):
+ """Returns the token string.
+
+ The token string returned is of format
+ oauth_token=[0]&oauth_token_secret=[1], where [0] and [1] are some strings.
+
+ Returns:
+ A token string of format oauth_token=[0]&oauth_token_secret=[1],
+ where [0] and [1] are some strings. If self.secret is absent, it just
+ returns oauth_token=[0]. If self.key is absent, it just returns
+ oauth_token_secret=[1]. If both are absent, it returns None.
+ """
+ if self.key and self.secret:
+ return urllib.urlencode({'oauth_token': self.key,
+ 'oauth_token_secret': self.secret})
+ elif self.key:
+ return 'oauth_token=%s' % self.key
+ elif self.secret:
+ return 'oauth_token_secret=%s' % self.secret
+ else:
+ return None
+
+ def set_token_string(self, token_string):
+ """Sets the token key and secret from the token string.
+
+ Args:
+ token_string: str Token string of form
+ oauth_token=[0]&oauth_token_secret=[1]. If oauth_token is not present,
+ self.key will be None. If oauth_token_secret is not present,
+ self.secret will be None.
+ """
+ token_params = cgi.parse_qs(token_string, keep_blank_values=False)
+ if 'oauth_token' in token_params:
+ self.key = token_params['oauth_token'][0]
+ if 'oauth_token_secret' in token_params:
+ self.secret = token_params['oauth_token_secret'][0]
+
+ def GetAuthHeader(self, http_method, http_url, realm=''):
+ """Get the authentication header.
+
+ Args:
+ http_method: string HTTP method i.e. operation e.g. GET, POST, PUT, etc.
+ http_url: string or atom.url.Url HTTP URL to which request is made.
+ realm: string (default='') realm parameter to be included in the
+ authorization header.
+
+ Returns:
+ dict Header to be sent with every subsequent request after
+ authentication.
+ """
+ if isinstance(http_url, types.StringTypes):
+ http_url = atom.url.parse_url(http_url)
+ header = None
+ token = None
+ if self.key or self.secret:
+ token = oauth.OAuthToken(self.key, self.secret)
+ oauth_request = oauth.OAuthRequest.from_consumer_and_token(
+ self.oauth_input_params.GetConsumer(), token=token,
+ http_url=str(http_url), http_method=http_method,
+ parameters=http_url.params)
+ oauth_request.sign_request(self.oauth_input_params.GetSignatureMethod(),
+ self.oauth_input_params.GetConsumer(), token)
+ header = oauth_request.to_header(realm=realm)
+ header['Authorization'] = header['Authorization'].replace('+', '%2B')
+ return header
+
+ def perform_request(self, http_client, operation, url, data=None,
+ headers=None):
+ """Sets the Authorization header and makes the HTTP request."""
+ if not headers:
+ headers = {}
+ if self.oauth_input_params.requestor_id:
+ url.params['xoauth_requestor_id'] = self.oauth_input_params.requestor_id
+ headers.update(self.GetAuthHeader(operation, url))
+ return http_client.request(operation, url, data=data, headers=headers)
+
+ def valid_for_scope(self, url):
+ if isinstance(url, (str, unicode)):
+ url = atom.url.parse_url(url)
+ for scope in self.scopes:
+ if scope == atom.token_store.SCOPE_ALL:
+ return True
+ if isinstance(scope, (str, unicode)):
+ scope = atom.url.parse_url(scope)
+ if scope == url:
+ return True
+ # Check the host and the path, but ignore the port and protocol.
+ elif scope.host == url.host and not scope.path:
+ return True
+ elif scope.host == url.host and scope.path and not url.path:
+ continue
+ elif scope.host == url.host and url.path.startswith(scope.path):
+ return True
+ return False
+
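+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): shows the oauth_token=...&oauth_token_secret=... round trip used
+# when persisting a token; the key and secret are placeholder values.
+def _example_oauth_token_round_trip():
+  token = OAuthToken(key='token-key', secret='token-secret')
+  blob = token.get_token_string()
+  restored = OAuthToken()
+  restored.set_token_string(blob)
+  return restored.key, restored.secret
+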
+
+class SecureAuthSubToken(AuthSubToken):
+ """Stores the rsa private key, token, and scopes for the secure AuthSub token.
+
+ This token adds the authorization header to each request made. It
+  re-calculates the authorization header for every request since the secure
+  AuthSub signature to be added to the authorization header is dependent on
+  the request parameters.
+
+ Attributes:
+ rsa_key: string The RSA private key in PEM format that the token will
+ use to sign requests
+ token_string: string (optional) The value for the AuthSub token.
+ scopes: list of str or atom.url.Url specifying the beginnings of URLs
+ for which this token can be used. For example, if scopes contains
+ 'http://example.com/foo', then this token can be used for a request to
+ 'http://example.com/foo/bar' but it cannot be used for a request to
+ 'http://example.com/baz'
+ """
+
+ def __init__(self, rsa_key, token_string=None, scopes=None):
+ self.rsa_key = keyfactory.parsePEMKey(rsa_key)
+ self.token_string = token_string or ''
+ self.scopes = scopes or []
+
+ def __str__(self):
+ return self.get_token_string()
+
+ def get_token_string(self):
+ return str(self.token_string)
+
+ def set_token_string(self, token_string):
+ self.token_string = token_string
+
+ def GetAuthHeader(self, http_method, http_url):
+ """Generates the Authorization header.
+
+ The form of the secure AuthSub Authorization header is
+ Authorization: AuthSub token="token" sigalg="sigalg" data="data" sig="sig"
+ and data represents a string in the form
+ data = http_method http_url timestamp nonce
+
+ Args:
+ http_method: string HTTP method i.e. operation e.g. GET, POST, PUT, etc.
+ http_url: string or atom.url.Url HTTP URL to which request is made.
+
+ Returns:
+ dict Header to be sent with every subsequent request after authentication.
+ """
+ timestamp = int(math.floor(time.time()))
+ nonce = '%lu' % random.randrange(1, 2**64)
+ data = '%s %s %d %s' % (http_method, str(http_url), timestamp, nonce)
+ sig = cryptomath.bytesToBase64(self.rsa_key.hashAndSign(data))
+ header = {'Authorization': '%s"%s" data="%s" sig="%s" sigalg="rsa-sha1"' %
+ (AUTHSUB_AUTH_LABEL, self.token_string, data, sig)}
+ return header
+
+ def perform_request(self, http_client, operation, url, data=None,
+ headers=None):
+ """Sets the Authorization header and makes the HTTP request."""
+ if not headers:
+ headers = {}
+ headers.update(self.GetAuthHeader(operation, url))
+ return http_client.request(operation, url, data=data, headers=headers)
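+
+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): 'pem_key' must be an RSA private key in PEM format and
+# 'SecureToken123' is a placeholder token value. GetAuthHeader signs the
+# "method url timestamp nonce" string with that key.
+def _example_secure_authsub_header(pem_key):
+  token = SecureAuthSubToken(pem_key, token_string='SecureToken123',
+                             scopes=['http://www.google.com/calendar/feeds/'])
+  return token.GetAuthHeader('GET', 'http://www.google.com/calendar/feeds/')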
diff --git a/python/gdata/base/__init__.py b/python/gdata/base/__init__.py
new file mode 100644
index 0000000..32401a9
--- /dev/null
+++ b/python/gdata/base/__init__.py
@@ -0,0 +1,697 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains extensions to Atom objects used with Google Base."""
+
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom
+import gdata
+
+
+# XML namespaces which are often used in Google Base entities.
+GBASE_NAMESPACE = 'http://base.google.com/ns/1.0'
+GBASE_TEMPLATE = '{http://base.google.com/ns/1.0}%s'
+GMETA_NAMESPACE = 'http://base.google.com/ns-metadata/1.0'
+GMETA_TEMPLATE = '{http://base.google.com/ns-metadata/1.0}%s'
+
+
+class ItemAttributeContainer(atom.AtomBase):
+ """Provides methods for finding Google Base Item attributes.
+
+ Google Base item attributes are child nodes in the gbase namespace. Google
+ Base allows you to define your own item attributes and this class provides
+ methods to interact with the custom attributes.
+ """
+
+ def GetItemAttributes(self, name):
+ """Returns a list of all item attributes which have the desired name.
+
+ Args:
+ name: str The tag of the desired base attributes. For example, calling
+ this method with 'rating' would return a list of ItemAttributes
+ represented by a 'g:rating' tag.
+
+ Returns:
+ A list of matching ItemAttribute objects.
+ """
+ result = []
+ for attrib in self.item_attributes:
+ if attrib.name == name:
+ result.append(attrib)
+ return result
+
+ def FindItemAttribute(self, name):
+ """Get the contents of the first Base item attribute which matches name.
+
+ This method is deprecated, please use GetItemAttributes instead.
+
+ Args:
+ name: str The tag of the desired base attribute. For example, calling
+ this method with name = 'rating' would search for a tag rating
+ in the GBase namespace in the item attributes.
+
+ Returns:
+      The text contents of the item attribute, or None if the attribute was
+      not found.
+ """
+
+ for attrib in self.item_attributes:
+ if attrib.name == name:
+ return attrib.text
+ return None
+
+ def AddItemAttribute(self, name, value, value_type=None, access=None):
+ """Adds a new item attribute tag containing the value.
+
+ Creates a new extension element in the GBase namespace to represent a
+ Google Base item attribute.
+
+ Args:
+ name: str The tag name for the new attribute. This must be a valid xml
+ tag name. The tag will be placed in the GBase namespace.
+      value: str Contents for the item attribute.
+      value_type: str (optional) The type of data in the value. Examples:
+          text, float.
+ access: str (optional) Used to hide attributes. The attribute is not
+ exposed in the snippets feed if access is set to 'private'.
+ """
+
+ new_attribute = ItemAttribute(name, text=value,
+ text_type=value_type, access=access)
+ self.item_attributes.append(new_attribute)
+ return new_attribute
+
+ def SetItemAttribute(self, name, value):
+ """Changes an existing item attribute's value."""
+
+ for attrib in self.item_attributes:
+ if attrib.name == name:
+ attrib.text = value
+ return
+
+ def RemoveItemAttribute(self, name):
+    """Deletes the first item attribute which matches name."""
+
+ for i in xrange(len(self.item_attributes)):
+ if self.item_attributes[i].name == name:
+ del self.item_attributes[i]
+ return
+
+  # We need to override _ConvertElementTreeToMember to add special logic to
+ # convert custom attributes to members
+ def _ConvertElementTreeToMember(self, child_tree):
+ # Find the element's tag in this class's list of child members
+ if self.__class__._children.has_key(child_tree.tag):
+ member_name = self.__class__._children[child_tree.tag][0]
+ member_class = self.__class__._children[child_tree.tag][1]
+ # If the class member is supposed to contain a list, make sure the
+ # matching member is set to a list, then append the new member
+ # instance to the list.
+ if isinstance(member_class, list):
+ if getattr(self, member_name) is None:
+ setattr(self, member_name, [])
+ getattr(self, member_name).append(atom._CreateClassFromElementTree(
+ member_class[0], child_tree))
+ else:
+ setattr(self, member_name,
+ atom._CreateClassFromElementTree(member_class, child_tree))
+ elif child_tree.tag.find('{%s}' % GBASE_NAMESPACE) == 0:
+ # If this is in the gbase namespace, make it into an extension element.
+ name = child_tree.tag[child_tree.tag.index('}')+1:]
+ value = child_tree.text
+ if child_tree.attrib.has_key('type'):
+ value_type = child_tree.attrib['type']
+ else:
+ value_type = None
+      attrib = self.AddItemAttribute(name, value, value_type)
+      for sub in child_tree.getchildren():
+        sub_name = sub.tag[sub.tag.index('}')+1:]
+        sub_value = sub.text
+        if sub.attrib.has_key('type'):
+          sub_type = sub.attrib['type']
+        else:
+          sub_type = None
+        attrib.AddItemAttribute(sub_name, sub_value, sub_type)
+ else:
+ atom.ExtensionContainer._ConvertElementTreeToMember(self, child_tree)
+
+  # We need to override _AddMembersToElementTree to add special logic to
+ # convert custom members to XML nodes.
+ def _AddMembersToElementTree(self, tree):
+ # Convert the members of this class which are XML child nodes.
+ # This uses the class's _children dictionary to find the members which
+ # should become XML child nodes.
+ member_node_names = [values[0] for tag, values in
+ self.__class__._children.iteritems()]
+ for member_name in member_node_names:
+ member = getattr(self, member_name)
+ if member is None:
+ pass
+ elif isinstance(member, list):
+ for instance in member:
+ instance._BecomeChildElement(tree)
+ else:
+ member._BecomeChildElement(tree)
+ # Convert the members of this class which are XML attributes.
+ for xml_attribute, member_name in self.__class__._attributes.iteritems():
+ member = getattr(self, member_name)
+ if member is not None:
+ tree.attrib[xml_attribute] = member
+ # Convert all special custom item attributes to nodes
+ for attribute in self.item_attributes:
+ attribute._BecomeChildElement(tree)
+ # Lastly, call the ExtensionContainers's _AddMembersToElementTree to
+ # convert any extension attributes.
+ atom.ExtensionContainer._AddMembersToElementTree(self, tree)
+
+
+class ItemAttribute(ItemAttributeContainer):
+ """An optional or user defined attribute for a GBase item.
+
+ Google Base allows items to have custom attribute child nodes. These nodes
+ have contents and a type attribute which tells Google Base whether the
+  contents are text, a float value with units, etc. The Atom Text class has
+  the same structure, so this class reuses the Text class's children and
+  attributes.
+ """
+
+ _namespace = GBASE_NAMESPACE
+ _children = atom.Text._children.copy()
+ _attributes = atom.Text._attributes.copy()
+ _attributes['access'] = 'access'
+
+ def __init__(self, name, text_type=None, access=None, text=None,
+ extension_elements=None, extension_attributes=None, item_attributes=None):
+ """Constructor for a GBase item attribute
+
+ Args:
+ name: str The name of the attribute. Examples include
+ price, color, make, model, pages, salary, etc.
+ text_type: str (optional) The type associated with the text contents
+ access: str (optional) If the access attribute is set to 'private', the
+ attribute will not be included in the item's description in the
+ snippets feed
+      text: str (optional) The text data in this element
+ extension_elements: list (optional) A list of ExtensionElement
+ instances
+ extension_attributes: dict (optional) A dictionary of attribute
+ value string pairs
+ """
+
+ self.name = name
+ self.type = text_type
+ self.access = access
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.item_attributes = item_attributes or []
+
+ def _BecomeChildElement(self, tree):
+ new_child = ElementTree.Element('')
+ tree.append(new_child)
+ new_child.tag = '{%s}%s' % (self.__class__._namespace,
+ self.name)
+ self._AddMembersToElementTree(new_child)
+
+ def _ToElementTree(self):
+ new_tree = ElementTree.Element('{%s}%s' % (self.__class__._namespace,
+ self.name))
+ self._AddMembersToElementTree(new_tree)
+ return new_tree
+
+
+def ItemAttributeFromString(xml_string):
+ element_tree = ElementTree.fromstring(xml_string)
+ return _ItemAttributeFromElementTree(element_tree)
+
+
+def _ItemAttributeFromElementTree(element_tree):
+ if element_tree.tag.find(GBASE_TEMPLATE % '') == 0:
+ to_return = ItemAttribute('')
+ to_return._HarvestElementTree(element_tree)
+ to_return.name = element_tree.tag[element_tree.tag.index('}')+1:]
+ if to_return.name and to_return.name != '':
+ return to_return
+ return None
+
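+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): parses a single gbase attribute node. The XML snippet is made up;
+# its namespace prefix must map to GBASE_NAMESPACE for the parse to succeed.
+def _example_parse_item_attribute():
+  xml = ('<g:price xmlns:g="http://base.google.com/ns/1.0" '
+         'type="floatUnit">25.00 usd</g:price>')
+  attribute = ItemAttributeFromString(xml)
+  return attribute.name, attribute.type, attribute.text
+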
+
+class Label(atom.AtomBase):
+ """The Google Base label element"""
+
+ _tag = 'label'
+ _namespace = GBASE_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def LabelFromString(xml_string):
+ return atom.CreateClassFromXMLString(Label, xml_string)
+
+
+class Thumbnail(atom.AtomBase):
+ """The Google Base thumbnail element"""
+
+ _tag = 'thumbnail'
+ _namespace = GMETA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['width'] = 'width'
+ _attributes['height'] = 'height'
+
+ def __init__(self, width=None, height=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.width = width
+ self.height = height
+
+
+def ThumbnailFromString(xml_string):
+ return atom.CreateClassFromXMLString(Thumbnail, xml_string)
+
+
+class ImageLink(atom.Text):
+ """The Google Base image_link element"""
+
+ _tag = 'image_link'
+ _namespace = GBASE_NAMESPACE
+ _children = atom.Text._children.copy()
+ _attributes = atom.Text._attributes.copy()
+ _children['{%s}thumbnail' % GMETA_NAMESPACE] = ('thumbnail', [Thumbnail])
+
+ def __init__(self, thumbnail=None, text=None, extension_elements=None,
+ text_type=None, extension_attributes=None):
+ self.thumbnail = thumbnail or []
+ self.text = text
+ self.type = text_type
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def ImageLinkFromString(xml_string):
+ return atom.CreateClassFromXMLString(ImageLink, xml_string)
+
+
+class ItemType(atom.Text):
+ """The Google Base item_type element"""
+
+ _tag = 'item_type'
+ _namespace = GBASE_NAMESPACE
+ _children = atom.Text._children.copy()
+ _attributes = atom.Text._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ text_type=None, extension_attributes=None):
+ self.text = text
+ self.type = text_type
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def ItemTypeFromString(xml_string):
+ return atom.CreateClassFromXMLString(ItemType, xml_string)
+
+
+class MetaItemType(ItemType):
+ """The Google Base item_type element"""
+
+ _tag = 'item_type'
+ _namespace = GMETA_NAMESPACE
+ _children = ItemType._children.copy()
+ _attributes = ItemType._attributes.copy()
+
+
+def MetaItemTypeFromString(xml_string):
+ return atom.CreateClassFromXMLString(MetaItemType, xml_string)
+
+
+class Value(atom.AtomBase):
+ """Metadata about common values for a given attribute
+
+ A value is a child of an attribute which comes from the attributes feed.
+ The value's text is a commonly used value paired with an attribute name
+ and the value's count tells how often this value appears for the given
+ attribute in the search results.
+ """
+
+ _tag = 'value'
+ _namespace = GMETA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['count'] = 'count'
+
+ def __init__(self, count=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ """Constructor for Attribute metadata element
+
+ Args:
+ count: str (optional) The number of times the value in text is given
+ for the parent attribute.
+ text: str (optional) The value which appears in the search results.
+ extension_elements: list (optional) A list of ExtensionElement
+ instances
+ extension_attributes: dict (optional) A dictionary of attribute value
+ string pairs
+ """
+
+ self.count = count
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def ValueFromString(xml_string):
+ return atom.CreateClassFromXMLString(Value, xml_string)
+
+
+class Attribute(atom.Text):
+ """Metadata about an attribute from the attributes feed
+
+ An entry from the attributes feed contains a list of attributes. Each
+ attribute describes the attribute's type and count of the items which
+ use the attribute.
+ """
+
+ _tag = 'attribute'
+ _namespace = GMETA_NAMESPACE
+ _children = atom.Text._children.copy()
+ _attributes = atom.Text._attributes.copy()
+ _children['{%s}value' % GMETA_NAMESPACE] = ('value', [Value])
+ _attributes['count'] = 'count'
+ _attributes['name'] = 'name'
+
+ def __init__(self, name=None, attribute_type=None, count=None, value=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ """Constructor for Attribute metadata element
+
+ Args:
+ name: str (optional) The name of the attribute
+ attribute_type: str (optional) The type for the attribute. Examples:
+          text, float, etc.
+ count: str (optional) The number of times this attribute appears in
+ the query results.
+ value: list (optional) The values which are often used for this
+          attribute.
+ text: str (optional) The text contents of the XML for this attribute.
+ extension_elements: list (optional) A list of ExtensionElement
+ instances
+ extension_attributes: dict (optional) A dictionary of attribute value
+ string pairs
+ """
+
+ self.name = name
+ self.type = attribute_type
+ self.count = count
+ self.value = value or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def AttributeFromString(xml_string):
+ return atom.CreateClassFromXMLString(Attribute, xml_string)
+
+
+class Attributes(atom.AtomBase):
+ """A collection of Google Base metadata attributes"""
+
+ _tag = 'attributes'
+ _namespace = GMETA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}attribute' % GMETA_NAMESPACE] = ('attribute', [Attribute])
+
+ def __init__(self, attribute=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.attribute = attribute or []
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+
+class GBaseItem(ItemAttributeContainer, gdata.BatchEntry):
+  """A Google Base flavor of an Atom Entry.
+
+ Google Base items have required attributes, recommended attributes, and user
+ defined attributes. The required attributes are stored in this class as
+ members, and other attributes are stored as extension elements. You can
+ access the recommended and user defined attributes by using
+ AddItemAttribute, SetItemAttribute, FindItemAttribute, and
+ RemoveItemAttribute.
+
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.BatchEntry._children.copy()
+ _attributes = gdata.BatchEntry._attributes.copy()
+ _children['{%s}label' % GBASE_NAMESPACE] = ('label', [Label])
+ _children['{%s}item_type' % GBASE_NAMESPACE] = ('item_type', ItemType)
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, updated=None, control=None,
+ label=None, item_type=None, item_attributes=None,
+ batch_operation=None, batch_id=None, batch_status=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.title = title
+ self.updated = updated
+ self.control = control
+ self.label = label or []
+ self.item_type = item_type
+ self.item_attributes = item_attributes or []
+ self.batch_operation = batch_operation
+ self.batch_id = batch_id
+ self.batch_status = batch_status
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def GBaseItemFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseItem, xml_string)
+
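+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): builds a bare item and attaches one custom attribute; the title,
+# attribute name, and value are placeholders. FindItemAttribute reads the
+# attribute's text back by name.
+def _example_item_attributes():
+  item = GBaseItem(title=atom.Title(text='Digital camera'))
+  item.AddItemAttribute('price', '149.99 usd', value_type='floatUnit')
+  return item.FindItemAttribute('price')
+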
+
+class GBaseSnippet(GBaseItem):
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = GBaseItem._children.copy()
+ _attributes = GBaseItem._attributes.copy()
+
+
+def GBaseSnippetFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseSnippet, xml_string)
+
+
+class GBaseAttributeEntry(gdata.GDataEntry):
+ """An Atom Entry from the attributes feed"""
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}attribute' % GMETA_NAMESPACE] = ('attribute', [Attribute])
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, updated=None, label=None,
+ attribute=None, control=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.control = control
+ self.title = title
+ self.updated = updated
+ self.label = label or []
+ self.attribute = attribute or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def GBaseAttributeEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseAttributeEntry, xml_string)
+
+
+class GBaseItemTypeEntry(gdata.GDataEntry):
+ """An Atom entry from the item types feed
+
+ These entries contain a list of attributes which are stored in one
+ XML node called attributes. This class simplifies the data structure
+ by treating attributes as a list of attribute instances.
+
+ Note that the item_type for an item type entry is in the Google Base meta
+ namespace as opposed to item_types encountered in other feeds.
+ """
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}attributes' % GMETA_NAMESPACE] = ('attributes', Attributes)
+ _children['{%s}attribute' % GMETA_NAMESPACE] = ('attribute', [Attribute])
+ _children['{%s}item_type' % GMETA_NAMESPACE] = ('item_type', MetaItemType)
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, updated=None, label=None,
+ item_type=None, control=None, attribute=None, attributes=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.title = title
+ self.updated = updated
+ self.control = control
+ self.label = label or []
+ self.item_type = item_type
+ self.attributes = attributes
+ self.attribute = attribute or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def GBaseItemTypeEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseItemTypeEntry, xml_string)
+
+
+class GBaseItemFeed(gdata.BatchFeed):
+ """A feed containing Google Base Items"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.BatchFeed._children.copy()
+ _attributes = gdata.BatchFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [GBaseItem])
+
+
+def GBaseItemFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseItemFeed, xml_string)
+
+
+class GBaseSnippetFeed(gdata.GDataFeed):
+ """A feed containing Google Base Snippets"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [GBaseSnippet])
+
+
+def GBaseSnippetFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseSnippetFeed, xml_string)
+
+
+class GBaseAttributesFeed(gdata.GDataFeed):
+ """A feed containing Google Base Attributes
+
+ A query sent to the attributes feed will return a feed of
+ attributes which are present in the items that match the
+ query.
+ """
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [GBaseAttributeEntry])
+
+
+def GBaseAttributesFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseAttributesFeed, xml_string)
+
+
+class GBaseLocalesFeed(gdata.GDataFeed):
+ """The locales feed from Google Base.
+
+ This read-only feed defines the permitted locales for Google Base. The
+ locale value identifies the language, currency, and date formats used in a
+ feed.
+ """
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+
+
+def GBaseLocalesFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseLocalesFeed, xml_string)
+
+
+class GBaseItemTypesFeed(gdata.GDataFeed):
+ """A feed from the Google Base item types feed"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [GBaseItemTypeEntry])
+
+
+def GBaseItemTypesFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GBaseItemTypesFeed, xml_string)
diff --git a/python/gdata/base/service.py b/python/gdata/base/service.py
new file mode 100644
index 0000000..f6fbfa5
--- /dev/null
+++ b/python/gdata/base/service.py
@@ -0,0 +1,256 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""GBaseService extends the GDataService to streamline Google Base operations.
+
+ GBaseService: Provides methods to query feeds and manipulate items. Extends
+ GDataService.
+
+ DictionaryToParamList: Function which converts a dictionary into a list of
+ URL arguments (represented as strings). This is a
+ utility function used in CRUD operations.
+"""
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+import urllib
+import gdata
+import atom.service
+import gdata.service
+import gdata.base
+import atom
+
+
+# URL to which all batch requests are sent.
+BASE_BATCH_URL = 'http://www.google.com/base/feeds/items/batch'
+
+
+class Error(Exception):
+ pass
+
+
+class RequestError(Error):
+ pass
+
+
+class GBaseService(gdata.service.GDataService):
+ """Client for the Google Base service."""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='base.google.com', api_key=None, additional_headers=None,
+ handler=None, **kwargs):
+ """Creates a client for the Google Base service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'base.google.com'.
+ api_key: string (optional) The Google Base API key to use.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='gbase', source=source,
+ server=server, additional_headers=additional_headers, handler=handler,
+ **kwargs)
+ self.api_key = api_key
+
+ def _SetAPIKey(self, api_key):
+ if not isinstance(self.additional_headers, dict):
+ self.additional_headers = {}
+ self.additional_headers['X-Google-Key'] = api_key
+
+ def __SetAPIKey(self, api_key):
+ self._SetAPIKey(api_key)
+
+ def _GetAPIKey(self):
+ if 'X-Google-Key' not in self.additional_headers:
+ return None
+ else:
+ return self.additional_headers['X-Google-Key']
+
+ def __GetAPIKey(self):
+ return self._GetAPIKey()
+
+ api_key = property(__GetAPIKey, __SetAPIKey,
+ doc="""Get or set the API key to be included in all requests.""")
+
+ def Query(self, uri, converter=None):
+    """Performs a query and returns the resulting feed or entry.
+
+ Args:
+      uri: string The full URI which will be queried. Examples include
+ '/base/feeds/snippets?bq=digital+camera',
+ 'http://www.google.com/base/feeds/snippets?bq=digital+camera'
+ '/base/feeds/items'
+ I recommend creating a URI using a query class.
+ converter: func (optional) A function which will be executed on the
+ server's response. Examples include GBaseItemFromString, etc.
+
+ Returns:
+      If converter was specified, returns the result of calling converter on
+      the server's response. If converter was not specified and the result
+      was an Atom Entry, returns a GBaseItem. Otherwise, the method returns
+      the result of calling gdata.service's Get method.
+ """
+
+ result = self.Get(uri, converter=converter)
+ if converter:
+ return result
+ elif isinstance(result, atom.Entry):
+ return gdata.base.GBaseItemFromString(result.ToString())
+ return result
+
+ def QuerySnippetsFeed(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseSnippetFeedFromString)
+
+ def QueryItemsFeed(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseItemFeedFromString)
+
+ def QueryAttributesFeed(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseAttributesFeedFromString)
+
+ def QueryItemTypesFeed(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseItemTypesFeedFromString)
+
+ def QueryLocalesFeed(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseLocalesFeedFromString)
+
+ def GetItem(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseItemFromString)
+
+ def GetSnippet(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseSnippetFromString)
+
+ def GetAttribute(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseAttributeEntryFromString)
+
+ def GetItemType(self, uri):
+ return self.Get(uri, converter=gdata.base.GBaseItemTypeEntryFromString)
+
+ def GetLocale(self, uri):
+ return self.Get(uri, converter=gdata.base.GDataEntryFromString)
+
+ def InsertItem(self, new_item, url_params=None, escape_params=True,
+ converter=None):
+ """Adds an item to Google Base.
+
+ Args:
+ new_item: atom.Entry or subclass A new item which is to be added to
+ Google Base.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ GBaseItemFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a GBaseItem.
+ """
+
+ response = self.Post(new_item, '/base/feeds/items', url_params=url_params,
+ escape_params=escape_params, converter=converter)
+
+ if not converter and isinstance(response, atom.Entry):
+ return gdata.base.GBaseItemFromString(response.ToString())
+ return response
+
+ def DeleteItem(self, item_id, url_params=None, escape_params=True):
+ """Removes an item with the specified ID from Google Base.
+
+ Args:
+ item_id: string The ID of the item to be deleted. Example:
+ 'http://www.google.com/base/feeds/items/13185446517496042648'
+ url_params: dict (optional) Additional URL parameters to be included
+ in the deletion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ True if the delete succeeded.
+ """
+
+ return self.Delete('%s' % (item_id[len('http://www.google.com'):],),
+ url_params=url_params, escape_params=escape_params)
+
+ def UpdateItem(self, item_id, updated_item, url_params=None,
+ escape_params=True,
+ converter=gdata.base.GBaseItemFromString):
+ """Updates an existing item.
+
+ Args:
+ item_id: string The ID of the item to be updated. Example:
+ 'http://www.google.com/base/feeds/items/13185446517496042648'
+ updated_item: atom.Entry, subclass, or string, containing
+ the Atom Entry which will replace the base item which is
+ stored at the item_id.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the update request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ GBaseItemFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a GBaseItem.
+ """
+
+ response = self.Put(updated_item,
+ item_id, url_params=url_params, escape_params=escape_params,
+ converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return gdata.base.GBaseItemFromString(response.ToString())
+ return response
+
+ def ExecuteBatch(self, batch_feed,
+ converter=gdata.base.GBaseItemFeedFromString):
+ """Sends a batch request feed to the server.
+
+ Args:
+ batch_feed: gdata.BatchFeed A feed containing BatchEntry elements which
+ contain the desired CRUD operation and any necessary entry data.
+ converter: Function (optional) Function to be executed on the server's
+ response. This function should take one string as a parameter. The
+          default value is GBaseItemFeedFromString which will turn the result
+          into a gdata.base.GBaseItemFeed object.
+
+ Returns:
+ A gdata.BatchFeed containing the results.
+ """
+
+ return self.Post(batch_feed, BASE_BATCH_URL, converter=converter)
+
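+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): the email, password, and API key are placeholders.
+# ProgrammaticLogin is inherited from GDataService; QuerySnippetsFeed fetches
+# a snippets feed for the given query URI.
+def _example_query_snippets():
+  client = GBaseService(email='user@example.com', password='password',
+                        source='ExampleCo-ExampleApp-1.0',
+                        api_key='YOUR_API_KEY')
+  client.ProgrammaticLogin()
+  return client.QuerySnippetsFeed('/base/feeds/snippets?bq=digital+camera')
+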
+
+class BaseQuery(gdata.service.Query):
+
+ def _GetBaseQuery(self):
+ return self['bq']
+
+ def _SetBaseQuery(self, base_query):
+ self['bq'] = base_query
+
+ bq = property(_GetBaseQuery, _SetBaseQuery,
+ doc="""The bq query parameter""")
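+
+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): gdata.service.Query builds the request URI from the feed path and
+# query parameters, so setting bq here should yield a URI along the lines of
+# '/base/feeds/snippets?bq=digital+camera'.
+def _example_base_query_uri():
+  query = BaseQuery(feed='/base/feeds/snippets')
+  query.bq = 'digital camera'
+  return query.ToUri()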
diff --git a/python/gdata/blogger/__init__.py b/python/gdata/blogger/__init__.py
new file mode 100644
index 0000000..156f25c
--- /dev/null
+++ b/python/gdata/blogger/__init__.py
@@ -0,0 +1,202 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007, 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains extensions to Atom objects used with Blogger."""
+
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+
+import atom
+import gdata
+import re
+
+
+LABEL_SCHEME = 'http://www.blogger.com/atom/ns#'
+THR_NAMESPACE = 'http://purl.org/syndication/thread/1.0'
+
+
+class BloggerEntry(gdata.GDataEntry):
+ """Adds convenience methods inherited by all Blogger entries."""
+
+ blog_name_pattern = re.compile('(http://)(\w*)')
+ blog_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)')
+ blog_id2_pattern = re.compile('tag:blogger.com,1999:user-(\d+)\.blog-(\d+)')
+
+ def GetBlogId(self):
+ """Extracts the Blogger id of this blog.
+    This method is useful when constructing URLs by hand. The blog id is
+ often used in blogger operation URLs. This should not be confused with
+ the id member of a BloggerBlog. The id element is the Atom id XML element.
+ The blog id which this method returns is a part of the Atom id.
+
+ Returns:
+ The blog's unique id as a string.
+ """
+ if self.id.text:
+ match = self.blog_id_pattern.match(self.id.text)
+ if match:
+ return match.group(2)
+ else:
+ return self.blog_id2_pattern.match(self.id.text).group(2)
+ return None
+
+ def GetBlogName(self):
+ """Finds the name of this blog as used in the 'alternate' URL.
+ An alternate URL is in the form 'http://blogName.blogspot.com/'. For an
+ entry representing the above example, this method would return 'blogName'.
+
+ Returns:
+ The blog's URL name component as a string.
+ """
+ for link in self.link:
+ if link.rel == 'alternate':
+ return self.blog_name_pattern.match(link.href).group(2)
+ return None
+
+
+class BlogEntry(BloggerEntry):
+ """Describes a blog entry in the feed listing a user's blogs."""
+
+
+def BlogEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(BlogEntry, xml_string)
+
+
+class BlogFeed(gdata.GDataFeed):
+ """Describes a feed of a user's blogs."""
+
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogEntry])
+
+
+def BlogFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(BlogFeed, xml_string)
+
+
+class BlogPostEntry(BloggerEntry):
+ """Describes a blog post entry in the feed of a blog's posts."""
+
+ post_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)')
+
+ def AddLabel(self, label):
+ """Adds a label to the blog post.
+
+ The label is represented by an Atom category element, so this method
+ is shorthand for appending a new atom.Category object.
+
+ Args:
+ label: str
+ """
+ self.category.append(atom.Category(scheme=LABEL_SCHEME, term=label))
+
+ def GetPostId(self):
+ """Extracts the postID string from the entry's Atom id.
+
+ Returns: A string of digits which identify this post within the blog.
+ """
+ if self.id.text:
+ return self.post_id_pattern.match(self.id.text).group(4)
+ return None
+
+
+def BlogPostEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(BlogPostEntry, xml_string)
+
+
+class BlogPostFeed(gdata.GDataFeed):
+ """Describes a feed of a blog's posts."""
+
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogPostEntry])
+
+
+def BlogPostFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(BlogPostFeed, xml_string)
+
+
+class InReplyTo(atom.AtomBase):
+ _tag = 'in-reply-to'
+ _namespace = THR_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['href'] = 'href'
+ _attributes['ref'] = 'ref'
+ _attributes['source'] = 'source'
+ _attributes['type'] = 'type'
+
+ def __init__(self, href=None, ref=None, source=None, type=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.href = href
+ self.ref = ref
+ self.source = source
+ self.type = type
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ self.text = text
+
+
+def InReplyToFromString(xml_string):
+ return atom.CreateClassFromXMLString(InReplyTo, xml_string)
+
+
+class CommentEntry(BloggerEntry):
+ """Describes a blog post comment entry in the feed of a blog post's
+ comments."""
+
+ _children = BloggerEntry._children.copy()
+ _children['{%s}in-reply-to' % THR_NAMESPACE] = ('in_reply_to', InReplyTo)
+
+ comment_id_pattern = re.compile('.*-(\w*)$')
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, control=None, title=None, updated=None,
+ in_reply_to=None, extension_elements=None, extension_attributes=None,
+ text=None):
+ BloggerEntry.__init__(self, author=author, category=category,
+ content=content, contributor=contributor, atom_id=atom_id, link=link,
+ published=published, rights=rights, source=source, summary=summary,
+ control=control, title=title, updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+ self.in_reply_to = in_reply_to
+
+ def GetCommentId(self):
+ """Extracts the commentID string from the entry's Atom id.
+
+    Returns: A string of digits which identify this comment within the blog.
+ """
+ if self.id.text:
+ return self.comment_id_pattern.match(self.id.text).group(1)
+ return None
+
+
+def CommentEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentEntry, xml_string)
+
+
+class CommentFeed(gdata.GDataFeed):
+ """Describes a feed of a blog post's comments."""
+
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CommentEntry])
+
+
+def CommentFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentFeed, xml_string)
+
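+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): 'post' is assumed to be a BlogPostEntry parsed from a blog's post
+# feed. AddLabel appends an Atom category using LABEL_SCHEME, and GetPostId
+# pulls the numeric post id out of the entry's Atom id.
+def _example_label_and_id(post):
+  post.AddLabel('python')
+  return post.GetPostId()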
+
diff --git a/python/gdata/blogger/client.py b/python/gdata/blogger/client.py
new file mode 100644
index 0000000..a0bad63
--- /dev/null
+++ b/python/gdata/blogger/client.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains a client to communicate with the Blogger servers.
+
+For documentation on the Blogger API, see:
+http://code.google.com/apis/blogger/
+"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import gdata.client
+import gdata.gauth
+import gdata.blogger.data
+import atom.data
+import atom.http_core
+
+
+# List user's blogs, takes a user ID, or 'default'.
+BLOGS_URL = 'http://www.blogger.com/feeds/%s/blogs'
+# Takes a blog ID.
+BLOG_POST_URL = 'http://www.blogger.com/feeds/%s/posts/default'
+# Takes a blog ID.
+BLOG_PAGE_URL = 'http://www.blogger.com/feeds/%s/pages/default'
+# Takes a blog ID and post ID.
+BLOG_POST_COMMENTS_URL = 'http://www.blogger.com/feeds/%s/%s/comments/default'
+# Takes a blog ID.
+BLOG_COMMENTS_URL = 'http://www.blogger.com/feeds/%s/comments/default'
+# Takes a blog ID.
+BLOG_ARCHIVE_URL = 'http://www.blogger.com/feeds/%s/archive/full'
+
+
+class BloggerClient(gdata.client.GDClient):
+ api_version = '2'
+ auth_service = 'blogger'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['blogger']
+
+ def get_blogs(self, user_id='default', auth_token=None,
+ desired_class=gdata.blogger.data.BlogFeed, **kwargs):
+ return self.get_feed(BLOGS_URL % user_id, auth_token=auth_token,
+ desired_class=desired_class, **kwargs)
+
+ GetBlogs = get_blogs
+
+ def get_posts(self, blog_id, auth_token=None,
+ desired_class=gdata.blogger.data.BlogPostFeed, query=None,
+ **kwargs):
+ return self.get_feed(BLOG_POST_URL % blog_id, auth_token=auth_token,
+ desired_class=desired_class, query=query, **kwargs)
+
+ GetPosts = get_posts
+
+ def get_pages(self, blog_id, auth_token=None,
+ desired_class=gdata.blogger.data.BlogPageFeed, query=None,
+ **kwargs):
+ return self.get_feed(BLOG_PAGE_URL % blog_id, auth_token=auth_token,
+ desired_class=desired_class, query=query, **kwargs)
+
+ GetPages = get_pages
+
+ def get_post_comments(self, blog_id, post_id, auth_token=None,
+ desired_class=gdata.blogger.data.CommentFeed,
+ query=None, **kwargs):
+ return self.get_feed(BLOG_POST_COMMENTS_URL % (blog_id, post_id),
+ auth_token=auth_token, desired_class=desired_class,
+ query=query, **kwargs)
+
+ GetPostComments = get_post_comments
+
+ def get_blog_comments(self, blog_id, auth_token=None,
+ desired_class=gdata.blogger.data.CommentFeed,
+ query=None, **kwargs):
+ return self.get_feed(BLOG_COMMENTS_URL % blog_id, auth_token=auth_token,
+ desired_class=desired_class, query=query, **kwargs)
+
+ GetBlogComments = get_blog_comments
+
+ def get_blog_archive(self, blog_id, auth_token=None, **kwargs):
+ return self.get_feed(BLOG_ARCHIVE_URL % blog_id, auth_token=auth_token,
+ **kwargs)
+
+ GetBlogArchive = get_blog_archive
+
+ def add_post(self, blog_id, title, body, labels=None, draft=False,
+ auth_token=None, title_type='text', body_type='html', **kwargs):
+ # Construct an atom Entry for the blog post to be sent to the server.
+ new_entry = gdata.blogger.data.BlogPost(
+ title=atom.data.Title(text=title, type=title_type),
+ content=atom.data.Content(text=body, type=body_type))
+ if labels:
+ for label in labels:
+ new_entry.add_label(label)
+ if draft:
+ new_entry.control = atom.data.Control(draft=atom.data.Draft(text='yes'))
+ return self.post(new_entry, BLOG_POST_URL % blog_id, auth_token=auth_token, **kwargs)
+
+ AddPost = add_post
+
+ def add_page(self, blog_id, title, body, draft=False, auth_token=None,
+ title_type='text', body_type='html', **kwargs):
+ new_entry = gdata.blogger.data.BlogPage(
+ title=atom.data.Title(text=title, type=title_type),
+ content=atom.data.Content(text=body, type=body_type))
+ if draft:
+ new_entry.control = atom.data.Control(draft=atom.data.Draft(text='yes'))
+ return self.post(new_entry, BLOG_PAGE_URL % blog_id, auth_token=auth_token, **kwargs)
+
+ AddPage = add_page
+
+ def add_comment(self, blog_id, post_id, body, auth_token=None,
+ title_type='text', body_type='html', **kwargs):
+ new_entry = gdata.blogger.data.Comment(
+ content=atom.data.Content(text=body, type=body_type))
+ return self.post(new_entry, BLOG_POST_COMMENTS_URL % (blog_id, post_id),
+ auth_token=auth_token, **kwargs)
+
+ AddComment = add_comment
+
+ def update(self, entry, auth_token=None, **kwargs):
+ # The Blogger API does not currently support ETags, so for now remove
+ # the ETag before performing an update.
+ old_etag = entry.etag
+ entry.etag = None
+ response = gdata.client.GDClient.update(self, entry,
+ auth_token=auth_token, **kwargs)
+ entry.etag = old_etag
+ return response
+
+ Update = update
+
+ def delete(self, entry_or_uri, auth_token=None, **kwargs):
+ if isinstance(entry_or_uri, (str, unicode, atom.http_core.Uri)):
+ return gdata.client.GDClient.delete(self, entry_or_uri,
+ auth_token=auth_token, **kwargs)
+ # The Blogger API does not currently support ETags, so for now remove
+ # the ETag before performing a delete.
+ old_etag = entry_or_uri.etag
+ entry_or_uri.etag = None
+ response = gdata.client.GDClient.delete(self, entry_or_uri,
+ auth_token=auth_token, **kwargs)
+    # TODO: if GDClient.delete raises an exception, the entry's etag may be
+ # left as None. Should revisit this logic.
+ entry_or_uri.etag = old_etag
+ return response
+
+ Delete = delete
+
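+
+# Hypothetical usage sketch (added for illustration, not part of the original
+# module): 'client' is assumed to be an authorized BloggerClient and 'blog_id'
+# a blog's numeric id string; the title, body, and label are placeholder
+# content for a draft post.
+def _example_add_draft_post(client, blog_id):
+  return client.add_post(blog_id, title='Hello world',
+                         body='<p>First post.</p>', labels=['example'],
+                         draft=True)
+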
+
+class Query(gdata.client.Query):
+
+ def __init__(self, order_by=None, **kwargs):
+ gdata.client.Query.__init__(self, **kwargs)
+ self.order_by = order_by
+
+ def modify_request(self, http_request):
+ gdata.client._add_query_param('orderby', self.order_by, http_request)
+ gdata.client.Query.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
diff --git a/python/gdata/blogger/data.py b/python/gdata/blogger/data.py
new file mode 100644
index 0000000..3cdaa73
--- /dev/null
+++ b/python/gdata/blogger/data.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Data model classes for parsing and generating XML for the Blogger API."""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import re
+import urlparse
+import atom.core
+import gdata.data
+
+
+LABEL_SCHEME = 'http://www.blogger.com/atom/ns#'
+THR_TEMPLATE = '{http://purl.org/syndication/thread/1.0}%s'
+
+BLOG_NAME_PATTERN = re.compile('(http://)(\w*)')
+BLOG_ID_PATTERN = re.compile('(tag:blogger.com,1999:blog-)(\w*)')
+BLOG_ID2_PATTERN = re.compile('tag:blogger.com,1999:user-(\d+)\.blog-(\d+)')
+POST_ID_PATTERN = re.compile(
+ '(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)')
+PAGE_ID_PATTERN = re.compile(
+ '(tag:blogger.com,1999:blog-)(\w*)(.page-)(\w*)')
+COMMENT_ID_PATTERN = re.compile('.*-(\w*)$')
+
+
+class BloggerEntry(gdata.data.GDEntry):
+ """Adds convenience methods inherited by all Blogger entries."""
+
+ def get_blog_id(self):
+ """Extracts the Blogger id of this blog.
+
+    This method is useful when constructing URLs by hand. The blog id is
+ often used in blogger operation URLs. This should not be confused with
+ the id member of a BloggerBlog. The id element is the Atom id XML element.
+ The blog id which this method returns is a part of the Atom id.
+
+ Returns:
+ The blog's unique id as a string.
+ """
+ if self.id.text:
+ match = BLOG_ID_PATTERN.match(self.id.text)
+ if match:
+ return match.group(2)
+ else:
+ return BLOG_ID2_PATTERN.match(self.id.text).group(2)
+ return None
+
+ GetBlogId = get_blog_id
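+
+ # Illustrative note (hypothetical id value): for an entry whose Atom id text
+ # is 'tag:blogger.com,1999:blog-1234567890', BLOG_ID_PATTERN captures
+ # '1234567890', which is the value get_blog_id returns.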
+
+ def get_blog_name(self):
+ """Finds the name of this blog as used in the 'alternate' URL.
+
+ An alternate URL is in the form 'http://blogName.blogspot.com/'. For an
+ entry representing the above example, this method would return 'blogName'.
+
+ Returns:
+ The blog's URL name component as a string.
+ """
+ for link in self.link:
+ if link.rel == 'alternate':
+ return urlparse.urlparse(link.href)[1].split(".", 1)[0]
+ return None
+
+ GetBlogName = get_blog_name
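+
+ # Illustrative note (hypothetical URL): for an 'alternate' link whose href is
+ # 'http://myexample.blogspot.com/', urlparse gives the host
+ # 'myexample.blogspot.com' and get_blog_name returns 'myexample'.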
+
+
+class Blog(BloggerEntry):
+ """Represents a blog which belongs to the user."""
+
+
+class BlogFeed(gdata.data.GDFeed):
+ entry = [Blog]
+
+
+class BlogPost(BloggerEntry):
+ """Represents a single post on a blog."""
+
+ def add_label(self, label):
+ """Adds a label to the blog post.
+
+ The label is represented by an Atom category element, so this method
+ is shorthand for appending a new atom.Category object.
+
+ Args:
+ label: str
+ """
+ self.category.append(atom.data.Category(scheme=LABEL_SCHEME, term=label))
+
+ AddLabel = add_label
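+
+ # Illustrative note (assumed label value): post.add_label('python') appends
+ # atom.data.Category(scheme=LABEL_SCHEME, term='python') to post.category.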
+
+ def get_post_id(self):
+ """Extracts the postID string from the entry's Atom id.
+
+ Returns: A string of digits which identify this post within the blog.
+ """
+ if self.id.text:
+ return POST_ID_PATTERN.match(self.id.text).group(4)
+ return None
+
+ GetPostId = get_post_id
+
+
+class BlogPostFeed(gdata.data.GDFeed):
+ entry = [BlogPost]
+
+
+class BlogPage(BloggerEntry):
+ """Represents a single page on a blog."""
+
+ def get_page_id(self):
+ """Extracts the pageID string from entry's Atom id.
+
+ Returns: A string of digits which identify this post within the blog.
+ """
+ if self.id.text:
+ return PAGE_ID_PATTERN.match(self.id.text).group(4)
+ return None
+
+ GetPageId = get_page_id
+
+
+class BlogPageFeed(gdata.data.GDFeed):
+ entry = [BlogPage]
+
+
+class InReplyTo(atom.core.XmlElement):
+ _qname = THR_TEMPLATE % 'in-reply-to'
+ href = 'href'
+ ref = 'ref'
+ source = 'source'
+ type = 'type'
+
+
+class Comment(BloggerEntry):
+ """Blog post comment entry in a feed listing comments on a post or blog."""
+ in_reply_to = InReplyTo
+
+ def get_comment_id(self):
+ """Extracts the commentID string from the entry's Atom id.
+
+ Returns: A string of digits which identify this comment within the blog post.
+ """
+ if self.id.text:
+ return COMMENT_ID_PATTERN.match(self.id.text).group(1)
+ return None
+
+ GetCommentId = get_comment_id
+
+
+class CommentFeed(gdata.data.GDFeed):
+ entry = [Comment]
diff --git a/python/gdata/blogger/service.py b/python/gdata/blogger/service.py
new file mode 100644
index 0000000..ad74d63
--- /dev/null
+++ b/python/gdata/blogger/service.py
@@ -0,0 +1,142 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes to interact with the Blogger server."""
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+import gdata.service
+import gdata.blogger
+
+
+class BloggerService(gdata.service.GDataService):
+
+ def __init__(self, email=None, password=None, source=None,
+ server='www.blogger.com', **kwargs):
+ """Creates a client for the Blogger service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'www.blogger.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='blogger', source=source,
+ server=server, **kwargs)
+
+ def GetBlogFeed(self, uri=None):
+ """Retrieve a list of the blogs to which the current user may manage."""
+ if not uri:
+ uri = '/feeds/default/blogs'
+ return self.Get(uri, converter=gdata.blogger.BlogFeedFromString)
+
+ def GetBlogCommentFeed(self, blog_id=None, uri=None):
+ """Retrieve a list of the comments for this blog."""
+ if blog_id:
+ uri = '/feeds/%s/comments/default' % blog_id
+ return self.Get(uri, converter=gdata.blogger.CommentFeedFromString)
+
+ def GetBlogPostFeed(self, blog_id=None, uri=None):
+ if blog_id:
+ uri = '/feeds/%s/posts/default' % blog_id
+ return self.Get(uri, converter=gdata.blogger.BlogPostFeedFromString)
+
+ def GetPostCommentFeed(self, blog_id=None, post_id=None, uri=None):
+ """Retrieve a list of the comments for this particular blog post."""
+ if blog_id and post_id:
+ uri = '/feeds/%s/%s/comments/default' % (blog_id, post_id)
+ return self.Get(uri, converter=gdata.blogger.CommentFeedFromString)
+
+ def AddPost(self, entry, blog_id=None, uri=None):
+ if blog_id:
+ uri = '/feeds/%s/posts/default' % blog_id
+ return self.Post(entry, uri,
+ converter=gdata.blogger.BlogPostEntryFromString)
+
+ def UpdatePost(self, entry, uri=None):
+ if not uri:
+ uri = entry.GetEditLink().href
+ return self.Put(entry, uri,
+ converter=gdata.blogger.BlogPostEntryFromString)
+
+ def DeletePost(self, entry=None, uri=None):
+ if not uri:
+ uri = entry.GetEditLink().href
+ return self.Delete(uri)
+
+ def AddComment(self, comment_entry, blog_id=None, post_id=None, uri=None):
+ """Adds a new comment to the specified blog post."""
+ if blog_id and post_id:
+ uri = '/feeds/%s/%s/comments/default' % (blog_id, post_id)
+ return self.Post(comment_entry, uri,
+ converter=gdata.blogger.CommentEntryFromString)
+
+ def DeleteComment(self, entry=None, uri=None):
+ if not uri:
+ uri = entry.GetEditLink().href
+ return self.Delete(uri)
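+
+ # Rough usage sketch (assumes a BloggerService that has already completed
+ # ProgrammaticLogin, and a made-up blog id):
+ #
+ #   feed = service.GetBlogPostFeed(blog_id='1234')
+ #   # fetches '/feeds/1234/posts/default' and parses it as a BlogPostFeed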
+
+
+class BlogQuery(gdata.service.Query):
+
+ def __init__(self, feed=None, params=None, categories=None, blog_id=None):
+ """Constructs a query object for the list of a user's Blogger blogs.
+
+ Args:
+ feed: str (optional) The beginning of the URL to be queried. If the
+ feed is not set, and there is no blog_id passed in, the default
+ value is used ('/feeds/default/blogs').
+ params: dict (optional)
+ categories: list (optional)
+ blog_id: str (optional)
+ """
+ if not feed and blog_id:
+ feed = '/feeds/default/blogs/%s' % blog_id
+ elif not feed:
+ feed = '/feeds/default/blogs'
+ gdata.service.Query.__init__(self, feed=feed, params=params,
+ categories=categories)
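+
+ # Illustrative note (made-up blog id): BlogQuery(blog_id='1234') produces a
+ # query whose feed is '/feeds/default/blogs/1234'; without a blog_id the
+ # feed defaults to '/feeds/default/blogs'.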
+
+
+class BlogPostQuery(gdata.service.Query):
+
+ def __init__(self, feed=None, params=None, categories=None, blog_id=None,
+ post_id=None):
+ if not feed and blog_id and post_id:
+ feed = '/feeds/%s/posts/default/%s' % (blog_id, post_id)
+ elif not feed and blog_id:
+ feed = '/feeds/%s/posts/default' % blog_id
+ gdata.service.Query.__init__(self, feed=feed, params=params,
+ categories=categories)
+
+
+class BlogCommentQuery(gdata.service.Query):
+
+ def __init__(self, feed=None, params=None, categories=None, blog_id=None,
+ post_id=None, comment_id=None):
+ if not feed and blog_id and comment_id:
+ feed = '/feeds/%s/comments/default/%s' % (blog_id, comment_id)
+ elif not feed and blog_id and post_id:
+ feed = '/feeds/%s/%s/comments/default' % (blog_id, post_id)
+ elif not feed and blog_id:
+ feed = '/feeds/%s/comments/default' % blog_id
+ gdata.service.Query.__init__(self, feed=feed, params=params,
+ categories=categories)
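+
+ # Illustrative note (made-up ids): BlogCommentQuery(blog_id='1234',
+ # post_id='8') yields the feed '/feeds/1234/8/comments/default', while
+ # passing comment_id='42' instead yields '/feeds/1234/comments/default/42'.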
diff --git a/python/gdata/books/__init__.py b/python/gdata/books/__init__.py
new file mode 100644
index 0000000..1a961ab
--- /dev/null
+++ b/python/gdata/books/__init__.py
@@ -0,0 +1,473 @@
+#!/usr/bin/python
+
+"""
+ Data Models for books.service
+
+ All classes can be instantiated from an xml string using their FromString
+ class method.
+
+ Notes:
+ * Book.title displays the first dc:title because the returned XML
+ repeats that datum as atom:title.
+ There is an undocumented gbs:openAccess element that is not parsed.
+"""
+
+__author__ = "James Sams "
+__copyright__ = "Apache License v2.0"
+
+import atom
+import gdata
+
+
+BOOK_SEARCH_NAMESPACE = 'http://schemas.google.com/books/2008'
+DC_NAMESPACE = 'http://purl.org/dc/terms'
+ANNOTATION_REL = "http://schemas.google.com/books/2008/annotation"
+INFO_REL = "http://schemas.google.com/books/2008/info"
+LABEL_SCHEME = "http://schemas.google.com/books/2008/labels"
+PREVIEW_REL = "http://schemas.google.com/books/2008/preview"
+THUMBNAIL_REL = "http://schemas.google.com/books/2008/thumbnail"
+FULL_VIEW = "http://schemas.google.com/books/2008#view_all_pages"
+PARTIAL_VIEW = "http://schemas.google.com/books/2008#view_partial"
+NO_VIEW = "http://schemas.google.com/books/2008#view_no_pages"
+UNKNOWN_VIEW = "http://schemas.google.com/books/2008#view_unknown"
+EMBEDDABLE = "http://schemas.google.com/books/2008#embeddable"
+NOT_EMBEDDABLE = "http://schemas.google.com/books/2008#not_embeddable"
+
+
+
+class _AtomFromString(atom.AtomBase):
+
+ #@classmethod
+ def FromString(cls, s):
+ return atom.CreateClassFromXMLString(cls, s)
+
+ FromString = classmethod(FromString)
+
+
+class Creator(_AtomFromString):
+ """
+ The element identifies an author-or more generally, an entity
+ responsible for creating the volume in question. Examples of a creator
+ include a person, an organization, or a service. In the case of
+ anthologies, proceedings, or other edited works, this field may be used to
+ indicate editors or other entities responsible for collecting the volume's
+ contents.
+
+ This element appears as a child of . If there are multiple authors or
+ contributors to the book, there may be multiple elements in the
+ volume entry (one for each creator or contributor).
+ """
+
+ _tag = 'creator'
+ _namespace = DC_NAMESPACE
+
+
+class Date(_AtomFromString): #iso 8601 / W3CDTF profile
+ """
+ The element indicates the publication date of the specific volume
+ in question. If the book is a reprint, this is the reprint date, not the
+ original publication date. The date is encoded according to the ISO-8601
+ standard (and more specifically, the W3CDTF profile).
+
+ The element can appear only as a child of .
+
+ Usually only the year or the year and the month are given.
+
+ YYYY-MM-DDThh:mm:ssTZD TZD = -hh:mm or +hh:mm
+ """
+
+ _tag = 'date'
+ _namespace = DC_NAMESPACE
+
+
+class Description(_AtomFromString):
+ """
+ The element includes text that describes a book or book
+ result. In a search result feed, this may be a search result "snippet" that
+ contains the words around the user's search term. For a single volume feed,
+ this element may contain a synopsis of the book.
+
+ The element can appear only as a child of
+ """
+
+ _tag = 'description'
+ _namespace = DC_NAMESPACE
+
+
+class Format(_AtomFromString):
+ """
+ The element describes the physical properties of the volume.
+ Currently, it indicates the number of pages in the book, but more
+ information may be added to this field in the future.
+
+ This element can appear only as a child of .
+ """
+
+ _tag = 'format'
+ _namespace = DC_NAMESPACE
+
+
+class Identifier(_AtomFromString):
+ """
+ The element provides an unambiguous reference to a
+ particular book.
+ * Every contains at least one child.
+ * The first identifier is always the unique string Book Search has assigned
+ to the volume (such as s1gVAAAAYAAJ). This is the ID that appears in the
+ book's URL in the Book Search GUI, as well as in the URL of that book's
+ single item feed.
+ * Many books contain additional elements. These provide
+ alternate, external identifiers to the volume. Such identifiers may
+ include the ISBNs, ISSNs, Library of Congress Control Numbers (LCCNs),
+ and OCLC numbers; they are prepended with a corresponding namespace
+ prefix (such as "ISBN:").
+ * Any can be passed to the Dynamic Links, used to
+ instantiate an Embedded Viewer, or even used to construct static links to
+ Book Search.
+ The element can appear only as a child of .
+ """
+
+ _tag = 'identifier'
+ _namespace = DC_NAMESPACE
+
+
+class Publisher(_AtomFromString):
+ """
+ The element contains the name of the entity responsible for
+ producing and distributing the volume (usually the specific edition of this
+ book). Examples of a publisher include a person, an organization, or a
+ service.
+
+ This element can appear only as a child of . If there is more than
+ one publisher, multiple elements may appear.
+ """
+
+ _tag = 'publisher'
+ _namespace = DC_NAMESPACE
+
+
+class Subject(_AtomFromString):
+ """
+ The element identifies the topic of the book. Usually this is
+ a Library of Congress Subject Heading (LCSH) or Book Industry Standards
+ and Communications Subject Heading (BISAC).
+
+ The element can appear only as a child of . There may
+ be multiple elements per entry.
+ """
+
+ _tag = 'subject'
+ _namespace = DC_NAMESPACE
+
+
+class Title(_AtomFromString):
+ """
+ The element contains the title of a book as it was published. If
+ a book has a subtitle, it appears as a second element in the book
+ result's .
+ """
+
+ _tag = 'title'
+ _namespace = DC_NAMESPACE
+
+
+class Viewability(_AtomFromString):
+ """
+ Google Book Search respects the user's local copyright restrictions. As a
+ result, previews or full views of some books are not available in all
+ locations. The <gbs:viewability> element indicates whether a book is fully
+ viewable, can be previewed, or only has "about the book" information. These
+ three "viewability modes" are the same ones returned by the Dynamic Links
+ API.
+
+ The <gbs:viewability> element can appear only as a child of <entry>.
+
+ The value attribute will take the form of the following URIs to represent
+ the relevant viewing capability:
+
+ Full View: http://schemas.google.com/books/2008#view_all_pages
+ Limited Preview: http://schemas.google.com/books/2008#view_partial
+ Snippet View/No Preview: http://schemas.google.com/books/2008#view_no_pages
+ Unknown view: http://schemas.google.com/books/2008#view_unknown
+ """
+
+ _tag = 'viewability'
+ _namespace = BOOK_SEARCH_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ self.value = value
+ _AtomFromString.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class Embeddability(_AtomFromString):
+ """
+ Many of the books found on Google Book Search can be embedded on third-party
+ sites using the Embedded Viewer. The <gbs:embeddability> element indicates
+ whether a particular book result is available for embedding. By definition,
+ a book that cannot be previewed on Book Search cannot be embedded on third-
+ party sites.
+
+ The <gbs:embeddability> element can appear only as a child of <entry>.
+
+ The value attribute will take on one of the following URIs:
+ embeddable: http://schemas.google.com/books/2008#embeddable
+ not embeddable: http://schemas.google.com/books/2008#not_embeddable
+ """
+
+ _tag = 'embeddability'
+ _namespace = BOOK_SEARCH_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.value = value
+ _AtomFromString.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class Review(_AtomFromString):
+ """
+ When present, the <gbs:review> element contains a user-generated review
+ for a given book. This element currently appears only in the user library
+ and user annotation feeds, as a child of <entry>.
+
+ type: text, html, xhtml
+ xml:lang: id of the language, a guess, (always two letters?)
+ """
+
+ _tag = 'review'
+ _namespace = BOOK_SEARCH_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['type'] = 'type'
+ _attributes['{http://www.w3.org/XML/1998/namespace}lang'] = 'lang'
+
+ def __init__(self, type=None, lang=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ self.type = type
+ self.lang = lang
+ _AtomFromString.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class Rating(_AtomFromString):
+ """All attributes must take an integral string between 1 and 5.
+ The min, max, and average attributes represent 'community' ratings. The
+ value attribute is the user's (of the feed from which the item is fetched,
+ not necessarily the authenticated user) rating of the book.
+ """
+
+ _tag = 'rating'
+ _namespace = gdata.GDATA_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['min'] = 'min'
+ _attributes['max'] = 'max'
+ _attributes['average'] = 'average'
+ _attributes['value'] = 'value'
+
+ def __init__(self, min=None, max=None, average=None, value=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ self.min = min
+ self.max = max
+ self.average = average
+ self.value = value
+ _AtomFromString.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class Book(_AtomFromString, gdata.GDataEntry):
+ """
+ Represents an <entry> from either a search, annotation, library, or single
+ item feed. Note that the dc_title attribute is the proper title of the
+ volume; title is an atom element and may not represent the full title.
+ """
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ for i in (Creator, Identifier, Publisher, Subject,):
+ _children['{%s}%s' % (i._namespace, i._tag)] = (i._tag, [i])
+ for i in (Date, Description, Format, Viewability, Embeddability,
+ Review, Rating): # Review, Rating may appear only in annotation/library entries
+ _children['{%s}%s' % (i._namespace, i._tag)] = (i._tag, i)
+ # there is an atom title as well, should we clobber that?
+ del(i)
+ _children['{%s}%s' % (Title._namespace, Title._tag)] = ('dc_title', [Title])
+
+ def to_dict(self):
+ """Returns a dictionary of the book's available metadata. If the data
+ cannot be discovered, it is not included as a key in the returned dict.
+ The possible keys are: authors, embeddability, date, description,
+ format, identifiers, publishers, rating, review, subjects, title, and
+ viewability.
+
+ Notes:
+ * Plural keys will be lists
+ * Singular keys will be strings
+ * Title, despite usually being a list, joins the title and subtitle
+ with a space as a single string.
+ * embeddability and viewability only return the portion of the URI
+ after #
+ * identifiers is a list of tuples, where the first item of each tuple
+ is the type of identifier and the second item is the identifying
+ string. Note that while doing dict() on this tuple may be possible,
+ some items may have multiple of the same identifier and converting
+ to a dict may result in collisions/dropped data.
+ * Rating returns only the user's rating. See Rating class for precise
+ definition.
+ """
+ d = {}
+ if self.GetAnnotationLink():
+ d['annotation'] = self.GetAnnotationLink().href
+ if self.creator:
+ d['authors'] = [x.text for x in self.creator]
+ if self.embeddability:
+ d['embeddability'] = self.embeddability.value.split('#')[-1]
+ if self.date:
+ d['date'] = self.date.text
+ if self.description:
+ d['description'] = self.description.text
+ if self.format:
+ d['format'] = self.format.text
+ if self.identifier:
+ d['identifiers'] = [('google_id', self.identifier[0].text)]
+ for x in self.identifier[1:]:
+ l = x.text.split(':') # should we lower the case of the ids?
+ d['identifiers'].append((l[0], ':'.join(l[1:])))
+ if self.GetInfoLink():
+ d['info'] = self.GetInfoLink().href
+ if self.GetPreviewLink():
+ d['preview'] = self.GetPreviewLink().href
+ if self.publisher:
+ d['publishers'] = [x.text for x in self.publisher]
+ if self.rating:
+ d['rating'] = self.rating.value
+ if self.review:
+ d['review'] = self.review.text
+ if self.subject:
+ d['subjects'] = [x.text for x in self.subject]
+ if self.GetThumbnailLink():
+ d['thumbnail'] = self.GetThumbnailLink().href
+ if self.dc_title:
+ d['title'] = ' '.join([x.text for x in self.dc_title])
+ if self.viewability:
+ d['viewability'] = self.viewability.value.split('#')[-1]
+ return d
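+
+ # Illustrative note (hypothetical values): for a volume with one creator and
+ # one ISBN identifier, to_dict() might return something like
+ # {'authors': ['Jane Doe'], 'title': 'An Example Title',
+ #  'identifiers': [('google_id', 's1gVAAAAYAAJ'), ('ISBN', '0123456789')], ...}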
+
+ def __init__(self, creator=None, date=None,
+ description=None, format=None, author=None, identifier=None,
+ publisher=None, subject=None, dc_title=None, viewability=None,
+ embeddability=None, review=None, rating=None, category=None,
+ content=None, contributor=None, atom_id=None, link=None,
+ published=None, rights=None, source=None, summary=None,
+ title=None, control=None, updated=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ self.creator = creator
+ self.date = date
+ self.description = description
+ self.format = format
+ self.identifier = identifier
+ self.publisher = publisher
+ self.subject = subject
+ self.dc_title = dc_title or []
+ self.viewability = viewability
+ self.embeddability = embeddability
+ self.review = review
+ self.rating = rating
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, contributor=contributor, atom_id=atom_id,
+ link=link, published=published, rights=rights, source=source,
+ summary=summary, title=title, control=control, updated=updated,
+ text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+ def GetThumbnailLink(self):
+ """Returns the atom.Link object representing the thumbnail URI."""
+ for i in self.link:
+ if i.rel == THUMBNAIL_REL:
+ return i
+
+ def GetInfoLink(self):
+ """
+ Returns the atom.Link object representing the human-readable info URI.
+ """
+ for i in self.link:
+ if i.rel == INFO_REL:
+ return i
+
+ def GetPreviewLink(self):
+ """Returns the atom.Link object representing the preview URI."""
+ for i in self.link:
+ if i.rel == PREVIEW_REL:
+ return i
+
+ def GetAnnotationLink(self):
+ """
+ Returns the atom.Link object representing the Annotation URI.
+ Note that the use of www.books in the href of this link seems to make
+ this information useless. Using books.service.ANNOTATION_FEED and
+ BOOK_SERVER to construct your URI seems to work better.
+ """
+ for i in self.link:
+ if i.rel == ANNOTATION_REL:
+ return i
+
+ def set_rating(self, value):
+ """Set user's rating. Must be an integral string between 1 nad 5"""
+ assert (value in ('1','2','3','4','5'))
+ if not isinstance(self.rating, Rating):
+ self.rating = Rating()
+ self.rating.value = value
+
+ def set_review(self, text, type='text', lang='en'):
+ """Set user's review text"""
+ self.review = Review(text=text, type=type, lang=lang)
+
+ def get_label(self):
+ """Get users label for the item as a string"""
+ for i in self.category:
+ if i.scheme == LABEL_SCHEME:
+ return i.term
+
+ def set_label(self, term):
+ """Clear pre-existing label for the item and set term as the label."""
+ self.remove_label()
+ self.category.append(atom.Category(term=term, scheme=LABEL_SCHEME))
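+
+ # Illustrative note (assumed label term): book.set_label('to-read') removes
+ # any existing LABEL_SCHEME category, appends
+ # atom.Category(term='to-read', scheme=LABEL_SCHEME), and get_label() then
+ # returns 'to-read'.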
+
+ def remove_label(self):
+ """Clear the user's label for the item"""
+ ln = len(self.category)
+ for i, j in enumerate(self.category[::-1]):
+ if j.scheme == LABEL_SCHEME:
+ del(self.category[ln-1-i])
+
+ def clean_annotations(self):
+ """Clear all annotations from an item. Useful for taking an item from
+ another user's library/annotation feed and adding it to the
+ authenticated user's library without adopting annotations."""
+ self.remove_label()
+ self.review = None
+ self.rating = None
+
+
+ def get_google_id(self):
+ """Get Google's ID of the item."""
+ return self.id.text.split('/')[-1]
+
+
+class BookFeed(_AtomFromString, gdata.GDataFeed):
+ """Represents a feed of entries from a search."""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}%s' % (Book._namespace, Book._tag)] = (Book._tag, [Book])
+
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testfile('datamodels.txt')
diff --git a/python/gdata/books/data.py b/python/gdata/books/data.py
new file mode 100644
index 0000000..3f7f978
--- /dev/null
+++ b/python/gdata/books/data.py
@@ -0,0 +1,90 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the data classes of the Google Book Search Data API"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.data
+import gdata.dublincore.data
+import gdata.opensearch.data
+
+
+GBS_TEMPLATE = '{http://schemas.google.com/books/2008/}%s'
+
+
+class CollectionEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of collections."""
+
+
+class CollectionFeed(gdata.data.BatchFeed):
+ """Describes a Book Search collection feed."""
+ entry = [CollectionEntry]
+
+
+class Embeddability(atom.core.XmlElement):
+ """Describes an embeddability."""
+ _qname = GBS_TEMPLATE % 'embeddability'
+ value = 'value'
+
+
+class OpenAccess(atom.core.XmlElement):
+ """Describes an open access."""
+ _qname = GBS_TEMPLATE % 'openAccess'
+ value = 'value'
+
+
+class Review(atom.core.XmlElement):
+ """User-provided review."""
+ _qname = GBS_TEMPLATE % 'review'
+ lang = 'lang'
+ type = 'type'
+
+
+class Viewability(atom.core.XmlElement):
+ """Describes a viewability."""
+ _qname = GBS_TEMPLATE % 'viewability'
+ value = 'value'
+
+
+class VolumeEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of Book Search volumes."""
+ comments = gdata.data.Comments
+ language = [gdata.dublincore.data.Language]
+ open_access = OpenAccess
+ format = [gdata.dublincore.data.Format]
+ dc_title = [gdata.dublincore.data.Title]
+ viewability = Viewability
+ embeddability = Embeddability
+ creator = [gdata.dublincore.data.Creator]
+ rating = gdata.data.Rating
+ description = [gdata.dublincore.data.Description]
+ publisher = [gdata.dublincore.data.Publisher]
+ date = [gdata.dublincore.data.Date]
+ subject = [gdata.dublincore.data.Subject]
+ identifier = [gdata.dublincore.data.Identifier]
+ review = Review
+
+
+class VolumeFeed(gdata.data.BatchFeed):
+ """Describes a Book Search volume feed."""
+ entry = [VolumeEntry]
+
+
diff --git a/python/gdata/books/service.py b/python/gdata/books/service.py
new file mode 100644
index 0000000..cbb846f
--- /dev/null
+++ b/python/gdata/books/service.py
@@ -0,0 +1,266 @@
+#!/usr/bin/python
+
+"""
+ Extend gdata.service.GDataService to support authenticated CRUD operations
+ on the Books API.
+
+ http://code.google.com/apis/books/docs/getting-started.html
+ http://code.google.com/apis/books/docs/gdata/developers_guide_protocol.html
+
+ TODO: (here and __init__)
+ * search based on label, review, or other annotations (possible?)
+ * edits (specifically, Put requests) seem to fail to effect a change
+
+ Problems With API:
+ * Adding a book with a review to the library adds a note, not a review.
+ This does not get included in the returned item. You see this by
+ looking at My Library through the website.
+ * Editing a review never edits a review (unless it is freshly added, but
+ see above). More generally,
+ * a Put request with changed annotations (label/rating/review) does NOT
+ change the data. Note: Put requests only work on the href from
+ GetEditLink (as per the spec). Do not try to PUT to the annotate or
+ library feeds, this will cause a 400 Invalid URI Bad Request response.
+ Attempting to Post to one of the feeds with the updated annotations
+ does not update them. See the following for (hopefully) a follow up:
+ google.com/support/forum/p/booksearch-apis/thread?tid=27fd7f68de438fc8
+ * Attempts to workaround the edit problem continue to fail. For example,
+ removing the item, editing the data, and re-adding the item gives us only
+ our originally added data (annotations). This occurs even if we
+ completely shut python down, refetch the book from the public feed,
+ and re-add it. There is some kind of persistence going on that I
+ cannot change. This is likely due to the annotations being cached in
+ the annotation feed and the inability to edit (see Put, above)
+ * GetAnnotationLink has www.books.... as the server, but hitting www...
+ results in a bad URI error.
+ * Spec indicates there may be multiple labels, but there does not seem
+ to be a way to get the server to accept multiple labels, nor does the
+ web interface have an obvious way to have multiple labels. Multiple
+ labels are never returned.
+"""
+
+__author__ = "James Sams "
+__copyright__ = "Apache License v2.0"
+
+from shlex import split
+
+import gdata.service
+try:
+ import books
+except ImportError:
+ import gdata.books as books
+
+
+BOOK_SERVER = "books.google.com"
+GENERAL_FEED = "/books/feeds/volumes"
+ITEM_FEED = "/books/feeds/volumes/"
+LIBRARY_FEED = "/books/feeds/users/%s/collections/library/volumes"
+ANNOTATION_FEED = "/books/feeds/users/%s/volumes"
+PARTNER_FEED = "/books/feeds/p/%s/volumes"
+BOOK_SERVICE = "print"
+ACCOUNT_TYPE = "HOSTED_OR_GOOGLE"
+
+
+class BookService(gdata.service.GDataService):
+
+ def __init__(self, email=None, password=None, source=None,
+ server=BOOK_SERVER, account_type=ACCOUNT_TYPE,
+ exception_handlers=tuple(), **kwargs):
+ """source should be of form 'ProgramCompany - ProgramName - Version'"""
+
+ gdata.service.GDataService.__init__(self, email=email,
+ password=password, service=BOOK_SERVICE, source=source,
+ server=server, **kwargs)
+ self.exception_handlers = exception_handlers
+
+ def search(self, q, start_index="1", max_results="10",
+ min_viewability="none", feed=GENERAL_FEED,
+ converter=books.BookFeed.FromString):
+ """
+ Query the Public search feed. q is either a search string or a
+ gdata.service.Query instance with a query set.
+
+ min_viewability must be "none", "partial", or "full".
+
+ If you change the feed to a single item feed, note that you will
+ probably need to change the converter to be Book.FromString
+ """
+
+ if not isinstance(q, gdata.service.Query):
+ q = gdata.service.Query(text_query=q)
+ if feed:
+ q.feed = feed
+ q['start-index'] = start_index
+ q['max-results'] = max_results
+ q['min-viewability'] = min_viewability
+ return self.Get(uri=q.ToUri(),converter=converter)
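+
+ # Rough usage sketch (assumes a BookService instance `service` and a made-up
+ # query string): service.search('python programming') returns a
+ # books.BookFeed whose entry list contains books.Book instances for the
+ # first ten results of the public volumes feed.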
+
+ def search_by_keyword(self, q='', feed=GENERAL_FEED, start_index="1",
+ max_results="10", min_viewability="none", **kwargs):
+ """
+ Query the Public Search Feed by keyword. Non-keyword strings can be
+ set in q. This is quite fragile. Is there a function somewhere in
+ the Google library that will parse a query the same way that Google
+ does?
+
+ Legal Identifiers are listed below and correspond to their meaning
+ at http://books.google.com/advanced_book_search:
+ all_words
+ exact_phrase
+ at_least_one
+ without_words
+ title
+ author
+ publisher
+ subject
+ isbn
+ lccn
+ oclc
+ seemingly unsupported:
+ publication_date: a sequence of two, two tuples:
+ ((min_month,min_year),(max_month,max_year))
+ where month is one/two digit month, year is 4 digit, eg:
+ (('1','2000'),('10','2003')). Lower bound is inclusive,
+ upper bound is exclusive
+ """
+
+ for k, v in kwargs.items():
+ if not v:
+ continue
+ k = k.lower()
+ if k == 'all_words':
+ q = "%s %s" % (q, v)
+ elif k == 'exact_phrase':
+ q = '%s "%s"' % (q, v.strip('"'))
+ elif k == 'at_least_one':
+ q = '%s %s' % (q, ' '.join(['OR "%s"' % x for x in split(v)]))
+ elif k == 'without_words':
+ q = '%s %s' % (q, ' '.join(['-"%s"' % x for x in split(v)]))
+ elif k in ('author','title', 'publisher'):
+ q = '%s %s' % (q, ' '.join(['in%s:"%s"'%(k,x) for x in split(v)]))
+ elif k == 'subject':
+ q = '%s %s' % (q, ' '.join(['%s:"%s"' % (k,x) for x in split(v)]))
+ elif k == 'isbn':
+ q = '%s ISBN%s' % (q, v)
+ elif k == 'issn':
+ q = '%s ISSN%s' % (q,v)
+ elif k == 'oclc':
+ q = '%s OCLC%s' % (q,v)
+ else:
+ raise ValueError("Unsupported search keyword")
+ return self.search(q.strip(),start_index=start_index, feed=feed,
+ max_results=max_results,
+ min_viewability=min_viewability)
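+
+ # Rough usage sketch (made-up values): search_by_keyword(author='Knuth',
+ # exact_phrase='art of computer programming') builds a query string roughly
+ # like 'inauthor:"Knuth" "art of computer programming"' and delegates the
+ # actual request to search().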
+
+ def search_library(self, q, id='me', **kwargs):
+ """Like search, but in a library feed. Default is the authenticated
+ user's feed. Change by setting id."""
+
+ if 'feed' in kwargs:
+ raise ValueError("kwarg 'feed' conflicts with library_id")
+ feed = LIBRARY_FEED % id
+ return self.search(q, feed=feed, **kwargs)
+
+ def search_library_by_keyword(self, id='me', **kwargs):
+ """Hybrid of search_by_keyword and search_library
+ """
+
+ if 'feed' in kwargs:
+ raise ValueError("kwarg 'feed' conflicts with library_id")
+ feed = LIBRARY_FEED % id
+ return self.search_by_keyword(feed=feed,**kwargs)
+
+ def search_annotations(self, q, id='me', **kwargs):
+ """Like search, but in an annotation feed. Default is the authenticated
+ user's feed. Change by setting id."""
+
+ if 'feed' in kwargs:
+ raise ValueError("kwarg 'feed' conflicts with library_id")
+ feed = ANNOTATION_FEED % id
+ return self.search(q, feed=feed, **kwargs)
+
+ def search_annotations_by_keyword(self, id='me', **kwargs):
+ """Hybrid of search_by_keyword and search_annotations
+ """
+
+ if 'feed' in kwargs:
+ raise ValueError("kwarg 'feed' conflicts with library_id")
+ feed = ANNOTATION_FEED % id
+ return self.search_by_keyword(feed=feed,**kwargs)
+
+ def add_item_to_library(self, item):
+ """Add the item, either an XML string or books.Book instance, to the
+ user's library feed"""
+
+ feed = LIBRARY_FEED % 'me'
+ return self.Post(data=item, uri=feed, converter=books.Book.FromString)
+
+ def remove_item_from_library(self, item):
+ """
+ Remove the item, a books.Book instance, from the authenticated user's
+ library feed. Using an item retrieved from a public search will fail.
+ """
+
+ return self.Delete(item.GetEditLink().href)
+
+ def add_annotation(self, item):
+ """
+ Add the item, either an XML string or books.Book instance, to the
+ user's annotation feed.
+ """
+ # do not use GetAnnotationLink, results in 400 Bad URI due to www
+ return self.Post(data=item, uri=ANNOTATION_FEED % 'me',
+ converter=books.Book.FromString)
+
+ def edit_annotation(self, item):
+ """
+ Send an edited item, a books.Book instance, to the user's annotation
+ feed. Note that whereas add_annotation simply adds extra annotations
+ (except ratings, which are immutable once set) to the item in the
+ annotation feed, sending an edit request will also remove any annotation
+ that has been removed from the item. This should not happen with
+ add_annotation.
+ """
+
+ return self.Put(data=item, uri=item.GetEditLink().href,
+ converter=books.Book.FromString)
+
+ def get_by_google_id(self, id):
+ return self.Get(ITEM_FEED + id, converter=books.Book.FromString)
+
+ def get_library(self, id='me',feed=LIBRARY_FEED, start_index="1",
+ max_results="100", min_viewability="none",
+ converter=books.BookFeed.FromString):
+ """
+ Return a generator object that will yield books.Book instances until
+ the search feed no longer returns an item from the GetNextLink method.
+ Thus max_results is not the maximum number of items that will be
+ returned, but rather the number of items per page of searches. This has
+ been set high to reduce the required number of network requests.
+ """
+
+ q = gdata.service.Query()
+ q.feed = feed % id
+ q['start-index'] = start_index
+ q['max-results'] = max_results
+ q['min-viewability'] = min_viewability
+ x = self.Get(uri=q.ToUri(), converter=converter)
+ while 1:
+ for entry in x.entry:
+ yield entry
+ else:
+ l = x.GetNextLink()
+ if l: # hope the server preserves our preferences
+ x = self.Get(uri=l.href, converter=converter)
+ else:
+ break
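+
+ # Rough usage sketch (assumes an authenticated BookService `service`): the
+ # generator can simply be iterated, e.g.
+ # titles = [b.to_dict().get('title') for b in service.get_library()],
+ # and further pages are fetched transparently via GetNextLink.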
+
+ def get_annotations(self, id='me', start_index="1", max_results="100",
+ min_viewability="none", converter=books.BookFeed.FromString):
+ """
+ Like get_library, but for the annotation feed
+ """
+
+ return self.get_library(id=id, feed=ANNOTATION_FEED,
+ max_results=max_results, min_viewability = min_viewability,
+ converter=converter)
diff --git a/python/gdata/calendar/__init__.py b/python/gdata/calendar/__init__.py
new file mode 100644
index 0000000..06c0410
--- /dev/null
+++ b/python/gdata/calendar/__init__.py
@@ -0,0 +1,1044 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains extensions to ElementWrapper objects used with Google Calendar."""
+
+
+__author__ = 'api.vli (Vivian Li), api.rboyd (Ryan Boyd)'
+
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom
+import gdata
+
+
+# XML namespaces which are often used in Google Calendar entities.
+GCAL_NAMESPACE = 'http://schemas.google.com/gCal/2005'
+GCAL_TEMPLATE = '{http://schemas.google.com/gCal/2005}%s'
+WEB_CONTENT_LINK_REL = '%s/%s' % (GCAL_NAMESPACE, 'webContent')
+GACL_NAMESPACE = gdata.GACL_NAMESPACE
+GACL_TEMPLATE = gdata.GACL_TEMPLATE
+
+
+
+class ValueAttributeContainer(atom.AtomBase):
+ """A parent class for all Calendar classes which have a value attribute.
+
+ Children include Color, AccessLevel, Hidden
+ """
+
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+class Color(ValueAttributeContainer):
+ """The Google Calendar color element"""
+
+ _tag = 'color'
+ _namespace = GCAL_NAMESPACE
+ _children = ValueAttributeContainer._children.copy()
+ _attributes = ValueAttributeContainer._attributes.copy()
+
+
+
+class AccessLevel(ValueAttributeContainer):
+ """The Google Calendar accesslevel element"""
+
+ _tag = 'accesslevel'
+ _namespace = GCAL_NAMESPACE
+ _children = ValueAttributeContainer._children.copy()
+ _attributes = ValueAttributeContainer._attributes.copy()
+
+
+class Hidden(ValueAttributeContainer):
+ """The Google Calendar hidden element"""
+
+ _tag = 'hidden'
+ _namespace = GCAL_NAMESPACE
+ _children = ValueAttributeContainer._children.copy()
+ _attributes = ValueAttributeContainer._attributes.copy()
+
+
+class Selected(ValueAttributeContainer):
+ """The Google Calendar selected element"""
+
+ _tag = 'selected'
+ _namespace = GCAL_NAMESPACE
+ _children = ValueAttributeContainer._children.copy()
+ _attributes = ValueAttributeContainer._attributes.copy()
+
+
+class Timezone(ValueAttributeContainer):
+ """The Google Calendar timezone element"""
+
+ _tag = 'timezone'
+ _namespace = GCAL_NAMESPACE
+ _children = ValueAttributeContainer._children.copy()
+ _attributes = ValueAttributeContainer._attributes.copy()
+
+
+class Where(atom.AtomBase):
+ """The Google Calendar Where element"""
+
+ _tag = 'where'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['valueString'] = 'value_string'
+
+ def __init__(self, value_string=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value_string = value_string
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class CalendarListEntry(gdata.GDataEntry, gdata.LinkFinder):
+ """A Google Calendar meta Entry flavor of an Atom Entry """
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}color' % GCAL_NAMESPACE] = ('color', Color)
+ _children['{%s}accesslevel' % GCAL_NAMESPACE] = ('access_level',
+ AccessLevel)
+ _children['{%s}hidden' % GCAL_NAMESPACE] = ('hidden', Hidden)
+ _children['{%s}selected' % GCAL_NAMESPACE] = ('selected', Selected)
+ _children['{%s}timezone' % GCAL_NAMESPACE] = ('timezone', Timezone)
+ _children['{%s}where' % gdata.GDATA_NAMESPACE] = ('where', Where)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ color=None, access_level=None, hidden=None, timezone=None,
+ selected=None,
+ where=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title,
+ updated=updated, text=None)
+
+ self.color = color
+ self.access_level = access_level
+ self.hidden = hidden
+ self.selected = selected
+ self.timezone = timezone
+ self.where = where
+
+
+class CalendarListFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Calendar meta feed flavor of an Atom Feed"""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CalendarListEntry])
+
+
+class Scope(atom.AtomBase):
+ """The Google ACL scope element"""
+
+ _tag = 'scope'
+ _namespace = GACL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+ _attributes['type'] = 'type'
+
+ def __init__(self, extension_elements=None, value=None, scope_type=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.type = scope_type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Role(ValueAttributeContainer):
+ """The Google Calendar timezone element"""
+
+ _tag = 'role'
+ _namespace = GACL_NAMESPACE
+ _children = ValueAttributeContainer._children.copy()
+ _attributes = ValueAttributeContainer._attributes.copy()
+
+
+class CalendarAclEntry(gdata.GDataEntry, gdata.LinkFinder):
+ """A Google Calendar ACL Entry flavor of an Atom Entry """
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}scope' % GACL_NAMESPACE] = ('scope', Scope)
+ _children['{%s}role' % GACL_NAMESPACE] = ('role', Role)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ scope=None, role=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title,
+ updated=updated, text=None)
+ self.scope = scope
+ self.role = role
+
+
+class CalendarAclFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Calendar ACL feed flavor of an Atom Feed"""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CalendarAclEntry])
+
+
+class CalendarEventCommentEntry(gdata.GDataEntry, gdata.LinkFinder):
+ """A Google Calendar event comments entry flavor of an Atom Entry"""
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+
+class CalendarEventCommentFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """A Google Calendar event comments feed flavor of an Atom Feed"""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [CalendarEventCommentEntry])
+
+
+class ExtendedProperty(gdata.ExtendedProperty):
+ """A transparent subclass of gdata.ExtendedProperty added to this module
+ for backwards compatibility."""
+
+
+class Reminder(atom.AtomBase):
+ """The Google Calendar reminder element"""
+
+ _tag = 'reminder'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['absoluteTime'] = 'absolute_time'
+ _attributes['days'] = 'days'
+ _attributes['hours'] = 'hours'
+ _attributes['minutes'] = 'minutes'
+ _attributes['method'] = 'method'
+
+ def __init__(self, absolute_time=None,
+ days=None, hours=None, minutes=None, method=None,
+ extension_elements=None,
+ extension_attributes=None, text=None):
+ self.absolute_time = absolute_time
+ if days is not None:
+ self.days = str(days)
+ else:
+ self.days = None
+ if hours is not None:
+ self.hours = str(hours)
+ else:
+ self.hours = None
+ if minutes is not None:
+ self.minutes = str(minutes)
+ else:
+ self.minutes = None
+ self.method = method
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class When(atom.AtomBase):
+ """The Google Calendar When element"""
+
+ _tag = 'when'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}reminder' % gdata.GDATA_NAMESPACE] = ('reminder', [Reminder])
+ _attributes['startTime'] = 'start_time'
+ _attributes['endTime'] = 'end_time'
+
+ def __init__(self, start_time=None, end_time=None, reminder=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.start_time = start_time
+ self.end_time = end_time
+ self.reminder = reminder or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
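+
+ # Illustrative note (hypothetical times): a one-hour event with a 10-minute
+ # email reminder could be expressed as
+ # When(start_time='2008-06-06T15:00:00.000Z',
+ #      end_time='2008-06-06T16:00:00.000Z',
+ #      reminder=[Reminder(minutes=10, method='email')]).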
+
+
+class Recurrence(atom.AtomBase):
+ """The Google Calendar Recurrence element"""
+
+ _tag = 'recurrence'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+
+class UriEnumElement(atom.AtomBase):
+
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, tag, enum_map, attrib_name='value',
+ extension_elements=None, extension_attributes=None, text=None):
+ self.tag=tag
+ self.enum_map=enum_map
+ self.attrib_name=attrib_name
+ self.value=None
+ self.text=text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ def findKey(self, value):
+ res=[item[0] for item in self.enum_map.items() if item[1] == value]
+ if res is None or len(res) == 0:
+ return None
+ return res[0]
+
+ def _ConvertElementAttributeToMember(self, attribute, value):
+ # Special logic to use the enum_map to set the value of the object's value member.
+ if attribute == self.attrib_name and value != '':
+ self.value = self.enum_map[value]
+ return
+ # Find the attribute in this class's list of attributes.
+ if self.__class__._attributes.has_key(attribute):
+ # Find the member of this class which corresponds to the XML attribute
+ # (lookup in current_class._attributes) and set this member to the
+ # desired value (using self.__dict__).
+ setattr(self, self.__class__._attributes[attribute], value)
+ else:
+ # The current class doesn't map this attribute, so try the parent class.
+ atom.ExtensionContainer._ConvertElementAttributeToMember(self,
+ attribute,
+ value)
+
+ def _AddMembersToElementTree(self, tree):
+ # Convert the members of this class which are XML child nodes.
+ # This uses the class's _children dictionary to find the members which
+ # should become XML child nodes.
+ member_node_names = [values[0] for tag, values in
+ self.__class__._children.iteritems()]
+ for member_name in member_node_names:
+ member = getattr(self, member_name)
+ if member is None:
+ pass
+ elif isinstance(member, list):
+ for instance in member:
+ instance._BecomeChildElement(tree)
+ else:
+ member._BecomeChildElement(tree)
+ # Special logic to set the desired XML attribute.
+ key = self.findKey(self.value)
+ if key is not None:
+ tree.attrib[self.attrib_name]=key
+ # Convert the members of this class which are XML attributes.
+ for xml_attribute, member_name in self.__class__._attributes.iteritems():
+ member = getattr(self, member_name)
+ if member is not None:
+ tree.attrib[xml_attribute] = member
+ # Lastly, call the parent's _AddMembersToElementTree to get any
+ # extension elements.
+ atom.ExtensionContainer._AddMembersToElementTree(self, tree)
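+
+ # Illustrative note: UriEnumElement maps long URI attribute values to short
+ # enum names. For example, parsing
+ # rel="http://schemas.google.com/g/2005#event.attendee" on a Who element sets
+ # its value to 'ATTENDEE', and findKey('ATTENDEE') recovers the original URI
+ # when the element is serialized back to XML.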
+
+
+
+class AttendeeStatus(UriEnumElement):
+ """The Google Calendar attendeeStatus element"""
+
+ _tag = 'attendeeStatus'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = UriEnumElement._children.copy()
+ _attributes = UriEnumElement._attributes.copy()
+
+ attendee_enum = {
+ 'http://schemas.google.com/g/2005#event.accepted' : 'ACCEPTED',
+ 'http://schemas.google.com/g/2005#event.declined' : 'DECLINED',
+ 'http://schemas.google.com/g/2005#event.invited' : 'INVITED',
+ 'http://schemas.google.com/g/2005#event.tentative' : 'TENTATIVE'}
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ UriEnumElement.__init__(self, 'attendeeStatus', AttendeeStatus.attendee_enum,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+class AttendeeType(UriEnumElement):
+ """The Google Calendar attendeeType element"""
+
+ _tag = 'attendeeType'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = UriEnumElement._children.copy()
+ _attributes = UriEnumElement._attributes.copy()
+
+ attendee_type_enum = {
+ 'http://schemas.google.com/g/2005#event.optional' : 'OPTIONAL',
+ 'http://schemas.google.com/g/2005#event.required' : 'REQUIRED' }
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ UriEnumElement.__init__(self, 'attendeeType',
+ AttendeeType.attendee_type_enum,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,text=text)
+
+
+class Visibility(UriEnumElement):
+ """The Google Calendar Visibility element"""
+
+ _tag = 'visibility'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = UriEnumElement._children.copy()
+ _attributes = UriEnumElement._attributes.copy()
+
+ visibility_enum = {
+ 'http://schemas.google.com/g/2005#event.confidential' : 'CONFIDENTIAL',
+ 'http://schemas.google.com/g/2005#event.default' : 'DEFAULT',
+ 'http://schemas.google.com/g/2005#event.private' : 'PRIVATE',
+ 'http://schemas.google.com/g/2005#event.public' : 'PUBLIC' }
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ UriEnumElement.__init__(self, 'visibility', Visibility.visibility_enum,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+class Transparency(UriEnumElement):
+ """The Google Calendar Transparency element"""
+
+ _tag = 'transparency'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = UriEnumElement._children.copy()
+ _attributes = UriEnumElement._attributes.copy()
+
+ transparency_enum = {
+ 'http://schemas.google.com/g/2005#event.opaque' : 'OPAQUE',
+ 'http://schemas.google.com/g/2005#event.transparent' : 'TRANSPARENT' }
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ UriEnumElement.__init__(self, tag='transparency',
+ enum_map=Transparency.transparency_enum,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+class Comments(atom.AtomBase):
+ """The Google Calendar comments element"""
+
+ _tag = 'comments'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ gdata.FeedLink)
+ _attributes['rel'] = 'rel'
+
+ def __init__(self, rel=None, feed_link=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.rel = rel
+ self.feed_link = feed_link
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class EventStatus(UriEnumElement):
+ """The Google Calendar eventStatus element"""
+
+ _tag = 'eventStatus'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = UriEnumElement._children.copy()
+ _attributes = UriEnumElement._attributes.copy()
+
+ status_enum = { 'http://schemas.google.com/g/2005#event.canceled' : 'CANCELED',
+ 'http://schemas.google.com/g/2005#event.confirmed' : 'CONFIRMED',
+ 'http://schemas.google.com/g/2005#event.tentative' : 'TENTATIVE'}
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ UriEnumElement.__init__(self, tag='eventStatus',
+ enum_map=EventStatus.status_enum,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+class Who(UriEnumElement):
+ """The Google Calendar Who element"""
+
+ _tag = 'who'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = UriEnumElement._children.copy()
+ _attributes = UriEnumElement._attributes.copy()
+ _children['{%s}attendeeStatus' % gdata.GDATA_NAMESPACE] = (
+ 'attendee_status', AttendeeStatus)
+ _children['{%s}attendeeType' % gdata.GDATA_NAMESPACE] = ('attendee_type',
+ AttendeeType)
+ _attributes['valueString'] = 'name'
+ _attributes['email'] = 'email'
+
+ relEnum = { 'http://schemas.google.com/g/2005#event.attendee' : 'ATTENDEE',
+ 'http://schemas.google.com/g/2005#event.organizer' : 'ORGANIZER',
+ 'http://schemas.google.com/g/2005#event.performer' : 'PERFORMER',
+ 'http://schemas.google.com/g/2005#event.speaker' : 'SPEAKER',
+ 'http://schemas.google.com/g/2005#message.bcc' : 'BCC',
+ 'http://schemas.google.com/g/2005#message.cc' : 'CC',
+ 'http://schemas.google.com/g/2005#message.from' : 'FROM',
+ 'http://schemas.google.com/g/2005#message.reply-to' : 'REPLY_TO',
+ 'http://schemas.google.com/g/2005#message.to' : 'TO' }
+
+ def __init__(self, name=None, email=None, attendee_status=None,
+ attendee_type=None, rel=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ UriEnumElement.__init__(self, 'who', Who.relEnum, attrib_name='rel',
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.name = name
+ self.email = email
+ self.attendee_status = attendee_status
+ self.attendee_type = attendee_type
+ self.rel = rel
+
+
+class OriginalEvent(atom.AtomBase):
+ """The Google Calendar OriginalEvent element"""
+
+ _tag = 'originalEvent'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ # TODO: The when tag used to map to an EntryLink; make sure it should really be a When.
+ _children['{%s}when' % gdata.GDATA_NAMESPACE] = ('when', When)
+ _attributes['id'] = 'id'
+ _attributes['href'] = 'href'
+
+ def __init__(self, id=None, href=None, when=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ self.id = id
+ self.href = href
+ self.when = when
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def GetCalendarEventEntryClass():
+ return CalendarEventEntry
+
+
+# This class is not completely defined here, because of a circular reference
+# in which CalendarEventEntryLink and CalendarEventEntry refer to one another.
+class CalendarEventEntryLink(gdata.EntryLink):
+ """An entryLink which contains a calendar event entry
+
+ Within an event's recurrenceExceptions, an entry link
+ points to a calendar event entry. This class exists
+ to capture the calendar-specific extensions in the entry.
+ """
+
+ _tag = 'entryLink'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = gdata.EntryLink._children.copy()
+ _attributes = gdata.EntryLink._attributes.copy()
+  # The CalendarEventEntryLink should have CalendarEventEntry as a child, but
+ # that class hasn't been defined yet, so we will wait until after defining
+ # CalendarEventEntry to list it in _children.
+
+
+class RecurrenceException(atom.AtomBase):
+ """The Google Calendar RecurrenceException element"""
+
+ _tag = 'recurrenceException'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}entryLink' % gdata.GDATA_NAMESPACE] = ('entry_link',
+ CalendarEventEntryLink)
+ _children['{%s}originalEvent' % gdata.GDATA_NAMESPACE] = ('original_event',
+ OriginalEvent)
+ _attributes['specialized'] = 'specialized'
+
+ def __init__(self, specialized=None, entry_link=None,
+ original_event=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.specialized = specialized
+ self.entry_link = entry_link
+ self.original_event = original_event
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class SendEventNotifications(atom.AtomBase):
+ """The Google Calendar sendEventNotifications element"""
+
+ _tag = 'sendEventNotifications'
+ _namespace = GCAL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, extension_elements=None,
+ value=None, extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class QuickAdd(atom.AtomBase):
+ """The Google Calendar quickadd element"""
+
+ _tag = 'quickadd'
+ _namespace = GCAL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, extension_elements=None,
+ value=None, extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ def _TransferToElementTree(self, element_tree):
+ if self.value:
+ element_tree.attrib['value'] = self.value
+ element_tree.tag = GCAL_TEMPLATE % 'quickadd'
+ atom.AtomBase._TransferToElementTree(self, element_tree)
+ return element_tree
+
+ def _TakeAttributeFromElementTree(self, attribute, element_tree):
+ if attribute == 'value':
+ self.value = element_tree.attrib[attribute]
+ del element_tree.attrib[attribute]
+ else:
+ atom.AtomBase._TakeAttributeFromElementTree(self, attribute,
+ element_tree)
+
+
+class SyncEvent(atom.AtomBase):
+ _tag = 'syncEvent'
+ _namespace = GCAL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value='false', extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class UID(atom.AtomBase):
+ _tag = 'uid'
+ _namespace = GCAL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Sequence(atom.AtomBase):
+ _tag = 'sequence'
+ _namespace = GCAL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class WebContentGadgetPref(atom.AtomBase):
+  """The Google Calendar Web Content Gadget Preferences element"""
+
+  _tag = 'webContentGadgetPref'
+  _namespace = GCAL_NAMESPACE
+  _children = atom.AtomBase._children.copy()
+  _attributes = atom.AtomBase._attributes.copy()
+  _attributes['name'] = 'name'
+  _attributes['value'] = 'value'
+
+ def __init__(self, name=None, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class WebContent(atom.AtomBase):
+
+ _tag = 'webContent'
+ _namespace = GCAL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}webContentGadgetPref' % GCAL_NAMESPACE] = ('gadget_pref',
+ [WebContentGadgetPref])
+ _attributes['url'] = 'url'
+ _attributes['width'] = 'width'
+ _attributes['height'] = 'height'
+
+ def __init__(self, url=None, width=None, height=None, text=None,
+ gadget_pref=None, extension_elements=None, extension_attributes=None):
+ self.url = url
+ self.width = width
+ self.height = height
+ self.text = text
+ self.gadget_pref = gadget_pref or []
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class WebContentLink(atom.Link):
+
+ _tag = 'link'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = atom.Link._children.copy()
+ _attributes = atom.Link._attributes.copy()
+ _children['{%s}webContent' % GCAL_NAMESPACE] = ('web_content', WebContent)
+
+ def __init__(self, title=None, href=None, link_type=None,
+ web_content=None):
+ atom.Link.__init__(self, rel=WEB_CONTENT_LINK_REL, title=title, href=href,
+ link_type=link_type)
+ self.web_content = web_content
+
+
+class GuestsCanInviteOthers(atom.AtomBase):
+ """Indicates whether event attendees may invite others to the event.
+
+ This element may only be changed by the organizer of the event. If not
+ included as part of the event entry, this element will default to true
+ during a POST request, and will inherit its previous value during a PUT
+ request.
+ """
+ _tag = 'guestsCanInviteOthers'
+ _namespace = GCAL_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value='true', *args, **kwargs):
+ atom.AtomBase.__init__(self, *args, **kwargs)
+ self.value = value
+
+
+class GuestsCanSeeGuests(atom.AtomBase):
+ """Indicates whether attendees can see other people invited to the event.
+
+ The organizer always sees all attendees. Guests always see themselves. This
+ property affects what attendees see in the event's guest list via both the
+ Calendar UI and API feeds.
+
+ This element may only be changed by the organizer of the event.
+
+ If not included as part of the event entry, this element will default to
+ true during a POST request, and will inherit its previous value during a
+ PUT request.
+ """
+ _tag = 'guestsCanSeeGuests'
+ _namespace = GCAL_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value='true', *args, **kwargs):
+ atom.AtomBase.__init__(self, *args, **kwargs)
+ self.value = value
+
+
+class GuestsCanModify(atom.AtomBase):
+ """Indicates whether event attendees may modify the original event.
+
+ If yes, changes are visible to organizer and other attendees. Otherwise,
+ any changes made by attendees will be restricted to that attendee's
+ calendar.
+
+ This element may only be changed by the organizer of the event, and may
+ be set to 'true' only if both gCal:guestsCanInviteOthers and
+ gCal:guestsCanSeeGuests are set to true in the same PUT/POST request.
+ Otherwise, request fails with HTTP error code 400 (Bad Request).
+
+ If not included as part of the event entry, this element will default to
+ false during a POST request, and will inherit its previous value during a
+ PUT request."""
+ _tag = 'guestsCanModify'
+ _namespace = GCAL_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value='false', *args, **kwargs):
+ atom.AtomBase.__init__(self, *args, **kwargs)
+ self.value = value
+
+
+
+class CalendarEventEntry(gdata.BatchEntry):
+ """A Google Calendar flavor of an Atom Entry """
+
+ _tag = gdata.BatchEntry._tag
+ _namespace = gdata.BatchEntry._namespace
+ _children = gdata.BatchEntry._children.copy()
+ _attributes = gdata.BatchEntry._attributes.copy()
+ # This class also contains WebContentLinks but converting those members
+ # is handled in a special version of _ConvertElementTreeToMember.
+ _children['{%s}where' % gdata.GDATA_NAMESPACE] = ('where', [Where])
+ _children['{%s}when' % gdata.GDATA_NAMESPACE] = ('when', [When])
+ _children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', [Who])
+ _children['{%s}extendedProperty' % gdata.GDATA_NAMESPACE] = (
+ 'extended_property', [ExtendedProperty])
+ _children['{%s}visibility' % gdata.GDATA_NAMESPACE] = ('visibility',
+ Visibility)
+ _children['{%s}transparency' % gdata.GDATA_NAMESPACE] = ('transparency',
+ Transparency)
+ _children['{%s}eventStatus' % gdata.GDATA_NAMESPACE] = ('event_status',
+ EventStatus)
+ _children['{%s}recurrence' % gdata.GDATA_NAMESPACE] = ('recurrence',
+ Recurrence)
+ _children['{%s}recurrenceException' % gdata.GDATA_NAMESPACE] = (
+ 'recurrence_exception', [RecurrenceException])
+ _children['{%s}sendEventNotifications' % GCAL_NAMESPACE] = (
+ 'send_event_notifications', SendEventNotifications)
+ _children['{%s}quickadd' % GCAL_NAMESPACE] = ('quick_add', QuickAdd)
+ _children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
+ _children['{%s}originalEvent' % gdata.GDATA_NAMESPACE] = ('original_event',
+ OriginalEvent)
+ _children['{%s}sequence' % GCAL_NAMESPACE] = ('sequence', Sequence)
+ _children['{%s}reminder' % gdata.GDATA_NAMESPACE] = ('reminder', [Reminder])
+ _children['{%s}syncEvent' % GCAL_NAMESPACE] = ('sync_event', SyncEvent)
+ _children['{%s}uid' % GCAL_NAMESPACE] = ('uid', UID)
+ _children['{%s}guestsCanInviteOthers' % GCAL_NAMESPACE] = (
+ 'guests_can_invite_others', GuestsCanInviteOthers)
+ _children['{%s}guestsCanModify' % GCAL_NAMESPACE] = (
+ 'guests_can_modify', GuestsCanModify)
+ _children['{%s}guestsCanSeeGuests' % GCAL_NAMESPACE] = (
+ 'guests_can_see_guests', GuestsCanSeeGuests)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ transparency=None, comments=None, event_status=None,
+ send_event_notifications=None, visibility=None,
+ recurrence=None, recurrence_exception=None,
+ where=None, when=None, who=None, quick_add=None,
+ extended_property=None, original_event=None,
+ batch_operation=None, batch_id=None, batch_status=None,
+ sequence=None, reminder=None, sync_event=None, uid=None,
+ guests_can_invite_others=None, guests_can_modify=None,
+ guests_can_see_guests=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.BatchEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ batch_operation=batch_operation, batch_id=batch_id,
+ batch_status=batch_status,
+ title=title, updated=updated)
+
+ self.transparency = transparency
+ self.comments = comments
+ self.event_status = event_status
+ self.send_event_notifications = send_event_notifications
+ self.visibility = visibility
+ self.recurrence = recurrence
+ self.recurrence_exception = recurrence_exception or []
+ self.where = where or []
+ self.when = when or []
+ self.who = who or []
+ self.quick_add = quick_add
+ self.extended_property = extended_property or []
+ self.original_event = original_event
+ self.sequence = sequence
+ self.reminder = reminder or []
+ self.sync_event = sync_event
+ self.uid = uid
+ self.text = text
+ self.guests_can_invite_others = guests_can_invite_others
+ self.guests_can_modify = guests_can_modify
+ self.guests_can_see_guests = guests_can_see_guests
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ # We needed to add special logic to _ConvertElementTreeToMember because we
+ # want to make links with a rel of WEB_CONTENT_LINK_REL into a
+ # WebContentLink
+ def _ConvertElementTreeToMember(self, child_tree):
+ # Special logic to handle Web Content links
+ if (child_tree.tag == '{%s}link' % atom.ATOM_NAMESPACE and
+        child_tree.attrib.get('rel') == WEB_CONTENT_LINK_REL):
+ if self.link is None:
+ self.link = []
+ self.link.append(atom._CreateClassFromElementTree(WebContentLink,
+ child_tree))
+ return
+ # Find the element's tag in this class's list of child members
+ if self.__class__._children.has_key(child_tree.tag):
+ member_name = self.__class__._children[child_tree.tag][0]
+ member_class = self.__class__._children[child_tree.tag][1]
+ # If the class member is supposed to contain a list, make sure the
+ # matching member is set to a list, then append the new member
+ # instance to the list.
+ if isinstance(member_class, list):
+ if getattr(self, member_name) is None:
+ setattr(self, member_name, [])
+ getattr(self, member_name).append(atom._CreateClassFromElementTree(
+ member_class[0], child_tree))
+ else:
+ setattr(self, member_name,
+ atom._CreateClassFromElementTree(member_class, child_tree))
+ else:
+ atom.ExtensionContainer._ConvertElementTreeToMember(self, child_tree)
+
+
+ def GetWebContentLink(self):
+    """Finds the first link with rel set to WEB_CONTENT_LINK_REL.
+
+    Returns:
+      A gdata.calendar.WebContentLink, or None if none of the links had rel
+      equal to WEB_CONTENT_LINK_REL.
+ """
+
+ for a_link in self.link:
+ if a_link.rel == WEB_CONTENT_LINK_REL:
+ return a_link
+ return None
+
+
+def CalendarEventEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarEventEntry, xml_string)
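+
+# Example (illustrative sketch): parse an event entry from server XML and
+# locate its gCal webContent link, if any. The xml_from_server value is a
+# placeholder for a real Atom response body.
+#
+#   entry = CalendarEventEntryFromString(xml_from_server)
+#   web_content_link = entry.GetWebContentLink()
+#   if web_content_link is not None:
+#     print web_content_link.web_content.url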
+
+
+def CalendarEventCommentEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarEventCommentEntry, xml_string)
+
+
+CalendarEventEntryLink._children = {'{%s}entry' % atom.ATOM_NAMESPACE:
+ ('entry', CalendarEventEntry)}
+
+
+def CalendarEventEntryLinkFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarEventEntryLink, xml_string)
+
+
+class CalendarEventFeed(gdata.BatchFeed, gdata.LinkFinder):
+ """A Google Calendar event feed flavor of an Atom Feed"""
+
+ _tag = gdata.BatchFeed._tag
+ _namespace = gdata.BatchFeed._namespace
+ _children = gdata.BatchFeed._children.copy()
+ _attributes = gdata.BatchFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [CalendarEventEntry])
+ _children['{%s}timezone' % GCAL_NAMESPACE] = ('timezone', Timezone)
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None, entry=None,
+ total_results=None, start_index=None, items_per_page=None,
+ interrupted=None, timezone=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ gdata.BatchFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ interrupted=interrupted,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.timezone = timezone
+
+
+def CalendarListEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarListEntry, xml_string)
+
+
+def CalendarAclEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarAclEntry, xml_string)
+
+
+def CalendarListFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarListFeed, xml_string)
+
+
+def CalendarAclFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarAclFeed, xml_string)
+
+
+def CalendarEventFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarEventFeed, xml_string)
+
+
+def CalendarEventCommentFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(CalendarEventCommentFeed, xml_string)
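+
+
+# Example (illustrative sketch): building an event entry from the classes in
+# this module and serializing it with ToString() from atom.AtomBase. The
+# title, content, and RFC 3339 timestamps below are placeholders.
+#
+#   event = CalendarEventEntry()
+#   event.title = atom.Title(text='Tennis with Beth')
+#   event.content = atom.Content(text='Meet for a quick lesson.')
+#   event.when.append(When(start_time='2011-06-20T17:00:00.000Z',
+#                          end_time='2011-06-20T18:00:00.000Z'))
+#   print event.ToString()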
diff --git a/python/gdata/calendar/data.py b/python/gdata/calendar/data.py
new file mode 100644
index 0000000..c24f04d
--- /dev/null
+++ b/python/gdata/calendar/data.py
@@ -0,0 +1,300 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains the data classes of the Google Calendar Data API"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.acl.data
+import gdata.data
+import gdata.geo.data
+import gdata.opensearch.data
+
+
+GCAL_TEMPLATE = '{http://schemas.google.com/gCal/2005/}%s'
+
+
+class AccessLevelProperty(atom.core.XmlElement):
+ """Describes how much a given user may do with an event or calendar"""
+ _qname = GCAL_TEMPLATE % 'accesslevel'
+ value = 'value'
+
+
+class AllowGSync2Property(atom.core.XmlElement):
+ """Whether the user is permitted to run Google Apps Sync"""
+ _qname = GCAL_TEMPLATE % 'allowGSync2'
+ value = 'value'
+
+
+class AllowGSyncProperty(atom.core.XmlElement):
+ """Whether the user is permitted to run Google Apps Sync"""
+ _qname = GCAL_TEMPLATE % 'allowGSync'
+ value = 'value'
+
+
+class AnyoneCanAddSelfProperty(atom.core.XmlElement):
+ """Whether anyone can add self as attendee"""
+ _qname = GCAL_TEMPLATE % 'anyoneCanAddSelf'
+ value = 'value'
+
+
+class CalendarAclRole(gdata.acl.data.AclRole):
+ """Describes the Calendar roles of an entry in the Calendar access control list"""
+ _qname = gdata.acl.data.GACL_TEMPLATE % 'role'
+
+
+class CalendarCommentEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of a Calendar event's comments"""
+
+
+class CalendarCommentFeed(gdata.data.GDFeed):
+ """Describes feed of a Calendar event's comments"""
+ entry = [CalendarCommentEntry]
+
+
+class CalendarComments(gdata.data.Comments):
+ """Describes a container of a feed link for Calendar comment entries"""
+ _qname = gdata.data.GD_TEMPLATE % 'comments'
+
+
+class CalendarExtendedProperty(gdata.data.ExtendedProperty):
+ """Defines a value for the realm attribute that is used only in the calendar API"""
+ _qname = gdata.data.GD_TEMPLATE % 'extendedProperty'
+
+
+class CalendarWhere(gdata.data.Where):
+ """Extends the base Where class with Calendar extensions"""
+ _qname = gdata.data.GD_TEMPLATE % 'where'
+
+
+class ColorProperty(atom.core.XmlElement):
+ """Describes the color of a calendar"""
+ _qname = GCAL_TEMPLATE % 'color'
+ value = 'value'
+
+
+class GuestsCanInviteOthersProperty(atom.core.XmlElement):
+ """Whether guests can invite others to the event"""
+ _qname = GCAL_TEMPLATE % 'guestsCanInviteOthers'
+ value = 'value'
+
+
+class GuestsCanModifyProperty(atom.core.XmlElement):
+ """Whether guests can modify event"""
+ _qname = GCAL_TEMPLATE % 'guestsCanModify'
+ value = 'value'
+
+
+class GuestsCanSeeGuestsProperty(atom.core.XmlElement):
+ """Whether guests can see other attendees"""
+ _qname = GCAL_TEMPLATE % 'guestsCanSeeGuests'
+ value = 'value'
+
+
+class HiddenProperty(atom.core.XmlElement):
+ """Describes whether a calendar is hidden"""
+ _qname = GCAL_TEMPLATE % 'hidden'
+ value = 'value'
+
+
+class IcalUIDProperty(atom.core.XmlElement):
+ """Describes the UID in the ical export of the event"""
+ _qname = GCAL_TEMPLATE % 'uid'
+ value = 'value'
+
+
+class OverrideNameProperty(atom.core.XmlElement):
+ """Describes the override name property of a calendar"""
+ _qname = GCAL_TEMPLATE % 'overridename'
+ value = 'value'
+
+
+class PrivateCopyProperty(atom.core.XmlElement):
+ """Indicates whether this is a private copy of the event, changes to which should not be sent to other calendars"""
+ _qname = GCAL_TEMPLATE % 'privateCopy'
+ value = 'value'
+
+
+class QuickAddProperty(atom.core.XmlElement):
+ """Describes whether gd:content is for quick-add processing"""
+ _qname = GCAL_TEMPLATE % 'quickadd'
+ value = 'value'
+
+
+class ResourceProperty(atom.core.XmlElement):
+ """Describes whether gd:who is a resource such as a conference room"""
+ _qname = GCAL_TEMPLATE % 'resource'
+ value = 'value'
+ id = 'id'
+
+
+class EventWho(gdata.data.Who):
+ """Extends the base Who class with Calendar extensions"""
+ _qname = gdata.data.GD_TEMPLATE % 'who'
+ resource = ResourceProperty
+
+
+class SelectedProperty(atom.core.XmlElement):
+ """Describes whether a calendar is selected"""
+ _qname = GCAL_TEMPLATE % 'selected'
+ value = 'value'
+
+
+class SendAclNotificationsProperty(atom.core.XmlElement):
+ """Describes whether to send ACL notifications to grantees"""
+ _qname = GCAL_TEMPLATE % 'sendAclNotifications'
+ value = 'value'
+
+
+class CalendarAclEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of a Calendar access control list (ACL)"""
+ send_acl_notifications = SendAclNotificationsProperty
+
+
+class CalendarAclFeed(gdata.data.GDFeed):
+  """Describes a Calendar access control list (ACL) feed"""
+ entry = [CalendarAclEntry]
+
+
+class SendEventNotificationsProperty(atom.core.XmlElement):
+ """Describes whether to send event notifications to other participants of the event"""
+ _qname = GCAL_TEMPLATE % 'sendEventNotifications'
+ value = 'value'
+
+
+class SequenceNumberProperty(atom.core.XmlElement):
+ """Describes sequence number of an event"""
+ _qname = GCAL_TEMPLATE % 'sequence'
+ value = 'value'
+
+
+class CalendarRecurrenceExceptionEntry(gdata.data.GDEntry):
+ """Describes an entry used by a Calendar recurrence exception entry link"""
+ uid = IcalUIDProperty
+ sequence = SequenceNumberProperty
+
+
+class CalendarRecurrenceException(gdata.data.RecurrenceException):
+ """Describes an exception to a recurring Calendar event"""
+ _qname = gdata.data.GD_TEMPLATE % 'recurrenceException'
+
+
+class SettingsProperty(atom.core.XmlElement):
+ """User preference name-value pair"""
+ _qname = GCAL_TEMPLATE % 'settingsProperty'
+ name = 'name'
+ value = 'value'
+
+
+class SettingsEntry(gdata.data.GDEntry):
+ """Describes a Calendar Settings property entry"""
+ settings_property = SettingsProperty
+
+
+class CalendarSettingsFeed(gdata.data.GDFeed):
+ """Personal settings for Calendar application"""
+ entry = [SettingsEntry]
+
+
+class SuppressReplyNotificationsProperty(atom.core.XmlElement):
+ """Lists notification methods to be suppressed for this reply"""
+ _qname = GCAL_TEMPLATE % 'suppressReplyNotifications'
+ methods = 'methods'
+
+
+class SyncEventProperty(atom.core.XmlElement):
+ """Describes whether this is a sync scenario where the Ical UID and Sequence number are honored during inserts and updates"""
+ _qname = GCAL_TEMPLATE % 'syncEvent'
+ value = 'value'
+
+
+class CalendarEventEntry(gdata.data.BatchEntry):
+ """Describes a Calendar event entry"""
+ quickadd = QuickAddProperty
+ send_event_notifications = SendEventNotificationsProperty
+ sync_event = SyncEventProperty
+ anyone_can_add_self = AnyoneCanAddSelfProperty
+ extended_property = [CalendarExtendedProperty]
+ sequence = SequenceNumberProperty
+ guests_can_invite_others = GuestsCanInviteOthersProperty
+ guests_can_modify = GuestsCanModifyProperty
+ guests_can_see_guests = GuestsCanSeeGuestsProperty
+ georss_where = gdata.geo.data.GeoRssWhere
+ private_copy = PrivateCopyProperty
+ suppress_reply_notifications = SuppressReplyNotificationsProperty
+ uid = IcalUIDProperty
+
+
+class TimeZoneProperty(atom.core.XmlElement):
+ """Describes the time zone of a calendar"""
+ _qname = GCAL_TEMPLATE % 'timezone'
+ value = 'value'
+
+
+class TimesCleanedProperty(atom.core.XmlElement):
+ """Describes how many times calendar was cleaned via Manage Calendars"""
+ _qname = GCAL_TEMPLATE % 'timesCleaned'
+ value = 'value'
+
+
+class CalendarEntry(gdata.data.GDEntry):
+ """Describes a Calendar entry in the feed of a user's calendars"""
+ timezone = TimeZoneProperty
+ overridename = OverrideNameProperty
+ hidden = HiddenProperty
+ selected = SelectedProperty
+ times_cleaned = TimesCleanedProperty
+ color = ColorProperty
+ where = [CalendarWhere]
+ accesslevel = AccessLevelProperty
+
+
+class CalendarEventFeed(gdata.data.BatchFeed):
+ """Describes a Calendar event feed"""
+ allow_g_sync2 = AllowGSync2Property
+ timezone = TimeZoneProperty
+ entry = [CalendarEventEntry]
+ times_cleaned = TimesCleanedProperty
+ allow_g_sync = AllowGSyncProperty
+
+
+class CalendarFeed(gdata.data.GDFeed):
+ """Describes a feed of Calendars"""
+ entry = [CalendarEntry]
+
+
+class WebContentGadgetPref(atom.core.XmlElement):
+ """Describes a single web content gadget preference"""
+ _qname = GCAL_TEMPLATE % 'webContentGadgetPref'
+ name = 'name'
+ value = 'value'
+
+
+class WebContent(atom.core.XmlElement):
+ """Describes a "web content" extension"""
+ _qname = GCAL_TEMPLATE % 'webContent'
+ height = 'height'
+ width = 'width'
+ web_content_gadget_pref = [WebContentGadgetPref]
+ url = 'url'
+ display = 'display'
+
+
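+# Example (illustrative sketch): these classes describe XML declaratively, so
+# building a quick-add request is plain attribute assignment; to_string() is
+# inherited from atom.core.XmlElement. The event text below is a placeholder.
+#
+#   entry = CalendarEventEntry()
+#   entry.content = atom.data.Content(text='Tennis with John tomorrow 3pm')
+#   entry.quickadd = QuickAddProperty(value='true')
+#   print entry.to_string()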
diff --git a/python/gdata/calendar/service.py b/python/gdata/calendar/service.py
new file mode 100644
index 0000000..53a94e3
--- /dev/null
+++ b/python/gdata/calendar/service.py
@@ -0,0 +1,595 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""CalendarService extends the GDataService to streamline Google Calendar operations.
+
+ CalendarService: Provides methods to query feeds and manipulate items. Extends
+ GDataService.
+
+ DictionaryToParamList: Function which converts a dictionary into a list of
+ URL arguments (represented as strings). This is a
+ utility function used in CRUD operations.
+"""
+
+
+__author__ = 'api.vli (Vivian Li)'
+
+
+import urllib
+import gdata
+import atom.service
+import gdata.service
+import gdata.calendar
+import atom
+
+
+DEFAULT_BATCH_URL = ('http://www.google.com/calendar/feeds/default/private'
+ '/full/batch')
+
+
+class Error(Exception):
+ pass
+
+
+class RequestError(Error):
+ pass
+
+
+class CalendarService(gdata.service.GDataService):
+ """Client for the Google Calendar service."""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='www.google.com', additional_headers=None, **kwargs):
+ """Creates a client for the Google Calendar service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'www.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='cl', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+
+ def GetCalendarEventFeed(self, uri='/calendar/feeds/default/private/full'):
+ return self.Get(uri, converter=gdata.calendar.CalendarEventFeedFromString)
+
+ def GetCalendarEventEntry(self, uri):
+ return self.Get(uri, converter=gdata.calendar.CalendarEventEntryFromString)
+
+ def GetCalendarListFeed(self, uri='/calendar/feeds/default/allcalendars/full'):
+ return self.Get(uri, converter=gdata.calendar.CalendarListFeedFromString)
+
+ def GetAllCalendarsFeed(self, uri='/calendar/feeds/default/allcalendars/full'):
+ return self.Get(uri, converter=gdata.calendar.CalendarListFeedFromString)
+
+ def GetOwnCalendarsFeed(self, uri='/calendar/feeds/default/owncalendars/full'):
+ return self.Get(uri, converter=gdata.calendar.CalendarListFeedFromString)
+
+ def GetCalendarListEntry(self, uri):
+ return self.Get(uri, converter=gdata.calendar.CalendarListEntryFromString)
+
+ def GetCalendarAclFeed(self, uri='/calendar/feeds/default/acl/full'):
+ return self.Get(uri, converter=gdata.calendar.CalendarAclFeedFromString)
+
+ def GetCalendarAclEntry(self, uri):
+ return self.Get(uri, converter=gdata.calendar.CalendarAclEntryFromString)
+
+ def GetCalendarEventCommentFeed(self, uri):
+ return self.Get(uri, converter=gdata.calendar.CalendarEventCommentFeedFromString)
+
+ def GetCalendarEventCommentEntry(self, uri):
+ return self.Get(uri, converter=gdata.calendar.CalendarEventCommentEntryFromString)
+
+ def Query(self, uri, converter=None):
+ """Performs a query and returns a resulting feed or entry.
+
+ Args:
+      uri: string The URI of the feed which is to be queried
+
+ Returns:
+ On success, a GDataFeed or Entry depending on which is sent from the
+ server.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ if converter:
+ result = self.Get(uri, converter=converter)
+ else:
+ result = self.Get(uri)
+ return result
+
+ def CalendarQuery(self, query):
+ if isinstance(query, CalendarEventQuery):
+ return self.Query(query.ToUri(),
+ converter=gdata.calendar.CalendarEventFeedFromString)
+ elif isinstance(query, CalendarListQuery):
+ return self.Query(query.ToUri(),
+ converter=gdata.calendar.CalendarListFeedFromString)
+ elif isinstance(query, CalendarEventCommentQuery):
+ return self.Query(query.ToUri(),
+ converter=gdata.calendar.CalendarEventCommentFeedFromString)
+ else:
+ return self.Query(query.ToUri())
+
+ def InsertEvent(self, new_event, insert_uri, url_params=None,
+ escape_params=True):
+ """Adds an event to Google Calendar.
+
+ Args:
+ new_event: atom.Entry or subclass A new event which is to be added to
+ Google Calendar.
+ insert_uri: the URL to post new events to the feed
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the event created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ return self.Post(new_event, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarEventEntryFromString)
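+
+    # Example (illustrative sketch): a typical insert against the default
+    # private feed, assuming `client` is an authenticated CalendarService and
+    # `event` is a gdata.calendar.CalendarEventEntry built by the caller:
+    #
+    #   created = client.InsertEvent(
+    #       event, '/calendar/feeds/default/private/full')
+    #   print created.id.text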
+
+ def InsertCalendarSubscription(self, calendar, url_params=None,
+ escape_params=True):
+ """Subscribes the authenticated user to the provided calendar.
+
+ Args:
+ calendar: The calendar to which the user should be subscribed.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the subscription created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ insert_uri = '/calendar/feeds/default/allcalendars/full'
+ return self.Post(calendar, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarListEntryFromString)
+
+ def InsertCalendar(self, new_calendar, url_params=None,
+ escape_params=True):
+ """Creates a new calendar.
+
+ Args:
+ new_calendar: The calendar to be created
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the calendar created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ insert_uri = '/calendar/feeds/default/owncalendars/full'
+ response = self.Post(new_calendar, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarListEntryFromString)
+ return response
+
+ def UpdateCalendar(self, calendar, url_params=None,
+ escape_params=True):
+ """Updates a calendar.
+
+ Args:
+ calendar: The calendar which should be updated
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the calendar created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ update_uri = calendar.GetEditLink().href
+ response = self.Put(data=calendar, uri=update_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarListEntryFromString)
+ return response
+
+ def InsertAclEntry(self, new_entry, insert_uri, url_params=None,
+ escape_params=True):
+ """Adds an ACL entry (rule) to Google Calendar.
+
+ Args:
+ new_entry: atom.Entry or subclass A new ACL entry which is to be added to
+ Google Calendar.
+ insert_uri: the URL to post new entries to the ACL feed
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the ACL entry created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ return self.Post(new_entry, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarAclEntryFromString)
+
+ def InsertEventComment(self, new_entry, insert_uri, url_params=None,
+ escape_params=True):
+ """Adds an entry to Google Calendar.
+
+ Args:
+ new_entry: atom.Entry or subclass A new entry which is to be added to
+ Google Calendar.
+      insert_uri: the URL to post new entries to the feed
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the comment created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ return self.Post(new_entry, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarEventCommentEntryFromString)
+
+ def _RemoveStandardUrlPrefix(self, url):
+ url_prefix = 'http://%s/' % self.server
+ if url.startswith(url_prefix):
+ return url[len(url_prefix) - 1:]
+ return url
+
+ def DeleteEvent(self, edit_uri, extra_headers=None,
+ url_params=None, escape_params=True):
+ """Removes an event with the specified ID from Google Calendar.
+
+ Args:
+ edit_uri: string The edit URL of the entry to be deleted. Example:
+ 'http://www.google.com/calendar/feeds/default/private/full/abx'
+ url_params: dict (optional) Additional URL parameters to be included
+ in the deletion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful delete, a httplib.HTTPResponse containing the server's
+ response to the DELETE request.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ edit_uri = self._RemoveStandardUrlPrefix(edit_uri)
+ return self.Delete('%s' % edit_uri,
+ url_params=url_params, escape_params=escape_params)
+
+ def DeleteAclEntry(self, edit_uri, extra_headers=None,
+ url_params=None, escape_params=True):
+ """Removes an ACL entry at the given edit_uri from Google Calendar.
+
+ Args:
+ edit_uri: string The edit URL of the entry to be deleted. Example:
+ 'http://www.google.com/calendar/feeds/liz%40gmail.com/acl/full/default'
+ url_params: dict (optional) Additional URL parameters to be included
+ in the deletion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful delete, a httplib.HTTPResponse containing the server's
+ response to the DELETE request.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ edit_uri = self._RemoveStandardUrlPrefix(edit_uri)
+ return self.Delete('%s' % edit_uri,
+ url_params=url_params, escape_params=escape_params)
+
+ def DeleteCalendarEntry(self, edit_uri, extra_headers=None,
+ url_params=None, escape_params=True):
+ """Removes a calendar entry at the given edit_uri from Google Calendar.
+
+ Args:
+ edit_uri: string The edit URL of the entry to be deleted. Example:
+ 'http://www.google.com/calendar/feeds/default/allcalendars/abcdef@group.calendar.google.com'
+ url_params: dict (optional) Additional URL parameters to be included
+ in the deletion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful delete, True is returned
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ return self.Delete(edit_uri, url_params=url_params,
+ escape_params=escape_params)
+
+ def UpdateEvent(self, edit_uri, updated_event, url_params=None,
+ escape_params=True):
+ """Updates an existing event.
+
+ Args:
+ edit_uri: string The edit link URI for the element being updated
+ updated_event: string, atom.Entry, or subclass containing
+ the Atom Entry which will replace the event which is
+ stored at the edit_url
+ url_params: dict (optional) Additional URL parameters to be included
+ in the update request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful update, a httplib.HTTPResponse containing the server's
+ response to the PUT request.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ edit_uri = self._RemoveStandardUrlPrefix(edit_uri)
+ return self.Put(updated_event, '%s' % edit_uri,
+ url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarEventEntryFromString)
+
+ def UpdateAclEntry(self, edit_uri, updated_rule, url_params=None,
+ escape_params=True):
+ """Updates an existing ACL rule.
+
+ Args:
+ edit_uri: string The edit link URI for the element being updated
+ updated_rule: string, atom.Entry, or subclass containing
+ the Atom Entry which will replace the event which is
+ stored at the edit_url
+ url_params: dict (optional) Additional URL parameters to be included
+ in the update request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful update, a httplib.HTTPResponse containing the server's
+ response to the PUT request.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+ edit_uri = self._RemoveStandardUrlPrefix(edit_uri)
+ return self.Put(updated_rule, '%s' % edit_uri,
+ url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.calendar.CalendarAclEntryFromString)
+
+ def ExecuteBatch(self, batch_feed, url,
+ converter=gdata.calendar.CalendarEventFeedFromString):
+ """Sends a batch request feed to the server.
+
+ The batch request needs to be sent to the batch URL for a particular
+ calendar. You can find the URL by calling GetBatchLink().href on the
+ CalendarEventFeed.
+
+ Args:
+ batch_feed: gdata.calendar.CalendarEventFeed A feed containing batch
+ request entries. Each entry contains the operation to be performed
+ on the data contained in the entry. For example an entry with an
+ operation type of insert will be used as if the individual entry
+ had been inserted.
+ url: str The batch URL for the Calendar to which these operations should
+ be applied.
+ converter: Function (optional) The function used to convert the server's
+ response to an object. The default value is
+ CalendarEventFeedFromString.
+
+ Returns:
+ The results of the batch request's execution on the server. If the
+ default converter is used, this is stored in a CalendarEventFeed.
+ """
+ return self.Post(batch_feed, url, converter=converter)
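+
+    # Example (illustrative sketch): submitting a batch of operations, assuming
+    # `client` is an authenticated CalendarService and `request_feed` is a
+    # gdata.calendar.CalendarEventFeed populated with batch entries:
+    #
+    #   response_feed = client.ExecuteBatch(request_feed, DEFAULT_BATCH_URL)
+    #   for entry in response_feed.entry:
+    #     print entry.batch_status.code, entry.batch_status.reason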
+
+
+class CalendarEventQuery(gdata.service.Query):
+
+ def __init__(self, user='default', visibility='private', projection='full',
+ text_query=None, params=None, categories=None):
+ gdata.service.Query.__init__(self,
+ feed='http://www.google.com/calendar/feeds/%s/%s/%s' % (
+ urllib.quote(user),
+ urllib.quote(visibility),
+ urllib.quote(projection)),
+ text_query=text_query, params=params, categories=categories)
+
+ def _GetStartMin(self):
+ if 'start-min' in self.keys():
+ return self['start-min']
+ else:
+ return None
+
+ def _SetStartMin(self, val):
+ self['start-min'] = val
+
+ start_min = property(_GetStartMin, _SetStartMin,
+ doc="""The start-min query parameter""")
+
+ def _GetStartMax(self):
+ if 'start-max' in self.keys():
+ return self['start-max']
+ else:
+ return None
+
+ def _SetStartMax(self, val):
+ self['start-max'] = val
+
+ start_max = property(_GetStartMax, _SetStartMax,
+ doc="""The start-max query parameter""")
+
+ def _GetOrderBy(self):
+ if 'orderby' in self.keys():
+ return self['orderby']
+ else:
+ return None
+
+ def _SetOrderBy(self, val):
+    if val not in ('lastmodified', 'starttime'):
+      raise Error, "Order By must be either 'lastmodified' or 'starttime'"
+ self['orderby'] = val
+
+ orderby = property(_GetOrderBy, _SetOrderBy,
+ doc="""The orderby query parameter""")
+
+ def _GetSortOrder(self):
+ if 'sortorder' in self.keys():
+ return self['sortorder']
+ else:
+ return None
+
+ def _SetSortOrder(self, val):
+    if val not in ('ascending', 'ascend', 'a', 'descending', 'descend', 'd'):
+      raise Error, ("Sort order must be either ascending, ascend, a or "
+                    "descending, descend, or d")
+ self['sortorder'] = val
+
+ sortorder = property(_GetSortOrder, _SetSortOrder,
+ doc="""The sortorder query parameter""")
+
+ def _GetSingleEvents(self):
+ if 'singleevents' in self.keys():
+ return self['singleevents']
+ else:
+ return None
+
+ def _SetSingleEvents(self, val):
+ self['singleevents'] = val
+
+ singleevents = property(_GetSingleEvents, _SetSingleEvents,
+ doc="""The singleevents query parameter""")
+
+ def _GetFutureEvents(self):
+ if 'futureevents' in self.keys():
+ return self['futureevents']
+ else:
+ return None
+
+ def _SetFutureEvents(self, val):
+ self['futureevents'] = val
+
+ futureevents = property(_GetFutureEvents, _SetFutureEvents,
+ doc="""The futureevents query parameter""")
+
+ def _GetRecurrenceExpansionStart(self):
+ if 'recurrence-expansion-start' in self.keys():
+ return self['recurrence-expansion-start']
+ else:
+ return None
+
+ def _SetRecurrenceExpansionStart(self, val):
+ self['recurrence-expansion-start'] = val
+
+ recurrence_expansion_start = property(_GetRecurrenceExpansionStart,
+ _SetRecurrenceExpansionStart,
+ doc="""The recurrence-expansion-start query parameter""")
+
+ def _GetRecurrenceExpansionEnd(self):
+ if 'recurrence-expansion-end' in self.keys():
+ return self['recurrence-expansion-end']
+ else:
+ return None
+
+ def _SetRecurrenceExpansionEnd(self, val):
+ self['recurrence-expansion-end'] = val
+
+ recurrence_expansion_end = property(_GetRecurrenceExpansionEnd,
+ _SetRecurrenceExpansionEnd,
+ doc="""The recurrence-expansion-end query parameter""")
+
+ def _SetTimezone(self, val):
+ self['ctz'] = val
+
+ def _GetTimezone(self):
+ if 'ctz' in self.keys():
+ return self['ctz']
+ else:
+ return None
+
+ ctz = property(_GetTimezone, _SetTimezone,
+                 doc="""The ctz query parameter, which sets the time zone used to report times on the server.""")
+
+
+class CalendarListQuery(gdata.service.Query):
+ """Queries the Google Calendar meta feed"""
+
+ def __init__(self, userId=None, text_query=None,
+ params=None, categories=None):
+ if userId is None:
+ userId = 'default'
+
+    gdata.service.Query.__init__(
+        self, feed='http://www.google.com/calendar/feeds/' + userId,
+        text_query=text_query, params=params, categories=categories)
+
+class CalendarEventCommentQuery(gdata.service.Query):
+ """Queries the Google Calendar event comments feed"""
+
+ def __init__(self, feed=None):
+ gdata.service.Query.__init__(self, feed=feed)
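+
+
+# Example (illustrative sketch): querying a date range with the classes above.
+# The credentials and dates are placeholders; ProgrammaticLogin and the feed
+# layout follow the ClientLogin-era conventions this module targets.
+#
+#   client = CalendarService()
+#   client.email = 'user@example.com'
+#   client.password = 'password'
+#   client.source = 'example-calendar-sample'
+#   client.ProgrammaticLogin()
+#
+#   query = CalendarEventQuery('default', 'private', 'full')
+#   query.start_min = '2011-01-01'
+#   query.start_max = '2011-01-31'
+#   query.orderby = 'starttime'
+#   feed = client.CalendarQuery(query)
+#   for event in feed.entry:
+#     print event.title.text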
diff --git a/python/gdata/calendar_resource/__init__.py b/python/gdata/calendar_resource/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/python/gdata/calendar_resource/__init__.py
@@ -0,0 +1 @@
+
diff --git a/python/gdata/calendar_resource/client.py b/python/gdata/calendar_resource/client.py
new file mode 100644
index 0000000..54d2ea8
--- /dev/null
+++ b/python/gdata/calendar_resource/client.py
@@ -0,0 +1,200 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""CalendarResourceClient simplifies Calendar Resources API calls.
+
+CalendarResourceClient extends gdata.client.GDClient to ease interaction with
+the Google Apps Calendar Resources API. These interactions include the ability
+to create, retrieve, update, and delete calendar resources in a Google Apps
+domain.
+"""
+
+
+__author__ = 'Vic Fryzel '
+
+
+import gdata.calendar_resource.data
+import gdata.client
+import urllib
+
+
+# Feed URI template. This must end with a /
+# The strings in this template are eventually replaced with the API version
+# and Google Apps domain name, respectively.
+RESOURCE_FEED_TEMPLATE = '/a/feeds/calendar/resource/%s/%s/'
+
+
+class CalendarResourceClient(gdata.client.GDClient):
+ """Client extension for the Google Calendar Resource API service.
+
+ Attributes:
+    host: string The hostname for the Calendar Resource API service.
+ api_version: string The version of the Calendar Resource API.
+ """
+
+ host = 'apps-apis.google.com'
+ api_version = '2.0'
+ auth_service = 'apps'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
+ ssl = True
+
+ def __init__(self, domain, auth_token=None, **kwargs):
+ """Constructs a new client for the Calendar Resource API.
+
+ Args:
+ domain: string The Google Apps domain with Calendar Resources.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the calendar resource
+ data.
+ kwargs: The other parameters to pass to the gdata.client.GDClient
+ constructor.
+ """
+ gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
+ self.domain = domain
+
+ def make_resource_feed_uri(self, resource_id=None, params=None):
+ """Creates a resource feed URI for the Calendar Resource API.
+
+ Using this client's Google Apps domain, create a feed URI for calendar
+ resources in that domain. If a resource_id is provided, return a URI
+ for that specific resource. If params are provided, append them as GET
+ params.
+
+ Args:
+ resource_id: string (optional) The ID of the calendar resource for which
+ to make a feed URI.
+ params: dict (optional) key -> value params to append as GET vars to the
+ URI. Example: params={'start': 'my-resource-id'}
+ Returns:
+ A string giving the URI for calendar resources for this client's Google
+ Apps domain.
+ """
+ uri = RESOURCE_FEED_TEMPLATE % (self.api_version, self.domain)
+ if resource_id:
+ uri += resource_id
+ if params:
+ uri += '?' + urllib.urlencode(params)
+ return uri
+
+ MakeResourceFeedUri = make_resource_feed_uri
+
+ def get_resource_feed(self, uri=None, **kwargs):
+ """Fetches a ResourceFeed of calendar resources at the given URI.
+
+ Args:
+ uri: string The URI of the feed to pull.
+ kwargs: The other parameters to pass to gdata.client.GDClient.get_feed().
+
+ Returns:
+ A ResourceFeed object representing the feed at the given URI.
+ """
+
+ if uri is None:
+ uri = self.MakeResourceFeedUri()
+ return self.get_feed(
+ uri,
+ desired_class=gdata.calendar_resource.data.CalendarResourceFeed,
+ **kwargs)
+
+ GetResourceFeed = get_resource_feed
+
+ def get_resource(self, uri=None, resource_id=None, **kwargs):
+ """Fetches a single calendar resource by resource ID.
+
+ Args:
+ uri: string The base URI of the feed from which to fetch the resource.
+ resource_id: string The string ID of the Resource to fetch.
+ kwargs: The other parameters to pass to gdata.client.GDClient.get_entry().
+
+ Returns:
+ A Resource object representing the calendar resource with the given
+ base URI and resource ID.
+ """
+
+ if uri is None:
+ uri = self.MakeResourceFeedUri(resource_id)
+ return self.get_entry(
+ uri,
+ desired_class=gdata.calendar_resource.data.CalendarResourceEntry,
+ **kwargs)
+
+ GetResource = get_resource
+
+ def create_resource(self, resource_id, resource_common_name=None,
+ resource_description=None, resource_type=None, **kwargs):
+ """Creates a calendar resource with the given properties.
+
+ Args:
+ resource_id: string The resource ID of the calendar resource.
+ resource_common_name: string (optional) The common name of the resource.
+ resource_description: string (optional) The description of the resource.
+ resource_type: string (optional) The type of the resource.
+ kwargs: The other parameters to pass to gdata.client.GDClient.post().
+
+ Returns:
+ gdata.calendar_resource.data.CalendarResourceEntry of the new resource.
+ """
+ new_resource = gdata.calendar_resource.data.CalendarResourceEntry(
+ resource_id=resource_id,
+ resource_common_name=resource_common_name,
+ resource_description=resource_description,
+ resource_type=resource_type)
+ return self.post(new_resource, self.MakeResourceFeedUri(), **kwargs)
+
+ CreateResource = create_resource
+
+ def update_resource(self, resource_id, resource_common_name=None,
+ resource_description=None, resource_type=None, **kwargs):
+ """Updates the calendar resource with the given resource ID.
+
+ Args:
+ resource_id: string The resource ID of the calendar resource to update.
+ resource_common_name: string (optional) The common name to give the
+ resource.
+ resource_description: string (optional) The description to give the
+ resource.
+ resource_type: string (optional) The type to give the resource.
+ kwargs: The other parameters to pass to gdata.client.GDClient.update().
+
+ Returns:
+ gdata.calendar_resource.data.CalendarResourceEntry of the updated
+ resource.
+ """
+ new_resource = gdata.calendar_resource.data.CalendarResourceEntry(
+ resource_id=resource_id,
+ resource_common_name=resource_common_name,
+ resource_description=resource_description,
+ resource_type=resource_type)
+ return self.update(
+ new_resource,
+ **kwargs)
+
+ UpdateResource = update_resource
+
+ def delete_resource(self, resource_id, **kwargs):
+ """Deletes the calendar resource with the given resource ID.
+
+ Args:
+ resource_id: string The resource ID of the calendar resource to delete.
+ kwargs: The other parameters to pass to gdata.client.GDClient.delete()
+
+ Returns:
+ An HTTP response object. See gdata.client.request().
+ """
+
+ return self.delete(self.MakeResourceFeedUri(resource_id), **kwargs)
+
+ DeleteResource = delete_resource
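+
+
+# Example (illustrative sketch): provisioning a resource calendar with the
+# client above. The domain, credentials, and resource values are placeholders.
+#
+#   client = CalendarResourceClient(domain='example.com')
+#   client.ClientLogin('admin@example.com', 'password',
+#                      'example-resource-sample')
+#   created = client.CreateResource(resource_id='CR-NYC-14-12-BR',
+#                                   resource_common_name='Boardroom',
+#                                   resource_description='Large conference room',
+#                                   resource_type='CR')
+#   print created.resource_common_name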
diff --git a/python/gdata/calendar_resource/data.py b/python/gdata/calendar_resource/data.py
new file mode 100644
index 0000000..527fd48
--- /dev/null
+++ b/python/gdata/calendar_resource/data.py
@@ -0,0 +1,193 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model for parsing and generating XML for the Calendar Resource API."""
+
+
+__author__ = 'Vic Fryzel '
+
+
+import atom.core
+import atom.data
+import gdata.apps
+import gdata.apps_property
+import gdata.data
+
+
+# This is required to work around a naming conflict between the Google
+# Spreadsheets API and Python's built-in property function
+pyproperty = property
+
+
+# The apps:property name of the resourceId property
+RESOURCE_ID_NAME = 'resourceId'
+# The apps:property name of the resourceCommonName property
+RESOURCE_COMMON_NAME_NAME = 'resourceCommonName'
+# The apps:property name of the resourceDescription property
+RESOURCE_DESCRIPTION_NAME = 'resourceDescription'
+# The apps:property name of the resourceType property
+RESOURCE_TYPE_NAME = 'resourceType'
+
+
+class CalendarResourceEntry(gdata.data.GDEntry):
+ """Represents a Calendar Resource entry in object form."""
+
+ property = [gdata.apps_property.AppsProperty]
+
+ def _GetProperty(self, name):
+ """Get the apps:property value with the given name.
+
+ Args:
+ name: string Name of the apps:property value to get.
+
+ Returns:
+ The apps:property value with the given name, or None if the name was
+ invalid.
+ """
+
+ for p in self.property:
+ if p.name == name:
+ return p.value
+ return None
+
+ def _SetProperty(self, name, value):
+ """Set the apps:property value with the given name to the given value.
+
+ Args:
+ name: string Name of the apps:property value to set.
+ value: string Value to give the apps:property value with the given name.
+ """
+
+ for i in range(len(self.property)):
+ if self.property[i].name == name:
+ self.property[i].value = value
+ return
+ self.property.append(gdata.apps_property.AppsProperty(name=name, value=value))
+
+ def GetResourceId(self):
+ """Get the resource ID of this Calendar Resource object.
+
+ Returns:
+ The resource ID of this Calendar Resource object as a string or None.
+ """
+
+ return self._GetProperty(RESOURCE_ID_NAME)
+
+ def SetResourceId(self, value):
+ """Set the resource ID of this Calendar Resource object.
+
+ Args:
+ value: string The new resource ID value to give this object.
+ """
+
+ self._SetProperty(RESOURCE_ID_NAME, value)
+
+ resource_id = pyproperty(GetResourceId, SetResourceId)
+
+ def GetResourceCommonName(self):
+ """Get the common name of this Calendar Resource object.
+
+ Returns:
+ The common name of this Calendar Resource object as a string or None.
+ """
+
+ return self._GetProperty(RESOURCE_COMMON_NAME_NAME)
+
+ def SetResourceCommonName(self, value):
+ """Set the common name of this Calendar Resource object.
+
+ Args:
+ value: string The new common name value to give this object.
+ """
+
+ self._SetProperty(RESOURCE_COMMON_NAME_NAME, value)
+
+ resource_common_name = pyproperty(
+ GetResourceCommonName,
+ SetResourceCommonName)
+
+ def GetResourceDescription(self):
+ """Get the description of this Calendar Resource object.
+
+ Returns:
+ The description of this Calendar Resource object as a string or None.
+ """
+
+ return self._GetProperty(RESOURCE_DESCRIPTION_NAME)
+
+ def SetResourceDescription(self, value):
+ """Set the description of this Calendar Resource object.
+
+ Args:
+ value: string The new description value to give this object.
+ """
+
+ self._SetProperty(RESOURCE_DESCRIPTION_NAME, value)
+
+ resource_description = pyproperty(
+ GetResourceDescription,
+ SetResourceDescription)
+
+ def GetResourceType(self):
+ """Get the type of this Calendar Resource object.
+
+ Returns:
+ The type of this Calendar Resource object as a string or None.
+ """
+
+ return self._GetProperty(RESOURCE_TYPE_NAME)
+
+ def SetResourceType(self, value):
+ """Set the type value of this Calendar Resource object.
+
+ Args:
+ value: string The new type value to give this object.
+ """
+
+ self._SetProperty(RESOURCE_TYPE_NAME, value)
+
+ resource_type = pyproperty(GetResourceType, SetResourceType)
+
+ def __init__(self, resource_id=None, resource_common_name=None,
+ resource_description=None, resource_type=None, *args, **kwargs):
+ """Constructs a new CalendarResourceEntry object with the given arguments.
+
+ Args:
+ resource_id: string (optional) The resource ID to give this new object.
+ resource_common_name: string (optional) The common name to give this new
+ object.
+ resource_description: string (optional) The description to give this new
+ object.
+ resource_type: string (optional) The type to give this new object.
+ args: The other parameters to pass to the gdata.data.GDEntry constructor.
+ kwargs: The other parameters to pass to the gdata.data.GDEntry constructor.
+ """
+ super(CalendarResourceEntry, self).__init__(*args, **kwargs)
+ if resource_id:
+ self.resource_id = resource_id
+ if resource_common_name:
+ self.resource_common_name = resource_common_name
+ if resource_description:
+ self.resource_description = resource_description
+ if resource_type:
+ self.resource_type = resource_type
+
+
+class CalendarResourceFeed(gdata.data.GDFeed):
+ """Represents a feed of CalendarResourceEntry objects."""
+
+ # Override entry so that this feed knows how to type its list of entries.
+ entry = [CalendarResourceEntry]
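
A short sketch of how the property helpers above behave; the resource values are made-up examples:

    import gdata.calendar_resource.data

    entry = gdata.calendar_resource.data.CalendarResourceEntry(
        resource_id='CR-NYC-14-12-BR',
        resource_common_name='Boardroom',
        resource_description='Projector, seats 12',
        resource_type='CR')

    # The pyproperty accessors read and write apps:property elements.
    assert entry.resource_id == 'CR-NYC-14-12-BR'
    entry.resource_description = 'Projector and phone, seats 12'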
diff --git a/python/gdata/client.py b/python/gdata/client.py
new file mode 100644
index 0000000..7e2314c
--- /dev/null
+++ b/python/gdata/client.py
@@ -0,0 +1,1126 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2008, 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+"""Provides a client to interact with Google Data API servers.
+
+This module is used for version 2 of the Google Data APIs. The primary class
+in this module is GDClient.
+
+ GDClient: handles auth and CRUD operations when communicating with servers.
+ GDataClient: deprecated client for version one services. Will be removed.
+"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import re
+import atom.client
+import atom.core
+import atom.http_core
+import gdata.gauth
+import gdata.data
+
+
+class Error(Exception):
+ pass
+
+
+class RequestError(Error):
+ status = None
+ reason = None
+ body = None
+ headers = None
+
+
+class RedirectError(RequestError):
+ pass
+
+
+class CaptchaChallenge(RequestError):
+ captcha_url = None
+ captcha_token = None
+
+
+class ClientLoginTokenMissing(Error):
+ pass
+
+
+class MissingOAuthParameters(Error):
+ pass
+
+
+class ClientLoginFailed(RequestError):
+ pass
+
+
+class UnableToUpgradeToken(RequestError):
+ pass
+
+
+class Unauthorized(Error):
+ pass
+
+
+class BadAuthenticationServiceURL(RedirectError):
+ pass
+
+
+class BadAuthentication(RequestError):
+ pass
+
+
+class NotModified(RequestError):
+ pass
+
+class NotImplemented(RequestError):
+ pass
+
+
+def error_from_response(message, http_response, error_class,
+ response_body=None):
+
+ """Creates a new exception and sets the HTTP information in the error.
+
+ Args:
+ message: str human readable message to be displayed if the exception is
+ not caught.
+ http_response: The response from the server, contains error information.
+ error_class: The exception to be instantiated and populated with
+ information from the http_response
+ response_body: str (optional) specify if the response has already been read
+ from the http_response object.
+ """
+ if response_body is None:
+ body = http_response.read()
+ else:
+ body = response_body
+ error = error_class('%s: %i, %s' % (message, http_response.status, body))
+ error.status = http_response.status
+ error.reason = http_response.reason
+ error.body = body
+ error.headers = atom.http_core.get_headers(http_response)
+ return error
+
+
+def get_xml_version(version):
+ """Determines which XML schema to use based on the client API version.
+
+ Args:
+ version: string which is converted to an int. The version string is in
+ the form 'Major.Minor.x.y.z' and only the major version number
+ is considered. If None is provided assume version 1.
+ """
+ if version is None:
+ return 1
+ return int(version.split('.')[0])
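
For example, only the major version number matters:

    import gdata.client

    gdata.client.get_xml_version('2.0.1')  # -> 2
    gdata.client.get_xml_version(None)     # -> 1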
+
+
+class GDClient(atom.client.AtomPubClient):
+ """Communicates with Google Data servers to perform CRUD operations.
+
+ This class is currently experimental and may change in backwards
+ incompatible ways.
+
+ This class exists to simplify the following three areas involved in using
+ the Google Data APIs.
+
+ CRUD Operations:
+
+ The client provides a generic 'request' method for making HTTP requests.
+ There are a number of convenience methods which are built on top of
+ request, which include get_feed, get_entry, get_next, post, update, and
+ delete. These methods contact the Google Data servers.
+
+ Auth:
+
+ Reading user-specific private data requires authorization from the user as
+ do any changes to user data. An auth_token object can be passed into any
+ of the HTTP requests to set the Authorization header in the request.
+
+ You may also want to set the auth_token member to an object which can
+ use modify_request to set the Authorization header in the HTTP request.
+
+ If you are authenticating using the email address and password, you can
+ use the client_login method to obtain an auth token and set the
+ auth_token member.
+
+ If you are using browser redirects, specifically AuthSub, you will want
+ to use gdata.gauth.AuthSubToken.from_url to obtain the token after the
+ redirect, and you will probably want to updgrade this since use token
+ to a multiple use (session) token using the upgrade_token method.
+
+ API Versions:
+
+ This client is multi-version capable and can be used with Google Data API
+ version 1 and version 2. The version should be specified by setting the
+ api_version member to a string, either '1' or '2'.
+ """
+
+ # The gsessionid is used by Google Calendar to prevent redirects.
+ __gsessionid = None
+ api_version = None
+ # Name of the Google Data service when making a ClientLogin request.
+ auth_service = None
+ # URL prefixes which should be requested for AuthSub and OAuth.
+ auth_scopes = None
+
+ def request(self, method=None, uri=None, auth_token=None,
+ http_request=None, converter=None, desired_class=None,
+ redirects_remaining=4, **kwargs):
+ """Make an HTTP request to the server.
+
+ See also documentation for atom.client.AtomPubClient.request.
+
+ If a 302 redirect is sent from the server to the client, this client
+ assumes that the redirect is in the form used by the Google Calendar API.
+ The same request URI and method will be used as in the original request,
+ but a gsessionid URL parameter will be added to the request URI with
+ the value provided in the server's 302 redirect response. If the 302
+ redirect is not in the format specified by the Google Calendar API, a
+ RedirectError will be raised containing the body of the server's
+ response.
+
+ The method calls the client's modify_request method to make any changes
+ required by the client before the request is made. For example, a
+ version 2 client could add a GData-Version: 2 header to the request in
+ its modify_request method.
+
+ Args:
+ method: str The HTTP verb for this request, usually 'GET', 'POST',
+ 'PUT', or 'DELETE'
+ uri: atom.http_core.Uri, str, or unicode The URL being requested.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others.
+ http_request: (optional) atom.http_core.HttpRequest
+ converter: function which takes the HTTP response as its only
+ argument and returns the desired object.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned.
+ redirects_remaining: (optional) int, if this number is 0 and the
+ server sends a 302 redirect, the request method
+ will raise an exception. This parameter is used in
+ recursive request calls to avoid an infinite loop.
+
+ Any additional arguments are passed through to
+ atom.client.AtomPubClient.request.
+
+ Returns:
+ An HTTP response object (see atom.http_core.HttpResponse for a
+ description of the object's interface) if no converter was
+ specified and no desired_class was specified. If a converter function
+ was provided, the results of calling the converter are returned. If no
+ converter was specified but a desired_class was provided, the response
+ body will be converted to the class using
+ atom.core.parse.
+ """
+ if isinstance(uri, (str, unicode)):
+ uri = atom.http_core.Uri.parse_uri(uri)
+
+ # Add the gsession ID to the URL to prevent further redirects.
+ # TODO: If different sessions are using the same client, there will be a
+ # multitude of redirects and session ID shuffling.
+ # If the gsession ID is in the URL, adopt it as the standard location.
+ if uri is not None and uri.query is not None and 'gsessionid' in uri.query:
+ self.__gsessionid = uri.query['gsessionid']
+ # The gsession ID could also be in the HTTP request.
+ elif (http_request is not None and http_request.uri is not None
+ and http_request.uri.query is not None
+ and 'gsessionid' in http_request.uri.query):
+ self.__gsessionid = http_request.uri.query['gsessionid']
+ # If the gsession ID is stored in the client, and was not present in the
+ # URI then add it to the URI.
+ elif self.__gsessionid is not None:
+ uri.query['gsessionid'] = self.__gsessionid
+
+ # The AtomPubClient should call this class' modify_request before
+ # performing the HTTP request.
+ #http_request = self.modify_request(http_request)
+
+ response = atom.client.AtomPubClient.request(self, method=method,
+ uri=uri, auth_token=auth_token, http_request=http_request, **kwargs)
+ # On success, convert the response body using the desired converter
+ # function if present.
+ if response is None:
+ return None
+ if response.status == 200 or response.status == 201:
+ if converter is not None:
+ return converter(response)
+ elif desired_class is not None:
+ if self.api_version is not None:
+ return atom.core.parse(response.read(), desired_class,
+ version=get_xml_version(self.api_version))
+ else:
+ # No API version was specified, so allow parse to
+ # use the default version.
+ return atom.core.parse(response.read(), desired_class)
+ else:
+ return response
+ # TODO: move the redirect logic into the Google Calendar client once it
+ # exists since the redirects are only used in the calendar API.
+ elif response.status == 302:
+ if redirects_remaining > 0:
+ location = (response.getheader('Location')
+ or response.getheader('location'))
+ if location is not None:
+ m = re.compile('[\?\&]gsessionid=(\w*)').search(location)
+ if m is not None:
+ self.__gsessionid = m.group(1)
+ # Make a recursive call with the gsession ID in the URI to follow
+ # the redirect.
+ return self.request(method=method, uri=uri, auth_token=auth_token,
+ http_request=http_request, converter=converter,
+ desired_class=desired_class,
+ redirects_remaining=redirects_remaining-1,
+ **kwargs)
+ else:
+ raise error_from_response('302 received without Location header',
+ response, RedirectError)
+ else:
+ raise error_from_response('Too many redirects from server',
+ response, RedirectError)
+ elif response.status == 401:
+ raise error_from_response('Unauthorized - Server responded with',
+ response, Unauthorized)
+ elif response.status == 304:
+ raise error_from_response('Entry Not Modified - Server responded with',
+ response, NotModified)
+ elif response.status == 501:
+ raise error_from_response(
+ 'This API operation is not implemented. - Server responded with',
+ response, NotImplemented)
+ # If the server's response was not a 200, 201, 302, 304, 401, or 501, raise
+ # an exception.
+ else:
+ raise error_from_response('Server responded with', response,
+ RequestError)
+
+ Request = request
+
+ def request_client_login_token(
+ self, email, password, source, service=None,
+ account_type='HOSTED_OR_GOOGLE',
+ auth_url=atom.http_core.Uri.parse_uri(
+ 'https://www.google.com/accounts/ClientLogin'),
+ captcha_token=None, captcha_response=None):
+ service = service or self.auth_service
+ # Set the target URL.
+ http_request = atom.http_core.HttpRequest(uri=auth_url, method='POST')
+ http_request.add_body_part(
+ gdata.gauth.generate_client_login_request_body(email=email,
+ password=password, service=service, source=source,
+ account_type=account_type, captcha_token=captcha_token,
+ captcha_response=captcha_response),
+ 'application/x-www-form-urlencoded')
+
+ # Use the underlying http_client to make the request.
+ response = self.http_client.request(http_request)
+
+ response_body = response.read()
+ if response.status == 200:
+ token_string = gdata.gauth.get_client_login_token_string(response_body)
+ if token_string is not None:
+ return gdata.gauth.ClientLoginToken(token_string)
+ else:
+ raise ClientLoginTokenMissing(
+ 'Received a 200 response to client login request,'
+ ' but no token was present. %s' % (response_body,))
+ elif response.status == 403:
+ captcha_challenge = gdata.gauth.get_captcha_challenge(response_body)
+ if captcha_challenge:
+ challenge = CaptchaChallenge('CAPTCHA required')
+ challenge.captcha_url = captcha_challenge['url']
+ challenge.captcha_token = captcha_challenge['token']
+ raise challenge
+ elif response_body.splitlines()[0] == 'Error=BadAuthentication':
+ raise BadAuthentication('Incorrect username or password')
+ else:
+ raise error_from_response('Server responded with a 403 code',
+ response, RequestError, response_body)
+ elif response.status == 302:
+ # Google tries to redirect all bad URLs back to
+ # http://www.google.. If a redirect
+ # attempt is made, assume the user has supplied an incorrect
+ # authentication URL
+ raise error_from_response('Server responded with a redirect',
+ response, BadAuthenticationServiceURL,
+ response_body)
+ else:
+ raise error_from_response('Server responded to ClientLogin request',
+ response, ClientLoginFailed, response_body)
+
+ RequestClientLoginToken = request_client_login_token
+
+ def client_login(self, email, password, source, service=None,
+ account_type='HOSTED_OR_GOOGLE',
+ auth_url=atom.http_core.Uri.parse_uri(
+ 'https://www.google.com/accounts/ClientLogin'),
+ captcha_token=None, captcha_response=None):
+ """Performs an auth request using the user's email address and password.
+
+ In order to modify user specific data and read user private data, your
+ application must be authorized by the user. One way to demonstrate
+ authorization is by including a Client Login token in the Authorization
+ HTTP header of all requests. This method requests the Client Login token
+ by sending the user's email address, password, the name of the
+ application, and the service code for the service which will be accessed
+ by the application. If the username and password are correct, the server
+ will respond with the client login code and a new ClientLoginToken
+ object will be set in the client's auth_token member. With the auth_token
+ set, future requests from this client will include the Client Login
+ token.
+
+ For a list of service names, see
+ http://code.google.com/apis/gdata/faq.html#clientlogin
+ For more information on Client Login, see:
+ http://code.google.com/apis/accounts/docs/AuthForInstalledApps.html
+
+ Args:
+ email: str The user's email address or username.
+ password: str The password for the user's account.
+ source: str The name of your application. This can be anything you
+ like but should give some indication of which app is
+ making the request.
+ service: str The service code for the service you would like to access.
+ For example, 'cp' for contacts, 'cl' for calendar. For a full
+ list see
+ http://code.google.com/apis/gdata/faq.html#clientlogin
+ If you are using a subclass of the gdata.client.GDClient, the
+ service will usually be filled in for you so you do not need
+ to specify it. For example see BloggerClient,
+ SpreadsheetsClient, etc.
+ account_type: str (optional) The type of account which is being
+ authenticated. This can be either 'GOOGLE' for a Google
+ Account, 'HOSTED' for a Google Apps Account, or the
+ default 'HOSTED_OR_GOOGLE' which will select the Google
+ Apps Account if the same email address is used for both
+ a Google Account and a Google Apps Account.
+ auth_url: str (optional) The URL to which the login request should be
+ sent.
+ captcha_token: str (optional) If a previous login attempt was responded
+ to with a CAPTCHA challenge, this is the token which
+ identifies the challenge (from the CAPTCHA's URL).
+ captcha_response: str (optional) If a previous login attempt was
+ responded to with a CAPTCHA challenge, this is the
+ response text which was contained in the challenge.
+
+ Returns:
+ None
+
+ Raises:
+ A RequestError or one of its subclasses: BadAuthentication,
+ BadAuthenticationServiceURL, ClientLoginFailed,
+ ClientLoginTokenMissing, or CaptchaChallenge
+ """
+ service = service or self.auth_service
+ self.auth_token = self.request_client_login_token(email, password,
+ source, service=service, account_type=account_type, auth_url=auth_url,
+ captcha_token=captcha_token, captcha_response=captcha_response)
+
+ ClientLogin = client_login
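
A minimal ClientLogin sketch. The service code 'cl' (Calendar) and the feed URL are assumptions for illustration; a service-specific subclass would normally fill in auth_service and provide its own convenience methods:

    import gdata.client
    import gdata.data

    client = gdata.client.GDClient()
    client.api_version = '2'
    client.client_login('user@example.com', 'password',
                        source='example.com-sampleapp-v1', service='cl')
    # The ClientLoginToken is now stored in client.auth_token and is sent
    # with every subsequent request made through this client.
    feed = client.get_feed(
        'https://www.google.com/calendar/feeds/default/private/full',
        desired_class=gdata.data.GDFeed)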
+
+ def upgrade_token(self, token=None, url=atom.http_core.Uri.parse_uri(
+ 'https://www.google.com/accounts/AuthSubSessionToken')):
+ """Asks the Google auth server for a multi-use AuthSub token.
+
+ For details on AuthSub, see:
+ http://code.google.com/apis/accounts/docs/AuthSub.html
+
+ Args:
+ token: gdata.gauth.AuthSubToken or gdata.gauth.SecureAuthSubToken
+ (optional) If no token is passed in, the client's auth_token member
+ is used to request the new token. The token object will be modified
+ to contain the new session token string.
+ url: str or atom.http_core.Uri (optional) The URL to which the token
+ upgrade request should be sent. Defaults to:
+ https://www.google.com/accounts/AuthSubSessionToken
+
+ Returns:
+ The upgraded gdata.gauth.AuthSubToken object.
+ """
+ # Default to using the auth_token member if no token is provided.
+ if token is None:
+ token = self.auth_token
+ # We cannot upgrade a None token.
+ if token is None:
+ raise UnableToUpgradeToken('No token was provided.')
+ if not isinstance(token, gdata.gauth.AuthSubToken):
+ raise UnableToUpgradeToken(
+ 'Cannot upgrade the token because it is not an AuthSubToken object.')
+ http_request = atom.http_core.HttpRequest(uri=url, method='GET')
+ token.modify_request(http_request)
+ # Use the lower level HttpClient to make the request.
+ response = self.http_client.request(http_request)
+ if response.status == 200:
+ token._upgrade_token(response.read())
+ return token
+ else:
+ raise UnableToUpgradeToken(
+ 'Server responded to token upgrade request with %s: %s' % (
+ response.status, response.read()))
+
+ UpgradeToken = upgrade_token
+
+ def revoke_token(self, token=None, url=atom.http_core.Uri.parse_uri(
+ 'https://www.google.com/accounts/AuthSubRevokeToken')):
+ """Requests that the token be invalidated.
+
+ This method can be used for both AuthSub and OAuth tokens (to invalidate
+ a ClientLogin token, the user must change their password).
+
+ Returns:
+ True if the server responded with a 200.
+
+ Raises:
+ A RequestError if the server responds with a non-200 status.
+ """
+ # Default to using the auth_token member if no token is provided.
+ if token is None:
+ token = self.auth_token
+
+ http_request = atom.http_core.HttpRequest(uri=url, method='GET')
+ token.modify_request(http_request)
+ response = self.http_client.request(http_request)
+ if response.status != 200:
+ raise error_from_response('Server sent non-200 to revoke token',
+ response, RequestError, response.read())
+
+ return True
+
+ RevokeToken = revoke_token
+
+ def get_oauth_token(self, scopes, next, consumer_key, consumer_secret=None,
+ rsa_private_key=None,
+ url=gdata.gauth.REQUEST_TOKEN_URL):
+ """Obtains an OAuth request token to allow the user to authorize this app.
+
+ Once this client has a request token, the user can authorize the request
+ token by visiting the authorization URL in their browser. After being
+ redirected back to this app at the 'next' URL, this app can then exchange
+ the authorized request token for an access token.
+
+ For more information see the documentation on Google Accounts with OAuth:
+ http://code.google.com/apis/accounts/docs/OAuth.html#AuthProcess
+
+ Args:
+ scopes: list of strings or atom.http_core.Uri objects which specify the
+ URL prefixes which this app will be accessing. For example, to access
+ the Google Calendar API, you would want to use scopes:
+ ['https://www.google.com/calendar/feeds/',
+ 'http://www.google.com/calendar/feeds/']
+ next: str or atom.http_core.Uri object, The URL which the user's browser
+ should be sent to after they authorize access to their data. This
+ should be a URL in your application which will read the token
+ information from the URL and upgrade the request token to an access
+ token.
+ consumer_key: str This is the identifier for this application which you
+ should have received when you registered your application with Google
+ to use OAuth.
+ consumer_secret: str (optional) The shared secret between your app and
+ Google which provides evidence that this request is coming from your
+ application and not another app. If present, this library assumes
+ you want to use an HMAC signature to verify requests. Keep this data
+ a secret.
+ rsa_private_key: str (optional) The RSA private key which is used to
+ generate a digital signature which is checked by Google's server. If
+ present, this library assumes that you want to use an RSA signature
+ to verify requests. Keep this data a secret.
+ url: The URL to which a request for a token should be made. The default
+ is Google's OAuth request token provider.
+ """
+ http_request = None
+ if rsa_private_key is not None:
+ http_request = gdata.gauth.generate_request_for_request_token(
+ consumer_key, gdata.gauth.RSA_SHA1, scopes,
+ rsa_key=rsa_private_key, auth_server_url=url, next=next)
+ elif consumer_secret is not None:
+ http_request = gdata.gauth.generate_request_for_request_token(
+ consumer_key, gdata.gauth.HMAC_SHA1, scopes,
+ consumer_secret=consumer_secret, auth_server_url=url, next=next)
+ else:
+ raise MissingOAuthParameters(
+ 'To request an OAuth token, you must provide your consumer secret'
+ ' or your private RSA key.')
+
+ response = self.http_client.request(http_request)
+ response_body = response.read()
+
+ if response.status != 200:
+ raise error_from_response('Unable to obtain OAuth request token',
+ response, RequestError, response_body)
+
+ if rsa_private_key is not None:
+ return gdata.gauth.rsa_token_from_body(response_body, consumer_key,
+ rsa_private_key,
+ gdata.gauth.REQUEST_TOKEN)
+ elif consumer_secret is not None:
+ return gdata.gauth.hmac_token_from_body(response_body, consumer_key,
+ consumer_secret,
+ gdata.gauth.REQUEST_TOKEN)
+
+ GetOAuthToken = get_oauth_token
+
+ def get_access_token(self, request_token,
+ url=gdata.gauth.ACCESS_TOKEN_URL):
+ """Exchanges an authorized OAuth request token for an access token.
+
+ Contacts the Google OAuth server to upgrade a previously authorized
+ request token. Once the request token is upgraded to an access token,
+ the access token may be used to access the user's data.
+
+ For more details, see the Google Accounts OAuth documentation:
+ http://code.google.com/apis/accounts/docs/OAuth.html#AccessToken
+
+ Args:
+ request_token: An OAuth token which has been authorized by the user.
+ url: (optional) The URL to which the upgrade request should be sent.
+ Defaults to gdata.gauth.ACCESS_TOKEN_URL.
+ """
+ http_request = gdata.gauth.generate_request_for_access_token(
+ request_token, auth_server_url=url)
+ response = self.http_client.request(http_request)
+ response_body = response.read()
+ if response.status != 200:
+ raise error_from_response(
+ 'Unable to upgrade OAuth request token to access token',
+ response, RequestError, response_body)
+
+ return gdata.gauth.upgrade_to_access_token(request_token, response_body)
+
+ GetAccessToken = get_access_token
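
A hedged sketch of the two OAuth calls above using an HMAC-SHA1 signature. The scope, callback URL, consumer key and consumer secret are placeholders, and the user-authorization step between the two calls is only summarized in a comment:

    import gdata.client

    client = gdata.client.GDClient()
    scopes = ['https://www.google.com/calendar/feeds/']
    request_token = client.get_oauth_token(
        scopes, 'http://www.example.com/oauth/callback',
        'example.com', consumer_secret='my-consumer-secret')
    # ...send the user to Google's authorization page for this request token;
    # once they approve and are redirected back to the callback URL, the
    # authorized token can be exchanged for an access token:
    access_token = client.get_access_token(request_token)
    client.auth_token = access_token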
+
+ def modify_request(self, http_request):
+ """Adds or changes request before making the HTTP request.
+
+ This client will add the API version if it is specified.
+ Subclasses may override this method to add their own request
+ modifications before the request is made.
+ """
+ http_request = atom.client.AtomPubClient.modify_request(self,
+ http_request)
+ if self.api_version is not None:
+ http_request.headers['GData-Version'] = self.api_version
+ return http_request
+
+ ModifyRequest = modify_request
+
+ def get_feed(self, uri, auth_token=None, converter=None,
+ desired_class=gdata.data.GDFeed, **kwargs):
+ return self.request(method='GET', uri=uri, auth_token=auth_token,
+ converter=converter, desired_class=desired_class,
+ **kwargs)
+
+ GetFeed = get_feed
+
+ def get_entry(self, uri, auth_token=None, converter=None,
+ desired_class=gdata.data.GDEntry, etag=None, **kwargs):
+ http_request = atom.http_core.HttpRequest()
+ # Conditional retrieval
+ if etag is not None:
+ http_request.headers['If-None-Match'] = etag
+ return self.request(method='GET', uri=uri, auth_token=auth_token,
+ http_request=http_request, converter=converter,
+ desired_class=desired_class, **kwargs)
+
+ GetEntry = get_entry
+
+ def get_next(self, feed, auth_token=None, converter=None,
+ desired_class=None, **kwargs):
+ """Fetches the next set of results from the feed.
+
+ When requesting a feed, the number of entries returned is capped at a
+ service specific default limit (often 25 entries). You can specify your
+ own entry-count cap using the max-results URL query parameter. If there
+ are more results than could fit under max-results, the feed will contain
+ a next link. This method performs a GET against this next results URL.
+
+ Returns:
+ A new feed object containing the next set of entries in this feed.
+ """
+ if converter is None and desired_class is None:
+ desired_class = feed.__class__
+ return self.get_feed(feed.find_next_link(), auth_token=auth_token,
+ converter=converter, desired_class=desired_class,
+ **kwargs)
+
+ GetNext = get_next
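
A sketch of paging through a feed with get_next. The feed URI is a placeholder for any Google Data feed this client may read (authenticate first for private feeds):

    import gdata.client
    import gdata.data

    client = gdata.client.GDClient()
    feed_uri = 'https://www.example.com/feeds/default/private/full'

    titles = []
    feed = client.get_feed(feed_uri, desired_class=gdata.data.GDFeed)
    while feed is not None:
        for entry in feed.entry:
            titles.append(entry.title.text)
        if feed.find_next_link() is None:
            break
        feed = client.get_next(feed)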
+
+ # TODO: add a refresh method to re-fetch the entry/feed from the server
+ # if it has been updated.
+
+ def post(self, entry, uri, auth_token=None, converter=None,
+ desired_class=None, **kwargs):
+ if converter is None and desired_class is None:
+ desired_class = entry.__class__
+ http_request = atom.http_core.HttpRequest()
+ http_request.add_body_part(
+ entry.to_string(get_xml_version(self.api_version)),
+ 'application/atom+xml')
+ return self.request(method='POST', uri=uri, auth_token=auth_token,
+ http_request=http_request, converter=converter,
+ desired_class=desired_class, **kwargs)
+
+ Post = post
+
+ def update(self, entry, auth_token=None, force=False, **kwargs):
+ """Edits the entry on the server by sending the XML for this entry.
+
+ Performs a PUT and converts the response to a new entry object with a
+ matching class to the entry passed in.
+
+ Args:
+ entry: The modified entry object to be written to the server. Its edit
+ link is used as the request URI and its etag (if present) is sent
+ in the If-Match header.
+ auth_token: (optional) An object which sets the Authorization header in
+ its modify_request method, e.g. gdata.gauth.ClientLoginToken.
+ force: boolean stating whether an update should be forced. Defaults to
+ False. Normally, if a change has been made since the passed in
+ entry was obtained, the server will not overwrite the entry since
+ the changes were based on an obsolete version of the entry.
+ Setting force to True will cause the update to silently
+ overwrite whatever version is present.
+
+ Returns:
+ A new Entry object of a matching type to the entry which was passed in.
+ """
+ http_request = atom.http_core.HttpRequest()
+ http_request.add_body_part(
+ entry.to_string(get_xml_version(self.api_version)),
+ 'application/atom+xml')
+ # Include the ETag in the request if present.
+ if force:
+ http_request.headers['If-Match'] = '*'
+ elif hasattr(entry, 'etag') and entry.etag:
+ http_request.headers['If-Match'] = entry.etag
+
+ return self.request(method='PUT', uri=entry.find_edit_link(),
+ auth_token=auth_token, http_request=http_request,
+ desired_class=entry.__class__, **kwargs)
+
+ Update = update
+
+ def delete(self, entry_or_uri, auth_token=None, force=False, **kwargs):
+ http_request = atom.http_core.HttpRequest()
+
+ # Include the ETag in the request if present.
+ if force:
+ http_request.headers['If-Match'] = '*'
+ elif hasattr(entry_or_uri, 'etag') and entry_or_uri.etag:
+ http_request.headers['If-Match'] = entry_or_uri.etag
+
+ # If the user passes in a URL, delete it directly; this may fail if the
+ # service requires an ETag.
+ if isinstance(entry_or_uri, (str, unicode, atom.http_core.Uri)):
+ return self.request(method='DELETE', uri=entry_or_uri,
+ http_request=http_request, auth_token=auth_token,
+ **kwargs)
+
+ return self.request(method='DELETE', uri=entry_or_uri.find_edit_link(),
+ http_request=http_request, auth_token=auth_token,
+ **kwargs)
+
+ Delete = delete
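
A sketch of the ETag behaviour of update and delete. The entry URI is a placeholder and `client` is assumed to be authenticated:

    import gdata.client
    import gdata.data

    client = gdata.client.GDClient()
    entry_uri = 'https://www.example.com/feeds/default/private/full/entry1'

    entry = client.get_entry(entry_uri, desired_class=gdata.data.GDEntry)
    entry.title.text = 'Updated title'

    # Sends If-Match: <etag>; the PUT fails with a RequestError if the entry
    # changed on the server since it was fetched. force=True would send
    # If-Match: * and overwrite unconditionally.
    updated = client.update(entry)

    # delete accepts either an entry (its edit link and etag are used) or a
    # plain URI string.
    client.delete(updated)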
+
+ #TODO: implement batch requests.
+ #def batch(feed, uri, auth_token=None, converter=None, **kwargs):
+ # pass
+
+ # TODO: add a refresh method to request a conditional update to an entry
+ # or feed.
+
+
+def _add_query_param(param_string, value, http_request):
+ if value:
+ http_request.uri.query[param_string] = value
+
+
+class Query(object):
+
+ def __init__(self, text_query=None, categories=None, author=None, alt=None,
+ updated_min=None, updated_max=None, pretty_print=False,
+ published_min=None, published_max=None, start_index=None,
+ max_results=None, strict=False):
+ """Constructs a Google Data Query to filter feed contents serverside.
+
+ Args:
+ text_query: Full text search str (optional)
+ categories: list of strings (optional). Each string is a required
+ category. To include an 'or' query, put a | in the string between
+ terms. For example, to find everything in the Fitz category and
+ the Laurie or Jane category (Fitz and (Laurie or Jane)) you would
+ set categories to ['Fitz', 'Laurie|Jane'].
+ author: str (optional) The service returns entries where the author
+ name and/or email address match your query string.
+ alt: str (optional) for the Alternative representation type you'd like
+ the feed in. If you don't specify an alt parameter, the service
+ returns an Atom feed. This is equivalent to alt='atom'.
+ alt='rss' returns an RSS 2.0 result feed.
+ alt='json' returns a JSON representation of the feed.
+ alt='json-in-script' Requests a response that wraps JSON in a script
+ tag.
+ alt='atom-in-script' Requests an Atom response that wraps an XML
+ string in a script tag.
+ alt='rss-in-script' Requests an RSS response that wraps an XML
+ string in a script tag.
+ updated_min: str (optional), RFC 3339 timestamp format, lower bounds.
+ For example: 2005-08-09T10:57:00-08:00
+ updated_max: str (optional) updated time must be earlier than timestamp.
+ pretty_print: boolean (optional) If True the server's XML response will
+ be indented to make it more human readable. Defaults to False.
+ published_min: str (optional), Similar to updated_min but for published
+ time.
+ published_max: str (optional), Similar to updated_max but for published
+ time.
+ start_index: int or str (optional) 1-based index of the first result to
+ be retrieved. Note that this isn't a general cursoring mechanism.
+ If you first send a query with ?start-index=1&max-results=10 and
+ then send another query with ?start-index=11&max-results=10, the
+ service cannot guarantee that the results are equivalent to
+ ?start-index=1&max-results=20, because insertions and deletions
+ could have taken place in between the two queries.
+ max_results: int or str (optional) Maximum number of results to be
+ retrieved. Each service has a default max (usually 25) which can
+ vary from service to service. There is also a service-specific
+ limit to the max_results you can fetch in a request.
+ strict: boolean (optional) If True, the server will return an error if
+ the server does not recognize any of the parameters in the request
+ URL. Defaults to False.
+ """
+ self.text_query = text_query
+ self.categories = categories or []
+ self.author = author
+ self.alt = alt
+ self.updated_min = updated_min
+ self.updated_max = updated_max
+ self.pretty_print = pretty_print
+ self.published_min = published_min
+ self.published_max = published_max
+ self.start_index = start_index
+ self.max_results = max_results
+ self.strict = strict
+
+ def modify_request(self, http_request):
+ _add_query_param('q', self.text_query, http_request)
+ if self.categories:
+ http_request.uri.query['categories'] = ','.join(self.categories)
+ _add_query_param('author', self.author, http_request)
+ _add_query_param('alt', self.alt, http_request)
+ _add_query_param('updated-min', self.updated_min, http_request)
+ _add_query_param('updated-max', self.updated_max, http_request)
+ if self.pretty_print:
+ http_request.uri.query['prettyprint'] = 'true'
+ _add_query_param('published-min', self.published_min, http_request)
+ _add_query_param('published-max', self.published_max, http_request)
+ if self.start_index is not None:
+ http_request.uri.query['start-index'] = str(self.start_index)
+ if self.max_results is not None:
+ http_request.uri.query['max-results'] = str(self.max_results)
+ if self.strict:
+ http_request.uri.query['strict'] = 'true'
+
+
+ ModifyRequest = modify_request
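
A small sketch of how Query.modify_request rewrites a request URI; the feed URL is a placeholder:

    import atom.http_core
    import gdata.client

    query = gdata.client.Query(text_query='roses', max_results=10,
                               categories=['Fitz', 'Laurie|Jane'])
    request = atom.http_core.HttpRequest(
        uri=atom.http_core.Uri.parse_uri('https://www.example.com/feeds/x'))
    query.modify_request(request)
    # request.uri.query now holds:
    # {'q': 'roses', 'max-results': '10', 'categories': 'Fitz,Laurie|Jane'}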
+
+
+class GDQuery(atom.http_core.Uri):
+
+ def _get_text_query(self):
+ return self.query['q']
+
+ def _set_text_query(self, value):
+ self.query['q'] = value
+
+ text_query = property(_get_text_query, _set_text_query,
+ doc='The q parameter for searching for an exact text match on content')
+
+
+class ResumableUploader(object):
+ """Resumable upload helper for the Google Data protocol."""
+
+ DEFAULT_CHUNK_SIZE = 5242880 # 5MB
+
+ def __init__(self, client, file_handle, content_type, total_file_size,
+ chunk_size=None, desired_class=None):
+ """Starts a resumable upload to a service that supports the protocol.
+
+ Args:
+ client: gdata.client.GDClient A Google Data API service.
+ file_handle: object A file-like object containing the file to upload.
+ content_type: str The mimetype of the file to upload.
+ total_file_size: int The file's total size in bytes.
+ chunk_size: int The size of each upload chunk. If None, the
+ DEFAULT_CHUNK_SIZE will be used.
+ desired_class: object (optional) The type of gdata.data.GDEntry to parse
+ the completed entry as. This should be specific to the API.
+ """
+ self.client = client
+ self.file_handle = file_handle
+ self.content_type = content_type
+ self.total_file_size = total_file_size
+ self.chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE
+ self.desired_class = desired_class or gdata.data.GDEntry
+ self.upload_uri = None
+
+ # Send the entire file in one chunk if its total size is no larger than the chunk size.
+ if self.total_file_size <= self.chunk_size:
+ self.chunk_size = total_file_size
+
+ def _init_session(self, resumable_media_link, entry=None, headers=None,
+ auth_token=None):
+ """Starts a new resumable upload to a service that supports the protocol.
+
+ The method makes a request to initiate a new upload session. The unique
+ upload uri returned by the server (and set in this method) should be used
+ to send upload chunks to the server.
+
+ Args:
+ resumable_media_link: str The full URL for the #resumable-create-media or
+ #resumable-edit-media link for starting a resumable upload request or
+ updating media using a resumable PUT.
+ entry: An (optional) gdata.data.GDEntry containing metadata to create the
+ upload from.
+ headers: dict (optional) Additional headers to send in the initial request
+ to create the resumable upload request. These headers will override
+ any default headers sent in the request. For example:
+ headers={'Slug': 'MyTitle'}.
+ auth_token: (optional) An object which sets the Authorization HTTP header
+ in its modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others.
+
+ Returns:
+ None. The unique upload URI returned by the server is stored in
+ self.upload_uri for use by subsequent chunk uploads.
+
+ Raises:
+ RequestError if the server rejects the request to create the
+ resumable upload session.
+ """
+ http_request = atom.http_core.HttpRequest()
+
+ # Send empty POST if Atom XML wasn't specified.
+ if entry is None:
+ http_request.add_body_part('', self.content_type, size=0)
+ else:
+ http_request.add_body_part(str(entry), 'application/atom+xml',
+ size=len(str(entry)))
+ http_request.headers['X-Upload-Content-Type'] = self.content_type
+ http_request.headers['X-Upload-Content-Length'] = self.total_file_size
+
+ if headers is not None:
+ http_request.headers.update(headers)
+
+ response = self.client.request(method='POST',
+ uri=resumable_media_link,
+ auth_token=auth_token,
+ http_request=http_request)
+
+ self.upload_uri = (response.getheader('location') or
+ response.getheader('Location'))
+
+ _InitSession = _init_session
+
+ def upload_chunk(self, start_byte, content_bytes):
+ """Uploads a byte range (chunk) to the resumable upload server.
+
+ Args:
+ start_byte: int The byte offset of the total file where the byte range
+ passed in lives.
+ content_bytes: str The file contents of this chunk.
+
+ Returns:
+ The final Atom entry created on the server. The entry object's type will
+ be the class specified in self.desired_class.
+
+ Raises:
+ RequestError if the unique upload uri is not set or the
+ server returns something other than an HTTP 308 when the upload is
+ incomplete.
+ """
+ if self.upload_uri is None:
+ raise RequestError('Resumable upload request not initialized.')
+
+ # Adjustment if last byte range is less than defined chunk size.
+ chunk_size = self.chunk_size
+ if len(content_bytes) <= chunk_size:
+ chunk_size = len(content_bytes)
+
+ http_request = atom.http_core.HttpRequest()
+ http_request.add_body_part(content_bytes, self.content_type,
+ size=len(content_bytes))
+ http_request.headers['Content-Range'] = ('bytes %s-%s/%s'
+ % (start_byte,
+ start_byte + chunk_size - 1,
+ self.total_file_size))
+
+ try:
+ response = self.client.request(method='POST', uri=self.upload_uri,
+ http_request=http_request,
+ desired_class=self.desired_class)
+ return response
+ except RequestError, error:
+ if error.status == 308:
+ return None
+ else:
+ raise error
+
+ UploadChunk = upload_chunk
+
+ def upload_file(self, resumable_media_link, entry=None, headers=None,
+ auth_token=None):
+ """Uploads an entire file in chunks using the resumable upload protocol.
+
+ If you are interested in pausing an upload or controlling the chunking
+ yourself, use the upload_chunk() method instead.
+
+ Args:
+ resumable_media_link: str The full URL for the #resumable-create-media link for
+ starting a resumable upload request.
+ entry: An (optional) gdata.data.GDEntry containing metadata to create the
+ upload from.
+ headers: dict Additional headers to send in the initial request to create
+ the resumable upload request. These headers will override any default
+ headers sent in the request. For example: headers={'Slug': 'MyTitle'}.
+ auth_token: (optional) An object which sets the Authorization HTTP header
+ in its modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others.
+
+ Returns:
+ The final Atom entry created on the server. The entry object's type will
+ be the class specified in self.desired_class.
+
+ Raises:
+ RequestError if the server responds to a chunk upload with anything
+ other than a success status or HTTP 308.
+ """
+ self._init_session(resumable_media_link, headers=headers,
+ auth_token=auth_token, entry=entry)
+
+ start_byte = 0
+ entry = None
+
+ while not entry:
+ entry = self.upload_chunk(
+ start_byte, self.file_handle.read(self.chunk_size))
+ start_byte += self.chunk_size
+
+ return entry
+
+ UploadFile = upload_file
+
+ def update_file(self, entry_or_resumable_edit_link, headers=None, force=False,
+ auth_token=None):
+ """Updates the contents of an existing file using the resumable protocol.
+
+ If you are interested in pausing an upload or controlling the chunking
+ yourself, use the upload_chunk() method instead.
+
+ Args:
+ entry_or_resumable_edit_link: object or string A gdata.data.GDEntry for
+ the entry/file to update or the full uri of the link with rel
+ #resumable-edit-media.
+ headers: dict Additional headers to send in the initial request to create
+ the resumable upload request. These headers will override any default
+ headers sent in the request. For example: headers={'Slug': 'MyTitle'}.
+ force: boolean (optional) True to force an update and set the If-Match
+ header to '*'. If False and entry_or_resumable_edit_link is a
+ gdata.data.GDEntry object, its etag value is used. Otherwise this
+ parameter should be set to True to force the update.
+ auth_token: (optional) An object which sets the Authorization HTTP header
+ in its modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others.
+
+ Returns:
+ The final Atom entry created on the server. The entry object's type will
+ be the class specified in self.desired_class.
+
+ Raises:
+ RequestError if the server responds to a chunk upload with anything
+ other than a success status or HTTP 308.
+ """
+ # Need to override the POST request for a resumable update (required).
+ customer_headers = {'X-HTTP-Method-Override': 'PUT'}
+
+ if headers is not None:
+ customer_headers.update(headers)
+
+ if isinstance(entry_or_resumable_edit_link, gdata.data.GDEntry):
+ resumable_edit_link = entry_or_resumable_edit_link.find_url(
+ 'http://schemas.google.com/g/2005#resumable-edit-media')
+ customer_headers['If-Match'] = entry_or_resumable_edit_link.etag
+ else:
+ resumable_edit_link = entry_or_resumable_edit_link
+
+ if force:
+ customer_headers['If-Match'] = '*'
+
+ return self.upload_file(resumable_edit_link, headers=customer_headers,
+ auth_token=auth_token)
+
+ UpdateFile = update_file
+
+ def query_upload_status(self, uri=None):
+ """Queries the current status of a resumable upload request.
+
+ Args:
+ uri: str (optional) A resumable upload uri to query and override the one
+ that is set in this object.
+
+ Returns:
+ An integer representing the file position (byte) to resume the upload from
+ or True if the upload is complete.
+
+ Raises:
+ RequestError if the server responds with anything other than
+ HTTP 201 (upload complete) or HTTP 308 (upload incomplete).
+ """
+ # Override object's unique upload uri.
+ if uri is None:
+ uri = self.upload_uri
+
+ http_request = atom.http_core.HttpRequest()
+ http_request.headers['Content-Length'] = '0'
+ http_request.headers['Content-Range'] = 'bytes */%s' % self.total_file_size
+
+ try:
+ response = self.client.request(
+ method='POST', uri=uri, http_request=http_request)
+ if response.status == 201:
+ return True
+ else:
+ raise error_from_response(
+ '%s returned by server' % response.status, response, RequestError)
+ except RequestError, error:
+ if error.status == 308:
+ for pair in error.headers:
+ if pair[0].capitalize() == 'Range':
+ return int(pair[1].split('-')[1]) + 1
+ else:
+ raise error
+
+ QueryUploadStatus = query_upload_status
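
A hedged end-to-end sketch of ResumableUploader. The file name, MIME type and resumable-create-media URL are placeholders, and the client is assumed to be authenticated against a service that supports resumable uploads:

    import os
    import gdata.client
    import gdata.data

    client = gdata.client.GDClient()  # assumed already authenticated
    path = 'video.mp4'
    handle = open(path, 'rb')
    uploader = gdata.client.ResumableUploader(
        client, handle, 'video/mp4', os.path.getsize(path),
        desired_class=gdata.data.GDEntry)
    entry = uploader.upload_file(
        'https://uploads.example.com/resumable/feeds/default',  # placeholder
        headers={'Slug': 'video.mp4'})
    handle.close()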
diff --git a/python/gdata/codesearch/__init__.py b/python/gdata/codesearch/__init__.py
new file mode 100644
index 0000000..fa23ef0
--- /dev/null
+++ b/python/gdata/codesearch/__init__.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2007 Benoit Chesneau
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+"""Contains extensions to Atom objects used by Google Codesearch"""
+
+__author__ = 'Benoit Chesneau'
+
+
+import atom
+import gdata
+
+
+CODESEARCH_NAMESPACE='http://schemas.google.com/codesearch/2006'
+CODESEARCH_TEMPLATE = '{http://schemas.google.com/codesearch/2006}%s'
+
+
+class Match(atom.AtomBase):
+ """ The Google Codesearch match element """
+ _tag = 'match'
+ _namespace = CODESEARCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['lineNumber'] = 'line_number'
+ _attributes['type'] = 'type'
+
+ def __init__(self, line_number=None, type=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.type = type
+ self.line_number = line_number
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class File(atom.AtomBase):
+ """ The Google Codesearch file element"""
+ _tag = 'file'
+ _namespace = CODESEARCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+
+ def __init__(self, name=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.name = name
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Package(atom.AtomBase):
+ """ The Google Codesearch package element"""
+ _tag = 'package'
+ _namespace = CODESEARCH_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+ _attributes['uri'] = 'uri'
+
+ def __init__(self, name=None, uri=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.text = text
+ self.name = name
+ self.uri = uri
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class CodesearchEntry(gdata.GDataEntry):
+ """ Google codesearch atom entry"""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ _children['{%s}file' % CODESEARCH_NAMESPACE] = ('file', File)
+ _children['{%s}package' % CODESEARCH_NAMESPACE] = ('package', Package)
+ _children['{%s}match' % CODESEARCH_NAMESPACE] = ('match', [Match])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ match=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title,
+ updated=updated, text=text)
+
+ self.match = match or []
+
+
+def CodesearchEntryFromString(xml_string):
+ """Converts an XML string into a CodesearchEntry object.
+
+ Args:
+ xml_string: string The XML describing a Codesearch feed entry.
+
+ Returns:
+ A CodesearchEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(CodesearchEntry, xml_string)
+
+
+class CodesearchFeed(gdata.GDataFeed):
+ """feed containing list of Google codesearch Items"""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CodesearchEntry])
+
+
+def CodesearchFeedFromString(xml_string):
+ """Converts an XML string into a CodesearchFeed object.
+ Args:
+ xml_string: string The XML describing a Codesearch feed.
+ Returns:
+ A CodeseartchFeed object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(CodesearchFeed, xml_string)
diff --git a/python/gdata/codesearch/service.py b/python/gdata/codesearch/service.py
new file mode 100644
index 0000000..d6e2335
--- /dev/null
+++ b/python/gdata/codesearch/service.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2007 Benoit Chesneau
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+"""CodesearchService extends GDataService to streamline Google Codesearch
+operations"""
+
+
+__author__ = 'Benoit Chesneau'
+
+
+import atom
+import gdata.service
+import gdata.codesearch
+
+
+class CodesearchService(gdata.service.GDataService):
+ """Client extension for Google codesearch service"""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='www.google.com', additional_headers=None, **kwargs):
+ """Creates a client for the Google codesearch service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'www.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='codesearch',
+ source=source, server=server, additional_headers=additional_headers,
+ **kwargs)
+
+ def Query(self, uri, converter=gdata.codesearch.CodesearchFeedFromString):
+ """Queries the Codesearch feed and returns the resulting feed of
+ entries.
+
+ Args:
+ uri: string The full URI to be queried. This can contain query
+ parameters, a hostname, or simply the relative path to a Codesearch
+ feed. The CodesearchQuery object is useful when constructing
+ query parameters.
+ converter: func (optional) A function which will be executed on the
+ retrieved item, generally to render it into a Python object.
+ By default the CodesearchFeedFromString function is used to
+ return a CodesearchFeed object. This is because most feed
+ queries will result in a feed and not a single entry.
+
+ Returns:
+ A CodesearchFeed object representing the feed returned by the server.
+ """
+ return self.Get(uri, converter=converter)
+
+ def GetSnippetsFeed(self, text_query=None):
+ """Retrieve Codesearch feed for a keyword
+
+ Args:
+ text_query : string (optional) The contents of the q query parameter. This
+ string is URL escaped upon conversion to a URI.
+ Returns:
+ A CodesearchFeed objects representing the feed returned by the server
+ """
+
+ query = gdata.codesearch.service.CodesearchQuery(text_query=text_query)
+ feed = self.Query(query.ToUri())
+ return feed
+
+
+class CodesearchQuery(gdata.service.Query):
+ """Object used to construct the query to the Google Codesearch feed. here only as a shorcut"""
+
+ def __init__(self, feed='/codesearch/feeds/search', text_query=None,
+ params=None, categories=None):
+ """Constructor for Codesearch Query.
+
+ Args:
+ feed: string (optional) The path for the feed. (e.g. '/codesearch/feeds/search')
+ text_query: string (optional) The contents of the q query parameter. This
+ string is URL escaped upon conversion to a URI.
+ params: dict (optional) Parameter value string pairs which become URL
+ params when translated to a URI. These parameters are added to
+ the query's items.
+ categories: list (optional) List of category strings which should be
+ included as query categories. See gdata.service.Query for
+ additional documentation.
+
+ Returns:
+ A CodesearchQuery object used to construct a URI based on the Codesearch
+ feed.
+ """
+
+ gdata.service.Query.__init__(self, feed, text_query, params, categories)
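
A usage sketch for the classes above. The Code Search service itself has long been retired, so this is purely illustrative of how the service, feed and query objects fit together:

    import gdata.codesearch.service

    client = gdata.codesearch.service.CodesearchService(source='example-app')
    feed = client.GetSnippetsFeed('hello world')
    for entry in feed.entry:
        for match in entry.match:
            line_number = match.line_number  # attributes of the Match element
            snippet = match.text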
diff --git a/python/gdata/contacts/__init__.py b/python/gdata/contacts/__init__.py
new file mode 100644
index 0000000..41e7c31
--- /dev/null
+++ b/python/gdata/contacts/__init__.py
@@ -0,0 +1,740 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains extensions to ElementWrapper objects used with Google Contacts."""
+
+__author__ = 'dbrattli (Dag Brattli)'
+
+
+import atom
+import gdata
+
+
+## Constants from http://code.google.com/apis/gdata/elements.html ##
+REL_HOME = 'http://schemas.google.com/g/2005#home'
+REL_WORK = 'http://schemas.google.com/g/2005#work'
+REL_OTHER = 'http://schemas.google.com/g/2005#other'
+
+# AOL Instant Messenger protocol
+IM_AIM = 'http://schemas.google.com/g/2005#AIM'
+IM_MSN = 'http://schemas.google.com/g/2005#MSN' # MSN Messenger protocol
+IM_YAHOO = 'http://schemas.google.com/g/2005#YAHOO' # Yahoo Messenger protocol
+IM_SKYPE = 'http://schemas.google.com/g/2005#SKYPE' # Skype protocol
+IM_QQ = 'http://schemas.google.com/g/2005#QQ' # QQ protocol
+# Google Talk protocol
+IM_GOOGLE_TALK = 'http://schemas.google.com/g/2005#GOOGLE_TALK'
+IM_ICQ = 'http://schemas.google.com/g/2005#ICQ' # ICQ protocol
+IM_JABBER = 'http://schemas.google.com/g/2005#JABBER' # Jabber protocol
+IM_NETMEETING = 'http://schemas.google.com/g/2005#netmeeting' # NetMeeting
+
+PHOTO_LINK_REL = 'http://schemas.google.com/contacts/2008/rel#photo'
+PHOTO_EDIT_LINK_REL = 'http://schemas.google.com/contacts/2008/rel#edit-photo'
+
+# Different phone types, for more info see:
+# http://code.google.com/apis/gdata/docs/2.0/elements.html#gdPhoneNumber
+PHONE_CAR = 'http://schemas.google.com/g/2005#car'
+PHONE_FAX = 'http://schemas.google.com/g/2005#fax'
+PHONE_GENERAL = 'http://schemas.google.com/g/2005#general'
+PHONE_HOME = REL_HOME
+PHONE_HOME_FAX = 'http://schemas.google.com/g/2005#home_fax'
+PHONE_INTERNAL = 'http://schemas.google.com/g/2005#internal-extension'
+PHONE_MOBILE = 'http://schemas.google.com/g/2005#mobile'
+PHONE_OTHER = REL_OTHER
+PHONE_PAGER = 'http://schemas.google.com/g/2005#pager'
+PHONE_SATELLITE = 'http://schemas.google.com/g/2005#satellite'
+PHONE_VOIP = 'http://schemas.google.com/g/2005#voip'
+PHONE_WORK = REL_WORK
+PHONE_WORK_FAX = 'http://schemas.google.com/g/2005#work_fax'
+PHONE_WORK_MOBILE = 'http://schemas.google.com/g/2005#work_mobile'
+PHONE_WORK_PAGER = 'http://schemas.google.com/g/2005#work_pager'
+PHONE_MAIN = 'http://schemas.google.com/g/2005#main'
+PHONE_ASSISTANT = 'http://schemas.google.com/g/2005#assistant'
+PHONE_CALLBACK = 'http://schemas.google.com/g/2005#callback'
+PHONE_COMPANY_MAIN = 'http://schemas.google.com/g/2005#company_main'
+PHONE_ISDN = 'http://schemas.google.com/g/2005#isdn'
+PHONE_OTHER_FAX = 'http://schemas.google.com/g/2005#other_fax'
+PHONE_RADIO = 'http://schemas.google.com/g/2005#radio'
+PHONE_TELEX = 'http://schemas.google.com/g/2005#telex'
+PHONE_TTY_TDD = 'http://schemas.google.com/g/2005#tty_tdd'
+
+EXTERNAL_ID_ORGANIZATION = 'organization'
+
+RELATION_MANAGER = 'manager'
+
+CONTACTS_NAMESPACE = 'http://schemas.google.com/contact/2008'
+
+
+class GDataBase(atom.AtomBase):
+ """The Google Contacts intermediate class from atom.AtomBase."""
+
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, text=None,
+ extension_elements=None, extension_attributes=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class ContactsBase(GDataBase):
+ """The Google Contacts intermediate class for Contacts namespace."""
+
+ _namespace = CONTACTS_NAMESPACE
+
+
+class OrgName(GDataBase):
+ """The Google Contacts OrgName element."""
+
+ _tag = 'orgName'
+
+
+class OrgTitle(GDataBase):
+ """The Google Contacts OrgTitle element."""
+
+ _tag = 'orgTitle'
+
+
+class OrgDepartment(GDataBase):
+ """The Google Contacts OrgDepartment element."""
+
+ _tag = 'orgDepartment'
+
+
+class OrgJobDescription(GDataBase):
+ """The Google Contacts OrgJobDescription element."""
+
+ _tag = 'orgJobDescription'
+
+
+class Where(GDataBase):
+ """The Google Contacts Where element."""
+
+ _tag = 'where'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['rel'] = 'rel'
+ _attributes['label'] = 'label'
+ _attributes['valueString'] = 'value_string'
+
+ def __init__(self, value_string=None, rel=None, label=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.rel = rel
+ self.label = label
+ self.value_string = value_string
+
+
+class When(GDataBase):
+ """The Google Contacts When element."""
+
+ _tag = 'when'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['startTime'] = 'start_time'
+ _attributes['endTime'] = 'end_time'
+ _attributes['label'] = 'label'
+
+ def __init__(self, start_time=None, end_time=None, label=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.start_time = start_time
+ self.end_time = end_time
+ self.label = label
+
+
+class Organization(GDataBase):
+ """The Google Contacts Organization element."""
+
+ _tag = 'organization'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['label'] = 'label'
+ _attributes['rel'] = 'rel'
+ _attributes['primary'] = 'primary'
+ _children['{%s}orgName' % GDataBase._namespace] = (
+ 'org_name', OrgName)
+ _children['{%s}orgTitle' % GDataBase._namespace] = (
+ 'org_title', OrgTitle)
+ _children['{%s}orgDepartment' % GDataBase._namespace] = (
+ 'org_department', OrgDepartment)
+ _children['{%s}orgJobDescription' % GDataBase._namespace] = (
+ 'org_job_description', OrgJobDescription)
+ #_children['{%s}where' % GDataBase._namespace] = ('where', Where)
+
+ def __init__(self, label=None, rel=None, primary='false', org_name=None,
+ org_title=None, org_department=None, org_job_description=None,
+ where=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.label = label
+ self.rel = rel or REL_OTHER
+ self.primary = primary
+ self.org_name = org_name
+ self.org_title = org_title
+ self.org_department = org_department
+ self.org_job_description = org_job_description
+ self.where = where
+
+
+class PostalAddress(GDataBase):
+ """The Google Contacts PostalAddress element."""
+
+ _tag = 'postalAddress'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['rel'] = 'rel'
+ _attributes['primary'] = 'primary'
+
+ def __init__(self, primary=None, rel=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.rel = rel or REL_OTHER
+ self.primary = primary
+
+
+class FormattedAddress(GDataBase):
+ """The Google Contacts FormattedAddress element."""
+
+ _tag = 'formattedAddress'
+
+
+class StructuredPostalAddress(GDataBase):
+ """The Google Contacts StructuredPostalAddress element."""
+
+ _tag = 'structuredPostalAddress'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['rel'] = 'rel'
+ _attributes['primary'] = 'primary'
+ _children['{%s}formattedAddress' % GDataBase._namespace] = (
+ 'formatted_address', FormattedAddress)
+
+ def __init__(self, rel=None, primary=None,
+ formatted_address=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.rel = rel or REL_OTHER
+ self.primary = primary
+ self.formatted_address = formatted_address
+
+
+class IM(GDataBase):
+ """The Google Contacts IM element."""
+
+ _tag = 'im'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['address'] = 'address'
+ _attributes['primary'] = 'primary'
+ _attributes['protocol'] = 'protocol'
+ _attributes['label'] = 'label'
+ _attributes['rel'] = 'rel'
+
+ def __init__(self, primary='false', rel=None, address=None, protocol=None,
+ label=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.protocol = protocol
+ self.address = address
+ self.primary = primary
+ self.rel = rel or REL_OTHER
+ self.label = label
+
+
+class Email(GDataBase):
+ """The Google Contacts Email element."""
+
+ _tag = 'email'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['address'] = 'address'
+ _attributes['primary'] = 'primary'
+ _attributes['rel'] = 'rel'
+ _attributes['label'] = 'label'
+
+ def __init__(self, label=None, rel=None, address=None, primary='false',
+ text=None, extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.label = label
+ self.rel = rel or REL_OTHER
+ self.address = address
+ self.primary = primary
+
+
+class PhoneNumber(GDataBase):
+ """The Google Contacts PhoneNumber element."""
+
+ _tag = 'phoneNumber'
+ _children = GDataBase._children.copy()
+ _attributes = GDataBase._attributes.copy()
+ _attributes['label'] = 'label'
+ _attributes['rel'] = 'rel'
+ _attributes['uri'] = 'uri'
+ _attributes['primary'] = 'primary'
+
+ def __init__(self, label=None, rel=None, uri=None, primary='false',
+ text=None, extension_elements=None, extension_attributes=None):
+ GDataBase.__init__(self, text=text, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.label = label
+ self.rel = rel or REL_OTHER
+ self.uri = uri
+ self.primary = primary
+
+
+class Nickname(ContactsBase):
+ """The Google Contacts Nickname element."""
+
+ _tag = 'nickname'
+
+
+class Occupation(ContactsBase):
+ """The Google Contacts Occupation element."""
+
+ _tag = 'occupation'
+
+
+class Gender(ContactsBase):
+ """The Google Contacts Gender element."""
+
+ _tag = 'gender'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.value = value
+
+
+class Birthday(ContactsBase):
+ """The Google Contacts Birthday element."""
+
+ _tag = 'birthday'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['when'] = 'when'
+
+ def __init__(self, when=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.when = when
+
+
+class Relation(ContactsBase):
+ """The Google Contacts Relation element."""
+
+ _tag = 'relation'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['label'] = 'label'
+ _attributes['rel'] = 'rel'
+
+ def __init__(self, label=None, rel=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.label = label
+ self.rel = rel
+
+
+def RelationFromString(xml_string):
+ return atom.CreateClassFromXMLString(Relation, xml_string)
+
+
+class UserDefinedField(ContactsBase):
+ """The Google Contacts UserDefinedField element."""
+
+ _tag = 'userDefinedField'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['key'] = 'key'
+ _attributes['value'] = 'value'
+
+ def __init__(self, key=None, value=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.key = key
+ self.value = value
+
+
+def UserDefinedFieldFromString(xml_string):
+ return atom.CreateClassFromXMLString(UserDefinedField, xml_string)
+
+
+class Website(ContactsBase):
+ """The Google Contacts Website element."""
+
+ _tag = 'website'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['href'] = 'href'
+ _attributes['label'] = 'label'
+ _attributes['primary'] = 'primary'
+ _attributes['rel'] = 'rel'
+
+ def __init__(self, href=None, label=None, primary='false', rel=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.href = href
+ self.label = label
+ self.primary = primary
+ self.rel = rel
+
+
+def WebsiteFromString(xml_string):
+ return atom.CreateClassFromXMLString(Website, xml_string)
+
+
+class ExternalId(ContactsBase):
+ """The Google Contacts ExternalId element."""
+
+ _tag = 'externalId'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['label'] = 'label'
+ _attributes['rel'] = 'rel'
+ _attributes['value'] = 'value'
+
+ def __init__(self, label=None, rel=None, value=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.label = label
+ self.rel = rel
+ self.value = value
+
+
+def ExternalIdFromString(xml_string):
+ return atom.CreateClassFromXMLString(ExternalId, xml_string)
+
+
+class Event(ContactsBase):
+ """The Google Contacts Event element."""
+
+ _tag = 'event'
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['label'] = 'label'
+ _attributes['rel'] = 'rel'
+ _children['{%s}when' % ContactsBase._namespace] = ('when', When)
+
+ def __init__(self, label=None, rel=None, when=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.label = label
+ self.rel = rel
+ self.when = when
+
+
+def EventFromString(xml_string):
+ return atom.CreateClassFromXMLString(Event, xml_string)
+
+
+class Deleted(GDataBase):
+ """The Google Contacts Deleted element."""
+
+ _tag = 'deleted'
+
+
+class GroupMembershipInfo(ContactsBase):
+ """The Google Contacts GroupMembershipInfo element."""
+
+ _tag = 'groupMembershipInfo'
+
+ _children = ContactsBase._children.copy()
+ _attributes = ContactsBase._attributes.copy()
+ _attributes['deleted'] = 'deleted'
+ _attributes['href'] = 'href'
+
+ def __init__(self, deleted=None, href=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ ContactsBase.__init__(self, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.deleted = deleted
+ self.href = href
+
+
+class PersonEntry(gdata.BatchEntry):
+ """Base class for ContactEntry and ProfileEntry."""
+
+ _children = gdata.BatchEntry._children.copy()
+ _children['{%s}organization' % gdata.GDATA_NAMESPACE] = (
+ 'organization', [Organization])
+ _children['{%s}phoneNumber' % gdata.GDATA_NAMESPACE] = (
+ 'phone_number', [PhoneNumber])
+ _children['{%s}nickname' % CONTACTS_NAMESPACE] = ('nickname', Nickname)
+ _children['{%s}occupation' % CONTACTS_NAMESPACE] = ('occupation', Occupation)
+ _children['{%s}gender' % CONTACTS_NAMESPACE] = ('gender', Gender)
+ _children['{%s}birthday' % CONTACTS_NAMESPACE] = ('birthday', Birthday)
+ _children['{%s}postalAddress' % gdata.GDATA_NAMESPACE] = ('postal_address',
+ [PostalAddress])
+ _children['{%s}structuredPostalAddress' % gdata.GDATA_NAMESPACE] = (
+ 'structured_postal_address', [StructuredPostalAddress])
+ _children['{%s}email' % gdata.GDATA_NAMESPACE] = ('email', [Email])
+ _children['{%s}im' % gdata.GDATA_NAMESPACE] = ('im', [IM])
+ _children['{%s}relation' % CONTACTS_NAMESPACE] = ('relation', [Relation])
+ _children['{%s}userDefinedField' % CONTACTS_NAMESPACE] = (
+ 'user_defined_field', [UserDefinedField])
+ _children['{%s}website' % CONTACTS_NAMESPACE] = ('website', [Website])
+ _children['{%s}externalId' % CONTACTS_NAMESPACE] = (
+ 'external_id', [ExternalId])
+ _children['{%s}event' % CONTACTS_NAMESPACE] = ('event', [Event])
+ # The following line should be removed once the Python support
+ # for GData 2.0 is mature.
+ _attributes = gdata.BatchEntry._attributes.copy()
+ _attributes['{%s}etag' % gdata.GDATA_NAMESPACE] = 'etag'
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None, organization=None, phone_number=None,
+ nickname=None, occupation=None, gender=None, birthday=None,
+ postal_address=None, structured_postal_address=None, email=None,
+ im=None, relation=None, user_defined_field=None, website=None,
+ external_id=None, event=None, batch_operation=None,
+ batch_id=None, batch_status=None, text=None,
+ extension_elements=None, extension_attributes=None, etag=None):
+ gdata.BatchEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published,
+ batch_operation=batch_operation,
+ batch_id=batch_id, batch_status=batch_status,
+ title=title, updated=updated)
+ self.organization = organization or []
+ self.phone_number = phone_number or []
+ self.nickname = nickname
+ self.occupation = occupation
+ self.gender = gender
+ self.birthday = birthday
+ self.postal_address = postal_address or []
+ self.structured_postal_address = structured_postal_address or []
+ self.email = email or []
+ self.im = im or []
+ self.relation = relation or []
+ self.user_defined_field = user_defined_field or []
+ self.website = website or []
+ self.external_id = external_id or []
+ self.event = event or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ # The following line should be removed once the Python support
+ # for GData 2.0 is mature.
+ self.etag = etag
+
+
+class ContactEntry(PersonEntry):
+ """A Google Contact flavor of an Atom Entry."""
+
+ _children = PersonEntry._children.copy()
+
+ _children['{%s}deleted' % gdata.GDATA_NAMESPACE] = ('deleted', Deleted)
+ _children['{%s}groupMembershipInfo' % CONTACTS_NAMESPACE] = (
+ 'group_membership_info', [GroupMembershipInfo])
+ _children['{%s}extendedProperty' % gdata.GDATA_NAMESPACE] = (
+ 'extended_property', [gdata.ExtendedProperty])
+ # Overwrite the organization rule in PersonEntry so that a ContactEntry
+ # may only contain one element.
+ _children['{%s}organization' % gdata.GDATA_NAMESPACE] = (
+ 'organization', Organization)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None, organization=None, phone_number=None,
+ nickname=None, occupation=None, gender=None, birthday=None,
+ postal_address=None, structured_postal_address=None, email=None,
+ im=None, relation=None, user_defined_field=None, website=None,
+ external_id=None, event=None, batch_operation=None,
+ batch_id=None, batch_status=None, text=None,
+ extension_elements=None, extension_attributes=None, etag=None,
+ deleted=None, extended_property=None,
+ group_membership_info=None):
+ PersonEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title, updated=updated,
+ organization=organization, phone_number=phone_number,
+ nickname=nickname, occupation=occupation,
+ gender=gender, birthday=birthday,
+ postal_address=postal_address,
+ structured_postal_address=structured_postal_address,
+ email=email, im=im, relation=relation,
+ user_defined_field=user_defined_field,
+ website=website, external_id=external_id, event=event,
+ batch_operation=batch_operation, batch_id=batch_id,
+ batch_status=batch_status, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes, etag=etag)
+ self.deleted = deleted
+ self.extended_property = extended_property or []
+ self.group_membership_info = group_membership_info or []
+
+ def GetPhotoLink(self):
+ for a_link in self.link:
+ if a_link.rel == PHOTO_LINK_REL:
+ return a_link
+ return None
+
+ def GetPhotoEditLink(self):
+ for a_link in self.link:
+ if a_link.rel == PHOTO_EDIT_LINK_REL:
+ return a_link
+ return None
+
+
+def ContactEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(ContactEntry, xml_string)
+
+
+class ContactsFeed(gdata.BatchFeed, gdata.LinkFinder):
+ """A Google Contacts feed flavor of an Atom Feed."""
+
+ _children = gdata.BatchFeed._children.copy()
+
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ContactEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.BatchFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def ContactsFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(ContactsFeed, xml_string)
+
+
+class GroupEntry(gdata.BatchEntry):
+ """Represents a contact group."""
+ _children = gdata.BatchEntry._children.copy()
+ _children['{%s}extendedProperty' % gdata.GDATA_NAMESPACE] = (
+ 'extended_property', [gdata.ExtendedProperty])
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None,
+ rights=None, source=None, summary=None, control=None,
+ title=None, updated=None,
+ extended_property=None, batch_operation=None, batch_id=None,
+ batch_status=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ gdata.BatchEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ batch_operation=batch_operation,
+ batch_id=batch_id, batch_status=batch_status,
+ title=title, updated=updated)
+ self.extended_property = extended_property or []
+
+
+def GroupEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(GroupEntry, xml_string)
+
+
+class GroupsFeed(gdata.BatchFeed):
+ """A Google contact groups feed flavor of an Atom Feed."""
+ _children = gdata.BatchFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [GroupEntry])
+
+
+def GroupsFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(GroupsFeed, xml_string)
+
+
+class ProfileEntry(PersonEntry):
+ """A Google Profiles flavor of an Atom Entry."""
+
+
+def ProfileEntryFromString(xml_string):
+ """Converts an XML string into a ProfileEntry object.
+
+ Args:
+ xml_string: string The XML describing a Profile entry.
+
+ Returns:
+ A ProfileEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(ProfileEntry, xml_string)
+
+
+class ProfilesFeed(gdata.BatchFeed, gdata.LinkFinder):
+ """A Google Profiles feed flavor of an Atom Feed."""
+
+ _children = gdata.BatchFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ProfileEntry])
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.BatchFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+def ProfilesFeedFromString(xml_string):
+ """Converts an XML string into a ProfilesFeed object.
+
+ Args:
+ xml_string: string The XML describing a Profiles feed.
+
+ Returns:
+ A ProfilesFeed object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(ProfilesFeed, xml_string)
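
A short sketch of how the element classes defined above fit together when a
contact entry is assembled by hand (illustrative only: the name, e-mail
address and phone number are made-up sample values; the classes, constants and
the ToString()/ContactEntryFromString() helpers are the ones defined in this
module and in atom):

    import atom
    import gdata.contacts

    # Assemble a contact with a title, one work e-mail and one mobile number.
    new_contact = gdata.contacts.ContactEntry(
        title=atom.Title(text='Jane Doe'))
    new_contact.email.append(gdata.contacts.Email(
        address='jane.doe@example.com', rel=gdata.contacts.REL_WORK,
        primary='true'))
    new_contact.phone_number.append(gdata.contacts.PhoneNumber(
        rel=gdata.contacts.PHONE_MOBILE, text='+1 555 0100'))

    # Entries round-trip through XML via the atom helpers.
    xml = new_contact.ToString()
    parsed = gdata.contacts.ContactEntryFromString(xml)
    print parsed.email[0].address
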
diff --git a/python/gdata/contacts/client.py b/python/gdata/contacts/client.py
new file mode 100644
index 0000000..1ef7559
--- /dev/null
+++ b/python/gdata/contacts/client.py
@@ -0,0 +1,495 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Contains a client to communicate with the Contacts servers.
+
+For documentation on the Contacts API, see:
+http://code.google.com/apis/contacts/
+"""
+
+__author__ = 'vinces1979@gmail.com (Vince Spicer)'
+
+
+from types import ListType, DictionaryType
+
+
+import gdata.client
+import gdata.contacts.data
+import atom.data
+import atom.http_core
+import gdata.gauth
+
+
+class ContactsClient(gdata.client.GDClient):
+ api_version = '3'
+ auth_service = 'cp'
+ server = "www.google.com"
+ contact_list = "default"
+ auth_scopes = gdata.gauth.AUTH_SCOPES['cp']
+
+
+ def __init__(self, domain=None, auth_token=None, **kwargs):
+    """Constructs a new client for the Contacts API.
+
+ Args:
+ domain: string The Google Apps domain (if any).
+ kwargs: The other parameters to pass to the gdata.client.GDClient
+ constructor.
+ """
+ gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
+ self.domain = domain
+
+ def get_feed_uri(self, kind='contacts', contact_list=None, projection='full',
+ scheme="http"):
+ """Builds a feed URI.
+
+ Args:
+ kind: The type of feed to return, typically 'groups' or 'contacts'.
+ Default value: 'contacts'.
+ contact_list: The contact list to return a feed for.
+ Default value: self.contact_list.
+ projection: The projection to apply to the feed contents, for example
+ 'full', 'base', 'base/12345', 'full/batch'. Default value: 'full'.
+ scheme: The URL scheme such as 'http' or 'https', None to return a
+ relative URI without hostname.
+
+ Returns:
+ A feed URI using the given kind, contact list, and projection.
+ Example: '/m8/feeds/contacts/default/full'.
+ """
+ contact_list = contact_list or self.contact_list
+ if kind == 'profiles':
+ contact_list = 'domain/%s' % self.domain
+ prefix = scheme and '%s://%s' % (scheme, self.server) or ''
+ return '%s/m8/feeds/%s/%s/%s' % (prefix, kind, contact_list, projection)
+
+ GetFeedUri = get_feed_uri
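+  # For example, GetFeedUri() returns
+  # 'http://www.google.com/m8/feeds/contacts/default/full', while
+  # GetFeedUri('groups', scheme=None) returns the relative form
+  # '/m8/feeds/groups/default/full'.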
+
+ def get_contact(self, uri, desired_class=gdata.contacts.data.ContactEntry,
+ auth_token=None, **kwargs):
+ return self.get_feed(uri, auth_token=auth_token,
+ desired_class=desired_class, **kwargs)
+
+
+ GetContact = get_contact
+
+
+ def create_contact(self, new_contact, insert_uri=None, auth_token=None, **kwargs):
+    """Adds a new contact to Google Contacts.
+
+ Args:
+ new_contact: atom.Entry or subclass A new contact which is to be added to
+ Google Contacts.
+      insert_uri: string (optional) The URL of the feed the new contact is
+          posted to. Defaults to the contacts feed URI from GetFeedUri().
+      auth_token: (optional) An object which sets the Authorization HTTP
+          header in its modify_request method. Defaults to None, in which
+          case the client's stored auth_token is used.
+
+ Returns:
+ On successful insert, an entry containing the contact created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+ insert_uri = insert_uri or self.GetFeedUri()
+ return self.Post(new_contact, insert_uri,
+ auth_token=auth_token, **kwargs)
+
+ CreateContact = create_contact
+
+ def add_contact(self, new_contact, insert_uri=None, auth_token=None,
+ billing_information=None, birthday=None, calendar_link=None, **kwargs):
+    """Adds a new contact to Google Contacts.
+
+ Args:
+      new_contact: atom.Entry or subclass (optional) A prepared contact entry
+          to use as the base for the new contact; if None, a blank
+          ContactEntry is created and populated from the keyword arguments.
+      insert_uri: string (optional) The URL of the feed the new contact is
+          posted to. Defaults to the contacts feed URI from GetFeedUri().
+      auth_token: (optional) An object which sets the Authorization HTTP
+          header in its modify_request method.
+      billing_information: string or gdata.contacts.data.BillingInformation
+          (optional) Billing information to attach to the contact.
+      birthday: string ('YYYY-MM-DD') or gdata.contacts.data.Birthday
+          (optional) The contact's birthday.
+      calendar_link: dict, list of dicts, or gdata.contacts.data.CalendarLink
+          (optional) Calendar link(s) for the contact; dictionaries may use
+          the keys 'rel', 'label', 'primary' and 'href'.
+
+ Returns:
+ On successful insert, an entry containing the contact created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+
+    contact = new_contact or gdata.contacts.data.ContactEntry()
+
+ if billing_information is not None:
+ if not isinstance(billing_information, gdata.contacts.data.BillingInformation):
+ billing_information = gdata.contacts.data.BillingInformation(text=billing_information)
+
+ contact.billing_information = billing_information
+
+ if birthday is not None:
+ if not isinstance(birthday, gdata.contacts.data.Birthday):
+ birthday = gdata.contacts.data.Birthday(when=birthday)
+
+ contact.birthday = birthday
+
+ if calendar_link is not None:
+ if type(calendar_link) is not ListType:
+ calendar_link = [calendar_link]
+
+ for link in calendar_link:
+ if not isinstance(link, gdata.contacts.data.CalendarLink):
+ if type(link) is not DictionaryType:
+ raise TypeError, "calendar_link Requires dictionary not %s" % type(link)
+
+ link = gdata.contacts.data.CalendarLink(
+ rel=link.get("rel", None),
+ label=link.get("label", None),
+ primary=link.get("primary", None),
+ href=link.get("href", None),
+ )
+
+ contact.calendar_link.append(link)
+
+ insert_uri = insert_uri or self.GetFeedUri()
+ return self.Post(contact, insert_uri,
+ auth_token=auth_token, **kwargs)
+
+ AddContact = add_contact
+
+ def get_contacts(self, desired_class=gdata.contacts.data.ContactsFeed,
+ auth_token=None, **kwargs):
+ """Obtains a feed with the contacts belonging to the current user.
+
+ Args:
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+                  auth_token member of ContactsClient.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (desired_class=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+                     HTTP response object will be returned. Defaults to
+                     gdata.contacts.data.ContactsFeed.
+ """
+ return self.get_feed(self.GetFeedUri(), auth_token=auth_token,
+ desired_class=desired_class, **kwargs)
+
+ GetContacts = get_contacts
+
+ def get_group(self, uri=None, desired_class=gdata.contacts.data.GroupEntry,
+ auth_token=None, **kwargs):
+    """Gets the details of a single group.
+
+    Args:
+      uri: the group URI or id
+    """
+ return self.get(uri, desired_class=desired_class, auth_token=auth_token, **kwargs)
+
+ GetGroup = get_group
+
+ def get_groups(self, uri=None, desired_class=gdata.contacts.data.GroupsFeed,
+ auth_token=None, **kwargs):
+ uri = uri or self.GetFeedUri('groups')
+ return self.get_feed(uri, desired_class=desired_class, auth_token=auth_token, **kwargs)
+
+ GetGroups = get_groups
+
+ def create_group(self, new_group, insert_uri=None, url_params=None,
+ desired_class=None):
+ insert_uri = insert_uri or self.GetFeedUri('groups')
+ return self.Post(new_group, insert_uri, url_params=url_params,
+ desired_class=desired_class)
+
+ CreateGroup = create_group
+
+ def update_group(self, edit_uri, updated_group, url_params=None,
+ escape_params=True, desired_class=None):
+ return self.Put(updated_group, self._CleanUri(edit_uri),
+ url_params=url_params,
+ escape_params=escape_params,
+ desired_class=desired_class)
+
+ UpdateGroup = update_group
+
+ def delete_group(self, group_object, auth_token=None, force=False, **kws):
+ return self.Delete(group_object, auth_token=auth_token, force=force, **kws )
+
+ DeleteGroup = delete_group
+
+ def change_photo(self, media, contact_entry_or_url, content_type=None,
+ content_length=None):
+ """Change the photo for the contact by uploading a new photo.
+
+ Performs a PUT against the photo edit URL to send the binary data for the
+ photo.
+
+ Args:
+ media: filename, file-like-object, or a gdata.MediaSource object to send.
+ contact_entry_or_url: ContactEntry or str If it is a ContactEntry, this
+ method will search for an edit photo link URL and
+ perform a PUT to the URL.
+ content_type: str (optional) the mime type for the photo data. This is
+ necessary if media is a file or file name, but if media
+ is a MediaSource object then the media object can contain
+ the mime type. If media_type is set, it will override the
+ mime type in the media object.
+ content_length: int or str (optional) Specifying the content length is
+ only required if media is a file-like object. If media
+ is a filename, the length is determined using
+ os.path.getsize. If media is a MediaSource object, it is
+ assumed that it already contains the content length.
+ """
+ if isinstance(contact_entry_or_url, gdata.contacts.data.ContactEntry):
+ url = contact_entry_or_url.GetPhotoEditLink().href
+ else:
+ url = contact_entry_or_url
+ if isinstance(media, gdata.MediaSource):
+ payload = media
+ # If the media object is a file-like object, then use it as the file
+ # handle in the in the MediaSource.
+ elif hasattr(media, 'read'):
+ payload = gdata.MediaSource(file_handle=media,
+ content_type=content_type, content_length=content_length)
+ # Assume that the media object is a file name.
+ else:
+ payload = gdata.MediaSource(content_type=content_type,
+ content_length=content_length, file_path=media)
+ return self.Put(payload, url)
+
+ ChangePhoto = change_photo
+
+ def get_photo(self, contact_entry_or_url):
+    """Retrieves the binary data for the contact's profile photo as a string.
+
+ Args:
+      contact_entry_or_url: a gdata.contacts.data.ContactEntry object or a string
+ containing the photo link's URL. If the contact entry does not
+ contain a photo link, the image will not be fetched and this method
+ will return None.
+ """
+ # TODO: add the ability to write out the binary image data to a file,
+ # reading and writing a chunk at a time to avoid potentially using up
+ # large amounts of memory.
+ url = None
+ if isinstance(contact_entry_or_url, gdata.contacts.data.ContactEntry):
+ photo_link = contact_entry_or_url.GetPhotoLink()
+ if photo_link:
+ url = photo_link.href
+ else:
+ url = contact_entry_or_url
+ if url:
+ return self.Get(url).read()
+ else:
+ return None
+
+ GetPhoto = get_photo
+
+ def delete_photo(self, contact_entry_or_url):
+ url = None
+ if isinstance(contact_entry_or_url, gdata.contacts.data.ContactEntry):
+ url = contact_entry_or_url.GetPhotoEditLink().href
+ else:
+ url = contact_entry_or_url
+ if url:
+ self.Delete(url)
+
+ DeletePhoto = delete_photo
+
+ def get_profiles_feed(self, uri=None):
+    """Retrieves a feed containing all of the domain's profiles.
+
+ Args:
+ uri: string (optional) the URL to retrieve the profiles feed,
+ for example /m8/feeds/profiles/default/full
+
+ Returns:
+ On success, a ProfilesFeed containing the profiles.
+ On failure, raises a RequestError.
+ """
+
+ uri = uri or self.GetFeedUri('profiles')
+ return self.Get(uri,
+ desired_class=gdata.contacts.data.ProfilesFeed)
+
+ GetProfilesFeed = get_profiles_feed
+
+ def get_profile(self, uri):
+ """Retrieves a domain's profile for the user.
+
+ Args:
+ uri: string the URL to retrieve the profiles feed,
+ for example /m8/feeds/profiles/default/full/username
+
+ Returns:
+ On success, a ProfileEntry containing the profile for the user.
+ On failure, raises a RequestError
+ """
+ return self.Get(uri,
+ desired_class=gdata.contacts.data.ProfileEntry)
+
+ GetProfile = get_profile
+
+ def update_profile(self, updated_profile, auth_token=None, force=False, **kwargs):
+ """Updates an existing profile.
+
+ Args:
+ updated_profile: atom.Entry or subclass containing
+ the Atom Entry which will replace the profile which is
+ stored at the edit_url.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of ContactsClient.
+ force: boolean stating whether an update should be forced. Defaults to
+ False. Normally, if a change has been made since the passed in
+ entry was obtained, the server will not overwrite the entry since
+ the changes were based on an obsolete version of the entry.
+ Setting force to True will cause the update to silently
+ overwrite whatever version is present.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful update, a httplib.HTTPResponse containing the server's
+ response to the PUT request.
+ On failure, raises a RequestError.
+ """
+ return self.Update(updated_profile, auth_token=auth_token, force=force, **kwargs)
+
+ UpdateProfile = update_profile
+
+ def execute_batch(self, batch_feed, url, desired_class=None):
+ """Sends a batch request feed to the server.
+
+ Args:
+      batch_feed: gdata.contacts.data.ContactsFeed A feed containing batch
+ request entries. Each entry contains the operation to be performed
+ on the data contained in the entry. For example an entry with an
+ operation type of insert will be used as if the individual entry
+ had been inserted.
+ url: str The batch URL to which these operations should be applied.
+      desired_class: class (optional) The class to which the server's
+          response should be converted. Defaults to None.
+
+ Returns:
+ The results of the batch request's execution on the server. If the
+ default converter is used, this is stored in a ContactsFeed.
+ """
+ return self.Post(batch_feed, url, desired_class=desired_class)
+
+ ExecuteBatch = execute_batch
+
+ def execute_batch_profiles(self, batch_feed, url,
+ desired_class=gdata.contacts.data.ProfilesFeed):
+ """Sends a batch request feed to the server.
+
+ Args:
+      batch_feed: gdata.contacts.data.ProfilesFeed A feed containing batch
+ request entries. Each entry contains the operation to be performed
+ on the data contained in the entry. For example an entry with an
+ operation type of insert will be used as if the individual entry
+ had been inserted.
+ url: string The batch URL to which these operations should be applied.
+      desired_class: class (optional) The class to which the server's
+          response should be converted. The default value is
+          gdata.contacts.data.ProfilesFeed.
+
+ Returns:
+ The results of the batch request's execution on the server. If the
+ default converter is used, this is stored in a ProfilesFeed.
+ """
+ return self.Post(batch_feed, url, desired_class=desired_class)
+
+ ExecuteBatchProfiles = execute_batch_profiles
+
+ def _CleanUri(self, uri):
+ """Sanitizes a feed URI.
+
+ Args:
+ uri: The URI to sanitize, can be relative or absolute.
+
+ Returns:
+ The given URI without its http://server prefix, if any.
+ Keeps the leading slash of the URI.
+ """
+ url_prefix = 'http://%s' % self.server
+ if uri.startswith(url_prefix):
+ uri = uri[len(url_prefix):]
+ return uri
+
+class ContactsQuery(gdata.client.Query):
+  """Creates a custom Contacts query.
+
+  The full list of supported parameters is described in the Contacts
+  query parameters reference.
+  """
+
+ def __init__(self, feed=None, group=None, orderby=None, showdeleted=None,
+ sortorder=None, requirealldeleted=None, **kwargs):
+ """
+ @param max_results: The maximum number of entries to return. If you want
+ to receive all of the contacts, rather than only the default maximum, you
+ can specify a very large number for max-results.
+ @param start-index: The 1-based index of the first result to be retrieved.
+ @param updated-min: The lower bound on entry update dates.
+ @param group: Constrains the results to only the contacts belonging to the
+ group specified. Value of this parameter specifies group ID
+ @param orderby: Sorting criterion. The only supported value is
+ lastmodified.
+ @param showdeleted: Include deleted contacts in the returned contacts feed
+    @param sortorder: Sorting order direction. Can be either ascending or
+ descending.
+ @param requirealldeleted: Only relevant if showdeleted and updated-min
+ are also provided. It dictates the behavior of the server in case it
+ detects that placeholders of some entries deleted since the point in
+ time specified as updated-min may have been lost.
+ """
+ gdata.client.Query.__init__(self, **kwargs)
+ self.group = group
+ self.orderby = orderby
+ self.sortorder = sortorder
+ self.showdeleted = showdeleted
+
+ def modify_request(self, http_request):
+ if self.group:
+ gdata.client._add_query_param('group', self.group, http_request)
+ if self.orderby:
+ gdata.client._add_query_param('orderby', self.orderby, http_request)
+ if self.sortorder:
+ gdata.client._add_query_param('sortorder', self.sortorder, http_request)
+ if self.showdeleted:
+ gdata.client._add_query_param('showdeleted', self.showdeleted, http_request)
+ gdata.client.Query.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
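+  # For example, ContactsQuery(orderby='lastmodified', showdeleted='true')
+  # causes modify_request() to append the corresponding 'orderby' and
+  # 'showdeleted' query parameters to the request URI.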
+
+
+class ProfilesQuery(gdata.client.Query):
+  def __init__(self, feed=None, **kwargs):
+    gdata.client.Query.__init__(self, **kwargs)
+    self.feed = feed or 'http://www.google.com/m8/feeds/profiles/default/full'
+
+
+
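
A rough end-to-end sketch for the client above (illustrative only: the e-mail
address, password, application name, birthday and calendar URL are
placeholders, and ClientLogin is the generic gdata.client login helper rather
than anything specific to this module):

    import gdata.contacts.client

    client = gdata.contacts.client.ContactsClient(source='example-app')
    client.ClientLogin('user@example.com', 'password', 'example-app')

    # List the authenticated user's contacts.
    feed = client.GetContacts()
    for entry in feed.entry:
        print entry.title.text

    # Create a contact, letting add_contact() build the entry from its
    # convenience keyword arguments.
    created = client.add_contact(
        None,
        birthday='1985-04-12',
        calendar_link={'rel': 'work', 'href': 'http://example.com/cal'})
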
diff --git a/python/gdata/contacts/data.py b/python/gdata/contacts/data.py
new file mode 100644
index 0000000..782a05d
--- /dev/null
+++ b/python/gdata/contacts/data.py
@@ -0,0 +1,474 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model classes for parsing and generating XML for the Contacts API."""
+
+
+__author__ = 'vinces1979@gmail.com (Vince Spicer)'
+
+
+import atom.core
+import gdata
+import gdata.data
+
+
+PHOTO_LINK_REL = 'http://schemas.google.com/contacts/2008/rel#photo'
+PHOTO_EDIT_LINK_REL = 'http://schemas.google.com/contacts/2008/rel#edit-photo'
+
+EXTERNAL_ID_ORGANIZATION = 'organization'
+
+RELATION_MANAGER = 'manager'
+
+CONTACTS_NAMESPACE = 'http://schemas.google.com/contact/2008'
+CONTACTS_TEMPLATE = '{%s}%%s' % CONTACTS_NAMESPACE
+
+
+class BillingInformation(atom.core.XmlElement):
+ """
+ gContact:billingInformation
+ Specifies billing information of the entity represented by the contact. The element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'billingInformation'
+
+
+class Birthday(atom.core.XmlElement):
+ """
+ Stores birthday date of the person represented by the contact. The element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'birthday'
+ when = 'when'
+
+
+class CalendarLink(atom.core.XmlElement):
+ """
+ Storage for URL of the contact's calendar. The element can be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'calendarLink'
+ rel = 'rel'
+ label = 'label'
+ primary = 'primary'
+ href = 'href'
+
+
+class DirectoryServer(atom.core.XmlElement):
+ """
+ A directory server associated with this contact.
+ May not be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'directoryServer'
+
+
+class Event(atom.core.XmlElement):
+ """
+ These elements describe events associated with a contact.
+ They may be repeated
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'event'
+ label = 'label'
+ rel = 'rel'
+ when = gdata.data.When
+
+
+class ExternalId(atom.core.XmlElement):
+ """
+ Describes an ID of the contact in an external system of some kind.
+ This element may be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'externalId'
+
+
+def ExternalIdFromString(xml_string):
+ return atom.core.parse(ExternalId, xml_string)
+
+
+class Gender(atom.core.XmlElement):
+ """
+ Specifies the gender of the person represented by the contact.
+ The element cannot be repeated.
+ """
+
+  _qname = CONTACTS_TEMPLATE % 'gender'
+ value = 'value'
+
+
+class Hobby(atom.core.XmlElement):
+ """
+  Describes a hobby or interest of the person represented by the contact.
+ This element may be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'hobby'
+
+
+class Initials(atom.core.XmlElement):
+ """ Specifies the initials of the person represented by the contact. The
+ element cannot be repeated. """
+
+ _qname = CONTACTS_TEMPLATE % 'initials'
+
+
+class Jot(atom.core.XmlElement):
+ """
+ Storage for arbitrary pieces of information about the contact. Each jot
+ has a type specified by the rel attribute and a text value.
+ The element can be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'jot'
+ rel = 'rel'
+
+
+class Language(atom.core.XmlElement):
+ """
+ Specifies the preferred languages of the contact.
+ The element can be repeated.
+
+ The language must be specified using one of two mutually exclusive methods:
+ using the freeform @label attribute, or using the @code attribute, whose value
+ must conform to the IETF BCP 47 specification.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'language'
+ code = 'code'
+ label = 'label'
+
+
+class MaidenName(atom.core.XmlElement):
+ """
+ Specifies maiden name of the person represented by the contact.
+ The element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'maidenName'
+
+
+class Mileage(atom.core.XmlElement):
+ """
+ Specifies the mileage for the entity represented by the contact.
+ Can be used for example to document distance needed for reimbursement
+ purposes. The value is not interpreted. The element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'mileage'
+
+
+class NickName(atom.core.XmlElement):
+ """
+ Specifies the nickname of the person represented by the contact.
+ The element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'nickname'
+
+
+class Occupation(atom.core.XmlElement):
+ """
+ Specifies the occupation/profession of the person specified by the contact.
+ The element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'occupation'
+
+
+class Priority(atom.core.XmlElement):
+ """
+ Classifies importance of the contact into 3 categories:
+ * Low
+ * Normal
+ * High
+
+ The priority element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'priority'
+
+
+class Relation(atom.core.XmlElement):
+ """
+  This element describes another entity (usually a person) that is in a
+ relation of some kind with the contact.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'relation'
+ rel = 'rel'
+ label = 'label'
+
+
+class Sensitivity(atom.core.XmlElement):
+ """
+ Classifies sensitivity of the contact into the following categories:
+ * Confidential
+ * Normal
+ * Personal
+ * Private
+
+ The sensitivity element cannot be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'sensitivity'
+ rel = 'rel'
+
+
+class UserDefinedField(atom.core.XmlElement):
+ """
+ Represents an arbitrary key-value pair attached to the contact.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'userDefinedField'
+ key = 'key'
+ value = 'value'
+
+
+def UserDefinedFieldFromString(xml_string):
+ return atom.core.parse(UserDefinedField, xml_string)
+
+
+class Website(atom.core.XmlElement):
+ """
+ Describes websites associated with the contact, including links.
+ May be repeated.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'website'
+
+ href = 'href'
+ label = 'label'
+ primary = 'primary'
+ rel = 'rel'
+
+
+def WebsiteFromString(xml_string):
+ return atom.core.parse(Website, xml_string)
+
+
+class HouseName(atom.core.XmlElement):
+ """
+ Used in places where houses or buildings have names (and
+ not necessarily numbers), eg. "The Pillars".
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'housename'
+
+
+class Street(atom.core.XmlElement):
+ """
+ Can be street, avenue, road, etc. This element also includes the house
+ number and room/apartment/flat/floor number.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'street'
+
+
+class POBox(atom.core.XmlElement):
+ """
+ Covers actual P.O. boxes, drawers, locked bags, etc. This is usually but not
+ always mutually exclusive with street
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'pobox'
+
+
+class Neighborhood(atom.core.XmlElement):
+ """
+ This is used to disambiguate a street address when a city contains more than
+ one street with the same name, or to specify a small place whose mail is
+ routed through a larger postal town. In China it could be a county or a
+ minor city.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'neighborhood'
+
+
+class City(atom.core.XmlElement):
+ """
+ Can be city, village, town, borough, etc. This is the postal town and not
+ necessarily the place of residence or place of business.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'city'
+
+
+class SubRegion(atom.core.XmlElement):
+ """
+ Handles administrative districts such as U.S. or U.K. counties that are not
+ used for mail addressing purposes. Subregion is not intended for
+ delivery addresses.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'subregion'
+
+
+class Region(atom.core.XmlElement):
+ """
+ A state, province, county (in Ireland), Land (in Germany),
+ departement (in France), etc.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'region'
+
+
+class PostalCode(atom.core.XmlElement):
+ """
+ Postal code. Usually country-wide, but sometimes specific to the
+ city (e.g. "2" in "Dublin 2, Ireland" addresses).
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'postcode'
+
+
+class Country(atom.core.XmlElement):
+ """ The name or code of the country. """
+
+ _qname = CONTACTS_TEMPLATE % 'country'
+
+
+class PersonEntry(gdata.data.BatchEntry):
+  """Represents a Google contact."""
+
+ billing_information = BillingInformation
+ birthday = Birthday
+ calendar_link = [CalendarLink]
+ directory_server = DirectoryServer
+ event = [Event]
+ external_id = [ExternalId]
+ gender = Gender
+ hobby = [Hobby]
+  initials = Initials
+  jot = [Jot]
+  language = [Language]
+ maiden_name = MaidenName
+ mileage = Mileage
+ nickname = NickName
+ occupation = Occupation
+ priority = Priority
+ relation = [Relation]
+ sensitivity = Sensitivity
+ user_defined_field = [UserDefinedField]
+ website = [Website]
+
+ name = gdata.data.Name
+ phone_number = [gdata.data.PhoneNumber]
+ organization = gdata.data.Organization
+ postal_address = [gdata.data.PostalAddress]
+ email = [gdata.data.Email]
+ im = [gdata.data.Im]
+ structured_postal_address = [gdata.data.StructuredPostalAddress]
+ extended_property = [gdata.data.ExtendedProperty]
+
+
+class Deleted(atom.core.XmlElement):
+ """If present, indicates that this contact has been deleted."""
+ _qname = gdata.GDATA_TEMPLATE % 'deleted'
+
+
+class GroupMembershipInfo(atom.core.XmlElement):
+ """
+ Identifies the group to which the contact belongs or belonged.
+ The group is referenced by its id.
+ """
+
+ _qname = CONTACTS_TEMPLATE % 'groupMembershipInfo'
+
+ href = 'href'
+ deleted = 'deleted'
+
+
+class ContactEntry(PersonEntry):
+ """A Google Contacts flavor of an Atom Entry."""
+
+ deleted = Deleted
+ group_membership_info = [GroupMembershipInfo]
+ organization = gdata.data.Organization
+
+ def GetPhotoLink(self):
+ for a_link in self.link:
+ if a_link.rel == PHOTO_LINK_REL:
+ return a_link
+ return None
+
+ def GetPhotoEditLink(self):
+ for a_link in self.link:
+ if a_link.rel == PHOTO_EDIT_LINK_REL:
+ return a_link
+ return None
+
+
+class ContactsFeed(gdata.data.BatchFeed):
+ """A collection of Contacts."""
+ entry = [ContactEntry]
+
+
+class SystemGroup(atom.core.XmlElement):
+ """The contacts systemGroup element.
+
+ When used within a contact group entry, indicates that the group in
+ question is one of the predefined system groups."""
+
+ _qname = CONTACTS_TEMPLATE % 'systemGroup'
+ id = 'id'
+
+
+class GroupEntry(gdata.data.BatchEntry):
+ """Represents a contact group."""
+ extended_property = [gdata.data.ExtendedProperty]
+ system_group = SystemGroup
+
+
+class GroupsFeed(gdata.data.BatchFeed):
+ """A Google contact groups feed flavor of an Atom Feed."""
+ entry = [GroupEntry]
+
+
+class ProfileEntry(PersonEntry):
+ """A Google Profiles flavor of an Atom Entry."""
+
+
+def ProfileEntryFromString(xml_string):
+ """Converts an XML string into a ProfileEntry object.
+
+ Args:
+ xml_string: string The XML describing a Profile entry.
+
+ Returns:
+ A ProfileEntry object corresponding to the given XML.
+ """
+ return atom.core.parse(ProfileEntry, xml_string)
+
+
+class ProfilesFeed(gdata.data.BatchFeed):
+ """A Google Profiles feed flavor of an Atom Feed."""
+ _qname = atom.data.ATOM_TEMPLATE % 'feed'
+ entry = [ProfileEntry]
+
+
+def ProfilesFeedFromString(xml_string):
+ """Converts an XML string into a ProfilesFeed object.
+
+ Args:
+ xml_string: string The XML describing a Profiles feed.
+
+ Returns:
+ A ProfilesFeed object corresponding to the given XML.
+ """
+ return atom.core.parse(ProfilesFeed, xml_string)
+
+
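
A small sketch of how these data-model classes combine (illustrative only: the
sample values are made up, and the gdata.data.GivenName/FamilyName helpers are
assumed from the core gdata.data module; everything else is defined above):

    import gdata.data
    import gdata.contacts.data

    contact = gdata.contacts.data.ContactEntry()
    contact.name = gdata.data.Name(
        given_name=gdata.data.GivenName(text='Jane'),
        family_name=gdata.data.FamilyName(text='Doe'))
    contact.nickname = gdata.contacts.data.NickName(text='JD')
    contact.birthday = gdata.contacts.data.Birthday(when='1985-04-12')
    contact.website.append(gdata.contacts.data.Website(
        href='http://example.com/blog', rel='blog'))

    # atom.core.XmlElement gives every element class XML serialization.
    print contact.to_string()
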
diff --git a/python/gdata/contacts/service.py b/python/gdata/contacts/service.py
new file mode 100644
index 0000000..4b017c0
--- /dev/null
+++ b/python/gdata/contacts/service.py
@@ -0,0 +1,427 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""ContactsService extends the GDataService for Google Contacts operations.
+
+ ContactsService: Provides methods to query feeds and manipulate items.
+ Extends GDataService.
+
+ DictionaryToParamList: Function which converts a dictionary into a list of
+ URL arguments (represented as strings). This is a
+ utility function used in CRUD operations.
+"""
+
+__author__ = 'dbrattli (Dag Brattli)'
+
+
+import gdata
+import gdata.calendar
+import gdata.service
+
+
+DEFAULT_BATCH_URL = ('http://www.google.com/m8/feeds/contacts/default/full'
+ '/batch')
+DEFAULT_PROFILES_BATCH_URL = ('http://www.google.com'
+ '/m8/feeds/profiles/default/full/batch')
+
+GDATA_VER_HEADER = 'GData-Version'
+
+
+class Error(Exception):
+ pass
+
+
+class RequestError(Error):
+ pass
+
+
+class ContactsService(gdata.service.GDataService):
+ """Client for the Google Contacts service."""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='www.google.com', additional_headers=None,
+ contact_list='default', **kwargs):
+ """Creates a client for the Contacts service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'www.google.com'.
+ contact_list: string (optional) The name of the default contact list to
+ use when no URI is specified to the methods of the service.
+ Default value: 'default' (the logged in user's contact list).
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+
+ self.contact_list = contact_list
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='cp', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+
+ def GetFeedUri(self, kind='contacts', contact_list=None, projection='full',
+ scheme=None):
+ """Builds a feed URI.
+
+ Args:
+ kind: The type of feed to return, typically 'groups' or 'contacts'.
+ Default value: 'contacts'.
+ contact_list: The contact list to return a feed for.
+ Default value: self.contact_list.
+ projection: The projection to apply to the feed contents, for example
+ 'full', 'base', 'base/12345', 'full/batch'. Default value: 'full'.
+ scheme: The URL scheme such as 'http' or 'https', None to return a
+ relative URI without hostname.
+
+ Returns:
+ A feed URI using the given kind, contact list, and projection.
+ Example: '/m8/feeds/contacts/default/full'.
+ """
+ contact_list = contact_list or self.contact_list
+ if kind == 'profiles':
+ contact_list = 'domain/%s' % contact_list
+ prefix = scheme and '%s://%s' % (scheme, self.server) or ''
+ return '%s/m8/feeds/%s/%s/%s' % (prefix, kind, contact_list, projection)
+
+ def GetContactsFeed(self, uri=None):
+ uri = uri or self.GetFeedUri()
+ return self.Get(uri, converter=gdata.contacts.ContactsFeedFromString)
+
+ def GetContact(self, uri):
+ return self.Get(uri, converter=gdata.contacts.ContactEntryFromString)
+
+ def CreateContact(self, new_contact, insert_uri=None, url_params=None,
+ escape_params=True):
+ """Adds an new contact to Google Contacts.
+
+ Args:
+ new_contact: atom.Entry or subclass A new contact which is to be added to
+ Google Contacts.
+      insert_uri: string (optional) The URL of the feed to which the new
+        contact will be POSTed. Defaults to the URI from GetFeedUri().
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful insert, an entry containing the contact created
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+ insert_uri = insert_uri or self.GetFeedUri()
+ return self.Post(new_contact, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.contacts.ContactEntryFromString)
+
+ def UpdateContact(self, edit_uri, updated_contact, url_params=None,
+ escape_params=True):
+ """Updates an existing contact.
+
+ Args:
+ edit_uri: string The edit link URI for the element being updated
+ updated_contact: string, atom.Entry or subclass containing
+ the Atom Entry which will replace the contact which is
+ stored at the edit_url
+ url_params: dict (optional) Additional URL parameters to be included
+ in the update request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful update, a httplib.HTTPResponse containing the server's
+ response to the PUT request.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+ return self.Put(updated_contact, self._CleanUri(edit_uri),
+ url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.contacts.ContactEntryFromString)
+
+ def DeleteContact(self, edit_uri, extra_headers=None,
+ url_params=None, escape_params=True):
+ """Removes an contact with the specified ID from Google Contacts.
+
+ Args:
+ edit_uri: string The edit URL of the entry to be deleted. Example:
+ '/m8/feeds/contacts/default/full/xxx/yyy'
+ url_params: dict (optional) Additional URL parameters to be included
+ in the deletion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful delete, a httplib.HTTPResponse containing the server's
+ response to the DELETE request.
+ On failure, a RequestError is raised of the form:
+ {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response}
+ """
+ return self.Delete(self._CleanUri(edit_uri),
+ url_params=url_params, escape_params=escape_params)
+
+ def GetGroupsFeed(self, uri=None):
+ uri = uri or self.GetFeedUri('groups')
+ return self.Get(uri, converter=gdata.contacts.GroupsFeedFromString)
+
+ def CreateGroup(self, new_group, insert_uri=None, url_params=None,
+ escape_params=True):
+ insert_uri = insert_uri or self.GetFeedUri('groups')
+ return self.Post(new_group, insert_uri, url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.contacts.GroupEntryFromString)
+
+ def UpdateGroup(self, edit_uri, updated_group, url_params=None,
+ escape_params=True):
+ return self.Put(updated_group, self._CleanUri(edit_uri),
+ url_params=url_params,
+ escape_params=escape_params,
+ converter=gdata.contacts.GroupEntryFromString)
+
+ def DeleteGroup(self, edit_uri, extra_headers=None,
+ url_params=None, escape_params=True):
+ return self.Delete(self._CleanUri(edit_uri),
+ url_params=url_params, escape_params=escape_params)
+
+ def ChangePhoto(self, media, contact_entry_or_url, content_type=None,
+ content_length=None):
+ """Change the photo for the contact by uploading a new photo.
+
+ Performs a PUT against the photo edit URL to send the binary data for the
+ photo.
+
+ Args:
+ media: filename, file-like-object, or a gdata.MediaSource object to send.
+ contact_entry_or_url: ContactEntry or str If it is a ContactEntry, this
+ method will search for an edit photo link URL and
+ perform a PUT to the URL.
+ content_type: str (optional) the mime type for the photo data. This is
+ necessary if media is a file or file name, but if media
+ is a MediaSource object then the media object can contain
+                    the mime type. If content_type is set, it will override the
+ mime type in the media object.
+ content_length: int or str (optional) Specifying the content length is
+ only required if media is a file-like object. If media
+ is a filename, the length is determined using
+ os.path.getsize. If media is a MediaSource object, it is
+ assumed that it already contains the content length.
+ """
+ if isinstance(contact_entry_or_url, gdata.contacts.ContactEntry):
+ url = contact_entry_or_url.GetPhotoEditLink().href
+ else:
+ url = contact_entry_or_url
+ if isinstance(media, gdata.MediaSource):
+ payload = media
+ # If the media object is a file-like object, then use it as the file
+    # handle in the MediaSource.
+ elif hasattr(media, 'read'):
+ payload = gdata.MediaSource(file_handle=media,
+ content_type=content_type, content_length=content_length)
+ # Assume that the media object is a file name.
+ else:
+ payload = gdata.MediaSource(content_type=content_type,
+ content_length=content_length, file_path=media)
+ return self.Put(payload, url)
+
+ def GetPhoto(self, contact_entry_or_url):
+ """Retrives the binary data for the contact's profile photo as a string.
+
+ Args:
+ contact_entry_or_url: a gdata.contacts.ContactEntry objecr or a string
+ containing the photo link's URL. If the contact entry does not
+ contain a photo link, the image will not be fetched and this method
+ will return None.
+ """
+ # TODO: add the ability to write out the binary image data to a file,
+ # reading and writing a chunk at a time to avoid potentially using up
+ # large amounts of memory.
+ url = None
+ if isinstance(contact_entry_or_url, gdata.contacts.ContactEntry):
+ photo_link = contact_entry_or_url.GetPhotoLink()
+ if photo_link:
+ url = photo_link.href
+ else:
+ url = contact_entry_or_url
+ if url:
+ return self.Get(url, converter=str)
+ else:
+ return None
+
+ def DeletePhoto(self, contact_entry_or_url):
+ url = None
+ if isinstance(contact_entry_or_url, gdata.contacts.ContactEntry):
+ url = contact_entry_or_url.GetPhotoEditLink().href
+ else:
+ url = contact_entry_or_url
+ if url:
+ self.Delete(url)
+
+ def GetProfilesFeed(self, uri=None):
+ """Retrieves a feed containing all domain's profiles.
+
+ Args:
+ uri: string (optional) the URL to retrieve the profiles feed,
+ for example /m8/feeds/profiles/default/full
+
+ Returns:
+ On success, a ProfilesFeed containing the profiles.
+ On failure, raises a RequestError.
+ """
+
+ uri = uri or self.GetFeedUri('profiles')
+ return self.Get(uri,
+ converter=gdata.contacts.ProfilesFeedFromString)
+
+ def GetProfile(self, uri):
+ """Retrieves a domain's profile for the user.
+
+ Args:
+ uri: string the URL to retrieve the profiles feed,
+ for example /m8/feeds/profiles/default/full/username
+
+ Returns:
+ On success, a ProfileEntry containing the profile for the user.
+ On failure, raises a RequestError
+ """
+ return self.Get(uri,
+ converter=gdata.contacts.ProfileEntryFromString)
+
+ def UpdateProfile(self, edit_uri, updated_profile, url_params=None,
+ escape_params=True):
+ """Updates an existing profile.
+
+ Args:
+ edit_uri: string The edit link URI for the element being updated
+      updated_profile: string, atom.Entry or subclass containing
+ the Atom Entry which will replace the profile which is
+ stored at the edit_url.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the update request.
+ escape_params: boolean (optional) If true, the url_params will be
+ escaped before they are included in the request.
+
+ Returns:
+ On successful update, a httplib.HTTPResponse containing the server's
+ response to the PUT request.
+ On failure, raises a RequestError.
+ """
+ return self.Put(updated_profile, self._CleanUri(edit_uri),
+ url_params=url_params, escape_params=escape_params,
+ converter=gdata.contacts.ProfileEntryFromString)
+
+ def ExecuteBatch(self, batch_feed, url,
+ converter=gdata.contacts.ContactsFeedFromString):
+ """Sends a batch request feed to the server.
+
+ Args:
+ batch_feed: gdata.contacts.ContactFeed A feed containing batch
+ request entries. Each entry contains the operation to be performed
+ on the data contained in the entry. For example an entry with an
+ operation type of insert will be used as if the individual entry
+ had been inserted.
+ url: str The batch URL to which these operations should be applied.
+ converter: Function (optional) The function used to convert the server's
+ response to an object. The default value is ContactsFeedFromString.
+
+ Returns:
+ The results of the batch request's execution on the server. If the
+ default converter is used, this is stored in a ContactsFeed.
+ """
+ return self.Post(batch_feed, url, converter=converter)
+
+ def ExecuteBatchProfiles(self, batch_feed, url,
+ converter=gdata.contacts.ProfilesFeedFromString):
+ """Sends a batch request feed to the server.
+
+ Args:
+ batch_feed: gdata.profiles.ProfilesFeed A feed containing batch
+ request entries. Each entry contains the operation to be performed
+ on the data contained in the entry. For example an entry with an
+ operation type of insert will be used as if the individual entry
+ had been inserted.
+ url: string The batch URL to which these operations should be applied.
+ converter: Function (optional) The function used to convert the server's
+ response to an object. The default value is
+ gdata.profiles.ProfilesFeedFromString.
+
+ Returns:
+ The results of the batch request's execution on the server. If the
+ default converter is used, this is stored in a ProfilesFeed.
+ """
+ return self.Post(batch_feed, url, converter=converter)
+
+ def _CleanUri(self, uri):
+ """Sanitizes a feed URI.
+
+ Args:
+ uri: The URI to sanitize, can be relative or absolute.
+
+ Returns:
+ The given URI without its http://server prefix, if any.
+ Keeps the leading slash of the URI.
+ """
+ url_prefix = 'http://%s' % self.server
+ if uri.startswith(url_prefix):
+ uri = uri[len(url_prefix):]
+ return uri
+
+class ContactsQuery(gdata.service.Query):
+
+ def __init__(self, feed=None, text_query=None, params=None,
+ categories=None, group=None):
+ self.feed = feed or '/m8/feeds/contacts/default/full'
+ if group:
+ self._SetGroup(group)
+ gdata.service.Query.__init__(self, feed=self.feed, text_query=text_query,
+ params=params, categories=categories)
+
+ def _GetGroup(self):
+ if 'group' in self:
+ return self['group']
+ else:
+ return None
+
+ def _SetGroup(self, group_id):
+ self['group'] = group_id
+
+ group = property(_GetGroup, _SetGroup,
+ doc='The group query parameter to find only contacts in this group')
+
+class GroupsQuery(gdata.service.Query):
+
+ def __init__(self, feed=None, text_query=None, params=None,
+ categories=None):
+ self.feed = feed or '/m8/feeds/groups/default/full'
+ gdata.service.Query.__init__(self, feed=self.feed, text_query=text_query,
+ params=params, categories=categories)
+
+
+class ProfilesQuery(gdata.service.Query):
+ """Constructs a query object for the profiles feed."""
+
+ def __init__(self, feed=None, text_query=None, params=None,
+ categories=None):
+ self.feed = feed or '/m8/feeds/profiles/default/full'
+ gdata.service.Query.__init__(self, feed=self.feed, text_query=text_query,
+ params=params, categories=categories)
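+
+
+# ---------------------------------------------------------------------------
+# Editor's illustrative sketch (not part of the original module): a minimal
+# example of how the classes above fit together. It assumes ClientLogin
+# credentials and the inherited ProgrammaticLogin() and Query.ToUri() helpers
+# from gdata.service; the group URL is a hypothetical placeholder.
+#
+#   client = ContactsService(email='user@example.com', password='secret',
+#                            source='example-app')
+#   client.ProgrammaticLogin()
+#   feed = client.GetContactsFeed()        # '/m8/feeds/contacts/default/full'
+#   query = ContactsQuery(group='http://www.google.com/m8/feeds/groups/...')
+#   in_group = client.GetContactsFeed(query.ToUri())
+#   client.ChangePhoto('photo.jpg', feed.entry[0], content_type='image/jpeg')
+# ---------------------------------------------------------------------------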
diff --git a/python/gdata/core.py b/python/gdata/core.py
new file mode 100644
index 0000000..0661ec6
--- /dev/null
+++ b/python/gdata/core.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+"""Provides classes and methods for working with JSON-C.
+
+This module is experimental and subject to backwards incompatible changes.
+
+ Jsonc: Class which represents JSON-C data and provides pythonic member
+ access which is a bit cleaner than working with plain old dicts.
+ parse_json: Converts a JSON-C string into a Jsonc object.
+ jsonc_to_string: Converts a Jsonc object into a string of JSON-C.
+"""
+
+
+try:
+ import simplejson
+except ImportError:
+ try:
+ # Try to import from django, should work on App Engine
+ from django.utils import simplejson
+ except ImportError:
+ # Should work for Python2.6 and higher.
+ import json as simplejson
+
+
+def _convert_to_jsonc(x):
+ """Builds a Jsonc objects which wraps the argument's members."""
+
+ if isinstance(x, dict):
+ jsonc_obj = Jsonc()
+ # Recursively transform all members of the dict.
+ # When converting a dict, we do not convert _name items into private
+ # Jsonc members.
+ for key, value in x.iteritems():
+ jsonc_obj._dict[key] = _convert_to_jsonc(value)
+ return jsonc_obj
+ elif isinstance(x, list):
+ # Recursively transform all members of the list.
+ members = []
+ for item in x:
+ members.append(_convert_to_jsonc(item))
+ return members
+ else:
+ # Return the base object.
+ return x
+
+
+def parse_json(json_string):
+ """Converts a JSON-C string into a Jsonc object.
+
+ Args:
+ json_string: str or unicode The JSON to be parsed.
+
+ Returns:
+ A new Jsonc object.
+ """
+
+ return _convert_to_jsonc(simplejson.loads(json_string))
+
+
+def parse_json_file(json_file):
+ return _convert_to_jsonc(simplejson.load(json_file))
+
+
+def jsonc_to_string(jsonc_obj):
+ """Converts a Jsonc object into a string of JSON-C."""
+
+ return simplejson.dumps(_convert_to_object(jsonc_obj))
+
+
+def prettify_jsonc(jsonc_obj, indentation=2):
+ """Converts a Jsonc object to a pretified (intented) JSON string."""
+
+ return simplejson.dumps(_convert_to_object(jsonc_obj), indent=indentation)
+
+
+
+def _convert_to_object(jsonc_obj):
+ """Creates a new dict or list which has the data in the Jsonc object.
+
+ Used to convert the Jsonc object to a plain old Python object to simplify
+ conversion to a JSON-C string.
+
+ Args:
+ jsonc_obj: A Jsonc object to be converted into simple Python objects
+ (dicts, lists, etc.)
+
+ Returns:
+ Either a dict, list, or other object with members converted from Jsonc
+ objects to the corresponding simple Python object.
+ """
+
+ if isinstance(jsonc_obj, Jsonc):
+ plain = {}
+ for key, value in jsonc_obj._dict.iteritems():
+ plain[key] = _convert_to_object(value)
+ return plain
+ elif isinstance(jsonc_obj, list):
+ plain = []
+ for item in jsonc_obj:
+ plain.append(_convert_to_object(item))
+ return plain
+ else:
+ return jsonc_obj
+
+
+def _to_jsonc_name(member_name):
+ """Converts a Python style member name to a JSON-C style name.
+
+  JSON-C uses camelCase (with a lowercase first letter) while Python tends
+  to use lower_with_underscores, so this method converts as follows:
+
+ spam becomes spam
+ spam_and_eggs becomes spamAndEggs
+
+ Args:
+    member_name: str or unicode The Python style name which should be
+ converted to JSON-C style.
+
+ Returns:
+ The JSON-C style name as a str or unicode.
+ """
+
+ characters = []
+ uppercase_next = False
+ for character in member_name:
+ if character == '_':
+ uppercase_next = True
+ elif uppercase_next:
+ characters.append(character.upper())
+ uppercase_next = False
+ else:
+ characters.append(character)
+ return ''.join(characters)
+
+
+class Jsonc(object):
+ """Represents JSON-C data in an easy to access object format.
+
+ To access the members of a JSON structure which looks like this:
+ {
+ "data": {
+ "totalItems": 800,
+ "items": [
+ {
+ "content": {
+ "1": "rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp"
+ },
+ "viewCount": 220101,
+ "commentCount": 22,
+ "favoriteCount": 201
+ }
+ ]
+ },
+ "apiVersion": "2.0"
+ }
+
+ You would do the following:
+ x = gdata.core.parse_json(the_above_string)
+ # Gives you 800
+ x.data.total_items
+ # Should be 22
+ x.data.items[0].comment_count
+ # The apiVersion is '2.0'
+ x.api_version
+
+ To create a Jsonc object which would produce the above JSON, you would do:
+ gdata.core.Jsonc(
+ api_version='2.0',
+ data=gdata.core.Jsonc(
+ total_items=800,
+ items=[
+ gdata.core.Jsonc(
+ view_count=220101,
+ comment_count=22,
+ favorite_count=201,
+ content={
+ '1': ('rtsp://v5.cache3.c.youtube.com'
+ '/CiILENy.../0/0/0/video.3gp')})]))
+ or
+ x = gdata.core.Jsonc()
+ x.api_version = '2.0'
+ x.data = gdata.core.Jsonc()
+ x.data.total_items = 800
+ x.data.items = []
+ # etc.
+
+ How it works:
+ The JSON-C data is stored in an internal dictionary (._dict) and the
+ getattr, setattr, and delattr methods rewrite the name which you provide
+ to mirror the expected format in JSON-C. (For more details on name
+ conversion see _to_jsonc_name.) You may also access members using
+ getitem, setitem, delitem as you would for a dictionary. For example
+ x.data.total_items is equivalent to x['data']['totalItems']
+ (Not all dict methods are supported so if you need something other than
+ the item operations, then you will want to use the ._dict member).
+
+ You may need to use getitem or the _dict member to access certain
+ properties in cases where the JSON-C syntax does not map neatly to Python
+ objects. For example the YouTube Video feed has some JSON like this:
+ "content": {"1": "rtsp://v5.cache3.c.youtube.com..."...}
+ You cannot do x.content.1 in Python, so you would use the getitem as
+ follows:
+ x.content['1']
+ or you could use the _dict member as follows:
+ x.content._dict['1']
+
+  If you need to create a new object with such a mapping, you could use:
+
+ x.content = gdata.core.Jsonc(_dict={'1': 'rtsp://cache3.c.youtube.com...'})
+ """
+
+ def __init__(self, _dict=None, **kwargs):
+ json = _dict or {}
+ for key, value in kwargs.iteritems():
+ if key.startswith('_'):
+ object.__setattr__(self, key, value)
+ else:
+ json[_to_jsonc_name(key)] = _convert_to_jsonc(value)
+
+ object.__setattr__(self, '_dict', json)
+
+ def __setattr__(self, name, value):
+ if name.startswith('_'):
+ object.__setattr__(self, name, value)
+ else:
+ object.__getattribute__(
+ self, '_dict')[_to_jsonc_name(name)] = _convert_to_jsonc(value)
+
+ def __getattr__(self, name):
+ if name.startswith('_'):
+      return object.__getattribute__(self, name)
+ else:
+ try:
+ return object.__getattribute__(self, '_dict')[_to_jsonc_name(name)]
+ except KeyError:
+ raise AttributeError(
+ 'No member for %s or [\'%s\']' % (name, _to_jsonc_name(name)))
+
+
+ def __delattr__(self, name):
+ if name.startswith('_'):
+ object.__delattr__(self, name)
+ else:
+ try:
+ del object.__getattribute__(self, '_dict')[_to_jsonc_name(name)]
+ except KeyError:
+ raise AttributeError(
+ 'No member for %s (or [\'%s\'])' % (name, _to_jsonc_name(name)))
+
+ # For container methods pass-through to the underlying dict.
+ def __getitem__(self, key):
+ return self._dict[key]
+
+ def __setitem__(self, key, value):
+ self._dict[key] = value
+
+ def __delitem__(self, key):
+ del self._dict[key]
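+
+
+# ---------------------------------------------------------------------------
+# Editor's illustrative sketch (not part of the original module): shows the
+# camelCase <-> lower_with_underscores rewriting documented in the Jsonc
+# docstring above.
+#
+#   import gdata.core
+#   x = gdata.core.parse_json(
+#       '{"apiVersion": "2.0", "data": {"totalItems": 800}}')
+#   x.api_version          # -> '2.0'   (reads the 'apiVersion' key)
+#   x.data.total_items     # -> 800     (reads data['totalItems'])
+#   x.data.total_items = 801
+#   gdata.core.jsonc_to_string(x)
+#   # -> '{"data": {"totalItems": 801}, "apiVersion": "2.0"}' (key order varies)
+# ---------------------------------------------------------------------------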
diff --git a/python/gdata/data.py b/python/gdata/data.py
new file mode 100644
index 0000000..3f03d49
--- /dev/null
+++ b/python/gdata/data.py
@@ -0,0 +1,1186 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+"""Provides classes and constants for the XML in the Google Data namespace.
+
+Documentation for the raw XML which these classes represent can be found here:
+http://code.google.com/apis/gdata/docs/2.0/elements.html
+"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import os
+import atom.core
+import atom.data
+
+
+GDATA_TEMPLATE = '{http://schemas.google.com/g/2005}%s'
+GD_TEMPLATE = GDATA_TEMPLATE
+OPENSEARCH_TEMPLATE_V1 = '{http://a9.com/-/spec/opensearchrss/1.0/}%s'
+OPENSEARCH_TEMPLATE_V2 = '{http://a9.com/-/spec/opensearch/1.1/}%s'
+BATCH_TEMPLATE = '{http://schemas.google.com/gdata/batch}%s'
+
+
+# Labels used in batch request entries to specify the desired CRUD operation.
+BATCH_INSERT = 'insert'
+BATCH_UPDATE = 'update'
+BATCH_DELETE = 'delete'
+BATCH_QUERY = 'query'
+
+EVENT_LOCATION = 'http://schemas.google.com/g/2005#event'
+ALTERNATE_LOCATION = 'http://schemas.google.com/g/2005#event.alternate'
+PARKING_LOCATION = 'http://schemas.google.com/g/2005#event.parking'
+
+CANCELED_EVENT = 'http://schemas.google.com/g/2005#event.canceled'
+CONFIRMED_EVENT = 'http://schemas.google.com/g/2005#event.confirmed'
+TENTATIVE_EVENT = 'http://schemas.google.com/g/2005#event.tentative'
+
+CONFIDENTIAL_EVENT = 'http://schemas.google.com/g/2005#event.confidential'
+DEFAULT_EVENT = 'http://schemas.google.com/g/2005#event.default'
+PRIVATE_EVENT = 'http://schemas.google.com/g/2005#event.private'
+PUBLIC_EVENT = 'http://schemas.google.com/g/2005#event.public'
+
+OPAQUE_EVENT = 'http://schemas.google.com/g/2005#event.opaque'
+TRANSPARENT_EVENT = 'http://schemas.google.com/g/2005#event.transparent'
+
+CHAT_MESSAGE = 'http://schemas.google.com/g/2005#message.chat'
+INBOX_MESSAGE = 'http://schemas.google.com/g/2005#message.inbox'
+SENT_MESSAGE = 'http://schemas.google.com/g/2005#message.sent'
+SPAM_MESSAGE = 'http://schemas.google.com/g/2005#message.spam'
+STARRED_MESSAGE = 'http://schemas.google.com/g/2005#message.starred'
+UNREAD_MESSAGE = 'http://schemas.google.com/g/2005#message.unread'
+
+BCC_RECIPIENT = 'http://schemas.google.com/g/2005#message.bcc'
+CC_RECIPIENT = 'http://schemas.google.com/g/2005#message.cc'
+SENDER = 'http://schemas.google.com/g/2005#message.from'
+REPLY_TO = 'http://schemas.google.com/g/2005#message.reply-to'
+TO_RECIPIENT = 'http://schemas.google.com/g/2005#message.to'
+
+ASSISTANT_REL = 'http://schemas.google.com/g/2005#assistant'
+CALLBACK_REL = 'http://schemas.google.com/g/2005#callback'
+CAR_REL = 'http://schemas.google.com/g/2005#car'
+COMPANY_MAIN_REL = 'http://schemas.google.com/g/2005#company_main'
+FAX_REL = 'http://schemas.google.com/g/2005#fax'
+HOME_REL = 'http://schemas.google.com/g/2005#home'
+HOME_FAX_REL = 'http://schemas.google.com/g/2005#home_fax'
+ISDN_REL = 'http://schemas.google.com/g/2005#isdn'
+MAIN_REL = 'http://schemas.google.com/g/2005#main'
+MOBILE_REL = 'http://schemas.google.com/g/2005#mobile'
+OTHER_REL = 'http://schemas.google.com/g/2005#other'
+OTHER_FAX_REL = 'http://schemas.google.com/g/2005#other_fax'
+PAGER_REL = 'http://schemas.google.com/g/2005#pager'
+RADIO_REL = 'http://schemas.google.com/g/2005#radio'
+TELEX_REL = 'http://schemas.google.com/g/2005#telex'
+TTL_TDD_REL = 'http://schemas.google.com/g/2005#tty_tdd'
+WORK_REL = 'http://schemas.google.com/g/2005#work'
+WORK_FAX_REL = 'http://schemas.google.com/g/2005#work_fax'
+WORK_MOBILE_REL = 'http://schemas.google.com/g/2005#work_mobile'
+WORK_PAGER_REL = 'http://schemas.google.com/g/2005#work_pager'
+NETMEETING_REL = 'http://schemas.google.com/g/2005#netmeeting'
+OVERALL_REL = 'http://schemas.google.com/g/2005#overall'
+PRICE_REL = 'http://schemas.google.com/g/2005#price'
+QUALITY_REL = 'http://schemas.google.com/g/2005#quality'
+EVENT_REL = 'http://schemas.google.com/g/2005#event'
+EVENT_ALTERNATE_REL = 'http://schemas.google.com/g/2005#event.alternate'
+EVENT_PARKING_REL = 'http://schemas.google.com/g/2005#event.parking'
+
+AIM_PROTOCOL = 'http://schemas.google.com/g/2005#AIM'
+MSN_PROTOCOL = 'http://schemas.google.com/g/2005#MSN'
+YAHOO_MESSENGER_PROTOCOL = 'http://schemas.google.com/g/2005#YAHOO'
+SKYPE_PROTOCOL = 'http://schemas.google.com/g/2005#SKYPE'
+QQ_PROTOCOL = 'http://schemas.google.com/g/2005#QQ'
+GOOGLE_TALK_PROTOCOL = 'http://schemas.google.com/g/2005#GOOGLE_TALK'
+ICQ_PROTOCOL = 'http://schemas.google.com/g/2005#ICQ'
+JABBER_PROTOCOL = 'http://schemas.google.com/g/2005#JABBER'
+
+REGULAR_COMMENTS = 'http://schemas.google.com/g/2005#regular'
+REVIEW_COMMENTS = 'http://schemas.google.com/g/2005#reviews'
+
+MAIL_BOTH = 'http://schemas.google.com/g/2005#both'
+MAIL_LETTERS = 'http://schemas.google.com/g/2005#letters'
+MAIL_PARCELS = 'http://schemas.google.com/g/2005#parcels'
+MAIL_NEITHER = 'http://schemas.google.com/g/2005#neither'
+
+GENERAL_ADDRESS = 'http://schemas.google.com/g/2005#general'
+LOCAL_ADDRESS = 'http://schemas.google.com/g/2005#local'
+
+OPTIONAL_ATENDEE = 'http://schemas.google.com/g/2005#event.optional'
+REQUIRED_ATENDEE = 'http://schemas.google.com/g/2005#event.required'
+
+ATTENDEE_ACCEPTED = 'http://schemas.google.com/g/2005#event.accepted'
+ATTENDEE_DECLINED = 'http://schemas.google.com/g/2005#event.declined'
+ATTENDEE_INVITED = 'http://schemas.google.com/g/2005#event.invited'
+ATTENDEE_TENTATIVE = 'http://schemas.google.com/g/2005#event.tentative'
+
+FULL_PROJECTION = 'full'
+VALUES_PROJECTION = 'values'
+BASIC_PROJECTION = 'basic'
+
+PRIVATE_VISIBILITY = 'private'
+PUBLIC_VISIBILITY = 'public'
+
+ACL_REL = 'http://schemas.google.com/acl/2007#accessControlList'
+
+
+class Error(Exception):
+ pass
+
+
+class MissingRequiredParameters(Error):
+ pass
+
+
+class LinkFinder(atom.data.LinkFinder):
+ """Mixin used in Feed and Entry classes to simplify link lookups by type.
+
+ Provides lookup methods for edit, edit-media, post, ACL and other special
+ links which are common across Google Data APIs.
+ """
+
+ def find_html_link(self):
+ """Finds the first link with rel of alternate and type of text/html."""
+ for link in self.link:
+ if link.rel == 'alternate' and link.type == 'text/html':
+ return link.href
+ return None
+
+ FindHtmlLink = find_html_link
+
+ def get_html_link(self):
+ for a_link in self.link:
+ if a_link.rel == 'alternate' and a_link.type == 'text/html':
+ return a_link
+ return None
+
+ GetHtmlLink = get_html_link
+
+ def find_post_link(self):
+ """Get the URL to which new entries should be POSTed.
+
+ The POST target URL is used to insert new entries.
+
+ Returns:
+ A str for the URL in the link with a rel matching the POST type.
+ """
+ return self.find_url('http://schemas.google.com/g/2005#post')
+
+ FindPostLink = find_post_link
+
+ def get_post_link(self):
+ return self.get_link('http://schemas.google.com/g/2005#post')
+
+ GetPostLink = get_post_link
+
+ def find_acl_link(self):
+ acl_link = self.get_acl_link()
+ if acl_link:
+ return acl_link.href
+
+ return None
+
+ FindAclLink = find_acl_link
+
+ def get_acl_link(self):
+ """Searches for a link or feed_link (if present) with the rel for ACL."""
+
+ acl_link = self.get_link(ACL_REL)
+ if acl_link:
+ return acl_link
+ elif hasattr(self, 'feed_link'):
+ for a_feed_link in self.feed_link:
+ if a_feed_link.rel == ACL_REL:
+ return a_feed_link
+
+ return None
+
+ GetAclLink = get_acl_link
+
+ def find_feed_link(self):
+ return self.find_url('http://schemas.google.com/g/2005#feed')
+
+ FindFeedLink = find_feed_link
+
+ def get_feed_link(self):
+ return self.get_link('http://schemas.google.com/g/2005#feed')
+
+ GetFeedLink = get_feed_link
+
+ def find_previous_link(self):
+ return self.find_url('previous')
+
+ FindPreviousLink = find_previous_link
+
+ def get_previous_link(self):
+ return self.get_link('previous')
+
+ GetPreviousLink = get_previous_link
+
+
+class TotalResults(atom.core.XmlElement):
+ """opensearch:TotalResults for a GData feed."""
+ _qname = (OPENSEARCH_TEMPLATE_V1 % 'totalResults',
+ OPENSEARCH_TEMPLATE_V2 % 'totalResults')
+
+
+class StartIndex(atom.core.XmlElement):
+ """The opensearch:startIndex element in GData feed."""
+ _qname = (OPENSEARCH_TEMPLATE_V1 % 'startIndex',
+ OPENSEARCH_TEMPLATE_V2 % 'startIndex')
+
+
+class ItemsPerPage(atom.core.XmlElement):
+ """The opensearch:itemsPerPage element in GData feed."""
+ _qname = (OPENSEARCH_TEMPLATE_V1 % 'itemsPerPage',
+ OPENSEARCH_TEMPLATE_V2 % 'itemsPerPage')
+
+
+class ExtendedProperty(atom.core.XmlElement):
+ """The Google Data extendedProperty element.
+
+ Used to store arbitrary key-value information specific to your
+ application. The value can either be a text string stored as an XML
+ attribute (.value), or an XML node (XmlBlob) as a child element.
+
+ This element is used in the Google Calendar data API and the Google
+ Contacts data API.
+ """
+ _qname = GDATA_TEMPLATE % 'extendedProperty'
+ name = 'name'
+ value = 'value'
+
+ def get_xml_blob(self):
+ """Returns the XML blob as an atom.core.XmlElement.
+
+ Returns:
+ An XmlElement representing the blob's XML, or None if no
+ blob was set.
+ """
+ if self._other_elements:
+ return self._other_elements[0]
+ else:
+ return None
+
+ GetXmlBlob = get_xml_blob
+
+ def set_xml_blob(self, blob):
+ """Sets the contents of the extendedProperty to XML as a child node.
+
+ Since the extendedProperty is only allowed one child element as an XML
+ blob, setting the XML blob will erase any preexisting member elements
+ in this object.
+
+ Args:
+ blob: str or atom.core.XmlElement representing the XML blob stored in
+ the extendedProperty.
+ """
+ # Erase any existing extension_elements, clears the child nodes from the
+ # extendedProperty.
+ if isinstance(blob, atom.core.XmlElement):
+ self._other_elements = [blob]
+ else:
+ self._other_elements = [atom.core.parse(str(blob))]
+
+ SetXmlBlob = set_xml_blob
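+
+  # Editor's illustrative sketch (not part of the original module): storing
+  # and reading an application-specific XML blob, per set_xml_blob and
+  # get_xml_blob above. The element name and namespace are hypothetical.
+  #
+  #   prop = ExtendedProperty()
+  #   prop.name = 'my-app-state'
+  #   prop.set_xml_blob('<state xmlns="http://example.com/ns">42</state>')
+  #   blob = prop.get_xml_blob()   # atom.core.XmlElement, or None if unset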
+
+
+class GDEntry(atom.data.Entry, LinkFinder):
+ """Extends Atom Entry to provide data processing"""
+ etag = '{http://schemas.google.com/g/2005}etag'
+
+ def get_id(self):
+ if self.id is not None and self.id.text is not None:
+ return self.id.text.strip()
+ return None
+
+ GetId = get_id
+
+ def is_media(self):
+ if self.find_edit_media_link():
+ return True
+ return False
+
+ IsMedia = is_media
+
+ def find_media_link(self):
+ """Returns the URL to the media content, if the entry is a media entry.
+ Otherwise returns None.
+ """
+ if self.is_media():
+ return self.content.src
+ return None
+
+ FindMediaLink = find_media_link
+
+
+class GDFeed(atom.data.Feed, LinkFinder):
+ """A Feed from a GData service."""
+ etag = '{http://schemas.google.com/g/2005}etag'
+ total_results = TotalResults
+ start_index = StartIndex
+ items_per_page = ItemsPerPage
+ entry = [GDEntry]
+
+ def get_id(self):
+ if self.id is not None and self.id.text is not None:
+ return self.id.text.strip()
+ return None
+
+ GetId = get_id
+
+ def get_generator(self):
+ if self.generator and self.generator.text:
+ return self.generator.text.strip()
+ return None
+
+
+class BatchId(atom.core.XmlElement):
+ """Identifies a single operation in a batch request."""
+ _qname = BATCH_TEMPLATE % 'id'
+
+
+class BatchOperation(atom.core.XmlElement):
+ """The CRUD operation which this batch entry represents."""
+ _qname = BATCH_TEMPLATE % 'operation'
+ type = 'type'
+
+
+class BatchStatus(atom.core.XmlElement):
+ """The batch:status element present in a batch response entry.
+
+ A status element contains the code (HTTP response code) and
+ reason as elements. In a single request these fields would
+ be part of the HTTP response, but in a batch request each
+ Entry operation has a corresponding Entry in the response
+ feed which includes status information.
+
+ See http://code.google.com/apis/gdata/batch.html#Handling_Errors
+ """
+ _qname = BATCH_TEMPLATE % 'status'
+ code = 'code'
+ reason = 'reason'
+ content_type = 'content-type'
+
+
+class BatchEntry(GDEntry):
+ """An atom:entry for use in batch requests.
+
+ The BatchEntry contains additional members to specify the operation to be
+ performed on this entry and a batch ID so that the server can reference
+ individual operations in the response feed. For more information, see:
+ http://code.google.com/apis/gdata/batch.html
+ """
+ batch_operation = BatchOperation
+ batch_id = BatchId
+ batch_status = BatchStatus
+
+
+class BatchInterrupted(atom.core.XmlElement):
+ """The batch:interrupted element sent if batch request was interrupted.
+
+ Only appears in a feed if some of the batch entries could not be processed.
+ See: http://code.google.com/apis/gdata/batch.html#Handling_Errors
+ """
+ _qname = BATCH_TEMPLATE % 'interrupted'
+ reason = 'reason'
+ success = 'success'
+ failures = 'failures'
+ parsed = 'parsed'
+
+
+class BatchFeed(GDFeed):
+ """A feed containing a list of batch request entries."""
+ interrupted = BatchInterrupted
+ entry = [BatchEntry]
+
+ def add_batch_entry(self, entry=None, id_url_string=None,
+ batch_id_string=None, operation_string=None):
+ """Logic for populating members of a BatchEntry and adding to the feed.
+
+ If the entry is not a BatchEntry, it is converted to a BatchEntry so
+ that the batch specific members will be present.
+
+ The id_url_string can be used in place of an entry if the batch operation
+    applies to a URL. For example, query and delete operations require just
+    the URL of an entry; no body is sent in the HTTP request. If an
+ id_url_string is sent instead of an entry, a BatchEntry is created and
+ added to the feed.
+
+ This method also assigns the desired batch id to the entry so that it
+ can be referenced in the server's response. If the batch_id_string is
+ None, this method will assign a batch_id to be the index at which this
+ entry will be in the feed's entry list.
+
+ Args:
+ entry: BatchEntry, atom.data.Entry, or another Entry flavor (optional)
+ The entry which will be sent to the server as part of the batch
+ request. The item must have a valid atom id so that the server
+ knows which entry this request references.
+ id_url_string: str (optional) The URL of the entry to be acted on. You
+ can find this URL in the text member of the atom id for an entry.
+ If an entry is not sent, this id will be used to construct a new
+ BatchEntry which will be added to the request feed.
+ batch_id_string: str (optional) The batch ID to be used to reference
+ this batch operation in the results feed. If this parameter is None,
+ the current length of the feed's entry array will be used as a
+          count. Note that batch_ids should either always be specified or
+          never; mixing the two could result in duplicate batch ids.
+ operation_string: str (optional) The desired batch operation which will
+ set the batch_operation.type member of the entry. Options are
+ 'insert', 'update', 'delete', and 'query'
+
+ Raises:
+      MissingRequiredParameters: Raised if neither an id_url_string nor an
+        entry is provided in the request.
+
+ Returns:
+ The added entry.
+ """
+ if entry is None and id_url_string is None:
+ raise MissingRequiredParameters('supply either an entry or URL string')
+ if entry is None and id_url_string is not None:
+ entry = BatchEntry(id=atom.data.Id(text=id_url_string))
+ if batch_id_string is not None:
+ entry.batch_id = BatchId(text=batch_id_string)
+ elif entry.batch_id is None or entry.batch_id.text is None:
+ entry.batch_id = BatchId(text=str(len(self.entry)))
+ if operation_string is not None:
+ entry.batch_operation = BatchOperation(type=operation_string)
+ self.entry.append(entry)
+ return entry
+
+ AddBatchEntry = add_batch_entry
+
+ def add_insert(self, entry, batch_id_string=None):
+ """Add an insert request to the operations in this batch request feed.
+
+ If the entry doesn't yet have an operation or a batch id, these will
+ be set to the insert operation and a batch_id specified as a parameter.
+
+ Args:
+ entry: BatchEntry The entry which will be sent in the batch feed as an
+ insert request.
+ batch_id_string: str (optional) The batch ID to be used to reference
+ this batch operation in the results feed. If this parameter is None,
+ the current length of the feed's entry array will be used as a
+          count. Note that batch_ids should either always be specified or
+          never; mixing the two could result in duplicate batch ids.
+ """
+ self.add_batch_entry(entry=entry, batch_id_string=batch_id_string,
+ operation_string=BATCH_INSERT)
+
+ AddInsert = add_insert
+
+ def add_update(self, entry, batch_id_string=None):
+ """Add an update request to the list of batch operations in this feed.
+
+    Sets the operation type of the entry to update if it is not already set
+ and assigns the desired batch id to the entry so that it can be
+ referenced in the server's response.
+
+ Args:
+ entry: BatchEntry The entry which will be sent to the server as an
+ update (HTTP PUT) request. The item must have a valid atom id
+ so that the server knows which entry to replace.
+ batch_id_string: str (optional) The batch ID to be used to reference
+ this batch operation in the results feed. If this parameter is None,
+ the current length of the feed's entry array will be used as a
+ count. See also comments for AddInsert.
+ """
+ self.add_batch_entry(entry=entry, batch_id_string=batch_id_string,
+ operation_string=BATCH_UPDATE)
+
+ AddUpdate = add_update
+
+ def add_delete(self, url_string=None, entry=None, batch_id_string=None):
+ """Adds a delete request to the batch request feed.
+
+ This method takes either the url_string which is the atom id of the item
+ to be deleted, or the entry itself. The atom id of the entry must be
+ present so that the server knows which entry should be deleted.
+
+ Args:
+ url_string: str (optional) The URL of the entry to be deleted. You can
+ find this URL in the text member of the atom id for an entry.
+ entry: BatchEntry (optional) The entry to be deleted.
+ batch_id_string: str (optional)
+
+ Raises:
+ MissingRequiredParameters: Raised if neither a url_string nor an entry
+ are provided in the request.
+ """
+ self.add_batch_entry(entry=entry, id_url_string=url_string,
+ batch_id_string=batch_id_string, operation_string=BATCH_DELETE)
+
+ AddDelete = add_delete
+
+ def add_query(self, url_string=None, entry=None, batch_id_string=None):
+ """Adds a query request to the batch request feed.
+
+    This method takes either a url_string, which is the query URL whose
+    results will be added to the result feed, or an entry. The query URL will
+    be encapsulated in a BatchEntry, and you may pass in the BatchEntry
+    with a query URL instead of sending a url_string.
+
+ Args:
+ url_string: str (optional)
+ entry: BatchEntry (optional)
+ batch_id_string: str (optional)
+
+ Raises:
+ MissingRequiredParameters
+ """
+ self.add_batch_entry(entry=entry, id_url_string=url_string,
+ batch_id_string=batch_id_string, operation_string=BATCH_QUERY)
+
+ AddQuery = add_query
+
+ def find_batch_link(self):
+ return self.find_url('http://schemas.google.com/g/2005#batch')
+
+ FindBatchLink = find_batch_link
+
+
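+# Editor's illustrative sketch (not part of the original module): composing a
+# batch request feed with the helpers above. The entry URLs are hypothetical
+# placeholders, and atom.data.Title is assumed from the atom package imports.
+#
+#   feed = BatchFeed()
+#   feed.add_insert(BatchEntry(title=atom.data.Title(text='New item')))
+#   feed.add_delete(url_string='http://www.google.com/m8/feeds/.../entry-id')
+#   feed.add_query(url_string='http://www.google.com/m8/feeds/.../full')
+#   feed.entry[0].batch_operation.type   # -> 'insert'
+#   feed.entry[1].batch_id.text          # -> '1' (index used as the batch id)
+
+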
+class EntryLink(atom.core.XmlElement):
+ """The gd:entryLink element.
+
+  Represents a logically nested entry. For example, a <gd:who> element
+  representing a contact might have a nested entry from a contact feed.
+ """
+ _qname = GDATA_TEMPLATE % 'entryLink'
+ entry = GDEntry
+ rel = 'rel'
+ read_only = 'readOnly'
+ href = 'href'
+
+
+class FeedLink(atom.core.XmlElement):
+ """The gd:feedLink element.
+
+ Represents a logically nested feed. For example, a calendar feed might
+ have a nested feed representing all comments on entries.
+ """
+ _qname = GDATA_TEMPLATE % 'feedLink'
+ feed = GDFeed
+ rel = 'rel'
+ read_only = 'readOnly'
+ count_hint = 'countHint'
+ href = 'href'
+
+
+class AdditionalName(atom.core.XmlElement):
+ """The gd:additionalName element.
+
+ Specifies additional (eg. middle) name of the person.
+  Contains an attribute for the phonetic representation of the name.
+ """
+ _qname = GDATA_TEMPLATE % 'additionalName'
+ yomi = 'yomi'
+
+
+class Comments(atom.core.XmlElement):
+ """The gd:comments element.
+
+ Contains a comments feed for the enclosing entry (such as a calendar event).
+ """
+ _qname = GDATA_TEMPLATE % 'comments'
+ rel = 'rel'
+ feed_link = FeedLink
+
+
+class Country(atom.core.XmlElement):
+ """The gd:country element.
+
+ Country name along with optional country code. The country code is
+ given in accordance with ISO 3166-1 alpha-2:
+ http://www.iso.org/iso/iso-3166-1_decoding_table
+ """
+ _qname = GDATA_TEMPLATE % 'country'
+ code = 'code'
+
+
+class EmailImParent(atom.core.XmlElement):
+ address = 'address'
+ label = 'label'
+ rel = 'rel'
+ primary = 'primary'
+
+
+class Email(EmailImParent):
+ """The gd:email element.
+
+ An email address associated with the containing entity (which is
+ usually an entity representing a person or a location).
+ """
+ _qname = GDATA_TEMPLATE % 'email'
+ display_name = 'displayName'
+
+
+class FamilyName(atom.core.XmlElement):
+ """The gd:familyName element.
+
+ Specifies family name of the person, eg. "Smith".
+ """
+ _qname = GDATA_TEMPLATE % 'familyName'
+ yomi = 'yomi'
+
+
+class Im(EmailImParent):
+ """The gd:im element.
+
+ An instant messaging address associated with the containing entity.
+ """
+ _qname = GDATA_TEMPLATE % 'im'
+ protocol = 'protocol'
+
+
+class GivenName(atom.core.XmlElement):
+ """The gd:givenName element.
+
+ Specifies given name of the person, eg. "John".
+ """
+ _qname = GDATA_TEMPLATE % 'givenName'
+ yomi = 'yomi'
+
+
+class NamePrefix(atom.core.XmlElement):
+ """The gd:namePrefix element.
+
+ Honorific prefix, eg. 'Mr' or 'Mrs'.
+ """
+ _qname = GDATA_TEMPLATE % 'namePrefix'
+
+
+class NameSuffix(atom.core.XmlElement):
+ """The gd:nameSuffix element.
+
+ Honorific suffix, eg. 'san' or 'III'.
+ """
+ _qname = GDATA_TEMPLATE % 'nameSuffix'
+
+
+class FullName(atom.core.XmlElement):
+ """The gd:fullName element.
+
+ Unstructured representation of the name.
+ """
+ _qname = GDATA_TEMPLATE % 'fullName'
+
+
+class Name(atom.core.XmlElement):
+ """The gd:name element.
+
+ Allows storing person's name in a structured way. Consists of
+ given name, additional name, family name, prefix, suffix and full name.
+ """
+ _qname = GDATA_TEMPLATE % 'name'
+ given_name = GivenName
+ additional_name = AdditionalName
+ family_name = FamilyName
+ name_prefix = NamePrefix
+ name_suffix = NameSuffix
+ full_name = FullName
+
+
+class OrgDepartment(atom.core.XmlElement):
+ """The gd:orgDepartment element.
+
+ Describes a department within an organization. Must appear within a
+ gd:organization element.
+ """
+ _qname = GDATA_TEMPLATE % 'orgDepartment'
+
+
+class OrgJobDescription(atom.core.XmlElement):
+ """The gd:orgJobDescription element.
+
+ Describes a job within an organization. Must appear within a
+ gd:organization element.
+ """
+ _qname = GDATA_TEMPLATE % 'orgJobDescription'
+
+
+class OrgName(atom.core.XmlElement):
+ """The gd:orgName element.
+
+ The name of the organization. Must appear within a gd:organization
+ element.
+
+ Contains a Yomigana attribute (Japanese reading aid) for the
+ organization name.
+ """
+ _qname = GDATA_TEMPLATE % 'orgName'
+ yomi = 'yomi'
+
+
+class OrgSymbol(atom.core.XmlElement):
+ """The gd:orgSymbol element.
+
+ Provides a symbol of an organization. Must appear within a
+ gd:organization element.
+ """
+ _qname = GDATA_TEMPLATE % 'orgSymbol'
+
+
+class OrgTitle(atom.core.XmlElement):
+ """The gd:orgTitle element.
+
+ The title of a person within an organization. Must appear within a
+ gd:organization element.
+ """
+ _qname = GDATA_TEMPLATE % 'orgTitle'
+
+
+class Organization(atom.core.XmlElement):
+ """The gd:organization element.
+
+ An organization, typically associated with a contact.
+ """
+ _qname = GDATA_TEMPLATE % 'organization'
+ label = 'label'
+ primary = 'primary'
+ rel = 'rel'
+ department = OrgDepartment
+ job_description = OrgJobDescription
+ name = OrgName
+ symbol = OrgSymbol
+ title = OrgTitle
+
+
+class When(atom.core.XmlElement):
+ """The gd:when element.
+
+ Represents a period of time or an instant.
+ """
+ _qname = GDATA_TEMPLATE % 'when'
+ end = 'endTime'
+ start = 'startTime'
+ value = 'valueString'
+
+
+class OriginalEvent(atom.core.XmlElement):
+ """The gd:originalEvent element.
+
+ Equivalent to the Recurrence ID property specified in section 4.8.4.4
+ of RFC 2445. Appears in every instance of a recurring event, to identify
+ the original event.
+
+  Contains a <gd:when> element specifying the original start time of the
+ instance that has become an exception.
+ """
+ _qname = GDATA_TEMPLATE % 'originalEvent'
+ id = 'id'
+ href = 'href'
+ when = When
+
+
+class PhoneNumber(atom.core.XmlElement):
+ """The gd:phoneNumber element.
+
+ A phone number associated with the containing entity (which is usually
+ an entity representing a person or a location).
+ """
+ _qname = GDATA_TEMPLATE % 'phoneNumber'
+ label = 'label'
+ rel = 'rel'
+ uri = 'uri'
+ primary = 'primary'
+
+
+class PostalAddress(atom.core.XmlElement):
+ """The gd:postalAddress element."""
+ _qname = GDATA_TEMPLATE % 'postalAddress'
+ label = 'label'
+ rel = 'rel'
+ uri = 'uri'
+ primary = 'primary'
+
+
+class Rating(atom.core.XmlElement):
+ """The gd:rating element.
+
+ Represents a numeric rating of the enclosing entity, such as a
+ comment. Each rating supplies its own scale, although it may be
+ normalized by a service; for example, some services might convert all
+ ratings to a scale from 1 to 5.
+ """
+ _qname = GDATA_TEMPLATE % 'rating'
+ average = 'average'
+ max = 'max'
+ min = 'min'
+ num_raters = 'numRaters'
+ rel = 'rel'
+ value = 'value'
+
+
+class Recurrence(atom.core.XmlElement):
+ """The gd:recurrence element.
+
+ Represents the dates and times when a recurring event takes place.
+
+ The string that defines the recurrence consists of a set of properties,
+ each of which is defined in the iCalendar standard (RFC 2445).
+
+ Specifically, the string usually begins with a DTSTART property that
+ indicates the starting time of the first instance of the event, and
+ often a DTEND property or a DURATION property to indicate when the
+ first instance ends. Next come RRULE, RDATE, EXRULE, and/or EXDATE
+ properties, which collectively define a recurring event and its
+ exceptions (but see below). (See section 4.8.5 of RFC 2445 for more
+ information about these recurrence component properties.) Last comes a
+ VTIMEZONE component, providing detailed timezone rules for any timezone
+ ID mentioned in the preceding properties.
+
+ Google services like Google Calendar don't generally generate EXRULE
+ and EXDATE properties to represent exceptions to recurring events;
+  instead, they generate <gd:recurrenceException> elements. However,
+ Google services may include EXRULE and/or EXDATE properties anyway;
+ for example, users can import events and exceptions into Calendar, and
+ if those imported events contain EXRULE or EXDATE properties, then
+  Calendar will provide those properties when it sends a <gd:recurrence>
+  element.
+
+  Note that the use of <gd:recurrenceException> means that you can't be
+  sure just from examining a <gd:recurrence> element whether there are
+  any exceptions to the recurrence description. To ensure that you find
+  all exceptions, look for <gd:recurrenceException> elements in the feed,
+  and use their <gd:originalEvent> elements to match them up with the
+  recurring events they modify.
+ """
+ _qname = GDATA_TEMPLATE % 'recurrence'
+
+
+class RecurrenceException(atom.core.XmlElement):
+ """The gd:recurrenceException element.
+
+  Represents an event that's an exception to a recurring event; that is,
+ an instance of a recurring event in which one or more aspects of the
+ recurring event (such as attendance list, time, or location) have been
+ changed.
+
+  Contains a <gd:originalEvent> element that specifies the original
+ recurring event that this event is an exception to.
+
+ When you change an instance of a recurring event, that instance becomes
+ an exception. Depending on what change you made to it, the exception
+ behaves in either of two different ways when the original recurring
+ event is changed:
+
+ - If you add, change, or remove comments, attendees, or attendee
+ responses, then the exception remains tied to the original event, and
+ changes to the original event also change the exception.
+ - If you make any other changes to the exception (such as changing the
+ time or location) then the instance becomes "specialized," which means
+ that it's no longer as tightly tied to the original event. If you
+ change the original event, specialized exceptions don't change. But
+ see below.
+
+ For example, say you have a meeting every Tuesday and Thursday at
+ 2:00 p.m. If you change the attendance list for this Thursday's meeting
+ (but not for the regularly scheduled meeting), then it becomes an
+ exception. If you change the time for this Thursday's meeting (but not
+ for the regularly scheduled meeting), then it becomes specialized.
+
+ Regardless of whether an exception is specialized or not, if you do
+ something that deletes the instance that the exception was derived from,
+ then the exception is deleted. Note that changing the day or time of a
+ recurring event deletes all instances, and creates new ones.
+
+ For example, after you've specialized this Thursday's meeting, say you
+ change the recurring meeting to happen on Monday, Wednesday, and Friday.
+ That change deletes all of the recurring instances of the
+ Tuesday/Thursday meeting, including the specialized one.
+
+ If a particular instance of a recurring event is deleted, then that
+  instance appears as a <gd:recurrenceException> containing a <gd:entryLink>
+  that has its <gd:eventStatus> set to
+ "http://schemas.google.com/g/2005#event.canceled". (For more
+ information about canceled events, see RFC 2445.)
+ """
+ _qname = GDATA_TEMPLATE % 'recurrenceException'
+ specialized = 'specialized'
+ entry_link = EntryLink
+ original_event = OriginalEvent
+
+
+class Reminder(atom.core.XmlElement):
+ """The gd:reminder element.
+
+ A time interval, indicating how long before the containing entity's start
+ time or due time attribute a reminder should be issued. Alternatively,
+ may specify an absolute time at which a reminder should be issued. Also
+ specifies a notification method, indicating what medium the system
+ should use to remind the user.
+ """
+ _qname = GDATA_TEMPLATE % 'reminder'
+ absolute_time = 'absoluteTime'
+ method = 'method'
+ days = 'days'
+ hours = 'hours'
+ minutes = 'minutes'
+
+
+class Agent(atom.core.XmlElement):
+ """The gd:agent element.
+
+ The agent who actually receives the mail. Used in work addresses.
+ Also for 'in care of' or 'c/o'.
+ """
+ _qname = GDATA_TEMPLATE % 'agent'
+
+
+class HouseName(atom.core.XmlElement):
+ """The gd:housename element.
+
+ Used in places where houses or buildings have names (and not
+ necessarily numbers), eg. "The Pillars".
+ """
+ _qname = GDATA_TEMPLATE % 'housename'
+
+
+class Street(atom.core.XmlElement):
+ """The gd:street element.
+
+ Can be street, avenue, road, etc. This element also includes the
+ house number and room/apartment/flat/floor number.
+ """
+ _qname = GDATA_TEMPLATE % 'street'
+
+
+class PoBox(atom.core.XmlElement):
+ """The gd:pobox element.
+
+ Covers actual P.O. boxes, drawers, locked bags, etc. This is usually
+ but not always mutually exclusive with street.
+ """
+ _qname = GDATA_TEMPLATE % 'pobox'
+
+
+class Neighborhood(atom.core.XmlElement):
+ """The gd:neighborhood element.
+
+ This is used to disambiguate a street address when a city contains more
+ than one street with the same name, or to specify a small place whose
+ mail is routed through a larger postal town. In China it could be a
+ county or a minor city.
+ """
+ _qname = GDATA_TEMPLATE % 'neighborhood'
+
+
+class City(atom.core.XmlElement):
+ """The gd:city element.
+
+ Can be city, village, town, borough, etc. This is the postal town and
+ not necessarily the place of residence or place of business.
+ """
+ _qname = GDATA_TEMPLATE % 'city'
+
+
+class Subregion(atom.core.XmlElement):
+ """The gd:subregion element.
+
+ Handles administrative districts such as U.S. or U.K. counties that are
+ not used for mail addressing purposes. Subregion is not intended for
+ delivery addresses.
+ """
+ _qname = GDATA_TEMPLATE % 'subregion'
+
+
+class Region(atom.core.XmlElement):
+ """The gd:region element.
+
+ A state, province, county (in Ireland), Land (in Germany),
+ departement (in France), etc.
+ """
+ _qname = GDATA_TEMPLATE % 'region'
+
+
+class Postcode(atom.core.XmlElement):
+ """The gd:postcode element.
+
+ Postal code. Usually country-wide, but sometimes specific to the
+ city (e.g. "2" in "Dublin 2, Ireland" addresses).
+ """
+ _qname = GDATA_TEMPLATE % 'postcode'
+
+
+class Country(atom.core.XmlElement):
+ """The gd:country element.
+
+ The name or code of the country.
+ """
+ _qname = GDATA_TEMPLATE % 'country'
+
+
+class FormattedAddress(atom.core.XmlElement):
+ """The gd:formattedAddress element.
+
+ The full, unstructured postal address.
+ """
+ _qname = GDATA_TEMPLATE % 'formattedAddress'
+
+
+class StructuredPostalAddress(atom.core.XmlElement):
+ """The gd:structuredPostalAddress element.
+
+  Postal address split into components. It allows the address to be stored
+  in a locale-independent format. The fields can be interpreted and used to
+  generate a formatted, locale-dependent address. The following elements
+  represent parts of the address: agent, house name, street, P.O. box,
+  neighborhood, city, subregion, region, postal code, country. The
+  subregion element is not used for postal addresses; it is provided for
+  extended uses of addresses only. To store a postal address in an
+  unstructured form, the formatted address field is provided.
+ """
+ _qname = GDATA_TEMPLATE % 'structuredPostalAddress'
+ rel = 'rel'
+ mail_class = 'mailClass'
+ usage = 'usage'
+ label = 'label'
+ primary = 'primary'
+ agent = Agent
+ house_name = HouseName
+ street = Street
+ po_box = PoBox
+ neighborhood = Neighborhood
+ city = City
+ subregion = Subregion
+ region = Region
+ postcode = Postcode
+ country = Country
+ formatted_address = FormattedAddress
+
+
+class Where(atom.core.XmlElement):
+ """The gd:where element.
+
+ A place (such as an event location) associated with the containing
+ entity. The type of the association is determined by the rel attribute;
+ the details of the location are contained in an embedded or linked-to
+ Contact entry.
+
+  A <gd:where> element is more general than a <gd:geoPt> element. The
+ former identifies a place using a text description and/or a Contact
+ entry, while the latter identifies a place using a specific geographic
+ location.
+ """
+ _qname = GDATA_TEMPLATE % 'where'
+ label = 'label'
+ rel = 'rel'
+ value = 'valueString'
+ entry_link = EntryLink
+
+
+class AttendeeType(atom.core.XmlElement):
+ """The gd:attendeeType element."""
+ _qname = GDATA_TEMPLATE % 'attendeeType'
+ value = 'value'
+
+
+class AttendeeStatus(atom.core.XmlElement):
+ """The gd:attendeeStatus element."""
+ _qname = GDATA_TEMPLATE % 'attendeeStatus'
+ value = 'value'
+
+
+class Who(atom.core.XmlElement):
+ """The gd:who element.
+
+ A person associated with the containing entity. The type of the
+ association is determined by the rel attribute; the details about the
+ person are contained in an embedded or linked-to Contact entry.
+
+  The <gd:who> element can be used to specify email senders and
+ recipients, calendar event organizers, and so on.
+ """
+ _qname = GDATA_TEMPLATE % 'who'
+ email = 'email'
+ rel = 'rel'
+ value = 'valueString'
+ attendee_status = AttendeeStatus
+ attendee_type = AttendeeType
+ entry_link = EntryLink
+
+
+class Deleted(atom.core.XmlElement):
+ """gd:deleted when present, indicates the containing entry is deleted."""
+ _qname = GD_TEMPLATE % 'deleted'
+
+
+class Money(atom.core.XmlElement):
+ """Describes money"""
+ _qname = GD_TEMPLATE % 'money'
+ amount = 'amount'
+ currency_code = 'currencyCode'
+
+
+class MediaSource(object):
+ """GData Entries can refer to media sources, so this class provides a
+ place to store references to these objects along with some metadata.
+ """
+
+ def __init__(self, file_handle=None, content_type=None, content_length=None,
+ file_path=None, file_name=None):
+ """Creates an object of type MediaSource.
+
+ Args:
+ file_handle: A file handle pointing to the file to be encapsulated in the
+ MediaSource.
+ content_type: string The MIME type of the file. Required if a file_handle
+ is given.
+ content_length: int The size of the file. Required if a file_handle is
+ given.
+ file_path: string (optional) A full path name to the file. Used in
+ place of a file_handle.
+ file_name: string The name of the file without any path information.
+ Required if a file_handle is given.
+ """
+ self.file_handle = file_handle
+ self.content_type = content_type
+ self.content_length = content_length
+ self.file_name = file_name
+
+ if (file_handle is None and content_type is not None and
+ file_path is not None):
+ self.set_file_handle(file_path, content_type)
+
+ def set_file_handle(self, file_name, content_type):
+ """A helper function which can create a file handle from a given filename
+ and set the content type and length all at once.
+
+ Args:
+ file_name: string The path and file name to the file containing the media
+ content_type: string A MIME type representing the type of the media
+ """
+
+ self.file_handle = open(file_name, 'rb')
+ self.content_type = content_type
+ self.content_length = os.path.getsize(file_name)
+ self.file_name = os.path.basename(file_name)
+
+ SetFileHandle = set_file_handle
+
+ def modify_request(self, http_request):
+ http_request.add_body_part(self.file_handle, self.content_type,
+ self.content_length)
+ return http_request
+
+ ModifyRequest = modify_request
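+
+
+def _media_source_sketch():
+  # Editor-added illustrative sketch, not part of the original module: shows
+  # constructing a MediaSource from a local file path. The path and MIME type
+  # used here are assumptions for the example only.
+  ms = MediaSource(file_path='/tmp/report.csv', content_type='text/csv')
+  # Because file_path and content_type were given, the constructor called
+  # set_file_handle(), so the handle, length and base file name are populated.
+  return (ms.file_name, ms.content_type, ms.content_length)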
diff --git a/python/gdata/docs/__init__.py b/python/gdata/docs/__init__.py
new file mode 100644
index 0000000..8031bc9
--- /dev/null
+++ b/python/gdata/docs/__init__.py
@@ -0,0 +1,269 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains extensions to Atom objects used with Google Documents."""
+
+__author__ = ('api.jfisher (Jeff Fisher), '
+ 'api.eric@google.com (Eric Bidelman)')
+
+import atom
+import gdata
+
+
+DOCUMENTS_NAMESPACE = 'http://schemas.google.com/docs/2007'
+
+
+class Scope(atom.AtomBase):
+ """The DocList ACL scope element"""
+
+ _tag = 'scope'
+ _namespace = gdata.GACL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+ _attributes['type'] = 'type'
+
+ def __init__(self, value=None, type=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.type = type
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Role(atom.AtomBase):
+ """The DocList ACL role element"""
+
+ _tag = 'role'
+ _namespace = gdata.GACL_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class FeedLink(atom.AtomBase):
+ """The DocList gd:feedLink element"""
+
+ _tag = 'feedLink'
+ _namespace = gdata.GDATA_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['rel'] = 'rel'
+ _attributes['href'] = 'href'
+
+ def __init__(self, href=None, rel=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.href = href
+ self.rel = rel
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class ResourceId(atom.AtomBase):
+ """The DocList gd:resourceId element"""
+
+ _tag = 'resourceId'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+ def __init__(self, value=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.value = value
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class LastModifiedBy(atom.Person):
+ """The DocList gd:lastModifiedBy element"""
+
+ _tag = 'lastModifiedBy'
+ _namespace = gdata.GDATA_NAMESPACE
+
+
+class LastViewed(atom.Person):
+ """The DocList gd:lastViewed element"""
+
+ _tag = 'lastViewed'
+ _namespace = gdata.GDATA_NAMESPACE
+
+
+class WritersCanInvite(atom.AtomBase):
+ """The DocList docs:writersCanInvite element"""
+
+ _tag = 'writersCanInvite'
+ _namespace = DOCUMENTS_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['value'] = 'value'
+
+
+class DocumentListEntry(gdata.GDataEntry):
+ """The Google Documents version of an Atom Entry"""
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feedLink', FeedLink)
+ _children['{%s}resourceId' % gdata.GDATA_NAMESPACE] = ('resourceId',
+ ResourceId)
+ _children['{%s}lastModifiedBy' % gdata.GDATA_NAMESPACE] = ('lastModifiedBy',
+ LastModifiedBy)
+ _children['{%s}lastViewed' % gdata.GDATA_NAMESPACE] = ('lastViewed',
+ LastViewed)
+ _children['{%s}writersCanInvite' % DOCUMENTS_NAMESPACE] = (
+ 'writersCanInvite', WritersCanInvite)
+
+ def __init__(self, resourceId=None, feedLink=None, lastViewed=None,
+ lastModifiedBy=None, writersCanInvite=None, author=None,
+ category=None, content=None, atom_id=None, link=None,
+ published=None, title=None, updated=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ self.feedLink = feedLink
+ self.lastViewed = lastViewed
+ self.lastModifiedBy = lastModifiedBy
+ self.resourceId = resourceId
+ self.writersCanInvite = writersCanInvite
+ gdata.GDataEntry.__init__(
+ self, author=author, category=category, content=content,
+ atom_id=atom_id, link=link, published=published, title=title,
+ updated=updated, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+ def GetAclLink(self):
+ """Extracts the DocListEntry's .
+
+ Returns:
+ A FeedLink object.
+ """
+ return self.feedLink
+
+ def GetDocumentType(self):
+ """Extracts the type of document from the DocListEntry.
+
+ This method returns the type of document the DocListEntry
+ represents. Possible values are document, presentation,
+ spreadsheet, folder, or pdf.
+
+ Returns:
+ A string representing the type of document.
+ """
+ if self.category:
+ for category in self.category:
+ if category.scheme == gdata.GDATA_NAMESPACE + '#kind':
+ return category.label
+ else:
+ return None
+
+
+def DocumentListEntryFromString(xml_string):
+ """Converts an XML string into a DocumentListEntry object.
+
+ Args:
+ xml_string: string The XML describing a Document List feed entry.
+
+ Returns:
+ A DocumentListEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(DocumentListEntry, xml_string)
+
+
+class DocumentListAclEntry(gdata.GDataEntry):
+ """A DocList ACL Entry flavor of an Atom Entry"""
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}scope' % gdata.GACL_NAMESPACE] = ('scope', Scope)
+ _children['{%s}role' % gdata.GACL_NAMESPACE] = ('role', Role)
+
+ def __init__(self, category=None, atom_id=None, link=None,
+ title=None, updated=None, scope=None, role=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ gdata.GDataEntry.__init__(self, author=None, category=category,
+ content=None, atom_id=atom_id, link=link,
+ published=None, title=title,
+ updated=updated, text=None)
+ self.scope = scope
+ self.role = role
+
+
+def DocumentListAclEntryFromString(xml_string):
+ """Converts an XML string into a DocumentListAclEntry object.
+
+ Args:
+ xml_string: string The XML describing a Document List ACL feed entry.
+
+ Returns:
+ A DocumentListAclEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(DocumentListAclEntry, xml_string)
+
+
+class DocumentListFeed(gdata.GDataFeed):
+ """A feed containing a list of Google Documents Items"""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [DocumentListEntry])
+
+
+def DocumentListFeedFromString(xml_string):
+ """Converts an XML string into a DocumentListFeed object.
+
+ Args:
+ xml_string: string The XML describing a DocumentList feed.
+
+ Returns:
+ A DocumentListFeed object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(DocumentListFeed, xml_string)
+
+
+class DocumentListAclFeed(gdata.GDataFeed):
+ """A DocList ACL feed flavor of a Atom feed"""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [DocumentListAclEntry])
+
+
+def DocumentListAclFeedFromString(xml_string):
+ """Converts an XML string into a DocumentListAclFeed object.
+
+ Args:
+    xml_string: string The XML describing a DocumentList ACL feed.
+
+  Returns:
+    A DocumentListAclFeed object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(DocumentListAclFeed, xml_string)
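+
+
+def _doclist_feed_sketch(xml_string):
+  # Editor-added illustrative sketch: parse a Document List feed fetched
+  # elsewhere (xml_string is assumed to hold the raw Atom feed XML) and list
+  # each entry's title together with its document type.
+  feed = DocumentListFeedFromString(xml_string)
+  return [(entry.title.text, entry.GetDocumentType()) for entry in feed.entry]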
diff --git a/python/gdata/docs/client.py b/python/gdata/docs/client.py
new file mode 100644
index 0000000..36ffea9
--- /dev/null
+++ b/python/gdata/docs/client.py
@@ -0,0 +1,611 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""DocsClient extends gdata.client.GDClient to streamline DocList API calls."""
+
+
+__author__ = 'e.bidelman (Eric Bidelman)'
+
+import mimetypes
+import urllib
+import atom.data
+import atom.http_core
+import gdata.client
+import gdata.docs.data
+import gdata.gauth
+
+
+# Feed URI templates
+DOCLIST_FEED_URI = '/feeds/default/private/full/'
+FOLDERS_FEED_TEMPLATE = DOCLIST_FEED_URI + '%s/contents'
+ACL_FEED_TEMPLATE = DOCLIST_FEED_URI + '%s/acl'
+REVISIONS_FEED_TEMPLATE = DOCLIST_FEED_URI + '%s/revisions'
+
+
+class DocsClient(gdata.client.GDClient):
+ """Client extension for the Google Documents List API."""
+
+ host = 'docs.google.com' # default server for the API
+ api_version = '3.0' # default major version for the service.
+ auth_service = 'writely'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['writely']
+ ssl = True
+
+ def __init__(self, auth_token=None, **kwargs):
+ """Constructs a new client for the DocList API.
+
+ Args:
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: The other parameters to pass to gdata.client.GDClient constructor.
+ """
+ gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
+
+ def get_file_content(self, uri, auth_token=None, **kwargs):
+ """Fetches the file content from the specified uri.
+
+    This method is useful for downloading/exporting a file within environments
+ like Google App Engine, where the user does not have the ability to write
+ the file to a local disk.
+
+ Args:
+ uri: str The full URL to fetch the file contents from.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.request().
+
+ Returns:
+ The binary file content.
+
+ Raises:
+ gdata.client.RequestError: on error response from server.
+ """
+ server_response = self.request('GET', uri, auth_token=auth_token, **kwargs)
+ if server_response.status != 200:
+ raise gdata.client.RequestError, {'status': server_response.status,
+ 'reason': server_response.reason,
+ 'body': server_response.read()}
+ return server_response.read()
+
+ GetFileContent = get_file_content
+
+ def _download_file(self, uri, file_path, auth_token=None, **kwargs):
+ """Downloads a file to disk from the specified URI.
+
+ Note: to download a file in memory, use the GetFileContent() method.
+
+ Args:
+ uri: str The full URL to download the file from.
+ file_path: str The full path to save the file to.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_file_content().
+
+ Raises:
+ gdata.client.RequestError: on error response from server.
+ """
+ f = open(file_path, 'wb')
+ try:
+ f.write(self.get_file_content(uri, auth_token=auth_token, **kwargs))
+ except gdata.client.RequestError, e:
+ f.close()
+ raise e
+ f.flush()
+ f.close()
+
+ _DownloadFile = _download_file
+
+ def get_doclist(self, uri=None, limit=None, auth_token=None, **kwargs):
+ """Retrieves the main doclist feed containing the user's items.
+
+ Args:
+ uri: str (optional) A URI to query the doclist feed.
+      limit: int (optional) A maximum cap for the number of results to
+          return in the feed. By default, the API returns a maximum of 100
+          per page. Thus, if you set limit=5000, you will get <= 5000
+          documents (guaranteed no more than 5000), and will need to follow the
+          feed's next links (feed.GetNextLink()) to the rest. See
+          get_everything(). Similarly, if you set limit=50, only <= 50
+          documents are returned. Note: if the max-results parameter is set in
+          the uri parameter, it is chosen over a value set for limit.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ gdata.docs.data.DocList feed.
+ """
+ if uri is None:
+ uri = DOCLIST_FEED_URI
+
+ if isinstance(uri, (str, unicode)):
+ uri = atom.http_core.Uri.parse_uri(uri)
+
+ # Add max-results param if it wasn't included in the uri.
+ if limit is not None and not 'max-results' in uri.query:
+ uri.query['max-results'] = limit
+
+ return self.get_feed(uri, desired_class=gdata.docs.data.DocList,
+ auth_token=auth_token, **kwargs)
+
+ GetDocList = get_doclist
+
+ def get_doc(self, resource_id, etag=None, auth_token=None, **kwargs):
+ """Retrieves a particular document given by its resource id.
+
+ Args:
+ resource_id: str The document/item's resource id. Example spreadsheet:
+ 'spreadsheet%3A0A1234567890'.
+ etag: str (optional) The document/item's etag value to be used in a
+ conditional GET. See http://code.google.com/apis/documents/docs/3.0/
+ developers_guide_protocol.html#RetrievingCached.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_entry().
+
+ Returns:
+ A gdata.docs.data.DocsEntry object representing the retrieved entry.
+
+ Raises:
+ ValueError if the resource_id is not a valid format.
+ """
+ match = gdata.docs.data.RESOURCE_ID_PATTERN.match(resource_id)
+ if match is None:
+ raise ValueError, 'Invalid resource id: %s' % resource_id
+ return self.get_entry(
+ DOCLIST_FEED_URI + resource_id, etag=etag,
+ desired_class=gdata.docs.data.DocsEntry,
+ auth_token=auth_token, **kwargs)
+
+ GetDoc = get_doc
+
+ def get_everything(self, uri=None, auth_token=None, **kwargs):
+ """Retrieves the user's entire doc list.
+
+ The method makes multiple HTTP requests (by following the feed's next links)
+ in order to fetch the user's entire document list.
+
+ Args:
+ uri: str (optional) A URI to query the doclist feed with.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.GetDocList().
+
+ Returns:
+ A list of gdata.docs.data.DocsEntry objects representing the retrieved
+ entries.
+ """
+ if uri is None:
+ uri = DOCLIST_FEED_URI
+
+ feed = self.GetDocList(uri=uri, auth_token=auth_token, **kwargs)
+ entries = feed.entry
+
+ while feed.GetNextLink() is not None:
+ feed = self.GetDocList(
+ feed.GetNextLink().href, auth_token=auth_token, **kwargs)
+ entries.extend(feed.entry)
+
+ return entries
+
+ GetEverything = get_everything
+
+ def get_acl_permissions(self, resource_id, auth_token=None, **kwargs):
+ """Retrieves a the ACL sharing permissions for a document.
+
+ Args:
+ resource_id: str The document/item's resource id. Example for pdf:
+ 'pdf%3A0A1234567890'.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ A gdata.docs.data.AclFeed object representing the document's ACL entries.
+
+ Raises:
+ ValueError if the resource_id is not a valid format.
+ """
+ match = gdata.docs.data.RESOURCE_ID_PATTERN.match(resource_id)
+ if match is None:
+ raise ValueError, 'Invalid resource id: %s' % resource_id
+
+ return self.get_feed(
+ ACL_FEED_TEMPLATE % resource_id, desired_class=gdata.docs.data.AclFeed,
+ auth_token=auth_token, **kwargs)
+
+ GetAclPermissions = get_acl_permissions
+
+ def get_revisions(self, resource_id, auth_token=None, **kwargs):
+ """Retrieves the revision history for a document.
+
+ Args:
+ resource_id: str The document/item's resource id. Example for pdf:
+ 'pdf%3A0A1234567890'.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ A gdata.docs.data.RevisionFeed representing the document's revisions.
+
+ Raises:
+ ValueError if the resource_id is not a valid format.
+ """
+ match = gdata.docs.data.RESOURCE_ID_PATTERN.match(resource_id)
+ if match is None:
+ raise ValueError, 'Invalid resource id: %s' % resource_id
+
+ return self.get_feed(
+ REVISIONS_FEED_TEMPLATE % resource_id,
+ desired_class=gdata.docs.data.RevisionFeed, auth_token=auth_token,
+ **kwargs)
+
+ GetRevisions = get_revisions
+
+ def create(self, doc_type, title, folder_or_id=None, writers_can_invite=None,
+ auth_token=None, **kwargs):
+ """Creates a new item in the user's doclist.
+
+ Args:
+ doc_type: str The type of object to create. For example: 'document',
+ 'spreadsheet', 'folder', 'presentation'.
+ title: str A title for the document.
+      folder_or_id: gdata.docs.data.DocsEntry or str (optional) Folder entry or
+          the resource id of a folder to create the object under. Note: A valid
+          resource id for a folder is of the form: folder%3Afolder_id.
+ writers_can_invite: bool (optional) False prevents collaborators from
+ being able to invite others to edit or view the document.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.post().
+
+ Returns:
+      gdata.docs.data.DocsEntry containing information about the newly
+      created item.
+ """
+ entry = gdata.docs.data.DocsEntry(title=atom.data.Title(text=title))
+ entry.category.append(gdata.docs.data.make_kind_category(doc_type))
+
+ if isinstance(writers_can_invite, gdata.docs.data.WritersCanInvite):
+ entry.writers_can_invite = writers_can_invite
+ elif isinstance(writers_can_invite, bool):
+ entry.writers_can_invite = gdata.docs.data.WritersCanInvite(
+ value=str(writers_can_invite).lower())
+
+ uri = DOCLIST_FEED_URI
+
+ if folder_or_id is not None:
+ if isinstance(folder_or_id, gdata.docs.data.DocsEntry):
+        # Verify that we're uploading the resource into a folder.
+ if folder_or_id.get_document_type() == gdata.docs.data.FOLDER_LABEL:
+ uri = folder_or_id.content.src
+ else:
+ raise gdata.client.Error, 'Trying to upload item to a non-folder.'
+ else:
+ uri = FOLDERS_FEED_TEMPLATE % folder_or_id
+
+ return self.post(entry, uri, auth_token=auth_token, **kwargs)
+
+ Create = create
+
+ def copy(self, source_entry, title, auth_token=None, **kwargs):
+ """Copies a native Google document, spreadsheet, or presentation.
+
+ Note: arbitrary file types and PDFs do not support this feature.
+
+ Args:
+ source_entry: gdata.docs.data.DocsEntry An object representing the source
+ document/folder.
+ title: str A title for the new document.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.post().
+
+ Returns:
+ A gdata.docs.data.DocsEntry of the duplicated document.
+ """
+ entry = gdata.docs.data.DocsEntry(
+ title=atom.data.Title(text=title),
+ id=atom.data.Id(text=source_entry.GetSelfLink().href))
+ return self.post(entry, DOCLIST_FEED_URI, auth_token=auth_token, **kwargs)
+
+ Copy = copy
+
+ def move(self, source_entry, folder_entry=None,
+ keep_in_folders=False, auth_token=None, **kwargs):
+ """Moves an item into a different folder (or to the root document list).
+
+ Args:
+ source_entry: gdata.docs.data.DocsEntry An object representing the source
+ document/folder.
+ folder_entry: gdata.docs.data.DocsEntry (optional) An object representing
+ the destination folder. If None, set keep_in_folders to
+ True to remove the item from all parent folders.
+ keep_in_folders: boolean (optional) If True, the source entry
+ is not removed from any existing parent folders it is in.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.post().
+
+ Returns:
+ A gdata.docs.data.DocsEntry of the moved entry or True if just moving the
+ item out of all folders (e.g. Move(source_entry)).
+ """
+ entry = gdata.docs.data.DocsEntry(id=source_entry.id)
+
+ # Remove the item from any folders it is already in.
+ if not keep_in_folders:
+ for folder in source_entry.InFolders():
+ self.delete(
+ '%s/contents/%s' % (
+ folder.href,
+ urllib.quote(source_entry.resource_id.text)),
+ force=True)
+
+ # If we're moving the resource into a folder, verify it is a folder entry.
+ if folder_entry is not None:
+ if folder_entry.get_document_type() == gdata.docs.data.FOLDER_LABEL:
+ return self.post(entry, folder_entry.content.src,
+ auth_token=auth_token, **kwargs)
+ else:
+ raise gdata.client.Error, 'Trying to move item into a non-folder.'
+
+ return True
+
+ Move = move
+
+ def upload(self, media, title, folder_or_uri=None, content_type=None,
+ auth_token=None, **kwargs):
+ """Uploads a file to Google Docs.
+
+ Args:
+ media: A gdata.data.MediaSource object containing the file to be
+ uploaded or a string of the filepath.
+ title: str The title of the document on the server after being
+ uploaded.
+ folder_or_uri: gdata.docs.data.DocsEntry or str (optional) An object with
+ a link to the folder or the uri to upload the file to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/default/private/full/folder%3Afolder_id/contents
+ content_type: str (optional) The file's mimetype. If not provided, the
+ one in the media source object is used or the mimetype is inferred
+ from the filename (if media is a string). When media is a filename,
+ it is always recommended to pass in a content type.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.post().
+
+ Returns:
+ A gdata.docs.data.DocsEntry containing information about uploaded doc.
+ """
+ uri = None
+ if folder_or_uri is not None:
+ if isinstance(folder_or_uri, gdata.docs.data.DocsEntry):
+        # Verify that we're uploading the resource into a folder.
+ if folder_or_uri.get_document_type() == gdata.docs.data.FOLDER_LABEL:
+ uri = folder_or_uri.content.src
+ else:
+ raise gdata.client.Error, 'Trying to upload item to a non-folder.'
+ else:
+ uri = folder_or_uri
+ else:
+ uri = DOCLIST_FEED_URI
+
+ # Create media source if media is a filepath.
+ if isinstance(media, (str, unicode)):
+ mimetype = mimetypes.guess_type(media)[0]
+ if mimetype is None and content_type is None:
+ raise ValueError, ("Unknown mimetype. Please pass in the file's "
+ "content_type")
+ else:
+        media = gdata.data.MediaSource(file_path=media,
+                                       content_type=content_type or mimetype)
+
+ entry = gdata.docs.data.DocsEntry(title=atom.data.Title(text=title))
+
+ return self.post(entry, uri, media_source=media,
+ desired_class=gdata.docs.data.DocsEntry,
+ auth_token=auth_token, **kwargs)
+
+ Upload = upload
+
+ def download(self, entry_or_id_or_url, file_path, extra_params=None,
+ auth_token=None, **kwargs):
+ """Downloads a file from the Document List to local disk.
+
+ Note: to download a file in memory, use the GetFileContent() method.
+
+ Args:
+ entry_or_id_or_url: gdata.docs.data.DocsEntry or string representing a
+ resource id or URL to download the document from (such as the content
+ src link).
+ file_path: str The full path to save the file to.
+ extra_params: dict (optional) A map of any further parameters to control
+ how the document is downloaded/exported. For example, exporting a
+ spreadsheet as a .csv: extra_params={'gid': 0, 'exportFormat': 'csv'}
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self._download_file().
+
+ Raises:
+ gdata.client.RequestError if the download URL is malformed or the server's
+ response was not successful.
+      ValueError if entry_or_id_or_url was a resource id for a filetype
+        for which the download link cannot be manually constructed (e.g. pdf).
+ """
+ if isinstance(entry_or_id_or_url, gdata.docs.data.DocsEntry):
+ url = entry_or_id_or_url.content.src
+ else:
+ if gdata.docs.data.RESOURCE_ID_PATTERN.match(entry_or_id_or_url):
+ url = gdata.docs.data.make_content_link_from_resource_id(
+ entry_or_id_or_url)
+ else:
+ url = entry_or_id_or_url
+
+ if extra_params is not None:
+ if 'exportFormat' in extra_params and url.find('/Export?') == -1:
+ raise gdata.client.Error, ('This entry type cannot be exported '
+ 'as a different format.')
+
+ if 'gid' in extra_params and url.find('spreadsheets') == -1:
+ raise gdata.client.Error, 'gid param is not valid for this doc type.'
+
+ url += '&' + urllib.urlencode(extra_params)
+
+ self._download_file(url, file_path, auth_token=auth_token, **kwargs)
+
+ Download = download
+
+ def export(self, entry_or_id_or_url, file_path, gid=None, auth_token=None,
+ **kwargs):
+ """Exports a document from the Document List in a different format.
+
+ Args:
+ entry_or_id_or_url: gdata.docs.data.DocsEntry or string representing a
+ resource id or URL to download the document from (such as the content
+ src link).
+      file_path: str The full path to save the file to. The export
+          format is inferred from the file extension.
+ gid: str (optional) grid id for downloading a single grid of a
+ spreadsheet. The param should only be used for .csv and .tsv
+ spreadsheet exports.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.download().
+
+ Raises:
+ gdata.client.RequestError if the download URL is malformed or the server's
+ response was not successful.
+ """
+ extra_params = {}
+
+ match = gdata.docs.data.FILE_EXT_PATTERN.match(file_path)
+ if match:
+ extra_params['exportFormat'] = match.group(1)
+
+ if gid is not None:
+ extra_params['gid'] = gid
+
+ self.download(entry_or_id_or_url, file_path, extra_params,
+ auth_token=auth_token, **kwargs)
+
+ Export = export
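+
+
+def _docs_client_sketch(client):
+  # Editor-added illustrative sketch: typical calls against an already
+  # authorized DocsClient. The file path, title, MIME type and export target
+  # below are assumptions for the example only.
+  feed = client.GetDocList(limit=10)  # fetch at most 10 entries in this page
+  titles = [entry.title.text for entry in feed.entry]
+  # Upload a CSV file; passing content_type explicitly is recommended.
+  new_entry = client.Upload('/tmp/data.csv', 'My data',
+                            content_type='text/csv')
+  # Export the uploaded item as PDF (only native Google Docs types support
+  # format conversion).
+  client.Export(new_entry, '/tmp/my-data.pdf')
+  return titles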
+
+
+class DocsQuery(gdata.client.Query):
+
+ def __init__(self, title=None, title_exact=None, opened_min=None,
+ opened_max=None, edited_min=None, edited_max=None, owner=None,
+ writer=None, reader=None, show_folders=None,
+ show_deleted=None, ocr=None, target_language=None,
+ source_language=None, convert=None, **kwargs):
+ """Constructs a query URL for the Google Documents List API.
+
+ Args:
+      title: str (optional) Specifies the search terms for the title of a
+          document. When used without title_exact, this parameter submits
+          partial-match queries, not exact queries.
+ title_exact: str (optional) Meaningless without title. Possible values
+ are 'true' and 'false'. Note: Matches are case-insensitive.
+ opened_min: str (optional) Lower bound on the last time a document was
+ opened by the current user. Use the RFC 3339 timestamp
+ format. For example: opened_min='2005-08-09T09:57:00-08:00'.
+ opened_max: str (optional) Upper bound on the last time a document was
+ opened by the current user. (See also opened_min.)
+ edited_min: str (optional) Lower bound on the last time a document was
+ edited by the current user. This value corresponds to the
+ edited.text value in the doc's entry object, which
+ represents changes to the document's content or metadata.
+ Use the RFC 3339 timestamp format. For example:
+ edited_min='2005-08-09T09:57:00-08:00'
+ edited_max: str (optional) Upper bound on the last time a document was
+ edited by the user. (See also edited_min.)
+ owner: str (optional) Searches for documents with a specific owner. Use
+ the email address of the owner. For example:
+ owner='user@gmail.com'
+ writer: str (optional) Searches for documents which can be written to
+ by specific users. Use a single email address or a comma
+ separated list of email addresses. For example:
+ writer='user1@gmail.com,user@example.com'
+ reader: str (optional) Searches for documents which can be read by
+ specific users. (See also writer.)
+ show_folders: str (optional) Specifies whether the query should return
+ folders as well as documents. Possible values are 'true'
+ and 'false'. Default is false.
+ show_deleted: str (optional) Specifies whether the query should return
+ documents which are in the trash as well as other
+ documents. Possible values are 'true' and 'false'.
+ Default is false.
+ ocr: str (optional) Specifies whether to attempt OCR on a .jpg, .png, or
+ .gif upload. Possible values are 'true' and 'false'. Default is
+ false. See OCR in the Protocol Guide:
+ http://code.google.com/apis/documents/docs/3.0/developers_guide_protocol.html#OCR
+ target_language: str (optional) Specifies the language to translate a
+ document into. See Document Translation in the Protocol
+ Guide for a table of possible values:
+ http://code.google.com/apis/documents/docs/3.0/developers_guide_protocol.html#DocumentTranslation
+ source_language: str (optional) Specifies the source language of the
+ original document. Optional when using the translation
+ service. If not provided, Google will attempt to
+ auto-detect the source language. See Document
+ Translation in the Protocol Guide for a table of
+ possible values (link in target_language).
+      convert: str (optional) Used when uploading arbitrary file types to
+          specify whether document-type uploads should convert to a native
+          Google Docs format. Possible values are 'true' and 'false'.
+          The default is 'true'.
+ """
+ gdata.client.Query.__init__(self, **kwargs)
+ self.convert = convert
+ self.title = title
+ self.title_exact = title_exact
+ self.opened_min = opened_min
+ self.opened_max = opened_max
+ self.edited_min = edited_min
+ self.edited_max = edited_max
+ self.owner = owner
+ self.writer = writer
+ self.reader = reader
+ self.show_folders = show_folders
+ self.show_deleted = show_deleted
+ self.ocr = ocr
+ self.target_language = target_language
+ self.source_language = source_language
+
+ def modify_request(self, http_request):
+ gdata.client._add_query_param('convert', self.convert, http_request)
+ gdata.client._add_query_param('title', self.title, http_request)
+ gdata.client._add_query_param('title-exact', self.title_exact,
+ http_request)
+ gdata.client._add_query_param('opened-min', self.opened_min, http_request)
+ gdata.client._add_query_param('opened-max', self.opened_max, http_request)
+ gdata.client._add_query_param('edited-min', self.edited_min, http_request)
+ gdata.client._add_query_param('edited-max', self.edited_max, http_request)
+ gdata.client._add_query_param('owner', self.owner, http_request)
+ gdata.client._add_query_param('writer', self.writer, http_request)
+ gdata.client._add_query_param('reader', self.reader, http_request)
+ gdata.client._add_query_param('showfolders', self.show_folders,
+ http_request)
+ gdata.client._add_query_param('showdeleted', self.show_deleted,
+ http_request)
+ gdata.client._add_query_param('ocr', self.ocr, http_request)
+ gdata.client._add_query_param('targetLanguage', self.target_language,
+ http_request)
+ gdata.client._add_query_param('sourceLanguage', self.source_language,
+ http_request)
+ gdata.client.Query.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
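+
+
+def _docs_query_sketch():
+  # Editor-added illustrative sketch: a DocsQuery only contributes query
+  # parameters to an outgoing request via modify_request(). The title and
+  # owner values below are assumptions for the example only.
+  query = DocsQuery(title='budget', title_exact='true',
+                    owner='user@gmail.com')
+  request = atom.http_core.HttpRequest(
+      uri=atom.http_core.Uri(path=DOCLIST_FEED_URI))
+  query.modify_request(request)
+  # request.uri.query now holds {'title': 'budget', 'title-exact': 'true',
+  # 'owner': 'user@gmail.com'}.
+  return request.uri.query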
diff --git a/python/gdata/docs/data.py b/python/gdata/docs/data.py
new file mode 100644
index 0000000..8e54d57
--- /dev/null
+++ b/python/gdata/docs/data.py
@@ -0,0 +1,280 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model classes for parsing and generating XML for the DocList Data API"""
+
+__author__ = 'e.bidelman (Eric Bidelman)'
+
+
+import re
+import atom.core
+import atom.data
+import gdata.acl.data
+import gdata.data
+
+DOCUMENTS_NS = 'http://schemas.google.com/docs/2007'
+DOCUMENTS_TEMPLATE = '{http://schemas.google.com/docs/2007}%s'
+ACL_FEEDLINK_REL = 'http://schemas.google.com/acl/2007#accessControlList'
+REVISION_FEEDLINK_REL = DOCUMENTS_NS + '/revisions'
+
+# XML Namespaces used in Google Documents entities.
+DATA_KIND_SCHEME = 'http://schemas.google.com/g/2005#kind'
+DOCUMENT_LABEL = 'document'
+SPREADSHEET_LABEL = 'spreadsheet'
+PRESENTATION_LABEL = 'presentation'
+FOLDER_LABEL = 'folder'
+PDF_LABEL = 'pdf'
+
+LABEL_SCHEME = 'http://schemas.google.com/g/2005/labels'
+STARRED_LABEL_TERM = LABEL_SCHEME + '#starred'
+TRASHED_LABEL_TERM = LABEL_SCHEME + '#trashed'
+HIDDEN_LABEL_TERM = LABEL_SCHEME + '#hidden'
+MINE_LABEL_TERM = LABEL_SCHEME + '#mine'
+PRIVATE_LABEL_TERM = LABEL_SCHEME + '#private'
+SHARED_WITH_DOMAIN_LABEL_TERM = LABEL_SCHEME + '#shared-with-domain'
+VIEWED_LABEL_TERM = LABEL_SCHEME + '#viewed'
+
+DOCS_PARENT_LINK_REL = DOCUMENTS_NS + '#parent'
+DOCS_PUBLISH_LINK_REL = DOCUMENTS_NS + '#publish'
+
+FILE_EXT_PATTERN = re.compile('.*\.([a-zA-Z]{3,}$)')
+RESOURCE_ID_PATTERN = re.compile('^([a-z]*)(:|%3A)([\w-]*)$')
+
+# File extension/mimetype pairs of common formats.
+MIMETYPES = {
+ 'CSV': 'text/csv',
+ 'TSV': 'text/tab-separated-values',
+ 'TAB': 'text/tab-separated-values',
+ 'DOC': 'application/msword',
+ 'DOCX': ('application/vnd.openxmlformats-officedocument.'
+ 'wordprocessingml.document'),
+ 'ODS': 'application/x-vnd.oasis.opendocument.spreadsheet',
+ 'ODT': 'application/vnd.oasis.opendocument.text',
+ 'RTF': 'application/rtf',
+ 'SXW': 'application/vnd.sun.xml.writer',
+ 'TXT': 'text/plain',
+ 'XLS': 'application/vnd.ms-excel',
+ 'XLSX': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+ 'PDF': 'application/pdf',
+ 'PNG': 'image/png',
+ 'PPT': 'application/vnd.ms-powerpoint',
+ 'PPS': 'application/vnd.ms-powerpoint',
+ 'HTM': 'text/html',
+ 'HTML': 'text/html',
+ 'ZIP': 'application/zip',
+ 'SWF': 'application/x-shockwave-flash'
+ }
+
+
+def make_kind_category(label):
+ """Builds the appropriate atom.data.Category for the label passed in.
+
+ Args:
+ label: str The value for the category entry.
+
+ Returns:
+ An atom.data.Category or None if label is None.
+ """
+ if label is None:
+ return None
+
+ return atom.data.Category(
+ scheme=DATA_KIND_SCHEME, term='%s#%s' % (DOCUMENTS_NS, label), label=label)
+
+MakeKindCategory = make_kind_category
+
+def make_content_link_from_resource_id(resource_id):
+ """Constructs export URL for a given resource.
+
+ Args:
+ resource_id: str The document/item's resource id. Example presentation:
+ 'presentation%3A0A1234567890'.
+
+  Returns:
+    A string containing the download/export URL for the resource.
+
+  Raises:
+    ValueError if the resource_id is not a valid format.
+ """
+ match = RESOURCE_ID_PATTERN.match(resource_id)
+
+ if match:
+ label = match.group(1)
+ doc_id = match.group(3)
+ if label == DOCUMENT_LABEL:
+ return '/feeds/download/documents/Export?docId=%s' % doc_id
+ if label == PRESENTATION_LABEL:
+ return '/feeds/download/presentations/Export?docId=%s' % doc_id
+ if label == SPREADSHEET_LABEL:
+ return ('https://spreadsheets.google.com/feeds/download/spreadsheets/'
+ 'Export?key=%s' % doc_id)
+ raise ValueError, ('Invalid resource id: %s, or manually creating the '
+ 'download url for this type of doc is not possible'
+ % resource_id)
+
+MakeContentLinkFromResourceId = make_content_link_from_resource_id
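+
+
+def _content_link_sketch():
+  # Editor-added illustrative sketch: the resource id below is a made-up value
+  # matching RESOURCE_ID_PATTERN ('<label>:<id>' or '<label>%3A<id>').
+  # Returns '/feeds/download/documents/Export?docId=0A1234567890'.
+  return make_content_link_from_resource_id('document:0A1234567890')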
+
+
+class ResourceId(atom.core.XmlElement):
+ """The DocList gd:resourceId element."""
+ _qname = gdata.data.GDATA_TEMPLATE % 'resourceId'
+
+
+class LastModifiedBy(atom.data.Person):
+ """The DocList gd:lastModifiedBy element."""
+ _qname = gdata.data.GDATA_TEMPLATE % 'lastModifiedBy'
+
+
+class LastViewed(atom.data.Person):
+ """The DocList gd:lastViewed element."""
+ _qname = gdata.data.GDATA_TEMPLATE % 'lastViewed'
+
+
+class WritersCanInvite(atom.core.XmlElement):
+ """The DocList docs:writersCanInvite element."""
+ _qname = DOCUMENTS_TEMPLATE % 'writersCanInvite'
+ value = 'value'
+
+
+class QuotaBytesUsed(atom.core.XmlElement):
+ """The DocList gd:quotaBytesUsed element."""
+ _qname = gdata.data.GDATA_TEMPLATE % 'quotaBytesUsed'
+
+
+class Publish(atom.core.XmlElement):
+ """The DocList docs:publish element."""
+ _qname = DOCUMENTS_TEMPLATE % 'publish'
+ value = 'value'
+
+
+class PublishAuto(atom.core.XmlElement):
+ """The DocList docs:publishAuto element."""
+ _qname = DOCUMENTS_TEMPLATE % 'publishAuto'
+ value = 'value'
+
+
+class PublishOutsideDomain(atom.core.XmlElement):
+ """The DocList docs:publishOutsideDomain element."""
+ _qname = DOCUMENTS_TEMPLATE % 'publishOutsideDomain'
+ value = 'value'
+
+
+class DocsEntry(gdata.data.GDEntry):
+ """A DocList version of an Atom Entry."""
+
+ last_viewed = LastViewed
+ last_modified_by = LastModifiedBy
+ resource_id = ResourceId
+ writers_can_invite = WritersCanInvite
+ quota_bytes_used = QuotaBytesUsed
+ feed_link = [gdata.data.FeedLink]
+
+ def get_document_type(self):
+ """Extracts the type of document this DocsEntry is.
+
+ This method returns the type of document the DocsEntry represents. Possible
+ values are document, presentation, spreadsheet, folder, or pdf.
+
+ Returns:
+ A string representing the type of document.
+ """
+ if self.category:
+ for category in self.category:
+ if category.scheme == DATA_KIND_SCHEME:
+ return category.label
+ else:
+ return None
+
+ GetDocumentType = get_document_type
+
+ def get_acl_feed_link(self):
+ """Extracts the DocsEntry's ACL feed .
+
+ Returns:
+ A gdata.data.FeedLink object.
+ """
+ for feed_link in self.feed_link:
+ if feed_link.rel == ACL_FEEDLINK_REL:
+ return feed_link
+ return None
+
+ GetAclFeedLink = get_acl_feed_link
+
+ def get_revisions_feed_link(self):
+ """Extracts the DocsEntry's revisions feed .
+
+ Returns:
+ A gdata.data.FeedLink object.
+ """
+ for feed_link in self.feed_link:
+ if feed_link.rel == REVISION_FEEDLINK_REL:
+ return feed_link
+ return None
+
+ GetRevisionsFeedLink = get_revisions_feed_link
+
+ def in_folders(self):
+ """Returns the parents link(s) (folders) of this entry."""
+ links = []
+ for link in self.link:
+ if link.rel == DOCS_PARENT_LINK_REL and link.href:
+ links.append(link)
+ return links
+
+ InFolders = in_folders
+
+
+class Acl(gdata.acl.data.AclEntry):
+ """A document ACL entry."""
+
+
+class DocList(gdata.data.GDFeed):
+ """The main DocList feed containing a list of Google Documents."""
+ entry = [DocsEntry]
+
+
+class AclFeed(gdata.acl.data.AclFeed):
+ """A DocList ACL feed."""
+ entry = [Acl]
+
+
+class Revision(gdata.data.GDEntry):
+ """A document Revision entry."""
+ publish = Publish
+ publish_auto = PublishAuto
+ publish_outside_domain = PublishOutsideDomain
+
+ def find_publish_link(self):
+ """Get the link that points to the published document on the web.
+
+ Returns:
+ A str for the URL in the link with a rel ending in #publish.
+ """
+ return self.find_url(DOCS_PUBLISH_LINK_REL)
+
+ FindPublishLink = find_publish_link
+
+ def get_publish_link(self):
+ """Get the link that points to the published document on the web.
+
+ Returns:
+ A gdata.data.Link for the link with a rel ending in #publish.
+ """
+ return self.get_link(DOCS_PUBLISH_LINK_REL)
+
+ GetPublishLink = get_publish_link
+
+
+class RevisionFeed(gdata.data.GDFeed):
+ """A DocList Revision feed."""
+ entry = [Revision]
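+
+
+def _docs_entry_sketch(entry):
+  # Editor-added illustrative sketch: inspect a parsed DocsEntry, assumed to
+  # come from a DocList feed fetched elsewhere.
+  doc_type = entry.get_document_type()   # e.g. 'document' or 'spreadsheet'
+  acl_link = entry.get_acl_feed_link()   # gdata.data.FeedLink or None
+  parents = entry.in_folders()           # links to the entry's parent folders
+  return doc_type, acl_link, parents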
diff --git a/python/gdata/docs/service.py b/python/gdata/docs/service.py
new file mode 100644
index 0000000..9dd1f21
--- /dev/null
+++ b/python/gdata/docs/service.py
@@ -0,0 +1,611 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""DocsService extends the GDataService to streamline Google Documents
+ operations.
+
+ DocsService: Provides methods to query feeds and manipulate items.
+ Extends GDataService.
+
+ DocumentQuery: Queries a Google Document list feed.
+
+ DocumentAclQuery: Queries a Google Document Acl feed.
+"""
+
+
+__author__ = ('api.jfisher (Jeff Fisher), '
+ 'e.bidelman (Eric Bidelman)')
+
+import re
+import atom
+import gdata.service
+import gdata.docs
+import urllib
+
+# XML Namespaces used in Google Documents entities.
+DATA_KIND_SCHEME = gdata.GDATA_NAMESPACE + '#kind'
+DOCUMENT_LABEL = 'document'
+SPREADSHEET_LABEL = 'spreadsheet'
+PRESENTATION_LABEL = 'presentation'
+FOLDER_LABEL = 'folder'
+PDF_LABEL = 'pdf'
+
+LABEL_SCHEME = gdata.GDATA_NAMESPACE + '/labels'
+STARRED_LABEL_TERM = LABEL_SCHEME + '#starred'
+TRASHED_LABEL_TERM = LABEL_SCHEME + '#trashed'
+HIDDEN_LABEL_TERM = LABEL_SCHEME + '#hidden'
+MINE_LABEL_TERM = LABEL_SCHEME + '#mine'
+PRIVATE_LABEL_TERM = LABEL_SCHEME + '#private'
+SHARED_WITH_DOMAIN_LABEL_TERM = LABEL_SCHEME + '#shared-with-domain'
+VIEWED_LABEL_TERM = LABEL_SCHEME + '#viewed'
+
+FOLDERS_SCHEME_PREFIX = gdata.docs.DOCUMENTS_NAMESPACE + '/folders/'
+
+# File extensions of documents that are permitted to be uploaded or downloaded.
+SUPPORTED_FILETYPES = {
+ 'CSV': 'text/csv',
+ 'TSV': 'text/tab-separated-values',
+ 'TAB': 'text/tab-separated-values',
+ 'DOC': 'application/msword',
+ 'DOCX': ('application/vnd.openxmlformats-officedocument.'
+ 'wordprocessingml.document'),
+ 'ODS': 'application/x-vnd.oasis.opendocument.spreadsheet',
+ 'ODT': 'application/vnd.oasis.opendocument.text',
+ 'RTF': 'application/rtf',
+ 'SXW': 'application/vnd.sun.xml.writer',
+ 'TXT': 'text/plain',
+ 'XLS': 'application/vnd.ms-excel',
+ 'XLSX': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+ 'PDF': 'application/pdf',
+ 'PNG': 'image/png',
+ 'PPT': 'application/vnd.ms-powerpoint',
+ 'PPS': 'application/vnd.ms-powerpoint',
+ 'HTM': 'text/html',
+ 'HTML': 'text/html',
+ 'ZIP': 'application/zip',
+ 'SWF': 'application/x-shockwave-flash'
+ }
+
+
+class DocsService(gdata.service.GDataService):
+
+ """Client extension for the Google Documents service Document List feed."""
+
+ __FILE_EXT_PATTERN = re.compile('.*\.([a-zA-Z]{3,}$)')
+ __RESOURCE_ID_PATTERN = re.compile('^([a-z]*)(:|%3A)([\w-]*)$')
+
+ def __init__(self, email=None, password=None, source=None,
+ server='docs.google.com', additional_headers=None, **kwargs):
+ """Creates a client for the Google Documents service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'docs.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='writely', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+
+ def _MakeKindCategory(self, label):
+ if label is None:
+ return None
+ return atom.Category(scheme=DATA_KIND_SCHEME,
+ term=gdata.docs.DOCUMENTS_NAMESPACE + '#' + label, label=label)
+
+ def _MakeContentLinkFromId(self, resource_id):
+ match = self.__RESOURCE_ID_PATTERN.match(resource_id)
+ label = match.group(1)
+ doc_id = match.group(3)
+ if label == DOCUMENT_LABEL:
+ return '/feeds/download/documents/Export?docId=%s' % doc_id
+ if label == PRESENTATION_LABEL:
+ return '/feeds/download/presentations/Export?docId=%s' % doc_id
+ if label == SPREADSHEET_LABEL:
+ return ('https://spreadsheets.google.com/feeds/download/spreadsheets/'
+ 'Export?key=%s' % doc_id)
+ raise ValueError, 'Invalid resource id: %s' % resource_id
+
+ def _UploadFile(self, media_source, title, category, folder_or_uri=None):
+ """Uploads a file to the Document List feed.
+
+ Args:
+ media_source: A gdata.MediaSource object containing the file to be
+ uploaded.
+ title: string The title of the document on the server after being
+ uploaded.
+ category: An atom.Category object specifying the appropriate document
+ type.
+ folder_or_uri: DocumentListEntry or string (optional) An object with a
+ link to a folder or a uri to a folder to upload to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/folders/private/full/folder%3Afolder_id
+
+ Returns:
+ A DocumentListEntry containing information about the document created on
+ the Google Documents service.
+ """
+ if folder_or_uri:
+ try:
+ uri = folder_or_uri.content.src
+ except AttributeError:
+ uri = folder_or_uri
+ else:
+ uri = '/feeds/documents/private/full'
+
+ entry = gdata.docs.DocumentListEntry()
+ entry.title = atom.Title(text=title)
+ if category is not None:
+ entry.category.append(category)
+ entry = self.Post(entry, uri, media_source=media_source,
+ extra_headers={'Slug': media_source.file_name},
+ converter=gdata.docs.DocumentListEntryFromString)
+ return entry
+
+ def _DownloadFile(self, uri, file_path):
+ """Downloads a file.
+
+ Args:
+ uri: string The full Export URL to download the file from.
+ file_path: string The full path to save the file to.
+
+ Raises:
+ RequestError: on error response from server.
+ """
+ server_response = self.request('GET', uri)
+ response_body = server_response.read()
+ if server_response.status != 200:
+ raise gdata.service.RequestError, {'status': server_response.status,
+ 'reason': server_response.reason,
+ 'body': response_body}
+ f = open(file_path, 'wb')
+ f.write(response_body)
+ f.flush()
+ f.close()
+
+ def MoveIntoFolder(self, source_entry, folder_entry):
+ """Moves a document into a folder in the Document List Feed.
+
+ Args:
+ source_entry: DocumentListEntry An object representing the source
+ document/folder.
+ folder_entry: DocumentListEntry An object with a link to the destination
+ folder.
+
+ Returns:
+ A DocumentListEntry containing information about the document created on
+ the Google Documents service.
+ """
+ entry = gdata.docs.DocumentListEntry()
+ entry.id = source_entry.id
+ entry = self.Post(entry, folder_entry.content.src,
+ converter=gdata.docs.DocumentListEntryFromString)
+ return entry
+
+ def Query(self, uri, converter=gdata.docs.DocumentListFeedFromString):
+ """Queries the Document List feed and returns the resulting feed of
+ entries.
+
+ Args:
+ uri: string The full URI to be queried. This can contain query
+ parameters, a hostname, or simply the relative path to a Document
+ List feed. The DocumentQuery object is useful when constructing
+ query parameters.
+ converter: func (optional) A function which will be executed on the
+ retrieved item, generally to render it into a Python object.
+ By default the DocumentListFeedFromString function is used to
+ return a DocumentListFeed object. This is because most feed
+ queries will result in a feed and not a single entry.
+ """
+ return self.Get(uri, converter=converter)
+
+ def QueryDocumentListFeed(self, uri):
+ """Retrieves a DocumentListFeed by retrieving a URI based off the Document
+ List feed, including any query parameters. A DocumentQuery object can
+ be used to construct these parameters.
+
+ Args:
+ uri: string The URI of the feed being retrieved possibly with query
+ parameters.
+
+ Returns:
+ A DocumentListFeed object representing the feed returned by the server.
+ """
+ return self.Get(uri, converter=gdata.docs.DocumentListFeedFromString)
+
+ def GetDocumentListEntry(self, uri):
+ """Retrieves a particular DocumentListEntry by its unique URI.
+
+ Args:
+ uri: string The unique URI of an entry in a Document List feed.
+
+ Returns:
+ A DocumentListEntry object representing the retrieved entry.
+ """
+ return self.Get(uri, converter=gdata.docs.DocumentListEntryFromString)
+
+ def GetDocumentListFeed(self, uri=None):
+ """Retrieves a feed containing all of a user's documents.
+
+ Args:
+ uri: string A full URI to query the Document List feed.
+ """
+ if not uri:
+ uri = gdata.docs.service.DocumentQuery().ToUri()
+ return self.QueryDocumentListFeed(uri)
+
+ def GetDocumentListAclEntry(self, uri):
+ """Retrieves a particular DocumentListAclEntry by its unique URI.
+
+ Args:
+ uri: string The unique URI of an entry in a Document List feed.
+
+ Returns:
+ A DocumentListAclEntry object representing the retrieved entry.
+ """
+ return self.Get(uri, converter=gdata.docs.DocumentListAclEntryFromString)
+
+ def GetDocumentListAclFeed(self, uri):
+ """Retrieves a feed containing all of a user's documents.
+
+ Args:
+ uri: string The URI of a document's Acl feed to retrieve.
+
+ Returns:
+ A DocumentListAclFeed object representing the ACL feed
+ returned by the server.
+ """
+ return self.Get(uri, converter=gdata.docs.DocumentListAclFeedFromString)
+
+ def Upload(self, media_source, title, folder_or_uri=None, label=None):
+ """Uploads a document inside of a MediaSource object to the Document List
+ feed with the given title.
+
+ Args:
+ media_source: MediaSource The gdata.MediaSource object containing a
+ document file to be uploaded.
+ title: string The title of the document on the server after being
+ uploaded.
+ folder_or_uri: DocumentListEntry or string (optional) An object with a
+ link to a folder or a uri to a folder to upload to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/folders/private/full/folder%3Afolder_id
+ label: optional label describing the type of the document to be created.
+
+ Returns:
+ A DocumentListEntry containing information about the document created
+ on the Google Documents service.
+ """
+
+ return self._UploadFile(media_source, title, self._MakeKindCategory(label),
+ folder_or_uri)
+
+ def Download(self, entry_or_id_or_url, file_path, export_format=None,
+ gid=None, extra_params=None):
+ """Downloads a document from the Document List.
+
+ Args:
+ entry_or_id_or_url: a DocumentListEntry, or the resource id of an entry,
+ or a url to download from (such as the content src).
+ file_path: string The full path to save the file to.
+ export_format: the format to convert to, if conversion is required.
+ gid: grid id, for downloading a single grid of a spreadsheet
+ extra_params: a map of any further parameters to control how the document
+ is downloaded
+
+ Raises:
+ RequestError if the service does not respond with success
+ """
+
+ if isinstance(entry_or_id_or_url, gdata.docs.DocumentListEntry):
+ url = entry_or_id_or_url.content.src
+ else:
+ if self.__RESOURCE_ID_PATTERN.match(entry_or_id_or_url):
+ url = self._MakeContentLinkFromId(entry_or_id_or_url)
+ else:
+ url = entry_or_id_or_url
+
+ if export_format is not None:
+ if url.find('/Export?') == -1:
+ raise gdata.service.Error, ('This entry cannot be exported '
+ 'as a different format')
+ url += '&exportFormat=%s' % export_format
+
+ if gid is not None:
+ if url.find('spreadsheets') == -1:
+ raise gdata.service.Error, 'grid id param is not valid for this entry'
+ url += '&gid=%s' % gid
+
+ if extra_params:
+ url += '&' + urllib.urlencode(extra_params)
+
+ self._DownloadFile(url, file_path)
+
+ def Export(self, entry_or_id_or_url, file_path, gid=None, extra_params=None):
+ """Downloads a document from the Document List in a different format.
+
+ Args:
+ entry_or_id_or_url: a DocumentListEntry, or the resource id of an entry,
+ or a url to download from (such as the content src).
+      file_path: string The full path to save the file to. The export
+          format is inferred from the file extension.
+ gid: grid id, for downloading a single grid of a spreadsheet
+ extra_params: a map of any further parameters to control how the document
+ is downloaded
+
+ Raises:
+ RequestError if the service does not respond with success
+ """
+ ext = None
+ match = self.__FILE_EXT_PATTERN.match(file_path)
+ if match:
+ ext = match.group(1)
+ self.Download(entry_or_id_or_url, file_path, ext, gid, extra_params)
+
+ def CreateFolder(self, title, folder_or_uri=None):
+ """Creates a folder in the Document List feed.
+
+ Args:
+ title: string The title of the folder on the server after being created.
+ folder_or_uri: DocumentListEntry or string (optional) An object with a
+ link to a folder or a uri to a folder to upload to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/folders/private/full/folder%3Afolder_id
+
+ Returns:
+ A DocumentListEntry containing information about the folder created on
+ the Google Documents service.
+ """
+ if folder_or_uri:
+ try:
+ uri = folder_or_uri.content.src
+ except AttributeError:
+ uri = folder_or_uri
+ else:
+ uri = '/feeds/documents/private/full'
+
+ folder_entry = gdata.docs.DocumentListEntry()
+ folder_entry.title = atom.Title(text=title)
+ folder_entry.category.append(self._MakeKindCategory(FOLDER_LABEL))
+ folder_entry = self.Post(folder_entry, uri,
+ converter=gdata.docs.DocumentListEntryFromString)
+
+ return folder_entry
+
+
+ def MoveOutOfFolder(self, source_entry):
+ """Moves a document into a folder in the Document List Feed.
+
+ Args:
+ source_entry: DocumentListEntry An object representing the source
+ document/folder.
+
+ Returns:
+ True if the entry was moved out.
+ """
+ return self.Delete(source_entry.GetEditLink().href)
+
+ # Deprecated methods
+
+ #@atom.deprecated('Please use Upload instead')
+ def UploadPresentation(self, media_source, title, folder_or_uri=None):
+ """Uploads a presentation inside of a MediaSource object to the Document
+ List feed with the given title.
+
+ This method is deprecated, use Upload instead.
+
+ Args:
+ media_source: MediaSource The MediaSource object containing a
+ presentation file to be uploaded.
+ title: string The title of the presentation on the server after being
+ uploaded.
+ folder_or_uri: DocumentListEntry or string (optional) An object with a
+ link to a folder or a uri to a folder to upload to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/folders/private/full/folder%3Afolder_id
+
+ Returns:
+ A DocumentListEntry containing information about the presentation created
+ on the Google Documents service.
+ """
+ return self._UploadFile(
+ media_source, title, self._MakeKindCategory(PRESENTATION_LABEL),
+ folder_or_uri=folder_or_uri)
+
+ UploadPresentation = atom.deprecated('Please use Upload instead')(
+ UploadPresentation)
+
+ #@atom.deprecated('Please use Upload instead')
+ def UploadSpreadsheet(self, media_source, title, folder_or_uri=None):
+ """Uploads a spreadsheet inside of a MediaSource object to the Document
+ List feed with the given title.
+
+ This method is deprecated, use Upload instead.
+
+ Args:
+ media_source: MediaSource The MediaSource object containing a spreadsheet
+ file to be uploaded.
+ title: string The title of the spreadsheet on the server after being
+ uploaded.
+ folder_or_uri: DocumentListEntry or string (optional) An object with a
+ link to a folder or a uri to a folder to upload to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/folders/private/full/folder%3Afolder_id
+
+ Returns:
+ A DocumentListEntry containing information about the spreadsheet created
+ on the Google Documents service.
+ """
+ return self._UploadFile(
+ media_source, title, self._MakeKindCategory(SPREADSHEET_LABEL),
+ folder_or_uri=folder_or_uri)
+
+ UploadSpreadsheet = atom.deprecated('Please use Upload instead')(
+ UploadSpreadsheet)
+
+ #@atom.deprecated('Please use Upload instead')
+ def UploadDocument(self, media_source, title, folder_or_uri=None):
+ """Uploads a document inside of a MediaSource object to the Document List
+ feed with the given title.
+
+ This method is deprecated, use Upload instead.
+
+ Args:
+ media_source: MediaSource The gdata.MediaSource object containing a
+ document file to be uploaded.
+ title: string The title of the document on the server after being
+ uploaded.
+ folder_or_uri: DocumentListEntry or string (optional) An object with a
+ link to a folder or a uri to a folder to upload to.
+ Note: A valid uri for a folder is of the form:
+ /feeds/folders/private/full/folder%3Afolder_id
+
+ Returns:
+ A DocumentListEntry containing information about the document created
+ on the Google Documents service.
+ """
+ return self._UploadFile(
+ media_source, title, self._MakeKindCategory(DOCUMENT_LABEL),
+ folder_or_uri=folder_or_uri)
+
+ UploadDocument = atom.deprecated('Please use Upload instead')(
+ UploadDocument)
+
+ """Calling any of these functions is the same as calling Export"""
+ DownloadDocument = atom.deprecated('Please use Export instead')(Export)
+ DownloadPresentation = atom.deprecated('Please use Export instead')(Export)
+ DownloadSpreadsheet = atom.deprecated('Please use Export instead')(Export)
+
+ """Calling any of these functions is the same as calling MoveIntoFolder"""
+ MoveDocumentIntoFolder = atom.deprecated(
+ 'Please use MoveIntoFolder instead')(MoveIntoFolder)
+ MovePresentationIntoFolder = atom.deprecated(
+ 'Please use MoveIntoFolder instead')(MoveIntoFolder)
+ MoveSpreadsheetIntoFolder = atom.deprecated(
+ 'Please use MoveIntoFolder instead')(MoveIntoFolder)
+ MoveFolderIntoFolder = atom.deprecated(
+ 'Please use MoveIntoFolder instead')(MoveIntoFolder)
+
+
+class DocumentQuery(gdata.service.Query):
+
+ """Object used to construct a URI to query the Google Document List feed"""
+
+ def __init__(self, feed='/feeds/documents', visibility='private',
+ projection='full', text_query=None, params=None,
+ categories=None):
+ """Constructor for Document List Query
+
+ Args:
+ feed: string (optional) The path for the feed. (e.g. '/feeds/documents')
+ visibility: string (optional) The visibility chosen for the current feed.
+ projection: string (optional) The projection chosen for the current feed.
+ text_query: string (optional) The contents of the q query parameter. This
+ string is URL escaped upon conversion to a URI.
+ params: dict (optional) Parameter value string pairs which become URL
+ params when translated to a URI. These parameters are added to
+ the query's items.
+ categories: list (optional) List of category strings which should be
+ included as query categories. See gdata.service.Query for
+ additional documentation.
+
+ Yields:
+ A DocumentQuery object used to construct a URI based on the Document
+ List feed.
+ """
+ self.visibility = visibility
+ self.projection = projection
+ gdata.service.Query.__init__(self, feed, text_query, params, categories)
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI used to retrieve entries from the Document
+ List feed.
+ """
+ old_feed = self.feed
+ self.feed = '/'.join([old_feed, self.visibility, self.projection])
+ new_feed = gdata.service.Query.ToUri(self)
+ self.feed = old_feed
+ return new_feed
+
+ def AddNamedFolder(self, email, folder_name):
+ """Adds a named folder category, qualified by a schema.
+
+ This function lets you query for documents that are contained inside a
+ named folder without fear of collision with other categories.
+
+ Args:
+ email: string The email of the user who owns the folder.
+ folder_name: string The name of the folder.
+
+ Returns:
+ The string of the category that was added to the object.
+ """
+
+ category = '{%s%s}%s' % (FOLDERS_SCHEME_PREFIX, email, folder_name)
+ self.categories.append(category)
+ return category
+
+ def RemoveNamedFolder(self, email, folder_name):
+ """Removes a named folder category, qualified by a schema.
+
+ Args:
+ email: string The email of the user who owns the folder.
+ folder_name: string The name of the folder.
+
+ Returns:
+ The string of the category that was removed from the object.
+ """
+ category = '{%s%s}%s' % (FOLDERS_SCHEME_PREFIX, email, folder_name)
+ self.categories.remove(category)
+ return category
+
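+# Illustrative sketch: building a query URI for documents in a named folder.
+# The email, folder name, and search text are assumed placeholders; the
+# resulting uri can then be handed to the service's feed-query methods.
+#
+#   q = DocumentQuery(text_query='budget')
+#   q.AddNamedFolder('user@example.com', 'Reports')
+#   uri = q.ToUri()   # '/feeds/documents/private/full/...' with q=budget
+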
+
+class DocumentAclQuery(gdata.service.Query):
+
+ """Object used to construct a URI to query a Document's ACL feed"""
+
+ def __init__(self, resource_id, feed='/feeds/acl/private/full'):
+ """Constructor for Document ACL Query
+
+ Args:
+ resource_id: string The resource id. (e.g. 'document%3Adocument_id',
+ 'spreadsheet%3Aspreadsheet_id', etc.)
+ feed: string (optional) The path for the feed.
+ (e.g. '/feeds/acl/private/full')
+
+ Yields:
+ A DocumentAclQuery object used to construct a URI based on the Document
+ ACL feed.
+ """
+ self.resource_id = resource_id
+ gdata.service.Query.__init__(self, feed)
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI used to retrieve entries from the Document
+ ACL feed.
+ """
+ return '%s/%s' % (gdata.service.Query.ToUri(self), self.resource_id)
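+
+# Illustrative sketch: building the ACL feed URI for one document. The
+# resource id below is a placeholder.
+#
+#   acl_query = DocumentAclQuery('document%3Adocument_id')
+#   uri = acl_query.ToUri()   # roughly '/feeds/acl/private/full/document%3Adocument_id'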
diff --git a/python/gdata/dublincore/__init__.py b/python/gdata/dublincore/__init__.py
new file mode 100644
index 0000000..22071f7
--- /dev/null
+++ b/python/gdata/dublincore/__init__.py
@@ -0,0 +1,15 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/python/gdata/dublincore/data.py b/python/gdata/dublincore/data.py
new file mode 100644
index 0000000..c6345c1
--- /dev/null
+++ b/python/gdata/dublincore/data.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the data classes of the Dublin Core Metadata Initiative (DCMI) Extension"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+
+
+DC_TEMPLATE = '{http://purl.org/dc/terms/}%s'
+
+
+class Creator(atom.core.XmlElement):
+ """Entity primarily responsible for making the resource."""
+ _qname = DC_TEMPLATE % 'creator'
+
+
+class Date(atom.core.XmlElement):
+ """Point or period of time associated with an event in the lifecycle of the resource."""
+ _qname = DC_TEMPLATE % 'date'
+
+
+class Description(atom.core.XmlElement):
+ """Account of the resource."""
+ _qname = DC_TEMPLATE % 'description'
+
+
+class Format(atom.core.XmlElement):
+ """File format, physical medium, or dimensions of the resource."""
+ _qname = DC_TEMPLATE % 'format'
+
+
+class Identifier(atom.core.XmlElement):
+ """An unambiguous reference to the resource within a given context."""
+ _qname = DC_TEMPLATE % 'identifier'
+
+
+class Language(atom.core.XmlElement):
+ """Language of the resource."""
+ _qname = DC_TEMPLATE % 'language'
+
+
+class Publisher(atom.core.XmlElement):
+ """Entity responsible for making the resource available."""
+ _qname = DC_TEMPLATE % 'publisher'
+
+
+class Rights(atom.core.XmlElement):
+ """Information about rights held in and over the resource."""
+ _qname = DC_TEMPLATE % 'rights'
+
+
+class Subject(atom.core.XmlElement):
+ """Topic of the resource."""
+ _qname = DC_TEMPLATE % 'subject'
+
+
+class Title(atom.core.XmlElement):
+ """Name given to the resource."""
+ _qname = DC_TEMPLATE % 'title'
+
+
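+# Illustrative sketch: these classes are plain atom.core.XmlElement subclasses,
+# so they can be constructed and serialized directly. to_string() is assumed
+# to be inherited from atom.core.XmlElement.
+#
+#   title = Title(text='An Example Resource')
+#   creator = Creator(text='Jane Doe')
+#   print title.to_string()   # serialized dc:title element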
diff --git a/python/gdata/exif/__init__.py b/python/gdata/exif/__init__.py
new file mode 100644
index 0000000..7f1f9c2
--- /dev/null
+++ b/python/gdata/exif/__init__.py
@@ -0,0 +1,217 @@
+# -*-*- encoding: utf-8 -*-*-
+#
+# This is gdata.photos.exif, implementing the exif namespace in gdata
+#
+# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
+#
+# Copyright 2007 Håvard Gulldahl
+# Portions copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module maps elements from the {EXIF} namespace[1] to GData objects.
+These elements describe image data, using exif attributes[2].
+
+Picasa Web Albums uses the exif namespace to represent Exif data encoded
+in a photo [3].
+
+Picasa Web Albums uses the following exif elements:
+exif:distance
+exif:exposure
+exif:flash
+exif:focallength
+exif:fstop
+exif:imageUniqueID
+exif:iso
+exif:make
+exif:model
+exif:tags
+exif:time
+
+[1]: http://schemas.google.com/photos/exif/2007.
+[2]: http://en.wikipedia.org/wiki/Exif
+[3]: http://code.google.com/apis/picasaweb/reference.html#exif_reference
+"""
+
+
+__author__ = u'havard@gulldahl.no' # (Håvard Gulldahl) #BUG: pydoc chokes on non-ascii chars in __author__
+__license__ = 'Apache License v2'
+
+
+import atom
+import gdata
+
+EXIF_NAMESPACE = 'http://schemas.google.com/photos/exif/2007'
+
+class ExifBaseElement(atom.AtomBase):
+ """Base class for elements in the EXIF_NAMESPACE (%s). To add new elements, you only need to add the element tag name to self._tag
+ """ % EXIF_NAMESPACE
+
+ _tag = ''
+ _namespace = EXIF_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, name=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+class Distance(ExifBaseElement):
+ "(float) The distance to the subject, e.g. 0.0"
+
+ _tag = 'distance'
+def DistanceFromString(xml_string):
+ return atom.CreateClassFromXMLString(Distance, xml_string)
+
+class Exposure(ExifBaseElement):
+ "(float) The exposure time used, e.g. 0.025 or 8.0E4"
+
+ _tag = 'exposure'
+def ExposureFromString(xml_string):
+ return atom.CreateClassFromXMLString(Exposure, xml_string)
+
+class Flash(ExifBaseElement):
+ """(string) Boolean value indicating whether the flash was used.
+ The .text attribute will either be `true' or `false'
+
+ As a convenience, truth-testing this object returns what you want,
+ so you can say:
+
+ flash_used = bool(flash)
+
+ """
+
+ _tag = 'flash'
+ def __bool__(self):
+ if self.text.lower() in ('true','false'):
+ return self.text.lower() == 'true'
+ # Python 2 uses __nonzero__ for truth testing, so keep both names bound.
+ __nonzero__ = __bool__
+def FlashFromString(xml_string):
+ return atom.CreateClassFromXMLString(Flash, xml_string)
+
+class Focallength(ExifBaseElement):
+ "(float) The focal length used, e.g. 23.7"
+
+ _tag = 'focallength'
+def FocallengthFromString(xml_string):
+ return atom.CreateClassFromXMLString(Focallength, xml_string)
+
+class Fstop(ExifBaseElement):
+ "(float) The fstop value used, e.g. 5.0"
+
+ _tag = 'fstop'
+def FstopFromString(xml_string):
+ return atom.CreateClassFromXMLString(Fstop, xml_string)
+
+class ImageUniqueID(ExifBaseElement):
+ "(string) The unique image ID for the photo. Generated by Google Photo servers"
+
+ _tag = 'imageUniqueID'
+def ImageUniqueIDFromString(xml_string):
+ return atom.CreateClassFromXMLString(ImageUniqueID, xml_string)
+
+class Iso(ExifBaseElement):
+ "(int) The iso equivalent value used, e.g. 200"
+
+ _tag = 'iso'
+def IsoFromString(xml_string):
+ return atom.CreateClassFromXMLString(Iso, xml_string)
+
+class Make(ExifBaseElement):
+ "(string) The make of the camera used, e.g. Fictitious Camera Company"
+
+ _tag = 'make'
+def MakeFromString(xml_string):
+ return atom.CreateClassFromXMLString(Make, xml_string)
+
+class Model(ExifBaseElement):
+ "(string) The model of the camera used,e.g AMAZING-100D"
+
+ _tag = 'model'
+def ModelFromString(xml_string):
+ return atom.CreateClassFromXMLString(Model, xml_string)
+
+class Time(ExifBaseElement):
+ """(int) The date/time the photo was taken, e.g. 1180294337000.
+ Represented as the number of milliseconds since January 1st, 1970.
+
+ The value of this element will always be identical to the value
+ of the <gphoto:timestamp> element.
+
+ Look at this object's .isoformat() for a human friendly datetime string:
+
+ photo_epoch = Time.text # 1180294337000
+ photo_isostring = Time.isoformat() # '2007-05-27T19:32:17.000Z'
+
+ Alternatively:
+ photo_datetime = Time.datetime() # (requires python >= 2.3)
+ """
+
+ _tag = 'time'
+ def isoformat(self):
+ """(string) Return the timestamp as a ISO 8601 formatted string,
+ e.g. '2007-05-27T19:32:17.000Z'
+ """
+ import time
+ epoch = float(self.text)/1000
+ return time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(epoch))
+
+ def datetime(self):
+ """(datetime.datetime) Return the timestamp as a datetime.datetime object
+
+ Requires python 2.3
+ """
+ import datetime
+ epoch = float(self.text)/1000
+ return datetime.datetime.fromtimestamp(epoch)
+
+def TimeFromString(xml_string):
+ return atom.CreateClassFromXMLString(Time, xml_string)
+
+class Tags(ExifBaseElement):
+ """The container for all exif elements.
+ The <exif:tags> element can appear as a child of a photo entry.
+ """
+
+ _tag = 'tags'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}fstop' % EXIF_NAMESPACE] = ('fstop', Fstop)
+ _children['{%s}make' % EXIF_NAMESPACE] = ('make', Make)
+ _children['{%s}model' % EXIF_NAMESPACE] = ('model', Model)
+ _children['{%s}distance' % EXIF_NAMESPACE] = ('distance', Distance)
+ _children['{%s}exposure' % EXIF_NAMESPACE] = ('exposure', Exposure)
+ _children['{%s}flash' % EXIF_NAMESPACE] = ('flash', Flash)
+ _children['{%s}focallength' % EXIF_NAMESPACE] = ('focallength', Focallength)
+ _children['{%s}iso' % EXIF_NAMESPACE] = ('iso', Iso)
+ _children['{%s}time' % EXIF_NAMESPACE] = ('time', Time)
+ _children['{%s}imageUniqueID' % EXIF_NAMESPACE] = ('imageUniqueID', ImageUniqueID)
+
+ def __init__(self, extension_elements=None, extension_attributes=None, text=None):
+ ExifBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.fstop=None
+ self.make=None
+ self.model=None
+ self.distance=None
+ self.exposure=None
+ self.flash=None
+ self.focallength=None
+ self.iso=None
+ self.time=None
+ self.imageUniqueID=None
+def TagsFromString(xml_string):
+ return atom.CreateClassFromXMLString(Tags, xml_string)
+
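+# Illustrative sketch: parsing an exif:tags block and reading values from it.
+# The XML below is a hand-written example, not server output.
+#
+#   xml = ('<exif:tags xmlns:exif="http://schemas.google.com/photos/exif/2007">'
+#          '<exif:fstop>5.0</exif:fstop>'
+#          '<exif:time>1180294337000</exif:time>'
+#          '</exif:tags>')
+#   tags = TagsFromString(xml)
+#   print tags.fstop.text        # '5.0'
+#   print tags.time.isoformat()  # '2007-05-27T19:32:17.000Z'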
diff --git a/python/gdata/finance/__init__.py b/python/gdata/finance/__init__.py
new file mode 100644
index 0000000..28ab898
--- /dev/null
+++ b/python/gdata/finance/__init__.py
@@ -0,0 +1,486 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Tan Swee Heng
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains extensions to Atom objects used with Google Finance."""
+
+
+__author__ = 'thesweeheng@gmail.com'
+
+
+import atom
+import gdata
+
+
+GD_NAMESPACE = 'http://schemas.google.com/g/2005'
+GF_NAMESPACE = 'http://schemas.google.com/finance/2007'
+
+
+class Money(atom.AtomBase):
+ """The element."""
+ _tag = 'money'
+ _namespace = GD_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['amount'] = 'amount'
+ _attributes['currencyCode'] = 'currency_code'
+
+ def __init__(self, amount=None, currency_code=None, **kwargs):
+ self.amount = amount
+ self.currency_code = currency_code
+ atom.AtomBase.__init__(self, **kwargs)
+
+ def __str__(self):
+ return "%s %s" % (self.amount, self.currency_code)
+
+
+def MoneyFromString(xml_string):
+ return atom.CreateClassFromXMLString(Money, xml_string)
+
+
+class _Monies(atom.AtomBase):
+ """An element containing multiple in multiple currencies."""
+ _namespace = GF_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}money' % GD_NAMESPACE] = ('money', [Money])
+
+ def __init__(self, money=None, **kwargs):
+ self.money = money or []
+ atom.AtomBase.__init__(self, **kwargs)
+
+ def __str__(self):
+ return " / ".join(["%s" % i for i in self.money])
+
+
+class CostBasis(_Monies):
+ """The element."""
+ _tag = 'costBasis'
+
+
+def CostBasisFromString(xml_string):
+ return atom.CreateClassFromXMLString(CostBasis, xml_string)
+
+
+class DaysGain(_Monies):
+ """The element."""
+ _tag = 'daysGain'
+
+
+def DaysGainFromString(xml_string):
+ return atom.CreateClassFromXMLString(DaysGain, xml_string)
+
+
+class Gain(_Monies):
+ """The element."""
+ _tag = 'gain'
+
+
+def GainFromString(xml_string):
+ return atom.CreateClassFromXMLString(Gain, xml_string)
+
+
+class MarketValue(_Monies):
+ """The element."""
+ _tag = 'gain'
+ _tag = 'marketValue'
+
+
+def MarketValueFromString(xml_string):
+ return atom.CreateClassFromXMLString(MarketValue, xml_string)
+
+
+class Commission(_Monies):
+ """The element."""
+ _tag = 'commission'
+
+
+def CommissionFromString(xml_string):
+ return atom.CreateClassFromXMLString(Commission, xml_string)
+
+
+class Price(_Monies):
+ """The element."""
+ _tag = 'price'
+
+
+def PriceFromString(xml_string):
+ return atom.CreateClassFromXMLString(Price, xml_string)
+
+
+class Symbol(atom.AtomBase):
+ """The element."""
+ _tag = 'symbol'
+ _namespace = GF_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['fullName'] = 'full_name'
+ _attributes['exchange'] = 'exchange'
+ _attributes['symbol'] = 'symbol'
+
+ def __init__(self, full_name=None, exchange=None, symbol=None, **kwargs):
+ self.full_name = full_name
+ self.exchange = exchange
+ self.symbol = symbol
+ atom.AtomBase.__init__(self, **kwargs)
+
+ def __str__(self):
+ return "%s:%s (%s)" % (self.exchange, self.symbol, self.full_name)
+
+
+def SymbolFromString(xml_string):
+ return atom.CreateClassFromXMLString(Symbol, xml_string)
+
+
+class TransactionData(atom.AtomBase):
+ """The element."""
+ _tag = 'transactionData'
+ _namespace = GF_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['type'] = 'type'
+ _attributes['date'] = 'date'
+ _attributes['shares'] = 'shares'
+ _attributes['notes'] = 'notes'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}commission' % GF_NAMESPACE] = ('commission', Commission)
+ _children['{%s}price' % GF_NAMESPACE] = ('price', Price)
+
+ def __init__(self, type=None, date=None, shares=None,
+ notes=None, commission=None, price=None, **kwargs):
+ self.type = type
+ self.date = date
+ self.shares = shares
+ self.notes = notes
+ self.commission = commission
+ self.price = price
+ atom.AtomBase.__init__(self, **kwargs)
+
+
+def TransactionDataFromString(xml_string):
+ return atom.CreateClassFromXMLString(TransactionData, xml_string)
+
+
+class TransactionEntry(gdata.GDataEntry):
+ """An entry of the transaction feed.
+
+ A TransactionEntry contains TransactionData such as the transaction
+ type (Buy, Sell, Sell Short, or Buy to Cover), the number of units,
+ the date, the price, any commission, and any notes.
+ """
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _children['{%s}transactionData' % GF_NAMESPACE] = (
+ 'transaction_data', TransactionData)
+
+ def __init__(self, transaction_data=None, **kwargs):
+ self.transaction_data = transaction_data
+ gdata.GDataEntry.__init__(self, **kwargs)
+
+ def transaction_id(self):
+ return self.id.text.split("/")[-1]
+
+ transaction_id = property(transaction_id, doc='The transaction ID.')
+
+
+def TransactionEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(TransactionEntry, xml_string)
+
+
+class TransactionFeed(gdata.GDataFeed):
+ """A feed that lists all of the transactions that have been recorded for
+ a particular position.
+
+ A transaction is a collection of information about an instance of
+ buying or selling a particular security. The TransactionFeed lists all
+ of the transactions that have been recorded for a particular position
+ as a list of TransactionEntries.
+ """
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [TransactionEntry])
+
+
+def TransactionFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(TransactionFeed, xml_string)
+
+
+class TransactionFeedLink(atom.AtomBase):
+ """Link to TransactionFeed embedded in PositionEntry.
+
+ If a PositionFeed is queried with transactions='true', TransactionFeeds
+ are inlined in the returned PositionEntries. These TransactionFeeds are
+ accessible via TransactionFeedLink's feed attribute.
+ """
+ _tag = 'feedLink'
+ _namespace = GD_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['href'] = 'href'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}feed' % atom.ATOM_NAMESPACE] = (
+ 'feed', TransactionFeed)
+
+ def __init__(self, href=None, feed=None, **kwargs):
+ self.href = href
+ self.feed = feed
+ atom.AtomBase.__init__(self, **kwargs)
+
+
+class PositionData(atom.AtomBase):
+ """The element."""
+ _tag = 'positionData'
+ _namespace = GF_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['gainPercentage'] = 'gain_percentage'
+ _attributes['return1w'] = 'return1w'
+ _attributes['return4w'] = 'return4w'
+ _attributes['return3m'] = 'return3m'
+ _attributes['returnYTD'] = 'returnYTD'
+ _attributes['return1y'] = 'return1y'
+ _attributes['return3y'] = 'return3y'
+ _attributes['return5y'] = 'return5y'
+ _attributes['returnOverall'] = 'return_overall'
+ _attributes['shares'] = 'shares'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}costBasis' % GF_NAMESPACE] = ('cost_basis', CostBasis)
+ _children['{%s}daysGain' % GF_NAMESPACE] = ('days_gain', DaysGain)
+ _children['{%s}gain' % GF_NAMESPACE] = ('gain', Gain)
+ _children['{%s}marketValue' % GF_NAMESPACE] = ('market_value', MarketValue)
+
+ def __init__(self, gain_percentage=None,
+ return1w=None, return4w=None, return3m=None, returnYTD=None,
+ return1y=None, return3y=None, return5y=None, return_overall=None,
+ shares=None, cost_basis=None, days_gain=None,
+ gain=None, market_value=None, **kwargs):
+ self.gain_percentage = gain_percentage
+ self.return1w = return1w
+ self.return4w = return4w
+ self.return3m = return3m
+ self.returnYTD = returnYTD
+ self.return1y = return1y
+ self.return3y = return3y
+ self.return5y = return5y
+ self.return_overall = return_overall
+ self.shares = shares
+ self.cost_basis = cost_basis
+ self.days_gain = days_gain
+ self.gain = gain
+ self.market_value = market_value
+ atom.AtomBase.__init__(self, **kwargs)
+
+
+def PositionDataFromString(xml_string):
+ return atom.CreateClassFromXMLString(PositionData, xml_string)
+
+
+class PositionEntry(gdata.GDataEntry):
+ """An entry of the position feed.
+
+ A PositionEntry contains the ticker exchange and Symbol for a stock,
+ mutual fund, or other security, along with PositionData such as the
+ number of units of that security that the user holds, and performance
+ statistics.
+ """
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _children['{%s}positionData' % GF_NAMESPACE] = (
+ 'position_data', PositionData)
+ _children['{%s}symbol' % GF_NAMESPACE] = ('symbol', Symbol)
+ _children['{%s}feedLink' % GD_NAMESPACE] = (
+ 'feed_link', TransactionFeedLink)
+
+ def __init__(self, position_data=None, symbol=None, feed_link=None,
+ **kwargs):
+ self.position_data = position_data
+ self.symbol = symbol
+ self.feed_link = feed_link
+ gdata.GDataEntry.__init__(self, **kwargs)
+
+ def position_title(self):
+ return self.title.text
+
+ position_title = property(position_title,
+ doc='The position title as a string (i.e. position.title.text).')
+
+ def ticker_id(self):
+ return self.id.text.split("/")[-1]
+
+ ticker_id = property(ticker_id, doc='The position TICKER ID.')
+
+ def transactions(self):
+ if self.feed_link.feed:
+ return self.feed_link.feed.entry
+ else:
+ return None
+
+ transactions = property(transactions, doc="""
+ Inlined TransactionEntries are returned if PositionFeed is queried
+ with transactions='true'.""")
+
+
+def PositionEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(PositionEntry, xml_string)
+
+
+class PositionFeed(gdata.GDataFeed):
+ """A feed that lists all of the positions in a particular portfolio.
+
+ A position is a collection of information about a security that the
+ user holds. The PositionFeed lists all of the positions in a particular
+ portfolio as a list of PositionEntries.
+ """
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PositionEntry])
+
+
+def PositionFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(PositionFeed, xml_string)
+
+
+class PositionFeedLink(atom.AtomBase):
+ """Link to PositionFeed embedded in PortfolioEntry.
+
+ If a PortfolioFeed is queried with positions='true', the PositionFeeds
+ are inlined in the returned PortfolioEntries. These PositionFeeds are
+ accessible via PositionFeedLink's feed attribute.
+ """
+ _tag = 'feedLink'
+ _namespace = GD_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['href'] = 'href'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}feed' % atom.ATOM_NAMESPACE] = (
+ 'feed', PositionFeed)
+
+ def __init__(self, href=None, feed=None, **kwargs):
+ self.href = href
+ self.feed = feed
+ atom.AtomBase.__init__(self, **kwargs)
+
+
+class PortfolioData(atom.AtomBase):
+ """The element."""
+ _tag = 'portfolioData'
+ _namespace = GF_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['currencyCode'] = 'currency_code'
+ _attributes['gainPercentage'] = 'gain_percentage'
+ _attributes['return1w'] = 'return1w'
+ _attributes['return4w'] = 'return4w'
+ _attributes['return3m'] = 'return3m'
+ _attributes['returnYTD'] = 'returnYTD'
+ _attributes['return1y'] = 'return1y'
+ _attributes['return3y'] = 'return3y'
+ _attributes['return5y'] = 'return5y'
+ _attributes['returnOverall'] = 'return_overall'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}costBasis' % GF_NAMESPACE] = ('cost_basis', CostBasis)
+ _children['{%s}daysGain' % GF_NAMESPACE] = ('days_gain', DaysGain)
+ _children['{%s}gain' % GF_NAMESPACE] = ('gain', Gain)
+ _children['{%s}marketValue' % GF_NAMESPACE] = ('market_value', MarketValue)
+
+ def __init__(self, currency_code=None, gain_percentage=None,
+ return1w=None, return4w=None, return3m=None, returnYTD=None,
+ return1y=None, return3y=None, return5y=None, return_overall=None,
+ cost_basis=None, days_gain=None, gain=None, market_value=None, **kwargs):
+ self.currency_code = currency_code
+ self.gain_percentage = gain_percentage
+ self.return1w = return1w
+ self.return4w = return4w
+ self.return3m = return3m
+ self.returnYTD = returnYTD
+ self.return1y = return1y
+ self.return3y = return3y
+ self.return5y = return5y
+ self.return_overall = return_overall
+ self.cost_basis = cost_basis
+ self.days_gain = days_gain
+ self.gain = gain
+ self.market_value = market_value
+ atom.AtomBase.__init__(self, **kwargs)
+
+
+def PortfolioDataFromString(xml_string):
+ return atom.CreateClassFromXMLString(PortfolioData, xml_string)
+
+
+class PortfolioEntry(gdata.GDataEntry):
+ """An entry of the PortfolioFeed.
+
+ A PortfolioEntry contains the portfolio's title along with PortfolioData
+ such as currency, total market value, and overall performance statistics.
+ """
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _children['{%s}portfolioData' % GF_NAMESPACE] = (
+ 'portfolio_data', PortfolioData)
+ _children['{%s}feedLink' % GD_NAMESPACE] = (
+ 'feed_link', PositionFeedLink)
+
+ def __init__(self, portfolio_data=None, feed_link=None, **kwargs):
+ self.portfolio_data = portfolio_data
+ self.feed_link = feed_link
+ gdata.GDataEntry.__init__(self, **kwargs)
+
+ def portfolio_title(self):
+ return self.title.text
+
+ def set_portfolio_title(self, portfolio_title):
+ self.title = atom.Title(text=portfolio_title, title_type='text')
+
+ portfolio_title = property(portfolio_title, set_portfolio_title,
+ doc='The portfolio title as a string (i.e. portfolio.title.text).')
+
+ def portfolio_id(self):
+ return self.id.text.split("/")[-1]
+
+ portfolio_id = property(portfolio_id,
+ doc='The portfolio ID. Do not confuse with portfolio.id.')
+
+ def positions(self):
+ if self.feed_link.feed:
+ return self.feed_link.feed.entry
+ else:
+ return None
+
+ positions = property(positions, doc="""
+ Inlined PositionEntries are returned if PortfolioFeed was queried
+ with positions='true'.""")
+
+
+def PortfolioEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(PortfolioEntry, xml_string)
+
+
+class PortfolioFeed(gdata.GDataFeed):
+ """A feed that lists all of the user's portfolios.
+
+ A portfolio is a collection of positions that the user holds in various
+ securities, plus metadata. The PortfolioFeed lists all of the user's
+ portfolios as a list of PortfolioEntries.
+ """
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PortfolioEntry])
+
+
+def PortfolioFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(PortfolioFeed, xml_string)
+
+
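+# Illustrative sketch: assembling a Buy transaction entry from the classes
+# above. The amounts, date, and share count are placeholders.
+#
+#   price = Price(money=[Money(amount='141.10', currency_code='USD')])
+#   data = TransactionData(type='Buy', date='2009-06-25T00:00:00.000',
+#                          shares='10', notes='example only', price=price)
+#   entry = TransactionEntry(transaction_data=data)
+#   # entry can now be passed to FinanceService.AddTransaction().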
diff --git a/python/gdata/finance/data.py b/python/gdata/finance/data.py
new file mode 100644
index 0000000..5e0caa8
--- /dev/null
+++ b/python/gdata/finance/data.py
@@ -0,0 +1,156 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains the data classes of the Google Finance Portfolio Data API"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.data
+import gdata.opensearch.data
+
+
+GF_TEMPLATE = '{http://schemas.google.com/finance/2007/}%s'
+
+
+class Commission(atom.core.XmlElement):
+ """Commission for the transaction"""
+ _qname = GF_TEMPLATE % 'commission'
+ money = [gdata.data.Money]
+
+
+class CostBasis(atom.core.XmlElement):
+ """Cost basis for the portfolio or position"""
+ _qname = GF_TEMPLATE % 'costBasis'
+ money = [gdata.data.Money]
+
+
+class DaysGain(atom.core.XmlElement):
+ """Today's gain for the portfolio or position"""
+ _qname = GF_TEMPLATE % 'daysGain'
+ money = [gdata.data.Money]
+
+
+class Gain(atom.core.XmlElement):
+ """Total gain for the portfolio or position"""
+ _qname = GF_TEMPLATE % 'gain'
+ money = [gdata.data.Money]
+
+
+class MarketValue(atom.core.XmlElement):
+ """Market value for the portfolio or position"""
+ _qname = GF_TEMPLATE % 'marketValue'
+ money = [gdata.data.Money]
+
+
+class PortfolioData(atom.core.XmlElement):
+ """Data for the portfolio"""
+ _qname = GF_TEMPLATE % 'portfolioData'
+ return_overall = 'returnOverall'
+ currency_code = 'currencyCode'
+ return3y = 'return3y'
+ return4w = 'return4w'
+ market_value = MarketValue
+ return_y_t_d = 'returnYTD'
+ cost_basis = CostBasis
+ gain_percentage = 'gainPercentage'
+ days_gain = DaysGain
+ return3m = 'return3m'
+ return5y = 'return5y'
+ return1w = 'return1w'
+ gain = Gain
+ return1y = 'return1y'
+
+
+class PortfolioEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of Finance portfolios"""
+ portfolio_data = PortfolioData
+
+
+class PortfolioFeed(gdata.data.GDFeed):
+ """Describes a Finance portfolio feed"""
+ entry = [PortfolioEntry]
+
+
+class PositionData(atom.core.XmlElement):
+ """Data for the position"""
+ _qname = GF_TEMPLATE % 'positionData'
+ return_y_t_d = 'returnYTD'
+ return5y = 'return5y'
+ return_overall = 'returnOverall'
+ cost_basis = CostBasis
+ return3y = 'return3y'
+ return1y = 'return1y'
+ return4w = 'return4w'
+ shares = 'shares'
+ days_gain = DaysGain
+ gain_percentage = 'gainPercentage'
+ market_value = MarketValue
+ gain = Gain
+ return3m = 'return3m'
+ return1w = 'return1w'
+
+
+class Price(atom.core.XmlElement):
+ """Price of the transaction"""
+ _qname = GF_TEMPLATE % 'price'
+ money = [gdata.data.Money]
+
+
+class Symbol(atom.core.XmlElement):
+ """Stock symbol for the company"""
+ _qname = GF_TEMPLATE % 'symbol'
+ symbol = 'symbol'
+ exchange = 'exchange'
+ full_name = 'fullName'
+
+
+class PositionEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of Finance positions"""
+ symbol = Symbol
+ position_data = PositionData
+
+
+class PositionFeed(gdata.data.GDFeed):
+ """Describes a Finance position feed"""
+ entry = [PositionEntry]
+
+
+class TransactionData(atom.core.XmlElement):
+ """Data for the transction"""
+ _qname = GF_TEMPLATE % 'transactionData'
+ shares = 'shares'
+ notes = 'notes'
+ date = 'date'
+ type = 'type'
+ commission = Commission
+ price = Price
+
+
+class TransactionEntry(gdata.data.GDEntry):
+ """Describes an entry in a feed of Finance transactions"""
+ transaction_data = TransactionData
+
+
+class TransactionFeed(gdata.data.GDFeed):
+ """Describes a Finance transaction feed"""
+ entry = [TransactionEntry]
+
+
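+# Illustrative sketch: these v2-style declarative classes are meant to be
+# filled by parsing feed XML, e.g. with atom.core.parse (assumed helper);
+# xml_string below is a placeholder for a portfolio feed document.
+#
+#   import atom.core
+#   feed = atom.core.parse(xml_string, PortfolioFeed)
+#   for entry in feed.entry:
+#     print entry.portfolio_data.currency_code, entry.portfolio_data.gain_percentage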
diff --git a/python/gdata/finance/service.py b/python/gdata/finance/service.py
new file mode 100644
index 0000000..6e3eb86
--- /dev/null
+++ b/python/gdata/finance/service.py
@@ -0,0 +1,243 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Tan Swee Heng
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Classes to interact with the Google Finance server."""
+
+
+__author__ = 'thesweeheng@gmail.com'
+
+
+import gdata.service
+import gdata.finance
+import atom
+
+
+class PortfolioQuery(gdata.service.Query):
+ """A query object for the list of a user's portfolios."""
+
+ def returns(self):
+ return self.get('returns', False)
+
+ def set_returns(self, value):
+ if value == 'true' or value is True:
+ self['returns'] = 'true'
+
+ returns = property(returns, set_returns, doc="The returns query parameter")
+
+ def positions(self):
+ return self.get('positions', False)
+
+ def set_positions(self, value):
+ if value == 'true' or value is True:
+ self['positions'] = 'true'
+
+ positions = property(positions, set_positions,
+ doc="The positions query parameter")
+
+
+class PositionQuery(gdata.service.Query):
+ """A query object for the list of a user's positions in a portfolio."""
+
+ def returns(self):
+ return self.get('returns', False)
+
+ def set_returns(self, value):
+ if value == 'true' or value is True:
+ self['returns'] = 'true'
+
+ returns = property(returns, set_returns,
+ doc="The returns query parameter")
+
+ def transactions(self):
+ return self.get('transactions', False)
+
+ def set_transactions(self, value):
+ if value == 'true' or value is True:
+ self['transactions'] = 'true'
+
+ transactions = property(transactions, set_transactions,
+ doc="The transactions query parameter")
+
+
+class FinanceService(gdata.service.GDataService):
+
+ def __init__(self, email=None, password=None, source=None,
+ server='finance.google.com', **kwargs):
+ """Creates a client for the Finance service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'finance.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(self,
+ email=email, password=password, service='finance', server=server,
+ **kwargs)
+
+ def GetPortfolioFeed(self, query=None):
+ uri = '/finance/feeds/default/portfolios'
+ if query:
+ uri = PortfolioQuery(feed=uri, params=query).ToUri()
+ return self.Get(uri, converter=gdata.finance.PortfolioFeedFromString)
+
+ def GetPositionFeed(self, portfolio_entry=None, portfolio_id=None,
+ query=None):
+ """
+ Args:
+ portfolio_entry: PortfolioEntry (optional; see Notes)
+ portfolio_id: string (optional; see Notes) This may be obtained
+ from a PortfolioEntry's portfolio_id attribute.
+ query: PortfolioQuery (optional)
+
+ Notes:
+ Either a PortfolioEntry OR a portfolio ID must be provided.
+ """
+ if portfolio_entry:
+ uri = portfolio_entry.GetSelfLink().href + '/positions'
+ elif portfolio_id:
+ uri = '/finance/feeds/default/portfolios/%s/positions' % portfolio_id
+ if query:
+ uri = PositionQuery(feed=uri, params=query).ToUri()
+ return self.Get(uri, converter=gdata.finance.PositionFeedFromString)
+
+ def GetTransactionFeed(self, position_entry=None,
+ portfolio_id=None, ticker_id=None):
+ """
+ Args:
+ position_entry: PositionEntry (optional; see Notes)
+ portfolio_id: string (optional; see Notes) This may be obtained
+ from a PortfolioEntry's portfolio_id attribute.
+ ticker_id: string (optional; see Notes) This may be obtained from
+ a PositionEntry's ticker_id attribute. Alternatively it can
+ be constructed using the security's exchange and symbol,
+ e.g. 'NASDAQ:GOOG'
+
+ Notes:
+ Either a PositionEntry OR (a portfolio ID AND ticker ID) must
+ be provided.
+ """
+ if position_entry:
+ uri = position_entry.GetSelfLink().href + '/transactions'
+ elif portfolio_id and ticker_id:
+ uri = '/finance/feeds/default/portfolios/%s/positions/%s/transactions' \
+ % (portfolio_id, ticker_id)
+ return self.Get(uri, converter=gdata.finance.TransactionFeedFromString)
+
+ def GetPortfolio(self, portfolio_id=None, query=None):
+ uri = '/finance/feeds/default/portfolios/%s' % portfolio_id
+ if query:
+ uri = PortfolioQuery(feed=uri, params=query).ToUri()
+ return self.Get(uri, converter=gdata.finance.PortfolioEntryFromString)
+
+ def AddPortfolio(self, portfolio_entry=None):
+ uri = '/finance/feeds/default/portfolios'
+ return self.Post(portfolio_entry, uri,
+ converter=gdata.finance.PortfolioEntryFromString)
+
+ def UpdatePortfolio(self, portfolio_entry=None):
+ uri = portfolio_entry.GetEditLink().href
+ return self.Put(portfolio_entry, uri,
+ converter=gdata.finance.PortfolioEntryFromString)
+
+ def DeletePortfolio(self, portfolio_entry=None):
+ uri = portfolio_entry.GetEditLink().href
+ return self.Delete(uri)
+
+ def GetPosition(self, portfolio_id=None, ticker_id=None, query=None):
+ uri = '/finance/feeds/default/portfolios/%s/positions/%s' \
+ % (portfolio_id, ticker_id)
+ if query:
+ uri = PositionQuery(feed=uri, params=query).ToUri()
+ return self.Get(uri, converter=gdata.finance.PositionEntryFromString)
+
+ def DeletePosition(self, position_entry=None,
+ portfolio_id=None, ticker_id=None, transaction_feed=None):
+ """A position is deleted by deleting all its transactions.
+
+ Args:
+ position_entry: PositionEntry (optional; see Notes)
+ portfolio_id: string (optional; see Notes) This may be obtained
+ from a PortfolioEntry's portfolio_id attribute.
+ ticker_id: string (optional; see Notes) This may be obtained from
+ a PositionEntry's ticker_id attribute. Alternatively it can
+ be constructed using the security's exchange and symbol,
+ e.g. 'NASDAQ:GOOG'
+ transaction_feed: TransactionFeed (optional; see Notes)
+
+ Notes:
+ Either a PositionEntry OR (a portfolio ID AND ticker ID) OR
+ a TransactionFeed must be provided.
+ """
+ if transaction_feed:
+ feed = transaction_feed
+ else:
+ if position_entry:
+ feed = self.GetTransactionFeed(position_entry=position_entry)
+ elif portfolio_id and ticker_id:
+ feed = self.GetTransactionFeed(
+ portfolio_id=portfolio_id, ticker_id=ticker_id)
+ for txn in feed.entry:
+ self.DeleteTransaction(txn)
+ return True
+
+ def GetTransaction(self, portfolio_id=None, ticker_id=None,
+ transaction_id=None):
+ uri = '/finance/feeds/default/portfolios/%s/positions/%s/transactions/%s' \
+ % (portfolio_id, ticker_id, transaction_id)
+ return self.Get(uri, converter=gdata.finance.TransactionEntryFromString)
+
+ def AddTransaction(self, transaction_entry=None, transaction_feed = None,
+ position_entry=None, portfolio_id=None, ticker_id=None):
+ """
+ Args:
+ transaction_entry: TransactionEntry (required)
+ transaction_feed: TransactionFeed (optional; see Notes)
+ position_entry: PositionEntry (optional; see Notes)
+ portfolio_id: string (optional; see Notes) This may be obtained
+ from a PortfolioEntry's portfolio_id attribute.
+ ticker_id: string (optional; see Notes) This may be obtained from
+ a PositionEntry's ticker_id attribute. Alternatively it can
+ be constructed using the security's exchange and symbol,
+ e.g. 'NASDAQ:GOOG'
+
+ Notes:
+ Either a TransactionFeed OR a PositionEntry OR (a portfolio ID AND
+ ticker ID) must be provided.
+ """
+ if transaction_feed:
+ uri = transaction_feed.GetPostLink().href
+ elif position_entry:
+ uri = position_entry.GetSelfLink().href + '/transactions'
+ elif portfolio_id and ticker_id:
+ uri = '/finance/feeds/default/portfolios/%s/positions/%s/transactions' \
+ % (portfolio_id, ticker_id)
+ return self.Post(transaction_entry, uri,
+ converter=gdata.finance.TransactionEntryFromString)
+
+ def UpdateTransaction(self, transaction_entry=None):
+ uri = transaction_entry.GetEditLink().href
+ return self.Put(transaction_entry, uri,
+ converter=gdata.finance.TransactionEntryFromString)
+
+ def DeleteTransaction(self, transaction_entry=None):
+ uri = transaction_entry.GetEditLink().href
+ return self.Delete(uri)
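+
+
+# Illustrative sketch: listing portfolios and recording a transaction.
+# ClientLogin is assumed to be inherited from gdata.service.GDataService, and
+# the credentials, ids, and values below are placeholders.
+#
+#   import gdata.finance
+#   import gdata.finance.service
+#
+#   client = gdata.finance.service.FinanceService(source='example-finance-app')
+#   client.ClientLogin('user@example.com', 'password')
+#
+#   feed = client.GetPortfolioFeed(query={'returns': 'true'})
+#   for portfolio in feed.entry:
+#     print portfolio.portfolio_title, portfolio.portfolio_id
+#
+#   txn = gdata.finance.TransactionEntry(
+#       transaction_data=gdata.finance.TransactionData(
+#           type='Buy', date='2009-06-25T00:00:00.000', shares='10'))
+#   client.AddTransaction(txn, portfolio_id='1', ticker_id='NASDAQ:GOOG')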
diff --git a/python/gdata/gauth.py b/python/gdata/gauth.py
new file mode 100644
index 0000000..563656c
--- /dev/null
+++ b/python/gdata/gauth.py
@@ -0,0 +1,1306 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+"""Provides auth related token classes and functions for Google Data APIs.
+
+Token classes represent a user's authorization of this app to access their
+data. Usually these are not created directly but by a GDClient object.
+
+ClientLoginToken
+AuthSubToken
+SecureAuthSubToken
+OAuthHmacToken
+OAuthRsaToken
+TwoLeggedOAuthHmacToken
+TwoLeggedOAuthRsaToken
+
+Functions which are often used in application code (as opposed to just within
+the gdata-python-client library) are the following:
+
+generate_auth_sub_url
+authorize_request_token
+
+The following are helper functions which are used to save and load auth token
+objects in the App Engine datastore. These should only be used if you are using
+this library within App Engine:
+
+ae_load
+ae_save
+"""
+
+
+import time
+import random
+import urllib
+import atom.http_core
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+PROGRAMMATIC_AUTH_LABEL = 'GoogleLogin auth='
+AUTHSUB_AUTH_LABEL = 'AuthSub token='
+
+
+# This dict provides the AuthSub and OAuth scopes for all services by service
+# name. The service name (key) is used in ClientLogin requests.
+AUTH_SCOPES = {
+ 'cl': ( # Google Calendar API
+ 'https://www.google.com/calendar/feeds/',
+ 'http://www.google.com/calendar/feeds/'),
+ 'gbase': ( # Google Base API
+ 'http://base.google.com/base/feeds/',
+ 'http://www.google.com/base/feeds/'),
+ 'blogger': ( # Blogger API
+ 'http://www.blogger.com/feeds/',),
+ 'codesearch': ( # Google Code Search API
+ 'http://www.google.com/codesearch/feeds/',),
+ 'cp': ( # Contacts API
+ 'https://www.google.com/m8/feeds/',
+ 'http://www.google.com/m8/feeds/'),
+ 'finance': ( # Google Finance API
+ 'http://finance.google.com/finance/feeds/',),
+ 'health': ( # Google Health API
+ 'https://www.google.com/health/feeds/',),
+ 'writely': ( # Documents List API
+ 'https://docs.google.com/feeds/',
+ 'http://docs.google.com/feeds/'),
+ 'lh2': ( # Picasa Web Albums API
+ 'http://picasaweb.google.com/data/',),
+ 'apps': ( # Google Apps Provisioning API
+ 'http://www.google.com/a/feeds/',
+ 'https://www.google.com/a/feeds/',
+ 'http://apps-apis.google.com/a/feeds/',
+ 'https://apps-apis.google.com/a/feeds/'),
+ 'weaver': ( # Health H9 Sandbox
+ 'https://www.google.com/h9/feeds/',),
+ 'wise': ( # Spreadsheets Data API
+ 'https://spreadsheets.google.com/feeds/',
+ 'http://spreadsheets.google.com/feeds/'),
+ 'sitemaps': ( # Google Webmaster Tools API
+ 'https://www.google.com/webmasters/tools/feeds/',),
+ 'youtube': ( # YouTube API
+ 'http://gdata.youtube.com/feeds/api/',
+ 'http://uploads.gdata.youtube.com/feeds/api',
+ 'http://gdata.youtube.com/action/GetUploadToken'),
+ 'books': ( # Google Books API
+ 'http://www.google.com/books/feeds/',),
+ 'analytics': ( # Google Analytics API
+ 'https://www.google.com/analytics/feeds/',),
+ 'jotspot': ( # Google Sites API
+ 'http://sites.google.com/feeds/',
+ 'https://sites.google.com/feeds/'),
+ 'local': ( # Google Maps Data API
+ 'http://maps.google.com/maps/feeds/',),
+ 'code': ( # Project Hosting Data API
+ 'http://code.google.com/feeds/issues',)}
+
+
+
+class Error(Exception):
+ pass
+
+
+class UnsupportedTokenType(Error):
+ """Raised when token to or from blob is unable to convert the token."""
+ pass
+
+
+# ClientLogin functions and classes.
+def generate_client_login_request_body(email, password, service, source,
+ account_type='HOSTED_OR_GOOGLE', captcha_token=None,
+ captcha_response=None):
+ """Creates the body of the autentication request
+
+ See http://code.google.com/apis/accounts/AuthForInstalledApps.html#Request
+ for more details.
+
+ Args:
+ email: str
+ password: str
+ service: str
+ source: str
+ account_type: str (optional) Default is 'HOSTED_OR_GOOGLE'; other valid
+ values are 'GOOGLE' and 'HOSTED'
+ captcha_token: str (optional)
+ captcha_response: str (optional)
+
+ Returns:
+ The HTTP body to send in a request for a client login token.
+ """
+ # Create a POST body containing the user's credentials.
+ request_fields = {'Email': email,
+ 'Passwd': password,
+ 'accountType': account_type,
+ 'service': service,
+ 'source': source}
+ if captcha_token and captcha_response:
+ # Send the captcha token and response as part of the POST body if the
+ # user is responding to a captcha challenge.
+ request_fields['logintoken'] = captcha_token
+ request_fields['logincaptcha'] = captcha_response
+ return urllib.urlencode(request_fields)
+
+
+GenerateClientLoginRequestBody = generate_client_login_request_body
+
+
+def get_client_login_token_string(http_body):
+ """Returns the token value for a ClientLoginToken.
+
+ Reads the token from the server's response to a Client Login request and
+ creates the token value string to use in requests.
+
+ Args:
+ http_body: str The body of the server's HTTP response to a Client Login
+ request
+
+ Returns:
+ The token value string for a ClientLoginToken.
+ """
+ for response_line in http_body.splitlines():
+ if response_line.startswith('Auth='):
+ # Strip off the leading Auth= and return the Authorization value.
+ return response_line[5:]
+ return None
+
+
+GetClientLoginTokenString = get_client_login_token_string
+
+
+def get_captcha_challenge(http_body,
+ captcha_base_url='http://www.google.com/accounts/'):
+ """Returns the URL and token for a CAPTCHA challenge issued by the server.
+
+ Args:
+ http_body: str The body of the HTTP response from the server which
+ contains the CAPTCHA challenge.
+ captcha_base_url: str This function returns a full URL for viewing the
+ challenge image which is built from the server's response. This
+ base_url is used as the beginning of the URL because the server
+ only provides the end of the URL. For example the server provides
+ 'Captcha?ctoken=Hi...N' and the URL for the image is
+ 'http://www.google.com/accounts/Captcha?ctoken=Hi...N'
+
+ Returns:
+ A dictionary containing the information needed to respond to the CAPTCHA
+ challenge, the image URL and the ID token of the challenge. The
+ dictionary is in the form:
+ {'token': string identifying the CAPTCHA image,
+ 'url': string containing the URL of the image}
+ Returns None if there was no CAPTCHA challenge in the response.
+ """
+ contains_captcha_challenge = False
+ captcha_parameters = {}
+ for response_line in http_body.splitlines():
+ if response_line.startswith('Error=CaptchaRequired'):
+ contains_captcha_challenge = True
+ elif response_line.startswith('CaptchaToken='):
+ # Strip off the leading CaptchaToken=
+ captcha_parameters['token'] = response_line[13:]
+ elif response_line.startswith('CaptchaUrl='):
+ captcha_parameters['url'] = '%s%s' % (captcha_base_url,
+ response_line[11:])
+ if contains_captcha_challenge:
+ return captcha_parameters
+ else:
+ return None
+
+
+GetCaptchaChallenge = get_captcha_challenge
+
+
+class ClientLoginToken(object):
+
+ def __init__(self, token_string):
+ self.token_string = token_string
+
+ def modify_request(self, http_request):
+ http_request.headers['Authorization'] = '%s%s' % (PROGRAMMATIC_AUTH_LABEL,
+ self.token_string)
+
+ ModifyRequest = modify_request
+
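+# Illustrative sketch: a typical ClientLogin round trip using the helpers in
+# this module. The response body below is a made-up example; issuing the HTTP
+# POST itself is outside the scope of this module.
+#
+#   body = generate_client_login_request_body(
+#       'user@example.com', 'password', 'finance', 'example-app')
+#   # POST body to https://www.google.com/accounts/ClientLogin, then:
+#   response_body = 'SID=...\nLSID=...\nAuth=DQAAA...'
+#   token = ClientLoginToken(get_client_login_token_string(response_body))
+#   # If the response contains Error=CaptchaRequired instead,
+#   # get_captcha_challenge(response_body) returns the image URL and token
+#   # needed to present the challenge to the user.
+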
+
+# AuthSub functions and classes.
+def _to_uri(str_or_uri):
+ if isinstance(str_or_uri, (str, unicode)):
+ return atom.http_core.Uri.parse_uri(str_or_uri)
+ return str_or_uri
+
+
+def generate_auth_sub_url(next, scopes, secure=False, session=True,
+ request_url=atom.http_core.parse_uri(
+ 'https://www.google.com/accounts/AuthSubRequest'),
+ domain='default', scopes_param_prefix='auth_sub_scopes'):
+ """Constructs a URI for requesting a multiscope AuthSub token.
+
+ The generated token will contain a URL parameter to pass along the
+ requested scopes to the next URL. When the Google Accounts page
+ redirects the browser to the 'next' URL, it appends the single-use
+ AuthSub token value to the URL as a URL parameter with the key 'token'.
+ However, the information about which scopes were requested is not
+ included by Google Accounts. This method adds the scopes to the next
+ URL before making the request, so the page the browser is redirected to
+ receives both the token value and the list of scopes for which the token
+ was requested.
+
+ Args:
+ next: atom.http_core.Uri or string The URL user will be sent to after
+ authorizing this web application to access their data.
+ scopes: list containing strings or atom.http_core.Uri objects. The URLs
+ of the services to be accessed. Could also be a single string
+ or single atom.http_core.Uri for requesting just one scope.
+ secure: boolean (optional) Determines whether or not the issued token
+ is a secure token.
+ session: boolean (optional) Determines whether or not the issued token
+ can be upgraded to a session token.
+ request_url: atom.http_core.Uri or str The beginning of the request URL.
+ This is normally
+ 'http://www.google.com/accounts/AuthSubRequest' or
+ '/accounts/AuthSubRequest'
+ domain: The domain which the account is part of. This is used for Google
+ Apps accounts, the default value is 'default' which means that
+ the requested account is a Google Account (@gmail.com for
+ example)
+ scopes_param_prefix: str (optional) The requested scopes are added as a
+ URL parameter to the next URL so that the page at
+ the 'next' URL can extract the token value and the
+ valid scopes from the URL. The key for the URL
+ parameter defaults to 'auth_sub_scopes'
+
+ Returns:
+ An atom.http_core.Uri which the user's browser should be directed to in
+ order to authorize this application to access their information.
+ """
+ if isinstance(next, (str, unicode)):
+ next = atom.http_core.Uri.parse_uri(next)
+ # If the user passed in a string instead of a list for scopes, convert to
+ # a single item tuple.
+ if isinstance(scopes, (str, unicode, atom.http_core.Uri)):
+ scopes = (scopes,)
+ scopes_string = ' '.join([str(scope) for scope in scopes])
+ next.query[scopes_param_prefix] = scopes_string
+
+ if isinstance(request_url, (str, unicode)):
+ request_url = atom.http_core.Uri.parse_uri(request_url)
+ request_url.query['next'] = str(next)
+ request_url.query['scope'] = scopes_string
+ if session:
+ request_url.query['session'] = '1'
+ else:
+ request_url.query['session'] = '0'
+ if secure:
+ request_url.query['secure'] = '1'
+ else:
+ request_url.query['secure'] = '0'
+ request_url.query['hd'] = domain
+ return request_url
+
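+# Illustrative sketch: building the AuthSub approval URL for the Documents
+# List scope. The 'next' handler URL is a placeholder.
+#
+#   approval_url = generate_auth_sub_url(
+#       'http://www.example.com/authsub_handler',
+#       ('https://docs.google.com/feeds/',), secure=False, session=True)
+#   # Redirect the user's browser to str(approval_url).
+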
+
+def auth_sub_string_from_url(url, scopes_param_prefix='auth_sub_scopes'):
+ """Finds the token string (and scopes) after the browser is redirected.
+
+ After the Google Accounts AuthSub pages redirect the user's browser back to
+ the web application (using the 'next' URL from the request) the web app must
+ extract the token from the current page's URL. The token is provided as a
+ URL parameter named 'token' and if generate_auth_sub_url was used to create
+ the request, the token's valid scopes are included in a URL parameter whose
+ name is specified in scopes_param_prefix.
+
+ Args:
+ url: atom.url.Url or str representing the current URL. The token value
+ and valid scopes should be included as URL parameters.
+ scopes_param_prefix: str (optional) The URL parameter key which maps to
+ the list of valid scopes for the token.
+
+ Returns:
+ A tuple containing the token value as a string, and a tuple of scopes
+ (as atom.http_core.Uri objects) which are URL prefixes under which this
+ token grants permission to read and write user data.
+ (token_string, (scope_uri, scope_uri, scope_uri, ...))
+ If no scopes were included in the URL, the second value in the tuple is
+ None. If there was no token param in the url, the tuple returned is
+ (None, None)
+ """
+ if isinstance(url, (str, unicode)):
+ url = atom.http_core.Uri.parse_uri(url)
+ if 'token' not in url.query:
+ return (None, None)
+ token = url.query['token']
+ # TODO: decide whether no scopes should be None or ().
+ scopes = None # Default to None for no scopes.
+ if scopes_param_prefix in url.query:
+ scopes = tuple(url.query[scopes_param_prefix].split(' '))
+ return (token, scopes)
+
+
+AuthSubStringFromUrl = auth_sub_string_from_url
+
+
+def auth_sub_string_from_body(http_body):
+ """Extracts the AuthSub token from an HTTP body string.
+
+ Used to find the new session token after making a request to upgrade a
+ single use AuthSub token.
+
+ Args:
+ http_body: str The response from the server which contains the AuthSub
+ key. For example, this function would find the new session token
+ from the server's response to an upgrade token request.
+
+ Returns:
+ The raw token value string to use in an AuthSubToken object.
+ """
+ for response_line in http_body.splitlines():
+ if response_line.startswith('Token='):
+ # Strip off Token= and return the token value string.
+ return response_line[6:]
+ return None
+
+
+class AuthSubToken(object):
+
+ def __init__(self, token_string, scopes=None):
+ self.token_string = token_string
+ self.scopes = scopes or []
+
+ def modify_request(self, http_request):
+ """Sets Authorization header, allows app to act on the user's behalf."""
+ http_request.headers['Authorization'] = '%s%s' % (AUTHSUB_AUTH_LABEL,
+ self.token_string)
+
+ ModifyRequest = modify_request
+
+ def from_url(str_or_uri):
+ """Creates a new AuthSubToken using information in the URL.
+
+ Uses auth_sub_string_from_url.
+
+ Args:
+ str_or_uri: The current page's URL (as a str or atom.http_core.Uri)
+ which should contain a token query parameter since the
+ Google auth server redirected the user's browser to this
+ URL.
+ """
+ token_and_scopes = auth_sub_string_from_url(str_or_uri)
+ return AuthSubToken(token_and_scopes[0], token_and_scopes[1])
+
+ from_url = staticmethod(from_url)
+ FromUrl = from_url
+
+ def _upgrade_token(self, http_body):
+ """Replaces the token value with a session token from the auth server.
+
+ Uses the response of a token upgrade request to modify this token. Uses
+ auth_sub_string_from_body.
+ """
+ self.token_string = auth_sub_string_from_body(http_body)
+
+
+# Functions and classes for Secure-mode AuthSub
+def build_auth_sub_data(http_request, timestamp, nonce):
+ """Creates the data string which must be RSA-signed in secure requests.
+
+ For more details see the documentation on secure AuthSub requests:
+ http://code.google.com/apis/accounts/docs/AuthSub.html#signingrequests
+
+ Args:
+ http_request: The request being made to the server. The Request's URL
+ must be complete before this signature is calculated as any changes
+ to the URL will invalidate the signature.
+ timestamp: Integer representing the time the request is sent. The
+ timestamp should be expressed in number of seconds after January 1,
+ 1970 00:00:00 GMT.
+ nonce: str Random 64-bit, unsigned number encoded as an ASCII string in
+ decimal format. The nonce/timestamp pair should always be unique to
+ prevent replay attacks.
+ """
+ return '%s %s %s %s' % (http_request.method, str(http_request.uri),
+ str(timestamp), nonce)
+
+
+def generate_signature(data, rsa_key):
+ """Signs the data string for a secure AuthSub request."""
+ import base64
+ try:
+ from tlslite.utils import keyfactory
+ except ImportError:
+ from gdata.tlslite.utils import keyfactory
+ private_key = keyfactory.parsePrivateKey(rsa_key)
+ signed = private_key.hashAndSign(data)
+ # Python2.3 and lower does not have the base64.b64encode function.
+ if hasattr(base64, 'b64encode'):
+ return base64.b64encode(signed)
+ else:
+ return base64.encodestring(signed).replace('\n', '')
+
+
+class SecureAuthSubToken(AuthSubToken):
+
+ def __init__(self, token_string, rsa_private_key, scopes=None):
+ self.token_string = token_string
+ self.scopes = scopes or []
+ self.rsa_private_key = rsa_private_key
+
+ def from_url(str_or_uri, rsa_private_key):
+ """Creates a new SecureAuthSubToken using information in the URL.
+
+ Uses auth_sub_string_from_url.
+
+ Args:
+ str_or_uri: The current page's URL (as a str or atom.http_core.Uri)
+ which should contain a token query parameter since the Google auth
+ server redirected the user's browser to this URL.
+ rsa_private_key: str the private RSA key cert used to sign all requests
+ made with this token.
+ """
+ token_and_scopes = auth_sub_string_from_url(str_or_uri)
+ return SecureAuthSubToken(token_and_scopes[0], rsa_private_key,
+ token_and_scopes[1])
+
+ from_url = staticmethod(from_url)
+ FromUrl = from_url
+
+ def modify_request(self, http_request):
+ """Sets the Authorization header and includes a digital signature.
+
+ Calculates a digital signature using the private RSA key, a timestamp
+ (uses now at the time this method is called) and a random nonce.
+
+ Args:
+ http_request: The atom.http_core.HttpRequest which contains all of the
+ information needed to send a request to the remote server. The
+ URL and the method of the request must be already set and cannot be
+ changed after this token signs the request, or the signature will
+ not be valid.
+ """
+ timestamp = str(int(time.time()))
+ nonce = ''.join([str(random.randint(0, 9)) for i in xrange(15)])
+ data = build_auth_sub_data(http_request, timestamp, nonce)
+ signature = generate_signature(data, self.rsa_private_key)
+ http_request.headers['Authorization'] = (
+ '%s%s sigalg="rsa-sha1" data="%s" sig="%s"' % (AUTHSUB_AUTH_LABEL,
+ self.token_string, data, signature))
+
+ ModifyRequest = modify_request
+
+
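+# Illustrative sketch (editor's example, not part of the library): signing a
+# request with a registered (secure) AuthSub token. The feed URL is a
+# placeholder; rsa_pem_key is the application's private key as a PEM string.
+def _example_secure_auth_sub_request(token_string, rsa_pem_key):
+  """Sketch: sign a GET request with a secure AuthSub token."""
+  token = SecureAuthSubToken(token_string, rsa_pem_key)
+  request = atom.http_core.HttpRequest(
+      'http://www.blogger.com/feeds/default/blogs', 'GET')
+  # Adds an 'Authorization: AuthSub token=... sigalg="rsa-sha1" ...' header.
+  token.modify_request(request)
+  return request
+
+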
+# OAuth functions and classes.
+RSA_SHA1 = 'RSA-SHA1'
+HMAC_SHA1 = 'HMAC-SHA1'
+
+
+def build_oauth_base_string(http_request, consumer_key, nonce, signature_type,
+ timestamp, version, next='oob', token=None,
+ verifier=None):
+ """Generates the base string to be signed in the OAuth request.
+
+ Args:
+ http_request: The request being made to the server. The Request's URL
+ must be complete before this signature is calculated as any changes
+ to the URL will invalidate the signature.
+ consumer_key: Domain identifying the third-party web application. This is
+ the domain used when registering the application with Google. It
+ identifies who is making the request on behalf of the user.
+ nonce: Random 64-bit, unsigned number encoded as an ASCII string in decimal
+ format. The nonce/timestamp pair should always be unique to prevent
+ replay attacks.
+ signature_type: either RSA_SHA1 or HMAC_SHA1
+ timestamp: Integer representing the time the request is sent. The
+ timestamp should be expressed in number of seconds after January 1,
+ 1970 00:00:00 GMT.
+ version: The OAuth version used by the requesting web application. This
+ value must be '1.0' or '1.0a'. If not provided, Google assumes version
+ 1.0 is in use.
+ next: The URL the user should be redirected to after granting access
+ to a Google service(s). It can include url-encoded query parameters.
+ The default value is 'oob'. (This is the oauth_callback.)
+ token: The string for the OAuth request token or OAuth access token.
+ verifier: str Sent as the oauth_verifier and required when upgrading a
+ request token to an access token.
+ """
+ # First we must build the canonical base string for the request.
+ params = http_request.uri.query.copy()
+ params['oauth_consumer_key'] = consumer_key
+ params['oauth_nonce'] = nonce
+ params['oauth_signature_method'] = signature_type
+ params['oauth_timestamp'] = str(timestamp)
+ if next is not None:
+ params['oauth_callback'] = str(next)
+ if token is not None:
+ params['oauth_token'] = token
+ if version is not None:
+ params['oauth_version'] = version
+ if verifier is not None:
+ params['oauth_verifier'] = verifier
+ # We need to get the key value pairs in lexicographically sorted order.
+ sorted_keys = None
+ try:
+ sorted_keys = sorted(params.keys())
+ # The sorted function is not available in Python2.3 and lower
+ except NameError:
+ sorted_keys = params.keys()
+ sorted_keys.sort()
+ pairs = []
+ for key in sorted_keys:
+ pairs.append('%s=%s' % (urllib.quote(key, safe='~'),
+ urllib.quote(params[key], safe='~')))
+ # We want to escape /'s too, so use safe='~'
+ all_parameters = urllib.quote('&'.join(pairs), safe='~')
+ normalized_host = http_request.uri.host.lower()
+ normalized_scheme = (http_request.uri.scheme or 'http').lower()
+ non_default_port = None
+ if (http_request.uri.port is not None
+ and ((normalized_scheme == 'https' and http_request.uri.port != 443)
+ or (normalized_scheme == 'http' and http_request.uri.port != 80))):
+ non_default_port = http_request.uri.port
+ path = http_request.uri.path or '/'
+ request_path = None
+ if not path.startswith('/'):
+ path = '/%s' % path
+ if non_default_port is not None:
+ # Set the only safe char in url encoding to ~ since we want to escape /
+ # as well.
+ request_path = urllib.quote('%s://%s:%s%s' % (
+ normalized_scheme, normalized_host, non_default_port, path), safe='~')
+ else:
+ # Set the only safe char in url encoding to ~ since we want to escape /
+ # as well.
+ request_path = urllib.quote('%s://%s%s' % (
+ normalized_scheme, normalized_host, path), safe='~')
+ # TODO: ensure that token escaping logic is correct, not sure if the token
+ # value should be double escaped instead of single.
+ base_string = '&'.join((http_request.method.upper(), request_path,
+ all_parameters))
+ # Now we have the base string, we can calculate the oauth_signature.
+ return base_string
+
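+
+# Illustrative sketch (editor's example, not part of the library): the base
+# string has the form METHOD&encoded-request-url&encoded-sorted-parameters.
+# Every value below is a placeholder; real requests need a fresh nonce and
+# timestamp.
+def _example_oauth_base_string():
+  """Sketch: build a signature base string for a simple GET request."""
+  request = atom.http_core.HttpRequest(
+      'http://www.google.com/calendar/feeds/default/allcalendars/full', 'GET')
+  return build_oauth_base_string(
+      request, 'example.com', '4572616e48616d6d', HMAC_SHA1, '1234567890',
+      '1.0', next='oob', token='ab3cd9j4ks73hf7g')
+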
+
+def generate_hmac_signature(http_request, consumer_key, consumer_secret,
+ timestamp, nonce, version, next='oob',
+ token=None, token_secret=None, verifier=None):
+ import hmac
+ import base64
+ base_string = build_oauth_base_string(
+ http_request, consumer_key, nonce, HMAC_SHA1, timestamp, version,
+ next, token, verifier=verifier)
+ hash_key = None
+ hashed = None
+ if token_secret is not None:
+ hash_key = '%s&%s' % (urllib.quote(consumer_secret, safe='~'),
+ urllib.quote(token_secret, safe='~'))
+ else:
+ hash_key = '%s&' % urllib.quote(consumer_secret, safe='~')
+ try:
+ import hashlib
+ hashed = hmac.new(hash_key, base_string, hashlib.sha1)
+ except ImportError:
+ import sha
+ hashed = hmac.new(hash_key, base_string, sha)
+ # Python2.3 does not have base64.b64encode.
+ if hasattr(base64, 'b64encode'):
+ return base64.b64encode(hashed.digest())
+ else:
+ return base64.encodestring(hashed.digest()).replace('\n', '')
+
+
+def generate_rsa_signature(http_request, consumer_key, rsa_key,
+ timestamp, nonce, version, next='oob',
+ token=None, token_secret=None, verifier=None):
+ import base64
+ try:
+ from tlslite.utils import keyfactory
+ except ImportError:
+ from gdata.tlslite.utils import keyfactory
+ base_string = build_oauth_base_string(
+ http_request, consumer_key, nonce, RSA_SHA1, timestamp, version,
+ next, token, verifier=verifier)
+ private_key = keyfactory.parsePrivateKey(rsa_key)
+ # Sign using the key
+ signed = private_key.hashAndSign(base_string)
+ # Python2.3 does not have base64.b64encode.
+ if hasattr(base64, 'b64encode'):
+ return base64.b64encode(signed)
+ else:
+ return base64.encodestring(signed).replace('\n', '')
+
+
+def generate_auth_header(consumer_key, timestamp, nonce, signature_type,
+ signature, version='1.0', next=None, token=None,
+ verifier=None):
+ """Builds the Authorization header to be sent in the request.
+
+ Args:
+ consumer_key: Identifies the application making the request (str).
+ timestamp: The time the request is sent, expressed as the number of
+ seconds after January 1, 1970 00:00:00 GMT.
+ nonce: Random number string; each nonce/timestamp pair should be unique
+ to prevent replay attacks.
+ signature_type: One of either HMAC_SHA1 or RSA_SHA1
+ signature: The HMAC or RSA signature for the request as a base64
+ encoded string.
+ version: The version of the OAuth protocol that this request is using.
+ Default is '1.0'
+ next: The URL of the page that the user's browser should be sent to
+ after they authorize the token. (Optional)
+ token: str The OAuth token value to be used in the oauth_token parameter
+ of the header.
+ verifier: str The OAuth verifier which must be included when you are
+ upgrading a request token to an access token.
+ """
+ params = {
+ 'oauth_consumer_key': consumer_key,
+ 'oauth_version': version,
+ 'oauth_nonce': nonce,
+ 'oauth_timestamp': str(timestamp),
+ 'oauth_signature_method': signature_type,
+ 'oauth_signature': signature}
+ if next is not None:
+ params['oauth_callback'] = str(next)
+ if token is not None:
+ params['oauth_token'] = token
+ if verifier is not None:
+ params['oauth_verifier'] = verifier
+ pairs = [
+ '%s="%s"' % (
+ k, urllib.quote(v, safe='~')) for k, v in params.iteritems()]
+ return 'OAuth %s' % (', '.join(pairs))
+
+
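+# Illustrative sketch (editor's example, not part of the library): the value
+# produced is a single header string such as
+#   OAuth oauth_consumer_key="example.com", oauth_nonce="...", ...
+# The signature below is a placeholder, not a real base64 signature.
+def _example_oauth_auth_header():
+  """Sketch: assemble an OAuth Authorization header from placeholder parts."""
+  return generate_auth_header(
+      'example.com', '1234567890', '4572616e48616d6d', HMAC_SHA1,
+      'cGxhY2Vob2xkZXItc2lnbmF0dXJl', token='ab3cd9j4ks73hf7g')
+
+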
+REQUEST_TOKEN_URL = 'https://www.google.com/accounts/OAuthGetRequestToken'
+ACCESS_TOKEN_URL = 'https://www.google.com/accounts/OAuthGetAccessToken'
+
+
+def generate_request_for_request_token(
+ consumer_key, signature_type, scopes, rsa_key=None, consumer_secret=None,
+ auth_server_url=REQUEST_TOKEN_URL, next='oob', version='1.0'):
+ """Creates request to be sent to auth server to get an OAuth request token.
+
+ Args:
+ consumer_key: Domain identifying the third-party web application; this
+ is the domain used when registering the application with Google.
+ signature_type: either RSA_SHA1 or HMAC_SHA1. The rsa_key must be
+ provided if the signature type is RSA but if the signature method
+ is HMAC, the consumer_secret must be used.
+ scopes: List of URL prefixes for the data which we want to access. For
+ example, to request access to the user's Blogger and Google Calendar
+ data, we would request
+ ['http://www.blogger.com/feeds/',
+ 'https://www.google.com/calendar/feeds/',
+ 'http://www.google.com/calendar/feeds/']
+ rsa_key: Only used if the signature method is RSA_SHA1.
+ consumer_secret: Only used if the signature method is HMAC_SHA1.
+ auth_server_url: The URL to which the token request should be directed.
+ Defaults to 'https://www.google.com/accounts/OAuthGetRequestToken'.
+ next: The URL of the page that the user's browser should be sent to
+ after they authorize the token. (Optional)
+ version: The OAuth version used by the requesting web application.
+ Defaults to '1.0'.
+
+ Returns:
+ An atom.http_core.HttpRequest object with the URL, Authorization header
+ and body filled in.
+ """
+ request = atom.http_core.HttpRequest(auth_server_url, 'POST')
+ # Add the requested auth scopes to the Auth request URL.
+ if scopes:
+ request.uri.query['scope'] = ' '.join(scopes)
+
+ timestamp = str(int(time.time()))
+ nonce = ''.join([str(random.randint(0, 9)) for i in xrange(15)])
+ signature = None
+ if signature_type == HMAC_SHA1:
+ signature = generate_hmac_signature(
+ request, consumer_key, consumer_secret, timestamp, nonce, version,
+ next=next)
+ elif signature_type == RSA_SHA1:
+ signature = generate_rsa_signature(
+ request, consumer_key, rsa_key, timestamp, nonce, version, next=next)
+ else:
+ return None
+
+ request.headers['Authorization'] = generate_auth_header(
+ consumer_key, timestamp, nonce, signature_type, signature, version,
+ next)
+ request.headers['Content-Length'] = '0'
+ return request
+
+
+def generate_request_for_access_token(
+ request_token, auth_server_url=ACCESS_TOKEN_URL):
+ """Creates a request to ask the OAuth server for an access token.
+
+ Requires a request token which the user has authorized. See the
+ documentation on OAuth with Google Data for more details:
+ http://code.google.com/apis/accounts/docs/OAuth.html#AccessToken
+
+ Args:
+ request_token: An OAuthHmacToken or OAuthRsaToken which the user has
+ approved using their browser.
+ auth_server_url: (optional) The URL at which the OAuth access token is
+ requested. Defaults to
+ https://www.google.com/accounts/OAuthGetAccessToken
+
+ Returns:
+ A new HttpRequest object which can be sent to the OAuth server to
+ request an OAuth Access Token.
+ """
+ http_request = atom.http_core.HttpRequest(auth_server_url, 'POST')
+ http_request.headers['Content-Length'] = '0'
+ return request_token.modify_request(http_request)
+
+
+def oauth_token_info_from_body(http_body):
+ """Extracts an OAuth request token from the server's response.
+
+ Returns:
+ A tuple of strings containing the OAuth token and token secret. If
+ neither of these are present in the body, returns (None, None)
+ """
+ token = None
+ token_secret = None
+ for pair in http_body.split('&'):
+ if pair.startswith('oauth_token='):
+ token = urllib.unquote(pair[len('oauth_token='):])
+ if pair.startswith('oauth_token_secret='):
+ token_secret = urllib.unquote(pair[len('oauth_token_secret='):])
+ return (token, token_secret)
+
+
+def hmac_token_from_body(http_body, consumer_key, consumer_secret,
+ auth_state):
+ token_value, token_secret = oauth_token_info_from_body(http_body)
+ token = OAuthHmacToken(consumer_key, consumer_secret, token_value,
+ token_secret, auth_state)
+ return token
+
+
+def rsa_token_from_body(http_body, consumer_key, rsa_private_key,
+ auth_state):
+ token_value, token_secret = oauth_token_info_from_body(http_body)
+ token = OAuthRsaToken(consumer_key, rsa_private_key, token_value,
+ token_secret, auth_state)
+ return token
+
+
+DEFAULT_DOMAIN = 'default'
+OAUTH_AUTHORIZE_URL = 'https://www.google.com/accounts/OAuthAuthorizeToken'
+
+
+def generate_oauth_authorization_url(
+ token, next=None, hd=DEFAULT_DOMAIN, hl=None, btmpl=None,
+ auth_server=OAUTH_AUTHORIZE_URL):
+ """Creates a URL for the page where the request token can be authorized.
+
+ Args:
+ token: str The request token from the OAuth server.
+ next: str (optional) URL the user should be redirected to after granting
+ access to a Google service(s). It can include url-encoded query
+ parameters.
+ hd: str (optional) Identifies a particular hosted domain account to be
+ accessed (for example, 'mycollege.edu'). Use 'default' to specify a
+ regular Google account ('username@gmail.com').
+ hl: str (optional) An ISO 639 country code identifying what language the
+ approval page should be translated in (for example, 'hl=en' for
+ English). The default is the user's selected language.
+ btmpl: str (optional) Forces a mobile version of the approval page. The
+ only accepted value is 'mobile'.
+ auth_server: str (optional) The start of the token authorization web
+ page. Defaults to
+ 'https://www.google.com/accounts/OAuthAuthorizeToken'
+
+ Returns:
+ An atom.http_core.Uri pointing to the token authorization page where the
+ user may allow or deny this app to access their Google data.
+ """
+ uri = atom.http_core.Uri.parse_uri(auth_server)
+ uri.query['oauth_token'] = token
+ uri.query['hd'] = hd
+ if next is not None:
+ uri.query['oauth_callback'] = str(next)
+ if hl is not None:
+ uri.query['hl'] = hl
+ if btmpl is not None:
+ uri.query['btmpl'] = btmpl
+ return uri
+
+
+def oauth_token_info_from_url(url):
+ """Extracts the OAuth token and verifier from the redirected page's URL.
+
+ Returns:
+ A tuple of strings containing the OAuth token and the OAuth verifier which
+ need to be sent when upgrading a request token to an access token.
+ """
+ if isinstance(url, (str, unicode)):
+ url = atom.http_core.Uri.parse_uri(url)
+ token = None
+ verifier = None
+ if 'oauth_token' in url.query:
+ token = urllib.unquote(url.query['oauth_token'])
+ if 'oauth_verifier' in url.query:
+ verifier = urllib.unquote(url.query['oauth_verifier'])
+ return (token, verifier)
+
+
+def authorize_request_token(request_token, url):
+ """Adds information to request token to allow it to become an access token.
+
+ Modifies the request_token object passed in by setting and unsetting the
+ necessary fields to allow this token to form a valid upgrade request.
+
+ Args:
+ request_token: The OAuth request token which has been authorized by the
+ user. In order for this token to be upgraded to an access token,
+ certain fields must be extracted from the URL and added to the token
+ so that they can be passed in an upgrade-token request.
+ url: The URL of the current page which the user's browser was redirected
+ to after they authorized access for the app. This function extracts
+ information from the URL which is needed to upgrade the token from
+ a request token to an access token.
+
+ Returns:
+ The same token object which was passed in.
+ """
+ token, verifier = oauth_token_info_from_url(url)
+ request_token.token = token
+ request_token.verifier = verifier
+ request_token.auth_state = AUTHORIZED_REQUEST_TOKEN
+ return request_token
+
+
+AuthorizeRequestToken = authorize_request_token
+
+
+def upgrade_to_access_token(request_token, server_response_body):
+ """Extracts access token information from response to an upgrade request.
+
+ Once the server has responded with the new token info for the OAuth
+ access token, this method modifies the request_token to set and unset
+ necessary fields to create valid OAuth authorization headers for requests.
+
+ Args:
+ request_token: An OAuth token which this function modifies to allow it
+ to be used as an access token.
+ server_response_body: str The server's response to an OAuthAuthorizeToken
+ request. This should contain the new token and token_secret which
+ are used to generate the signature and parameters of the Authorization
+ header in subsequent requests to Google Data APIs.
+
+ Returns:
+ The same token object which was passed in.
+ """
+ token, token_secret = oauth_token_info_from_body(server_response_body)
+ request_token.token = token
+ request_token.token_secret = token_secret
+ request_token.auth_state = ACCESS_TOKEN
+ request_token.next = None
+ request_token.verifier = None
+ return request_token
+
+
+UpgradeToAccessToken = upgrade_to_access_token
+
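+
+# Illustrative sketch (editor's example, not part of the library): the full
+# three-legged OAuth flow using the helpers above. 'http_client' stands for
+# whatever object the application uses to send atom.http_core.HttpRequest
+# objects and read back the response body; it, the consumer credentials, the
+# callback URL and the scope are all placeholder assumptions.
+def _example_three_legged_oauth_flow(http_client, redirected_url):
+  """Sketch: request token -> user authorization -> access token."""
+  # Step 1: fetch an unauthorized request token from Google.
+  token_request = generate_request_for_request_token(
+      'example.com', HMAC_SHA1, ['http://www.blogger.com/feeds/'],
+      consumer_secret='my_consumer_secret',
+      next='http://example.com/oauth_callback')
+  request_token = hmac_token_from_body(
+      http_client.send(token_request), 'example.com', 'my_consumer_secret',
+      REQUEST_TOKEN)
+  # Step 2: send the user's browser to the approval page; Google then
+  # redirects back to the callback with oauth_token and oauth_verifier.
+  approval_url = request_token.generate_authorization_url()
+  authorize_request_token(request_token, redirected_url)
+  # Step 3: upgrade the authorized request token to an access token.
+  upgrade_request = generate_request_for_access_token(request_token)
+  upgrade_to_access_token(request_token, http_client.send(upgrade_request))
+  return approval_url, request_token
+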
+
+REQUEST_TOKEN = 1
+AUTHORIZED_REQUEST_TOKEN = 2
+ACCESS_TOKEN = 3
+
+
+class OAuthHmacToken(object):
+ SIGNATURE_METHOD = HMAC_SHA1
+
+ def __init__(self, consumer_key, consumer_secret, token, token_secret,
+ auth_state, next=None, verifier=None):
+ self.consumer_key = consumer_key
+ self.consumer_secret = consumer_secret
+ self.token = token
+ self.token_secret = token_secret
+ self.auth_state = auth_state
+ self.next = next
+ self.verifier = verifier # Used to convert request token to access token.
+
+ def generate_authorization_url(
+ self, google_apps_domain=DEFAULT_DOMAIN, language=None, btmpl=None,
+ auth_server=OAUTH_AUTHORIZE_URL):
+ """Creates the URL at which the user can authorize this app to access.
+
+ Args:
+ google_apps_domain: str (optional) If the user should be signing in
+ using an account under a known Google Apps domain, provide the
+ domain name ('example.com') here. If not provided, 'default'
+ is used, and a user who is signed in with both a regular Google
+ Account and one or more Google Apps accounts will be prompted
+ to choose which account to use.
+ language: str (optional) An ISO 639 country code identifying what
+ language the approval page should be translated in (for example,
+ 'en' for English). The default is the user's selected language.
+ btmpl: str (optional) Forces a mobile version of the approval page. The
+ only accepted value is 'mobile'.
+ auth_server: str (optional) The start of the token authorization web
+ page. Defaults to
+ 'https://www.google.com/accounts/OAuthAuthorizeToken'
+ """
+ return generate_oauth_authorization_url(
+ self.token, hd=google_apps_domain, hl=language, btmpl=btmpl,
+ auth_server=auth_server)
+
+ GenerateAuthorizationUrl = generate_authorization_url
+
+ def modify_request(self, http_request):
+ """Sets the Authorization header in the HTTP request using the token.
+
+ Calculates an HMAC signature using the information in the token to
+ indicate that the request came from this application and that this
+ application has permission to access a particular user's data.
+
+ Returns:
+ The same HTTP request object which was passed in.
+ """
+ timestamp = str(int(time.time()))
+ nonce = ''.join([str(random.randint(0, 9)) for i in xrange(15)])
+ signature = generate_hmac_signature(
+ http_request, self.consumer_key, self.consumer_secret, timestamp,
+ nonce, version='1.0', next=self.next, token=self.token,
+ token_secret=self.token_secret, verifier=self.verifier)
+ http_request.headers['Authorization'] = generate_auth_header(
+ self.consumer_key, timestamp, nonce, HMAC_SHA1, signature,
+ version='1.0', next=self.next, token=self.token,
+ verifier=self.verifier)
+ return http_request
+
+ ModifyRequest = modify_request
+
+
+class OAuthRsaToken(OAuthHmacToken):
+ SIGNATURE_METHOD = RSA_SHA1
+
+ def __init__(self, consumer_key, rsa_private_key, token, token_secret,
+ auth_state, next=None, verifier=None):
+ self.consumer_key = consumer_key
+ self.rsa_private_key = rsa_private_key
+ self.token = token
+ self.token_secret = token_secret
+ self.auth_state = auth_state
+ self.next = next
+ self.verifier = verifier # Used to convert request token to access token.
+
+ def modify_request(self, http_request):
+ """Sets the Authorization header in the HTTP request using the token.
+
+ Calculates an RSA signature using the information in the token to
+ indicate that the request came from this application and that this
+ application has permission to access a particular user's data.
+
+ Returns:
+ The same HTTP request object which was passed in.
+ """
+ timestamp = str(int(time.time()))
+ nonce = ''.join([str(random.randint(0, 9)) for i in xrange(15)])
+ signature = generate_rsa_signature(
+ http_request, self.consumer_key, self.rsa_private_key, timestamp,
+ nonce, version='1.0', next=self.next, token=self.token,
+ token_secret=self.token_secret, verifier=self.verifier)
+ http_request.headers['Authorization'] = generate_auth_header(
+ self.consumer_key, timestamp, nonce, RSA_SHA1, signature,
+ version='1.0', next=self.next, token=self.token,
+ verifier=self.verifier)
+ return http_request
+
+ ModifyRequest = modify_request
+
+
+class TwoLeggedOAuthHmacToken(OAuthHmacToken):
+
+ def __init__(self, consumer_key, consumer_secret, requestor_id):
+ self.requestor_id = requestor_id
+ OAuthHmacToken.__init__(
+ self, consumer_key, consumer_secret, None, None, ACCESS_TOKEN,
+ next=None, verifier=None)
+
+ def modify_request(self, http_request):
+ """Sets the Authorization header in the HTTP request using the token.
+
+ Calculates an HMAC signature using the information in the token to
+ indicate that the request came from this application and that this
+ application has permission to access a particular user's data using 2LO.
+
+ Returns:
+ The same HTTP request object which was passed in.
+ """
+ http_request.uri.query['xoauth_requestor_id'] = self.requestor_id
+ return OAuthHmacToken.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
+
+
+class TwoLeggedOAuthRsaToken(OAuthRsaToken):
+
+ def __init__(self, consumer_key, rsa_private_key, requestor_id):
+ self.requestor_id = requestor_id
+ OAuthRsaToken.__init__(
+ self, consumer_key, rsa_private_key, None, None, ACCESS_TOKEN,
+ next=None, verifier=None)
+
+ def modify_request(self, http_request):
+ """Sets the Authorization header in the HTTP request using the token.
+
+ Calculates an RSA signature using the information in the token to
+ indicate that the request came from this application and that this
+ application has permission to access a particular user's data using 2LO.
+
+ Returns:
+ The same HTTP request object which was passed in.
+ """
+ http_request.uri.query['xoauth_requestor_id'] = self.requestor_id
+ return OAuthRsaToken.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
+
+
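+# Illustrative sketch (editor's example, not part of the library): two-legged
+# OAuth signs requests with only the consumer credentials plus a requestor id,
+# so no token-approval step is needed. The domain, secret, email address and
+# feed URL are placeholders.
+def _example_two_legged_request():
+  """Sketch: sign a request on behalf of a user in a Google Apps domain."""
+  token = TwoLeggedOAuthHmacToken(
+      'example.com', 'my_consumer_secret', 'liz@example.com')
+  request = atom.http_core.HttpRequest(
+      'http://www.google.com/calendar/feeds/default/allcalendars/full', 'GET')
+  # Adds xoauth_requestor_id to the URL and an OAuth Authorization header.
+  return token.modify_request(request)
+
+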
+def _join_token_parts(*args):
+ """Escapes and combines all strings passed in.
+
+ Used to convert a token object's members into a string instead of
+ using pickle.
+
+ Note: A None value will be converted to an empty string.
+
+ Returns:
+ A string in the form 1x|member1|member2|member3...
+ """
+ return '|'.join([urllib.quote_plus(a or '') for a in args])
+
+
+def _split_token_parts(blob):
+ """Extracts and unescapes fields from the provided binary string.
+
+ Reverses the packing performed by _join_token_parts. Used to extract
+ the members of a token object.
+
+ Note: An empty string from the blob will be interpreted as None.
+
+ Args:
+ blob: str A string of the form 1x|member1|member2|member3 as created
+ by _join_token_parts
+
+ Returns:
+ A list of unescaped strings.
+ """
+ return [urllib.unquote_plus(part) or None for part in blob.split('|')]
+
+
+def token_to_blob(token):
+ """Serializes the token data as a string for storage in a datastore.
+
+ Supported token classes: ClientLoginToken, AuthSubToken, SecureAuthSubToken,
+ OAuthRsaToken, OAuthHmacToken, TwoLeggedOAuthRsaToken, and
+ TwoLeggedOAuthHmacToken.
+
+ Args:
+ token: A token object which must be of one of the supported token classes.
+
+ Raises:
+ UnsupportedTokenType if the token is not one of the supported token
+ classes listed above.
+
+ Returns:
+ A string representing this token. The string can be converted back into
+ an equivalent token object using token_from_blob. Note that any members
+ which are set to '' will be set to None when the token is deserialized
+ by token_from_blob.
+ """
+ if isinstance(token, ClientLoginToken):
+ return _join_token_parts('1c', token.token_string)
+ # Check for secure auth sub type first since it is a subclass of
+ # AuthSubToken.
+ elif isinstance(token, SecureAuthSubToken):
+ return _join_token_parts('1s', token.token_string, token.rsa_private_key,
+ *token.scopes)
+ elif isinstance(token, AuthSubToken):
+ return _join_token_parts('1a', token.token_string, *token.scopes)
+ elif isinstance(token, TwoLeggedOAuthRsaToken):
+ return _join_token_parts(
+ '1rtl', token.consumer_key, token.rsa_private_key, token.requestor_id)
+ elif isinstance(token, TwoLeggedOAuthHmacToken):
+ return _join_token_parts(
+ '1htl', token.consumer_key, token.consumer_secret, token.requestor_id)
+ # Check RSA OAuth token first since the OAuthRsaToken is a subclass of
+ # OAuthHmacToken.
+ elif isinstance(token, OAuthRsaToken):
+ return _join_token_parts(
+ '1r', token.consumer_key, token.rsa_private_key, token.token,
+ token.token_secret, str(token.auth_state), token.next,
+ token.verifier)
+ elif isinstance(token, OAuthHmacToken):
+ return _join_token_parts(
+ '1h', token.consumer_key, token.consumer_secret, token.token,
+ token.token_secret, str(token.auth_state), token.next,
+ token.verifier)
+ else:
+ raise UnsupportedTokenType(
+ 'Unable to serialize token of type %s' % type(token))
+
+
+TokenToBlob = token_to_blob
+
+
+def token_from_blob(blob):
+ """Deserializes a token string from the datastore back into a token object.
+
+ Supported token classes: ClientLoginToken, AuthSubToken, SecureAuthSubToken,
+ OAuthRsaToken, OAuthHmacToken, TwoLeggedOAuthRsaToken, and
+ TwoLeggedOAuthHmacToken.
+
+ Args:
+ blob: string created by token_to_blob.
+
+ Raises:
+ UnsupportedTokenType if the token is not one of the supported token
+ classes listed above.
+
+ Returns:
+ A new token object with members set to the values serialized in the
+ blob string. Note that any members which were set to '' in the original
+ token will now be None.
+ """
+ parts = _split_token_parts(blob)
+ if parts[0] == '1c':
+ return ClientLoginToken(parts[1])
+ elif parts[0] == '1a':
+ return AuthSubToken(parts[1], parts[2:])
+ elif parts[0] == '1s':
+ return SecureAuthSubToken(parts[1], parts[2], parts[3:])
+ elif parts[0] == '1rtl':
+ return TwoLeggedOAuthRsaToken(parts[1], parts[2], parts[3])
+ elif parts[0] == '1htl':
+ return TwoLeggedOAuthHmacToken(parts[1], parts[2], parts[3])
+ elif parts[0] == '1r':
+ auth_state = int(parts[5])
+ return OAuthRsaToken(parts[1], parts[2], parts[3], parts[4], auth_state,
+ parts[6], parts[7])
+ elif parts[0] == '1h':
+ auth_state = int(parts[5])
+ return OAuthHmacToken(parts[1], parts[2], parts[3], parts[4], auth_state,
+ parts[6], parts[7])
+ else:
+ raise UnsupportedTokenType(
+ 'Unable to deserialize token with type marker of %s' % parts[0])
+
+
+TokenFromBlob = token_from_blob
+
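+
+# Illustrative sketch (editor's example, not part of the library): tokens can
+# be persisted as plain strings and rebuilt later. The token string and scope
+# are placeholders; note that members stored as '' come back as None.
+def _example_token_blob_round_trip():
+  """Sketch: serialize an AuthSubToken and rebuild an equivalent copy."""
+  original = AuthSubToken('CxDxTOKEN', ['http://www.blogger.com/feeds/'])
+  blob = token_to_blob(original)    # e.g. '1a|CxDxTOKEN|http%3A%2F%2F...'
+  restored = token_from_blob(blob)  # a new AuthSubToken with the same members
+  return original, blob, restored
+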
+
+def dump_tokens(tokens):
+ return ','.join([token_to_blob(t) for t in tokens])
+
+
+def load_tokens(blob):
+ return [token_from_blob(s) for s in blob.split(',')]
+
+
+def find_scopes_for_services(service_names=None):
+ """Creates a combined list of scope URLs for the desired services.
+
+ This method searches the AUTH_SCOPES dictionary.
+
+ Args:
+ service_names: list of strings (optional) Each name must be a key in the
+ AUTH_SCOPES dictionary. If no list is provided (None) then
+ the resulting list will contain all scope URLs in the
+ AUTH_SCOPES dict.
+
+ Returns:
+ A list of URL strings which are the scopes needed to access these services
+ when requesting a token using AuthSub or OAuth.
+ """
+ result_scopes = []
+ if service_names is None:
+ for service_name, scopes in AUTH_SCOPES.iteritems():
+ result_scopes.extend(scopes)
+ else:
+ for service_name in service_names:
+ result_scopes.extend(AUTH_SCOPES[service_name])
+ return result_scopes
+
+
+FindScopesForServices = find_scopes_for_services
+
+
+def ae_save(token, token_key):
+ """Stores an auth token in the App Engine datastore.
+
+ This is a convenience method for using the library with App Engine.
+ Recommended usage is to associate the auth token with the current_user.
+ If a user is signed in to the app using the App Engine users API, you
+ can use
+ gdata.gauth.ae_save(some_token, users.get_current_user().user_id())
+ If you are not using the Users API you are free to choose whatever
+ string you would like for the token_key.
+
+ Args:
+ token: an auth token object. Must be one of ClientLoginToken,
+ AuthSubToken, SecureAuthSubToken, OAuthRsaToken, or OAuthHmacToken
+ (see token_to_blob).
+ token_key: str A unique identifier to be used when you want to retrieve
+ the token. If the user is signed in to App Engine using the
+ users API, I recommend using the user ID for the token_key:
+ users.get_current_user().user_id()
+ """
+ import gdata.alt.app_engine
+ key_name = ''.join(('gd_auth_token', token_key))
+ return gdata.alt.app_engine.set_token(key_name, token_to_blob(token))
+
+
+AeSave = ae_save
+
+
+def ae_load(token_key):
+ """Retrieves a token object from the App Engine datastore.
+
+ This is a convenience method for using the library with App Engine.
+ See also ae_save.
+
+ Args:
+ token_key: str The unique key associated with the desired token when it
+ was saved using ae_save.
+
+ Returns:
+ A token object if there was a token associated with the token_key or None
+ if the key could not be found.
+ """
+ import gdata.alt.app_engine
+ key_name = ''.join(('gd_auth_token', token_key))
+ token_string = gdata.alt.app_engine.get_token(key_name)
+ if token_string is not None:
+ return token_from_blob(token_string)
+ else:
+ return None
+
+
+AeLoad = ae_load
+
+
+def ae_delete(token_key):
+ """Removes the token object from the App Engine datastore."""
+ import gdata.alt.app_engine
+ key_name = ''.join(('gd_auth_token', token_key))
+ gdata.alt.app_engine.delete_token(key_name)
+
+
+AeDelete = ae_delete
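+
+
+# Illustrative sketch (editor's example, not part of the library): storing and
+# retrieving a token for the signed-in App Engine user. Assumes the code runs
+# on App Engine with the users API available.
+def _example_app_engine_token_storage(token):
+  """Sketch: save, load and delete a token keyed by the current user's id."""
+  from google.appengine.api import users
+  user_id = users.get_current_user().user_id()
+  ae_save(token, user_id)       # persist the serialized token
+  restored = ae_load(user_id)   # later: rebuild the token object
+  ae_delete(user_id)            # remove it when no longer needed
+  return restored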
diff --git a/python/gdata/geo/__init__.py b/python/gdata/geo/__init__.py
new file mode 100644
index 0000000..1fcf604
--- /dev/null
+++ b/python/gdata/geo/__init__.py
@@ -0,0 +1,185 @@
+# -*-*- encoding: utf-8 -*-*-
+#
+# This is gdata.geo, implementing geographical positioning in gdata structures
+#
+# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
+#
+# Copyright 2007 Håvard Gulldahl
+# Portions copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Picasa Web Albums uses the georss and gml namespaces for
+elements defined in the GeoRSS and Geography Markup Language specifications.
+
+Specifically, Picasa Web Albums uses the following elements:
+
+georss:where
+gml:Point
+gml:pos
+
+http://code.google.com/apis/picasaweb/reference.html#georss_reference
+
+
+Picasa Web Albums also accepts geographic-location data in two other formats:
+W3C format and plain-GeoRSS (without GML) format.
+"""
+#
+# Over the wire, Picasa Web Albums only accepts and sends the
+#elements mentioned above, but this module will let you seamlessly convert
+#between the different formats (TODO 2007-10-18 hg)
+
+__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: api chokes on non-ascii chars in __author__
+__license__ = 'Apache License v2'
+
+
+import atom
+import gdata
+
+GEO_NAMESPACE = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
+GML_NAMESPACE = 'http://www.opengis.net/gml'
+GEORSS_NAMESPACE = 'http://www.georss.org/georss'
+
+class GeoBaseElement(atom.AtomBase):
+ """Base class for elements.
+
+ To add new elements, you only need to add the element tag name to self._tag
+ and the namespace to self._namespace
+ """
+
+ _tag = ''
+ _namespace = GML_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, name=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+class Pos(GeoBaseElement):
+ """(string) Specifies a latitude and longitude, separated by a space,
+ e.g. `35.669998 139.770004'"""
+
+ _tag = 'pos'
+def PosFromString(xml_string):
+ return atom.CreateClassFromXMLString(Pos, xml_string)
+
+class Point(GeoBaseElement):
+ """(container) Specifies a particular geographical point, by means of
+ a gml:pos element."""
+
+ _tag = 'Point'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}pos' % GML_NAMESPACE] = ('pos', Pos)
+ def __init__(self, pos=None, extension_elements=None, extension_attributes=None, text=None):
+ GeoBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ if pos is None:
+ pos = Pos()
+ self.pos=pos
+def PointFromString(xml_string):
+ return atom.CreateClassFromXMLString(Point, xml_string)
+
+class Where(GeoBaseElement):
+ """(container) Specifies a geographical location or region.
+ A container element, containing a single gml:Point element.
+ (Not to be confused with the gd:where element.)
+
+ Note that the (only) child attribute, .Point, is title-cased.
+ This reflects the names of elements in the xml stream
+ (principle of least surprise).
+
+ As a convenience, you can get a tuple of (lat, lon) with Where.location(),
+ and set the same data with Where.setLocation( (lat, lon) ).
+
+ Similarly, there are methods to set and get only latitude and longitude.
+ """
+
+ _tag = 'where'
+ _namespace = GEORSS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}Point' % GML_NAMESPACE] = ('Point', Point)
+ def __init__(self, point=None, extension_elements=None, extension_attributes=None, text=None):
+ GeoBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ if point is None:
+ point = Point()
+ self.Point=point
+ def location(self):
+ "(float, float) Return Where.Point.pos.text as a (lat,lon) tuple"
+ try:
+ return tuple([float(z) for z in self.Point.pos.text.split(' ')])
+ except AttributeError:
+ return tuple()
+ def set_location(self, latlon):
+ """(bool) Set Where.Point.pos.text from a (lat,lon) tuple.
+
+ Arguments:
+ latlon: A (lat, lon) tuple of floats: the latitude in degrees, from
+ -90.0 to 90.0, and the longitude in degrees, from -180.0 to 180.0
+
+ Returns True on success.
+
+ """
+
+ assert(isinstance(latlon[0], float))
+ assert(isinstance(latlon[1], float))
+ try:
+ self.Point.pos.text = "%s %s" % (latlon[0], latlon[1])
+ return True
+ except AttributeError:
+ return False
+ def latitude(self):
+ "(float) Get the latitude value of the geo-tag. See also .location()"
+ lat, lon = self.location()
+ return lat
+
+ def longitude(self):
+ "(float) Get the longitude value of the geo-tag. See also .location()"
+ lat, lon = self.location()
+ return lon
+
+ longtitude = longitude
+
+ def set_latitude(self, lat):
+ """(bool) Set the latitude value of the geo-tag.
+
+ Args:
+ lat (float): The new latitude value
+
+ See also .set_location()
+ """
+ _lat, lon = self.location()
+ return self.set_location((lat, lon))
+
+ def set_longitude(self, lon):
+ """(bool) Set the longitude value of the geo-tag.
+
+ Args:
+ lon (float): The new longitude value
+
+ See also .set_location()
+ """
+ lat, _lon = self.location()
+ return self.set_location((lat, lon))
+
+ set_longtitude = set_longitude
+
+def WhereFromString(xml_string):
+ return atom.CreateClassFromXMLString(Where, xml_string)
+
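+
+# Illustrative sketch (editor's example, not part of the library): building a
+# georss:where element and reading the coordinates back. The coordinates are
+# placeholders (Tokyo).
+def _example_where_usage():
+  """Sketch: set and read a (lat, lon) pair on a Where element."""
+  where = Where()  # creates an empty gml:Point containing a gml:pos
+  where.set_location((35.669998, 139.770004))
+  lat, lon = where.location()  # -> (35.669998, 139.770004)
+  return where, lat, lon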
diff --git a/python/gdata/geo/data.py b/python/gdata/geo/data.py
new file mode 100644
index 0000000..2aec911
--- /dev/null
+++ b/python/gdata/geo/data.py
@@ -0,0 +1,92 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the data classes of the Geography Extension"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+
+
+GEORSS_TEMPLATE = '{http://www.georss.org/georss/}%s'
+GML_TEMPLATE = '{http://www.opengis.net/gml/}%s'
+GEO_TEMPLATE = '{http://www.w3.org/2003/01/geo/wgs84_pos#/}%s'
+
+
+class GeoLat(atom.core.XmlElement):
+ """Describes a W3C latitude."""
+ _qname = GEO_TEMPLATE % 'lat'
+
+
+class GeoLong(atom.core.XmlElement):
+ """Describes a W3C longitude."""
+ _qname = GEO_TEMPLATE % 'long'
+
+
+class GeoRssBox(atom.core.XmlElement):
+ """Describes a geographical region."""
+ _qname = GEORSS_TEMPLATE % 'box'
+
+
+class GeoRssPoint(atom.core.XmlElement):
+ """Describes a geographical location."""
+ _qname = GEORSS_TEMPLATE % 'point'
+
+
+class GmlLowerCorner(atom.core.XmlElement):
+ """Describes a lower corner of a region."""
+ _qname = GML_TEMPLATE % 'lowerCorner'
+
+
+class GmlPos(atom.core.XmlElement):
+ """Describes a latitude and longitude."""
+ _qname = GML_TEMPLATE % 'pos'
+
+
+class GmlPoint(atom.core.XmlElement):
+ """Describes a particular geographical point."""
+ _qname = GML_TEMPLATE % 'Point'
+ pos = GmlPos
+
+
+class GmlUpperCorner(atom.core.XmlElement):
+ """Describes an upper corner of a region."""
+ _qname = GML_TEMPLATE % 'upperCorner'
+
+
+class GmlEnvelope(atom.core.XmlElement):
+ """Describes a Gml geographical region."""
+ _qname = GML_TEMPLATE % 'Envelope'
+ lower_corner = GmlLowerCorner
+ upper_corner = GmlUpperCorner
+
+
+class GeoRssWhere(atom.core.XmlElement):
+ """Describes a geographical location or region."""
+ _qname = GEORSS_TEMPLATE % 'where'
+ Point = GmlPoint
+ Envelope = GmlEnvelope
+
+
+class W3CPoint(atom.core.XmlElement):
+ """Describes a W3C geographical location."""
+ _qname = GEO_TEMPLATE % 'Point'
+ long = GeoLong
+ lat = GeoLat
+
+
diff --git a/python/gdata/health/__init__.py b/python/gdata/health/__init__.py
new file mode 100644
index 0000000..1904ecd
--- /dev/null
+++ b/python/gdata/health/__init__.py
@@ -0,0 +1,229 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains extensions to Atom objects used with Google Health."""
+
+__author__ = 'api.eric@google.com (Eric Bidelman)'
+
+import atom
+import gdata
+
+
+CCR_NAMESPACE = 'urn:astm-org:CCR'
+METADATA_NAMESPACE = 'http://schemas.google.com/health/metadata'
+
+
+class Ccr(atom.AtomBase):
+ """Represents a Google Health ContinuityOfCareRecord (CCR) element."""
+
+ _tag = 'ContinuityOfCareRecord'
+ _namespace = CCR_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+
+ def __init__(self, extension_elements=None,
+ extension_attributes=None, text=None):
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+ def GetAlerts(self):
+ """Helper for extracting Alert/Allergy data from the CCR.
+
+ Returns:
+ A list of ExtensionElements (one for each allergy found) or None if
+ no allergies were found in this CCR.
+ """
+ try:
+ body = self.FindExtensions('Body')[0]
+ return body.FindChildren('Alerts')[0].FindChildren('Alert')
+ except:
+ return None
+
+ def GetAllergies(self):
+ """Alias for GetAlerts()."""
+ return self.GetAlerts()
+
+ def GetProblems(self):
+ """Helper for extracting Problem/Condition data from the CCR.
+
+ Returns:
+ A list of ExtensionElements (one for each problem found) or None if
+ no problems were found in this CCR.
+ """
+ try:
+ body = self.FindExtensions('Body')[0]
+ return body.FindChildren('Problems')[0].FindChildren('Problem')
+ except:
+ return None
+
+ def GetConditions(self):
+ """Alias for GetProblems()."""
+ return self.GetProblems()
+
+ def GetProcedures(self):
+ """Helper for extracting Procedure data from the CCR.
+
+ Returns:
+ A list of ExtensionElements (one for each procedure found) or None if
+ no procedures were found in this CCR.
+ """
+ try:
+ body = self.FindExtensions('Body')[0]
+ return body.FindChildren('Procedures')[0].FindChildren('Procedure')
+ except:
+ return None
+
+ def GetImmunizations(self):
+ """Helper for extracting Immunization data from the CCR.
+
+ Returns:
+ A list of ExtensionElements (one for each immunization found) or None if
+ no immunizations were found in this CCR.
+ """
+ try:
+ body = self.FindExtensions('Body')[0]
+ return body.FindChildren('Immunizations')[0].FindChildren('Immunization')
+ except:
+ return None
+
+ def GetMedications(self):
+ """Helper for extracting Medication data from the CCR.
+
+ Returns:
+ A list of ExtensionElements (one for each medication found) or None if
+ no medications were found in this CCR.
+ """
+ try:
+ body = self.FindExtensions('Body')[0]
+ return body.FindChildren('Medications')[0].FindChildren('Medication')
+ except:
+ return None
+
+ def GetResults(self):
+ """Helper for extracting Results/Labresults data from the CCR.
+
+ Returns:
+ A list of ExtensionElements (one for each result found) or None if
+ no results were found in this CCR.
+ """
+ try:
+ body = self.FindExtensions('Body')[0]
+ return body.FindChildren('Results')[0].FindChildren('Result')
+ except:
+ return None
+
+
+class ProfileEntry(gdata.GDataEntry):
+ """The Google Health version of an Atom Entry."""
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}ContinuityOfCareRecord' % CCR_NAMESPACE] = ('ccr', Ccr)
+
+ def __init__(self, ccr=None, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None, title=None,
+ updated=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.ccr = ccr
+ gdata.GDataEntry.__init__(
+ self, author=author, category=category, content=content,
+ atom_id=atom_id, link=link, published=published, title=title,
+ updated=updated, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class ProfileFeed(gdata.GDataFeed):
+ """A feed containing a list of Google Health profile entries."""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ProfileEntry])
+
+
+class ProfileListEntry(gdata.GDataEntry):
+ """The Atom Entry in the Google Health profile list feed."""
+
+ _tag = gdata.GDataEntry._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ def GetProfileId(self):
+ return self.content.text
+
+ def GetProfileName(self):
+ return self.title.text
+
+
+class ProfileListFeed(gdata.GDataFeed):
+ """A feed containing a list of Google Health profile list entries."""
+
+ _tag = gdata.GDataFeed._tag
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ProfileListEntry])
+
+
+def ProfileEntryFromString(xml_string):
+ """Converts an XML string into a ProfileEntry object.
+
+ Args:
+ xml_string: string The XML describing a Health profile feed entry.
+
+ Returns:
+ A ProfileEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(ProfileEntry, xml_string)
+
+
+def ProfileListEntryFromString(xml_string):
+ """Converts an XML string into a ProfileListEntry object.
+
+ Args:
+ xml_string: string The XML describing a Health profile list feed entry.
+
+ Returns:
+ A ProfileListEntry object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(ProfileListEntry, xml_string)
+
+
+def ProfileFeedFromString(xml_string):
+ """Converts an XML string into a ProfileFeed object.
+
+ Args:
+ xml_string: string The XML describing a ProfileFeed feed.
+
+ Returns:
+ A ProfileFeed object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(ProfileFeed, xml_string)
+
+
+def ProfileListFeedFromString(xml_string):
+ """Converts an XML string into a ProfileListFeed object.
+
+ Args:
+ xml_string: string The XML describing a ProfileListFeed feed.
+
+ Returns:
+ A ProfileListFeed object corresponding to the given XML.
+ """
+ return atom.CreateClassFromXMLString(ProfileListFeed, xml_string)
diff --git a/python/gdata/health/service.py b/python/gdata/health/service.py
new file mode 100644
index 0000000..3d38411
--- /dev/null
+++ b/python/gdata/health/service.py
@@ -0,0 +1,263 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""HealthService extends GDataService to streamline Google Health API access.
+
+ HealthService: Provides methods to interact with the profile, profile list,
+ and register/notices feeds. Extends GDataService.
+
+ HealthProfileQuery: Queries the Google Health Profile feed.
+
+ HealthProfileListQuery: Queries the Google Health Profile list feed.
+"""
+
+__author__ = 'api.eric@google.com (Eric Bidelman)'
+
+
+import atom
+import gdata.health
+import gdata.service
+
+
+class HealthService(gdata.service.GDataService):
+
+ """Client extension for the Google Health service."""
+
+ def __init__(self, email=None, password=None, source=None,
+ use_h9_sandbox=False, server='www.google.com',
+ additional_headers=None, **kwargs):
+ """Creates a client for the Google Health service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ use_h9_sandbox: boolean (optional) True to issue requests against the
+ /h9 developer's sandbox.
+ server: string (optional) The name of the server to which a connection
+ will be opened.
+ additional_headers: dictionary (optional) Any additional headers which
+ should be included with CRUD operations.
+ kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ service = use_h9_sandbox and 'weaver' or 'health'
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service=service, source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+ self.ssl = True
+ self.use_h9_sandbox = use_h9_sandbox
+
+ def __get_service(self):
+ return self.use_h9_sandbox and 'h9' or 'health'
+
+ def GetProfileFeed(self, query=None, profile_id=None):
+ """Fetches the user's Google Health profile feed.
+
+ Args:
+ query: HealthProfileQuery or string (optional) A query to use on the
+ profile feed. If None, a HealthProfileQuery is constructed.
+ profile_id: string (optional) The profile id to query the profile feed
+ with when using ClientLogin. Note: this parameter is ignored if
+ query is set.
+
+ Returns:
+ A gdata.health.ProfileFeed object containing the user's Health profile.
+ """
+ if query is None:
+ projection = profile_id and 'ui' or 'default'
+ uri = HealthProfileQuery(
+ service=self.__get_service(), projection=projection,
+ profile_id=profile_id).ToUri()
+ elif isinstance(query, HealthProfileQuery):
+ uri = query.ToUri()
+ else:
+ uri = query
+
+ return self.GetFeed(uri, converter=gdata.health.ProfileFeedFromString)
+
+ def GetProfileListFeed(self, query=None):
+ """Fetches the user's Google Health profile feed.
+
+ Args:
+ query: HealthProfileListQuery or string (optional) A query to use
+ on the profile list feed. If None, a HealthProfileListQuery is
+ constructed to /health/feeds/profile/list or /h9/feeds/profile/list.
+
+ Returns:
+ A gdata.health.ProfileListFeed object containing the user's list
+ of profiles.
+ """
+ if not query:
+ uri = HealthProfileListQuery(service=self.__get_service()).ToUri()
+ elif isinstance(query, HealthProfileListQuery):
+ uri = query.ToUri()
+ else:
+ uri = query
+
+ return self.GetFeed(uri, converter=gdata.health.ProfileListFeedFromString)
+
+ def SendNotice(self, subject, body=None, content_type='html',
+ ccr=None, profile_id=None):
+ """Sends (posts) a notice to the user's Google Health profile.
+
+ Args:
+ subject: A string representing the message's subject line.
+ body: string (optional) The message body.
+ content_type: string (optional) The content type of the notice message
+ body. This parameter is only honored when a message body is
+ specified.
+ ccr: string (optional) The CCR XML document to reconcile into the
+ user's profile.
+ profile_id: string (optional) The profile id to work with when using
+ ClientLogin.
+
+ Returns:
+ A gdata.health.ProfileEntry object of the posted entry.
+ """
+ if body:
+ content = atom.Content(content_type=content_type, text=body)
+ else:
+ content = body
+
+ entry = gdata.GDataEntry(
+ title=atom.Title(text=subject), content=content,
+ extension_elements=[atom.ExtensionElementFromString(ccr)])
+
+ projection = profile_id and 'ui' or 'default'
+ query = HealthRegisterQuery(service=self.__get_service(),
+ projection=projection, profile_id=profile_id)
+ return self.Post(entry, query.ToUri(),
+ converter=gdata.health.ProfileEntryFromString)
+
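+
+# Illustrative sketch (editor's example, not part of the library): fetching a
+# profile with ClientLogin and pulling medication data out of its CCR entries.
+# The application source string is a placeholder.
+def _example_read_profile(email, password, profile_id):
+  """Sketch: list profiles, fetch one, and collect its CCR medications."""
+  client = HealthService(email=email, password=password,
+                         source='exampleCo-exampleApp-1')
+  client.ProgrammaticLogin()
+  profile_list = client.GetProfileListFeed()
+  profile_feed = client.GetProfileFeed(profile_id=profile_id)
+  medications = []
+  for entry in profile_feed.entry:
+    if entry.ccr is not None:
+      medications.extend(entry.ccr.GetMedications() or [])
+  return profile_list, medications
+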
+
+class HealthProfileQuery(gdata.service.Query):
+
+ """Object used to construct a URI to query the Google Health profile feed."""
+
+ def __init__(self, service='health', feed='feeds/profile',
+ projection='default', profile_id=None, text_query=None,
+ params=None, categories=None):
+ """Constructor for Health profile feed query.
+
+ Args:
+ service: string (optional) The service to query. Either 'health' or 'h9'.
+ feed: string (optional) The path for the feed. The default value is
+ 'feeds/profile'.
+ projection: string (optional) The visibility of the data. Possible values
+ are 'default' for AuthSub and 'ui' for ClientLogin. If this value
+ is set to 'ui', the profile_id parameter should also be set.
+ profile_id: string (optional) The profile id to query. This should only
+ be used when using ClientLogin.
+ text_query: str (optional) The contents of the q query parameter. The
+ contents of the text_query are URL escaped upon conversion to a URI.
+ Note: this parameter can only be used on the register feed using
+ ClientLogin.
+ params: dict (optional) Parameter value string pairs which become URL
+ params when translated to a URI. These parameters are added to
+ the query's items.
+ categories: list (optional) List of category strings which should be
+ included as query categories. See gdata.service.Query for
+ additional documentation.
+ """
+ self.service = service
+ self.profile_id = profile_id
+ self.projection = projection
+ gdata.service.Query.__init__(self, feed=feed, text_query=text_query,
+ params=params, categories=categories)
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI used to retrieve entries from the Health
+ profile feed.
+ """
+ old_feed = self.feed
+ self.feed = '/'.join([self.service, old_feed, self.projection])
+
+ if self.profile_id:
+ self.feed += '/' + self.profile_id
+ self.feed = '/%s' % (self.feed,)
+
+ new_feed = gdata.service.Query.ToUri(self)
+ self.feed = old_feed
+ return new_feed
+
+
+class HealthProfileListQuery(gdata.service.Query):
+
+ """Object used to construct a URI to query a Health profile list feed."""
+
+ def __init__(self, service='health', feed='feeds/profile/list'):
+ """Constructor for Health profile list feed query.
+
+ Args:
+ service: string (optional) The service to query. Either 'health' or 'h9'.
+ feed: string (optional) The path for the feed. The default value is
+ 'feeds/profile/list'.
+ """
+ gdata.service.Query.__init__(self, feed)
+ self.service = service
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI used to retrieve entries from the
+ profile list feed.
+ """
+ return '/%s' % ('/'.join([self.service, self.feed]),)
+
+
+class HealthRegisterQuery(gdata.service.Query):
+
+ """Object used to construct a URI to query a Health register/notice feed."""
+
+ def __init__(self, service='health', feed='feeds/register',
+ projection='default', profile_id=None):
+ """Constructor for Health register/notice feed query.
+
+ Args:
+ service: string (optional) The service to query. Either 'health' or 'h9'.
+ feed: string (optional) The path for the feed. The default value is
+ 'feeds/register'.
+ projection: string (optional) The visibility of the data. Possible values
+ are 'default' for AuthSub and 'ui' for ClientLogin. If this value
+ is set to 'ui', the profile_id parameter should also be set.
+ profile_id: string (optional) The profile id to query. This should only
+ be used when using ClientLogin.
+ """
+ gdata.service.Query.__init__(self, feed)
+ self.service = service
+ self.projection = projection
+ self.profile_id = profile_id
+
+ def ToUri(self):
+ """Generates a URI from the query parameters set in the object.
+
+ Returns:
+ A string containing the URI needed to interact with the register feed.
+ """
+ old_feed = self.feed
+ self.feed = '/'.join([self.service, old_feed, self.projection])
+ new_feed = gdata.service.Query.ToUri(self)
+ self.feed = old_feed
+
+ if self.profile_id:
+ new_feed += '/' + self.profile_id
+ return '/%s' % (new_feed,)
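A minimal offline sketch (not part of the diff), assuming these query classes ship as gdata.health.service in the packaged client; the profile id is a made-up placeholder. The classes only assemble request URIs, so no network traffic is involved:

    from gdata.health.service import HealthProfileQuery, HealthProfileListQuery

    # ClientLogin-style profile query ('ui' projection plus a profile id).
    query = HealthProfileQuery(projection='ui', profile_id='0123456789')
    print query.ToUri()       # /health/feeds/profile/ui/0123456789

    # Profile list query against the H9 sandbox.
    list_query = HealthProfileListQuery(service='h9')
    print list_query.ToUri()  # /h9/feeds/profile/list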
diff --git a/python/gdata/maps/__init__.py b/python/gdata/maps/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/gdata/maps/client.py b/python/gdata/maps/client.py
new file mode 100644
index 0000000..7c7d7e9
--- /dev/null
+++ b/python/gdata/maps/client.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains a client to communicate with the Maps Data servers.
+
+For documentation on the Maps Data API, see:
+http://code.google.com/apis/maps/documentation/mapsdata/
+"""
+
+
+__author__ = 'api.roman.public@google.com (Roman Nurik)'
+
+
+import gdata.client
+import gdata.maps.data
+import atom.data
+import atom.http_core
+import gdata.gauth
+
+
+# List user's maps, takes a user ID, or 'default'.
+MAP_URL_TEMPLATE = 'http://maps.google.com/maps/feeds/maps/%s/full'
+
+# List map's features, takes a user ID (or 'default') and map ID.
+MAP_FEATURE_URL_TEMPLATE = ('http://maps.google.com/maps'
+ '/feeds/features/%s/%s/full')
+
+# The KML mime type
+KML_CONTENT_TYPE = 'application/vnd.google-earth.kml+xml'
+
+
+class MapsClient(gdata.client.GDClient):
+ """Maps Data API GData client."""
+
+ api_version = '2'
+ auth_service = 'local'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['local']
+
+ def get_maps(self, user_id='default', auth_token=None,
+ desired_class=gdata.maps.data.MapFeed, **kwargs):
+ """Retrieves a Map feed for the given user ID.
+
+ Args:
+ user_id: An optional string representing the user ID; should be 'default'.
+
+ Returns:
+ A gdata.maps.data.MapFeed.
+ """
+ return self.get_feed(MAP_URL_TEMPLATE % user_id, auth_token=auth_token,
+ desired_class=desired_class, **kwargs)
+
+ GetMaps = get_maps
+
+ def get_features(self, map_id, user_id='default', auth_token=None,
+ desired_class=gdata.maps.data.FeatureFeed, query=None,
+ **kwargs):
+ """Retrieves a Feature feed for the given map ID/user ID combination.
+
+ Args:
+ map_id: A string representing the ID of the map whose features should be
+ retrieved.
+ user_id: An optional string representing the user ID; should be 'default'.
+
+ Returns:
+ A gdata.maps.data.FeatureFeed.
+ """
+ return self.get_feed(MAP_FEATURE_URL_TEMPLATE % (user_id, map_id),
+ auth_token=auth_token, desired_class=desired_class,
+ query=query, **kwargs)
+
+ GetFeatures = get_features
+
+ def create_map(self, title, summary=None, unlisted=False,
+ auth_token=None, title_type='text', summary_type='text',
+ **kwargs):
+ """Creates a new map and posts it to the Maps Data servers.
+
+ Args:
+ title: A string representing the title of the new map.
+ summary: An optional string representing the new map's description.
+ unlisted: An optional boolean identifying whether the map should be
+ unlisted (True) or public (False). Default False.
+
+ Returns:
+ A gdata.maps.data.Map.
+ """
+ new_entry = gdata.maps.data.Map(
+ title=atom.data.Title(text=title, type=title_type))
+ if summary:
+ new_entry.summary = atom.data.Summary(text=summary, type=summary_type)
+ if unlisted:
+ new_entry.control = atom.data.Control(draft=atom.data.Draft(text='yes'))
+ return self.post(new_entry, MAP_URL_TEMPLATE % 'default',
+ auth_token=auth_token, **kwargs)
+
+ CreateMap = create_map
+
+ def add_feature(self, map_id, title, content,
+ auth_token=None, title_type='text',
+ content_type=KML_CONTENT_TYPE, **kwargs):
+ """Adds a new feature to the given map.
+
+ Args:
+ map_id: A string representing the ID of the map to which the new feature
+ should be added.
+ title: A string representing the name/title of the new feature.
+ content: A KML string or gdata.maps.data.KmlContent object representing
+ the new feature's KML contents, including its description.
+
+ Returns:
+ A gdata.maps.data.Feature.
+ """
+ if content_type == KML_CONTENT_TYPE:
+ if type(content) != gdata.maps.data.KmlContent:
+ content = gdata.maps.data.KmlContent(kml=content)
+ else:
+ content = atom.data.Content(content=content, type=content_type)
+ new_entry = gdata.maps.data.Feature(
+ title=atom.data.Title(text=title, type=title_type),
+ content=content)
+ return self.post(new_entry, MAP_FEATURE_URL_TEMPLATE % ('default', map_id),
+ auth_token=auth_token, **kwargs)
+
+ AddFeature = add_feature
+
+ def update(self, entry, auth_token=None, **kwargs):
+ """Sends changes to a given map or feature entry to the Maps Data servers.
+
+ Args:
+ entry: A gdata.maps.data.Map or gdata.maps.data.Feature to be updated
+ server-side.
+ """
+ # The Maps Data API does not currently support ETags, so for now remove
+ # the ETag before performing an update.
+ old_etag = entry.etag
+ entry.etag = None
+ response = gdata.client.GDClient.update(self, entry,
+ auth_token=auth_token, **kwargs)
+ entry.etag = old_etag
+ return response
+
+ Update = update
+
+ def delete(self, entry_or_uri, auth_token=None, **kwargs):
+ """Deletes the given entry or entry URI server-side.
+
+ Args:
+ entry_or_uri: A gdata.maps.data.Map, gdata.maps.data.Feature, or URI
+ string representing the entry to delete.
+ """
+ if isinstance(entry_or_uri, (str, unicode, atom.http_core.Uri)):
+ return gdata.client.GDClient.delete(self, entry_or_uri,
+ auth_token=auth_token, **kwargs)
+ # The Maps Data API does not currently support ETags, so for now remove
+ # the ETag before performing a delete.
+ old_etag = entry_or_uri.etag
+ entry_or_uri.etag = None
+ response = gdata.client.GDClient.delete(self, entry_or_uri,
+ auth_token=auth_token, **kwargs)
+ # TODO: if GDClient.delete raises an exception, the entry's etag may be
+ # left as None. Should revisit this logic.
+ entry_or_uri.etag = old_etag
+ return response
+
+ Delete = delete
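A hedged end-to-end sketch of MapsClient (not part of the diff): the credentials, source string, and map title are placeholders, ClientLogin support is assumed to come from the gdata.client.GDClient base class, and the map id lookup assumes the server returns an entry id of the documented form:

    import gdata.maps.client

    client = gdata.maps.client.MapsClient()
    client.ClientLogin('user@example.com', 'password', source='example-maps-app')

    # Create an unlisted map, then list the features it contains (none yet).
    new_map = client.create_map('Restaurants', summary='Places to try',
                                unlisted=True)
    for feature in client.get_features(new_map.get_map_id()).entry:
      print feature.title.text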
diff --git a/python/gdata/maps/data.py b/python/gdata/maps/data.py
new file mode 100644
index 0000000..544611e
--- /dev/null
+++ b/python/gdata/maps/data.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Data model classes for parsing and generating XML for the Maps Data API."""
+
+
+__author__ = 'api.roman.public@google.com (Roman Nurik)'
+
+
+import re
+import atom.core
+import atom.data
+import gdata.data
+
+
+MAP_ATOM_ID_PATTERN = re.compile('/maps/feeds/maps/'
+ '(?P<user_id>\w+)/'
+ '(?P<map_id>\w+)$')
+
+FEATURE_ATOM_ID_PATTERN = re.compile('/maps/feeds/features/'
+ '(?P<user_id>\w+)/'
+ '(?P<map_id>\w+)/'
+ '(?P<feature_id>\w+)$')
+
+# The KML mime type
+KML_CONTENT_TYPE = 'application/vnd.google-earth.kml+xml'
+
+# The OGC KML 2.2 namespace
+KML_NAMESPACE = 'http://www.opengis.net/kml/2.2'
+
+class MapsDataEntry(gdata.data.GDEntry):
+ """Adds convenience methods inherited by all Maps Data entries."""
+
+ def get_user_id(self):
+ """Extracts the user ID of this entry."""
+ if self.id.text:
+ match = self.__class__.atom_id_pattern.search(self.id.text)
+ if match:
+ return match.group('user_id')
+ return None
+
+ GetUserId = get_user_id
+
+ def get_map_id(self):
+ """Extracts the map ID of this entry."""
+ if self.id.text:
+ match = self.__class__.atom_id_pattern.search(self.id.text)
+ if match:
+ return match.group('map_id')
+ return None
+
+ GetMapId = get_map_id
+
+
+class Map(MapsDataEntry):
+ """Represents a map which belongs to the user."""
+ atom_id_pattern = MAP_ATOM_ID_PATTERN
+
+
+class MapFeed(gdata.data.GDFeed):
+ """Represents an atom feed of maps."""
+ entry = [Map]
+
+
+class KmlContent(atom.data.Content):
+ """Represents an atom content element that encapsulates KML content."""
+
+ def __init__(self, **kwargs):
+ super(KmlContent, self).__init__(type=KML_CONTENT_TYPE, **kwargs)
+ if 'kml' in kwargs:
+ self.kml = kwargs['kml']
+
+ def _get_kml(self):
+ if self.children:
+ return self.children[0]
+ else:
+ return ''
+
+ def _set_kml(self, kml):
+ if not kml:
+ self.children = []
+ return
+
+ if type(kml) == str:
+ kml = atom.core.parse(kml)
+ if not kml.namespace:
+ kml.namespace = KML_NAMESPACE
+
+ self.children = [kml]
+
+ kml = property(_get_kml, _set_kml)
+
+
+class Feature(MapsDataEntry):
+ """Represents a single feature in a map."""
+ atom_id_pattern = FEATURE_ATOM_ID_PATTERN
+ content = KmlContent
+
+ def get_feature_id(self):
+ """Extracts the feature ID of this feature."""
+ if self.id.text:
+ match = self.__class__.atom_id_pattern.search(self.id.text)
+ if match:
+ return match.group('feature_id')
+ return None
+
+ GetFeatureId = get_feature_id
+
+
+class FeatureFeed(gdata.data.GDFeed):
+ """Represents an atom feed of features."""
+ entry = [Feature]
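A small sketch (not part of the diff) of how the atom id patterns and KmlContent behave; the entry id and KML snippet are made-up placeholders:

    import atom.data
    import gdata.maps.data

    m = gdata.maps.data.Map(id=atom.data.Id(
        text='http://maps.google.com/maps/feeds/maps/123456789/000451e4b2d'))
    print m.get_user_id()  # 123456789
    print m.get_map_id()   # 000451e4b2d

    # Raw KML strings are parsed and pushed into the KML 2.2 namespace.
    content = gdata.maps.data.KmlContent(
        kml='<Placemark><name>Home</name></Placemark>')
    print content.kml.tag  # Placemark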
diff --git a/python/gdata/media/__init__.py b/python/gdata/media/__init__.py
new file mode 100644
index 0000000..e6af1ae
--- /dev/null
+++ b/python/gdata/media/__init__.py
@@ -0,0 +1,355 @@
+# -*-*- encoding: utf-8 -*-*-
+#
+# This is gdata.photos.media, implementing parts of the MediaRSS spec in gdata structures
+#
+# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
+#
+# Copyright 2007 Håvard Gulldahl
+# Portions copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Essential attributes of photos in Google Photos/Picasa Web Albums are
+expressed using elements from the `media' namespace, defined in the
+MediaRSS specification[1].
+
+Due to copyright issues, the elements herein are documented sparingly; please
+consult the Google Photos API Reference Guide[2] or, alternatively, the
+official MediaRSS specification[1] for details.
+(If there is a version conflict between the two sources, stick to the
+Google Photos API).
+
+[1]: http://search.yahoo.com/mrss (version 1.1.1)
+[2]: http://code.google.com/apis/picasaweb/reference.html#media_reference
+
+Keep in mind that Google Photos only uses a subset of the MediaRSS elements
+(and some of the attributes are trimmed down, too):
+
+media:content
+media:credit
+media:description
+media:group
+media:keywords
+media:thumbnail
+media:title
+"""
+
+__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: api chokes on non-ascii chars in __author__
+__license__ = 'Apache License v2'
+
+
+import atom
+import gdata
+
+MEDIA_NAMESPACE = 'http://search.yahoo.com/mrss/'
+YOUTUBE_NAMESPACE = 'http://gdata.youtube.com/schemas/2007'
+
+
+class MediaBaseElement(atom.AtomBase):
+ """Base class for elements in the MEDIA_NAMESPACE.
+ To add new elements, you only need to add the element tag name to self._tag
+ """
+
+ _tag = ''
+ _namespace = MEDIA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, name=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Content(MediaBaseElement):
+ """(attribute container) This element describes the original content,
+ e.g. an image or a video. There may be multiple Content elements
+ in a media:Group.
+
+ For example, a video may have a
+ <media:content medium="image"> element that specifies a JPEG
+ representation of the video, and a
+ <media:content medium="video"> element that specifies the URL of the
+ video itself.
+
+ Attributes:
+ url: non-ambiguous reference to the online object
+ width: width of the object frame, in pixels
+ height: height of the object frame, in pixels
+ medium: one of `image' or `video', allowing the api user to quickly
+ determine the object's type
+ type: Internet media Type[1] (a.k.a. mime type) of the object -- a more
+ verbose way of determining the media type. To set the type member
+ in the constructor, use the content_type parameter.
+ fileSize: (optional) the size of the object, in bytes
+
+ [1]: http://en.wikipedia.org/wiki/Internet_media_type
+ """
+
+ _tag = 'content'
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['url'] = 'url'
+ _attributes['width'] = 'width'
+ _attributes['height'] = 'height'
+ _attributes['medium'] = 'medium'
+ _attributes['type'] = 'type'
+ _attributes['fileSize'] = 'fileSize'
+
+ def __init__(self, url=None, width=None, height=None,
+ medium=None, content_type=None, fileSize=None, format=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ MediaBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.url = url
+ self.width = width
+ self.height = height
+ self.medium = medium
+ self.type = content_type
+ self.fileSize = fileSize
+
+
+def ContentFromString(xml_string):
+ return atom.CreateClassFromXMLString(Content, xml_string)
+
+
+class Credit(MediaBaseElement):
+ """(string) Contains the nickname of the user who created the content,
+ e.g. `Liz Bennet'.
+
+ This is a user-specified value that should be used when referring to
+ the user by name.
+
+ Note that none of the attributes from the MediaRSS spec are supported.
+ """
+
+ _tag = 'credit'
+
+
+def CreditFromString(xml_string):
+ return atom.CreateClassFromXMLString(Credit, xml_string)
+
+
+class Description(MediaBaseElement):
+ """(string) A description of the media object.
+ Either plain unicode text, or entity-encoded html (look at the `type'
+ attribute).
+
+ E.g. `A set of photographs I took while vacationing in Italy.'
+
+ For `api' projections, the description is in plain text;
+ for `base' projections, the description is in HTML.
+
+ Attributes:
+ type: either `text' or `html'. To set the type member in the constructor,
+ use the description_type parameter.
+ """
+
+ _tag = 'description'
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['type'] = 'type'
+ def __init__(self, description_type=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ MediaBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+ self.type = description_type
+
+
+def DescriptionFromString(xml_string):
+ return atom.CreateClassFromXMLString(Description, xml_string)
+
+
+class Keywords(MediaBaseElement):
+ """(string) Lists the tags associated with the entry,
+ e.g. `italy, vacation, sunset'.
+
+ Contains a comma-separated list of tags that have been added to the photo, or
+ all tags that have been added to photos in the album.
+ """
+
+ _tag = 'keywords'
+
+
+def KeywordsFromString(xml_string):
+ return atom.CreateClassFromXMLString(Keywords, xml_string)
+
+
+class Thumbnail(MediaBaseElement):
+ """(attributes) Contains the URL of a thumbnail of a photo or album cover.
+
+ There can be multiple <media:thumbnail> elements for a given <media:group>;
+ for example, a given item may have multiple thumbnails at different sizes.
+ Photos generally have two thumbnails at different sizes;
+ albums generally have one cropped thumbnail.
+
+ If the thumbsize parameter is set in the initial query, this element points
+ to thumbnails of the requested sizes; otherwise the thumbnails are the
+ default thumbnail size.
+
+ This element must not be confused with the <gphoto:thumbnail> element.
+
+ Attributes:
+ url: The URL of the thumbnail image.
+ height: The height of the thumbnail image, in pixels.
+ width: The width of the thumbnail image, in pixels.
+ """
+
+ _tag = 'thumbnail'
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['url'] = 'url'
+ _attributes['width'] = 'width'
+ _attributes['height'] = 'height'
+ def __init__(self, url=None, width=None, height=None,
+ extension_attributes=None, text=None, extension_elements=None):
+ MediaBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.url = url
+ self.width = width
+ self.height = height
+
+
+def ThumbnailFromString(xml_string):
+ return atom.CreateClassFromXMLString(Thumbnail, xml_string)
+
+
+class Title(MediaBaseElement):
+ """(string) Contains the title of the entry's media content, in plain text.
+
+ Attributes:
+ type: Always set to plain. To set the type member in the constructor, use
+ the title_type parameter.
+ """
+
+ _tag = 'title'
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['type'] = 'type'
+ def __init__(self, title_type=None,
+ extension_attributes=None, text=None, extension_elements=None):
+ MediaBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.type = title_type
+
+
+def TitleFromString(xml_string):
+ return atom.CreateClassFromXMLString(Title, xml_string)
+
+
+class Player(MediaBaseElement):
+ """(string) Contains the embeddable player URL for the entry's media content
+ if the media is a video.
+
+ Attributes:
+ url: the URL of the embeddable player for the media content
+ """
+
+ _tag = 'player'
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['url'] = 'url'
+
+ def __init__(self, player_url=None,
+ extension_attributes=None, extension_elements=None):
+ MediaBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.url = player_url
+
+
+class Private(atom.AtomBase):
+ """The YouTube Private element"""
+ _tag = 'private'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Duration(atom.AtomBase):
+ """The YouTube Duration element"""
+ _tag = 'duration'
+ _namespace = YOUTUBE_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['seconds'] = 'seconds'
+
+
+class Category(MediaBaseElement):
+ """The mediagroup:category element"""
+
+ _tag = 'category'
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['term'] = 'term'
+ _attributes['scheme'] = 'scheme'
+ _attributes['label'] = 'label'
+
+ def __init__(self, term=None, scheme=None, label=None, text=None,
+ extension_elements=None, extension_attributes=None):
+ """Constructor for Category
+
+ Args:
+ term: str
+ scheme: str
+ label: str
+ text: str The text data in this element
+ extension_elements: list A list of ExtensionElement instances
+ extension_attributes: dict A dictionary of attribute value string pairs
+ """
+
+ self.term = term
+ self.scheme = scheme
+ self.label = label
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+class Group(MediaBaseElement):
+ """Container element for all media elements.
+ The <media:group> element can appear as a child of an album, photo or
+ video entry."""
+
+ _tag = 'group'
+ _children = atom.AtomBase._children.copy()
+ _children['{%s}content' % MEDIA_NAMESPACE] = ('content', [Content,])
+ _children['{%s}credit' % MEDIA_NAMESPACE] = ('credit', Credit)
+ _children['{%s}description' % MEDIA_NAMESPACE] = ('description', Description)
+ _children['{%s}keywords' % MEDIA_NAMESPACE] = ('keywords', Keywords)
+ _children['{%s}thumbnail' % MEDIA_NAMESPACE] = ('thumbnail', [Thumbnail,])
+ _children['{%s}title' % MEDIA_NAMESPACE] = ('title', Title)
+ _children['{%s}category' % MEDIA_NAMESPACE] = ('category', [Category,])
+ _children['{%s}duration' % YOUTUBE_NAMESPACE] = ('duration', Duration)
+ _children['{%s}private' % YOUTUBE_NAMESPACE] = ('private', Private)
+ _children['{%s}player' % MEDIA_NAMESPACE] = ('player', Player)
+
+ def __init__(self, content=None, credit=None, description=None, keywords=None,
+ thumbnail=None, title=None, duration=None, private=None,
+ category=None, player=None, extension_elements=None,
+ extension_attributes=None, text=None):
+
+ MediaBaseElement.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+ self.content = content
+ self.credit = credit
+ self.description = description
+ self.keywords = keywords
+ self.thumbnail = thumbnail or []
+ self.title = title
+ self.duration = duration
+ self.private = private
+ self.category = category or []
+ self.player = player
+
+
+def GroupFromString(xml_string):
+ return atom.CreateClassFromXMLString(Group, xml_string)
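A short sketch (not part of the diff) that assembles a media:group by hand and serializes it via the inherited atom.AtomBase.ToString(); all values are placeholders:

    import gdata.media

    group = gdata.media.Group(
        title=gdata.media.Title(text='Sunset over Naples'),
        keywords=gdata.media.Keywords(text='italy, vacation, sunset'),
        content=[gdata.media.Content(url='http://example.com/sunset.jpg',
                                     medium='image', content_type='image/jpeg',
                                     width='1600', height='1200')])
    print group.ToString()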
diff --git a/python/gdata/media/data.py b/python/gdata/media/data.py
new file mode 100644
index 0000000..bb5d2c8
--- /dev/null
+++ b/python/gdata/media/data.py
@@ -0,0 +1,159 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the data classes of the Yahoo! Media RSS Extension"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+
+
+MEDIA_TEMPLATE = '{http://search.yahoo.com/mrss//}%s'
+
+
+class MediaCategory(atom.core.XmlElement):
+ """Describes a media category."""
+ _qname = MEDIA_TEMPLATE % 'category'
+ scheme = 'scheme'
+ label = 'label'
+
+
+class MediaCopyright(atom.core.XmlElement):
+ """Describes a media copyright."""
+ _qname = MEDIA_TEMPLATE % 'copyright'
+ url = 'url'
+
+
+class MediaCredit(atom.core.XmlElement):
+ """Describes a media credit."""
+ _qname = MEDIA_TEMPLATE % 'credit'
+ role = 'role'
+ scheme = 'scheme'
+
+
+class MediaDescription(atom.core.XmlElement):
+ """Describes a media description."""
+ _qname = MEDIA_TEMPLATE % 'description'
+ type = 'type'
+
+
+class MediaHash(atom.core.XmlElement):
+ """Describes a media hash."""
+ _qname = MEDIA_TEMPLATE % 'hash'
+ algo = 'algo'
+
+
+class MediaKeywords(atom.core.XmlElement):
+ """Describes a media keywords."""
+ _qname = MEDIA_TEMPLATE % 'keywords'
+
+
+class MediaPlayer(atom.core.XmlElement):
+ """Describes a media player."""
+ _qname = MEDIA_TEMPLATE % 'player'
+ height = 'height'
+ width = 'width'
+ url = 'url'
+
+
+class MediaRating(atom.core.XmlElement):
+ """Describes a media rating."""
+ _qname = MEDIA_TEMPLATE % 'rating'
+ scheme = 'scheme'
+
+
+class MediaRestriction(atom.core.XmlElement):
+ """Describes a media restriction."""
+ _qname = MEDIA_TEMPLATE % 'restriction'
+ relationship = 'relationship'
+ type = 'type'
+
+
+class MediaText(atom.core.XmlElement):
+ """Describes a media text."""
+ _qname = MEDIA_TEMPLATE % 'text'
+ end = 'end'
+ lang = 'lang'
+ type = 'type'
+ start = 'start'
+
+
+class MediaThumbnail(atom.core.XmlElement):
+ """Describes a media thumbnail."""
+ _qname = MEDIA_TEMPLATE % 'thumbnail'
+ time = 'time'
+ url = 'url'
+ width = 'width'
+ height = 'height'
+
+
+class MediaTitle(atom.core.XmlElement):
+ """Describes a media title."""
+ _qname = MEDIA_TEMPLATE % 'title'
+ type = 'type'
+
+
+class MediaContent(atom.core.XmlElement):
+ """Describes a media content."""
+ _qname = MEDIA_TEMPLATE % 'content'
+ bitrate = 'bitrate'
+ is_default = 'isDefault'
+ medium = 'medium'
+ height = 'height'
+ credit = [MediaCredit]
+ language = 'language'
+ hash = MediaHash
+ width = 'width'
+ player = MediaPlayer
+ url = 'url'
+ file_size = 'fileSize'
+ channels = 'channels'
+ expression = 'expression'
+ text = [MediaText]
+ samplingrate = 'samplingrate'
+ title = MediaTitle
+ category = [MediaCategory]
+ rating = [MediaRating]
+ type = 'type'
+ description = MediaDescription
+ framerate = 'framerate'
+ thumbnail = [MediaThumbnail]
+ duration = 'duration'
+ copyright = MediaCopyright
+ keywords = MediaKeywords
+ restriction = [MediaRestriction]
+
+
+class MediaGroup(atom.core.XmlElement):
+ """Describes a media group."""
+ _qname = MEDIA_TEMPLATE % 'group'
+ credit = [MediaCredit]
+ content = [MediaContent]
+ copyright = MediaCopyright
+ description = MediaDescription
+ category = [MediaCategory]
+ player = MediaPlayer
+ rating = [MediaRating]
+ hash = MediaHash
+ title = MediaTitle
+ keywords = MediaKeywords
+ restriction = [MediaRestriction]
+ thumbnail = [MediaThumbnail]
+ text = [MediaText]
+
+
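For the v2-style classes above, a comparable sketch (not part of the diff): they are plain atom.core.XmlElement declarations, so they can be built and serialized directly; all values are placeholders:

    import gdata.media.data

    group = gdata.media.data.MediaGroup(
        title=gdata.media.data.MediaTitle(text='Sunset over Naples'),
        keywords=gdata.media.data.MediaKeywords(text='italy, vacation, sunset'),
        thumbnail=[gdata.media.data.MediaThumbnail(
            url='http://example.com/thumb.jpg', width='72', height='54')])
    print group.to_string()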
diff --git a/python/gdata/notebook/__init__.py b/python/gdata/notebook/__init__.py
new file mode 100644
index 0000000..22071f7
--- /dev/null
+++ b/python/gdata/notebook/__init__.py
@@ -0,0 +1,15 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/python/gdata/notebook/data.py b/python/gdata/notebook/data.py
new file mode 100644
index 0000000..53405e0
--- /dev/null
+++ b/python/gdata/notebook/data.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains the data classes of the Google Notebook Data API"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.data
+import gdata.opensearch.data
+
+
+NB_TEMPLATE = '{http://schemas.google.com/notes/2008/}%s'
+
+
+class ComesAfter(atom.core.XmlElement):
+ """Preceding element."""
+ _qname = NB_TEMPLATE % 'comesAfter'
+ id = 'id'
+
+
+class NoteEntry(gdata.data.GDEntry):
+ """Describes a note entry in the feed of a user's notebook."""
+
+
+class NotebookFeed(gdata.data.GDFeed):
+ """Describes a notebook feed."""
+ entry = [NoteEntry]
+
+
+class NotebookListEntry(gdata.data.GDEntry):
+ """Describes a note list entry in the feed of a user's list of public notebooks."""
+
+
+class NotebookListFeed(gdata.data.GDFeed):
+ """Describes a notebook list feed."""
+ entry = [NotebookListEntry]
+
+
diff --git a/python/gdata/oauth/__init__.py b/python/gdata/oauth/__init__.py
new file mode 100644
index 0000000..44d9c7a
--- /dev/null
+++ b/python/gdata/oauth/__init__.py
@@ -0,0 +1,529 @@
+import cgi
+import urllib
+import time
+import random
+import urlparse
+import hmac
+import binascii
+
+VERSION = '1.0' # Hi Blaine!
+HTTP_METHOD = 'GET'
+SIGNATURE_METHOD = 'PLAINTEXT'
+
+# Generic exception class
+class OAuthError(RuntimeError):
+ def __init__(self, message='OAuth error occurred.'):
+ self.message = message
+
+# optional WWW-Authenticate header (401 error)
+def build_authenticate_header(realm=''):
+ return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
+
+# url escape
+def escape(s):
+ # escape '/' too
+ return urllib.quote(s, safe='~')
+
+# util function: current timestamp
+# seconds since epoch (UTC)
+def generate_timestamp():
+ return int(time.time())
+
+# util function: nonce
+# pseudorandom number
+def generate_nonce(length=8):
+ return ''.join([str(random.randint(0, 9)) for i in range(length)])
+
+# OAuthConsumer is a data type that represents the identity of the Consumer
+# via its shared secret with the Service Provider.
+class OAuthConsumer(object):
+ key = None
+ secret = None
+
+ def __init__(self, key, secret):
+ self.key = key
+ self.secret = secret
+
+# OAuthToken is a data type that represents an End User via either an access
+# or request token.
+class OAuthToken(object):
+ # access tokens and request tokens
+ key = None
+ secret = None
+
+ '''
+ key = the token
+ secret = the token secret
+ '''
+ def __init__(self, key, secret):
+ self.key = key
+ self.secret = secret
+
+ def to_string(self):
+ return urllib.urlencode({'oauth_token': self.key, 'oauth_token_secret': self.secret})
+
+ # return a token from something like:
+ # oauth_token_secret=digg&oauth_token=digg
+ def from_string(s):
+ params = cgi.parse_qs(s, keep_blank_values=False)
+ key = params['oauth_token'][0]
+ secret = params['oauth_token_secret'][0]
+ return OAuthToken(key, secret)
+ from_string = staticmethod(from_string)
+
+ def __str__(self):
+ return self.to_string()
+
+# OAuthRequest represents the request and can be serialized
+class OAuthRequest(object):
+ '''
+ OAuth parameters:
+ - oauth_consumer_key
+ - oauth_token
+ - oauth_signature_method
+ - oauth_signature
+ - oauth_timestamp
+ - oauth_nonce
+ - oauth_version
+ ... any additional parameters, as defined by the Service Provider.
+ '''
+ parameters = None # oauth parameters
+ http_method = HTTP_METHOD
+ http_url = None
+ version = VERSION
+
+ def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
+ self.http_method = http_method
+ self.http_url = http_url
+ self.parameters = parameters or {}
+
+ def set_parameter(self, parameter, value):
+ self.parameters[parameter] = value
+
+ def get_parameter(self, parameter):
+ try:
+ return self.parameters[parameter]
+ except:
+ raise OAuthError('Parameter not found: %s' % parameter)
+
+ def _get_timestamp_nonce(self):
+ return self.get_parameter('oauth_timestamp'), self.get_parameter('oauth_nonce')
+
+ # get any non-oauth parameters
+ def get_nonoauth_parameters(self):
+ parameters = {}
+ for k, v in self.parameters.iteritems():
+ # ignore oauth parameters
+ if k.find('oauth_') < 0:
+ parameters[k] = v
+ return parameters
+
+ # serialize as a header for an HTTPAuth request
+ def to_header(self, realm=''):
+ auth_header = 'OAuth realm="%s"' % realm
+ # add the oauth parameters
+ if self.parameters:
+ for k, v in self.parameters.iteritems():
+ if k[:6] == 'oauth_':
+ auth_header += ', %s="%s"' % (k, escape(str(v)))
+ return {'Authorization': auth_header}
+
+ # serialize as post data for a POST request
+ def to_postdata(self):
+ return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) for k, v in self.parameters.iteritems()])
+
+ # serialize as a url for a GET request
+ def to_url(self):
+ return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())
+
+ # return a string that consists of all the parameters that need to be signed
+ def get_normalized_parameters(self):
+ params = self.parameters
+ try:
+ # exclude the signature if it exists
+ del params['oauth_signature']
+ except:
+ pass
+ key_values = params.items()
+ # sort lexicographically, first by key, then by value
+ key_values.sort()
+ # combine key value pairs in string and escape
+ return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) for k, v in key_values])
+
+ # just uppercases the http method
+ def get_normalized_http_method(self):
+ return self.http_method.upper()
+
+ # parses the url and rebuilds it to be scheme://host/path
+ def get_normalized_http_url(self):
+ parts = urlparse.urlparse(self.http_url)
+ host = parts[1].lower()
+ if host.endswith(':80') or host.endswith(':443'):
+ host = host.split(':')[0]
+ url_string = '%s://%s%s' % (parts[0], host, parts[2]) # scheme, netloc, path
+ return url_string
+
+ # set the signature parameter to the result of build_signature
+ def sign_request(self, signature_method, consumer, token):
+ # set the signature method
+ self.set_parameter('oauth_signature_method', signature_method.get_name())
+ # set the signature
+ self.set_parameter('oauth_signature', self.build_signature(signature_method, consumer, token))
+
+ def build_signature(self, signature_method, consumer, token):
+ # call the build signature method within the signature method
+ return signature_method.build_signature(self, consumer, token)
+
+ def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
+ # combine multiple parameter sources
+ if parameters is None:
+ parameters = {}
+
+ # headers
+ if headers and 'Authorization' in headers:
+ auth_header = headers['Authorization']
+ # check that the authorization header is OAuth
+ if auth_header.find('OAuth') > -1:
+ try:
+ # get the parameters from the header
+ header_params = OAuthRequest._split_header(auth_header)
+ parameters.update(header_params)
+ except:
+ raise OAuthError('Unable to parse OAuth parameters from Authorization header.')
+
+ # GET or POST query string
+ if query_string:
+ query_params = OAuthRequest._split_url_string(query_string)
+ parameters.update(query_params)
+
+ # URL parameters
+ param_str = urlparse.urlparse(http_url)[4] # query
+ url_params = OAuthRequest._split_url_string(param_str)
+ parameters.update(url_params)
+
+ if parameters:
+ return OAuthRequest(http_method, http_url, parameters)
+
+ return None
+ from_request = staticmethod(from_request)
+
+ def from_consumer_and_token(oauth_consumer, token=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
+ if not parameters:
+ parameters = {}
+
+ defaults = {
+ 'oauth_consumer_key': oauth_consumer.key,
+ 'oauth_timestamp': generate_timestamp(),
+ 'oauth_nonce': generate_nonce(),
+ 'oauth_version': OAuthRequest.version,
+ }
+
+ defaults.update(parameters)
+ parameters = defaults
+
+ if token:
+ parameters['oauth_token'] = token.key
+
+ return OAuthRequest(http_method, http_url, parameters)
+ from_consumer_and_token = staticmethod(from_consumer_and_token)
+
+ def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
+ if not parameters:
+ parameters = {}
+
+ parameters['oauth_token'] = token.key
+
+ if callback:
+ parameters['oauth_callback'] = callback
+
+ return OAuthRequest(http_method, http_url, parameters)
+ from_token_and_callback = staticmethod(from_token_and_callback)
+
+ # util function: turn Authorization: header into parameters, has to do some unescaping
+ def _split_header(header):
+ params = {}
+ parts = header[6:].split(',')
+ for param in parts:
+ # ignore realm parameter
+ if param.find('realm') > -1:
+ continue
+ # remove whitespace
+ param = param.strip()
+ # split key-value
+ param_parts = param.split('=', 1)
+ # remove quotes and unescape the value
+ params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
+ return params
+ _split_header = staticmethod(_split_header)
+
+ # util function: turn url string into parameters, has to do some unescaping
+ # even empty values should be included
+ def _split_url_string(param_str):
+ parameters = cgi.parse_qs(param_str, keep_blank_values=True)
+ for k, v in parameters.iteritems():
+ parameters[k] = urllib.unquote(v[0])
+ return parameters
+ _split_url_string = staticmethod(_split_url_string)
+
+# OAuthServer is a worker to check a requests validity against a data store
+class OAuthServer(object):
+ timestamp_threshold = 300 # in seconds, five minutes
+ version = VERSION
+ signature_methods = None
+ data_store = None
+
+ def __init__(self, data_store=None, signature_methods=None):
+ self.data_store = data_store
+ self.signature_methods = signature_methods or {}
+
+ def set_data_store(self, oauth_data_store):
+ self.data_store = oauth_data_store
+
+ def get_data_store(self):
+ return self.data_store
+
+ def add_signature_method(self, signature_method):
+ self.signature_methods[signature_method.get_name()] = signature_method
+ return self.signature_methods
+
+ # process a request_token request
+ # returns the request token on success
+ def fetch_request_token(self, oauth_request):
+ try:
+ # get the request token for authorization
+ token = self._get_token(oauth_request, 'request')
+ except OAuthError:
+ # no token required for the initial token request
+ version = self._get_version(oauth_request)
+ consumer = self._get_consumer(oauth_request)
+ self._check_signature(oauth_request, consumer, None)
+ # fetch a new token
+ token = self.data_store.fetch_request_token(consumer)
+ return token
+
+ # process an access_token request
+ # returns the access token on success
+ def fetch_access_token(self, oauth_request):
+ version = self._get_version(oauth_request)
+ consumer = self._get_consumer(oauth_request)
+ # get the request token
+ token = self._get_token(oauth_request, 'request')
+ self._check_signature(oauth_request, consumer, token)
+ new_token = self.data_store.fetch_access_token(consumer, token)
+ return new_token
+
+ # verify an api call, checks all the parameters
+ def verify_request(self, oauth_request):
+ # -> consumer and token
+ version = self._get_version(oauth_request)
+ consumer = self._get_consumer(oauth_request)
+ # get the access token
+ token = self._get_token(oauth_request, 'access')
+ self._check_signature(oauth_request, consumer, token)
+ parameters = oauth_request.get_nonoauth_parameters()
+ return consumer, token, parameters
+
+ # authorize a request token
+ def authorize_token(self, token, user):
+ return self.data_store.authorize_request_token(token, user)
+
+ # get the callback url
+ def get_callback(self, oauth_request):
+ return oauth_request.get_parameter('oauth_callback')
+
+ # optional support for the authenticate header
+ def build_authenticate_header(self, realm=''):
+ return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
+
+ # verify the correct version request for this server
+ def _get_version(self, oauth_request):
+ try:
+ version = oauth_request.get_parameter('oauth_version')
+ except:
+ version = VERSION
+ if version and version != self.version:
+ raise OAuthError('OAuth version %s not supported.' % str(version))
+ return version
+
+ # figure out the signature with some defaults
+ def _get_signature_method(self, oauth_request):
+ try:
+ signature_method = oauth_request.get_parameter('oauth_signature_method')
+ except:
+ signature_method = SIGNATURE_METHOD
+ try:
+ # get the signature method object
+ signature_method = self.signature_methods[signature_method]
+ except:
+ signature_method_names = ', '.join(self.signature_methods.keys())
+ raise OAuthError('Signature method %s not supported, try one of the following: %s' % (signature_method, signature_method_names))
+
+ return signature_method
+
+ def _get_consumer(self, oauth_request):
+ consumer_key = oauth_request.get_parameter('oauth_consumer_key')
+ if not consumer_key:
+ raise OAuthError('Invalid consumer key.')
+ consumer = self.data_store.lookup_consumer(consumer_key)
+ if not consumer:
+ raise OAuthError('Invalid consumer.')
+ return consumer
+
+ # try to find the token for the provided request token key
+ def _get_token(self, oauth_request, token_type='access'):
+ token_field = oauth_request.get_parameter('oauth_token')
+ consumer = self._get_consumer(oauth_request)
+ token = self.data_store.lookup_token(consumer, token_type, token_field)
+ if not token:
+ raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
+ return token
+
+ def _check_signature(self, oauth_request, consumer, token):
+ timestamp, nonce = oauth_request._get_timestamp_nonce()
+ self._check_timestamp(timestamp)
+ self._check_nonce(consumer, token, nonce)
+ signature_method = self._get_signature_method(oauth_request)
+ try:
+ signature = oauth_request.get_parameter('oauth_signature')
+ except:
+ raise OAuthError('Missing signature.')
+ # validate the signature
+ valid_sig = signature_method.check_signature(oauth_request, consumer, token, signature)
+ if not valid_sig:
+ key, base = signature_method.build_signature_base_string(oauth_request, consumer, token)
+ raise OAuthError('Invalid signature. Expected signature base string: %s' % base)
+ built = signature_method.build_signature(oauth_request, consumer, token)
+
+ def _check_timestamp(self, timestamp):
+ # verify that timestamp is recentish
+ timestamp = int(timestamp)
+ now = int(time.time())
+ lapsed = now - timestamp
+ if lapsed > self.timestamp_threshold:
+ raise OAuthError('Expired timestamp: given %d and now %s has a greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold))
+
+ def _check_nonce(self, consumer, token, nonce):
+ # verify that the nonce is uniqueish
+ nonce = self.data_store.lookup_nonce(consumer, token, nonce)
+ if nonce:
+ raise OAuthError('Nonce already used: %s' % str(nonce))
+
+# OAuthClient is a worker to attempt to execute a request
+class OAuthClient(object):
+ consumer = None
+ token = None
+
+ def __init__(self, oauth_consumer, oauth_token):
+ self.consumer = oauth_consumer
+ self.token = oauth_token
+
+ def get_consumer(self):
+ return self.consumer
+
+ def get_token(self):
+ return self.token
+
+ def fetch_request_token(self, oauth_request):
+ # -> OAuthToken
+ raise NotImplementedError
+
+ def fetch_access_token(self, oauth_request):
+ # -> OAuthToken
+ raise NotImplementedError
+
+ def access_resource(self, oauth_request):
+ # -> some protected resource
+ raise NotImplementedError
+
+# OAuthDataStore is a database abstraction used to lookup consumers and tokens
+class OAuthDataStore(object):
+
+ def lookup_consumer(self, key):
+ # -> OAuthConsumer
+ raise NotImplementedError
+
+ def lookup_token(self, oauth_consumer, token_type, token_token):
+ # -> OAuthToken
+ raise NotImplementedError
+
+ def lookup_nonce(self, oauth_consumer, oauth_token, nonce, timestamp):
+ # -> OAuthToken
+ raise NotImplementedError
+
+ def fetch_request_token(self, oauth_consumer):
+ # -> OAuthToken
+ raise NotImplementedError
+
+ def fetch_access_token(self, oauth_consumer, oauth_token):
+ # -> OAuthToken
+ raise NotImplementedError
+
+ def authorize_request_token(self, oauth_token, user):
+ # -> OAuthToken
+ raise NotImplementedError
+
+# OAuthSignatureMethod is a strategy class that implements a signature method
+class OAuthSignatureMethod(object):
+ def get_name(self):
+ # -> str
+ raise NotImplementedError
+
+ def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
+ # -> str key, str raw
+ raise NotImplementedError
+
+ def build_signature(self, oauth_request, oauth_consumer, oauth_token):
+ # -> str
+ raise NotImplementedError
+
+ def check_signature(self, oauth_request, consumer, token, signature):
+ built = self.build_signature(oauth_request, consumer, token)
+ return built == signature
+
+class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
+
+ def get_name(self):
+ return 'HMAC-SHA1'
+
+ def build_signature_base_string(self, oauth_request, consumer, token):
+ sig = (
+ escape(oauth_request.get_normalized_http_method()),
+ escape(oauth_request.get_normalized_http_url()),
+ escape(oauth_request.get_normalized_parameters()),
+ )
+
+ key = '%s&' % escape(consumer.secret)
+ if token:
+ key += escape(token.secret)
+ raw = '&'.join(sig)
+ return key, raw
+
+ def build_signature(self, oauth_request, consumer, token):
+ # build the base signature string
+ key, raw = self.build_signature_base_string(oauth_request, consumer, token)
+
+ # hmac object
+ try:
+ import hashlib # 2.5
+ hashed = hmac.new(key, raw, hashlib.sha1)
+ except:
+ import sha # deprecated
+ hashed = hmac.new(key, raw, sha)
+
+ # calculate the digest base 64
+ return binascii.b2a_base64(hashed.digest())[:-1]
+
+class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
+
+ def get_name(self):
+ return 'PLAINTEXT'
+
+ def build_signature_base_string(self, oauth_request, consumer, token):
+ # concatenate the consumer key and secret
+ sig = escape(consumer.secret) + '&'
+ if token:
+ sig = sig + escape(token.secret)
+ return sig
+
+ def build_signature(self, oauth_request, consumer, token):
+ return self.build_signature_base_string(oauth_request, consumer, token)
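A brief signing sketch (not part of the diff) that uses only the classes defined above; the consumer key, secret, token and URL are placeholders:

    import gdata.oauth as oauth

    consumer = oauth.OAuthConsumer('example-key', 'example-secret')
    token = oauth.OAuthToken('request-token', 'request-token-secret')
    request = oauth.OAuthRequest.from_consumer_and_token(
        consumer, token=token, http_method='GET',
        http_url='http://example.com/photos', parameters={'size': 'original'})
    request.sign_request(oauth.OAuthSignatureMethod_HMAC_SHA1(), consumer, token)
    print request.to_header()['Authorization']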
diff --git a/python/gdata/oauth/rsa.py b/python/gdata/oauth/rsa.py
new file mode 100644
index 0000000..f8d9b85
--- /dev/null
+++ b/python/gdata/oauth/rsa.py
@@ -0,0 +1,120 @@
+#!/usr/bin/python
+
+"""
+requires tlslite - http://trevp.net/tlslite/
+
+"""
+
+import base64
+import binascii
+
+from gdata.tlslite.utils import keyfactory
+from gdata.tlslite.utils import cryptomath
+
+# XXX andy: ugly local import due to module name, oauth.oauth
+import gdata.oauth as oauth
+
+class OAuthSignatureMethod_RSA_SHA1(oauth.OAuthSignatureMethod):
+ def get_name(self):
+ return "RSA-SHA1"
+
+ def _fetch_public_cert(self, oauth_request):
+ # not implemented yet, ideas are:
+ # (1) do a lookup in a table of trusted certs keyed off of consumer
+ # (2) fetch via http using a url provided by the requester
+ # (3) some sort of specific discovery code based on request
+ #
+ # either way should return a string representation of the certificate
+ raise NotImplementedError
+
+ def _fetch_private_cert(self, oauth_request):
+ # not implemented yet, ideas are:
+ # (1) do a lookup in a table of trusted certs keyed off of consumer
+ #
+ # either way should return a string representation of the certificate
+ raise NotImplementedError
+
+ def build_signature_base_string(self, oauth_request, consumer, token):
+ sig = (
+ oauth.escape(oauth_request.get_normalized_http_method()),
+ oauth.escape(oauth_request.get_normalized_http_url()),
+ oauth.escape(oauth_request.get_normalized_parameters()),
+ )
+ key = ''
+ raw = '&'.join(sig)
+ return key, raw
+
+ def build_signature(self, oauth_request, consumer, token):
+ key, base_string = self.build_signature_base_string(oauth_request,
+ consumer,
+ token)
+
+ # Fetch the private key cert based on the request
+ cert = self._fetch_private_cert(oauth_request)
+
+ # Pull the private key from the certificate
+ privatekey = keyfactory.parsePrivateKey(cert)
+
+ # Convert base_string to bytes
+ #base_string_bytes = cryptomath.createByteArraySequence(base_string)
+
+ # Sign using the key
+ signed = privatekey.hashAndSign(base_string)
+
+ return binascii.b2a_base64(signed)[:-1]
+
+ def check_signature(self, oauth_request, consumer, token, signature):
+ decoded_sig = base64.b64decode(signature)
+
+ key, base_string = self.build_signature_base_string(oauth_request,
+ consumer,
+ token)
+
+ # Fetch the public key cert based on the request
+ cert = self._fetch_public_cert(oauth_request)
+
+ # Pull the public key from the certificate
+ publickey = keyfactory.parsePEMKey(cert, public=True)
+
+ # Check the signature
+ ok = publickey.hashAndVerify(decoded_sig, base_string)
+
+ return ok
+
+
+class TestOAuthSignatureMethod_RSA_SHA1(OAuthSignatureMethod_RSA_SHA1):
+ def _fetch_public_cert(self, oauth_request):
+ cert = """
+-----BEGIN CERTIFICATE-----
+MIIBpjCCAQ+gAwIBAgIBATANBgkqhkiG9w0BAQUFADAZMRcwFQYDVQQDDA5UZXN0
+IFByaW5jaXBhbDAeFw03MDAxMDEwODAwMDBaFw0zODEyMzEwODAwMDBaMBkxFzAV
+BgNVBAMMDlRlc3QgUHJpbmNpcGFsMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
+gQC0YjCwIfYoprq/FQO6lb3asXrxLlJFuCvtinTF5p0GxvQGu5O3gYytUvtC2JlY
+zypSRjVxwxrsuRcP3e641SdASwfrmzyvIgP08N4S0IFzEURkV1wp/IpH7kH41Etb
+mUmrXSwfNZsnQRE5SYSOhh+LcK2wyQkdgcMv11l4KoBkcwIDAQABMA0GCSqGSIb3
+DQEBBQUAA4GBAGZLPEuJ5SiJ2ryq+CmEGOXfvlTtEL2nuGtr9PewxkgnOjZpUy+d
+4TvuXJbNQc8f4AMWL/tO9w0Fk80rWKp9ea8/df4qMq5qlFWlx6yOLQxumNOmECKb
+WpkUQDIDJEoFUzKMVuJf4KO/FJ345+BNLGgbJ6WujreoM1X/gYfdnJ/J
+-----END CERTIFICATE-----
+"""
+ return cert
+
+ def _fetch_private_cert(self, oauth_request):
+ cert = """
+-----BEGIN PRIVATE KEY-----
+MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBALRiMLAh9iimur8V
+A7qVvdqxevEuUkW4K+2KdMXmnQbG9Aa7k7eBjK1S+0LYmVjPKlJGNXHDGuy5Fw/d
+7rjVJ0BLB+ubPK8iA/Tw3hLQgXMRRGRXXCn8ikfuQfjUS1uZSatdLB81mydBETlJ
+hI6GH4twrbDJCR2Bwy/XWXgqgGRzAgMBAAECgYBYWVtleUzavkbrPjy0T5FMou8H
+X9u2AC2ry8vD/l7cqedtwMPp9k7TubgNFo+NGvKsl2ynyprOZR1xjQ7WgrgVB+mm
+uScOM/5HVceFuGRDhYTCObE+y1kxRloNYXnx3ei1zbeYLPCHdhxRYW7T0qcynNmw
+rn05/KO2RLjgQNalsQJBANeA3Q4Nugqy4QBUCEC09SqylT2K9FrrItqL2QKc9v0Z
+zO2uwllCbg0dwpVuYPYXYvikNHHg+aCWF+VXsb9rpPsCQQDWR9TT4ORdzoj+Nccn
+qkMsDmzt0EfNaAOwHOmVJ2RVBspPcxt5iN4HI7HNeG6U5YsFBb+/GZbgfBT3kpNG
+WPTpAkBI+gFhjfJvRw38n3g/+UeAkwMI2TJQS4n8+hid0uus3/zOjDySH3XHCUno
+cn1xOJAyZODBo47E+67R4jV1/gzbAkEAklJaspRPXP877NssM5nAZMU0/O/NGCZ+
+3jPgDUno6WbJn5cqm8MqWhW1xGkImgRk+fkDBquiq4gPiT898jusgQJAd5Zrr6Q8
+AO/0isr/3aa6O6NLQxISLKcPDk2NOccAfS/xOtfOz4sJYM3+Bs4Io9+dZGSDCA54
+Lw03eHTNQghS0A==
+-----END PRIVATE KEY-----
+"""
+ return cert
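A hedged sketch (not part of the diff) of wiring up RSA-SHA1 signing: the two _fetch_*_cert hooks must be overridden by the caller. The certificate file names are placeholders, and tlslite plus a valid PEM key pair are required for this to actually run:

    import gdata.oauth as oauth
    import gdata.oauth.rsa as oauth_rsa

    class FileCertRSA(oauth_rsa.OAuthSignatureMethod_RSA_SHA1):
      def _fetch_public_cert(self, oauth_request):
        return open('server.crt').read()

      def _fetch_private_cert(self, oauth_request):
        return open('server.key').read()

    consumer = oauth.OAuthConsumer('example-key', '')
    request = oauth.OAuthRequest.from_consumer_and_token(
        consumer, http_method='GET', http_url='http://example.com/photos')
    request.sign_request(FileCertRSA(), consumer, None)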
diff --git a/python/gdata/opensearch/__init__.py b/python/gdata/opensearch/__init__.py
new file mode 100644
index 0000000..22071f7
--- /dev/null
+++ b/python/gdata/opensearch/__init__.py
@@ -0,0 +1,15 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/python/gdata/opensearch/data.py b/python/gdata/opensearch/data.py
new file mode 100644
index 0000000..89d7a28
--- /dev/null
+++ b/python/gdata/opensearch/data.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains the data classes of the OpenSearch Extension"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+
+
+OPENSEARCH_TEMPLATE_V1 = '{http://a9.com/-/spec/opensearchrss/1.0//}%s'
+OPENSEARCH_TEMPLATE_V2 = '{http://a9.com/-/spec/opensearch/1.1//}%s'
+
+
+class ItemsPerPage(atom.core.XmlElement):
+ """Describes the number of items that will be returned per page for paged feeds"""
+ _qname = (OPENSEARCH_TEMPLATE_V1 % 'itemsPerPage',
+ OPENSEARCH_TEMPLATE_V2 % 'itemsPerPage')
+
+
+class StartIndex(atom.core.XmlElement):
+ """Describes the starting index of the contained entries for paged feeds"""
+ _qname = (OPENSEARCH_TEMPLATE_V1 % 'startIndex',
+ OPENSEARCH_TEMPLATE_V2 % 'startIndex')
+
+
+class TotalResults(atom.core.XmlElement):
+ """Describes the total number of results associated with this feed"""
+ _qname = (OPENSEARCH_TEMPLATE_V1 % 'totalResults',
+ OPENSEARCH_TEMPLATE_V2 % 'totalResults')
+
+
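A tiny sketch (not part of the diff): these are ordinary atom.core.XmlElement declarations, so they can be constructed and read directly; the count is a placeholder:

    import gdata.opensearch.data

    total = gdata.opensearch.data.TotalResults(text='25')
    print total.text         # 25
    print total.to_string()  # serialized with the OpenSearch qname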
diff --git a/python/gdata/photos/__init__.py b/python/gdata/photos/__init__.py
new file mode 100644
index 0000000..1952135
--- /dev/null
+++ b/python/gdata/photos/__init__.py
@@ -0,0 +1,1112 @@
+# -*-*- encoding: utf-8 -*-*-
+#
+# This is the base file for the PicasaWeb python client.
+# It is used for lower level operations.
+#
+# $Id: __init__.py 148 2007-10-28 15:09:19Z havard.gulldahl $
+#
+# Copyright 2007 Håvard Gulldahl
+# Portions (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides a pythonic, gdata-centric interface to Google Photos
+(a.k.a. Picasa Web Services).
+
+It is modelled after the gdata/* interfaces from the gdata-python-client
+project[1] by Google.
+
+You'll find the user-friendly api in photos.service. Please see the
+documentation or live help() system for available methods.
+
+[1]: http://gdata-python-client.googlecode.com/
+
+ """
+
+__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: pydoc chokes on non-ascii chars in __author__
+__license__ = 'Apache License v2'
+__version__ = '$Revision: 164 $'[11:-2]
+
+import re
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom
+import gdata
+
+# importing google photo submodules
+import gdata.media as Media, gdata.exif as Exif, gdata.geo as Geo
+
+# XML namespaces which are often used in Google Photo elements
+PHOTOS_NAMESPACE = 'http://schemas.google.com/photos/2007'
+MEDIA_NAMESPACE = 'http://search.yahoo.com/mrss/'
+EXIF_NAMESPACE = 'http://schemas.google.com/photos/exif/2007'
+OPENSEARCH_NAMESPACE = 'http://a9.com/-/spec/opensearchrss/1.0/'
+GEO_NAMESPACE = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
+GML_NAMESPACE = 'http://www.opengis.net/gml'
+GEORSS_NAMESPACE = 'http://www.georss.org/georss'
+PHEED_NAMESPACE = 'http://www.pheed.com/pheed/'
+BATCH_NAMESPACE = 'http://schemas.google.com/gdata/batch'
+
+
+class PhotosBaseElement(atom.AtomBase):
+  """Base class for elements in the PHOTOS_NAMESPACE. To add new elements,
+ you only need to add the element tag name to self._tag
+ """
+
+ _tag = ''
+ _namespace = PHOTOS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, name=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ self.name = name
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+ #def __str__(self):
+ #return str(self.text)
+ #def __unicode__(self):
+ #return unicode(self.text)
+ def __int__(self):
+ return int(self.text)
+ def bool(self):
+ return self.text == 'true'
+
+class GPhotosBaseFeed(gdata.GDataFeed, gdata.LinkFinder):
+ "Base class for all Feeds in gdata.photos"
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children = gdata.GDataFeed._children.copy()
+ # We deal with Entry elements ourselves
+ del _children['{%s}entry' % atom.ATOM_NAMESPACE]
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None):
+ gdata.GDataFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+ def kind(self):
+ "(string) Returns the kind"
+ try:
+ return self.category[0].term.split('#')[1]
+ except IndexError:
+ return None
+
+ def _feedUri(self, kind):
+ "Convenience method to return a uri to a feed of a special kind"
+ assert(kind in ('album', 'tag', 'photo', 'comment', 'user'))
+ here_href = self.GetSelfLink().href
+ if 'kind=%s' % kind in here_href:
+ return here_href
+ if not 'kind=' in here_href:
+ sep = '?'
+ if '?' in here_href: sep = '&'
+ return here_href + "%skind=%s" % (sep, kind)
+ rx = re.match('.*(kind=)(album|tag|photo|comment)', here_href)
+ return here_href[:rx.end(1)] + kind + here_href[rx.end(2):]
+
+ def _ConvertElementTreeToMember(self, child_tree):
+ """Re-implementing the method from AtomBase, since we deal with
+ Entry elements specially"""
+ category = child_tree.find('{%s}category' % atom.ATOM_NAMESPACE)
+ if category is None:
+ return atom.AtomBase._ConvertElementTreeToMember(self, child_tree)
+ namespace, kind = category.get('term').split('#')
+ if namespace != PHOTOS_NAMESPACE:
+ return atom.AtomBase._ConvertElementTreeToMember(self, child_tree)
+ ## TODO: is it safe to use getattr on gdata.photos?
+ entry_class = getattr(gdata.photos, '%sEntry' % kind.title())
+ if not hasattr(self, 'entry') or self.entry is None:
+ self.entry = []
+ self.entry.append(atom._CreateClassFromElementTree(
+ entry_class, child_tree))
+
+class GPhotosBaseEntry(gdata.GDataEntry, gdata.LinkFinder):
+ "Base class for all Entry elements in gdata.photos"
+ _tag = 'entry'
+ _kind = ''
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title,
+ updated=updated, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+ self.category.append(
+ atom.Category(scheme='http://schemas.google.com/g/2005#kind',
+ term = 'http://schemas.google.com/photos/2007#%s' % self._kind))
+
+ def kind(self):
+ "(string) Returns the kind"
+ try:
+ return self.category[0].term.split('#')[1]
+ except IndexError:
+ return None
+
+ def _feedUri(self, kind):
+    "Convenience method to get the uri to this entry's feed of the given kind"
+ try:
+ href = self.GetFeedLink().href
+ except AttributeError:
+ return None
+ sep = '?'
+ if '?' in href: sep = '&'
+ return '%s%skind=%s' % (href, sep, kind)
+
+
+class PhotosBaseEntry(GPhotosBaseEntry):
+ pass
+
+class PhotosBaseFeed(GPhotosBaseFeed):
+ pass
+
+class GPhotosBaseData(object):
+ pass
+
+class Access(PhotosBaseElement):
+ """The Google Photo `Access' element.
+
+ The album's access level. Valid values are `public' or `private'.
+ In documentation, access level is also referred to as `visibility.'"""
+
+ _tag = 'access'
+def AccessFromString(xml_string):
+ return atom.CreateClassFromXMLString(Access, xml_string)
+
+class Albumid(PhotosBaseElement):
+ "The Google Photo `Albumid' element"
+
+ _tag = 'albumid'
+def AlbumidFromString(xml_string):
+ return atom.CreateClassFromXMLString(Albumid, xml_string)
+
+class BytesUsed(PhotosBaseElement):
+ "The Google Photo `BytesUsed' element"
+
+ _tag = 'bytesUsed'
+def BytesUsedFromString(xml_string):
+ return atom.CreateClassFromXMLString(BytesUsed, xml_string)
+
+class Client(PhotosBaseElement):
+ "The Google Photo `Client' element"
+
+ _tag = 'client'
+def ClientFromString(xml_string):
+ return atom.CreateClassFromXMLString(Client, xml_string)
+
+class Checksum(PhotosBaseElement):
+ "The Google Photo `Checksum' element"
+
+ _tag = 'checksum'
+def ChecksumFromString(xml_string):
+ return atom.CreateClassFromXMLString(Checksum, xml_string)
+
+class CommentCount(PhotosBaseElement):
+ "The Google Photo `CommentCount' element"
+
+ _tag = 'commentCount'
+def CommentCountFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentCount, xml_string)
+
+class CommentingEnabled(PhotosBaseElement):
+ "The Google Photo `CommentingEnabled' element"
+
+ _tag = 'commentingEnabled'
+def CommentingEnabledFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentingEnabled, xml_string)
+
+class Height(PhotosBaseElement):
+ "The Google Photo `Height' element"
+
+ _tag = 'height'
+def HeightFromString(xml_string):
+ return atom.CreateClassFromXMLString(Height, xml_string)
+
+class Id(PhotosBaseElement):
+ "The Google Photo `Id' element"
+
+ _tag = 'id'
+def IdFromString(xml_string):
+ return atom.CreateClassFromXMLString(Id, xml_string)
+
+class Location(PhotosBaseElement):
+ "The Google Photo `Location' element"
+
+ _tag = 'location'
+def LocationFromString(xml_string):
+ return atom.CreateClassFromXMLString(Location, xml_string)
+
+class MaxPhotosPerAlbum(PhotosBaseElement):
+ "The Google Photo `MaxPhotosPerAlbum' element"
+
+ _tag = 'maxPhotosPerAlbum'
+def MaxPhotosPerAlbumFromString(xml_string):
+ return atom.CreateClassFromXMLString(MaxPhotosPerAlbum, xml_string)
+
+class Name(PhotosBaseElement):
+ "The Google Photo `Name' element"
+
+ _tag = 'name'
+def NameFromString(xml_string):
+ return atom.CreateClassFromXMLString(Name, xml_string)
+
+class Nickname(PhotosBaseElement):
+ "The Google Photo `Nickname' element"
+
+ _tag = 'nickname'
+def NicknameFromString(xml_string):
+ return atom.CreateClassFromXMLString(Nickname, xml_string)
+
+class Numphotos(PhotosBaseElement):
+ "The Google Photo `Numphotos' element"
+
+ _tag = 'numphotos'
+def NumphotosFromString(xml_string):
+ return atom.CreateClassFromXMLString(Numphotos, xml_string)
+
+class Numphotosremaining(PhotosBaseElement):
+ "The Google Photo `Numphotosremaining' element"
+
+ _tag = 'numphotosremaining'
+def NumphotosremainingFromString(xml_string):
+ return atom.CreateClassFromXMLString(Numphotosremaining, xml_string)
+
+class Position(PhotosBaseElement):
+ "The Google Photo `Position' element"
+
+ _tag = 'position'
+def PositionFromString(xml_string):
+ return atom.CreateClassFromXMLString(Position, xml_string)
+
+class Photoid(PhotosBaseElement):
+ "The Google Photo `Photoid' element"
+
+ _tag = 'photoid'
+def PhotoidFromString(xml_string):
+ return atom.CreateClassFromXMLString(Photoid, xml_string)
+
+class Quotacurrent(PhotosBaseElement):
+ "The Google Photo `Quotacurrent' element"
+
+ _tag = 'quotacurrent'
+def QuotacurrentFromString(xml_string):
+ return atom.CreateClassFromXMLString(Quotacurrent, xml_string)
+
+class Quotalimit(PhotosBaseElement):
+ "The Google Photo `Quotalimit' element"
+
+ _tag = 'quotalimit'
+def QuotalimitFromString(xml_string):
+ return atom.CreateClassFromXMLString(Quotalimit, xml_string)
+
+class Rotation(PhotosBaseElement):
+ "The Google Photo `Rotation' element"
+
+ _tag = 'rotation'
+def RotationFromString(xml_string):
+ return atom.CreateClassFromXMLString(Rotation, xml_string)
+
+class Size(PhotosBaseElement):
+ "The Google Photo `Size' element"
+
+ _tag = 'size'
+def SizeFromString(xml_string):
+ return atom.CreateClassFromXMLString(Size, xml_string)
+
+class Snippet(PhotosBaseElement):
+ """The Google Photo `snippet' element.
+
+ When searching, the snippet element will contain a
+  string with the word you're looking for, highlighted in html markup.
+  E.g. when your query is `hafjell', this element may contain:
+  `... here at <b>Hafjell</b>.'
+
+ You'll find this element in searches -- that is, feeds that combine the
+ `kind=photo' and `q=yoursearch' parameters in the request.
+
+ See also gphoto:truncated and gphoto:snippettype.
+
+ """
+
+ _tag = 'snippet'
+def SnippetFromString(xml_string):
+ return atom.CreateClassFromXMLString(Snippet, xml_string)
+
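+# Illustrative sketch (not upstream code): gphoto:snippet only shows up in
+# search feeds, i.e. requests combining `kind=photo' and `q=...'. Assuming
+# such a feed has been fetched as an XML string `xml':
+#
+#   feed = AnyFeedFromString(xml)
+#   for entry in feed.entry:
+#     if entry.snippet is not None:
+#       print entry.snippet.text, entry.snippettype.text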
+class Snippettype(PhotosBaseElement):
+ """The Google Photo `Snippettype' element
+
+ When searching, this element will tell you the type of element that matches.
+
+ You'll find this element in searches -- that is, feeds that combine the
+ `kind=photo' and `q=yoursearch' parameters in the request.
+
+ See also gphoto:snippet and gphoto:truncated.
+
+ Possible values and their interpretation:
+ o ALBUM_TITLE - The album title matches
+ o PHOTO_TAGS - The match is a tag/keyword
+ o PHOTO_DESCRIPTION - The match is in the photo's description
+
+ If you discover a value not listed here, please submit a patch to update this docstring.
+
+ """
+
+ _tag = 'snippettype'
+def SnippettypeFromString(xml_string):
+ return atom.CreateClassFromXMLString(Snippettype, xml_string)
+
+class Thumbnail(PhotosBaseElement):
+ """The Google Photo `Thumbnail' element
+
+ Used to display user's photo thumbnail (hackergotchi).
+
+  (Not to be confused with the <media:thumbnail> element, which gives you
+  small versions of the photo object.)"""
+
+ _tag = 'thumbnail'
+def ThumbnailFromString(xml_string):
+ return atom.CreateClassFromXMLString(Thumbnail, xml_string)
+
+class Timestamp(PhotosBaseElement):
+ """The Google Photo `Timestamp' element
+ Represented as the number of milliseconds since January 1st, 1970.
+
+
+ Take a look at the convenience methods .isoformat() and .datetime():
+
+ photo_epoch = Time.text # 1180294337000
+ photo_isostring = Time.isoformat() # '2007-05-27T19:32:17.000Z'
+
+ Alternatively:
+ photo_datetime = Time.datetime() # (requires python >= 2.3)
+ """
+
+ _tag = 'timestamp'
+ def isoformat(self):
+ """(string) Return the timestamp as a ISO 8601 formatted string,
+ e.g. '2007-05-27T19:32:17.000Z'
+ """
+ import time
+ epoch = float(self.text)/1000
+ return time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(epoch))
+
+ def datetime(self):
+ """(datetime.datetime) Return the timestamp as a datetime.datetime object
+
+ Requires python 2.3
+ """
+ import datetime
+ epoch = float(self.text)/1000
+ return datetime.datetime.fromtimestamp(epoch)
+def TimestampFromString(xml_string):
+ return atom.CreateClassFromXMLString(Timestamp, xml_string)
+
+class Truncated(PhotosBaseElement):
+ """The Google Photo `Truncated' element
+
+ You'll find this element in searches -- that is, feeds that combine the
+ `kind=photo' and `q=yoursearch' parameters in the request.
+
+ See also gphoto:snippet and gphoto:snippettype.
+
+ Possible values and their interpretation:
+ 0 -- unknown
+ """
+
+ _tag = 'Truncated'
+def TruncatedFromString(xml_string):
+ return atom.CreateClassFromXMLString(Truncated, xml_string)
+
+class User(PhotosBaseElement):
+ "The Google Photo `User' element"
+
+ _tag = 'user'
+def UserFromString(xml_string):
+ return atom.CreateClassFromXMLString(User, xml_string)
+
+class Version(PhotosBaseElement):
+ "The Google Photo `Version' element"
+
+ _tag = 'version'
+def VersionFromString(xml_string):
+ return atom.CreateClassFromXMLString(Version, xml_string)
+
+class Width(PhotosBaseElement):
+ "The Google Photo `Width' element"
+
+ _tag = 'width'
+def WidthFromString(xml_string):
+ return atom.CreateClassFromXMLString(Width, xml_string)
+
+class Weight(PhotosBaseElement):
+ """The Google Photo `Weight' element.
+
+ The weight of the tag is the number of times the tag
+ appears in the collection of tags currently being viewed.
+  The default weight is 1, in which case this tag is omitted."""
+ _tag = 'weight'
+def WeightFromString(xml_string):
+ return atom.CreateClassFromXMLString(Weight, xml_string)
+
+class CommentAuthor(atom.Author):
+ """The Atom `Author' element in CommentEntry entries is augmented to
+ contain elements from the PHOTOS_NAMESPACE
+
+ http://groups.google.com/group/Google-Picasa-Data-API/msg/819b0025b5ff5e38
+ """
+ _children = atom.Author._children.copy()
+ _children['{%s}user' % PHOTOS_NAMESPACE] = ('user', User)
+ _children['{%s}nickname' % PHOTOS_NAMESPACE] = ('nickname', Nickname)
+ _children['{%s}thumbnail' % PHOTOS_NAMESPACE] = ('thumbnail', Thumbnail)
+def CommentAuthorFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentAuthor, xml_string)
+
+########################## ################################
+
+class AlbumData(object):
+ _children = {}
+ _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
+ _children['{%s}name' % PHOTOS_NAMESPACE] = ('name', Name)
+ _children['{%s}location' % PHOTOS_NAMESPACE] = ('location', Location)
+ _children['{%s}access' % PHOTOS_NAMESPACE] = ('access', Access)
+ _children['{%s}bytesUsed' % PHOTOS_NAMESPACE] = ('bytesUsed', BytesUsed)
+ _children['{%s}timestamp' % PHOTOS_NAMESPACE] = ('timestamp', Timestamp)
+ _children['{%s}numphotos' % PHOTOS_NAMESPACE] = ('numphotos', Numphotos)
+ _children['{%s}numphotosremaining' % PHOTOS_NAMESPACE] = \
+ ('numphotosremaining', Numphotosremaining)
+ _children['{%s}user' % PHOTOS_NAMESPACE] = ('user', User)
+ _children['{%s}nickname' % PHOTOS_NAMESPACE] = ('nickname', Nickname)
+ _children['{%s}commentingEnabled' % PHOTOS_NAMESPACE] = \
+ ('commentingEnabled', CommentingEnabled)
+ _children['{%s}commentCount' % PHOTOS_NAMESPACE] = \
+ ('commentCount', CommentCount)
+ ## NOTE: storing media:group as self.media, to create a self-explaining api
+ gphoto_id = None
+ name = None
+ location = None
+ access = None
+ bytesUsed = None
+ timestamp = None
+ numphotos = None
+ numphotosremaining = None
+ user = None
+ nickname = None
+ commentingEnabled = None
+ commentCount = None
+
+class AlbumEntry(GPhotosBaseEntry, AlbumData):
+ """All metadata for a Google Photos Album
+
+ Take a look at AlbumData for metadata accessible as attributes to this object.
+
+ Notes:
+ To avoid name clashes, and to create a more sensible api, some
+ objects have names that differ from the original elements:
+
+ o media:group -> self.media,
+ o geo:where -> self.geo,
+ o photo:id -> self.gphoto_id
+ """
+
+ _kind = 'album'
+ _children = GPhotosBaseEntry._children.copy()
+ _children.update(AlbumData._children.copy())
+ # child tags only for Album entries, not feeds
+ _children['{%s}where' % GEORSS_NAMESPACE] = ('geo', Geo.Where)
+ _children['{%s}group' % MEDIA_NAMESPACE] = ('media', Media.Group)
+ media = Media.Group()
+ geo = Geo.Where()
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ #GPHOTO NAMESPACE:
+ gphoto_id=None, name=None, location=None, access=None,
+ timestamp=None, numphotos=None, user=None, nickname=None,
+ commentingEnabled=None, commentCount=None, thumbnail=None,
+ # MEDIA NAMESPACE:
+ media=None,
+ # GEORSS NAMESPACE:
+ geo=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ GPhotosBaseEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title,
+ updated=updated, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+ ## NOTE: storing photo:id as self.gphoto_id, to avoid name clash with atom:id
+ self.gphoto_id = gphoto_id
+ self.name = name
+ self.location = location
+ self.access = access
+ self.timestamp = timestamp
+ self.numphotos = numphotos
+ self.user = user
+ self.nickname = nickname
+ self.commentingEnabled = commentingEnabled
+ self.commentCount = commentCount
+ self.thumbnail = thumbnail
+ self.extended_property = extended_property or []
+ self.text = text
+ ## NOTE: storing media:group as self.media, and geo:where as geo,
+ ## to create a self-explaining api
+ self.media = media or Media.Group()
+ self.geo = geo or Geo.Where()
+
+ def GetAlbumId(self):
+ "Return the id of this album"
+
+ return self.GetFeedLink().href.split('/')[-1]
+
+ def GetPhotosUri(self):
+    "(string) Return the uri to this album's feed of the PhotoEntry kind"
+ return self._feedUri('photo')
+
+ def GetCommentsUri(self):
+    "(string) Return the uri to this album's feed of the CommentEntry kind"
+ return self._feedUri('comment')
+
+ def GetTagsUri(self):
+    "(string) Return the uri to this album's feed of the TagEntry kind"
+ return self._feedUri('tag')
+
+def AlbumEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(AlbumEntry, xml_string)
+
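+# Illustrative sketch (not upstream code): how the renamed attributes look on
+# a parsed album entry, assuming `xml' holds an album entry from the server:
+#
+#   album = AlbumEntryFromString(xml)
+#   print album.title.text, album.gphoto_id.text   # gphoto:id, not atom:id
+#   print album.access.text, int(album.numphotos)
+#   print album.media, album.geo                   # media:group / georss:where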
+class AlbumFeed(GPhotosBaseFeed, AlbumData):
+ """All metadata for a Google Photos Album, including its sub-elements
+
+ This feed represents an album as the container for other objects.
+
+ A Album feed contains entries of
+ PhotoEntry, CommentEntry or TagEntry,
+ depending on the `kind' parameter in the original query.
+
+ Take a look at AlbumData for accessible attributes.
+
+ """
+
+ _children = GPhotosBaseFeed._children.copy()
+ _children.update(AlbumData._children.copy())
+
+ def GetPhotosUri(self):
+ "(string) Return the uri to the same feed, but of the PhotoEntry kind"
+
+ return self._feedUri('photo')
+
+ def GetTagsUri(self):
+ "(string) Return the uri to the same feed, but of the TagEntry kind"
+
+ return self._feedUri('tag')
+
+ def GetCommentsUri(self):
+ "(string) Return the uri to the same feed, but of the CommentEntry kind"
+
+ return self._feedUri('comment')
+
+def AlbumFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(AlbumFeed, xml_string)
+
+
+class PhotoData(object):
+ _children = {}
+ ## NOTE: storing photo:id as self.gphoto_id, to avoid name clash with atom:id
+ _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
+ _children['{%s}albumid' % PHOTOS_NAMESPACE] = ('albumid', Albumid)
+ _children['{%s}checksum' % PHOTOS_NAMESPACE] = ('checksum', Checksum)
+ _children['{%s}client' % PHOTOS_NAMESPACE] = ('client', Client)
+ _children['{%s}height' % PHOTOS_NAMESPACE] = ('height', Height)
+ _children['{%s}position' % PHOTOS_NAMESPACE] = ('position', Position)
+ _children['{%s}rotation' % PHOTOS_NAMESPACE] = ('rotation', Rotation)
+ _children['{%s}size' % PHOTOS_NAMESPACE] = ('size', Size)
+ _children['{%s}timestamp' % PHOTOS_NAMESPACE] = ('timestamp', Timestamp)
+ _children['{%s}version' % PHOTOS_NAMESPACE] = ('version', Version)
+ _children['{%s}width' % PHOTOS_NAMESPACE] = ('width', Width)
+ _children['{%s}commentingEnabled' % PHOTOS_NAMESPACE] = \
+ ('commentingEnabled', CommentingEnabled)
+ _children['{%s}commentCount' % PHOTOS_NAMESPACE] = \
+ ('commentCount', CommentCount)
+ ## NOTE: storing media:group as self.media, exif:tags as self.exif, and
+ ## geo:where as self.geo, to create a self-explaining api
+ _children['{%s}tags' % EXIF_NAMESPACE] = ('exif', Exif.Tags)
+ _children['{%s}where' % GEORSS_NAMESPACE] = ('geo', Geo.Where)
+ _children['{%s}group' % MEDIA_NAMESPACE] = ('media', Media.Group)
+ # These elements show up in search feeds
+ _children['{%s}snippet' % PHOTOS_NAMESPACE] = ('snippet', Snippet)
+ _children['{%s}snippettype' % PHOTOS_NAMESPACE] = ('snippettype', Snippettype)
+ _children['{%s}truncated' % PHOTOS_NAMESPACE] = ('truncated', Truncated)
+ gphoto_id = None
+ albumid = None
+ checksum = None
+ client = None
+ height = None
+ position = None
+ rotation = None
+ size = None
+ timestamp = None
+ version = None
+ width = None
+ commentingEnabled = None
+ commentCount = None
+ snippet=None
+ snippettype=None
+ truncated=None
+ media = Media.Group()
+ geo = Geo.Where()
+ tags = Exif.Tags()
+
+class PhotoEntry(GPhotosBaseEntry, PhotoData):
+ """All metadata for a Google Photos Photo
+
+ Take a look at PhotoData for metadata accessible as attributes to this object.
+
+ Notes:
+ To avoid name clashes, and to create a more sensible api, some
+ objects have names that differ from the original elements:
+
+ o media:group -> self.media,
+ o exif:tags -> self.exif,
+ o geo:where -> self.geo,
+ o photo:id -> self.gphoto_id
+ """
+
+ _kind = 'photo'
+ _children = GPhotosBaseEntry._children.copy()
+ _children.update(PhotoData._children.copy())
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None, text=None,
+ # GPHOTO NAMESPACE:
+ gphoto_id=None, albumid=None, checksum=None, client=None, height=None,
+ position=None, rotation=None, size=None, timestamp=None, version=None,
+ width=None, commentCount=None, commentingEnabled=None,
+ # MEDIARSS NAMESPACE:
+ media=None,
+ # EXIF_NAMESPACE:
+ exif=None,
+ # GEORSS NAMESPACE:
+ geo=None,
+ extension_elements=None, extension_attributes=None):
+ GPhotosBaseEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+
+ ## NOTE: storing photo:id as self.gphoto_id, to avoid name clash with atom:id
+ self.gphoto_id = gphoto_id
+ self.albumid = albumid
+ self.checksum = checksum
+ self.client = client
+ self.height = height
+ self.position = position
+ self.rotation = rotation
+ self.size = size
+ self.timestamp = timestamp
+ self.version = version
+ self.width = width
+ self.commentingEnabled = commentingEnabled
+ self.commentCount = commentCount
+ ## NOTE: storing media:group as self.media, to create a self-explaining api
+ self.media = media or Media.Group()
+ self.exif = exif or Exif.Tags()
+ self.geo = geo or Geo.Where()
+
+ def GetPostLink(self):
+ "Return the uri to this photo's `POST' link (use it for updates of the object)"
+
+ return self.GetFeedLink()
+
+ def GetCommentsUri(self):
+ "Return the uri to this photo's feed of CommentEntry comments"
+ return self._feedUri('comment')
+
+ def GetTagsUri(self):
+ "Return the uri to this photo's feed of TagEntry tags"
+ return self._feedUri('tag')
+
+ def GetAlbumUri(self):
+ """Return the uri to the AlbumEntry containing this photo"""
+
+ href = self.GetSelfLink().href
+ return href[:href.find('/photoid')]
+
+def PhotoEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(PhotoEntry, xml_string)
+
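+# Illustrative sketch (not upstream code): accessing the remapped attributes
+# of a parsed photo entry, assuming `xml' holds a photo entry from the server:
+#
+#   photo = PhotoEntryFromString(xml)
+#   print photo.gphoto_id.text, int(photo.width), int(photo.height)
+#   print photo.timestamp.isoformat()   # gphoto:timestamp convenience method
+#   print photo.exif, photo.geo         # exif:tags / georss:where wrappers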
+class PhotoFeed(GPhotosBaseFeed, PhotoData):
+ """All metadata for a Google Photos Photo, including its sub-elements
+
+ This feed represents a photo as the container for other objects.
+
+ A Photo feed contains entries of
+ CommentEntry or TagEntry,
+ depending on the `kind' parameter in the original query.
+
+ Take a look at PhotoData for metadata accessible as attributes to this object.
+
+ """
+ _children = GPhotosBaseFeed._children.copy()
+ _children.update(PhotoData._children.copy())
+
+ def GetTagsUri(self):
+ "(string) Return the uri to the same feed, but of the TagEntry kind"
+
+ return self._feedUri('tag')
+
+ def GetCommentsUri(self):
+ "(string) Return the uri to the same feed, but of the CommentEntry kind"
+
+ return self._feedUri('comment')
+
+def PhotoFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(PhotoFeed, xml_string)
+
+class TagData(GPhotosBaseData):
+ _children = {}
+ _children['{%s}weight' % PHOTOS_NAMESPACE] = ('weight', Weight)
+ weight=None
+
+class TagEntry(GPhotosBaseEntry, TagData):
+ """All metadata for a Google Photos Tag
+
+ The actual tag is stored in the .title.text attribute
+
+ """
+
+ _kind = 'tag'
+ _children = GPhotosBaseEntry._children.copy()
+ _children.update(TagData._children.copy())
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ # GPHOTO NAMESPACE:
+ weight=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ GPhotosBaseEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated, text=text,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+ self.weight = weight
+
+ def GetAlbumUri(self):
+ """Return the uri to the AlbumEntry containing this tag"""
+
+ href = self.GetSelfLink().href
+ pos = href.find('/photoid')
+ if pos == -1:
+ return None
+ return href[:pos]
+
+ def GetPhotoUri(self):
+ """Return the uri to the PhotoEntry containing this tag"""
+
+ href = self.GetSelfLink().href
+ pos = href.find('/tag')
+ if pos == -1:
+ return None
+ return href[:pos]
+
+def TagEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(TagEntry, xml_string)
+
+
+class TagFeed(GPhotosBaseFeed, TagData):
+ """All metadata for a Google Photos Tag, including its sub-elements"""
+
+ _children = GPhotosBaseFeed._children.copy()
+ _children.update(TagData._children.copy())
+
+def TagFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(TagFeed, xml_string)
+
+class CommentData(GPhotosBaseData):
+ _children = {}
+ ## NOTE: storing photo:id as self.gphoto_id, to avoid name clash with atom:id
+ _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
+ _children['{%s}albumid' % PHOTOS_NAMESPACE] = ('albumid', Albumid)
+ _children['{%s}photoid' % PHOTOS_NAMESPACE] = ('photoid', Photoid)
+ _children['{%s}author' % atom.ATOM_NAMESPACE] = ('author', [CommentAuthor,])
+ gphoto_id=None
+ albumid=None
+ photoid=None
+ author=None
+
+class CommentEntry(GPhotosBaseEntry, CommentData):
+ """All metadata for a Google Photos Comment
+
+ The comment is stored in the .content.text attribute,
+ with a content type in .content.type.
+
+
+ """
+
+ _kind = 'comment'
+ _children = GPhotosBaseEntry._children.copy()
+ _children.update(CommentData._children.copy())
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ # GPHOTO NAMESPACE:
+ gphoto_id=None, albumid=None, photoid=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ GPhotosBaseEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+ self.gphoto_id = gphoto_id
+ self.albumid = albumid
+ self.photoid = photoid
+
+ def GetCommentId(self):
+ """Return the globally unique id of this comment"""
+ return self.GetSelfLink().href.split('/')[-1]
+
+ def GetAlbumUri(self):
+ """Return the uri to the AlbumEntry containing this comment"""
+
+ href = self.GetSelfLink().href
+ return href[:href.find('/photoid')]
+
+ def GetPhotoUri(self):
+ """Return the uri to the PhotoEntry containing this comment"""
+
+ href = self.GetSelfLink().href
+ return href[:href.find('/commentid')]
+
+def CommentEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentEntry, xml_string)
+
+class CommentFeed(GPhotosBaseFeed, CommentData):
+ """All metadata for a Google Photos Comment, including its sub-elements"""
+
+ _children = GPhotosBaseFeed._children.copy()
+ _children.update(CommentData._children.copy())
+
+def CommentFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(CommentFeed, xml_string)
+
+class UserData(GPhotosBaseData):
+ _children = {}
+ _children['{%s}maxPhotosPerAlbum' % PHOTOS_NAMESPACE] = ('maxPhotosPerAlbum', MaxPhotosPerAlbum)
+ _children['{%s}nickname' % PHOTOS_NAMESPACE] = ('nickname', Nickname)
+ _children['{%s}quotalimit' % PHOTOS_NAMESPACE] = ('quotalimit', Quotalimit)
+ _children['{%s}quotacurrent' % PHOTOS_NAMESPACE] = ('quotacurrent', Quotacurrent)
+ _children['{%s}thumbnail' % PHOTOS_NAMESPACE] = ('thumbnail', Thumbnail)
+ _children['{%s}user' % PHOTOS_NAMESPACE] = ('user', User)
+ _children['{%s}id' % PHOTOS_NAMESPACE] = ('gphoto_id', Id)
+
+ maxPhotosPerAlbum=None
+ nickname=None
+ quotalimit=None
+ quotacurrent=None
+ thumbnail=None
+ user=None
+ gphoto_id=None
+
+
+class UserEntry(GPhotosBaseEntry, UserData):
+ """All metadata for a Google Photos User
+
+ This entry represents an album owner and all appropriate metadata.
+
+  Take a look at the attributes of UserData for the metadata available.
+ """
+ _children = GPhotosBaseEntry._children.copy()
+ _children.update(UserData._children.copy())
+ _kind = 'user'
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None,
+ title=None, updated=None,
+ # GPHOTO NAMESPACE:
+ gphoto_id=None, maxPhotosPerAlbum=None, nickname=None, quotalimit=None,
+ quotacurrent=None, thumbnail=None, user=None,
+ extended_property=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ GPhotosBaseEntry.__init__(self, author=author, category=category,
+ content=content,
+ atom_id=atom_id, link=link, published=published,
+ title=title, updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+ self.gphoto_id=gphoto_id
+ self.maxPhotosPerAlbum=maxPhotosPerAlbum
+ self.nickname=nickname
+ self.quotalimit=quotalimit
+ self.quotacurrent=quotacurrent
+ self.thumbnail=thumbnail
+ self.user=user
+
+ def GetAlbumsUri(self):
+ "(string) Return the uri to this user's feed of the AlbumEntry kind"
+ return self._feedUri('album')
+
+ def GetPhotosUri(self):
+ "(string) Return the uri to this user's feed of the PhotoEntry kind"
+ return self._feedUri('photo')
+
+ def GetCommentsUri(self):
+ "(string) Return the uri to this user's feed of the CommentEntry kind"
+ return self._feedUri('comment')
+
+ def GetTagsUri(self):
+ "(string) Return the uri to this user's feed of the TagEntry kind"
+ return self._feedUri('tag')
+
+def UserEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(UserEntry, xml_string)
+
+class UserFeed(GPhotosBaseFeed, UserData):
+ """Feed for a User in the google photos api.
+
+ This feed represents a user as the container for other objects.
+
+ A User feed contains entries of
+ AlbumEntry, PhotoEntry, CommentEntry, UserEntry or TagEntry,
+ depending on the `kind' parameter in the original query.
+
+ The user feed itself also contains all of the metadata available
+ as part of a UserData object."""
+ _children = GPhotosBaseFeed._children.copy()
+ _children.update(UserData._children.copy())
+
+ def GetAlbumsUri(self):
+ """Get the uri to this feed, but with entries of the AlbumEntry kind."""
+ return self._feedUri('album')
+
+ def GetTagsUri(self):
+ """Get the uri to this feed, but with entries of the TagEntry kind."""
+ return self._feedUri('tag')
+
+ def GetPhotosUri(self):
+    """Get the uri to this feed, but with entries of the PhotoEntry kind."""
+ return self._feedUri('photo')
+
+ def GetCommentsUri(self):
+    """Get the uri to this feed, but with entries of the CommentEntry kind."""
+ return self._feedUri('comment')
+
+def UserFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(UserFeed, xml_string)
+
+
+
+def AnyFeedFromString(xml_string):
+ """Creates an instance of the appropriate feed class from the
+ xml string contents.
+
+ Args:
+ xml_string: str A string which contains valid XML. The root element
+ of the XML string should match the tag and namespace of the desired
+ class.
+
+ Returns:
+ An instance of the target class with members assigned according to the
+ contents of the XML - or a basic gdata.GDataFeed instance if it is
+ impossible to determine the appropriate class (look for extra elements
+ in GDataFeed's .FindExtensions() and extension_elements[] ).
+ """
+ tree = ElementTree.fromstring(xml_string)
+ category = tree.find('{%s}category' % atom.ATOM_NAMESPACE)
+ if category is None:
+ # TODO: is this the best way to handle this?
+ return atom._CreateClassFromElementTree(GPhotosBaseFeed, tree)
+ namespace, kind = category.get('term').split('#')
+ if namespace != PHOTOS_NAMESPACE:
+ # TODO: is this the best way to handle this?
+ return atom._CreateClassFromElementTree(GPhotosBaseFeed, tree)
+ ## TODO: is getattr safe this way?
+ feed_class = getattr(gdata.photos, '%sFeed' % kind.title())
+ return atom._CreateClassFromElementTree(feed_class, tree)
+
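+# Illustrative sketch (not upstream code): the feed's kind category decides
+# which class AnyFeedFromString hands back.
+#
+#   feed = AnyFeedFromString(xml)
+#   print type(feed)    # e.g. AlbumFeed for a kind=photo query on an album
+#   print feed.kind()   # 'album', 'photo', 'tag', 'comment' or 'user'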
+def AnyEntryFromString(xml_string):
+ """Creates an instance of the appropriate entry class from the
+ xml string contents.
+
+ Args:
+ xml_string: str A string which contains valid XML. The root element
+ of the XML string should match the tag and namespace of the desired
+ class.
+
+ Returns:
+ An instance of the target class with members assigned according to the
+    contents of the XML - or a basic gdata.GDataEntry instance if it is
+ impossible to determine the appropriate class (look for extra elements
+ in GDataEntry's .FindExtensions() and extension_elements[] ).
+ """
+ tree = ElementTree.fromstring(xml_string)
+ category = tree.find('{%s}category' % atom.ATOM_NAMESPACE)
+ if category is None:
+ # TODO: is this the best way to handle this?
+ return atom._CreateClassFromElementTree(GPhotosBaseEntry, tree)
+ namespace, kind = category.get('term').split('#')
+ if namespace != PHOTOS_NAMESPACE:
+ # TODO: is this the best way to handle this?
+ return atom._CreateClassFromElementTree(GPhotosBaseEntry, tree)
+ ## TODO: is getattr safe this way?
+ feed_class = getattr(gdata.photos, '%sEntry' % kind.title())
+ return atom._CreateClassFromElementTree(feed_class, tree)
+
diff --git a/python/gdata/photos/service.py b/python/gdata/photos/service.py
new file mode 100644
index 0000000..7170379
--- /dev/null
+++ b/python/gdata/photos/service.py
@@ -0,0 +1,680 @@
+#!/usr/bin/env python
+# -*-*- encoding: utf-8 -*-*-
+#
+# This is the service file for the Google Photo python client.
+# It is used for higher level operations.
+#
+# $Id: service.py 144 2007-10-25 21:03:34Z havard.gulldahl $
+#
+# Copyright 2007 Håvard Gulldahl
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google PhotoService provides a human-friendly interface to
+Google Photo (a.k.a. Picasa Web) services[1].
+
+It extends gdata.service.GDataService and as such hides all the
+nasty details about authenticating, parsing and communicating with
+Google Photos.
+
+[1]: http://code.google.com/apis/picasaweb/gdata.html
+
+Example:
+ import gdata.photos, gdata.photos.service
+ pws = gdata.photos.service.PhotosService()
+ pws.ClientLogin(username, password)
+ #Get all albums
+ albums = pws.GetUserFeed().entry
+ # Get all photos in second album
+ photos = pws.GetFeed(albums[1].GetPhotosUri()).entry
+ # Get all tags for photos in second album and print them
+ tags = pws.GetFeed(albums[1].GetTagsUri()).entry
+ print [ tag.summary.text for tag in tags ]
+ # Get all comments for the first photos in list and print them
+ comments = pws.GetCommentFeed(photos[0].GetCommentsUri()).entry
+ print [ c.summary.text for c in comments ]
+
+ # Get a photo to work with
+ photo = photos[0]
+ # Update metadata
+
+  # Attributes from the <atom:*> namespace
+ photo.summary.text = u'A nice view from my veranda'
+ photo.title.text = u'Verandaview.jpg'
+
+  # Attributes from the <media:*> namespace
+ photo.media.keywords.text = u'Home, Long-exposure, Sunset' # Comma-separated
+
+ # Adding attributes to media object
+
+ # Rotate 90 degrees clockwise
+ photo.rotation = gdata.photos.Rotation(text='90')
+
+ # Submit modified photo object
+ photo = pws.UpdatePhotoMetadata(photo)
+
+ # Make sure you only modify the newly returned object, else you'll get
+ # versioning errors. See Optimistic-concurrency
+
+ # Add comment to a picture
+ comment = pws.InsertComment(photo, u'I wish the water always was this warm')
+
+ # Remove comment because it was silly
+ print "*blush*"
+ pws.Delete(comment.GetEditLink().href)
+
+"""
+
+__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: pydoc chokes on non-ascii chars in __author__
+__license__ = 'Apache License v2'
+__version__ = '$Revision: 176 $'[11:-2]
+
+
+import sys, os.path, StringIO
+import time
+import gdata.service
+import gdata
+import atom.service
+import atom
+import gdata.photos
+
+SUPPORTED_UPLOAD_TYPES = ('bmp', 'jpeg', 'jpg', 'gif', 'png')
+
+UNKOWN_ERROR=1000
+GPHOTOS_BAD_REQUEST=400
+GPHOTOS_CONFLICT=409
+GPHOTOS_INTERNAL_SERVER_ERROR=500
+GPHOTOS_INVALID_ARGUMENT=601
+GPHOTOS_INVALID_CONTENT_TYPE=602
+GPHOTOS_NOT_AN_IMAGE=603
+GPHOTOS_INVALID_KIND=604
+
+class GooglePhotosException(Exception):
+ def __init__(self, response):
+
+ self.error_code = response['status']
+ self.reason = response['reason'].strip()
+    if '<html>' in str(response['body']): #general html message, discard it
+ response['body'] = ""
+ self.body = response['body'].strip()
+ self.message = "(%(status)s) %(body)s -- %(reason)s" % response
+
+ #return explicit error codes
+ error_map = { '(12) Not an image':GPHOTOS_NOT_AN_IMAGE,
+ 'kind: That is not one of the acceptable values':
+ GPHOTOS_INVALID_KIND,
+
+ }
+ for msg, code in error_map.iteritems():
+ if self.body == msg:
+ self.error_code = code
+ break
+ self.args = [self.error_code, self.reason, self.body]
+
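+# Illustrative sketch (not upstream code): the PhotosService methods below
+# raise GooglePhotosException both for bad arguments and for server errors,
+# so a caller (here assuming an authenticated PhotosService instance `pws')
+# can inspect the mapped code:
+#
+#   try:
+#     pws.InsertAlbum(title='Test', summary='Just testing')
+#   except GooglePhotosException, e:
+#     print e.error_code, e.reason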
+class PhotosService(gdata.service.GDataService):
+ userUri = '/data/feed/api/user/%s'
+
+ def __init__(self, email=None, password=None, source=None,
+ server='picasaweb.google.com', additional_headers=None,
+ **kwargs):
+ """Creates a client for the Google Photos service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'picasaweb.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ self.email = email
+ self.client = source
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='lh2', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+
+ def GetFeed(self, uri, limit=None, start_index=None):
+ """Get a feed.
+
+ The results are ordered by the values of their `updated' elements,
+ with the most recently updated entry appearing first in the feed.
+
+ Arguments:
+ uri: the uri to fetch
+      limit (optional): the maximum number of entries to return. Defaults to what
+        the server returns.
+      start_index (optional): the 1-based index of the first entry to return.
+        Defaults to the first entry.
+
+ Returns:
+ one of gdata.photos.AlbumFeed,
+ gdata.photos.UserFeed,
+ gdata.photos.PhotoFeed,
+ gdata.photos.CommentFeed,
+ gdata.photos.TagFeed,
+ depending on the results of the query.
+ Raises:
+ GooglePhotosException
+
+ See:
+ http://code.google.com/apis/picasaweb/gdata.html#Get_Album_Feed_Manual
+ """
+ if limit is not None:
+ uri += '&max-results=%s' % limit
+ if start_index is not None:
+ uri += '&start-index=%s' % start_index
+ try:
+ return self.Get(uri, converter=gdata.photos.AnyFeedFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+ def GetEntry(self, uri, limit=None, start_index=None):
+ """Get an Entry.
+
+ Arguments:
+ uri: the uri to the entry
+ limit (optional): the maximum number of entries to return. Defaults to what
+ the server returns.
+
+ Returns:
+ one of gdata.photos.AlbumEntry,
+ gdata.photos.UserEntry,
+ gdata.photos.PhotoEntry,
+ gdata.photos.CommentEntry,
+ gdata.photos.TagEntry,
+ depending on the results of the query.
+ Raises:
+ GooglePhotosException
+ """
+ if limit is not None:
+ uri += '&max-results=%s' % limit
+ if start_index is not None:
+ uri += '&start-index=%s' % start_index
+ try:
+ return self.Get(uri, converter=gdata.photos.AnyEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+ def GetUserFeed(self, kind='album', user='default', limit=None):
+ """Get user-based feed, containing albums, photos, comments or tags;
+ defaults to albums.
+
+ The entries are ordered by the values of their `updated' elements,
+ with the most recently updated entry appearing first in the feed.
+
+ Arguments:
+ kind: the kind of entries to get, either `album', `photo',
+ `comment' or `tag', or a python list of these. Defaults to `album'.
+ user (optional): whose albums we're querying. Defaults to current user.
+ limit (optional): the maximum number of entries to return.
+ Defaults to everything the server returns.
+
+
+ Returns:
+ gdata.photos.UserFeed, containing appropriate Entry elements
+
+ See:
+ http://code.google.com/apis/picasaweb/gdata.html#Get_Album_Feed_Manual
+ http://googledataapis.blogspot.com/2007/07/picasa-web-albums-adds-new-api-features.html
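+
+    Example (an illustrative sketch, assuming an authenticated PhotosService
+    instance `pws' as in the module docstring):
+      albums = pws.GetUserFeed().entry
+      photos = pws.GetUserFeed(kind='photo', limit=10).entry
+      both = pws.GetUserFeed(kind=['photo', 'tag'], user='someuser').entry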
+ """
+ if isinstance(kind, (list, tuple) ):
+ kind = ",".join(kind)
+
+ uri = '/data/feed/api/user/%s?kind=%s' % (user, kind)
+ return self.GetFeed(uri, limit=limit)
+
+ def GetTaggedPhotos(self, tag, user='default', limit=None):
+ """Get all photos belonging to a specific user, tagged by the given keyword
+
+ Arguments:
+ tag: The tag you're looking for, e.g. `dog'
+ user (optional): Whose images/videos you want to search, defaults
+ to current user
+ limit (optional): the maximum number of entries to return.
+ Defaults to everything the server returns.
+
+ Returns:
+ gdata.photos.UserFeed containing PhotoEntry elements
+ """
+ # Lower-casing because of
+ # http://code.google.com/p/gdata-issues/issues/detail?id=194
+ uri = '/data/feed/api/user/%s?kind=photo&tag=%s' % (user, tag.lower())
+ return self.GetFeed(uri, limit)
+
+ def SearchUserPhotos(self, query, user='default', limit=100):
+ """Search through all photos for a specific user and return a feed.
+ This will look for matches in file names and image tags (a.k.a. keywords)
+
+ Arguments:
+ query: The string you're looking for, e.g. `vacation'
+ user (optional): The username of whose photos you want to search, defaults
+ to current user.
+ limit (optional): Don't return more than `limit' hits, defaults to 100
+
+ Only public photos are searched, unless you are authenticated and
+ searching through your own photos.
+
+ Returns:
+ gdata.photos.UserFeed with PhotoEntry elements
+ """
+ uri = '/data/feed/api/user/%s?kind=photo&q=%s' % (user, query)
+ return self.GetFeed(uri, limit=limit)
+
+ def SearchCommunityPhotos(self, query, limit=100):
+ """Search through all public photos and return a feed.
+ This will look for matches in file names and image tags (a.k.a. keywords)
+
+ Arguments:
+ query: The string you're looking for, e.g. `vacation'
+ limit (optional): Don't return more than `limit' hits, defaults to 100
+
+ Returns:
+ gdata.GDataFeed with PhotoEntry elements
+ """
+ uri='/data/feed/api/all?q=%s' % query
+ return self.GetFeed(uri, limit=limit)
+
+ def GetContacts(self, user='default', limit=None):
+ """Retrieve a feed that contains a list of your contacts
+
+ Arguments:
+ user: Username of the user whose contacts you want
+
+ Returns
+ gdata.photos.UserFeed, with UserEntry entries
+
+ See:
+ http://groups.google.com/group/Google-Picasa-Data-API/msg/819b0025b5ff5e38
+ """
+ uri = '/data/feed/api/user/%s/contacts?kind=user' % user
+ return self.GetFeed(uri, limit=limit)
+
+ def SearchContactsPhotos(self, user='default', search=None, limit=None):
+ """Search over your contacts' photos and return a feed
+
+ Arguments:
+ user: Username of the user whose contacts you want
+ search (optional): What to search for (photo title, description and keywords)
+
+ Returns
+ gdata.photos.UserFeed, with PhotoEntry elements
+
+ See:
+ http://groups.google.com/group/Google-Picasa-Data-API/msg/819b0025b5ff5e38
+ """
+
+ uri = '/data/feed/api/user/%s/contacts?kind=photo&q=%s' % (user, search)
+ return self.GetFeed(uri, limit=limit)
+
+ def InsertAlbum(self, title, summary, location=None, access='public',
+ commenting_enabled='true', timestamp=None):
+ """Add an album.
+
+ Needs authentication, see self.ClientLogin()
+
+ Arguments:
+ title: Album title
+      summary: Album summary / description
+      location (optional): The geographic location of the album, e.g. `Paris'.
+        Defaults to None.
+ access (optional): `private' or `public'. Public albums are searchable
+ by everyone on the internet. Defaults to `public'
+ commenting_enabled (optional): `true' or `false'. Defaults to `true'.
+ timestamp (optional): A date and time for the album, in milliseconds since
+ Unix epoch[1] UTC. Defaults to now.
+
+ Returns:
+ The newly created gdata.photos.AlbumEntry
+
+ See:
+ http://code.google.com/apis/picasaweb/gdata.html#Add_Album_Manual_Installed
+
+ [1]: http://en.wikipedia.org/wiki/Unix_epoch
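+
+    Example (an illustrative sketch, assuming an authenticated PhotosService
+    instance `pws' as in the module docstring):
+      album = pws.InsertAlbum(title='Trip to Italy', summary='Summer 2007',
+                              access='private')
+      print album.GetPhotosUri()   # feed to upload photos into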
+ """
+ album = gdata.photos.AlbumEntry()
+ album.title = atom.Title(text=title, title_type='text')
+ album.summary = atom.Summary(text=summary, summary_type='text')
+ if location is not None:
+ album.location = gdata.photos.Location(text=location)
+ album.access = gdata.photos.Access(text=access)
+ if commenting_enabled in ('true', 'false'):
+ album.commentingEnabled = gdata.photos.CommentingEnabled(text=commenting_enabled)
+ if timestamp is None:
+ timestamp = '%i' % int(time.time() * 1000)
+ album.timestamp = gdata.photos.Timestamp(text=timestamp)
+ try:
+ return self.Post(album, uri=self.userUri % self.email,
+ converter=gdata.photos.AlbumEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+ def InsertPhoto(self, album_or_uri, photo, filename_or_handle,
+ content_type='image/jpeg'):
+ """Add a PhotoEntry
+
+ Needs authentication, see self.ClientLogin()
+
+ Arguments:
+ album_or_uri: AlbumFeed or uri of the album where the photo should go
+ photo: PhotoEntry to add
+ filename_or_handle: A file-like object or file name where the image/video
+ will be read from
+ content_type (optional): Internet media type (a.k.a. mime type) of
+ media object. Currently Google Photos supports these types:
+ o image/bmp
+ o image/gif
+ o image/jpeg
+ o image/png
+
+ Images will be converted to jpeg on upload. Defaults to `image/jpeg'
+
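+    Example (an illustrative sketch, assuming an authenticated PhotosService
+    instance `pws' and an AlbumEntry `album'):
+      photo = gdata.photos.PhotoEntry()
+      photo.title = atom.Title(text='Sunset.jpg')
+      photo = pws.InsertPhoto(album.GetPhotosUri(), photo,
+                              '/path/to/sunset.jpg', content_type='image/jpeg')
+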
+ """
+
+ try:
+ assert(isinstance(photo, gdata.photos.PhotoEntry))
+ except AssertionError:
+ raise GooglePhotosException({'status':GPHOTOS_INVALID_ARGUMENT,
+ 'body':'`photo` must be a gdata.photos.PhotoEntry instance',
+ 'reason':'Found %s, not PhotoEntry' % type(photo)
+ })
+ try:
+ majtype, mintype = content_type.split('/')
+ assert(mintype in SUPPORTED_UPLOAD_TYPES)
+ except (ValueError, AssertionError):
+ raise GooglePhotosException({'status':GPHOTOS_INVALID_CONTENT_TYPE,
+ 'body':'This is not a valid content type: %s' % content_type,
+ 'reason':'Accepted content types: %s' % \
+ ['image/'+t for t in SUPPORTED_UPLOAD_TYPES]
+ })
+ if isinstance(filename_or_handle, (str, unicode)) and \
+ os.path.exists(filename_or_handle): # it's a file name
+ mediasource = gdata.MediaSource()
+ mediasource.setFile(filename_or_handle, content_type)
+ elif hasattr(filename_or_handle, 'read'):# it's a file-like resource
+ if hasattr(filename_or_handle, 'seek'):
+ filename_or_handle.seek(0) # rewind pointer to the start of the file
+ # gdata.MediaSource needs the content length, so read the whole image
+ file_handle = StringIO.StringIO(filename_or_handle.read())
+ name = 'image'
+ if hasattr(filename_or_handle, 'name'):
+ name = filename_or_handle.name
+ mediasource = gdata.MediaSource(file_handle, content_type,
+ content_length=file_handle.len, file_name=name)
+ else: #filename_or_handle is not valid
+ raise GooglePhotosException({'status':GPHOTOS_INVALID_ARGUMENT,
+ 'body':'`filename_or_handle` must be a path name or a file-like object',
+ 'reason':'Found %s, not path name or object with a .read() method' % \
+ type(filename_or_handle)
+ })
+
+ if isinstance(album_or_uri, (str, unicode)): # it's a uri
+ feed_uri = album_or_uri
+ elif hasattr(album_or_uri, 'GetFeedLink'): # it's a AlbumFeed object
+ feed_uri = album_or_uri.GetFeedLink().href
+
+ try:
+ return self.Post(photo, uri=feed_uri, media_source=mediasource,
+ converter=gdata.photos.PhotoEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+ def InsertPhotoSimple(self, album_or_uri, title, summary, filename_or_handle,
+ content_type='image/jpeg', keywords=None):
+ """Add a photo without constructing a PhotoEntry.
+
+ Needs authentication, see self.ClientLogin()
+
+ Arguments:
+ album_or_uri: AlbumFeed or uri of the album where the photo should go
+ title: Photo title
+ summary: Photo summary / description
+ filename_or_handle: A file-like object or file name where the image/video
+ will be read from
+ content_type (optional): Internet media type (a.k.a. mime type) of
+ media object. Currently Google Photos supports these types:
+ o image/bmp
+ o image/gif
+ o image/jpeg
+ o image/png
+
+ Images will be converted to jpeg on upload. Defaults to `image/jpeg'
+ keywords (optional): a 1) comma separated string or 2) a python list() of
+ keywords (a.k.a. tags) to add to the image.
+ E.g. 1) `dog, vacation, happy' 2) ['dog', 'happy', 'vacation']
+
+ Returns:
+ The newly created gdata.photos.PhotoEntry or GooglePhotosException on errors
+
+ See:
+ http://code.google.com/apis/picasaweb/gdata.html#Add_Album_Manual_Installed
+ [1]: http://en.wikipedia.org/wiki/Unix_epoch
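+
+    Example (an illustrative sketch, assuming an authenticated PhotosService
+    instance `pws' and an album uri `album_uri'):
+      photo = pws.InsertPhotoSimple(album_uri, 'Sunset.jpg',
+                                    'Taken from the balcony',
+                                    '/path/to/sunset.jpg',
+                                    keywords=['sunset', 'balcony'])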
+ """
+
+ metadata = gdata.photos.PhotoEntry()
+ metadata.title=atom.Title(text=title)
+ metadata.summary = atom.Summary(text=summary, summary_type='text')
+ if keywords is not None:
+ if isinstance(keywords, list):
+ keywords = ','.join(keywords)
+ metadata.media.keywords = gdata.media.Keywords(text=keywords)
+ return self.InsertPhoto(album_or_uri, metadata, filename_or_handle,
+ content_type)
+
+ def UpdatePhotoMetadata(self, photo):
+ """Update a photo's metadata.
+
+ Needs authentication, see self.ClientLogin()
+
+ You can update any or all of the following metadata properties:
+     * <title>
+     * <media:description>
+     * <gphoto:checksum>
+     * <gphoto:client>
+     * <gphoto:rotation>
+     * <gphoto:timestamp>
+     * <gphoto:commentingEnabled>
+
+ Arguments:
+ photo: a gdata.photos.PhotoEntry object with updated elements
+
+ Returns:
+ The modified gdata.photos.PhotoEntry
+
+ Example:
+ p = GetFeed(uri).entry[0]
+ p.title.text = u'My new text'
+ p.commentingEnabled.text = 'false'
+ p = UpdatePhotoMetadata(p)
+
+ It is important that you don't keep the old object around, once
+ it has been updated. See
+ http://code.google.com/apis/gdata/reference.html#Optimistic-concurrency
+ """
+ try:
+ return self.Put(data=photo, uri=photo.GetEditLink().href,
+ converter=gdata.photos.PhotoEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+
+ def UpdatePhotoBlob(self, photo_or_uri, filename_or_handle,
+ content_type = 'image/jpeg'):
+ """Update a photo's binary data.
+
+ Needs authentication, see self.ClientLogin()
+
+ Arguments:
+ photo_or_uri: a gdata.photos.PhotoEntry that will be updated, or a
+ `edit-media' uri pointing to it
+ filename_or_handle: A file-like object or file name where the image/video
+ will be read from
+ content_type (optional): Internet media type (a.k.a. mime type) of
+ media object. Currently Google Photos supports these types:
+ o image/bmp
+ o image/gif
+ o image/jpeg
+ o image/png
+ Images will be converted to jpeg on upload. Defaults to `image/jpeg'
+
+ Returns:
+ The modified gdata.photos.PhotoEntry
+
+ Example:
+ p = GetFeed(PhotoUri)
+ p = UpdatePhotoBlob(p, '/tmp/newPic.jpg')
+
+ It is important that you don't keep the old object around, once
+ it has been updated. See
+ http://code.google.com/apis/gdata/reference.html#Optimistic-concurrency
+ """
+
+ try:
+ majtype, mintype = content_type.split('/')
+ assert(mintype in SUPPORTED_UPLOAD_TYPES)
+ except (ValueError, AssertionError):
+ raise GooglePhotosException({'status':GPHOTOS_INVALID_CONTENT_TYPE,
+ 'body':'This is not a valid content type: %s' % content_type,
+ 'reason':'Accepted content types: %s' % \
+ ['image/'+t for t in SUPPORTED_UPLOAD_TYPES]
+ })
+
+ if isinstance(filename_or_handle, (str, unicode)) and \
+ os.path.exists(filename_or_handle): # it's a file name
+ photoblob = gdata.MediaSource()
+ photoblob.setFile(filename_or_handle, content_type)
+ elif hasattr(filename_or_handle, 'read'):# it's a file-like resource
+ if hasattr(filename_or_handle, 'seek'):
+ filename_or_handle.seek(0) # rewind pointer to the start of the file
+ # gdata.MediaSource needs the content length, so read the whole image
+ file_handle = StringIO.StringIO(filename_or_handle.read())
+ name = 'image'
+ if hasattr(filename_or_handle, 'name'):
+ name = filename_or_handle.name
+ mediasource = gdata.MediaSource(file_handle, content_type,
+ content_length=file_handle.len, file_name=name)
+ else: #filename_or_handle is not valid
+ raise GooglePhotosException({'status':GPHOTOS_INVALID_ARGUMENT,
+ 'body':'`filename_or_handle` must be a path name or a file-like object',
+ 'reason':'Found %s, not path name or an object with .read() method' % \
+ type(filename_or_handle)
+ })
+
+ if isinstance(photo_or_uri, (str, unicode)):
+ entry_uri = photo_or_uri # it's a uri
+ elif hasattr(photo_or_uri, 'GetEditMediaLink'):
+ entry_uri = photo_or_uri.GetEditMediaLink().href
+ try:
+ return self.Put(photoblob, entry_uri,
+ converter=gdata.photos.PhotoEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+ def InsertTag(self, photo_or_uri, tag):
+ """Add a tag (a.k.a. keyword) to a photo.
+
+ Needs authentication, see self.ClientLogin()
+
+ Arguments:
+ photo_or_uri: a gdata.photos.PhotoEntry that will be tagged, or a
+ `post' uri pointing to it
+ (string) tag: The tag/keyword
+
+ Returns:
+ The new gdata.photos.TagEntry
+
+ Example:
+ p = GetFeed(PhotoUri)
+ tag = InsertTag(p, 'Beautiful sunsets')
+
+ """
+ tag = gdata.photos.TagEntry(title=atom.Title(text=tag))
+ if isinstance(photo_or_uri, (str, unicode)):
+ post_uri = photo_or_uri # it's a uri
+ elif hasattr(photo_or_uri, 'GetEditMediaLink'):
+ post_uri = photo_or_uri.GetPostLink().href
+ try:
+ return self.Post(data=tag, uri=post_uri,
+ converter=gdata.photos.TagEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+
+ def InsertComment(self, photo_or_uri, comment):
+ """Add a comment to a photo.
+
+ Needs authentication, see self.ClientLogin()
+
+ Arguments:
+      photo_or_uri: a gdata.photos.PhotoEntry that is about to be commented on,
+        or a `post' uri pointing to it
+ (string) comment: The actual comment
+
+ Returns:
+ The new gdata.photos.CommentEntry
+
+ Example:
+ p = GetFeed(PhotoUri)
+      comment = InsertComment(p, u"OOOH! I would have loved to be there. "
+                                 u"Who's that in the back?")
+
+ """
+ comment = gdata.photos.CommentEntry(content=atom.Content(text=comment))
+ if isinstance(photo_or_uri, (str, unicode)):
+ post_uri = photo_or_uri # it's a uri
+ elif hasattr(photo_or_uri, 'GetEditMediaLink'):
+ post_uri = photo_or_uri.GetPostLink().href
+ try:
+ return self.Post(data=comment, uri=post_uri,
+ converter=gdata.photos.CommentEntryFromString)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+ def Delete(self, object_or_uri, *args, **kwargs):
+ """Delete an object.
+
+ Re-implementing the GDataService.Delete method, to add some
+ convenience.
+
+ Arguments:
+ object_or_uri: Any object that has a GetEditLink() method that
+ returns a link, or a uri to that object.
+
+ Returns:
+      The return value of gdata.service.GDataService.Delete, or raises
+      GooglePhotosException on errors
+ """
+ try:
+ uri = object_or_uri.GetEditLink().href
+ except AttributeError:
+ uri = object_or_uri
+ try:
+ return gdata.service.GDataService.Delete(self, uri, *args, **kwargs)
+ except gdata.service.RequestError, e:
+ raise GooglePhotosException(e.args[0])
+
+def GetSmallestThumbnail(media_thumbnail_list):
+ """Helper function to get the smallest thumbnail of a list of
+ gdata.media.Thumbnail.
+ Returns gdata.media.Thumbnail """
+ r = {}
+ for thumb in media_thumbnail_list:
+ r[int(thumb.width)*int(thumb.height)] = thumb
+ keys = r.keys()
+ keys.sort()
+ return r[keys[0]]
+
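+# A minimal usage sketch (assumes `photo` is a gdata.photos.PhotoEntry whose
+# media:group thumbnails are exposed as photo.media.thumbnail):
+#
+#   smallest = GetSmallestThumbnail(photo.media.thumbnail)
+#   print smallest.url, smallest.width, smallest.height
+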
+def ConvertAtomTimestampToEpoch(timestamp):
+ """Helper function to convert a timestamp string, for instance
+  from atom:updated or atom:published, to seconds since the Unix epoch
+  (a.k.a. POSIX time), as returned by time.mktime.
+
+ `2007-07-22T00:45:10.000Z' -> """
+ return time.mktime(time.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.000Z'))
+ ## TODO: Timezone aware
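+
+# Usage sketch: the value returned above is in seconds since the Unix epoch,
+# as produced by time.mktime (interpreted in the local timezone for now):
+#
+#   posix_time = ConvertAtomTimestampToEpoch('2007-07-22T00:45:10.000Z')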
diff --git a/python/gdata/projecthosting/__init__.py b/python/gdata/projecthosting/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/python/gdata/projecthosting/__init__.py
@@ -0,0 +1 @@
+
diff --git a/python/gdata/projecthosting/client.py b/python/gdata/projecthosting/client.py
new file mode 100644
index 0000000..8e36a7c
--- /dev/null
+++ b/python/gdata/projecthosting/client.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import atom.data
+import gdata.client
+import gdata.gauth
+import gdata.projecthosting.data
+
+
+class ProjectHostingClient(gdata.client.GDClient):
+ """Client to interact with the Project Hosting GData API."""
+ api_version = '1.0'
+ auth_service = 'code'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['code']
+ host = 'code.google.com'
+
+ def get_issues(self, project_name,
+ desired_class=gdata.projecthosting.data.IssuesFeed, **kwargs):
+ """Get a feed of issues for a particular project.
+
+ Args:
+ project_name str The name of the project.
+      query Query (optional) A Query whose parameters filter the returned issues.
+
+ Returns:
+ data.IssuesFeed
+ """
+ return self.get_feed(gdata.projecthosting.data.ISSUES_FULL_FEED %
+ project_name, desired_class=desired_class, **kwargs)
+
+ def add_issue(self, project_name, title, content, author,
+ status=None, owner=None, labels=None, ccs=None, **kwargs):
+ """Create a new issue for the project.
+
+ Args:
+ project_name str The name of the project.
+ title str The title of the new issue.
+ content str The summary of the new issue.
+ author str The authenticated user's username.
+ status str The status of the new issue, Accepted, etc.
+ owner str The username of new issue's owner.
+ labels [str] Labels to associate with the new issue.
+ ccs [str] usernames to Cc on the new issue.
+ Returns:
+ data.IssueEntry
+ """
+ new_entry = gdata.projecthosting.data.IssueEntry(
+ title=atom.data.Title(text=title),
+ content=atom.data.Content(text=content),
+ author=[atom.data.Author(name=atom.data.Name(text=author))])
+
+ if status:
+ new_entry.status = gdata.projecthosting.data.Status(text=status)
+
+ if owner:
+      new_entry.owner = gdata.projecthosting.data.Owner(
+        username=gdata.projecthosting.data.Username(text=owner))
+
+ if labels:
+ new_entry.label = [gdata.projecthosting.data.Label(text=label)
+ for label in labels]
+ if ccs:
+ new_entry.cc = [
+ gdata.projecthosting.data.Cc(
+ username=gdata.projecthosting.data.Username(text=cc))
+ for cc in ccs]
+
+ return self.post(
+ new_entry,
+ gdata.projecthosting.data.ISSUES_FULL_FEED % project_name,
+ **kwargs)
+
+ def update_issue(self, project_name, issue_id, author, comment=None,
+ summary=None, status=None, owner=None, labels=None, ccs=None,
+ **kwargs):
+ """Update or comment on one issue for the project.
+
+ Args:
+ project_name str The name of the issue's project.
+      issue_id str The id of the issue to update.
+      author str The authenticated user's username.
+      comment str A comment to append to the issue.
+      summary str A new summary for the issue.
+      status str A new status for the issue.
+      owner str The username of the new owner.
+      labels [str] Labels to set on the issue (prepend a label with '-' to
+          remove it).
+      ccs [str] Ccs to set on the issue (prepend a cc with '-' to remove it).
+
+ Returns:
+ data.CommentEntry
+ """
+ updates = gdata.projecthosting.data.Updates()
+
+ if summary:
+ updates.summary = gdata.projecthosting.data.Summary(text=summary)
+
+ if status:
+ updates.status = gdata.projecthosting.data.Status(text=status)
+
+ if owner:
+ updates.ownerUpdate = gdata.projecthosting.data.OwnerUpdate(text=owner)
+
+ if labels:
+ updates.label = [gdata.projecthosting.data.Label(text=label)
+ for label in labels]
+ if ccs:
+ updates.ccUpdate = [gdata.projecthosting.data.CcUpdate(text=cc)
+ for cc in ccs]
+
+ update_entry = gdata.projecthosting.data.CommentEntry(
+ content=atom.data.Content(text=comment),
+ author=[atom.data.Author(name=atom.data.Name(text=author))],
+ updates=updates)
+
+ return self.post(
+ update_entry,
+ gdata.projecthosting.data.COMMENTS_FULL_FEED % (project_name, issue_id),
+ **kwargs)
+
+ def get_comments(self, project_name, issue_id,
+ desired_class=gdata.projecthosting.data.CommentsFeed,
+ **kwargs):
+ """Get a feed of all updates to an issue.
+
+ Args:
+ project_name str The name of the issue's project.
+      issue_id str The id of the issue whose comments are requested.
+
+ Returns:
+ data.CommentsFeed
+ """
+ return self.get_feed(
+ gdata.projecthosting.data.COMMENTS_FULL_FEED % (project_name, issue_id),
+ desired_class=desired_class, **kwargs)
+
+ def update(self, entry, auth_token=None, force=False, **kwargs):
+ """Unsupported GData update method.
+
+ Use update_*() instead.
+ """
+ raise NotImplementedError(
+ 'GData Update operation unsupported, try update_*')
+
+ def delete(self, entry_or_uri, auth_token=None, force=False, **kwargs):
+ """Unsupported GData delete method.
+
+ Use update_issue(status='Closed') instead.
+ """
+ raise NotImplementedError(
+ 'GData Delete API unsupported, try closing the issue instead.')
+
+
+class Query(gdata.client.Query):
+
+ def __init__(self, issue_id=None, label=None, canned_query=None, owner=None,
+ status=None, **kwargs):
+ """Constructs a Google Data Query to filter feed contents serverside.
+ Args:
+ issue_id: int or str The issue to return based on the issue id.
+ label: str A label returned issues must have.
+ canned_query: str Return issues based on a canned query identifier
+ owner: str Return issues based on the owner of the issue. For Gmail users,
+ this will be the part of the email preceding the '@' sign.
+ status: str Return issues based on the status of the issue.
+ """
+ super(Query, self).__init__(**kwargs)
+ self.label = label
+ self.issue_id = issue_id
+ self.canned_query = canned_query
+ self.owner = owner
+ self.status = status
+
+ def modify_request(self, http_request):
+ if self.issue_id:
+ gdata.client._add_query_param('id', self.issue_id, http_request)
+ if self.label:
+ gdata.client._add_query_param('label', self.label, http_request)
+ if self.canned_query:
+ gdata.client._add_query_param('can', self.canned_query, http_request)
+ if self.owner:
+ gdata.client._add_query_param('owner', self.owner, http_request)
+ if self.status:
+ gdata.client._add_query_param('status', self.status, http_request)
+ super(Query, self).modify_request(http_request)
+
+ ModifyRequest = modify_request
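+
+# Illustrative usage sketch (not part of the original module). The project
+# name and credentials are placeholders, and it assumes the inherited
+# gdata.client login helpers are available and that the `query` keyword is
+# forwarded to the request as in the other gdata v2 client samples:
+#
+#   client = ProjectHostingClient()
+#   client.client_login('user@example.com', 'password',
+#                       source='example-issues-sample', service='code')
+#   query = Query(status='Open', label='Type-Defect', max_results=25)
+#   feed = client.get_issues('my-project', query=query)
+#   for issue in feed.entry:
+#     print issue.title.text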
diff --git a/python/gdata/projecthosting/data.py b/python/gdata/projecthosting/data.py
new file mode 100644
index 0000000..b0af2f5
--- /dev/null
+++ b/python/gdata/projecthosting/data.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+#
+# Copyright 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+"""Provides classes and constants for XML in the Google Project Hosting API.
+
+Canonical documentation for the raw XML which these classes represent can be
+found here: http://code.google.com/p/support/wiki/IssueTrackerAPI
+"""
+
+
+__author__ = 'jlapenna@google.com (Joe LaPenna)'
+
+import atom.core
+import atom.data
+import gdata.data
+
+
+ISSUES_TEMPLATE = '{http://schemas.google.com/projecthosting/issues/2009}%s'
+
+
+ISSUES_FULL_FEED = '/feeds/issues/p/%s/issues/full'
+COMMENTS_FULL_FEED = '/feeds/issues/p/%s/issues/%s/comments/full'
+
+
+class Uri(atom.core.XmlElement):
+ """The issues:uri element."""
+ _qname = ISSUES_TEMPLATE % 'uri'
+
+
+class Username(atom.core.XmlElement):
+ """The issues:username element."""
+ _qname = ISSUES_TEMPLATE % 'username'
+
+
+class Cc(atom.core.XmlElement):
+ """The issues:cc element."""
+ _qname = ISSUES_TEMPLATE % 'cc'
+ uri = Uri
+ username = Username
+
+
+class Label(atom.core.XmlElement):
+ """The issues:label element."""
+ _qname = ISSUES_TEMPLATE % 'label'
+
+
+class Owner(atom.core.XmlElement):
+ """The issues:owner element."""
+ _qname = ISSUES_TEMPLATE % 'owner'
+ uri = Uri
+ username = Username
+
+
+class Stars(atom.core.XmlElement):
+ """The issues:stars element."""
+ _qname = ISSUES_TEMPLATE % 'stars'
+
+
+class State(atom.core.XmlElement):
+ """The issues:state element."""
+ _qname = ISSUES_TEMPLATE % 'state'
+
+
+class Status(atom.core.XmlElement):
+ """The issues:status element."""
+ _qname = ISSUES_TEMPLATE % 'status'
+
+
+class Summary(atom.core.XmlElement):
+ """The issues:summary element."""
+ _qname = ISSUES_TEMPLATE % 'summary'
+
+
+class OwnerUpdate(atom.core.XmlElement):
+ """The issues:ownerUpdate element."""
+ _qname = ISSUES_TEMPLATE % 'ownerUpdate'
+
+
+class CcUpdate(atom.core.XmlElement):
+ """The issues:ccUpdate element."""
+ _qname = ISSUES_TEMPLATE % 'ccUpdate'
+
+
+class Updates(atom.core.XmlElement):
+ """The issues:updates element."""
+ _qname = ISSUES_TEMPLATE % 'updates'
+ summary = Summary
+ status = Status
+ ownerUpdate = OwnerUpdate
+ label = [Label]
+ ccUpdate = [CcUpdate]
+
+
+class IssueEntry(gdata.data.GDEntry):
+ """Represents the information of one issue."""
+ _qname = atom.data.ATOM_TEMPLATE % 'entry'
+ owner = Owner
+ cc = [Cc]
+ label = [Label]
+ stars = Stars
+ state = State
+ status = Status
+
+
+class IssuesFeed(gdata.data.GDFeed):
+ """An Atom feed listing a project's issues."""
+ entry = [IssueEntry]
+
+
+class CommentEntry(gdata.data.GDEntry):
+ """An entry detailing one comment on an issue."""
+ _qname = atom.data.ATOM_TEMPLATE % 'entry'
+ updates = Updates
+
+
+class CommentsFeed(gdata.data.GDFeed):
+ """An Atom feed listing a project's issue's comments."""
+ entry = [CommentEntry]
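+
+# Illustrative sketch (hypothetical values): these element classes are normally
+# assembled by gdata.projecthosting.client, e.g. an update that marks an issue
+# Fixed with a new owner and label:
+#
+#   updates = Updates(status=Status(text='Fixed'),
+#                     ownerUpdate=OwnerUpdate(text='new-owner'),
+#                     label=[Label(text='Milestone-2009')])
+#   entry = CommentEntry(content=atom.data.Content(text='Fixed in r123'),
+#                        updates=updates)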
diff --git a/python/gdata/sample_util.py b/python/gdata/sample_util.py
new file mode 100644
index 0000000..aae866e
--- /dev/null
+++ b/python/gdata/sample_util.py
@@ -0,0 +1,269 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Provides utility functions used with command line samples."""
+
+# This module is used for version 2 of the Google Data APIs.
+
+import sys
+import getpass
+import urllib
+import gdata.gauth
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+CLIENT_LOGIN = 1
+AUTHSUB = 2
+OAUTH = 3
+
+HMAC = 1
+RSA = 2
+
+
+class SettingsUtil(object):
+ """Gather's user preferences from flags or command prompts.
+
+ An instance of this object stores the choices made by the user. At some
+ point it might be useful to save the user's preferences so that they do
+ not need to always set flags or answer preference prompts.
+ """
+
+ def __init__(self, prefs=None):
+ self.prefs = prefs or {}
+
+ def get_param(self, name, prompt='', secret=False, ask=True, reuse=False):
+    # First, check in this object's stored preferences.
+ if name in self.prefs:
+ return self.prefs[name]
+ # Second, check for a command line parameter.
+ value = None
+ for i in xrange(len(sys.argv)):
+ if sys.argv[i].startswith('--%s=' % name):
+ value = sys.argv[i].split('=')[1]
+ elif sys.argv[i] == '--%s' % name:
+ value = sys.argv[i + 1]
+ # Third, if it was not on the command line, ask the user to input the
+ # value.
+ if value is None and ask:
+ prompt = '%s: ' % prompt
+ if secret:
+ value = getpass.getpass(prompt)
+ else:
+ value = raw_input(prompt)
+ # If we want to save the preference for reuse in future requests, add it
+ # to this object's prefs.
+ if value is not None and reuse:
+ self.prefs[name] = value
+ return value
+
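+  # Usage sketch: a value may be supplied as '--email=foo@example.com' or
+  # '--email foo@example.com' on the command line; otherwise the user is
+  # prompted (via getpass when secret=True), e.g.:
+  #
+  #   settings = SettingsUtil()
+  #   email = settings.get_param('email', 'Please enter your email', reuse=True)
+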
+ def authorize_client(self, client, auth_type=None, service=None,
+ source=None, scopes=None, oauth_type=None,
+ consumer_key=None, consumer_secret=None):
+ """Uses command line arguments, or prompts user for token values."""
+ if 'client_auth_token' in self.prefs:
+ return
+ if auth_type is None:
+ auth_type = int(self.get_param(
+ 'auth_type', 'Please choose the authorization mechanism you want'
+ ' to use.\n'
+ '1. to use your email address and password (ClientLogin)\n'
+ '2. to use a web browser to visit an auth web page (AuthSub)\n'
+          '3. if you have registered to use OAuth\n', reuse=True))
+
+ # Get the scopes for the services we want to access.
+ if auth_type == AUTHSUB or auth_type == OAUTH:
+ if scopes is None:
+ scopes = self.get_param(
+ 'scopes', 'Enter the URL prefixes (scopes) for the resources you '
+ 'would like to access.\nFor multiple scope URLs, place a comma '
+ 'between each URL.\n'
+ 'Example: http://www.google.com/calendar/feeds/,'
+ 'http://www.google.com/m8/feeds/\n', reuse=True).split(',')
+ elif isinstance(scopes, (str, unicode)):
+ scopes = scopes.split(',')
+
+ if auth_type == CLIENT_LOGIN:
+ email = self.get_param('email', 'Please enter your username',
+ reuse=False)
+ password = self.get_param('password', 'Password', True, reuse=False)
+ if service is None:
+ service = self.get_param(
+ 'service', 'What is the name of the service you wish to access?'
+ '\n(See list:'
+ ' http://code.google.com/apis/gdata/faq.html#clientlogin)',
+ reuse=True)
+ if source is None:
+ source = self.get_param('source', ask=False, reuse=True)
+ client.client_login(email, password, source=source, service=service)
+ elif auth_type == AUTHSUB:
+ auth_sub_token = self.get_param('auth_sub_token', ask=False, reuse=True)
+ session_token = self.get_param('session_token', ask=False, reuse=True)
+ private_key = None
+ auth_url = None
+ single_use_token = None
+ rsa_private_key = self.get_param(
+ 'rsa_private_key',
+ 'If you want to use secure mode AuthSub, please provide the\n'
+ ' location of your RSA private key which corresponds to the\n'
+ ' certificate you have uploaded for your domain. If you do not\n'
+ ' have an RSA key, simply press enter', reuse=True)
+
+ if rsa_private_key:
+ try:
+ private_key_file = open(rsa_private_key, 'rb')
+ private_key = private_key_file.read()
+ private_key_file.close()
+ except IOError:
+ print 'Unable to read private key from file'
+
+ if private_key is not None:
+ if client.auth_token is None:
+ if session_token:
+ client.auth_token = gdata.gauth.SecureAuthSubToken(
+ session_token, private_key, scopes)
+ self.prefs['client_auth_token'] = gdata.gauth.token_to_blob(
+ client.auth_token)
+ return
+ elif auth_sub_token:
+ client.auth_token = gdata.gauth.SecureAuthSubToken(
+ auth_sub_token, private_key, scopes)
+ client.upgrade_token()
+ self.prefs['client_auth_token'] = gdata.gauth.token_to_blob(
+ client.auth_token)
+ return
+
+ auth_url = gdata.gauth.generate_auth_sub_url(
+ 'http://gauthmachine.appspot.com/authsub', scopes, True)
+ print 'with a private key, get ready for this URL', auth_url
+
+ else:
+ if client.auth_token is None:
+ if session_token:
+ client.auth_token = gdata.gauth.AuthSubToken(session_token,
+ scopes)
+ self.prefs['client_auth_token'] = gdata.gauth.token_to_blob(
+ client.auth_token)
+ return
+ elif auth_sub_token:
+ client.auth_token = gdata.gauth.AuthSubToken(auth_sub_token,
+ scopes)
+ client.upgrade_token()
+ self.prefs['client_auth_token'] = gdata.gauth.token_to_blob(
+ client.auth_token)
+ return
+
+ auth_url = gdata.gauth.generate_auth_sub_url(
+ 'http://gauthmachine.appspot.com/authsub', scopes)
+
+ print 'Visit the following URL in your browser to authorize this app:'
+ print str(auth_url)
+ print 'After agreeing to authorize the app, copy the token value from'
+ print ' the URL. Example: "www.google.com/?token=ab12" token value is'
+ print ' ab12'
+ token_value = raw_input('Please enter the token value: ')
+ if private_key is not None:
+ single_use_token = gdata.gauth.SecureAuthSubToken(
+ token_value, private_key, scopes)
+ else:
+ single_use_token = gdata.gauth.AuthSubToken(token_value, scopes)
+ client.auth_token = single_use_token
+ client.upgrade_token()
+
+ elif auth_type == OAUTH:
+ if oauth_type is None:
+ oauth_type = int(self.get_param(
+ 'oauth_type', 'Please choose the authorization mechanism you want'
+ ' to use.\n'
+ '1. use an HMAC signature using your consumer key and secret\n'
+ '2. use RSA with your private key to sign requests\n',
+ reuse=True))
+
+ consumer_key = self.get_param(
+          'consumer_key', 'Please enter your OAuth consumer key '
+ 'which identifies your app', reuse=True)
+
+ if oauth_type == HMAC:
+ consumer_secret = self.get_param(
+            'consumer_secret', 'Please enter your OAuth consumer secret '
+ 'which you share with the OAuth provider', True, reuse=False)
+ # Swap out this code once the client supports requesting an oauth
+ # token.
+ # Get a request token.
+ request_token = client.get_oauth_token(
+ scopes, 'http://gauthmachine.appspot.com/oauth', consumer_key,
+ consumer_secret=consumer_secret)
+ elif oauth_type == RSA:
+ rsa_private_key = self.get_param(
+ 'rsa_private_key',
+ 'Please provide the location of your RSA private key which\n'
+ ' corresponds to the certificate you have uploaded for your'
+ ' domain.',
+ reuse=True)
+ try:
+ private_key_file = open(rsa_private_key, 'rb')
+ private_key = private_key_file.read()
+ private_key_file.close()
+ except IOError:
+ print 'Unable to read private key from file'
+
+ request_token = client.get_oauth_token(
+ scopes, 'http://gauthmachine.appspot.com/oauth', consumer_key,
+ rsa_private_key=private_key)
+ else:
+ print 'Invalid OAuth signature type'
+ return None
+
+ # Authorize the request token in the browser.
+ print 'Visit the following URL in your browser to authorize this app:'
+ print str(request_token.generate_authorization_url())
+      print 'After agreeing to authorize the app, copy the URL from the browser\'s'
+ print ' address bar.'
+ url = raw_input('Please enter the url: ')
+ gdata.gauth.authorize_request_token(request_token, url)
+ # Exchange for an access token.
+ client.auth_token = client.get_access_token(request_token)
+ else:
+ print 'Invalid authorization type.'
+ return None
+ if client.auth_token:
+ self.prefs['client_auth_token'] = gdata.gauth.token_to_blob(
+ client.auth_token)
+
+
+def get_param(name, prompt='', secret=False, ask=True):
+ settings = SettingsUtil()
+ return settings.get_param(name=name, prompt=prompt, secret=secret, ask=ask)
+
+
+def authorize_client(client, auth_type=None, service=None, source=None,
+ scopes=None, oauth_type=None, consumer_key=None,
+ consumer_secret=None):
+ """Uses command line arguments, or prompts user for token values."""
+ settings = SettingsUtil()
+ return settings.authorize_client(client=client, auth_type=auth_type,
+ service=service, source=source,
+ scopes=scopes, oauth_type=oauth_type,
+ consumer_key=consumer_key,
+ consumer_secret=consumer_secret)
+
+
+def print_options():
+ """Displays usage information, available command line params."""
+ # TODO: fill in the usage description for authorizing the client.
+ print ''
+
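+# Illustrative sketch (hypothetical sample script) showing how the module-level
+# helpers above are typically used from a command line sample:
+#
+#   import gdata.projecthosting.client
+#   import gdata.sample_util
+#
+#   client = gdata.projecthosting.client.ProjectHostingClient()
+#   gdata.sample_util.authorize_client(
+#       client, auth_type=gdata.sample_util.CLIENT_LOGIN,
+#       service='code', source='example-sample')
+#   project = gdata.sample_util.get_param('project', 'Project name')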
diff --git a/python/gdata/service.py b/python/gdata/service.py
new file mode 100644
index 0000000..6bdae76
--- /dev/null
+++ b/python/gdata/service.py
@@ -0,0 +1,1718 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006,2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""GDataService provides CRUD ops. and programmatic login for GData services.
+
+ Error: A base exception class for all exceptions in the gdata_client
+ module.
+
+ CaptchaRequired: This exception is thrown when a login attempt results in a
+ captcha challenge from the ClientLogin service. When this
+ exception is thrown, the captcha_token and captcha_url are
+ set to the values provided in the server's response.
+
+ BadAuthentication: Raised when a login attempt is made with an incorrect
+ username or password.
+
+ NotAuthenticated: Raised if an operation requiring authentication is called
+ before a user has authenticated.
+
+ NonAuthSubToken: Raised if a method to modify an AuthSub token is used when
+ the user is either not authenticated or is authenticated
+ through another authentication mechanism.
+
+ NonOAuthToken: Raised if a method to modify an OAuth token is used when the
+ user is either not authenticated or is authenticated through
+ another authentication mechanism.
+
+ RequestError: Raised if a CRUD request returned a non-success code.
+
+ UnexpectedReturnType: Raised if the response from the server was not of the
+ desired type. For example, this would be raised if the
+ server sent a feed when the client requested an entry.
+
+ GDataService: Encapsulates user credentials needed to perform insert, update
+ and delete operations with the GData API. An instance can
+ perform user authentication, query, insertion, deletion, and
+ update.
+
+ Query: Eases query URI creation by allowing URI parameters to be set as
+ dictionary attributes. For example a query with a feed of
+ '/base/feeds/snippets' and ['bq'] set to 'digital camera' will
+ produce '/base/feeds/snippets?bq=digital+camera' when .ToUri() is
+ called on it.
+"""
+
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+import re
+import urllib
+import urlparse
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom.service
+import gdata
+import atom
+import atom.http_interface
+import atom.token_store
+import gdata.auth
+import gdata.gauth
+
+
+AUTH_SERVER_HOST = 'https://www.google.com'
+
+
+# When requesting an AuthSub token, it is often helpful to track the scope
+# which is being requested. One way to accomplish this is to add a URL
+# parameter to the 'next' URL which contains the requested scope. This
+# constant is the default name (AKA key) for the URL parameter.
+SCOPE_URL_PARAM_NAME = 'authsub_token_scope'
+# When requesting an OAuth access token or authorization of an existing OAuth
+# request token, it is often helpful to track the scope(s) which is/are being
+# requested. One way to accomplish this is to add a URL parameter to the
+# 'callback' URL which contains the requested scope. This constant is the
+# default name (AKA key) for the URL parameter.
+OAUTH_SCOPE_URL_PARAM_NAME = 'oauth_token_scope'
+# Maps the service names used in ClientLogin to scope URLs.
+CLIENT_LOGIN_SCOPES = gdata.gauth.AUTH_SCOPES
+# Default parameters for GDataService.GetWithRetries method
+DEFAULT_NUM_RETRIES = 3
+DEFAULT_DELAY = 1
+DEFAULT_BACKOFF = 2
+
+
+def lookup_scopes(service_name):
+ """Finds the scope URLs for the desired service.
+
+ In some cases, an unknown service may be used, and in those cases this
+ function will return None.
+ """
+ if service_name in CLIENT_LOGIN_SCOPES:
+ return CLIENT_LOGIN_SCOPES[service_name]
+ return None
+
+
+# Module level variable specifies which module should be used by GDataService
+# objects to make HttpRequests. This setting can be overridden on each
+# instance of GDataService.
+# This module level variable is deprecated. Reassign the http_client member
+# of a GDataService object instead.
+http_request_handler = atom.service
+
+
+class Error(Exception):
+ pass
+
+
+class CaptchaRequired(Error):
+ pass
+
+
+class BadAuthentication(Error):
+ pass
+
+
+class NotAuthenticated(Error):
+ pass
+
+
+class NonAuthSubToken(Error):
+ pass
+
+
+class NonOAuthToken(Error):
+ pass
+
+
+class RequestError(Error):
+ pass
+
+
+class UnexpectedReturnType(Error):
+ pass
+
+
+class BadAuthenticationServiceURL(Error):
+ pass
+
+
+class FetchingOAuthRequestTokenFailed(RequestError):
+ pass
+
+
+class TokenUpgradeFailed(RequestError):
+ pass
+
+
+class RevokingOAuthTokenFailed(RequestError):
+ pass
+
+
+class AuthorizationRequired(Error):
+ pass
+
+
+class TokenHadNoScope(Error):
+ pass
+
+
+class RanOutOfTries(Error):
+ pass
+
+
+class GDataService(atom.service.AtomService):
+ """Contains elements needed for GData login and CRUD request headers.
+
+ Maintains additional headers (tokens for example) needed for the GData
+ services to allow a user to perform inserts, updates, and deletes.
+ """
+  # The handler member is deprecated, use http_client instead.
+ handler = None
+ # The auth_token member is deprecated, use the token_store instead.
+ auth_token = None
+ # The tokens dict is deprecated in favor of the token_store.
+ tokens = None
+
+ def __init__(self, email=None, password=None, account_type='HOSTED_OR_GOOGLE',
+ service=None, auth_service_url=None, source=None, server=None,
+ additional_headers=None, handler=None, tokens=None,
+ http_client=None, token_store=None):
+ """Creates an object of type GDataService.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ account_type: string (optional) The type of account to use. Use
+ 'GOOGLE' for regular Google accounts or 'HOSTED' for Google
+ Apps accounts, or 'HOSTED_OR_GOOGLE' to try finding a HOSTED
+ account first and, if it doesn't exist, try finding a regular
+ GOOGLE account. Default value: 'HOSTED_OR_GOOGLE'.
+ service: string (optional) The desired service for which credentials
+ will be obtained.
+ auth_service_url: string (optional) User-defined auth token request URL
+ allows users to explicitly specify where to send auth token requests.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'base.google.com'.
+ additional_headers: dictionary (optional) Any additional headers which
+ should be included with CRUD operations.
+ handler: module (optional) This parameter is deprecated and has been
+ replaced by http_client.
+ tokens: This parameter is deprecated, calls should be made to
+ token_store instead.
+ http_client: An object responsible for making HTTP requests using a
+ request method. If none is provided, a new instance of
+ atom.http.ProxiedHttpClient will be used.
+ token_store: Keeps a collection of authorization tokens which can be
+          applied to requests for specific URLs. Critical methods are
+ find_token based on a URL (atom.url.Url or a string), add_token,
+ and remove_token.
+ """
+ atom.service.AtomService.__init__(self, http_client=http_client,
+ token_store=token_store)
+ self.email = email
+ self.password = password
+ self.account_type = account_type
+ self.service = service
+ self.auth_service_url = auth_service_url
+ self.server = server
+ self.additional_headers = additional_headers or {}
+ self._oauth_input_params = None
+ self.__SetSource(source)
+ self.__captcha_token = None
+ self.__captcha_url = None
+ self.__gsessionid = None
+
+ if http_request_handler.__name__ == 'gdata.urlfetch':
+ import gdata.alt.appengine
+ self.http_client = gdata.alt.appengine.AppEngineHttpClient()
+
+ def _SetSessionId(self, session_id):
+ """Used in unit tests to simulate a 302 which sets a gsessionid."""
+ self.__gsessionid = session_id
+
+ # Define properties for GDataService
+ def _SetAuthSubToken(self, auth_token, scopes=None):
+ """Deprecated, use SetAuthSubToken instead."""
+ self.SetAuthSubToken(auth_token, scopes=scopes)
+
+ def __SetAuthSubToken(self, auth_token, scopes=None):
+ """Deprecated, use SetAuthSubToken instead."""
+ self._SetAuthSubToken(auth_token, scopes=scopes)
+
+ def _GetAuthToken(self):
+ """Returns the auth token used for authenticating requests.
+
+ Returns:
+ string
+ """
+ current_scopes = lookup_scopes(self.service)
+ if current_scopes:
+ token = self.token_store.find_token(current_scopes[0])
+ if hasattr(token, 'auth_header'):
+ return token.auth_header
+ return None
+
+ def _GetCaptchaToken(self):
+ """Returns a captcha token if the most recent login attempt generated one.
+
+ The captcha token is only set if the Programmatic Login attempt failed
+ because the Google service issued a captcha challenge.
+
+ Returns:
+ string
+ """
+ return self.__captcha_token
+
+ def __GetCaptchaToken(self):
+ return self._GetCaptchaToken()
+
+ captcha_token = property(__GetCaptchaToken,
+ doc="""Get the captcha token for a login request.""")
+
+ def _GetCaptchaURL(self):
+ """Returns the URL of the captcha image if a login attempt generated one.
+
+ The captcha URL is only set if the Programmatic Login attempt failed
+ because the Google service issued a captcha challenge.
+
+ Returns:
+ string
+ """
+ return self.__captcha_url
+
+ def __GetCaptchaURL(self):
+ return self._GetCaptchaURL()
+
+ captcha_url = property(__GetCaptchaURL,
+ doc="""Get the captcha URL for a login request.""")
+
+ def GetGeneratorFromLinkFinder(self, link_finder, func,
+ num_retries=DEFAULT_NUM_RETRIES,
+ delay=DEFAULT_DELAY,
+ backoff=DEFAULT_BACKOFF):
+ """returns a generator for pagination"""
+ yield link_finder
+ next = link_finder.GetNextLink()
+ while next is not None:
+ next_feed = func(str(self.GetWithRetries(
+ next.href, num_retries=num_retries, delay=delay, backoff=backoff)))
+ yield next_feed
+ next = next_feed.GetNextLink()
+
+ def _GetElementGeneratorFromLinkFinder(self, link_finder, func,
+ num_retries=DEFAULT_NUM_RETRIES,
+ delay=DEFAULT_DELAY,
+ backoff=DEFAULT_BACKOFF):
+ for element in self.GetGeneratorFromLinkFinder(link_finder, func,
+ num_retries=num_retries,
+ delay=delay,
+ backoff=backoff).entry:
+ yield element
+
+ def GetOAuthInputParameters(self):
+ return self._oauth_input_params
+
+ def SetOAuthInputParameters(self, signature_method, consumer_key,
+ consumer_secret=None, rsa_key=None,
+ two_legged_oauth=False, requestor_id=None):
+ """Sets parameters required for using OAuth authentication mechanism.
+
+ NOTE: Though consumer_secret and rsa_key are optional, either of the two
+ is required depending on the value of the signature_method.
+
+ Args:
+ signature_method: class which provides implementation for strategy class
+ oauth.oauth.OAuthSignatureMethod. Signature method to be used for
+ signing each request. Valid implementations are provided as the
+ constants defined by gdata.auth.OAuthSignatureMethod. Currently
+ they are gdata.auth.OAuthSignatureMethod.RSA_SHA1 and
+ gdata.auth.OAuthSignatureMethod.HMAC_SHA1
+ consumer_key: string Domain identifying third_party web application.
+ consumer_secret: string (optional) Secret generated during registration.
+ Required only for HMAC_SHA1 signature method.
+ rsa_key: string (optional) Private key required for RSA_SHA1 signature
+ method.
+ two_legged_oauth: boolean (optional) Enables two-legged OAuth process.
+      requestor_id: string (optional) User email address to make requests on
+ their behalf. This parameter should only be set when two_legged_oauth
+ is True.
+ """
+ self._oauth_input_params = gdata.auth.OAuthInputParams(
+ signature_method, consumer_key, consumer_secret=consumer_secret,
+ rsa_key=rsa_key, requestor_id=requestor_id)
+ if two_legged_oauth:
+ oauth_token = gdata.auth.OAuthToken(
+ oauth_input_params=self._oauth_input_params)
+ self.SetOAuthToken(oauth_token)
+
+ def FetchOAuthRequestToken(self, scopes=None, extra_parameters=None,
+ request_url='%s/accounts/OAuthGetRequestToken' % \
+ AUTH_SERVER_HOST, oauth_callback=None):
+ """Fetches and sets the OAuth request token and returns it.
+
+ Args:
+ scopes: string or list of string base URL(s) of the service(s) to be
+ accessed. If None, then this method tries to determine the
+ scope(s) from the current service.
+ extra_parameters: dict (optional) key-value pairs as any additional
+ parameters to be included in the URL and signature while making a
+ request for fetching an OAuth request token. All the OAuth parameters
+ are added by default. But if provided through this argument, any
+ default parameters will be overwritten. For e.g. a default parameter
+ oauth_version 1.0 can be overwritten if
+ extra_parameters = {'oauth_version': '2.0'}
+ request_url: Request token URL. The default is
+ 'https://www.google.com/accounts/OAuthGetRequestToken'.
+      oauth_callback: str (optional) If set, it is assumed the client is using
+ the OAuth v1.0a protocol where the callback url is sent in the
+ request token step. If the oauth_callback is also set in
+ extra_params, this value will override that one.
+
+ Returns:
+ The fetched request token as a gdata.auth.OAuthToken object.
+
+ Raises:
+ FetchingOAuthRequestTokenFailed if the server responded to the request
+ with an error.
+ """
+ if scopes is None:
+ scopes = lookup_scopes(self.service)
+ if not isinstance(scopes, (list, tuple)):
+ scopes = [scopes,]
+ if oauth_callback:
+ if extra_parameters is not None:
+ extra_parameters['oauth_callback'] = oauth_callback
+ else:
+ extra_parameters = {'oauth_callback': oauth_callback}
+ request_token_url = gdata.auth.GenerateOAuthRequestTokenUrl(
+ self._oauth_input_params, scopes,
+ request_token_url=request_url,
+ extra_parameters=extra_parameters)
+ response = self.http_client.request('GET', str(request_token_url))
+ if response.status == 200:
+ token = gdata.auth.OAuthToken()
+ token.set_token_string(response.read())
+ token.scopes = scopes
+ token.oauth_input_params = self._oauth_input_params
+ self.SetOAuthToken(token)
+ return token
+ error = {
+ 'status': response.status,
+ 'reason': 'Non 200 response on fetch request token',
+ 'body': response.read()
+ }
+ raise FetchingOAuthRequestTokenFailed(error)
+
+ def SetOAuthToken(self, oauth_token):
+ """Attempts to set the current token and add it to the token store.
+
+ The oauth_token can be any OAuth token i.e. unauthorized request token,
+ authorized request token or access token.
+ This method also attempts to add the token to the token store.
+ Use this method any time you want the current token to point to the
+ oauth_token passed. For e.g. call this method with the request token
+ you receive from FetchOAuthRequestToken.
+
+ Args:
+      oauth_token: gdata.auth.OAuthToken Any OAuth token (request or access).
+ """
+ if self.auto_set_current_token:
+ self.current_token = oauth_token
+ if self.auto_store_tokens:
+ self.token_store.add_token(oauth_token)
+
+ def GenerateOAuthAuthorizationURL(
+ self, request_token=None, callback_url=None, extra_params=None,
+ include_scopes_in_callback=False,
+ scopes_param_prefix=OAUTH_SCOPE_URL_PARAM_NAME,
+ request_url='%s/accounts/OAuthAuthorizeToken' % AUTH_SERVER_HOST):
+ """Generates URL at which user will login to authorize the request token.
+
+ Args:
+ request_token: gdata.auth.OAuthToken (optional) OAuth request token.
+          If not specified, then the current token will be used if it is of
+          type gdata.auth.OAuthToken, else it is found by searching the
+          token_store for a token for the current scope.
+ callback_url: string (optional) The URL user will be sent to after
+ logging in and granting access.
+ extra_params: dict (optional) Additional parameters to be sent.
+ include_scopes_in_callback: Boolean (default=False) if set to True, and
+ if 'callback_url' is present, the 'callback_url' will be modified to
+ include the scope(s) from the request token as a URL parameter. The
+ key for the 'callback' URL's scope parameter will be
+ OAUTH_SCOPE_URL_PARAM_NAME. The benefit of including the scope URL as
+ a parameter to the 'callback' URL, is that the page which receives
+ the OAuth token will be able to tell which URLs the token grants
+ access to.
+ scopes_param_prefix: string (default='oauth_token_scope') The URL
+ parameter key which maps to the list of valid scopes for the token.
+ This URL parameter will be included in the callback URL along with
+ the scopes of the token as value if include_scopes_in_callback=True.
+ request_url: Authorization URL. The default is
+ 'https://www.google.com/accounts/OAuthAuthorizeToken'.
+ Returns:
+ A string URL at which the user is required to login.
+
+ Raises:
+ NonOAuthToken if the user's request token is not an OAuth token or if a
+ request token was not available.
+ """
+ if request_token and not isinstance(request_token, gdata.auth.OAuthToken):
+ raise NonOAuthToken
+ if not request_token:
+ if isinstance(self.current_token, gdata.auth.OAuthToken):
+ request_token = self.current_token
+ else:
+ current_scopes = lookup_scopes(self.service)
+ if current_scopes:
+ token = self.token_store.find_token(current_scopes[0])
+ if isinstance(token, gdata.auth.OAuthToken):
+ request_token = token
+ if not request_token:
+ raise NonOAuthToken
+ return str(gdata.auth.GenerateOAuthAuthorizationUrl(
+ request_token,
+ authorization_url=request_url,
+ callback_url=callback_url, extra_params=extra_params,
+ include_scopes_in_callback=include_scopes_in_callback,
+ scopes_param_prefix=scopes_param_prefix))
+
+ def UpgradeToOAuthAccessToken(self, authorized_request_token=None,
+ request_url='%s/accounts/OAuthGetAccessToken' \
+ % AUTH_SERVER_HOST, oauth_version='1.0',
+ oauth_verifier=None):
+ """Upgrades the authorized request token to an access token and returns it
+
+ Args:
+ authorized_request_token: gdata.auth.OAuthToken (optional) OAuth request
+ token. If not specified, then the current token will be used if it is
+          of type gdata.auth.OAuthToken, else it is found by searching the
+          token_store for a token for the current scope.
+ request_url: Access token URL. The default is
+ 'https://www.google.com/accounts/OAuthGetAccessToken'.
+ oauth_version: str (default='1.0') oauth_version parameter. All other
+          'oauth_' parameters are added by default. This parameter, too, is
+          added by default, but its value can be overridden here.
+ oauth_verifier: str (optional) If present, it is assumed that the client
+ will use the OAuth v1.0a protocol which includes passing the
+ oauth_verifier (as returned by the SP) in the access token step.
+
+ Returns:
+ Access token
+
+ Raises:
+ NonOAuthToken if the user's authorized request token is not an OAuth
+ token or if an authorized request token was not available.
+ TokenUpgradeFailed if the server responded to the request with an
+ error.
+ """
+ if (authorized_request_token and
+ not isinstance(authorized_request_token, gdata.auth.OAuthToken)):
+ raise NonOAuthToken
+ if not authorized_request_token:
+ if isinstance(self.current_token, gdata.auth.OAuthToken):
+ authorized_request_token = self.current_token
+ else:
+ current_scopes = lookup_scopes(self.service)
+ if current_scopes:
+ token = self.token_store.find_token(current_scopes[0])
+ if isinstance(token, gdata.auth.OAuthToken):
+ authorized_request_token = token
+ if not authorized_request_token:
+ raise NonOAuthToken
+ access_token_url = gdata.auth.GenerateOAuthAccessTokenUrl(
+ authorized_request_token,
+ self._oauth_input_params,
+ access_token_url=request_url,
+ oauth_version=oauth_version,
+ oauth_verifier=oauth_verifier)
+ response = self.http_client.request('GET', str(access_token_url))
+ if response.status == 200:
+ token = gdata.auth.OAuthTokenFromHttpBody(response.read())
+ token.scopes = authorized_request_token.scopes
+ token.oauth_input_params = authorized_request_token.oauth_input_params
+ self.SetOAuthToken(token)
+ return token
+ else:
+ raise TokenUpgradeFailed({'status': response.status,
+ 'reason': 'Non 200 response on upgrade',
+ 'body': response.read()})
+
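+  # Illustrative sketch of the full three-legged OAuth flow using the methods
+  # above (consumer key/secret are placeholders, and it assumes self.service
+  # was set so the default scopes can be looked up):
+  #
+  #   service.SetOAuthInputParameters(
+  #       gdata.auth.OAuthSignatureMethod.HMAC_SHA1, 'example.com',
+  #       consumer_secret='my-consumer-secret')
+  #   request_token = service.FetchOAuthRequestToken()
+  #   print service.GenerateOAuthAuthorizationURL()  # user authorizes here
+  #   service.UpgradeToOAuthAccessToken()  # current token is now an access token
+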
+ def RevokeOAuthToken(self, request_url='%s/accounts/AuthSubRevokeToken' % \
+ AUTH_SERVER_HOST):
+ """Revokes an existing OAuth token.
+
+    Args:
+      request_url: Token revoke URL. The default is
+          'https://www.google.com/accounts/AuthSubRevokeToken'.
+
+ Raises:
+ NonOAuthToken if the user's auth token is not an OAuth token.
+ RevokingOAuthTokenFailed if request for revoking an OAuth token failed.
+ """
+ scopes = lookup_scopes(self.service)
+ token = self.token_store.find_token(scopes[0])
+ if not isinstance(token, gdata.auth.OAuthToken):
+ raise NonOAuthToken
+
+ response = token.perform_request(self.http_client, 'GET', request_url,
+ headers={'Content-Type':'application/x-www-form-urlencoded'})
+ if response.status == 200:
+ self.token_store.remove_token(token)
+ else:
+ raise RevokingOAuthTokenFailed
+
+ def GetAuthSubToken(self):
+ """Returns the AuthSub token as a string.
+
+    If the token is a gdata.auth.AuthSubToken, the Authorization Label
+ ("AuthSub token") is removed.
+
+ This method examines the current_token to see if it is an AuthSubToken
+ or SecureAuthSubToken. If not, it searches the token_store for a token
+ which matches the current scope.
+
+ The current scope is determined by the service name string member.
+
+ Returns:
+ If the current_token is set to an AuthSubToken/SecureAuthSubToken,
+ return the token string. If there is no current_token, a token string
+ for a token which matches the service object's default scope is returned.
+ If there are no tokens valid for the scope, returns None.
+ """
+ if isinstance(self.current_token, gdata.auth.AuthSubToken):
+ return self.current_token.get_token_string()
+ current_scopes = lookup_scopes(self.service)
+ if current_scopes:
+ token = self.token_store.find_token(current_scopes[0])
+ if isinstance(token, gdata.auth.AuthSubToken):
+ return token.get_token_string()
+ else:
+ token = self.token_store.find_token(atom.token_store.SCOPE_ALL)
+ if isinstance(token, gdata.auth.ClientLoginToken):
+ return token.get_token_string()
+ return None
+
+ def SetAuthSubToken(self, token, scopes=None, rsa_key=None):
+ """Sets the token sent in requests to an AuthSub token.
+
+ Sets the current_token and attempts to add the token to the token_store.
+
+ Only use this method if you have received a token from the AuthSub
+ service. The auth token is set automatically when UpgradeToSessionToken()
+ is used. See documentation for Google AuthSub here:
+ http://code.google.com/apis/accounts/AuthForWebApps.html
+
+ Args:
+ token: gdata.auth.AuthSubToken or gdata.auth.SecureAuthSubToken or string
+ The token returned by the AuthSub service. If the token is an
+ AuthSubToken or SecureAuthSubToken, the scope information stored in
+ the token is used. If the token is a string, the scopes parameter is
+ used to determine the valid scopes.
+ scopes: list of URLs for which the token is valid. This is only used
+ if the token parameter is a string.
+ rsa_key: string (optional) Private key required for RSA_SHA1 signature
+ method. This parameter is necessary if the token is a string
+ representing a secure token.
+ """
+ if not isinstance(token, gdata.auth.AuthSubToken):
+ token_string = token
+ if rsa_key:
+ token = gdata.auth.SecureAuthSubToken(rsa_key)
+ else:
+ token = gdata.auth.AuthSubToken()
+
+ token.set_token_string(token_string)
+
+ # If no scopes were set for the token, use the scopes passed in, or
+ # try to determine the scopes based on the current service name. If
+ # all else fails, set the token to match all requests.
+ if not token.scopes:
+ if scopes is None:
+ scopes = lookup_scopes(self.service)
+ if scopes is None:
+ scopes = [atom.token_store.SCOPE_ALL]
+ token.scopes = scopes
+ if self.auto_set_current_token:
+ self.current_token = token
+ if self.auto_store_tokens:
+ self.token_store.add_token(token)
+
+ def GetClientLoginToken(self):
+ """Returns the token string for the current token or a token matching the
+ service scope.
+
+ If the current_token is a ClientLoginToken, the token string for
+ the current token is returned. If the current_token is not set, this method
+ searches for a token in the token_store which is valid for the service
+ object's current scope.
+
+ The current scope is determined by the service name string member.
+    The token string is the end of the Authorization header; it does not
+    include the ClientLogin label.
+ """
+ if isinstance(self.current_token, gdata.auth.ClientLoginToken):
+ return self.current_token.get_token_string()
+ current_scopes = lookup_scopes(self.service)
+ if current_scopes:
+ token = self.token_store.find_token(current_scopes[0])
+ if isinstance(token, gdata.auth.ClientLoginToken):
+ return token.get_token_string()
+ else:
+ token = self.token_store.find_token(atom.token_store.SCOPE_ALL)
+ if isinstance(token, gdata.auth.ClientLoginToken):
+ return token.get_token_string()
+ return None
+
+ def SetClientLoginToken(self, token, scopes=None):
+ """Sets the token sent in requests to a ClientLogin token.
+
+ This method sets the current_token to a new ClientLoginToken and it
+ also attempts to add the ClientLoginToken to the token_store.
+
+ Only use this method if you have received a token from the ClientLogin
+ service. The auth_token is set automatically when ProgrammaticLogin()
+ is used. See documentation for Google ClientLogin here:
+ http://code.google.com/apis/accounts/docs/AuthForInstalledApps.html
+
+ Args:
+ token: string or instance of a ClientLoginToken.
+ """
+ if not isinstance(token, gdata.auth.ClientLoginToken):
+ token_string = token
+ token = gdata.auth.ClientLoginToken()
+ token.set_token_string(token_string)
+
+ if not token.scopes:
+ if scopes is None:
+ scopes = lookup_scopes(self.service)
+ if scopes is None:
+ scopes = [atom.token_store.SCOPE_ALL]
+ token.scopes = scopes
+ if self.auto_set_current_token:
+ self.current_token = token
+ if self.auto_store_tokens:
+ self.token_store.add_token(token)
+
+ # Private methods to create the source property.
+ def __GetSource(self):
+ return self.__source
+
+ def __SetSource(self, new_source):
+ self.__source = new_source
+ # Update the UserAgent header to include the new application name.
+ self.additional_headers['User-Agent'] = atom.http_interface.USER_AGENT % (
+ self.__source,)
+
+ source = property(__GetSource, __SetSource,
+ doc="""The source is the name of the application making the request.
+ It should be in the form company_id-app_name-app_version""")
+
+ # Authentication operations
+
+ def ProgrammaticLogin(self, captcha_token=None, captcha_response=None):
+ """Authenticates the user and sets the GData Auth token.
+
+    Login retrieves a temporary auth token which must be used with all
+ requests to GData services. The auth token is stored in the GData client
+ object.
+
+ Login is also used to respond to a captcha challenge. If the user's login
+ attempt failed with a CaptchaRequired error, the user can respond by
+ calling Login with the captcha token and the answer to the challenge.
+
+ Args:
+ captcha_token: string (optional) The identifier for the captcha challenge
+ which was presented to the user.
+      captcha_response: string (optional) The user's answer to the captcha
+ challenge.
+
+ Raises:
+ CaptchaRequired if the login service will require a captcha response
+ BadAuthentication if the login service rejected the username or password
+ Error if the login service responded with a 403 different from the above
+ """
+ request_body = gdata.auth.generate_client_login_request_body(self.email,
+ self.password, self.service, self.source, self.account_type,
+ captcha_token, captcha_response)
+
+ # If the user has defined their own authentication service URL,
+ # send the ClientLogin requests to this URL:
+ if not self.auth_service_url:
+ auth_request_url = AUTH_SERVER_HOST + '/accounts/ClientLogin'
+ else:
+ auth_request_url = self.auth_service_url
+
+ auth_response = self.http_client.request('POST', auth_request_url,
+ data=request_body,
+ headers={'Content-Type':'application/x-www-form-urlencoded'})
+ response_body = auth_response.read()
+
+ if auth_response.status == 200:
+ # TODO: insert the token into the token_store directly.
+ self.SetClientLoginToken(
+ gdata.auth.get_client_login_token(response_body))
+ self.__captcha_token = None
+ self.__captcha_url = None
+
+ elif auth_response.status == 403:
+ # Examine each line to find the error type and the captcha token and
+      # captcha URL if they are present.
+ captcha_parameters = gdata.auth.get_captcha_challenge(response_body,
+ captcha_base_url='%s/accounts/' % AUTH_SERVER_HOST)
+ if captcha_parameters:
+ self.__captcha_token = captcha_parameters['token']
+ self.__captcha_url = captcha_parameters['url']
+ raise CaptchaRequired, 'Captcha Required'
+ elif response_body.splitlines()[0] == 'Error=BadAuthentication':
+ self.__captcha_token = None
+ self.__captcha_url = None
+ raise BadAuthentication, 'Incorrect username or password'
+ else:
+ self.__captcha_token = None
+ self.__captcha_url = None
+ raise Error, 'Server responded with a 403 code'
+ elif auth_response.status == 302:
+ self.__captcha_token = None
+ self.__captcha_url = None
+ # Google tries to redirect all bad URLs back to
+      # http://www.google.<locale>.com. If a redirect
+      # attempt is made, assume the user has supplied an incorrect
+      # authentication URL.
+ raise BadAuthenticationServiceURL, 'Server responded with a 302 code.'
+
+ def ClientLogin(self, username, password, account_type=None, service=None,
+ auth_service_url=None, source=None, captcha_token=None,
+ captcha_response=None):
+ """Convenience method for authenticating using ProgrammaticLogin.
+
+ Sets values for email, password, and other optional members.
+
+ Args:
+ username:
+ password:
+ account_type: string (optional)
+ service: string (optional)
+ auth_service_url: string (optional)
+ captcha_token: string (optional)
+ captcha_response: string (optional)
+ """
+ self.email = username
+ self.password = password
+
+ if account_type:
+ self.account_type = account_type
+ if service:
+ self.service = service
+ if source:
+ self.source = source
+ if auth_service_url:
+ self.auth_service_url = auth_service_url
+
+ self.ProgrammaticLogin(captcha_token, captcha_response)
+
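+  # Minimal usage sketch (placeholder credentials; `answer` stands in for the
+  # user's reply to the captcha challenge):
+  #
+  #   try:
+  #     service.ClientLogin('user@example.com', 'password', source='example-app')
+  #   except CaptchaRequired:
+  #     # Show service.captcha_url to the user, then retry with the answer.
+  #     service.ClientLogin('user@example.com', 'password', source='example-app',
+  #                         captcha_token=service.captcha_token,
+  #                         captcha_response=answer)
+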
+ def GenerateAuthSubURL(self, next, scope, secure=False, session=True,
+ domain='default'):
+ """Generate a URL at which the user will login and be redirected back.
+
+ Users enter their credentials on a Google login page and a token is sent
+ to the URL specified in next. See documentation for AuthSub login at:
+ http://code.google.com/apis/accounts/docs/AuthSub.html
+
+ Args:
+ next: string The URL user will be sent to after logging in.
+ scope: string or list of strings. The URLs of the services to be
+ accessed.
+ secure: boolean (optional) Determines whether or not the issued token
+ is a secure token.
+ session: boolean (optional) Determines whether or not the issued token
+ can be upgraded to a session token.
+ """
+ if not isinstance(scope, (list, tuple)):
+ scope = (scope,)
+ return gdata.auth.generate_auth_sub_url(next, scope, secure=secure,
+ session=session,
+ request_url='%s/accounts/AuthSubRequest' % AUTH_SERVER_HOST,
+ domain=domain)
+
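+  # Illustrative AuthSub flow sketch (the URLs and the returned single-use
+  # token string are placeholders):
+  #
+  #   print service.GenerateAuthSubURL('http://www.example.com/welcome',
+  #                                    'http://www.google.com/calendar/feeds/')
+  #   # ...user logs in; the single-use token comes back on the `next` URL...
+  #   service.SetAuthSubToken(single_use_token_string,
+  #                           scopes=['http://www.google.com/calendar/feeds/'])
+  #   service.UpgradeToSessionToken()
+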
+ def UpgradeToSessionToken(self, token=None):
+ """Upgrades a single use AuthSub token to a session token.
+
+ Args:
+ token: A gdata.auth.AuthSubToken or gdata.auth.SecureAuthSubToken
+ (optional) which is good for a single use but can be upgraded
+ to a session token. If no token is passed in, the token
+ is found by looking in the token_store by looking for a token
+ for the current scope.
+
+ Raises:
+ NonAuthSubToken if the user's auth token is not an AuthSub token
+ TokenUpgradeFailed if the server responded to the request with an
+ error.
+ """
+ if token is None:
+ scopes = lookup_scopes(self.service)
+ if scopes:
+ token = self.token_store.find_token(scopes[0])
+ else:
+ token = self.token_store.find_token(atom.token_store.SCOPE_ALL)
+ if not isinstance(token, gdata.auth.AuthSubToken):
+ raise NonAuthSubToken
+
+ self.SetAuthSubToken(self.upgrade_to_session_token(token))
+
+ def upgrade_to_session_token(self, token):
+ """Upgrades a single use AuthSub token to a session token.
+
+ Args:
+ token: A gdata.auth.AuthSubToken or gdata.auth.SecureAuthSubToken
+ which is good for a single use but can be upgraded to a
+ session token.
+
+ Returns:
+ The upgraded token as a gdata.auth.AuthSubToken object.
+
+ Raises:
+ TokenUpgradeFailed if the server responded to the request with an
+ error.
+ """
+ response = token.perform_request(self.http_client, 'GET',
+ AUTH_SERVER_HOST + '/accounts/AuthSubSessionToken',
+ headers={'Content-Type':'application/x-www-form-urlencoded'})
+ response_body = response.read()
+ if response.status == 200:
+ token.set_token_string(
+ gdata.auth.token_from_http_body(response_body))
+ return token
+ else:
+ raise TokenUpgradeFailed({'status': response.status,
+ 'reason': 'Non 200 response on upgrade',
+ 'body': response_body})
+
+ def RevokeAuthSubToken(self):
+ """Revokes an existing AuthSub token.
+
+ Raises:
+ NonAuthSubToken if the user's auth token is not an AuthSub token
+ """
+ scopes = lookup_scopes(self.service)
+ token = self.token_store.find_token(scopes[0])
+ if not isinstance(token, gdata.auth.AuthSubToken):
+ raise NonAuthSubToken
+
+ response = token.perform_request(self.http_client, 'GET',
+ AUTH_SERVER_HOST + '/accounts/AuthSubRevokeToken',
+ headers={'Content-Type':'application/x-www-form-urlencoded'})
+ if response.status == 200:
+ self.token_store.remove_token(token)
+
+ def AuthSubTokenInfo(self):
+ """Fetches the AuthSub token's metadata from the server.
+
+ Raises:
+ NonAuthSubToken if the user's auth token is not an AuthSub token
+ """
+ scopes = lookup_scopes(self.service)
+ token = self.token_store.find_token(scopes[0])
+ if not isinstance(token, gdata.auth.AuthSubToken):
+ raise NonAuthSubToken
+
+ response = token.perform_request(self.http_client, 'GET',
+ AUTH_SERVER_HOST + '/accounts/AuthSubTokenInfo',
+ headers={'Content-Type':'application/x-www-form-urlencoded'})
+ result_body = response.read()
+ if response.status == 200:
+ return result_body
+ else:
+ raise RequestError, {'status': response.status,
+ 'body': result_body}
+
+ def GetWithRetries(self, uri, extra_headers=None, redirects_remaining=4,
+ encoding='UTF-8', converter=None, num_retries=DEFAULT_NUM_RETRIES,
+ delay=DEFAULT_DELAY, backoff=DEFAULT_BACKOFF, logger=None):
+ """This is a wrapper method for Get with retrying capability.
+
+ To avoid various errors while retrieving bulk entities by retrying
+ specified times.
+
+ Note this method relies on the time module and so may not be usable
+ by default in Python2.2.
+
+ Args:
+ num_retries: Integer; the retry count.
+ delay: Integer; the initial delay for retrying.
+ backoff: Integer; how much the delay should lengthen after each failure.
+ logger: An object which has a debug(str) method to receive logging
+ messages. Recommended that you pass in the logging module.
+ Raises:
+ ValueError if any of the parameters has an invalid value.
+ RanOutOfTries on failure after number of retries.
+ """
+ # Moved import for time module inside this method since time is not a
+ # default module in Python2.2. This method will not be usable in
+ # Python2.2.
+ import time
+ if backoff <= 1:
+ raise ValueError("backoff must be greater than 1")
+ num_retries = int(num_retries)
+
+ if num_retries < 0:
+ raise ValueError("num_retries must be 0 or greater")
+
+ if delay <= 0:
+ raise ValueError("delay must be greater than 0")
+
+ # Let's start
+ mtries, mdelay = num_retries, delay
+ while mtries > 0:
+ if mtries != num_retries:
+ if logger:
+ logger.debug("Retrying: %s" % uri)
+ try:
+ rv = self.Get(uri, extra_headers=extra_headers,
+ redirects_remaining=redirects_remaining,
+ encoding=encoding, converter=converter)
+ except SystemExit:
+ # Allow this error
+ raise
+ except RequestError, e:
+ # Error 500 is 'internal server error' and warrants a retry
+ # Error 503 is 'service unavailable' and warrants a retry
+ if e[0]['status'] not in [500, 503]:
+ raise e
+ # Else, fall through to the retry code...
+ except Exception, e:
+ if logger:
+ logger.debug(e)
+ # Fall through to the retry code...
+ else:
+ # This is the right path.
+ return rv
+ mtries -= 1
+ time.sleep(mdelay)
+ mdelay *= backoff
+ raise RanOutOfTries('Ran out of tries.')
+
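+  # Usage sketch (illustrative; the query URI is an example and `service` is
+  # an authenticated GDataService instance):
+  #
+  #   import logging
+  #   feed = service.GetWithRetries(
+  #       '/base/feeds/snippets?bq=digital+camera',
+  #       num_retries=3, delay=1, backoff=2, logger=logging)
+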
+ # CRUD operations
+ def Get(self, uri, extra_headers=None, redirects_remaining=4,
+ encoding='UTF-8', converter=None):
+ """Query the GData API with the given URI
+
+ The uri is the portion of the URI after the server value
+ (ex: www.google.com).
+
+ To perform a query against Google Base, set the server to
+ 'base.google.com' and set the uri to '/base/feeds/...', where ... is
+ your query. For example, to find snippets for all digital cameras uri
+ should be set to: '/base/feeds/snippets?bq=digital+camera'
+
+ Args:
+ uri: string The query in the form of a URI. Example:
+ '/base/feeds/snippets?bq=digital+camera'.
+ extra_headers: dictionary (optional) Extra HTTP headers to be included
+ in the GET request. These headers are in addition to
+ those stored in the client's additional_headers property.
+ The client automatically sets the Content-Type and
+ Authorization headers.
+ redirects_remaining: int (optional) Tracks the number of additional
+ redirects this method will allow. If the service object receives
+ a redirect and remaining is 0, it will not follow the redirect.
+ This was added to avoid infinite redirect loops.
+ encoding: string (optional) The character encoding for the server's
+ response. Default is UTF-8
+ converter: func (optional) A function which will transform
+ the server's results before it is returned. Example: use
+ GDataFeedFromString to parse the server response as if it
+ were a GDataFeed.
+
+ Returns:
+      If no converter was specified in the call, a GDataFeed or GDataEntry
+      depending on which is sent from the server. If the response is
+      neither a feed nor an entry and there is no converter, the body of
+      the response is returned as a string. If a converter was specified,
+      the return value is the result of running the converter on the
+      response body.
+ """
+
+ if extra_headers is None:
+ extra_headers = {}
+
+ if self.__gsessionid is not None:
+ if uri.find('gsessionid=') < 0:
+ if uri.find('?') > -1:
+ uri += '&gsessionid=%s' % (self.__gsessionid,)
+ else:
+ uri += '?gsessionid=%s' % (self.__gsessionid,)
+
+ server_response = self.request('GET', uri,
+ headers=extra_headers)
+ result_body = server_response.read()
+
+ if server_response.status == 200:
+ if converter:
+ return converter(result_body)
+      # No converter was specified, so try to convert the
+      # server's response into a GDataFeed.
+ feed = gdata.GDataFeedFromString(result_body)
+ if not feed:
+ # If conversion to a GDataFeed failed, try to convert the server's
+ # response to a GDataEntry.
+ entry = gdata.GDataEntryFromString(result_body)
+ if not entry:
+ # The server's response wasn't a feed, or an entry, so return the
+ # response body as a string.
+ return result_body
+ return entry
+ return feed
+ elif server_response.status == 302:
+ if redirects_remaining > 0:
+ location = (server_response.getheader('Location')
+ or server_response.getheader('location'))
+ if location is not None:
+ m = re.compile('[\?\&]gsessionid=(\w*)').search(location)
+ if m is not None:
+ self.__gsessionid = m.group(1)
+ return GDataService.Get(self, location, extra_headers, redirects_remaining - 1,
+ encoding=encoding, converter=converter)
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': '302 received without Location header',
+ 'body': result_body}
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': 'Redirect received, but redirects_remaining <= 0',
+ 'body': result_body}
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': server_response.reason, 'body': result_body}
+
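+  # Usage sketch (illustrative; the query URIs are examples and `service` is
+  # an authenticated GDataService instance):
+  #
+  #   feed = service.Get('/base/feeds/snippets?bq=digital+camera')
+  #   entry = service.Get('/base/feeds/snippets/1234567890',
+  #                       converter=gdata.GDataEntryFromString)
+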
+ def GetMedia(self, uri, extra_headers=None):
+ """Returns a MediaSource containing media and its metadata from the given
+ URI string.
+ """
+ response_handle = self.request('GET', uri,
+ headers=extra_headers)
+ return gdata.MediaSource(response_handle, response_handle.getheader(
+ 'Content-Type'),
+ response_handle.getheader('Content-Length'))
+
+ def GetEntry(self, uri, extra_headers=None):
+ """Query the GData API with the given URI and receive an Entry.
+
+ See also documentation for gdata.service.Get
+
+ Args:
+ uri: string The query in the form of a URI. Example:
+ '/base/feeds/snippets?bq=digital+camera'.
+ extra_headers: dictionary (optional) Extra HTTP headers to be included
+ in the GET request. These headers are in addition to
+ those stored in the client's additional_headers property.
+ The client automatically sets the Content-Type and
+ Authorization headers.
+
+ Returns:
+ A GDataEntry built from the XML in the server's response.
+ """
+
+ result = GDataService.Get(self, uri, extra_headers,
+ converter=atom.EntryFromString)
+ if isinstance(result, atom.Entry):
+ return result
+ else:
+ raise UnexpectedReturnType, 'Server did not send an entry'
+
+ def GetFeed(self, uri, extra_headers=None,
+ converter=gdata.GDataFeedFromString):
+ """Query the GData API with the given URI and receive a Feed.
+
+ See also documentation for gdata.service.Get
+
+ Args:
+ uri: string The query in the form of a URI. Example:
+ '/base/feeds/snippets?bq=digital+camera'.
+ extra_headers: dictionary (optional) Extra HTTP headers to be included
+ in the GET request. These headers are in addition to
+ those stored in the client's additional_headers property.
+ The client automatically sets the Content-Type and
+ Authorization headers.
+
+ Returns:
+ A GDataFeed built from the XML in the server's response.
+ """
+
+ result = GDataService.Get(self, uri, extra_headers, converter=converter)
+ if isinstance(result, atom.Feed):
+ return result
+ else:
+ raise UnexpectedReturnType, 'Server did not send a feed'
+
+ def GetNext(self, feed):
+ """Requests the next 'page' of results in the feed.
+
+ This method uses the feed's next link to request an additional feed
+ and uses the class of the feed to convert the results of the GET request.
+
+ Args:
+ feed: atom.Feed or a subclass. The feed should contain a next link and
+ the type of the feed will be applied to the results from the
+ server. The new feed which is returned will be of the same class
+ as this feed which was passed in.
+
+ Returns:
+ A new feed representing the next set of results in the server's feed.
+ The type of this feed will match that of the feed argument.
+ """
+ next_link = feed.GetNextLink()
+ # Create a closure which will convert an XML string to the class of
+ # the feed object passed in.
+ def ConvertToFeedClass(xml_string):
+ return atom.CreateClassFromXMLString(feed.__class__, xml_string)
+    # Make a GET request on the next link and use the above closure as the
+    # converter which processes the XML string from the server.
+ if next_link and next_link.href:
+ return GDataService.Get(self, next_link.href,
+ converter=ConvertToFeedClass)
+ else:
+ return None
+
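+  # Pagination sketch (illustrative; the query URI is an example): GetNext
+  # walks each page of results until the feed has no next link:
+  #
+  #   feed = service.GetFeed('/base/feeds/snippets?bq=digital+camera')
+  #   while feed is not None:
+  #     for entry in feed.entry:
+  #       print entry.title.text
+  #     feed = service.GetNext(feed)
+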
+ def Post(self, data, uri, extra_headers=None, url_params=None,
+ escape_params=True, redirects_remaining=4, media_source=None,
+ converter=None):
+ """Insert or update data into a GData service at the given URI.
+
+ Args:
+ data: string, ElementTree._Element, atom.Entry, or gdata.GDataEntry The
+ XML to be sent to the uri.
+ uri: string The location (feed) to which the data should be inserted.
+ Example: '/base/feeds/items'.
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type,
+ Authorization, and Content-Length headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+ media_source: MediaSource (optional) Container for the media to be sent
+ along with the entry, if provided.
+ converter: func (optional) A function which will be executed on the
+ server's response. Often this is a function like
+ GDataEntryFromString which will parse the body of the server's
+ response and return a GDataEntry.
+
+ Returns:
+ If the post succeeded, this method will return a GDataFeed, GDataEntry,
+ or the results of running converter on the server's result body (if
+ converter was specified).
+ """
+ return GDataService.PostOrPut(self, 'POST', data, uri,
+ extra_headers=extra_headers, url_params=url_params,
+ escape_params=escape_params, redirects_remaining=redirects_remaining,
+ media_source=media_source, converter=converter)
+
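+  # Insertion sketch (illustrative; the feed path and `new_entry` are
+  # placeholders):
+  #
+  #   created = service.Post(new_entry, '/base/feeds/items',
+  #                          converter=gdata.GDataEntryFromString)
+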
+ def PostOrPut(self, verb, data, uri, extra_headers=None, url_params=None,
+ escape_params=True, redirects_remaining=4, media_source=None,
+ converter=None):
+ """Insert data into a GData service at the given URI.
+
+ Args:
+ verb: string, either 'POST' or 'PUT'
+ data: string, ElementTree._Element, atom.Entry, or gdata.GDataEntry The
+ XML to be sent to the uri.
+ uri: string The location (feed) to which the data should be inserted.
+ Example: '/base/feeds/items'.
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type,
+ Authorization, and Content-Length headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+ media_source: MediaSource (optional) Container for the media to be sent
+ along with the entry, if provided.
+ converter: func (optional) A function which will be executed on the
+ server's response. Often this is a function like
+ GDataEntryFromString which will parse the body of the server's
+ response and return a GDataEntry.
+
+ Returns:
+ If the post succeeded, this method will return a GDataFeed, GDataEntry,
+ or the results of running converter on the server's result body (if
+ converter was specified).
+ """
+ if extra_headers is None:
+ extra_headers = {}
+
+ if self.__gsessionid is not None:
+ if uri.find('gsessionid=') < 0:
+ if url_params is None:
+ url_params = {}
+ url_params['gsessionid'] = self.__gsessionid
+
+ if data and media_source:
+ if ElementTree.iselement(data):
+ data_str = ElementTree.tostring(data)
+ else:
+ data_str = str(data)
+
+ multipart = []
+ multipart.append('Media multipart posting\r\n--END_OF_PART\r\n' + \
+ 'Content-Type: application/atom+xml\r\n\r\n')
+ multipart.append('\r\n--END_OF_PART\r\nContent-Type: ' + \
+ media_source.content_type+'\r\n\r\n')
+ multipart.append('\r\n--END_OF_PART--\r\n')
+
+ extra_headers['MIME-version'] = '1.0'
+ extra_headers['Content-Length'] = str(len(multipart[0]) +
+ len(multipart[1]) + len(multipart[2]) +
+ len(data_str) + media_source.content_length)
+
+ extra_headers['Content-Type'] = 'multipart/related; boundary=END_OF_PART'
+ server_response = self.request(verb, uri,
+ data=[multipart[0], data_str, multipart[1], media_source.file_handle,
+ multipart[2]], headers=extra_headers, url_params=url_params)
+ result_body = server_response.read()
+
+ elif media_source or isinstance(data, gdata.MediaSource):
+ if isinstance(data, gdata.MediaSource):
+ media_source = data
+ extra_headers['Content-Length'] = str(media_source.content_length)
+ extra_headers['Content-Type'] = media_source.content_type
+ server_response = self.request(verb, uri,
+ data=media_source.file_handle, headers=extra_headers,
+ url_params=url_params)
+ result_body = server_response.read()
+
+ else:
+ http_data = data
+ if 'Content-Type' not in extra_headers:
+ content_type = 'application/atom+xml'
+ extra_headers['Content-Type'] = content_type
+ server_response = self.request(verb, uri, data=http_data,
+ headers=extra_headers, url_params=url_params)
+ result_body = server_response.read()
+
+ # Server returns 201 for most post requests, but when performing a batch
+ # request the server responds with a 200 on success.
+ if server_response.status == 201 or server_response.status == 200:
+ if converter:
+ return converter(result_body)
+ feed = gdata.GDataFeedFromString(result_body)
+ if not feed:
+ entry = gdata.GDataEntryFromString(result_body)
+ if not entry:
+ return result_body
+ return entry
+ return feed
+ elif server_response.status == 302:
+ if redirects_remaining > 0:
+ location = (server_response.getheader('Location')
+ or server_response.getheader('location'))
+ if location is not None:
+ m = re.compile('[\?\&]gsessionid=(\w*)').search(location)
+ if m is not None:
+ self.__gsessionid = m.group(1)
+ return GDataService.PostOrPut(self, verb, data, location,
+ extra_headers, url_params, escape_params,
+ redirects_remaining - 1, media_source, converter=converter)
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': '302 received without Location header',
+ 'body': result_body}
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': 'Redirect received, but redirects_remaining <= 0',
+ 'body': result_body}
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': server_response.reason, 'body': result_body}
+
+ def Put(self, data, uri, extra_headers=None, url_params=None,
+ escape_params=True, redirects_remaining=3, media_source=None,
+ converter=None):
+ """Updates an entry at the given URI.
+
+ Args:
+ data: string, ElementTree._Element, or xml_wrapper.ElementWrapper The
+ XML containing the updated data.
+ uri: string A URI indicating entry to which the update will be applied.
+ Example: '/base/feeds/items/ITEM-ID'
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type,
+ Authorization, and Content-Length headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+ converter: func (optional) A function which will be executed on the
+ server's response. Often this is a function like
+ GDataEntryFromString which will parse the body of the server's
+ response and return a GDataEntry.
+
+ Returns:
+ If the put succeeded, this method will return a GDataFeed, GDataEntry,
+ or the results of running converter on the server's result body (if
+ converter was specified).
+ """
+ return GDataService.PostOrPut(self, 'PUT', data, uri,
+ extra_headers=extra_headers, url_params=url_params,
+ escape_params=escape_params, redirects_remaining=redirects_remaining,
+ media_source=media_source, converter=converter)
+
+ def Delete(self, uri, extra_headers=None, url_params=None,
+ escape_params=True, redirects_remaining=4):
+ """Deletes the entry at the given URI.
+
+ Args:
+ uri: string The URI of the entry to be deleted. Example:
+ '/base/feeds/items/ITEM-ID'
+ extra_headers: dict (optional) HTTP headers which are to be included.
+ The client automatically sets the Content-Type and
+ Authorization headers.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the URI. These are translated into query arguments
+ in the form '&dict_key=value&...'.
+ Example: {'max-results': '250'} becomes &max-results=250
+ escape_params: boolean (optional) If false, the calling code has already
+ ensured that the query will form a valid URL (all
+ reserved characters have been escaped). If true, this
+ method will escape the query and any URL parameters
+ provided.
+
+ Returns:
+ True if the entry was deleted.
+ """
+ if extra_headers is None:
+ extra_headers = {}
+
+ if self.__gsessionid is not None:
+ if uri.find('gsessionid=') < 0:
+ if url_params is None:
+ url_params = {}
+ url_params['gsessionid'] = self.__gsessionid
+
+ server_response = self.request('DELETE', uri,
+ headers=extra_headers, url_params=url_params)
+ result_body = server_response.read()
+
+ if server_response.status == 200:
+ return True
+ elif server_response.status == 302:
+ if redirects_remaining > 0:
+ location = (server_response.getheader('Location')
+ or server_response.getheader('location'))
+ if location is not None:
+ m = re.compile('[\?\&]gsessionid=(\w*)').search(location)
+ if m is not None:
+ self.__gsessionid = m.group(1)
+ return GDataService.Delete(self, location, extra_headers,
+ url_params, escape_params, redirects_remaining - 1)
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': '302 received without Location header',
+ 'body': result_body}
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': 'Redirect received, but redirects_remaining <= 0',
+ 'body': result_body}
+ else:
+ raise RequestError, {'status': server_response.status,
+ 'reason': server_response.reason, 'body': result_body}
+
+
+def ExtractToken(url, scopes_included_in_next=True):
+ """Gets the AuthSub token from the current page's URL.
+
+ Designed to be used on the URL that the browser is sent to after the user
+ authorizes this application at the page given by GenerateAuthSubRequestUrl.
+
+ Args:
+ url: The current page's URL. It should contain the token as a URL
+ parameter. Example: 'http://example.com/?...&token=abcd435'
+ scopes_included_in_next: If True, this function looks for a scope value
+ associated with the token. The scope is a URL parameter with the
+ key set to SCOPE_URL_PARAM_NAME. This parameter should be present
+ if the AuthSub request URL was generated using
+ GenerateAuthSubRequestUrl with include_scope_in_next set to True.
+
+ Returns:
+ A tuple containing the token string and a list of scope strings for which
+ this token should be valid. If the scope was not included in the URL, the
+ tuple will contain (token, None).
+ """
+ parsed = urlparse.urlparse(url)
+ token = gdata.auth.AuthSubTokenFromUrl(parsed[4])
+ scopes = ''
+ if scopes_included_in_next:
+ for pair in parsed[4].split('&'):
+ if pair.startswith('%s=' % SCOPE_URL_PARAM_NAME):
+ scopes = urllib.unquote_plus(pair.split('=')[1])
+ return (token, scopes.split(' '))
+
+
+def GenerateAuthSubRequestUrl(next, scopes, hd='default', secure=False,
+ session=True, request_url='https://www.google.com/accounts/AuthSubRequest',
+ include_scopes_in_next=True):
+ """Creates a URL to request an AuthSub token to access Google services.
+
+ For more details on AuthSub, see the documentation here:
+ http://code.google.com/apis/accounts/docs/AuthSub.html
+
+ Args:
+ next: The URL where the browser should be sent after the user authorizes
+ the application. This page is responsible for receiving the token
+          which is embedded in the URL as a parameter.
+ scopes: The base URL to which access will be granted. Example:
+ 'http://www.google.com/calendar/feeds' will grant access to all
+ URLs in the Google Calendar data API. If you would like a token for
+ multiple scopes, pass in a list of URL strings.
+ hd: The domain to which the user's account belongs. This is set to the
+      domain name if you are using Google Apps. Example: 'example.org'.
+      Defaults to 'default'.
+ secure: If set to True, all requests should be signed. The default is
+ False.
+ session: If set to True, the token received by the 'next' URL can be
+ upgraded to a multiuse session token. If session is set to False, the
+ token may only be used once and cannot be upgraded. Default is True.
+ request_url: The base of the URL to which the user will be sent to
+ authorize this application to access their data. The default is
+ 'https://www.google.com/accounts/AuthSubRequest'.
+    include_scopes_in_next: Boolean (optional) If set to True, the 'next'
+      parameter will be modified to include the requested scope as a URL
+      parameter. The key for the scope parameter will be
+      SCOPE_URL_PARAM_NAME. The benefit of including the scope URL as a
+      parameter to the next URL is that the page which receives the AuthSub
+      token will be able to tell which URLs the token grants access to.
+
+ Returns:
+ A URL string to which the browser should be sent.
+ """
+ if isinstance(scopes, list):
+ scope = ' '.join(scopes)
+ else:
+ scope = scopes
+ if include_scopes_in_next:
+ if next.find('?') > -1:
+ next += '&%s' % urllib.urlencode({SCOPE_URL_PARAM_NAME:scope})
+ else:
+ next += '?%s' % urllib.urlencode({SCOPE_URL_PARAM_NAME:scope})
+ return gdata.auth.GenerateAuthSubUrl(next=next, scope=scope, secure=secure,
+ session=session, request_url=request_url, domain=hd)
+
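+# AuthSub request sketch (illustrative; the URLs are placeholders):
+#
+#   request_url = GenerateAuthSubRequestUrl(
+#       'http://www.example.com/welcome',
+#       'http://www.google.com/calendar/feeds')
+#   # Redirect the user's browser to request_url; after authorization the
+#   # browser returns to the 'next' page, whose full URL is handed to
+#   # ExtractToken (defined above).
+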
+
+class Query(dict):
+ """Constructs a query URL to be used in GET requests
+
+  URL parameters are created by adding key-value pairs to this object as a
+ dict. For example, to add &max-results=25 to the URL do
+ my_query['max-results'] = 25
+
+ Category queries are created by adding category strings to the categories
+ member. All items in the categories list will be concatenated with the /
+ symbol (symbolizing a category x AND y restriction). If you would like to OR
+  two categories, append them as one string with a | between them. For
+  example, query.categories.append('Fritz|Laurie') creates a query path
+  like feed/-/Fritz%7CLaurie, which will return results in either category.
+ """
+
+ def __init__(self, feed=None, text_query=None, params=None,
+ categories=None):
+ """Constructor for Query
+
+ Args:
+ feed: str (optional) The path for the feed (Examples:
+        '/base/feeds/snippets' or 'calendar/feeds/jo@gmail.com/private/full').
+ text_query: str (optional) The contents of the q query parameter. The
+ contents of the text_query are URL escaped upon conversion to a URI.
+ params: dict (optional) Parameter value string pairs which become URL
+ params when translated to a URI. These parameters are added to the
+ query's items (key-value pairs).
+ categories: list (optional) List of category strings which should be
+ included as query categories. See
+ http://code.google.com/apis/gdata/reference.html#Queries for
+ details. If you want to get results from category A or B (both
+ categories), specify a single list item 'A|B'.
+ """
+
+ self.feed = feed
+ self.categories = []
+ if text_query:
+ self.text_query = text_query
+ if isinstance(params, dict):
+ for param in params:
+ self[param] = params[param]
+ if isinstance(categories, list):
+ for category in categories:
+ self.categories.append(category)
+
+ def _GetTextQuery(self):
+ if 'q' in self.keys():
+ return self['q']
+ else:
+ return None
+
+ def _SetTextQuery(self, query):
+ self['q'] = query
+
+ text_query = property(_GetTextQuery, _SetTextQuery,
+ doc="""The feed query's q parameter""")
+
+ def _GetAuthor(self):
+ if 'author' in self.keys():
+ return self['author']
+ else:
+ return None
+
+ def _SetAuthor(self, query):
+ self['author'] = query
+
+ author = property(_GetAuthor, _SetAuthor,
+ doc="""The feed query's author parameter""")
+
+ def _GetAlt(self):
+ if 'alt' in self.keys():
+ return self['alt']
+ else:
+ return None
+
+ def _SetAlt(self, query):
+ self['alt'] = query
+
+ alt = property(_GetAlt, _SetAlt,
+ doc="""The feed query's alt parameter""")
+
+ def _GetUpdatedMin(self):
+ if 'updated-min' in self.keys():
+ return self['updated-min']
+ else:
+ return None
+
+ def _SetUpdatedMin(self, query):
+ self['updated-min'] = query
+
+ updated_min = property(_GetUpdatedMin, _SetUpdatedMin,
+ doc="""The feed query's updated-min parameter""")
+
+ def _GetUpdatedMax(self):
+ if 'updated-max' in self.keys():
+ return self['updated-max']
+ else:
+ return None
+
+ def _SetUpdatedMax(self, query):
+ self['updated-max'] = query
+
+ updated_max = property(_GetUpdatedMax, _SetUpdatedMax,
+ doc="""The feed query's updated-max parameter""")
+
+ def _GetPublishedMin(self):
+ if 'published-min' in self.keys():
+ return self['published-min']
+ else:
+ return None
+
+ def _SetPublishedMin(self, query):
+ self['published-min'] = query
+
+ published_min = property(_GetPublishedMin, _SetPublishedMin,
+ doc="""The feed query's published-min parameter""")
+
+ def _GetPublishedMax(self):
+ if 'published-max' in self.keys():
+ return self['published-max']
+ else:
+ return None
+
+ def _SetPublishedMax(self, query):
+ self['published-max'] = query
+
+ published_max = property(_GetPublishedMax, _SetPublishedMax,
+ doc="""The feed query's published-max parameter""")
+
+ def _GetStartIndex(self):
+ if 'start-index' in self.keys():
+ return self['start-index']
+ else:
+ return None
+
+ def _SetStartIndex(self, query):
+ if not isinstance(query, str):
+ query = str(query)
+ self['start-index'] = query
+
+ start_index = property(_GetStartIndex, _SetStartIndex,
+ doc="""The feed query's start-index parameter""")
+
+ def _GetMaxResults(self):
+ if 'max-results' in self.keys():
+ return self['max-results']
+ else:
+ return None
+
+ def _SetMaxResults(self, query):
+ if not isinstance(query, str):
+ query = str(query)
+ self['max-results'] = query
+
+ max_results = property(_GetMaxResults, _SetMaxResults,
+ doc="""The feed query's max-results parameter""")
+
+ def _GetOrderBy(self):
+ if 'orderby' in self.keys():
+ return self['orderby']
+ else:
+ return None
+
+ def _SetOrderBy(self, query):
+ self['orderby'] = query
+
+ orderby = property(_GetOrderBy, _SetOrderBy,
+ doc="""The feed query's orderby parameter""")
+
+ def ToUri(self):
+ q_feed = self.feed or ''
+ category_string = '/'.join(
+ [urllib.quote_plus(c) for c in self.categories])
+ # Add categories to the feed if there are any.
+ if len(self.categories) > 0:
+ q_feed = q_feed + '/-/' + category_string
+ return atom.service.BuildUri(q_feed, self)
+
+ def __str__(self):
+ return self.ToUri()
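+
+
+# Usage sketch (illustrative; the feed path, category, and `service` instance
+# are placeholders):
+#
+#   query = Query(feed='/base/feeds/snippets', text_query='digital camera')
+#   query['max-results'] = '25'
+#   query.categories.append('products')
+#   result_feed = service.Get(query.ToUri())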
diff --git a/python/gdata/sites/__init__.py b/python/gdata/sites/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/gdata/sites/client.py b/python/gdata/sites/client.py
new file mode 100644
index 0000000..2915fc5
--- /dev/null
+++ b/python/gdata/sites/client.py
@@ -0,0 +1,462 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""SitesClient extends gdata.client.GDClient to streamline Sites API calls."""
+
+
+__author__ = 'e.bidelman (Eric Bidelman)'
+
+import atom.data
+import gdata.client
+import gdata.sites.data
+import gdata.gauth
+
+
+# Feed URI templates
+CONTENT_FEED_TEMPLATE = '/feeds/content/%s/%s/'
+REVISION_FEED_TEMPLATE = '/feeds/revision/%s/%s/'
+ACTIVITY_FEED_TEMPLATE = '/feeds/activity/%s/%s/'
+SITE_FEED_TEMPLATE = '/feeds/site/%s/'
+ACL_FEED_TEMPLATE = '/feeds/acl/site/%s/%s/'
+
+
+class SitesClient(gdata.client.GDClient):
+
+ """Client extension for the Google Sites API service."""
+
+ host = 'sites.google.com' # default server for the API
+ domain = 'site' # default site domain name
+ api_version = '1.1' # default major version for the service.
+ auth_service = 'jotspot'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['jotspot']
+ ssl = True
+
+ def __init__(self, site=None, domain=None, auth_token=None, **kwargs):
+ """Constructs a new client for the Sites API.
+
+ Args:
+ site: string (optional) Name (webspace) of the Google Site
+ domain: string (optional) Domain of the (Google Apps hosted) Site.
+ If no domain is given, the Site is assumed to be a consumer Google
+ Site, in which case the value 'site' is used.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: The other parameters to pass to gdata.client.GDClient
+ constructor.
+ """
+ gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
+ self.site = site
+ if domain is not None:
+ self.domain = domain
+
+ def __make_kind_category(self, label):
+ if label is None:
+ return None
+ return atom.data.Category(
+ scheme=gdata.sites.data.SITES_KIND_SCHEME,
+ term='%s#%s' % (gdata.sites.data.SITES_NAMESPACE, label), label=label)
+
+ __MakeKindCategory = __make_kind_category
+
+ def __upload(self, entry, media_source, auth_token=None, **kwargs):
+ """Uploads an attachment file to the Sites API.
+
+ Args:
+ entry: gdata.sites.data.ContentEntry The Atom XML to include.
+ media_source: gdata.data.MediaSource The file payload to be uploaded.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to gdata.client.post().
+
+ Returns:
+ The created entry.
+ """
+ uri = self.make_content_feed_uri()
+ return self.post(entry, uri, media_source=media_source,
+ auth_token=auth_token, **kwargs)
+
+ def _get_file_content(self, uri):
+ """Fetches the file content from the specified URI.
+
+ Args:
+ uri: string The full URL to fetch the file contents from.
+
+ Returns:
+ The binary file content.
+
+ Raises:
+ gdata.client.RequestError: on error response from server.
+ """
+ server_response = self.request('GET', uri)
+ if server_response.status != 200:
+ raise gdata.client.RequestError, {'status': server_response.status,
+ 'reason': server_response.reason,
+ 'body': server_response.read()}
+ return server_response.read()
+
+ _GetFileContent = _get_file_content
+
+ def make_content_feed_uri(self):
+ return CONTENT_FEED_TEMPLATE % (self.domain, self.site)
+
+ MakeContentFeedUri = make_content_feed_uri
+
+ def make_revision_feed_uri(self):
+ return REVISION_FEED_TEMPLATE % (self.domain, self.site)
+
+ MakeRevisionFeedUri = make_revision_feed_uri
+
+ def make_activity_feed_uri(self):
+ return ACTIVITY_FEED_TEMPLATE % (self.domain, self.site)
+
+ MakeActivityFeedUri = make_activity_feed_uri
+
+ def make_site_feed_uri(self, site_name=None):
+ if site_name is not None:
+ return (SITE_FEED_TEMPLATE % self.domain) + site_name
+ else:
+ return SITE_FEED_TEMPLATE % self.domain
+
+ MakeSiteFeedUri = make_site_feed_uri
+
+ def make_acl_feed_uri(self):
+ return ACL_FEED_TEMPLATE % (self.domain, self.site)
+
+ MakeAclFeedUri = make_acl_feed_uri
+
+ def get_content_feed(self, uri=None, auth_token=None, **kwargs):
+ """Retrieves the content feed containing the current state of site.
+
+ Args:
+ uri: string (optional) A full URI to query the Content feed with.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ gdata.sites.data.ContentFeed
+ """
+ if uri is None:
+ uri = self.make_content_feed_uri()
+ return self.get_feed(uri, desired_class=gdata.sites.data.ContentFeed,
+ auth_token=auth_token, **kwargs)
+
+ GetContentFeed = get_content_feed
+
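+  # Usage sketch (illustrative; the site name and domain are placeholders and
+  # authentication is omitted):
+  #
+  #   client = SitesClient(site='my-site-name', domain='example.com')
+  #   feed = client.GetContentFeed()
+  #   for entry in feed.entry:
+  #     print '%s [%s]' % (entry.title.text, entry.Kind())
+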
+ def get_revision_feed(self, entry_or_uri_or_id, auth_token=None, **kwargs):
+ """Retrieves the revision feed containing the revision history for a node.
+
+ Args:
+ entry_or_uri_or_id: string or gdata.sites.data.ContentEntry A full URI,
+ content entry node ID, or a content entry object of the entry to
+ retrieve revision information for.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ gdata.sites.data.RevisionFeed
+ """
+ uri = self.make_revision_feed_uri()
+ if isinstance(entry_or_uri_or_id, gdata.sites.data.ContentEntry):
+ uri = entry_or_uri_or_id.FindRevisionLink()
+ elif entry_or_uri_or_id.find('/') == -1:
+ uri += entry_or_uri_or_id
+ else:
+ uri = entry_or_uri_or_id
+ return self.get_feed(uri, desired_class=gdata.sites.data.RevisionFeed,
+ auth_token=auth_token, **kwargs)
+
+ GetRevisionFeed = get_revision_feed
+
+ def get_activity_feed(self, uri=None, auth_token=None, **kwargs):
+ """Retrieves the activity feed containing recent Site activity.
+
+ Args:
+ uri: string (optional) A full URI to query the Activity feed.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ gdata.sites.data.ActivityFeed
+ """
+ if uri is None:
+ uri = self.make_activity_feed_uri()
+ return self.get_feed(uri, desired_class=gdata.sites.data.ActivityFeed,
+ auth_token=auth_token, **kwargs)
+
+ GetActivityFeed = get_activity_feed
+
+ def get_site_feed(self, uri=None, auth_token=None, **kwargs):
+ """Retrieves the site feed containing a list of sites a user has access to.
+
+ Args:
+ uri: string (optional) A full URI to query the site feed.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ gdata.sites.data.SiteFeed
+ """
+ if uri is None:
+ uri = self.make_site_feed_uri()
+ return self.get_feed(uri, desired_class=gdata.sites.data.SiteFeed,
+ auth_token=auth_token, **kwargs)
+
+ GetSiteFeed = get_site_feed
+
+ def get_acl_feed(self, uri=None, auth_token=None, **kwargs):
+ """Retrieves the acl feed containing a site's sharing permissions.
+
+ Args:
+ uri: string (optional) A full URI to query the acl feed.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.get_feed().
+
+ Returns:
+ gdata.sites.data.AclFeed
+ """
+ if uri is None:
+ uri = self.make_acl_feed_uri()
+ return self.get_feed(uri, desired_class=gdata.sites.data.AclFeed,
+ auth_token=auth_token, **kwargs)
+
+ GetAclFeed = get_acl_feed
+
+ def create_site(self, title, description=None, source_site=None,
+ theme=None, uri=None, auth_token=None, **kwargs):
+ """Creates a new Google Site.
+
+ Note: This feature is only available to Google Apps domains.
+
+ Args:
+ title: string Title for the site.
+ description: string (optional) A description/summary for the site.
+ source_site: string (optional) The site feed URI of the site to copy.
+ This parameter should only be specified when copying a site.
+ theme: string (optional) The name of the theme to create the site with.
+ uri: string (optional) A full site feed URI to override where the site
+ is created/copied. By default, the site will be created under
+ the currently set domain (e.g. self.domain).
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to gdata.client.post().
+
+ Returns:
+ gdata.sites.data.SiteEntry of the created site.
+ """
+ new_entry = gdata.sites.data.SiteEntry(title=atom.data.Title(text=title))
+
+ if description is not None:
+ new_entry.summary = gdata.sites.data.Summary(text=description)
+
+ # Add the source link if we're making a copy of a site.
+ if source_site is not None:
+ source_link = atom.data.Link(rel=gdata.sites.data.SITES_SOURCE_LINK_REL,
+ type='application/atom+xml',
+ href=source_site)
+ new_entry.link.append(source_link)
+
+ if theme is not None:
+ new_entry.theme = gdata.sites.data.Theme(text=theme)
+
+ if uri is None:
+ uri = self.make_site_feed_uri()
+
+ return self.post(new_entry, uri, auth_token=auth_token, **kwargs)
+
+ CreateSite = create_site
+
+ def create_page(self, kind, title, html='', page_name=None, parent=None,
+ auth_token=None, **kwargs):
+ """Creates a new page (specified by kind) on a Google Site.
+
+ Args:
+ kind: string The type of page/item to create. For example, webpage,
+ listpage, comment, announcementspage, filecabinet, etc. The full list
+        of supported kinds can be found in gdata.sites.data.SUPPORT_KINDS.
+ title: string Title for the page.
+ html: string (optional) XHTML for the page's content body.
+ page_name: string (optional) The URL page name to set. If not set, the
+ title will be normalized and used as the page's URL path.
+ parent: string or gdata.sites.data.ContentEntry (optional) The parent
+ entry or parent link url to create the page under.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to gdata.client.post().
+
+ Returns:
+ gdata.sites.data.ContentEntry of the created page.
+ """
+ new_entry = gdata.sites.data.ContentEntry(
+ title=atom.data.Title(text=title), kind=kind,
+ content=gdata.sites.data.Content(text=html))
+
+ if page_name is not None:
+ new_entry.page_name = gdata.sites.data.PageName(text=page_name)
+
+ # Add parent link to entry if it should be uploaded as a subpage.
+ if isinstance(parent, gdata.sites.data.ContentEntry):
+ parent_link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL,
+ type='application/atom+xml',
+ href=parent.GetSelfLink().href)
+ new_entry.link.append(parent_link)
+ elif parent is not None:
+ parent_link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL,
+ type='application/atom+xml',
+ href=parent)
+ new_entry.link.append(parent_link)
+
+ return self.post(new_entry, self.make_content_feed_uri(),
+ auth_token=auth_token, **kwargs)
+
+ CreatePage = create_page
+
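+  # Page creation sketch (illustrative; the title, HTML, and parent lookup
+  # are placeholders):
+  #
+  #   parent = client.GetContentFeed().GetWebpages()[0]
+  #   new_page = client.CreatePage('webpage', 'Hello world',
+  #                                html='<b>HTML content</b>', parent=parent)
+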
+ def create_webattachment(self, src, content_type, title, parent,
+ description=None, auth_token=None, **kwargs):
+ """Creates a new webattachment within a filecabinet.
+
+ Args:
+ src: string The url of the web attachment.
+ content_type: string The MIME type of the web attachment.
+ title: string The title to name the web attachment.
+ parent: string or gdata.sites.data.ContentEntry (optional) The
+ parent entry or url of the filecabinet to create the attachment under.
+ description: string (optional) A summary/description for the attachment.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to gdata.client.post().
+
+ Returns:
+ gdata.sites.data.ContentEntry of the created page.
+ """
+ new_entry = gdata.sites.data.ContentEntry(
+ title=atom.data.Title(text=title), kind='webattachment',
+ content=gdata.sites.data.Content(src=src, type=content_type))
+
+ if isinstance(parent, gdata.sites.data.ContentEntry):
+ link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL,
+ type='application/atom+xml',
+ href=parent.GetSelfLink().href)
+ elif parent is not None:
+ link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL,
+ type='application/atom+xml', href=parent)
+
+ new_entry.link.append(link)
+
+    # Add a file description if one was specified.
+ if description is not None:
+ new_entry.summary = gdata.sites.data.Summary(type='text',
+ text=description)
+
+ return self.post(new_entry, self.make_content_feed_uri(),
+ auth_token=auth_token, **kwargs)
+
+ CreateWebAttachment = create_webattachment
+
+ def upload_attachment(self, file_handle, parent, content_type=None,
+ title=None, description=None, folder_name=None,
+ auth_token=None, **kwargs):
+ """Uploads an attachment to a parent page.
+
+ Args:
+ file_handle: MediaSource or string A gdata.data.MediaSource object
+ containing the file to be uploaded or the full path name to the
+ file on disk.
+ parent: gdata.sites.data.ContentEntry or string The parent page to
+ upload the file to or the full URI of the entry's self link.
+ content_type: string (optional) The MIME type of the file
+        (e.g. 'application/pdf'). This should be provided if file_handle is
+        not a MediaSource object.
+ title: string (optional) The title to name the attachment. If not
+ included, the filepath or media source's filename is used.
+ description: string (optional) A summary/description for the attachment.
+ folder_name: string (optional) The name of an existing folder to upload
+ the attachment to. This only applies when the parent parameter points
+ to a filecabinet entry.
+ auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
+ OAuthToken which authorizes this client to edit the user's data.
+ kwargs: Other parameters to pass to self.__upload().
+
+ Returns:
+ A gdata.sites.data.ContentEntry containing information about the created
+ attachment.
+ """
+ if isinstance(parent, gdata.sites.data.ContentEntry):
+ link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL,
+ type='application/atom+xml',
+ href=parent.GetSelfLink().href)
+ else:
+ link = atom.data.Link(rel=gdata.sites.data.SITES_PARENT_LINK_REL,
+ type='application/atom+xml',
+ href=parent)
+
+ if not isinstance(file_handle, gdata.data.MediaSource):
+ ms = gdata.data.MediaSource(file_path=file_handle,
+ content_type=content_type)
+ else:
+ ms = file_handle
+
+ # If no title specified, use the file name
+ if title is None:
+ title = ms.file_name
+
+ new_entry = gdata.sites.data.ContentEntry(kind='attachment')
+ new_entry.title = atom.data.Title(text=title)
+ new_entry.link.append(link)
+
+    # Add a file description if one was specified.
+ if description is not None:
+ new_entry.summary = gdata.sites.data.Summary(type='text',
+ text=description)
+
+ # Upload the attachment to a filecabinet folder?
+ if parent.Kind() == 'filecabinet' and folder_name is not None:
+ folder_category = atom.data.Category(
+ scheme=gdata.sites.data.FOLDER_KIND_TERM, term=folder_name)
+ new_entry.category.append(folder_category)
+
+ return self.__upload(new_entry, ms, auth_token=auth_token, **kwargs)
+
+ UploadAttachment = upload_attachment
+
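+  # Attachment upload sketch (illustrative; the file path and title are
+  # placeholders):
+  #
+  #   filecabinet = client.GetContentFeed().GetFileCabinets()[0]
+  #   attachment = client.UploadAttachment(
+  #       '/tmp/report.pdf', filecabinet, content_type='application/pdf',
+  #       title='Monthly report')
+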
+ def download_attachment(self, uri_or_entry, file_path):
+ """Downloads an attachment file to disk.
+
+ Args:
+ uri_or_entry: string The full URL to download the file from.
+ file_path: string The full path to save the file to.
+
+ Raises:
+ gdata.client.RequestError: on error response from server.
+ """
+ uri = uri_or_entry
+ if isinstance(uri_or_entry, gdata.sites.data.ContentEntry):
+ uri = uri_or_entry.content.src
+
+ f = open(file_path, 'wb')
+ try:
+ f.write(self._get_file_content(uri))
+ except gdata.client.RequestError, e:
+ f.close()
+ raise e
+ f.flush()
+ f.close()
+
+ DownloadAttachment = download_attachment
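+
+  # Download sketch (illustrative; the local path is a placeholder):
+  #
+  #   attachments = client.GetContentFeed().GetAttachments()
+  #   if attachments:
+  #     client.DownloadAttachment(attachments[0], '/tmp/attachment.bin')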
diff --git a/python/gdata/sites/data.py b/python/gdata/sites/data.py
new file mode 100644
index 0000000..dc8dfb2
--- /dev/null
+++ b/python/gdata/sites/data.py
@@ -0,0 +1,376 @@
+#!/usr/bin/python
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data model classes for parsing and generating XML for the Sites Data API."""
+
+__author__ = 'e.bidelman (Eric Bidelman)'
+
+
+import atom.core
+import atom.data
+import gdata.acl.data
+import gdata.data
+
+# XML Namespaces used in Google Sites entities.
+SITES_NAMESPACE = 'http://schemas.google.com/sites/2008'
+SITES_TEMPLATE = '{http://schemas.google.com/sites/2008}%s'
+SPREADSHEETS_NAMESPACE = 'http://schemas.google.com/spreadsheets/2006'
+SPREADSHEETS_TEMPLATE = '{http://schemas.google.com/spreadsheets/2006}%s'
+DC_TERMS_TEMPLATE = '{http://purl.org/dc/terms}%s'
+THR_TERMS_TEMPLATE = '{http://purl.org/syndication/thread/1.0}%s'
+XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml'
+XHTML_TEMPLATE = '{http://www.w3.org/1999/xhtml}%s'
+
+SITES_PARENT_LINK_REL = SITES_NAMESPACE + '#parent'
+SITES_REVISION_LINK_REL = SITES_NAMESPACE + '#revision'
+SITES_SOURCE_LINK_REL = SITES_NAMESPACE + '#source'
+
+SITES_KIND_SCHEME = 'http://schemas.google.com/g/2005#kind'
+ANNOUNCEMENT_KIND_TERM = SITES_NAMESPACE + '#announcement'
+ANNOUNCEMENT_PAGE_KIND_TERM = SITES_NAMESPACE + '#announcementspage'
+ATTACHMENT_KIND_TERM = SITES_NAMESPACE + '#attachment'
+COMMENT_KIND_TERM = SITES_NAMESPACE + '#comment'
+FILECABINET_KIND_TERM = SITES_NAMESPACE + '#filecabinet'
+LISTITEM_KIND_TERM = SITES_NAMESPACE + '#listitem'
+LISTPAGE_KIND_TERM = SITES_NAMESPACE + '#listpage'
+WEBPAGE_KIND_TERM = SITES_NAMESPACE + '#webpage'
+WEBATTACHMENT_KIND_TERM = SITES_NAMESPACE + '#webattachment'
+FOLDER_KIND_TERM = SITES_NAMESPACE + '#folder'
+
+SUPPORT_KINDS = [
+ 'announcement', 'announcementspage', 'attachment', 'comment', 'filecabinet',
+ 'listitem', 'listpage', 'webpage', 'webattachment'
+ ]
+
+
+class Revision(atom.core.XmlElement):
+ """Google Sites ."""
+ _qname = SITES_TEMPLATE % 'revision'
+
+
+class PageName(atom.core.XmlElement):
+ """Google Sites ."""
+ _qname = SITES_TEMPLATE % 'pageName'
+
+
+class SiteName(atom.core.XmlElement):
+ """Google Sites ."""
+ _qname = SITES_TEMPLATE % 'siteName'
+
+
+class Theme(atom.core.XmlElement):
+ """Google Sites ."""
+ _qname = SITES_TEMPLATE % 'theme'
+
+
+class Deleted(atom.core.XmlElement):
+ """Google Sites ."""
+ _qname = gdata.data.GDATA_TEMPLATE % 'deleted'
+
+
+class Publisher(atom.core.XmlElement):
+ """Google Sites ."""
+ _qname = DC_TERMS_TEMPLATE % 'publisher'
+
+
+class Worksheet(atom.core.XmlElement):
+ """Google Sites List Page ."""
+
+ _qname = SPREADSHEETS_TEMPLATE % 'worksheet'
+ name = 'name'
+
+
+class Header(atom.core.XmlElement):
+ """Google Sites List Page ."""
+
+ _qname = SPREADSHEETS_TEMPLATE % 'header'
+ row = 'row'
+
+
+class Column(atom.core.XmlElement):
+ """Google Sites List Page ."""
+
+ _qname = SPREADSHEETS_TEMPLATE % 'column'
+ index = 'index'
+ name = 'name'
+
+
+class Data(atom.core.XmlElement):
+ """Google Sites List Page ."""
+
+ _qname = SPREADSHEETS_TEMPLATE % 'data'
+ startRow = 'startRow'
+ column = [Column]
+
+
+class Field(atom.core.XmlElement):
+ """Google Sites List Item ."""
+
+ _qname = SPREADSHEETS_TEMPLATE % 'field'
+ index = 'index'
+ name = 'name'
+
+
+class InReplyTo(atom.core.XmlElement):
+ """Google Sites List Item ."""
+
+ _qname = THR_TERMS_TEMPLATE % 'in-reply-to'
+ href = 'href'
+ ref = 'ref'
+ source = 'source'
+ type = 'type'
+
+
+class Content(atom.data.Content):
+ """Google Sites version of that encapsulates XHTML."""
+
+ def __init__(self, html=None, type=None, **kwargs):
+ if type is None and html:
+ type = 'xhtml'
+ super(Content, self).__init__(type=type, **kwargs)
+ if html is not None:
+ self.html = html
+
+ def _get_html(self):
+ if self.children:
+ return self.children[0]
+ else:
+ return ''
+
+ def _set_html(self, html):
+ if not html:
+ self.children = []
+ return
+
+ if type(html) == str:
+ html = atom.core.parse(html)
+ if not html.namespace:
+ html.namespace = XHTML_NAMESPACE
+
+ self.children = [html]
+
+ html = property(_get_html, _set_html)
+
+
+class Summary(atom.data.Summary):
+ """Google Sites version of ."""
+
+ def __init__(self, html=None, type=None, text=None, **kwargs):
+ if type is None and html:
+ type = 'xhtml'
+
+ super(Summary, self).__init__(type=type, text=text, **kwargs)
+ if html is not None:
+ self.html = html
+
+ def _get_html(self):
+ if self.children:
+ return self.children[0]
+ else:
+ return ''
+
+ def _set_html(self, html):
+ if not html:
+ self.children = []
+ return
+
+ if type(html) == str:
+ html = atom.core.parse(html)
+ if not html.namespace:
+ html.namespace = XHTML_NAMESPACE
+
+ self.children = [html]
+
+ html = property(_get_html, _set_html)
+
+
+class BaseSiteEntry(gdata.data.GDEntry):
+ """Google Sites Entry."""
+
+ def __init__(self, kind=None, **kwargs):
+ super(BaseSiteEntry, self).__init__(**kwargs)
+ if kind is not None:
+ self.category.append(
+ atom.data.Category(scheme=SITES_KIND_SCHEME,
+ term='%s#%s' % (SITES_NAMESPACE, kind),
+ label=kind))
+
+ def __find_category_scheme(self, scheme):
+ for category in self.category:
+ if category.scheme == scheme:
+ return category
+ return None
+
+ def kind(self):
+ kind = self.__find_category_scheme(SITES_KIND_SCHEME)
+ if kind is not None:
+ return kind.term[len(SITES_NAMESPACE) + 1:]
+ else:
+ return None
+
+ Kind = kind
+
+ def get_node_id(self):
+ return self.id.text[self.id.text.rfind('/') + 1:]
+
+ GetNodeId = get_node_id
+
+ def find_parent_link(self):
+ return self.find_url(SITES_PARENT_LINK_REL)
+
+ FindParentLink = find_parent_link
+
+ def is_deleted(self):
+ return self.deleted is not None
+
+ IsDeleted = is_deleted
+
+
+class ContentEntry(BaseSiteEntry):
+ """Google Sites Content Entry."""
+ content = Content
+ deleted = Deleted
+ publisher = Publisher
+ in_reply_to = InReplyTo
+ worksheet = Worksheet
+ header = Header
+ data = Data
+ field = [Field]
+ revision = Revision
+ page_name = PageName
+ feed_link = gdata.data.FeedLink
+
+  def find_revision_link(self):
+    return self.find_url(SITES_REVISION_LINK_REL)
+
+  FindRevisionLink = find_revision_link
+
+
+class ContentFeed(gdata.data.GDFeed):
+ """Google Sites Content Feed.
+
+ The Content feed is a feed containing the current, editable site content.
+ """
+ entry = [ContentEntry]
+
+ def __get_entry_type(self, kind):
+ matches = []
+ for entry in self.entry:
+ if entry.Kind() == kind:
+ matches.append(entry)
+ return matches
+
+ def get_announcements(self):
+ return self.__get_entry_type('announcement')
+
+ GetAnnouncements = get_announcements
+
+ def get_announcement_pages(self):
+ return self.__get_entry_type('announcementspage')
+
+ GetAnnouncementPages = get_announcement_pages
+
+ def get_attachments(self):
+ return self.__get_entry_type('attachment')
+
+ GetAttachments = get_attachments
+
+ def get_comments(self):
+ return self.__get_entry_type('comment')
+
+ GetComments = get_comments
+
+ def get_file_cabinets(self):
+ return self.__get_entry_type('filecabinet')
+
+ GetFileCabinets = get_file_cabinets
+
+ def get_list_items(self):
+ return self.__get_entry_type('listitem')
+
+ GetListItems = get_list_items
+
+ def get_list_pages(self):
+ return self.__get_entry_type('listpage')
+
+ GetListPages = get_list_pages
+
+ def get_webpages(self):
+ return self.__get_entry_type('webpage')
+
+ GetWebpages = get_webpages
+
+ def get_webattachments(self):
+ return self.__get_entry_type('webattachment')
+
+ GetWebattachments = get_webattachments
+
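+  # Usage sketch (illustrative): the helpers above filter the feed's entries
+  # by their kind category, e.g.:
+  #
+  #   feed = client.GetContentFeed()
+  #   for page in feed.GetListPages():
+  #     print page.title.text
+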
+
+class ActivityEntry(BaseSiteEntry):
+ """Google Sites Activity Entry."""
+ summary = Summary
+
+
+class ActivityFeed(gdata.data.GDFeed):
+ """Google Sites Activity Feed.
+
+ The Activity feed is a feed containing recent Site activity.
+ """
+ entry = [ActivityEntry]
+
+
+class RevisionEntry(BaseSiteEntry):
+ """Google Sites Revision Entry."""
+ content = Content
+
+
+class RevisionFeed(gdata.data.GDFeed):
+ """Google Sites Revision Feed.
+
+  The Revision feed is a feed containing the revision history for a page.
+ """
+ entry = [RevisionEntry]
+
+
+class SiteEntry(gdata.data.GDEntry):
+ """Google Sites Site Feed Entry."""
+ site_name = SiteName
+ theme = Theme
+
+ def find_source_link(self):
+ return self.find_url(SITES_SOURCE_LINK_REL)
+
+ FindSourceLink = find_source_link
+
+
+class SiteFeed(gdata.data.GDFeed):
+ """Google Sites Site Feed.
+
+ The Site feed can be used to list a user's sites and create new sites.
+ """
+ entry = [SiteEntry]
+
+
+class AclEntry(gdata.acl.data.AclEntry):
+ """Google Sites ACL Entry."""
+
+
+class AclFeed(gdata.acl.data.AclFeed):
+ """Google Sites ACL Feed.
+
+ The ACL feed can be used to modify the sharing permissions of a Site.
+ """
+ entry = [AclEntry]
diff --git a/python/gdata/spreadsheet/__init__.py b/python/gdata/spreadsheet/__init__.py
new file mode 100644
index 0000000..e9a0fb3
--- /dev/null
+++ b/python/gdata/spreadsheet/__init__.py
@@ -0,0 +1,474 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains extensions to Atom objects used with Google Spreadsheets.
+"""
+
+__author__ = 'api.laurabeth@gmail.com (Laura Beth Lincoln)'
+
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom
+import gdata
+import re
+import string
+
+
+# XML namespaces which are often used in Google Spreadsheets entities.
+GSPREADSHEETS_NAMESPACE = 'http://schemas.google.com/spreadsheets/2006'
+GSPREADSHEETS_TEMPLATE = '{http://schemas.google.com/spreadsheets/2006}%s'
+
+GSPREADSHEETS_EXTENDED_NAMESPACE = ('http://schemas.google.com/spreadsheets'
+ '/2006/extended')
+GSPREADSHEETS_EXTENDED_TEMPLATE = ('{http://schemas.google.com/spreadsheets'
+ '/2006/extended}%s')
+
+
+class ColCount(atom.AtomBase):
+ """The Google Spreadsheets colCount element """
+
+ _tag = 'colCount'
+ _namespace = GSPREADSHEETS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def ColCountFromString(xml_string):
+ return atom.CreateClassFromXMLString(ColCount, xml_string)
+
+
+class RowCount(atom.AtomBase):
+ """The Google Spreadsheets rowCount element """
+
+ _tag = 'rowCount'
+ _namespace = GSPREADSHEETS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+def RowCountFromString(xml_string):
+ return atom.CreateClassFromXMLString(RowCount, xml_string)
+
+
+class Cell(atom.AtomBase):
+ """The Google Spreadsheets cell element """
+
+ _tag = 'cell'
+ _namespace = GSPREADSHEETS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['row'] = 'row'
+ _attributes['col'] = 'col'
+ _attributes['inputValue'] = 'inputValue'
+ _attributes['numericValue'] = 'numericValue'
+
+ def __init__(self, text=None, row=None, col=None, inputValue=None,
+ numericValue=None, extension_elements=None, extension_attributes=None):
+ self.text = text
+ self.row = row
+ self.col = col
+ self.inputValue = inputValue
+ self.numericValue = numericValue
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def CellFromString(xml_string):
+ return atom.CreateClassFromXMLString(Cell, xml_string)
+
+
+class Custom(atom.AtomBase):
+ """The Google Spreadsheets custom element"""
+
+ _namespace = GSPREADSHEETS_EXTENDED_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+
+ def __init__(self, column=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.column = column # The name of the column
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ def _BecomeChildElement(self, tree):
+ new_child = ElementTree.Element('')
+ tree.append(new_child)
+ new_child.tag = '{%s}%s' % (self.__class__._namespace,
+ self.column)
+ self._AddMembersToElementTree(new_child)
+
+ def _ToElementTree(self):
+ new_tree = ElementTree.Element('{%s}%s' % (self.__class__._namespace,
+ self.column))
+ self._AddMembersToElementTree(new_tree)
+ return new_tree
+
+ def _HarvestElementTree(self, tree):
+ namespace_uri, local_tag = string.split(tree.tag[1:], "}", 1)
+ self.column = local_tag
+ # Fill in the instance members from the contents of the XML tree.
+ for child in tree:
+ self._ConvertElementTreeToMember(child)
+ for attribute, value in tree.attrib.iteritems():
+ self._ConvertElementAttributeToMember(attribute, value)
+ self.text = tree.text
+
+
+def CustomFromString(xml_string):
+ element_tree = ElementTree.fromstring(xml_string)
+ return _CustomFromElementTree(element_tree)
+
+
+def _CustomFromElementTree(element_tree):
+ namespace_uri, local_tag = string.split(element_tree.tag[1:], "}", 1)
+ if namespace_uri == GSPREADSHEETS_EXTENDED_NAMESPACE:
+ new_custom = Custom()
+ new_custom._HarvestElementTree(element_tree)
+ new_custom.column = local_tag
+ return new_custom
+ return None
+
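+# Illustrative sketch (the XML below is a made-up example): a gsx: extended-
+# namespace element parses into a Custom object whose column is the local tag
+# name and whose text is the element text.
+#
+#   xml = ('<gsx:name xmlns:gsx="%s">Bob</gsx:name>'
+#          % GSPREADSHEETS_EXTENDED_NAMESPACE)
+#   cell = CustomFromString(xml)
+#   # cell.column == 'name', cell.text == 'Bob'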
+
+class SpreadsheetsSpreadsheet(gdata.GDataEntry):
+ """A Google Spreadsheets flavor of a Spreadsheet Atom Entry """
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, control=None, updated=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.control = control
+ self.title = title
+ self.updated = updated
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SpreadsheetsSpreadsheetFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsSpreadsheet,
+ xml_string)
+
+
+class SpreadsheetsWorksheet(gdata.GDataEntry):
+ """A Google Spreadsheets flavor of a Worksheet Atom Entry """
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}rowCount' % GSPREADSHEETS_NAMESPACE] = ('row_count',
+ RowCount)
+ _children['{%s}colCount' % GSPREADSHEETS_NAMESPACE] = ('col_count',
+ ColCount)
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, control=None, updated=None,
+ row_count=None, col_count=None, text=None, extension_elements=None,
+ extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.control = control
+ self.title = title
+ self.updated = updated
+ self.row_count = row_count
+ self.col_count = col_count
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SpreadsheetsWorksheetFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsWorksheet,
+ xml_string)
+
+
+class SpreadsheetsCell(gdata.BatchEntry):
+ """A Google Spreadsheets flavor of a Cell Atom Entry """
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.BatchEntry._children.copy()
+ _attributes = gdata.BatchEntry._attributes.copy()
+ _children['{%s}cell' % GSPREADSHEETS_NAMESPACE] = ('cell', Cell)
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, control=None, updated=None,
+ cell=None, batch_operation=None, batch_id=None, batch_status=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.control = control
+ self.title = title
+ self.batch_operation = batch_operation
+ self.batch_id = batch_id
+ self.batch_status = batch_status
+ self.updated = updated
+ self.cell = cell
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SpreadsheetsCellFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsCell,
+ xml_string)
+
+
+class SpreadsheetsList(gdata.GDataEntry):
+ """A Google Spreadsheets flavor of a List Atom Entry """
+
+ _tag = 'entry'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+ def __init__(self, author=None, category=None, content=None,
+ contributor=None, atom_id=None, link=None, published=None, rights=None,
+ source=None, summary=None, title=None, control=None, updated=None,
+ custom=None,
+ text=None, extension_elements=None, extension_attributes=None):
+ self.author = author or []
+ self.category = category or []
+ self.content = content
+ self.contributor = contributor or []
+ self.id = atom_id
+ self.link = link or []
+ self.published = published
+ self.rights = rights
+ self.source = source
+ self.summary = summary
+ self.control = control
+ self.title = title
+ self.updated = updated
+ self.custom = custom or {}
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+ # We need to overwrite _ConvertElementTreeToMember to add special logic to
+ # convert custom attributes to members
+ def _ConvertElementTreeToMember(self, child_tree):
+ # Find the element's tag in this class's list of child members
+ if self.__class__._children.has_key(child_tree.tag):
+ member_name = self.__class__._children[child_tree.tag][0]
+ member_class = self.__class__._children[child_tree.tag][1]
+ # If the class member is supposed to contain a list, make sure the
+ # matching member is set to a list, then append the new member
+ # instance to the list.
+ if isinstance(member_class, list):
+ if getattr(self, member_name) is None:
+ setattr(self, member_name, [])
+ getattr(self, member_name).append(atom._CreateClassFromElementTree(
+ member_class[0], child_tree))
+ else:
+ setattr(self, member_name,
+ atom._CreateClassFromElementTree(member_class, child_tree))
+ elif child_tree.tag.find('{%s}' % GSPREADSHEETS_EXTENDED_NAMESPACE) == 0:
+ # If this is in the custom namespace, add it to the custom dict.
+ name = child_tree.tag[child_tree.tag.index('}')+1:]
+ custom = _CustomFromElementTree(child_tree)
+ if custom:
+ self.custom[name] = custom
+ else:
+ atom.ExtensionContainer._ConvertElementTreeToMember(self, child_tree)
+
+ # We need to overwrite _AddMembersToElementTree to add special logic to
+ # convert custom members to XML nodes.
+ def _AddMembersToElementTree(self, tree):
+ # Convert the members of this class which are XML child nodes.
+ # This uses the class's _children dictionary to find the members which
+ # should become XML child nodes.
+ member_node_names = [values[0] for tag, values in
+ self.__class__._children.iteritems()]
+ for member_name in member_node_names:
+ member = getattr(self, member_name)
+ if member is None:
+ pass
+ elif isinstance(member, list):
+ for instance in member:
+ instance._BecomeChildElement(tree)
+ else:
+ member._BecomeChildElement(tree)
+ # Convert the members of this class which are XML attributes.
+ for xml_attribute, member_name in self.__class__._attributes.iteritems():
+ member = getattr(self, member_name)
+ if member is not None:
+ tree.attrib[xml_attribute] = member
+ # Convert all special custom item attributes to nodes
+ for name, custom in self.custom.iteritems():
+ custom._BecomeChildElement(tree)
+ # Lastly, call the ExtensionContainer's _AddMembersToElementTree to
+ # convert any extension attributes.
+ atom.ExtensionContainer._AddMembersToElementTree(self, tree)
+
+
+def SpreadsheetsListFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsList,
+ xml_string)
+
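+# Illustrative sketch (column names are made up): a list (row) entry carries
+# its values in the custom dict, keyed by the sanitized column header.
+#
+#   row = SpreadsheetsList()
+#   for column, value in [('name', 'Bob'), ('email', 'bob@example.com')]:
+#     data = Custom()
+#     data.column = column
+#     data.text = value
+#     row.custom[column] = data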
+
+class SpreadsheetsSpreadsheetsFeed(gdata.GDataFeed):
+ """A feed containing Google Spreadsheets Spreadsheets"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [SpreadsheetsSpreadsheet])
+
+
+def SpreadsheetsSpreadsheetsFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsSpreadsheetsFeed,
+ xml_string)
+
+
+class SpreadsheetsWorksheetsFeed(gdata.GDataFeed):
+ """A feed containing Google Spreadsheets Spreadsheets"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [SpreadsheetsWorksheet])
+
+
+def SpreadsheetsWorksheetsFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsWorksheetsFeed,
+ xml_string)
+
+
+class SpreadsheetsCellsFeed(gdata.BatchFeed):
+ """A feed containing Google Spreadsheets Cells"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.BatchFeed._children.copy()
+ _attributes = gdata.BatchFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [SpreadsheetsCell])
+ _children['{%s}rowCount' % GSPREADSHEETS_NAMESPACE] = ('row_count',
+ RowCount)
+ _children['{%s}colCount' % GSPREADSHEETS_NAMESPACE] = ('col_count',
+ ColCount)
+
+ def __init__(self, author=None, category=None, contributor=None,
+ generator=None, icon=None, atom_id=None, link=None, logo=None,
+ rights=None, subtitle=None, title=None, updated=None,
+ entry=None, total_results=None, start_index=None,
+ items_per_page=None, extension_elements=None,
+ extension_attributes=None, text=None, row_count=None,
+ col_count=None, interrupted=None):
+ gdata.BatchFeed.__init__(self, author=author, category=category,
+ contributor=contributor, generator=generator,
+ icon=icon, atom_id=atom_id, link=link,
+ logo=logo, rights=rights, subtitle=subtitle,
+ title=title, updated=updated, entry=entry,
+ total_results=total_results,
+ start_index=start_index,
+ items_per_page=items_per_page,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text, interrupted=interrupted)
+ self.row_count = row_count
+ self.col_count = col_count
+
+ def GetBatchLink(self):
+ for link in self.link:
+ if link.rel == 'http://schemas.google.com/g/2005#batch':
+ return link
+ return None
+
+
+def SpreadsheetsCellsFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsCellsFeed,
+ xml_string)
+
+
+class SpreadsheetsListFeed(gdata.GDataFeed):
+ """A feed containing Google Spreadsheets Spreadsheets"""
+
+ _tag = 'feed'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [SpreadsheetsList])
+
+
+def SpreadsheetsListFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SpreadsheetsListFeed,
+ xml_string)
diff --git a/python/gdata/spreadsheet/service.py b/python/gdata/spreadsheet/service.py
new file mode 100644
index 0000000..66c82ce
--- /dev/null
+++ b/python/gdata/spreadsheet/service.py
@@ -0,0 +1,484 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""SpreadsheetsService extends the GDataService to streamline Google
+Spreadsheets operations.
+
+ SpreadsheetsService: Provides methods to query feeds and manipulate items.
+ Extends GDataService.
+
+ DictionaryToParamList: Function which converts a dictionary into a list of
+ URL arguments (represented as strings). This is a
+ utility function used in CRUD operations.
+"""
+
+__author__ = 'api.laurabeth@gmail.com (Laura Beth Lincoln)'
+
+
+import gdata
+import atom.service
+import gdata.service
+import gdata.spreadsheet
+import atom
+
+
+class Error(Exception):
+ """Base class for exceptions in this module."""
+ pass
+
+
+class RequestError(Error):
+ pass
+
+
+class SpreadsheetsService(gdata.service.GDataService):
+ """Client for the Google Spreadsheets service."""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='spreadsheets.google.com', additional_headers=None,
+ **kwargs):
+ """Creates a client for the Google Spreadsheets service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'spreadsheets.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='wise', source=source,
+ server=server, additional_headers=additional_headers, **kwargs)
+
+ def GetSpreadsheetsFeed(self, key=None, query=None, visibility='private',
+ projection='full'):
+ """Gets a spreadsheets feed or a specific entry if a key is defined
+ Args:
+ key: string (optional) The spreadsheet key defined in /ccc?key=
+ query: DocumentQuery (optional) Query parameters
+
+ Returns:
+ If there is no key, then a SpreadsheetsSpreadsheetsFeed.
+ If there is a key, then a SpreadsheetsSpreadsheet.
+ """
+
+ uri = ('https://%s/feeds/spreadsheets/%s/%s'
+ % (self.server, visibility, projection))
+
+ if key is not None:
+ uri = '%s/%s' % (uri, key)
+
+ if query != None:
+ query.feed = uri
+ uri = query.ToUri()
+
+ if key:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsSpreadsheetFromString)
+ else:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsSpreadsheetsFeedFromString)
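+
+ # Illustrative sketch (credentials and names are made up): listing the
+ # current user's spreadsheets after logging in.
+ #
+ #   client = SpreadsheetsService(email='user@example.com',
+ #                                password='secret', source='example-app')
+ #   client.ProgrammaticLogin()
+ #   feed = client.GetSpreadsheetsFeed()
+ #   for entry in feed.entry:
+ #     print entry.title.text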
+
+ def GetWorksheetsFeed(self, key, wksht_id=None, query=None,
+ visibility='private', projection='full'):
+ """Gets a worksheets feed or a specific entry if a wksht is defined
+ Args:
+ key: string The spreadsheet key defined in /ccc?key=
+ wksht_id: string (optional) The id for a specific worksheet entry
+ query: DocumentQuery (optional) Query parameters
+
+ Returns:
+ If there is no wksht_id, then a SpreadsheetsWorksheetsFeed.
+ If there is a wksht_id, then a SpreadsheetsWorksheet.
+ """
+
+ uri = ('https://%s/feeds/worksheets/%s/%s/%s'
+ % (self.server, key, visibility, projection))
+
+ if wksht_id != None:
+ uri = '%s/%s' % (uri, wksht_id)
+
+ if query != None:
+ query.feed = uri
+ uri = query.ToUri()
+
+ if wksht_id:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsWorksheetFromString)
+ else:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsWorksheetsFeedFromString)
+
+ def AddWorksheet(self, title, row_count, col_count, key):
+ """Creates a new worksheet in the desired spreadsheet.
+
+ The new worksheet is appended to the end of the list of worksheets. The
+ new worksheet will only have the available number of columns and cells
+ specified.
+
+ Args:
+ title: str The title which will be displayed in the list of worksheets.
+ row_count: int or str The number of rows in the new worksheet.
+ col_count: int or str The number of columns in the new worksheet.
+ key: str The spreadsheet key to the spreadsheet to which the new
+ worksheet should be added.
+
+ Returns:
+ A SpreadsheetsWorksheet if the new worksheet was created successfully.
+ """
+ new_worksheet = gdata.spreadsheet.SpreadsheetsWorksheet(
+ title=atom.Title(text=title),
+ row_count=gdata.spreadsheet.RowCount(text=str(row_count)),
+ col_count=gdata.spreadsheet.ColCount(text=str(col_count)))
+ return self.Post(new_worksheet,
+ 'https://%s/feeds/worksheets/%s/private/full' % (self.server, key),
+ converter=gdata.spreadsheet.SpreadsheetsWorksheetFromString)
+
+ def UpdateWorksheet(self, worksheet_entry, url=None):
+ """Changes the size and/or title of the desired worksheet.
+
+ Args:
+ worksheet_entry: SpreadsheetWorksheet The new contents of the
+ worksheet.
+ url: str (optional) The URL to which the edited worksheet entry should
+ be sent. If the url is None, the edit URL from the worksheet will
+ be used.
+
+ Returns:
+ A SpreadsheetsWorksheet with the new information about the worksheet.
+ """
+ target_url = url or worksheet_entry.GetEditLink().href
+ return self.Put(worksheet_entry, target_url,
+ converter=gdata.spreadsheet.SpreadsheetsWorksheetFromString)
+
+ def DeleteWorksheet(self, worksheet_entry=None, url=None):
+ """Removes the desired worksheet from the spreadsheet
+
+ Args:
+ worksheet_entry: SpreadsheetWorksheet (optional) The worksheet to
+ be deleted. If this is None, then the DELETE request is sent to
+ the url specified in the url parameter.
+ url: str (optional) The URL to which the DELETE request should be
+ sent. If left as None, the worksheet's edit URL is used.
+
+ Returns:
+ True if the worksheet was deleted successfully.
+ """
+ if url:
+ target_url = url
+ else:
+ target_url = worksheet_entry.GetEditLink().href
+ return self.Delete(target_url)
+
+ def GetCellsFeed(self, key, wksht_id='default', cell=None, query=None,
+ visibility='private', projection='full'):
+ """Gets a cells feed or a specific entry if a cell is defined
+ Args:
+ key: string The spreadsheet key defined in /ccc?key=
+ wksht_id: string The id for a specific worksheet entry
+ cell: string (optional) The R1C1 address of the cell
+ query: DocumentQuery (optional) Query parameters
+
+ Returns:
+ If there is no cell, then a SpreadsheetsCellsFeed.
+ If there is a cell, then a SpreadsheetsCell.
+ """
+
+ uri = ('https://%s/feeds/cells/%s/%s/%s/%s'
+ % (self.server, key, wksht_id, visibility, projection))
+
+ if cell != None:
+ uri = '%s/%s' % (uri, cell)
+
+ if query != None:
+ query.feed = uri
+ uri = query.ToUri()
+
+ if cell:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsCellFromString)
+ else:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsCellsFeedFromString)
+
+ def GetListFeed(self, key, wksht_id='default', row_id=None, query=None,
+ visibility='private', projection='full'):
+ """Gets a list feed or a specific entry if a row_id is defined
+ Args:
+ key: string The spreadsheet key defined in /ccc?key=
+ wksht_id: string The id for a specific worksheet entry
+ row_id: string (optional) The row_id of a row in the list
+ query: DocumentQuery (optional) Query parameters
+
+ Returns:
+ If there is no row_id, then a SpreadsheetsListFeed.
+ If there is a row_id, then a SpreadsheetsList.
+ """
+
+ uri = ('https://%s/feeds/list/%s/%s/%s/%s'
+ % (self.server, key, wksht_id, visibility, projection))
+
+ if row_id is not None:
+ uri = '%s/%s' % (uri, row_id)
+
+ if query is not None:
+ query.feed = uri
+ uri = query.ToUri()
+
+ if row_id:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsListFromString)
+ else:
+ return self.Get(uri,
+ converter=gdata.spreadsheet.SpreadsheetsListFeedFromString)
+
+ def UpdateCell(self, row, col, inputValue, key, wksht_id='default'):
+ """Updates an existing cell.
+
+ Args:
+ row: int The row the cell to be edited is in
+ col: int The column the cell to be edited is in
+ inputValue: str The new value of the cell
+ key: str The key of the spreadsheet in which this cell resides.
+ wksht_id: str The ID of the worksheet which holds this cell.
+
+ Returns:
+ The updated cell entry
+ """
+ row = str(row)
+ col = str(col)
+ # make the new cell
+ new_cell = gdata.spreadsheet.Cell(row=row, col=col, inputValue=inputValue)
+ # get the edit uri and PUT
+ cell = 'R%sC%s' % (row, col)
+ entry = self.GetCellsFeed(key, wksht_id, cell)
+ for a_link in entry.link:
+ if a_link.rel == 'edit':
+ entry.cell = new_cell
+ return self.Put(entry, a_link.href,
+ converter=gdata.spreadsheet.SpreadsheetsCellFromString)
+
+ def _GenerateCellsBatchUrl(self, spreadsheet_key, worksheet_id):
+ return ('https://spreadsheets.google.com/feeds/cells/%s/%s/'
+ 'private/full/batch' % (spreadsheet_key, worksheet_id))
+
+ def ExecuteBatch(self, batch_feed, url=None, spreadsheet_key=None,
+ worksheet_id=None,
+ converter=gdata.spreadsheet.SpreadsheetsCellsFeedFromString):
+ """Sends a batch request feed to the server.
+
+ The batch request needs to be sent to the batch URL for a particular
+ worksheet. You can specify the worksheet by providing the spreadsheet_key
+ and worksheet_id, or by sending the URL from the cells feed's batch link.
+
+ Args:
+ batch_feed: gdata.spreadsheet.SpreadsheetsCellFeed A feed containing
+ BatchEntry elements which contain the desired CRUD operation and
+ any necessary data to modify a cell.
+ url: str (optional) The batch URL for the cells feed to which these
+ changes should be applied. This can be found by calling
+ cells_feed.GetBatchLink().href.
+ spreadsheet_key: str (optional) Used to generate the batch request URL
+ if the url argument is None. If using the spreadsheet key to
+ generate the URL, the worksheet id is also required.
+ worksheet_id: str (optional) Used if the url is not provided; it is
+ part of the batch feed target URL. This is used with the spreadsheet
+ key.
+ converter: Function (optional) Function to be executed on the server's
+ response. This function should take one string as a parameter. The
+ default value is SpreadsheetsCellsFeedFromString which will turn the result
+ into a gdata.spreadsheet.SpreadsheetsCellsFeed object.
+
+ Returns:
+ A gdata.BatchFeed containing the results.
+ """
+
+ if url is None:
+ url = self._GenerateCellsBatchUrl(spreadsheet_key, worksheet_id)
+ return self.Post(batch_feed, url, converter=converter)
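+
+ # Illustrative sketch: batching cell updates through the cells feed's
+ # batch link. Assumes `client` is an authenticated SpreadsheetsService,
+ # that gdata.BatchFeed provides the AddUpdate helper (inherited by
+ # SpreadsheetsCellsFeed), and that key/wksht_id are placeholders.
+ #
+ #   cells = client.GetCellsFeed(key, wksht_id)
+ #   batch_feed = gdata.spreadsheet.SpreadsheetsCellsFeed()
+ #   single_cell = client.GetCellsFeed(key, wksht_id, cell='R1C1')
+ #   single_cell.cell.inputValue = 'hello'
+ #   batch_feed.AddUpdate(single_cell)
+ #   updated = client.ExecuteBatch(batch_feed,
+ #                                 url=cells.GetBatchLink().href)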
+
+ def InsertRow(self, row_data, key, wksht_id='default'):
+ """Inserts a new row with the provided data
+
+ Args:
+ row_data: dict A dictionary of column header to row data
+ key: str The spreadsheet key defined in /ccc?key=
+ wksht_id: str The ID of the worksheet which will receive the new row
+
+ Returns:
+ The inserted row
+ """
+ new_entry = gdata.spreadsheet.SpreadsheetsList()
+ for k, v in row_data.iteritems():
+ new_custom = gdata.spreadsheet.Custom()
+ new_custom.column = k
+ new_custom.text = v
+ new_entry.custom[new_custom.column] = new_custom
+ # Generate the post URL for the worksheet which will receive the new entry.
+ post_url = 'https://spreadsheets.google.com/feeds/list/%s/%s/private/full'%(
+ key, wksht_id)
+ return self.Post(new_entry, post_url,
+ converter=gdata.spreadsheet.SpreadsheetsListFromString)
+
+ def UpdateRow(self, entry, new_row_data):
+ """Updates a row with the provided data
+
+ If you want to add additional information to a row, it is often
+ easier to change the values in entry.custom, then use the Put
+ method instead of UpdateRow. This UpdateRow method will replace
+ the contents of the row with new_row_data - it will change all columns
+ not just the columns specified in the new_row_data dict.
+
+ Args:
+ entry: gdata.spreadsheet.SpreadsheetsList The entry to be updated
+ new_row_data: dict A dictionary of column header to row data
+
+ Returns:
+ The updated row
+ """
+ entry.custom = {}
+ for k, v in new_row_data.iteritems():
+ new_custom = gdata.spreadsheet.Custom()
+ new_custom.column = k
+ new_custom.text = v
+ entry.custom[k] = new_custom
+ for a_link in entry.link:
+ if a_link.rel == 'edit':
+ return self.Put(entry, a_link.href,
+ converter=gdata.spreadsheet.SpreadsheetsListFromString)
+
+ def DeleteRow(self, entry):
+ """Deletes a row, the provided entry
+
+ Args:
+ entry: gdata.spreadsheet.SpreadsheetsList The row to be deleted
+
+ Returns:
+ The delete response
+ """
+ for a_link in entry.link:
+ if a_link.rel == 'edit':
+ return self.Delete(a_link.href)
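+
+ # Illustrative sketch (column names, key and wksht_id are placeholders):
+ # inserting, updating and deleting a row through the list feed.
+ #
+ #   row = client.InsertRow({'name': 'Bob', 'email': 'bob@example.com'},
+ #                          key, wksht_id)
+ #   row = client.UpdateRow(row, {'name': 'Bob', 'email': 'bob2@example.com'})
+ #   client.DeleteRow(row)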
+
+
+class DocumentQuery(gdata.service.Query):
+
+ def _GetTitleQuery(self):
+ return self['title']
+
+ def _SetTitleQuery(self, document_query):
+ self['title'] = document_query
+
+ title = property(_GetTitleQuery, _SetTitleQuery,
+ doc="""The title query parameter""")
+
+ def _GetTitleExactQuery(self):
+ return self['title-exact']
+
+ def _SetTitleExactQuery(self, document_query):
+ self['title-exact'] = document_query
+
+ title_exact = property(_GetTitleExactQuery, _SetTitleExactQuery,
+ doc="""The title-exact query parameter""")
+
+
+class CellQuery(gdata.service.Query):
+
+ def _GetMinRowQuery(self):
+ return self['min-row']
+
+ def _SetMinRowQuery(self, cell_query):
+ self['min-row'] = cell_query
+
+ min_row = property(_GetMinRowQuery, _SetMinRowQuery,
+ doc="""The min-row query parameter""")
+
+ def _GetMaxRowQuery(self):
+ return self['max-row']
+
+ def _SetMaxRowQuery(self, cell_query):
+ self['max-row'] = cell_query
+
+ max_row = property(_GetMaxRowQuery, _SetMaxRowQuery,
+ doc="""The max-row query parameter""")
+
+ def _GetMinColQuery(self):
+ return self['min-col']
+
+ def _SetMinColQuery(self, cell_query):
+ self['min-col'] = cell_query
+
+ min_col = property(_GetMinColQuery, _SetMinColQuery,
+ doc="""The min-col query parameter""")
+
+ def _GetMaxColQuery(self):
+ return self['max-col']
+
+ def _SetMaxColQuery(self, cell_query):
+ self['max-col'] = cell_query
+
+ max_col = property(_GetMaxColQuery, _SetMaxColQuery,
+ doc="""The max-col query parameter""")
+
+ def _GetRangeQuery(self):
+ return self['range']
+
+ def _SetRangeQuery(self, cell_query):
+ self['range'] = cell_query
+
+ range = property(_GetRangeQuery, _SetRangeQuery,
+ doc="""The range query parameter""")
+
+ def _GetReturnEmptyQuery(self):
+ return self['return-empty']
+
+ def _SetReturnEmptyQuery(self, cell_query):
+ self['return-empty'] = cell_query
+
+ return_empty = property(_GetReturnEmptyQuery, _SetReturnEmptyQuery,
+ doc="""The return-empty query parameter""")
+
+
+class ListQuery(gdata.service.Query):
+
+ def _GetSpreadsheetQuery(self):
+ return self['sq']
+
+ def _SetSpreadsheetQuery(self, list_query):
+ self['sq'] = list_query
+
+ sq = property(_GetSpreadsheetQuery, _SetSpreadsheetQuery,
+ doc="""The sq query parameter""")
+
+ def _GetOrderByQuery(self):
+ return self['orderby']
+
+ def _SetOrderByQuery(self, list_query):
+ self['orderby'] = list_query
+
+ orderby = property(_GetOrderByQuery, _SetOrderByQuery,
+ doc="""The orderby query parameter""")
+
+ def _GetReverseQuery(self):
+ return self['reverse']
+
+ def _SetReverseQuery(self, list_query):
+ self['reverse'] = list_query
+
+ reverse = property(_GetReverseQuery, _SetReverseQuery,
+ doc="""The reverse query parameter""")
diff --git a/python/gdata/spreadsheet/text_db.py b/python/gdata/spreadsheet/text_db.py
new file mode 100644
index 0000000..a8de546
--- /dev/null
+++ b/python/gdata/spreadsheet/text_db.py
@@ -0,0 +1,559 @@
+#!/usr/bin/python
+#
+# Copyright Google 2007-2008, all rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import StringIO
+import gdata
+import gdata.service
+import gdata.spreadsheet
+import gdata.spreadsheet.service
+import gdata.docs
+import gdata.docs.service
+
+
+"""Make the Google Documents API feel more like using a database.
+
+This module contains a client and other classes which make working with the
+Google Documents List Data API and the Google Spreadsheets Data API look a
+ bit more like working with a hierarchical database. Using the DatabaseClient,
+you can create or find spreadsheets and use them like a database, with
+worksheets representing tables and rows representing records.
+
+Example Usage:
+# Create a new database, a new table, and add records.
+client = gdata.spreadsheet.text_db.DatabaseClient(username='jo@example.com',
+ password='12345')
+database = client.CreateDatabase('My Text Database')
+table = database.CreateTable('addresses', ['name','email',
+ 'phonenumber', 'mailingaddress'])
+record = table.AddRecord({'name':'Bob', 'email':'bob@example.com',
+ 'phonenumber':'555-555-1234', 'mailingaddress':'900 Imaginary St.'})
+
+# Edit a record
+record.content['email'] = 'bob2@example.com'
+record.Push()
+
+# Delete a table
+table.Delete()
+
+Warnings:
+Care should be exercised when using this module on spreadsheets
+which contain formulas. This module treats all rows as containing text and
+updating a row will overwrite any formula with the output of the formula.
+The intended use case is to allow easy storage of text data in a spreadsheet.
+
+ Error: Domain specific extension of Exception.
+ BadCredentials: Error raised if the username or password was incorrect.
+ CaptchaRequired: Raised if a login attempt failed and a CAPTCHA challenge
+ was issued.
+ DatabaseClient: Communicates with Google Docs APIs servers.
+ Database: Represents a spreadsheet and interacts with tables.
+ Table: Represents a worksheet and interacts with records.
+ RecordResultSet: A list of records in a table.
+ Record: Represents a row in a worksheet and allows manipulation of text data.
+"""
+
+
+__author__ = 'api.jscudder (Jeffrey Scudder)'
+
+
+class Error(Exception):
+ pass
+
+
+class BadCredentials(Error):
+ pass
+
+
+class CaptchaRequired(Error):
+ pass
+
+
+class DatabaseClient(object):
+ """Allows creation and finding of Google Spreadsheets databases.
+
+ The DatabaseClient simplifies the process of creating and finding Google
+ Spreadsheets and will talk to both the Google Spreadsheets API and the
+ Google Documents List API.
+ """
+
+ def __init__(self, username=None, password=None):
+ """Constructor for a Database Client.
+
+ If the username and password are present, the constructor will contact
+ the Google servers to authenticate.
+
+ Args:
+ username: str (optional) Example: jo@example.com
+ password: str (optional)
+ """
+ self.__docs_client = gdata.docs.service.DocsService()
+ self.__spreadsheets_client = (
+ gdata.spreadsheet.service.SpreadsheetsService())
+ self.SetCredentials(username, password)
+
+ def SetCredentials(self, username, password):
+ """Attempts to log in to Google APIs using the provided credentials.
+
+ If the username or password are None, the client will not request auth
+ tokens.
+
+ Args:
+ username: str (optional) Example: jo@example.com
+ password: str (optional)
+ """
+ self.__docs_client.email = username
+ self.__docs_client.password = password
+ self.__spreadsheets_client.email = username
+ self.__spreadsheets_client.password = password
+ if username and password:
+ try:
+ self.__docs_client.ProgrammaticLogin()
+ self.__spreadsheets_client.ProgrammaticLogin()
+ except gdata.service.CaptchaRequired:
+ raise CaptchaRequired('Please visit https://www.google.com/accounts/'
+ 'DisplayUnlockCaptcha to unlock your account.')
+ except gdata.service.BadAuthentication:
+ raise BadCredentials('Username or password incorrect.')
+
+ def CreateDatabase(self, name):
+ """Creates a new Google Spreadsheet with the desired name.
+
+ Args:
+ name: str The title for the spreadsheet.
+
+ Returns:
+ A Database instance representing the new spreadsheet.
+ """
+ # Create a Google Spreadsheet to form the foundation of this database.
+ # Spreadsheet is created by uploading a file to the Google Documents
+ # List API.
+ virtual_csv_file = StringIO.StringIO(',,,')
+ virtual_media_source = gdata.MediaSource(file_handle=virtual_csv_file, content_type='text/csv', content_length=3)
+ db_entry = self.__docs_client.UploadSpreadsheet(virtual_media_source, name)
+ return Database(spreadsheet_entry=db_entry, database_client=self)
+
+ def GetDatabases(self, spreadsheet_key=None, name=None):
+ """Finds spreadsheets which have the unique key or title.
+
+ If querying on the spreadsheet_key there will be at most one result, but
+ searching by name could yield multiple results.
+
+ Args:
+ spreadsheet_key: str The unique key for the spreadsheet; this is
+ usually in the form 'pk23...We' or 'o23...423.12,,,3'.
+ name: str The title of the spreadsheets.
+
+ Returns:
+ A list of Database objects representing the desired spreadsheets.
+ """
+ if spreadsheet_key:
+ db_entry = self.__docs_client.GetDocumentListEntry(
+ r'/feeds/documents/private/full/spreadsheet%3A' + spreadsheet_key)
+ return [Database(spreadsheet_entry=db_entry, database_client=self)]
+ else:
+ title_query = gdata.docs.service.DocumentQuery()
+ title_query['title'] = name
+ db_feed = self.__docs_client.QueryDocumentListFeed(title_query.ToUri())
+ matching_databases = []
+ for entry in db_feed.entry:
+ matching_databases.append(Database(spreadsheet_entry=entry,
+ database_client=self))
+ return matching_databases
+
+ def _GetDocsClient(self):
+ return self.__docs_client
+
+ def _GetSpreadsheetsClient(self):
+ return self.__spreadsheets_client
+
+
+class Database(object):
+ """Provides interface to find and create tables.
+
+ The database represents a Google Spreadsheet.
+ """
+
+ def __init__(self, spreadsheet_entry=None, database_client=None):
+ """Constructor for a database object.
+
+ Args:
+ spreadsheet_entry: gdata.docs.DocumentListEntry The
+ Atom entry which represents the Google Spreadsheet. The
+ spreadsheet's key is extracted from the entry and stored as a
+ member.
+ database_client: DatabaseClient A client which can talk to the
+ Google Spreadsheets servers to perform operations on worksheets
+ within this spreadsheet.
+ """
+ self.entry = spreadsheet_entry
+ if self.entry:
+ id_parts = spreadsheet_entry.id.text.split('/')
+ self.spreadsheet_key = id_parts[-1].replace('spreadsheet%3A', '')
+ self.client = database_client
+
+ def CreateTable(self, name, fields=None):
+ """Add a new worksheet to this spreadsheet and fill in column names.
+
+ Args:
+ name: str The title of the new worksheet.
+ fields: list of strings The column names which are placed in the
+ first row of this worksheet. These names are converted into XML
+ tags by the server. To avoid changes during the translation
+ process I recommend using all lowercase alphabetic names. For
+ example ['somelongname', 'theothername']
+
+ Returns:
+ Table representing the newly created worksheet.
+ """
+ worksheet = self.client._GetSpreadsheetsClient().AddWorksheet(title=name,
+ row_count=1, col_count=len(fields), key=self.spreadsheet_key)
+ return Table(name=name, worksheet_entry=worksheet,
+ database_client=self.client,
+ spreadsheet_key=self.spreadsheet_key, fields=fields)
+
+ def GetTables(self, worksheet_id=None, name=None):
+ """Searches for a worksheet with the specified ID or name.
+
+ The list of results should have one table at most, or no results
+ if the id or name were not found.
+
+ Args:
+ worksheet_id: str The ID of the worksheet, example: 'od6'
+ name: str The title of the worksheet.
+
+ Returns:
+ A list of length 0 or 1 containing the desired Table. A list is returned
+ to make this method feel like GetDatabases and GetRecords.
+ """
+ if worksheet_id:
+ worksheet_entry = self.client._GetSpreadsheetsClient().GetWorksheetsFeed(
+ self.spreadsheet_key, wksht_id=worksheet_id)
+ return [Table(name=worksheet_entry.title.text,
+ worksheet_entry=worksheet_entry, database_client=self.client,
+ spreadsheet_key=self.spreadsheet_key)]
+ else:
+ matching_tables = []
+ query = None
+ if name:
+ query = gdata.spreadsheet.service.DocumentQuery()
+ query.title = name
+
+ worksheet_feed = self.client._GetSpreadsheetsClient().GetWorksheetsFeed(
+ self.spreadsheet_key, query=query)
+ for entry in worksheet_feed.entry:
+ matching_tables.append(Table(name=entry.title.text,
+ worksheet_entry=entry, database_client=self.client,
+ spreadsheet_key=self.spreadsheet_key))
+ return matching_tables
+
+ def Delete(self):
+ """Deletes the entire database spreadsheet from Google Spreadsheets."""
+ entry = self.client._GetDocsClient().Get(
+ r'http://docs.google.com/feeds/documents/private/full/spreadsheet%3A' +
+ self.spreadsheet_key)
+ self.client._GetDocsClient().Delete(entry.GetEditLink().href)
+
+
+class Table(object):
+
+ def __init__(self, name=None, worksheet_entry=None, database_client=None,
+ spreadsheet_key=None, fields=None):
+ self.name = name
+ self.entry = worksheet_entry
+ id_parts = worksheet_entry.id.text.split('/')
+ self.worksheet_id = id_parts[-1]
+ self.spreadsheet_key = spreadsheet_key
+ self.client = database_client
+ self.fields = fields or []
+ if fields:
+ self.SetFields(fields)
+
+ def LookupFields(self):
+ """Queries to find the column names in the first row of the worksheet.
+
+ Useful when you have retrieved the table from the server and you don't
+ know the column names.
+ """
+ if self.entry:
+ first_row_contents = []
+ query = gdata.spreadsheet.service.CellQuery()
+ query.max_row = '1'
+ query.min_row = '1'
+ feed = self.client._GetSpreadsheetsClient().GetCellsFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id, query=query)
+ for entry in feed.entry:
+ first_row_contents.append(entry.content.text)
+ # Get the next set of cells if needed.
+ next_link = feed.GetNextLink()
+ while next_link:
+ feed = self.client._GetSpreadsheetsClient().Get(next_link.href,
+ converter=gdata.spreadsheet.SpreadsheetsCellsFeedFromString)
+ for entry in feed.entry:
+ first_row_contents.append(entry.content.text)
+ next_link = feed.GetNextLink()
+ # Convert the contents of the cells to valid headers.
+ self.fields = ConvertStringsToColumnHeaders(first_row_contents)
+
+ def SetFields(self, fields):
+ """Changes the contents of the cells in the first row of this worksheet.
+
+ Args:
+ fields: list of strings The names in the list comprise the
+ first row of the worksheet. These names are converted into XML
+ tags by the server. To avoid changes during the translation
+ process I recommend using all lowercase alphabetic names. For
+ example ['somelongname', 'theothername']
+ """
+ # TODO: If the table already had fields, we might want to clear out the
+ # current column headers.
+ self.fields = fields
+ i = 0
+ for column_name in fields:
+ i = i + 1
+ # TODO: speed this up by using a batch request to update cells.
+ self.client._GetSpreadsheetsClient().UpdateCell(1, i, column_name,
+ self.spreadsheet_key, self.worksheet_id)
+
+ def Delete(self):
+ """Deletes this worksheet from the spreadsheet."""
+ worksheet = self.client._GetSpreadsheetsClient().GetWorksheetsFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id)
+ self.client._GetSpreadsheetsClient().DeleteWorksheet(
+ worksheet_entry=worksheet)
+
+ def AddRecord(self, data):
+ """Adds a new row to this worksheet.
+
+ Args:
+ data: dict of strings Mapping of string values to column names.
+
+ Returns:
+ Record which represents this row of the spreadsheet.
+ """
+ new_row = self.client._GetSpreadsheetsClient().InsertRow(data,
+ self.spreadsheet_key, wksht_id=self.worksheet_id)
+ return Record(content=data, row_entry=new_row,
+ spreadsheet_key=self.spreadsheet_key, worksheet_id=self.worksheet_id,
+ database_client=self.client)
+
+ def GetRecord(self, row_id=None, row_number=None):
+ """Gets a single record from the worksheet based on row ID or number.
+
+ Args:
+ row_id: The ID for the individual row.
+ row_number: str or int The position of the desired row. Numbering
+ begins at 1, which refers to the second row in the worksheet since
+ the first row is used for column names.
+
+ Returns:
+ Record for the desired row.
+ """
+ if row_id:
+ row_entry = self.client._GetSpreadsheetsClient().GetListFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id, row_id=row_id)
+ return Record(content=None, row_entry=row_entry,
+ spreadsheet_key=self.spreadsheet_key,
+ worksheet_id=self.worksheet_id, database_client=self.client)
+ else:
+ row_query = gdata.spreadsheet.service.ListQuery()
+ row_query.start_index = str(row_number)
+ row_query.max_results = '1'
+ row_feed = self.client._GetSpreadsheetsClient().GetListFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id, query=row_query)
+ if len(row_feed.entry) >= 1:
+ return Record(content=None, row_entry=row_feed.entry[0],
+ spreadsheet_key=self.spreadsheet_key,
+ worksheet_id=self.worksheet_id, database_client=self.client)
+ else:
+ return None
+
+ def GetRecords(self, start_row, end_row):
+ """Gets all rows between the start and end row numbers inclusive.
+
+ Args:
+ start_row: str or int
+ end_row: str or int
+
+ Returns:
+ RecordResultSet for the desired rows.
+ """
+ start_row = int(start_row)
+ end_row = int(end_row)
+ max_rows = end_row - start_row + 1
+ row_query = gdata.spreadsheet.service.ListQuery()
+ row_query.start_index = str(start_row)
+ row_query.max_results = str(max_rows)
+ rows_feed = self.client._GetSpreadsheetsClient().GetListFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id, query=row_query)
+ return RecordResultSet(rows_feed, self.client, self.spreadsheet_key,
+ self.worksheet_id)
+
+ def FindRecords(self, query_string):
+ """Performs a query against the worksheet to find rows which match.
+
+ For details on query string syntax see the section on sq under
+ http://code.google.com/apis/spreadsheets/reference.html#list_Parameters
+
+ Args:
+ query_string: str Examples: 'name == john' to find all rows with john
+ in the name column, '(cost < 19.50 and name != toy) or cost > 500'
+
+ Returns:
+ RecordResultSet with the first group of matches.
+ """
+ row_query = gdata.spreadsheet.service.ListQuery()
+ row_query.sq = query_string
+ matching_feed = self.client._GetSpreadsheetsClient().GetListFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id, query=row_query)
+ return RecordResultSet(matching_feed, self.client,
+ self.spreadsheet_key, self.worksheet_id)
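+
+ # Illustrative sketch (assuming `table` is a Table instance; column names
+ # are made up): structured queries use the list feed's sq syntax described
+ # above.
+ #
+ #   matches = table.FindRecords('cost < 19.50 and name != toy')
+ #   for record in matches:
+ #     print record.content['cost']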
+
+
+class RecordResultSet(list):
+ """A collection of rows which allows fetching of the next set of results.
+
+ The server may not send all rows in the requested range because there are
+ too many. Using this result set you can access the first set of results
+ as if it is a list, then get the next batch (if there are more results) by
+ calling GetNext().
+ """
+
+ def __init__(self, feed, client, spreadsheet_key, worksheet_id):
+ self.client = client
+ self.spreadsheet_key = spreadsheet_key
+ self.worksheet_id = worksheet_id
+ self.feed = feed
+ list.__init__(self)
+ for entry in self.feed.entry:
+ self.append(Record(content=None, row_entry=entry,
+ spreadsheet_key=spreadsheet_key, worksheet_id=worksheet_id,
+ database_client=client))
+
+ def GetNext(self):
+ """Fetches the next batch of rows in the result set.
+
+ Returns:
+ A new RecordResultSet.
+ """
+ next_link = self.feed.GetNextLink()
+ if next_link and next_link.href:
+ new_feed = self.client._GetSpreadsheetsClient().Get(next_link.href,
+ converter=gdata.spreadsheet.SpreadsheetsListFeedFromString)
+ return RecordResultSet(new_feed, self.client, self.spreadsheet_key,
+ self.worksheet_id)
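+
+ # Illustrative sketch (assuming `table` is a Table instance): walking
+ # every batch of records in a table, following continuation links.
+ #
+ #   results = table.GetRecords(1, 100)
+ #   while results:
+ #     for record in results:
+ #       print record.content
+ #     results = results.GetNext()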
+
+
+class Record(object):
+ """Represents one row in a worksheet and provides a dictionary of values.
+
+ Attributes:
+ custom: dict Represents the contents of the row with cell values mapped
+ to column headers.
+ """
+
+ def __init__(self, content=None, row_entry=None, spreadsheet_key=None,
+ worksheet_id=None, database_client=None):
+ """Constructor for a record.
+
+ Args:
+ content: dict of strings Mapping of string values to column names.
+ row_entry: gdata.spreadsheet.SpreadsheetsList The Atom entry
+ representing this row in the worksheet.
+ spreadsheet_key: str The ID of the spreadsheet in which this row
+ belongs.
+ worksheet_id: str The ID of the worksheet in which this row belongs.
+ database_client: DatabaseClient The client which can be used to talk
+ the Google Spreadsheets server to edit this row.
+ """
+ self.entry = row_entry
+ self.spreadsheet_key = spreadsheet_key
+ self.worksheet_id = worksheet_id
+ if row_entry:
+ self.row_id = row_entry.id.text.split('/')[-1]
+ else:
+ self.row_id = None
+ self.client = database_client
+ self.content = content or {}
+ if not content:
+ self.ExtractContentFromEntry(row_entry)
+
+ def ExtractContentFromEntry(self, entry):
+ """Populates the content and row_id based on content of the entry.
+
+ This method is used in the Record's constructor.
+
+ Args:
+ entry: gdata.spreadsheet.SpreadsheetsList The Atom entry
+ representing this row in the worksheet.
+ """
+ self.content = {}
+ if entry:
+ self.row_id = entry.id.text.split('/')[-1]
+ for label, custom in entry.custom.iteritems():
+ self.content[label] = custom.text
+
+ def Push(self):
+ """Send the content of the record to spreadsheets to edit the row.
+
+ All items in the content dictionary will be sent. Items which have been
+ removed from the content may remain in the row. The content member
+ of the record will not be modified so additional fields in the row
+ might be absent from this local copy.
+ """
+ self.entry = self.client._GetSpreadsheetsClient().UpdateRow(self.entry, self.content)
+
+ def Pull(self):
+ """Query Google Spreadsheets to get the latest data from the server.
+
+ Fetches the entry for this row and repopulates the content dictionary
+ with the data found in the row.
+ """
+ if self.row_id:
+ self.entry = self.client._GetSpreadsheetsClient().GetListFeed(
+ self.spreadsheet_key, wksht_id=self.worksheet_id, row_id=self.row_id)
+ self.ExtractContentFromEntry(self.entry)
+
+ def Delete(self):
+ self.client._GetSpreadsheetsClient().DeleteRow(self.entry)
+
+
+def ConvertStringsToColumnHeaders(proposed_headers):
+ """Converts a list of strings to column names which spreadsheets accepts.
+
+ When setting values in a record, the keys which represent column names must
+ fit certain rules. They are all lower case, contain no spaces or special
+ characters. If two columns have the same name after being sanitized, the
+ columns further to the right have _2, _3, _4, etc. appended to them.
+
+ If there are column names which consist of all special characters, or if
+ the column header is blank, an obfuscated value will be used for a column
+ name. This method does not handle blank column names or column names with
+ only special characters.
+ """
+ headers = []
+ for input_string in proposed_headers:
+ # TODO: probably a more efficient way to do this. Perhaps regex.
+ sanitized = input_string.lower().replace('_', '').replace(
+ ':', '').replace(' ', '')
+ # When the same sanitized header appears multiple times in the first row
+ # of a spreadsheet, _n is appended to the name to make it unique.
+ header_count = headers.count(sanitized)
+ if header_count > 0:
+ headers.append('%s_%i' % (sanitized, header_count+1))
+ else:
+ headers.append(sanitized)
+ return headers
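+
+# Illustrative sketch of the sanitization above (headers are made up):
+#
+#   ConvertStringsToColumnHeaders(['Phone Number', 'phone_number'])
+#   # returns ['phonenumber', 'phonenumber_2']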
diff --git a/python/gdata/spreadsheets/__init__.py b/python/gdata/spreadsheets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/python/gdata/spreadsheets/client.py b/python/gdata/spreadsheets/client.py
new file mode 100644
index 0000000..e270c3d
--- /dev/null
+++ b/python/gdata/spreadsheets/client.py
@@ -0,0 +1,452 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains a client to communicate with the Google Spreadsheets servers.
+
+For documentation on the Spreadsheets API, see:
+http://code.google.com/apis/spreadsheets/
+"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import gdata.client
+import gdata.gauth
+import gdata.spreadsheets.data
+import atom.data
+import atom.http_core
+
+
+SPREADSHEETS_URL = ('https://spreadsheets.google.com/feeds/spreadsheets'
+ '/private/full')
+WORKSHEETS_URL = ('https://spreadsheets.google.com/feeds/worksheets/'
+ '%s/private/full')
+WORKSHEET_URL = ('https://spreadsheets.google.com/feeds/worksheets/'
+ '%s/private/full/%s')
+TABLES_URL = 'https://spreadsheets.google.com/feeds/%s/tables'
+RECORDS_URL = 'https://spreadsheets.google.com/feeds/%s/records/%s'
+RECORD_URL = 'https://spreadsheets.google.com/feeds/%s/records/%s/%s'
+
+
+class SpreadsheetsClient(gdata.client.GDClient):
+ api_version = '3'
+ auth_service = 'wise'
+ auth_scopes = gdata.gauth.AUTH_SCOPES['wise']
+ ssl = True
+
+ def get_spreadsheets(self, auth_token=None,
+ desired_class=gdata.spreadsheets.data.SpreadsheetsFeed,
+ **kwargs):
+ """Obtains a feed with the spreadsheets belonging to the current user.
+
+ Args:
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned. Defaults to
+ gdata.spreadsheets.data.SpreadsheetsFeed.
+ """
+ return self.get_feed(SPREADSHEETS_URL, auth_token=auth_token,
+ desired_class=desired_class, **kwargs)
+
+ GetSpreadsheets = get_spreadsheets
+
+ def get_worksheets(self, spreadsheet_key, auth_token=None,
+ desired_class=gdata.spreadsheets.data.WorksheetsFeed,
+ **kwargs):
+ """Finds the worksheets within a given spreadsheet.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned. Defaults to
+ gdata.spreadsheets.data.WorksheetsFeed.
+ """
+ return self.get_feed(WORKSHEETS_URL % spreadsheet_key,
+ auth_token=auth_token, desired_class=desired_class,
+ **kwargs)
+
+ GetWorksheets = get_worksheets
+
+ def add_worksheet(self, spreadsheet_key, title, rows, cols,
+ auth_token=None, **kwargs):
+ """Creates a new worksheet entry in the spreadsheet.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ title: str, The title to be used for the worksheet.
+ rows: str or int, The number of rows this worksheet should start with.
+ cols: str or int, The number of columns this worksheet should start with.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ """
+ new_worksheet = gdata.spreadsheets.data.WorksheetEntry(
+ title=atom.data.Title(text=title),
+ row_count=gdata.spreadsheets.data.RowCount(text=str(rows)),
+ col_count=gdata.spreadsheets.data.ColCount(text=str(cols)))
+ return self.post(new_worksheet, WORKSHEETS_URL % spreadsheet_key,
+ auth_token=auth_token, **kwargs)
+
+ AddWorksheet = add_worksheet
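+
+ # Illustrative sketch (title and size are placeholders; `client` is an
+ # authorized SpreadsheetsClient):
+ #
+ #   client.add_worksheet(spreadsheet_key, 'expenses', 10, 5)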
+
+ def get_worksheet(self, spreadsheet_key, worksheet_id,
+ desired_class=gdata.spreadsheets.data.WorksheetEntry,
+ auth_token=None, **kwargs):
+ """Retrieves a single worksheet.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ worksheet_id: str, The unique ID for the worksheet within the desired
+ spreadsheet.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned. Defaults to
+ gdata.spreadsheets.data.WorksheetEntry.
+
+ """
+ return self.get_entry(WORKSHEET_URL % (spreadsheet_key, worksheet_id,),
+ auth_token=auth_token, desired_class=desired_class,
+ **kwargs)
+
+ GetWorksheet = get_worksheet
+
+ def add_table(self, spreadsheet_key, title, summary, worksheet_name,
+ header_row, num_rows, start_row, insertion_mode,
+ column_headers, auth_token=None, **kwargs):
+ """Creates a new table within the worksheet.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ title: str, The title for the new table within a worksheet.
+ summary: str, A description of the table.
+ worksheet_name: str The name of the worksheet in which this table
+ should live.
+ header_row: int or str, The number of the row in the worksheet which
+ will contain the column names for the data in this table.
+ num_rows: int or str, The number of adjacent rows in this table.
+ start_row: int or str, The number of the row at which the data begins.
+ insertion_mode: str
+ column_headers: dict of strings, maps the column letters (A, B, C) to
+ the desired name which will be viewable in the
+ worksheet.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ """
+ data = gdata.spreadsheets.data.Data(
+ insertion_mode=insertion_mode, num_rows=str(num_rows),
+ start_row=str(start_row))
+ for index, name in column_headers.iteritems():
+ data.column.append(gdata.spreadsheets.data.Column(
+ index=index, name=name))
+ new_table = gdata.spreadsheets.data.Table(
+ title=atom.data.Title(text=title), summary=atom.data.Summary(summary),
+ worksheet=gdata.spreadsheets.data.Worksheet(name=worksheet_name),
+ header=gdata.spreadsheets.data.Header(row=str(header_row)), data=data)
+ return self.post(new_table, TABLES_URL % spreadsheet_key,
+ auth_token=auth_token, **kwargs)
+
+ AddTable = add_table
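+
+ # Usage sketch (illustrative values): the insertion mode constants live in
+ # gdata.spreadsheets.data (INSERT_MODE / OVERWRITE_MODE), e.g.
+ #   client.add_table('spreadsheet_key', 'Expenses', 'Monthly expenses',
+ #                    'Sheet1', header_row=1, num_rows=10, start_row=2,
+ #                    insertion_mode=gdata.spreadsheets.data.INSERT_MODE,
+ #                    column_headers={'A': 'name', 'B': 'cost'})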
+
+ def get_tables(self, spreadsheet_key,
+ desired_class=gdata.spreadsheets.data.TablesFeed,
+ auth_token=None, **kwargs):
+ """Retrieves a feed listing the tables in this spreadsheet.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned. Defaults to
+ gdata.spreadsheets.data.TablesFeed.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ """
+ return self.get_feed(TABLES_URL % spreadsheet_key,
+ desired_class=desired_class, auth_token=auth_token,
+ **kwargs)
+
+ GetTables = get_tables
+
+ def add_record(self, spreadsheet_key, table_id, fields,
+ title=None, auth_token=None, **kwargs):
+ """Adds a new row to the table.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ table_id: str, The ID of the table within the worksheet which should
+ receive this new record. The table ID can be found using the
+ get_table_id method of a gdata.spreadsheets.data.Table.
+ fields: dict of strings mapping column names to values.
+ title: str (optional), The title for this row.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ """
+ new_record = gdata.spreadsheets.data.Record()
+ if title is not None:
+ new_record.title = atom.data.Title(text=title)
+ for name, value in fields.iteritems():
+ new_record.field.append(gdata.spreadsheets.data.Field(
+ name=name, text=value))
+ return self.post(new_record, RECORDS_URL % (spreadsheet_key, table_id),
+ auth_token=auth_token, **kwargs)
+
+ AddRecord = add_record
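+
+ # Usage sketch (illustrative column names): the table ID comes from an
+ # existing gdata.spreadsheets.data.Table entry, e.g.
+ #   table_id = table.get_table_id()
+ #   client.add_record('spreadsheet_key', table_id,
+ #                     {'name': 'Stapler', 'cost': '7.50'})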
+
+ def get_records(self, spreadsheet_key, table_id,
+ desired_class=gdata.spreadsheets.data.RecordsFeed,
+ auth_token=None, **kwargs):
+ """Retrieves the records in a table.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ table_id: str, The ID of the table within the worksheet whose records
+ we would like to fetch. The table ID can be found using the
+ get_table_id method of a gdata.spreadsheets.data.Table.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned. Defaults to
+ gdata.spreadsheets.data.RecordsFeed.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ """
+ return self.get_feed(RECORDS_URL % (spreadsheet_key, table_id),
+ desired_class=desired_class, auth_token=auth_token,
+ **kwargs)
+
+ GetRecords = get_records
+
+ def get_record(self, spreadsheet_key, table_id, record_id,
+ desired_class=gdata.spreadsheets.data.Record,
+ auth_token=None, **kwargs):
+ """Retrieves a single record from the table.
+
+ Args:
+ spreadsheet_key: str, The unique ID of this containing spreadsheet. This
+ can be the ID from the URL or as provided in a
+ Spreadsheet entry.
+ table_id: str, The ID of the table within the worksheet whose records
+ we would like to fetch. The table ID can be found using the
+ get_table_id method of a gdata.spreadsheets.data.Table.
+ record_id: str, The ID of the record within this table which we want to
+ fetch. You can find the record ID using get_record_id() on
+ an instance of the gdata.spreadsheets.data.Record class.
+ desired_class: class descended from atom.core.XmlElement to which a
+ successful response should be converted. If there is no
+ converter function specified (converter=None) then the
+ desired_class will be used in calling the
+ atom.core.parse function. If neither
+ the desired_class nor the converter is specified, an
+ HTTP response object will be returned. Defaults to
+ gdata.spreadsheets.data.Record.
+ auth_token: An object which sets the Authorization HTTP header in its
+ modify_request method. Recommended classes include
+ gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
+ among others. Represents the current user. Defaults to None
+ and if None, this method will look for a value in the
+ auth_token member of SpreadsheetsClient.
+ """
+ return self.get_entry(RECORD_URL % (spreadsheet_key, table_id, record_id),
+ desired_class=desired_class, auth_token=auth_token,
+ **kwargs)
+
+ GetRecord = get_record
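+
+ # Usage sketch (illustrative column name): a fetched record exposes its
+ # fields by name, e.g.
+ #   record = client.get_record('spreadsheet_key', table_id, record_id)
+ #   print record.value_for_name('cost')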
+
+
+class SpreadsheetQuery(gdata.client.Query):
+
+ def __init__(self, title=None, title_exact=None, **kwargs):
+ """Adds Spreadsheets feed query parameters to a request.
+
+ Args:
+ title: str Specifies the search terms for the title of a document.
+ Used without title-exact, this parameter submits partial
+ (not exact) title matches.
+ title_exact: str Specifies whether the title query should be taken as an
+ exact string. Meaningless without title. Possible values are
+ 'true' and 'false'.
+ """
+ gdata.client.Query.__init__(self, **kwargs)
+ self.title = title
+ self.title_exact = title_exact
+
+ def modify_request(self, http_request):
+ gdata.client._add_query_param('title', self.title, http_request)
+ gdata.client._add_query_param('title-exact', self.title_exact,
+ http_request)
+ gdata.client.Query.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
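+
+ # Usage sketch (illustrative): build a title query; modify_request (above)
+ # adds the title/title-exact parameters to the outgoing request.
+ #   query = SpreadsheetQuery(title='Budget', title_exact='true')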
+
+
+class WorksheetQuery(SpreadsheetQuery):
+ pass
+
+
+class ListQuery(gdata.client.Query):
+
+ def __init__(self, order_by=None, reverse=None, sq=None, **kwargs):
+ """Adds List-feed specific query parameters to a request.
+
+ Args:
+ order_by: str Specifies what column to use in ordering the entries in
+ the feed. By position (the default): 'position' returns
+ rows in the order in which they appear in the GUI. Row 1, then
+ row 2, then row 3, and so on. By column:
+ 'column:columnName' sorts rows in ascending order based on the
+ values in the column with the given columnName, where
+ columnName is the value in the header row for that column.
+ reverse: str Specifies whether to sort in descending or ascending order.
+ Reverses default sort order: 'true' results in a descending
+ sort; 'false' (the default) results in an ascending sort.
+ sq: str Structured query on the full text in the worksheet.
+ [columnName][binaryOperator][value]
+ Supported binaryOperators are:
+ - (), for overriding order of operations
+ - = or ==, for strict equality
+ - <> or !=, for strict inequality
+ - and or &&, for boolean and
+ - or or ||, for boolean or
+ """
+ gdata.client.Query.__init__(self, **kwargs)
+ self.order_by = order_by
+ self.reverse = reverse
+ self.sq = sq
+
+ def modify_request(self, http_request):
+ gdata.client._add_query_param('orderby', self.order_by, http_request)
+ gdata.client._add_query_param('reverse', self.reverse, http_request)
+ gdata.client._add_query_param('sq', self.sq, http_request)
+ gdata.client.Query.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
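+
+ # Usage sketch (illustrative column names and values):
+ #   query = ListQuery(order_by='column:cost', reverse='true',
+ #                     sq='status = "open" or priority = "high"')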
+
+
+class TableQuery(ListQuery):
+ pass
+
+
+class CellQuery(gdata.client.Query):
+
+ def __init__(self, min_row=None, max_row=None, min_col=None, max_col=None,
+ range=None, return_empty=None, **kwargs):
+ """Adds Cells-feed specific query parameters to a request.
+
+ Args:
+ min_row: str or int Positional number of minimum row returned in query.
+ max_row: str or int Positional number of maximum row returned in query.
+ min_col: str or int Positional number of minimum column returned in query.
+ max_col: str or int Positional number of maximum column returned in query.
+ range: str A single cell or a range of cells. Use standard spreadsheet
+ cell-range notations, using a colon to separate start and end of
+ range. Examples:
+ - 'A1' and 'R1C1' both specify only cell A1.
+ - 'D1:F3' and 'R1C4:R3C6' both specify the rectangle of cells with
+ corners at D1 and F3.
+ return_empty: str If 'true' then empty cells will be returned in the feed.
+ If omitted, the default is 'false'.
+ """
+ gdata.client.Query.__init__(self, **kwargs)
+ self.min_row = min_row
+ self.max_row = max_row
+ self.min_col = min_col
+ self.max_col = max_col
+ self.range = range
+ self.return_empty = return_empty
+
+ def modify_request(self, http_request):
+ gdata.client._add_query_param('min-row', self.min_row, http_request)
+ gdata.client._add_query_param('max-row', self.max_row, http_request)
+ gdata.client._add_query_param('min-col', self.min_col, http_request)
+ gdata.client._add_query_param('max-col', self.max_col, http_request)
+ gdata.client._add_query_param('range', self.range, http_request)
+ gdata.client._add_query_param('return-empty', self.return_empty,
+ http_request)
+ gdata.client.Query.modify_request(self, http_request)
+
+ ModifyRequest = modify_request
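+
+ # Usage sketch (illustrative): fetch a rectangular block of cells,
+ # including the empty ones:
+ #   query = CellQuery(range='D1:F3', return_empty='true')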
diff --git a/python/gdata/spreadsheets/data.py b/python/gdata/spreadsheets/data.py
new file mode 100644
index 0000000..efb729f
--- /dev/null
+++ b/python/gdata/spreadsheets/data.py
@@ -0,0 +1,317 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# This module is used for version 2 of the Google Data APIs.
+
+
+"""Provides classes and constants for the XML in the Google Spreadsheets API.
+
+Documentation for the raw XML which these classes represent can be found here:
+http://code.google.com/apis/spreadsheets/docs/3.0/reference.html#Elements
+"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import gdata.data
+
+
+GS_TEMPLATE = '{http://schemas.google.com/spreadsheets/2006}%s'
+GSX_NAMESPACE = 'http://schemas.google.com/spreadsheets/2006/extended'
+
+
+INSERT_MODE = 'insert'
+OVERWRITE_MODE = 'overwrite'
+
+
+WORKSHEETS_REL = 'http://schemas.google.com/spreadsheets/2006#worksheetsfeed'
+
+
+class Error(Exception):
+ pass
+
+
+class FieldMissing(Exception):
+ pass
+
+
+class HeaderNotSet(Error):
+ """The desired column header had no value for the row in the list feed."""
+
+
+class Cell(atom.core.XmlElement):
+ """The gs:cell element.
+
+ A cell in the worksheet. The gs:cell element can appear only as a child
+ of an entry element.
+ """
+ _qname = GS_TEMPLATE % 'cell'
+ col = 'col'
+ input_value = 'inputValue'
+ numeric_value = 'numericValue'
+ row = 'row'
+
+
+class ColCount(atom.core.XmlElement):
+ """The gs:colCount element.
+
+ Indicates the number of columns in the worksheet, including columns that
+ contain only empty cells. The gs:colCount element can appear as a child
+ of a worksheet entry.
+ """
+ _qname = GS_TEMPLATE % 'colCount'
+
+
+class Field(atom.core.XmlElement):
+ """The gs:field element.
+
+ A field is a single cell within a record. Contained in an entry element.
+ """
+ _qname = GS_TEMPLATE % 'field'
+ index = 'index'
+ name = 'name'
+
+
+class Column(Field):
+ """The gs:column element."""
+ _qname = GS_TEMPLATE % 'column'
+
+
+class Data(atom.core.XmlElement):
+ """The gs:data element.
+
+ A data region of a table. Contained in an entry element.
+ """
+ _qname = GS_TEMPLATE % 'data'
+ column = [Column]
+ insertion_mode = 'insertionMode'
+ num_rows = 'numRows'
+ start_row = 'startRow'
+
+
+class Header(atom.core.XmlElement):
+ """The gs:header element.
+
+ Indicates which row is the header row. Contained in an entry element.
+ """
+ _qname = GS_TEMPLATE % 'header'
+ row = 'row'
+
+
+class RowCount(atom.core.XmlElement):
+ """The gs:rowCount element.
+
+ Indicates the total number of rows in the worksheet, including rows that
+ contain only empty cells. The gs:rowCount element can appear as a
+ child of a worksheet entry.
+ """
+ _qname = GS_TEMPLATE % 'rowCount'
+
+
+class Worksheet(atom.core.XmlElement):
+ """The gs:worksheet element.
+
+ The worksheet where the table lives. Contained in an entry element.
+ """
+ _qname = GS_TEMPLATE % 'worksheet'
+ name = 'name'
+
+
+class Spreadsheet(gdata.data.GDEntry):
+ """An Atom entry which represents a Google Spreadsheet."""
+
+ def find_worksheets_feed(self):
+ return self.find_url(WORKSHEETS_REL)
+
+ FindWorksheetsFeed = find_worksheets_feed
+
+
+class SpreadsheetsFeed(gdata.data.GDFeed):
+ """An Atom feed listing a user's Google Spreadsheets."""
+ entry = [Spreadsheet]
+
+
+class WorksheetEntry(gdata.data.GDEntry):
+ """An Atom entry representing a single worksheet in a spreadsheet."""
+ row_count = RowCount
+ col_count = ColCount
+
+
+class WorksheetsFeed(gdata.data.GDFeed):
+ """A feed containing the worksheets in a single spreadsheet."""
+ entry = [WorksheetEntry]
+
+
+class Table(gdata.data.GDEntry):
+ """An Atom entry that represents a subsection of a worksheet.
+
+ A table allows you to treat part or all of a worksheet somewhat like a
+ table in a database, that is, as a set of structured data items. Tables
+ don't exist until you explicitly create them; before you can use a table
+ feed, you have to explicitly define where the table data comes from.
+ """
+ data = Data
+ header = Header
+ worksheet = Worksheet
+
+ def get_table_id(self):
+ if self.id.text:
+ return self.id.text.split('/')[-1]
+ return None
+
+ GetTableId = get_table_id
+
+
+class TablesFeed(gdata.data.GDFeed):
+ """An Atom feed containing the tables defined within a worksheet."""
+ entry = [Table]
+
+
+class Record(gdata.data.GDEntry):
+ """An Atom entry representing a single record in a table.
+
+ Note that the order of items in each record is the same as the order of
+ columns in the table definition, which may not match the order of
+ columns in the GUI.
+ """
+ field = [Field]
+
+ def value_for_index(self, column_index):
+ for field in self.field:
+ if field.index == column_index:
+ return field.text
+ raise FieldMissing('There is no field for %s' % column_index)
+
+ ValueForIndex = value_for_index
+
+ def value_for_name(self, name):
+ for field in self.field:
+ if field.name == name:
+ return field.text
+ raise FieldMissing('There is no field for %s' % name)
+
+ ValueForName = value_for_name
+
+ def get_record_id(self):
+ if self.id.text:
+ return self.id.text.split('/')[-1]
+ return None
+
+
+class RecordsFeed(gdata.data.GDFeed):
+ """An Atom feed containing the individual records in a table."""
+ entry = [Record]
+
+
+class ListRow(atom.core.XmlElement):
+ """A gsx column value within a row.
+
+ The local tag in the _qname is blank and must be set to the column
+ name. For example, when adding to a ListEntry, do:
+ col_value = ListRow(text='something')
+ col_value._qname = col_value._qname % 'mycolumnname'
+ """
+ _qname = '{http://schemas.google.com/spreadsheets/2006/extended}%s'
+
+
+class ListEntry(gdata.data.GDEntry):
+ """An Atom entry representing a worksheet row in the list feed.
+
+ The value for a particular column can be read and set using
+ x.get_value('columnheader') and x.set_value('columnheader', 'value').
+ See also the explanation of column names in the ListFeed class.
+ """
+
+ def get_value(self, column_name):
+ """Returns the displayed text for the desired column in this row.
+
+ The formula or input which generated the displayed value is not accessible
+ through the list feed; to see the user's input, use the cells feed.
+
+ If a column is not present in this spreadsheet, or there is no value
+ for a column in this row, this method will return None.
+ """
+ values = self.get_elements(column_name, GSX_NAMESPACE)
+ if len(values) == 0:
+ return None
+ return values[0].text
+
+ def set_value(self, column_name, value):
+ """Changes the value of cell in this row under the desired column name.
+
+ Warning: if the cell contained a formula, it will be wiped out by setting
+ the value using the list feed since the list feed only works with
+ displayed values.
+
+ No client-side checking is performed on the column_name; you need to
+ ensure that the column_name is the local tag name of the gsx element for
+ the column. For example, the column_name will not contain special
+ characters, spaces, or uppercase letters.
+ """
+ # Try to find the column in this row to change an existing value.
+ values = self.get_elements(column_name, GSX_NAMESPACE)
+ if len(values) > 0:
+ values[0].text = value
+ else:
+ # There is no value in this row for the desired column, so add a new
+ # gsx:column_name element.
+ new_value = ListRow(text=value)
+ new_value._qname = new_value._qname % (column_name,)
+ self._other_elements.append(new_value)
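+
+ # Usage sketch (illustrative column names): reading and updating a row
+ # fetched from the list feed, where column names are the lower-cased,
+ # space-stripped header values:
+ #   row = list_feed.entry[0]
+ #   email = row.get_value('e-mail')
+ #   row.set_value('status', 'contacted')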
+
+
+class ListsFeed(gdata.data.GDFeed):
+ """An Atom feed in which each entry represents a row in a worksheet.
+
+ The first row in the worksheet is used as the column names for the values
+ in each row. If a header cell is empty, then a unique column ID is used
+ for the gsx element name.
+
+ Spaces in a column name are removed from the name of the corresponding
+ gsx element.
+
+ Caution: The columnNames are case-insensitive. For example, if you see
+ a gsx:e-mail element in a feed, you can't know whether the column
+ heading in the original worksheet was "e-mail" or "E-Mail".
+
+ Note: If two or more columns have the same name, then subsequent columns
+ of the same name have _n appended to the columnName. For example, if the
+ first column name is "e-mail", followed by columns named "E-Mail" and
+ "E-mail", then the columnNames will be gsx:e-mail, gsx:e-mail_2, and
+ gsx:e-mail_3 respectively.
+ """
+ entry = [ListEntry]
+
+
+class CellEntry(gdata.data.BatchEntry):
+ """An Atom entry representing a single cell in a worksheet."""
+ cell = Cell
+
+
+class CellsFeed(gdata.data.BatchFeed):
+ """An Atom feed containing one entry per cell in a worksheet.
+
+ The cell feed supports batch operations; you can send multiple cell
+ operations in one HTTP request.
+ """
+ entry = [CellEntry]
+
+ def batch_set_cell(self, row, col, input):
+ # Placeholder for a batch cell-update helper; not implemented here.
+ pass
+
diff --git a/python/gdata/test_config.py b/python/gdata/test_config.py
new file mode 100644
index 0000000..5e597eb
--- /dev/null
+++ b/python/gdata/test_config.py
@@ -0,0 +1,421 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import unittest
+import getpass
+import inspect
+import atom.mock_http_core
+import gdata.gauth
+
+
+"""Loads configuration for tests which connect to Google servers.
+
+Settings used in tests are stored in a ConfigCollection instance in this
+module called options. If your test needs to get a test related setting,
+use
+
+import gdata.test_config
+option_value = gdata.test_config.options.get_value('x')
+
+The above will check the command line for an '--x' argument, and if not
+found will either use the default value for 'x' or prompt the user to enter
+one.
+
+Your test can override the value specified by the user by performing:
+
+gdata.test_config.options.set_value('x', 'y')
+
+If your test uses a new option which you would like to allow the user to
+specify on the command line or via a prompt, you can use the register_option
+method as follows:
+
+gdata.test_config.options.register(
+ 'option_name', 'Prompt shown to the user', secret=False,  # True for passwords.
+ description='The description of the option, shown when help is requested.',
+ default='A default value; provide one only if the user should not be prompted.')
+"""
+
+
+class Option(object):
+
+ def __init__(self, name, prompt, secret=False, description=None, default=None):
+ self.name = name
+ self.prompt = prompt
+ self.secret = secret
+ self.description = description
+ self.default = default
+
+ def get(self):
+ value = self.default
+ # Check for a command line parameter.
+ for i in xrange(len(sys.argv)):
+ if sys.argv[i].startswith('--%s=' % self.name):
+ value = sys.argv[i].split('=')[1]
+ elif sys.argv[i] == '--%s' % self.name:
+ value = sys.argv[i + 1]
+ # If the param was not on the command line, ask the user to input the
+ # value.
+ # In order for this to prompt the user, the default value for the option
+ # must be None.
+ if value is None:
+ prompt = '%s: ' % self.prompt
+ if self.secret:
+ value = getpass.getpass(prompt)
+ else:
+ print 'You can specify this on the command line using --%s' % self.name
+ value = raw_input(prompt)
+ return value
+
+
+class ConfigCollection(object):
+
+ def __init__(self, options=None):
+ self.options = options or {}
+ self.values = {}
+
+ def register_option(self, option):
+ self.options[option.name] = option
+
+ def register(self, *args, **kwargs):
+ self.register_option(Option(*args, **kwargs))
+
+ def get_value(self, option_name):
+ if option_name in self.values:
+ return self.values[option_name]
+ value = self.options[option_name].get()
+ if value is not None:
+ self.values[option_name] = value
+ return value
+
+ def set_value(self, option_name, value):
+ self.values[option_name] = value
+
+ def render_usage(self):
+ message_parts = []
+ for opt_name, option in self.options.iteritems():
+ message_parts.append('--%s: %s' % (opt_name, option.description))
+ return '\n'.join(message_parts)
+
+
+options = ConfigCollection()
+
+
+# Register the default options.
+options.register(
+ 'username',
+ 'Please enter the email address of your test account',
+ description=('The email address you want to sign in with. '
+ 'Make sure this is a test account as these tests may edit'
+ ' or delete data.'))
+options.register(
+ 'password',
+ 'Please enter the password for your test account',
+ secret=True, description='The test account password.')
+options.register(
+ 'clearcache',
+ 'Delete cached data? (enter true or false)',
+ description=('If set to true, any temporary files which cache test'
+ ' requests and responses will be deleted.'),
+ default='true')
+options.register(
+ 'savecache',
+ 'Save requests and responses in a temporary file? (enter true or false)',
+ description=('If set to true, requests to the server and responses will'
+ ' be saved in temporary files.'),
+ default='false')
+options.register(
+ 'runlive',
+ 'Run the live tests which contact the server? (enter true or false)',
+ description=('If set to true, the tests will make real HTTP requests to'
+ ' the servers. This slows down test execution and may'
+ ' modify the users data, be sure to use a test account.'),
+ default='true')
+options.register(
+ 'ssl',
+ 'Run the live tests over SSL (enter true or false)',
+ description='If set to true, all tests will be performed over HTTPS (SSL)',
+ default='false')
+options.register(
+ 'appsusername',
+ 'Please enter the email address of your test Apps domain account',
+ description=('The email address you want to sign in with. '
+ 'Make sure this is a test account on your Apps domain as '
+ 'these tests may edit or delete data.'))
+options.register(
+ 'appspassword',
+ 'Please enter the password for your test Apps domain account',
+ secret=True, description='The test Apps account password.')
+
+# Other options which may be used if needed.
+BLOG_ID_OPTION = Option(
+ 'blogid',
+ 'Please enter the ID of your test blog',
+ description=('The blog ID for the blog which should have test posts added'
+ ' to it. Example 7682659670455539811'))
+TEST_IMAGE_LOCATION_OPTION = Option(
+ 'imgpath',
+ 'Please enter the full path to a test image to upload',
+ description=('This test image will be uploaded to a service which'
+ ' accepts a media file, it must be a jpeg.'))
+SPREADSHEET_ID_OPTION = Option(
+ 'spreadsheetid',
+ 'Please enter the ID of a spreadsheet to use in these tests',
+ description=('The spreadsheet ID for the spreadsheet which should be'
+ ' modified by these tests.'))
+APPS_DOMAIN_OPTION = Option(
+ 'appsdomain',
+ 'Please enter your Google Apps domain',
+ description=('The domain the Google Apps is hosted on or leave blank'
+ ' if n/a'))
+SITES_NAME_OPTION = Option(
+ 'sitename',
+ 'Please enter name of your Google Site',
+ description='The webspace name of the Site found in its URL.')
+PROJECT_NAME_OPTION = Option(
+ 'project_name',
+ 'Please enter the name of your project hosting project',
+ description=('The name of the project which should have test issues added'
+ ' to it. Example gdata-python-client'))
+ISSUE_ASSIGNEE_OPTION = Option(
+ 'issue_assignee',
+ 'Enter the email address of the target owner of the updated issue.',
+ description=('The email address of the user who will become the owner of'
+ ' a created issue. Example testuser2@gmail.com'))
+GA_TABLE_ID = Option(
+ 'table_id',
+ 'Enter the Table ID of the Google Analytics profile to test',
+ description=('The Table ID of the Google Analytics profile to test.'
+ ' Example ga:1174'))
+TARGET_USERNAME_OPTION = Option(
+ 'targetusername',
+ 'Please enter the username (without domain) of the user which will be'
+ ' affected by the tests',
+ description=('The username of the user to be tested'))
+YT_DEVELOPER_KEY_OPTION = Option(
+ 'developerkey',
+ 'Please enter your YouTube developer key',
+ description=('The YouTube developer key for your account'))
+YT_CLIENT_ID_OPTION = Option(
+ 'clientid',
+ 'Please enter your YouTube client ID',
+ description=('The YouTube client ID for your account'))
+YT_VIDEO_ID_OPTION = Option(
+ 'videoid',
+ 'Please enter the ID of a YouTube video you uploaded',
+ description=('The video ID of a YouTube video uploaded to your account'))
+
+
+# Functions to inject a cachable HTTP client into a service client.
+def configure_client(client, case_name, service_name, use_apps_auth=False):
+ """Sets up a mock client which will reuse a saved session.
+
+ Should be called during setUp of each unit test.
+
+ Handles authentication to allow the GDClient to make requests which
+ require an auth header.
+
+ Args:
+ client: a gdata.GDClient whose http_client member should be replaced
+ with an atom.mock_http_core.MockHttpClient so that repeated
+ executions can use cached responses instead of contacting
+ the server.
+ case_name: str The name of the test case class. Examples: 'BloggerTest',
+ 'ContactsTest'. Used to save a session
+ for the ClientLogin auth token request, so the case_name
+ should be reused if and only if the same username, password,
+ and service are being used.
+ service_name: str The service name as used for ClientLogin to identify
+ the Google Data API being accessed. Example: 'blogger',
+ 'wise', etc.
+ use_apps_auth: bool (optional) If set to True, use appsusername and
+ appspassword command-line args instead of username and
+ password respectively.
+ """
+ # Use a mock HTTP client which will record and replay the HTTP traffic
+ # from these tests.
+ client.http_client = atom.mock_http_core.MockHttpClient()
+ client.http_client.cache_case_name = case_name
+ # Getting the auth token only needs to be done once in the course of test
+ # runs.
+ auth_token_key = '%s_auth_token' % service_name
+ if (auth_token_key not in options.values
+ and options.get_value('runlive') == 'true'):
+ client.http_client.cache_test_name = 'client_login'
+ cache_name = client.http_client.get_cache_file_name()
+ if options.get_value('clearcache') == 'true':
+ client.http_client.delete_session(cache_name)
+ client.http_client.use_cached_session(cache_name)
+ if not use_apps_auth:
+ username = options.get_value('username')
+ password = options.get_value('password')
+ else:
+ username = options.get_value('appsusername')
+ password = options.get_value('appspassword')
+ auth_token = client.request_client_login_token(username, password,
+ case_name, service=service_name)
+ options.values[auth_token_key] = gdata.gauth.token_to_blob(auth_token)
+ client.http_client.close_session()
+ # Allow a config auth_token of False to prevent the client's auth header
+ # from being modified.
+ if auth_token_key in options.values:
+ client.auth_token = gdata.gauth.token_from_blob(
+ options.values[auth_token_key])
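+
+# Usage sketch (hypothetical test class and case name): a typical setUp is
+#   class SpreadsheetsLiveTest(unittest.TestCase):
+#     def setUp(self):
+#       self.client = gdata.spreadsheets.client.SpreadsheetsClient()
+#       gdata.test_config.configure_client(
+#           self.client, 'SpreadsheetsLiveTest', 'wise')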
+
+
+def configure_cache(client, test_name):
+ """Loads or begins a cached session to record HTTP traffic.
+
+ Should be called at the beginning of each test method.
+
+ Args:
+ client: a gdata.GDClient whose http_client member has been replaced
+ with an atom.mock_http_core.MockHttpClient so that repeated
+ executions can use cached responses instead of contacting
+ the server.
+ test_name: str The name of this test method. Examples:
+ 'TestClass.test_x_works', 'TestClass.test_crud_operations'.
+ This is used to name the recording of the HTTP requests and
+ responses, so it should be unique to each test method in the
+ test case.
+ """
+ # Auth token is obtained in configure_client which is called as part of
+ # setUp.
+ client.http_client.cache_test_name = test_name
+ cache_name = client.http_client.get_cache_file_name()
+ if options.get_value('clearcache') == 'true':
+ client.http_client.delete_session(cache_name)
+ client.http_client.use_cached_session(cache_name)
+
+
+def close_client(client):
+ """Saves the recorded responses to a temp file if the configuration allows.
+
+ This should be called in the unit test's tearDown method.
+
+ Checks to see if the 'savecache' option is set to 'true', to make sure we
+ only save sessions to repeat if the user desires.
+ """
+ if client and options.get_value('savecache') == 'true':
+ # If this was a live request, save the recording.
+ client.http_client.close_session()
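+
+# Usage sketch (hypothetical test names): each test method then brackets
+# its HTTP traffic like this, with close_client called from tearDown:
+#   def test_add_worksheet(self):
+#     gdata.test_config.configure_cache(
+#         self.client, 'SpreadsheetsLiveTest.test_add_worksheet')
+#     ...
+#   def tearDown(self):
+#     gdata.test_config.close_client(self.client)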
+
+
+def configure_service(service, case_name, service_name):
+ """Sets up a mock GDataService v1 client to reuse recorded sessions.
+
+ Should be called during setUp of each unit test. This is a duplicate of
+ configure_client, modified to handle old v1 service classes.
+ """
+ service.http_client.v2_http_client = atom.mock_http_core.MockHttpClient()
+ service.http_client.v2_http_client.cache_case_name = case_name
+ # Getting the auth token only needs to be done once in the course of test
+ # runs.
+ auth_token_key = 'service_%s_auth_token' % service_name
+ if (auth_token_key not in options.values
+ and options.get_value('runlive') == 'true'):
+ service.http_client.v2_http_client.cache_test_name = 'client_login'
+ cache_name = service.http_client.v2_http_client.get_cache_file_name()
+ if options.get_value('clearcache') == 'true':
+ service.http_client.v2_http_client.delete_session(cache_name)
+ service.http_client.v2_http_client.use_cached_session(cache_name)
+ service.ClientLogin(options.get_value('username'),
+ options.get_value('password'),
+ service=service_name, source=case_name)
+ options.values[auth_token_key] = service.GetClientLoginToken()
+ service.http_client.v2_http_client.close_session()
+ if auth_token_key in options.values:
+ service.SetClientLoginToken(options.values[auth_token_key])
+
+
+def configure_service_cache(service, test_name):
+ """Loads or starts a session recording for a v1 Service object.
+
+ Duplicates the behavior of configure_cache, but the target for this
+ function is a v1 Service object instead of a v2 Client.
+ """
+ service.http_client.v2_http_client.cache_test_name = test_name
+ cache_name = service.http_client.v2_http_client.get_cache_file_name()
+ if options.get_value('clearcache') == 'true':
+ service.http_client.v2_http_client.delete_session(cache_name)
+ service.http_client.v2_http_client.use_cached_session(cache_name)
+
+
+def close_service(service):
+ if service and options.get_value('savecache') == 'true':
+ # If this was a live request, save the recording.
+ service.http_client.v2_http_client.close_session()
+
+
+def build_suite(classes):
+ """Creates a TestSuite for all unit test classes in the list.
+
+ Assumes that each of the classes in the list has unit test methods which
+ begin with 'test'. Calls unittest.makeSuite.
+
+ Returns:
+ A new unittest.TestSuite containing a test suite for all classes.
+ """
+ suites = [unittest.makeSuite(a_class, 'test') for a_class in classes]
+ return unittest.TestSuite(suites)
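+
+# Usage sketch (hypothetical class names): a test module can expose
+#   def suite():
+#     return build_suite([SpreadsheetsClientTest, SpreadsheetsDataTest])
+# and run it with unittest.TextTestRunner().run(suite()).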
+
+
+def check_data_classes(test, classes):
+ for data_class in classes:
+ test.assert_(data_class.__doc__ is not None,
+ 'The class %s should have a docstring' % data_class)
+ if hasattr(data_class, '_qname'):
+ qname_versions = None
+ if isinstance(data_class._qname, tuple):
+ qname_versions = data_class._qname
+ else:
+ qname_versions = (data_class._qname,)
+ for versioned_qname in qname_versions:
+ test.assert_(isinstance(versioned_qname, str),
+ 'The class %s has a non-string _qname' % data_class)
+ test.assert_(not versioned_qname.endswith('}'),
+ 'The _qname for class %s is only a namespace' % (
+ data_class))
+
+ for attribute_name, value in data_class.__dict__.iteritems():
+ # Ignore all elements that start with _ (private members)
+ if not attribute_name.startswith('_'):
+ try:
+ if not (isinstance(value, str) or inspect.isfunction(value)
+ or (isinstance(value, list)
+ and issubclass(value[0], atom.core.XmlElement))
+ or type(value) == property # Allow properties.
+ or inspect.ismethod(value) # Allow methods.
+ or issubclass(value, atom.core.XmlElement)):
+ test.fail(
+ 'XmlElement member should have an attribute, XML class,'
+ ' or list of XML classes as attributes.')
+
+ except TypeError:
+ test.fail('Element %s in %s was of type %s' % (
+ attribute_name, data_class._qname, type(value)))
+
+
+def check_clients_with_auth(test, classes):
+ for client_class in classes:
+ test.assert_(hasattr(client_class, 'api_version'))
+ test.assert_(hasattr(client_class, 'auth_service'))
+ test.assert_(isinstance(client_class.auth_service, (str, unicode)))
+ test.assert_(hasattr(client_class, 'auth_scopes'))
+ test.assert_(isinstance(client_class.auth_scopes, (list, tuple)))
diff --git a/python/gdata/test_data.py b/python/gdata/test_data.py
new file mode 100644
index 0000000..46ee8f5
--- /dev/null
+++ b/python/gdata/test_data.py
@@ -0,0 +1,5495 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2006 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+
+XML_ENTRY_1 = """
+
+
+ http://www.google.com/test/id/url
+ Testing 2000 series laptop
+
+
+
+
+
+
+
+ User
+ user@gmail.com
+
+
+1
+
+'''
+
+SITES_SITE_FEED = '''
+
+https://sites.google.com/feeds/site/example.com
+2009-12-09T01:05:54.631Z
+Site
+
+
+
+Google Sites
+1
+
+https://sites.google.com/feeds/site/example.com/new-test-site
+2009-12-02T22:55:31.040Z
+2009-12-02T22:55:31.040Z
+New Test Site
+A new site to hold memories
+
+
+
+
+
+new-test-site
+iceberg
+
+
+https://sites.google.com/feeds/site/example.com/newautosite2
+2009-12-05T00:28:01.077Z
+2009-12-05T00:28:01.077Z
+newAutoSite3
+A new site to hold memories2
+
+
+
+
+newautosite2
+default
+
+'''
+
+SITES_ACL_FEED = '''
+
+https://sites.google.comsites.google.com/feeds/acl/site/example.com/new-test-site
+2009-12-09T01:24:59.080Z
+
+Acl
+
+
+
+Google Sites
+1
+
+ https://sites.google.com/feeds/acl/site/google.com/new-test-site/user%3Auser%40example.com
+ 2009-12-09T01:24:59.080Z
+ 2009-12-09T01:24:59.080Z
+
+
+
+
+
+
+'''
+
+ANALYTICS_ACCOUNT_FEED_old = '''
+
+http://www.google.com/analytics/feeds/accounts/abc@test.com
+2009-06-25T03:55:22.000-07:00
+Profile list for abc@test.com
+
+
+Google Analytics
+
+Google Analytics
+12
+1
+12
+
+http://www.google.com/analytics/feeds/accounts/ga:1174
+2009-06-25T03:55:22.000-07:00
+www.googlestore.com
+
+ga:1174
+
+
+
+
+
+
+
+'''
+
+ANALYTICS_ACCOUNT_FEED = '''
+
+ http://www.google.com/analytics/feeds/accounts/api.nickm@google.com
+ 2009-10-14T09:14:25.000-07:00
+ Profile list for abc@test.com
+
+
+ Google Analytics
+
+ Google Analytics
+ 37
+ 1
+ 37
+
+ ga:operatingSystem==iPhone
+
+
+ http://www.google.com/analytics/feeds/accounts/ga:1174
+ 2009-10-14T09:14:25.000-07:00
+ www.googlestore.com
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ga:1174
+
+'''
+
+ANALYTICS_DATA_FEED = '''
+
+ http://www.google.com/analytics/feeds/data?ids=ga:1174&dimensions=ga:medium,ga:source&metrics=ga:bounces,ga:visits&filters=ga:medium%3D%3Dreferral&start-date=2008-10-01&end-date=2008-10-31
+ 2008-10-31T16:59:59.999-07:00
+ Google Analytics Data for Profile 1174
+
+
+
+ Google Analytics
+
+ Google Analytics
+ 6451
+ 1
+ 2
+ 2008-10-01
+ 2008-10-31
+
+ ga:operatingSystem==iPhone
+
+
+
+
+
+
+ ga:1174
+ www.googlestore.com
+
+
+
+
+
+ http://www.google.com/analytics/feeds/data?ids=ga:1174&ga:medium=referral&ga:source=blogger.com&filters=ga:medium%3D%3Dreferral&start-date=2008-10-01&end-date=2008-10-31
+ 2008-10-30T17:00:00.001-07:00
+ ga:source=blogger.com | ga:medium=referral
+
+
+
+
+
+
+'''
+
+
+ANALYTICS_MGMT_PROFILE_FEED = '''
+
+ https://www.google.com/analytics/feeds/datasources/ga/accounts/~all/webproperties/~all/profiles
+ 2010-06-14T22:18:48.676Z
+ Google Analytics Profiles for superman@gmail.com
+
+
+ Google Analytics
+
+ Google Analytics
+ 1
+ 1
+ 1000
+
+ https://www.google.com/analytics/feeds/datasources/ga/accounts/30481/webproperties/UA-30481-1/profiles/1174
+ 2010-06-09T05:58:15.436-07:00
+ Google Analytics Profile www.googlestore.com
+
+
+
+
+
+
+
+
+
+
+
+
+'''
+
+ANALYTICS_MGMT_GOAL_FEED = '''
+
+ https://www.google.com/analytics/feeds/datasources/ga/accounts/~all/webproperties/~all/profiles/~all/goals
+ 2010-06-14T22:21:18.485Z
+ Google Analytics Goals for superman@gmail.com
+
+
+ Google Analytics
+
+ Google Analytics
+ 3
+ 1
+ 1000
+
+ https://www.google.com/analytics/feeds/datasources/ga/accounts/30481/webproperties/UA-30481-1/profiles/1174/goals/1
+ 2010-02-07T13:12:43.377-08:00
+ Google Analytics Goal 1
+
+
+
+
+
+
+
+
+
+
+ https://www.google.com/analytics/feeds/datasources/ga/accounts/30481/webproperties/UA-30481-1/profiles/1174/goals/2
+ 2010-02-07T13:12:43.376-08:00
+ Google Analytics Goal 2
+
+
+
+
+
+
+
+
+'''
+
+ANALYTICS_MGMT_ADV_SEGMENT_FEED = '''
+
+ https://www.google.com/analytics/feeds/datasources/ga/segments
+ 2010-06-14T22:22:02.728Z
+ Google Analytics Advanced Segments for superman@gmail.com
+
+
+ Google Analytics
+
+ Google Analytics
+ 2
+ 1
+ 1000
+
+ https://www.google.com/analytics/feeds/datasources/ga/segments/gaid::0
+ 2009-10-26T13:00:44.915-07:00
+ Google Analytics Advanced Segment Sources Form Google
+
+
+ ga:source=~^\Qgoogle\E
+
+
+
+'''
+
diff --git a/python/gdata/tlslite/BaseDB.py b/python/gdata/tlslite/BaseDB.py
new file mode 100644
index 0000000..ca8dff6
--- /dev/null
+++ b/python/gdata/tlslite/BaseDB.py
@@ -0,0 +1,120 @@
+"""Base class for SharedKeyDB and VerifierDB."""
+
+import anydbm
+import thread
+
+class BaseDB:
+ def __init__(self, filename, type):
+ self.type = type
+ self.filename = filename
+ if self.filename:
+ self.db = None
+ else:
+ self.db = {}
+ self.lock = thread.allocate_lock()
+
+ def create(self):
+ """Create a new on-disk database.
+
+ @raise anydbm.error: If there's a problem creating the database.
+ """
+ if self.filename:
+ self.db = anydbm.open(self.filename, "n") #raises anydbm.error
+ self.db["--Reserved--type"] = self.type
+ self.db.sync()
+ else:
+ self.db = {}
+
+ def open(self):
+ """Open a pre-existing on-disk database.
+
+ @raise anydbm.error: If there's a problem opening the database.
+ @raise ValueError: If the database is not of the right type.
+ """
+ if not self.filename:
+ raise ValueError("Can only open on-disk databases")
+ self.db = anydbm.open(self.filename, "w") #raises anydbm.error
+ try:
+ if self.db["--Reserved--type"] != self.type:
+ raise ValueError("Not a %s database" % self.type)
+ except KeyError:
+ raise ValueError("Not a recognized database")
+
+ def __getitem__(self, username):
+ if self.db == None:
+ raise AssertionError("DB not open")
+
+ self.lock.acquire()
+ try:
+ valueStr = self.db[username]
+ finally:
+ self.lock.release()
+
+ return self._getItem(username, valueStr)
+
+ def __setitem__(self, username, value):
+ if self.db == None:
+ raise AssertionError("DB not open")
+
+ valueStr = self._setItem(username, value)
+
+ self.lock.acquire()
+ try:
+ self.db[username] = valueStr
+ if self.filename:
+ self.db.sync()
+ finally:
+ self.lock.release()
+
+ def __delitem__(self, username):
+ if self.db == None:
+ raise AssertionError("DB not open")
+
+ self.lock.acquire()
+ try:
+ del(self.db[username])
+ if self.filename:
+ self.db.sync()
+ finally:
+ self.lock.release()
+
+ def __contains__(self, username):
+ """Check if the database contains the specified username.
+
+ @type username: str
+ @param username: The username to check for.
+
+ @rtype: bool
+ @return: True if the database contains the username, False
+ otherwise.
+
+ """
+ if self.db == None:
+ raise AssertionError("DB not open")
+
+ self.lock.acquire()
+ try:
+ return self.db.has_key(username)
+ finally:
+ self.lock.release()
+
+ def check(self, username, param):
+ value = self.__getitem__(username)
+ return self._checkItem(value, username, param)
+
+ def keys(self):
+ """Return a list of usernames in the database.
+
+ @rtype: list
+ @return: The usernames in the database.
+ """
+ if self.db == None:
+ raise AssertionError("DB not open")
+
+ self.lock.acquire()
+ try:
+ usernames = self.db.keys()
+ finally:
+ self.lock.release()
+ usernames = [u for u in usernames if not u.startswith("--Reserved--")]
+ return usernames
\ No newline at end of file
diff --git a/python/gdata/tlslite/Checker.py b/python/gdata/tlslite/Checker.py
new file mode 100644
index 0000000..f978697
--- /dev/null
+++ b/python/gdata/tlslite/Checker.py
@@ -0,0 +1,146 @@
+"""Class for post-handshake certificate checking."""
+
+from utils.cryptomath import hashAndBase64
+from X509 import X509
+from X509CertChain import X509CertChain
+from errors import *
+
+
+class Checker:
+ """This class is passed to a handshake function to check the other
+ party's certificate chain.
+
+ If a handshake function completes successfully, but the Checker
+ judges the other party's certificate chain to be missing or
+ inadequate, a subclass of
+ L{tlslite.errors.TLSAuthenticationError} will be raised.
+
+ Currently, the Checker can check either an X.509 or a cryptoID
+ chain (for the latter, cryptoIDlib must be installed).
+ """
+
+ def __init__(self, cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ checkResumedSession=False):
+ """Create a new Checker instance.
+
+ You must pass in one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ @type cryptoID: str
+ @param cryptoID: A cryptoID which the other party's certificate
+ chain must match. The cryptoIDlib module must be installed.
+ Mutually exclusive with all of the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: A cryptoID protocol URI which the other
+ party's certificate chain must match. Requires the 'cryptoID'
+ argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: A hex-encoded X.509 end-entity
+ fingerprint which the other party's end-entity certificate must
+ match. Mutually exclusive with the 'cryptoID' and
+ 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed. Mutually exclusive with the 'cryptoID' and
+ 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type checkResumedSession: bool
+ @param checkResumedSession: If resumed sessions should be
+ checked. This defaults to False, on the theory that if the
+ session was checked once, we don't need to bother
+ re-checking it.
+ """
+
+ if cryptoID and (x509Fingerprint or x509TrustList):
+ raise ValueError()
+ if x509Fingerprint and x509TrustList:
+ raise ValueError()
+ if x509CommonName and not x509TrustList:
+ raise ValueError()
+ if protocol and not cryptoID:
+ raise ValueError()
+ if cryptoID:
+ import cryptoIDlib #So we raise an error here
+ if x509TrustList:
+ import cryptlib_py #So we raise an error here
+ self.cryptoID = cryptoID
+ self.protocol = protocol
+ self.x509Fingerprint = x509Fingerprint
+ self.x509TrustList = x509TrustList
+ self.x509CommonName = x509CommonName
+ self.checkResumedSession = checkResumedSession
+
+ def __call__(self, connection):
+ """Check a TLSConnection.
+
+ When a Checker is passed to a handshake function, this will
+ be called at the end of the function.
+
+ @type connection: L{tlslite.TLSConnection.TLSConnection}
+ @param connection: The TLSConnection to examine.
+
+ @raise tlslite.errors.TLSAuthenticationError: If the other
+ party's certificate chain is missing or bad.
+ """
+ if not self.checkResumedSession and connection.resumed:
+ return
+
+ if self.cryptoID or self.x509Fingerprint or self.x509TrustList:
+ if connection._client:
+ chain = connection.session.serverCertChain
+ else:
+ chain = connection.session.clientCertChain
+
+ if self.x509Fingerprint or self.x509TrustList:
+ if isinstance(chain, X509CertChain):
+ if self.x509Fingerprint:
+ if chain.getFingerprint() != self.x509Fingerprint:
+ raise TLSFingerprintError(\
+ "X.509 fingerprint mismatch: %s, %s" % \
+ (chain.getFingerprint(), self.x509Fingerprint))
+ else: #self.x509TrustList
+ if not chain.validate(self.x509TrustList):
+ raise TLSValidationError("X.509 validation failure")
+ if self.x509CommonName and \
+ (chain.getCommonName() != self.x509CommonName):
+ raise TLSAuthorizationError(\
+ "X.509 Common Name mismatch: %s, %s" % \
+ (chain.getCommonName(), self.x509CommonName))
+ elif chain:
+ raise TLSAuthenticationTypeError()
+ else:
+ raise TLSNoAuthenticationError()
+ elif self.cryptoID:
+ import cryptoIDlib.CertChain
+ if isinstance(chain, cryptoIDlib.CertChain.CertChain):
+ if chain.cryptoID != self.cryptoID:
+ raise TLSFingerprintError(\
+ "cryptoID mismatch: %s, %s" % \
+ (chain.cryptoID, self.cryptoID))
+ if self.protocol:
+ if not chain.checkProtocol(self.protocol):
+ raise TLSAuthorizationError(\
+ "cryptoID protocol mismatch")
+ if not chain.validate():
+ raise TLSValidationError("cryptoID validation failure")
+ elif chain:
+ raise TLSAuthenticationTypeError()
+ else:
+ raise TLSNoAuthenticationError()
+
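+# Usage sketch (illustrative; the handshake call and fingerprint value are
+# assumptions, not part of this file): a Checker is handed to a handshake
+# function, for example pinning a server certificate by fingerprint:
+#   checker = Checker(x509Fingerprint='0123456789abcdef...')
+#   connection.handshakeClientCert(checker=checker)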
diff --git a/python/gdata/tlslite/FileObject.py b/python/gdata/tlslite/FileObject.py
new file mode 100644
index 0000000..6ee02b2
--- /dev/null
+++ b/python/gdata/tlslite/FileObject.py
@@ -0,0 +1,220 @@
+"""Class returned by TLSConnection.makefile()."""
+
+class FileObject:
+ """This class provides a file object interface to a
+ L{tlslite.TLSConnection.TLSConnection}.
+
+ Call makefile() on a TLSConnection to create a FileObject instance.
+
+ This class was copied, with minor modifications, from the
+ _fileobject class in socket.py. Note that fileno() is not
+ implemented."""
+
+ default_bufsize = 16384 #TREV: changed from 8192
+
+ def __init__(self, sock, mode='rb', bufsize=-1):
+ self._sock = sock
+ self.mode = mode # Not actually used in this version
+ if bufsize < 0:
+ bufsize = self.default_bufsize
+ self.bufsize = bufsize
+ self.softspace = False
+ if bufsize == 0:
+ self._rbufsize = 1
+ elif bufsize == 1:
+ self._rbufsize = self.default_bufsize
+ else:
+ self._rbufsize = bufsize
+ self._wbufsize = bufsize
+ self._rbuf = "" # A string
+ self._wbuf = [] # A list of strings
+
+ def _getclosed(self):
+ # The file object is closed once close() has dropped the socket.
+ return self._sock is None
+ closed = property(_getclosed, doc="True if the file is closed")
+
+ def close(self):
+ try:
+ if self._sock:
+ for result in self._sock._decrefAsync(): #TREV
+ pass
+ finally:
+ self._sock = None
+
+ def __del__(self):
+ try:
+ self.close()
+ except:
+ # close() may fail if __init__ didn't complete
+ pass
+
+ def flush(self):
+ if self._wbuf:
+ buffer = "".join(self._wbuf)
+ self._wbuf = []
+ self._sock.sendall(buffer)
+
+ #def fileno(self):
+ # raise NotImplementedError() #TREV
+
+ def write(self, data):
+ data = str(data) # XXX Should really reject non-string non-buffers
+ if not data:
+ return
+ self._wbuf.append(data)
+ if (self._wbufsize == 0 or
+ self._wbufsize == 1 and '\n' in data or
+ self._get_wbuf_len() >= self._wbufsize):
+ self.flush()
+
+ def writelines(self, list):
+ # XXX We could do better here for very long lists
+ # XXX Should really reject non-string non-buffers
+ self._wbuf.extend(filter(None, map(str, list)))
+ if (self._wbufsize <= 1 or
+ self._get_wbuf_len() >= self._wbufsize):
+ self.flush()
+
+ def _get_wbuf_len(self):
+ buf_len = 0
+ for x in self._wbuf:
+ buf_len += len(x)
+ return buf_len
+
+ def read(self, size=-1):
+ data = self._rbuf
+ if size < 0:
+ # Read until EOF
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ if self._rbufsize <= 1:
+ recv_size = self.default_bufsize
+ else:
+ recv_size = self._rbufsize
+ while True:
+ data = self._sock.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ return "".join(buffers)
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ while True:
+ left = size - buf_len
+ recv_size = max(self._rbufsize, left)
+ data = self._sock.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return "".join(buffers)
+
+ def readline(self, size=-1):
+ data = self._rbuf
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ assert data == ""
+ buffers = []
+ recv = self._sock.recv
+ while data != "\n":
+ data = recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return "".join(buffers)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ while True:
+ data = self._sock.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ return "".join(buffers)
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes first
+ nl = data.find('\n', 0, size)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ while True:
+ data = self._sock.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ left = size - buf_len
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return "".join(buffers)
+
+ def readlines(self, sizehint=0):
+ total = 0
+ list = []
+ while True:
+ line = self.readline()
+ if not line:
+ break
+ list.append(line)
+ total += len(line)
+ if sizehint and total >= sizehint:
+ break
+ return list
+
+ # Iterator protocols
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ line = self.readline()
+ if not line:
+ raise StopIteration
+ return line
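+
+# Usage sketch: as the class docstring notes, instances come from
+# TLSConnection.makefile(), e.g.
+#   f = tls_connection.makefile('rb')
+#   first_line = f.readline()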
diff --git a/python/gdata/tlslite/HandshakeSettings.py b/python/gdata/tlslite/HandshakeSettings.py
new file mode 100644
index 0000000..c7c3223
--- /dev/null
+++ b/python/gdata/tlslite/HandshakeSettings.py
@@ -0,0 +1,159 @@
+"""Class for setting handshake parameters."""
+
+from constants import CertificateType
+from utils import cryptomath
+from utils import cipherfactory
+
+class HandshakeSettings:
+ """This class encapsulates various parameters that can be used with
+ a TLS handshake.
+ @sort: minKeySize, maxKeySize, cipherNames, certificateTypes,
+ minVersion, maxVersion
+
+ @type minKeySize: int
+ @ivar minKeySize: The minimum bit length for asymmetric keys.
+
+ If the other party tries to use SRP, RSA, or Diffie-Hellman
+ parameters smaller than this length, an alert will be
+ signalled. The default is 1023.
+
+ @type maxKeySize: int
+ @ivar maxKeySize: The maximum bit length for asymmetric keys.
+
+ If the other party tries to use SRP, RSA, or Diffie-Hellman
+ parameters larger than this length, an alert will be signalled.
+ The default is 8193.
+
+ @type cipherNames: list
+ @ivar cipherNames: The allowed ciphers, in order of preference.
+
+ The allowed values in this list are 'aes256', 'aes128', '3des', and
+ 'rc4'. If these settings are used with a client handshake, they
+ determine the order of the ciphersuites offered in the ClientHello
+ message.
+
+ If these settings are used with a server handshake, the server will
+ choose whichever ciphersuite matches the earliest entry in this
+ list.
+
+ NOTE: If '3des' is used in this list, but TLS Lite can't find an
+ add-on library that supports 3DES, then '3des' will be silently
+ removed.
+
+ The default value is ['aes256', 'aes128', '3des', 'rc4'].
+
+ @type certificateTypes: list
+ @ivar certificateTypes: The allowed certificate types, in order of
+ preference.
+
+ The allowed values in this list are 'x509' and 'cryptoID'. This
+ list is only used with a client handshake. The client will
+ advertise to the server which certificate types are supported, and
+ will check that the server uses one of the appropriate types.
+
+ NOTE: If 'cryptoID' is used in this list, but cryptoIDlib is not
+ installed, then 'cryptoID' will be silently removed.
+
+ @type minVersion: tuple
+ @ivar minVersion: The minimum allowed SSL/TLS version.
+
+ This variable can be set to (3,0) for SSL 3.0, (3,1) for
+ TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
+ use a lower version, a protocol_version alert will be signalled.
+ The default is (3,0).
+
+ @type maxVersion: tuple
+ @ivar maxVersion: The maximum allowed SSL/TLS version.
+
+ This variable can be set to (3,0) for SSL 3.0, (3,1) for
+ TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
+ use a higher version, a protocol_version alert will be signalled.
+ The default is (3,2). (WARNING: Some servers may (improperly)
+ reject clients which offer support for TLS 1.1. In this case,
+ try lowering maxVersion to (3,1)).
+ """
+ def __init__(self):
+ self.minKeySize = 1023
+ self.maxKeySize = 8193
+ self.cipherNames = ["aes256", "aes128", "3des", "rc4"]
+ self.cipherImplementations = ["cryptlib", "openssl", "pycrypto",
+ "python"]
+ self.certificateTypes = ["x509", "cryptoID"]
+ self.minVersion = (3,0)
+ self.maxVersion = (3,2)
+
+ #Filters out options that are not supported
+ def _filter(self):
+ other = HandshakeSettings()
+ other.minKeySize = self.minKeySize
+ other.maxKeySize = self.maxKeySize
+ other.cipherNames = self.cipherNames
+ other.cipherImplementations = self.cipherImplementations
+ other.certificateTypes = self.certificateTypes
+ other.minVersion = self.minVersion
+ other.maxVersion = self.maxVersion
+
+ if not cipherfactory.tripleDESPresent:
+ other.cipherNames = [e for e in self.cipherNames if e != "3des"]
+ if len(other.cipherNames)==0:
+ raise ValueError("No supported ciphers")
+
+ try:
+ import cryptoIDlib
+ except ImportError:
+ other.certificateTypes = [e for e in self.certificateTypes \
+ if e != "cryptoID"]
+ if len(other.certificateTypes)==0:
+ raise ValueError("No supported certificate types")
+
+ if not cryptomath.cryptlibpyLoaded:
+ other.cipherImplementations = [e for e in \
+ self.cipherImplementations if e != "cryptlib"]
+ if not cryptomath.m2cryptoLoaded:
+ other.cipherImplementations = [e for e in \
+ other.cipherImplementations if e != "openssl"]
+ if not cryptomath.pycryptoLoaded:
+ other.cipherImplementations = [e for e in \
+ other.cipherImplementations if e != "pycrypto"]
+ if len(other.cipherImplementations)==0:
+ raise ValueError("No supported cipher implementations")
+
+ if other.minKeySize<512:
+ raise ValueError("minKeySize too small")
+ if other.minKeySize>16384:
+ raise ValueError("minKeySize too large")
+ if other.maxKeySize<512:
+ raise ValueError("maxKeySize too small")
+ if other.maxKeySize>16384:
+ raise ValueError("maxKeySize too large")
+ for s in other.cipherNames:
+ if s not in ("aes256", "aes128", "rc4", "3des"):
+ raise ValueError("Unknown cipher name: '%s'" % s)
+ for s in other.cipherImplementations:
+ if s not in ("cryptlib", "openssl", "python", "pycrypto"):
+ raise ValueError("Unknown cipher implementation: '%s'" % s)
+ for s in other.certificateTypes:
+ if s not in ("x509", "cryptoID"):
+ raise ValueError("Unknown certificate type: '%s'" % s)
+
+ if other.minVersion > other.maxVersion:
+ raise ValueError("Versions set incorrectly")
+
+ if not other.minVersion in ((3,0), (3,1), (3,2)):
+ raise ValueError("minVersion set incorrectly")
+
+ if not other.maxVersion in ((3,0), (3,1), (3,2)):
+ raise ValueError("maxVersion set incorrectly")
+
+ return other
+
+ def _getCertificateTypes(self):
+ l = []
+ for ct in self.certificateTypes:
+ if ct == "x509":
+ l.append(CertificateType.x509)
+ elif ct == "cryptoID":
+ l.append(CertificateType.cryptoID)
+ else:
+ raise AssertionError()
+ return l
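A minimal usage sketch, assuming the gdata.tlslite package path used in this patch and a TLSConnection named connection created elsewhere; the handshake call named in the last comment is handshakeClientCert from TLSConnection, added further below:

    # Sketch: restrict negotiation to TLS 1.0/1.1 and AES-only ciphers.
    # The values are validated by the internal _filter() step when the
    # settings object is passed to a handshake function.
    from gdata.tlslite.HandshakeSettings import HandshakeSettings

    settings = HandshakeSettings()
    settings.minVersion = (3,1)                  # refuse SSL 3.0
    settings.cipherNames = ["aes256", "aes128"]  # drop 3des and rc4
    # e.g. connection.handshakeClientCert(settings=settings)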
diff --git a/python/gdata/tlslite/Session.py b/python/gdata/tlslite/Session.py
new file mode 100644
index 0000000..a951f45
--- /dev/null
+++ b/python/gdata/tlslite/Session.py
@@ -0,0 +1,131 @@
+"""Class representing a TLS session."""
+
+from utils.compat import *
+from mathtls import *
+from constants import *
+
+class Session:
+ """
+ This class represents a TLS session.
+
+ TLS distinguishes between connections and sessions. A new
+ handshake creates both a connection and a session. Data is
+ transmitted over the connection.
+
+ The session contains a more permanent record of the handshake. The
+ session can be inspected to determine handshake results. The
+ session can also be used to create a new connection through
+ "session resumption". If the client and server both support this,
+ they can create a new connection based on an old session without
+ the overhead of a full handshake.
+
+ The session for a L{tlslite.TLSConnection.TLSConnection} can be
+ retrieved from the connection's 'session' attribute.
+
+ @type srpUsername: str
+ @ivar srpUsername: The client's SRP username (or None).
+
+ @type sharedKeyUsername: str
+ @ivar sharedKeyUsername: The client's shared-key username (or
+ None).
+
+ @type clientCertChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @ivar clientCertChain: The client's certificate chain (or None).
+
+ @type serverCertChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @ivar serverCertChain: The server's certificate chain (or None).
+ """
+
+ def __init__(self):
+ self.masterSecret = createByteArraySequence([])
+ self.sessionID = createByteArraySequence([])
+ self.cipherSuite = 0
+ self.srpUsername = None
+ self.sharedKeyUsername = None
+ self.clientCertChain = None
+ self.serverCertChain = None
+ self.resumable = False
+ self.sharedKey = False
+
+ def _clone(self):
+ other = Session()
+ other.masterSecret = self.masterSecret
+ other.sessionID = self.sessionID
+ other.cipherSuite = self.cipherSuite
+ other.srpUsername = self.srpUsername
+ other.sharedKeyUsername = self.sharedKeyUsername
+ other.clientCertChain = self.clientCertChain
+ other.serverCertChain = self.serverCertChain
+ other.resumable = self.resumable
+ other.sharedKey = self.sharedKey
+ return other
+
+ def _calcMasterSecret(self, version, premasterSecret, clientRandom,
+ serverRandom):
+ if version == (3,0):
+ self.masterSecret = PRF_SSL(premasterSecret,
+ concatArrays(clientRandom, serverRandom), 48)
+ elif version in ((3,1), (3,2)):
+ self.masterSecret = PRF(premasterSecret, "master secret",
+ concatArrays(clientRandom, serverRandom), 48)
+ else:
+ raise AssertionError()
+
+ def valid(self):
+ """If this session can be used for session resumption.
+
+ @rtype: bool
+ @return: If this session can be used for session resumption.
+ """
+ return self.resumable or self.sharedKey
+
+ def _setResumable(self, boolean):
+ #Only let it be set if this isn't a shared key
+ if not self.sharedKey:
+ #Only let it be set to True if the sessionID is non-null
+ if (not boolean) or (boolean and self.sessionID):
+ self.resumable = boolean
+
+ def getCipherName(self):
+ """Get the name of the cipher used with this connection.
+
+ @rtype: str
+ @return: The name of the cipher used with this connection.
+ Either 'aes128', 'aes256', 'rc4', or '3des'.
+ """
+ if self.cipherSuite in CipherSuite.aes128Suites:
+ return "aes128"
+ elif self.cipherSuite in CipherSuite.aes256Suites:
+ return "aes256"
+ elif self.cipherSuite in CipherSuite.rc4Suites:
+ return "rc4"
+ elif self.cipherSuite in CipherSuite.tripleDESSuites:
+ return "3des"
+ else:
+ return None
+
+ def _createSharedKey(self, sharedKeyUsername, sharedKey):
+ if len(sharedKeyUsername)>16:
+ raise ValueError()
+ if len(sharedKey)>47:
+ raise ValueError()
+
+ self.sharedKeyUsername = sharedKeyUsername
+
+ self.sessionID = createByteArrayZeros(16)
+ for x in range(len(sharedKeyUsername)):
+ self.sessionID[x] = ord(sharedKeyUsername[x])
+
+ premasterSecret = createByteArrayZeros(48)
+ sharedKey = chr(len(sharedKey)) + sharedKey
+ for x in range(48):
+ premasterSecret[x] = ord(sharedKey[x % len(sharedKey)])
+
+ self.masterSecret = PRF(premasterSecret, "shared secret",
+ createByteArraySequence([]), 48)
+ self.sharedKey = True
+ return self
+
+
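A minimal sketch of inspecting handshake results through this class, assuming connection is an already-open TLSConnection whose 'session' attribute is the one described in the docstring above:

    # Sketch: examine the session attached to an open connection.
    session = connection.session
    print "cipher in use:  ", session.getCipherName()
    print "resumable later:", session.valid()
    if session.serverCertChain is not None:
        print "server presented a certificate chain"
    if session.srpUsername is not None:
        print "authenticated via SRP as", session.srpUsername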
diff --git a/python/gdata/tlslite/SessionCache.py b/python/gdata/tlslite/SessionCache.py
new file mode 100644
index 0000000..34cf0b0
--- /dev/null
+++ b/python/gdata/tlslite/SessionCache.py
@@ -0,0 +1,103 @@
+"""Class for caching TLS sessions."""
+
+import thread
+import time
+
+class SessionCache:
+ """This class is used by the server to cache TLS sessions.
+
+ Caching sessions allows the client to use TLS session resumption
+ and avoid the expense of a full handshake. To use this class,
+ simply pass a SessionCache instance into the server handshake
+ function.
+
+ This class is thread-safe.
+ """
+
+ #References to these instances
+ #are also held by the caller, who may change the 'resumable'
+ #flag, so the SessionCache must return the same instances
+ #it was passed in.
+
+ def __init__(self, maxEntries=10000, maxAge=14400):
+ """Create a new SessionCache.
+
+ @type maxEntries: int
+ @param maxEntries: The maximum size of the cache. When this
+ limit is reached, the oldest sessions will be deleted as
+ necessary to make room for new ones. The default is 10000.
+
+ @type maxAge: int
+ @param maxAge: The number of seconds before a session expires
+ from the cache. The default is 14400 (i.e. 4 hours)."""
+
+ self.lock = thread.allocate_lock()
+
+ # Maps sessionIDs to sessions
+ self.entriesDict = {}
+
+ #Circular list of (sessionID, timestamp) pairs
+ self.entriesList = [(None,None)] * maxEntries
+
+ self.firstIndex = 0
+ self.lastIndex = 0
+ self.maxAge = maxAge
+
+ def __getitem__(self, sessionID):
+ self.lock.acquire()
+ try:
+            self._purge() #Delete expired items first, so nothing stale is returned
+ session = self.entriesDict[sessionID]
+
+ #When we add sessions they're resumable, but it's possible
+ #for the session to be invalidated later on (if a fatal alert
+ #is returned), so we have to check for resumability before
+ #returning the session.
+
+ if session.valid():
+ return session
+ else:
+ raise KeyError()
+ finally:
+ self.lock.release()
+
+
+ def __setitem__(self, sessionID, session):
+ self.lock.acquire()
+ try:
+ #Add the new element
+ self.entriesDict[sessionID] = session
+ self.entriesList[self.lastIndex] = (sessionID, time.time())
+ self.lastIndex = (self.lastIndex+1) % len(self.entriesList)
+
+ #If the cache is full, we delete the oldest element to make an
+ #empty space
+ if self.lastIndex == self.firstIndex:
+ del(self.entriesDict[self.entriesList[self.firstIndex][0]])
+ self.firstIndex = (self.firstIndex+1) % len(self.entriesList)
+ finally:
+ self.lock.release()
+
+ #Delete expired items
+ def _purge(self):
+ currentTime = time.time()
+
+ #Search through the circular list, deleting expired elements until
+ #we reach a non-expired element. Since elements in list are
+ #ordered in time, we can break once we reach the first non-expired
+ #element
+ index = self.firstIndex
+ while index != self.lastIndex:
+ if currentTime - self.entriesList[index][1] > self.maxAge:
+ del(self.entriesDict[self.entriesList[index][0]])
+ index = (index+1) % len(self.entriesList)
+ else:
+ break
+ self.firstIndex = index
+
+def _test():
+ import doctest, SessionCache
+ return doctest.testmod(SessionCache)
+
+if __name__ == "__main__":
+ _test()
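A minimal sketch of sharing one cache across server handshakes, assuming certChain and privateKey are loaded elsewhere and using TLSConnection.handshakeServer from the file added further below:

    # Sketch: a single SessionCache lets returning clients resume a
    # session instead of paying for a full handshake.
    from gdata.tlslite.SessionCache import SessionCache
    from gdata.tlslite.TLSConnection import TLSConnection

    sessionCache = SessionCache(maxEntries=5000, maxAge=3600)

    def handle_client(sock, certChain, privateKey):
        connection = TLSConnection(sock)
        connection.handshakeServer(certChain=certChain,
                                   privateKey=privateKey,
                                   sessionCache=sessionCache)
        return connection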
diff --git a/python/gdata/tlslite/SharedKeyDB.py b/python/gdata/tlslite/SharedKeyDB.py
new file mode 100644
index 0000000..3246ec7
--- /dev/null
+++ b/python/gdata/tlslite/SharedKeyDB.py
@@ -0,0 +1,58 @@
+"""Class for storing shared keys."""
+
+from utils.cryptomath import *
+from utils.compat import *
+from mathtls import *
+from Session import Session
+from BaseDB import BaseDB
+
+class SharedKeyDB(BaseDB):
+    """This class represents an in-memory or on-disk database of shared
+ keys.
+
+ A SharedKeyDB can be passed to a server handshake function to
+ authenticate a client based on one of the shared keys.
+
+ This class is thread-safe.
+ """
+
+ def __init__(self, filename=None):
+ """Create a new SharedKeyDB.
+
+ @type filename: str
+ @param filename: Filename for an on-disk database, or None for
+ an in-memory database. If the filename already exists, follow
+ this with a call to open(). To create a new on-disk database,
+ follow this with a call to create().
+ """
+ BaseDB.__init__(self, filename, "shared key")
+
+ def _getItem(self, username, valueStr):
+ session = Session()
+ session._createSharedKey(username, valueStr)
+ return session
+
+ def __setitem__(self, username, sharedKey):
+ """Add a shared key to the database.
+
+ @type username: str
+ @param username: The username to associate the shared key with.
+ Must be less than or equal to 16 characters in length, and must
+ not already be in the database.
+
+ @type sharedKey: str
+ @param sharedKey: The shared key to add. Must be less than 48
+ characters in length.
+ """
+ BaseDB.__setitem__(self, username, sharedKey)
+
+ def _setItem(self, username, value):
+ if len(username)>16:
+ raise ValueError("username too long")
+ if len(value)>=48:
+ raise ValueError("shared key too long")
+ return value
+
+ def _checkItem(self, value, username, param):
+ newSession = self._getItem(username, param)
+ return value.masterSecret == newSession.masterSecret
\ No newline at end of file
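A minimal sketch of populating an in-memory database for the server side; the create() call comes from BaseDB, which is not part of this hunk, so treat it as an assumption:

    # Sketch: in-memory shared-key database.
    from gdata.tlslite.SharedKeyDB import SharedKeyDB

    sharedKeyDB = SharedKeyDB()        # no filename, so in-memory
    sharedKeyDB.create()               # assumed BaseDB initializer
    sharedKeyDB["alice"] = "0123456789abcdef"  # name <= 16 chars, key < 48
    # e.g. connection.handshakeServer(sharedKeyDB=sharedKeyDB)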
diff --git a/python/gdata/tlslite/TLSConnection.py b/python/gdata/tlslite/TLSConnection.py
new file mode 100644
index 0000000..d125f8f
--- /dev/null
+++ b/python/gdata/tlslite/TLSConnection.py
@@ -0,0 +1,1600 @@
+"""
+MAIN CLASS FOR TLS LITE (START HERE!).
+"""
+from __future__ import generators
+
+import socket
+from utils.compat import formatExceptionTrace
+from TLSRecordLayer import TLSRecordLayer
+from Session import Session
+from constants import *
+from utils.cryptomath import getRandomBytes
+from errors import *
+from messages import *
+from mathtls import *
+from HandshakeSettings import HandshakeSettings
+
+
+class TLSConnection(TLSRecordLayer):
+ """
+ This class wraps a socket and provides TLS handshaking and data
+ transfer.
+
+ To use this class, create a new instance, passing a connected
+ socket into the constructor. Then call some handshake function.
+ If the handshake completes without raising an exception, then a TLS
+ connection has been negotiated. You can transfer data over this
+ connection as if it were a socket.
+
+ This class provides both synchronous and asynchronous versions of
+ its key functions. The synchronous versions should be used when
+ writing single-or multi-threaded code using blocking sockets. The
+ asynchronous versions should be used when performing asynchronous,
+ event-based I/O with non-blocking sockets.
+
+ Asynchronous I/O is a complicated subject; typically, you should
+ not use the asynchronous functions directly, but should use some
+ framework like asyncore or Twisted which TLS Lite integrates with
+ (see
+ L{tlslite.integration.TLSAsyncDispatcherMixIn.TLSAsyncDispatcherMixIn} or
+ L{tlslite.integration.TLSTwistedProtocolWrapper.TLSTwistedProtocolWrapper}).
+ """
+
+
+ def __init__(self, sock):
+ """Create a new TLSConnection instance.
+
+ @param sock: The socket data will be transmitted on. The
+ socket should already be connected. It may be in blocking or
+ non-blocking mode.
+
+ @type sock: L{socket.socket}
+ """
+ TLSRecordLayer.__init__(self, sock)
+
+ def handshakeClientSRP(self, username, password, session=None,
+ settings=None, checker=None, async=False):
+ """Perform an SRP handshake in the role of client.
+
+ This function performs a TLS/SRP handshake. SRP mutually
+ authenticates both parties to each other using only a
+ username and password. This function may also perform a
+ combined SRP and server-certificate handshake, if the server
+ chooses to authenticate itself with a certificate chain in
+ addition to doing SRP.
+
+ TLS/SRP is non-standard. Most TLS implementations don't
+ support it. See
+ U{http://www.ietf.org/html.charters/tls-charter.html} or
+ U{http://trevp.net/tlssrp/} for the latest information on
+ TLS/SRP.
+
+ Like any handshake function, this can be called on a closed
+ TLS connection, or on a TLS connection that is already open.
+ If called on an open connection it performs a re-handshake.
+
+ If the function completes without raising an exception, the
+ TLS connection will be open and available for data transfer.
+
+ If an exception is raised, the connection will have been
+ automatically closed (if it was ever open).
+
+ @type username: str
+ @param username: The SRP username.
+
+ @type password: str
+ @param password: The SRP password.
+
+ @type session: L{tlslite.Session.Session}
+ @param session: A TLS session to attempt to resume. This
+ session must be an SRP session performed with the same username
+ and password as were passed in. If the resumption does not
+ succeed, a full SRP handshake will be performed.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+
+ @type checker: L{tlslite.Checker.Checker}
+ @param checker: A Checker instance. This instance will be
+ invoked to examine the other party's authentication
+    credentials, if the handshake completes successfully.
+
+ @type async: bool
+ @param async: If False, this function will block until the
+ handshake is completed. If True, this function will return a
+ generator. Successive invocations of the generator will
+ return 0 if it is waiting to read from the socket, 1 if it is
+ waiting to write to the socket, or will raise StopIteration if
+ the handshake operation is completed.
+
+ @rtype: None or an iterable
+ @return: If 'async' is True, a generator object will be
+ returned.
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ @raise tlslite.errors.TLSAuthenticationError: If the checker
+ doesn't like the other party's authentication credentials.
+ """
+ handshaker = self._handshakeClientAsync(srpParams=(username, password),
+ session=session, settings=settings, checker=checker)
+ if async:
+ return handshaker
+ for result in handshaker:
+ pass
+
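A minimal sketch of driving the generator that handshakeClientSRP returns when async=True, assuming a placeholder host and that the wrapped socket is reachable as connection.sock (an attribute set by TLSRecordLayer, which is outside this hunk):

    # Sketch: hand-rolled event loop; normally the asyncore or Twisted
    # integration classes mentioned in the class docstring do this.
    import select
    import socket

    from gdata.tlslite.TLSConnection import TLSConnection

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("srp.example.org", 443))   # placeholder host
    sock.setblocking(0)
    connection = TLSConnection(sock)

    handshaker = connection.handshakeClientSRP("alice", "password", async=True)
    for state in handshaker:
        if state == 0:
            select.select([connection.sock], [], [])   # waiting to read
        elif state == 1:
            select.select([], [connection.sock], [])   # waiting to write
    # Falling out of the loop means the generator raised StopIteration,
    # i.e. the handshake finished and data transfer can begin.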
+ def handshakeClientCert(self, certChain=None, privateKey=None,
+ session=None, settings=None, checker=None,
+ async=False):
+ """Perform a certificate-based handshake in the role of client.
+
+ This function performs an SSL or TLS handshake. The server
+ will authenticate itself using an X.509 or cryptoID certificate
+ chain. If the handshake succeeds, the server's certificate
+ chain will be stored in the session's serverCertChain attribute.
+ Unless a checker object is passed in, this function does no
+ validation or checking of the server's certificate chain.
+
+ If the server requests client authentication, the
+ client will send the passed-in certificate chain, and use the
+ passed-in private key to authenticate itself. If no
+ certificate chain and private key were passed in, the client
+ will attempt to proceed without client authentication. The
+ server may or may not allow this.
+
+ Like any handshake function, this can be called on a closed
+ TLS connection, or on a TLS connection that is already open.
+ If called on an open connection it performs a re-handshake.
+
+ If the function completes without raising an exception, the
+ TLS connection will be open and available for data transfer.
+
+ If an exception is raised, the connection will have been
+ automatically closed (if it was ever open).
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: The certificate chain to be used if the
+ server requests client authentication.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: The private key to be used if the server
+ requests client authentication.
+
+ @type session: L{tlslite.Session.Session}
+ @param session: A TLS session to attempt to resume. If the
+ resumption does not succeed, a full handshake will be
+ performed.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+
+ @type checker: L{tlslite.Checker.Checker}
+ @param checker: A Checker instance. This instance will be
+ invoked to examine the other party's authentication
+    credentials, if the handshake completes successfully.
+
+ @type async: bool
+ @param async: If False, this function will block until the
+ handshake is completed. If True, this function will return a
+ generator. Successive invocations of the generator will
+ return 0 if it is waiting to read from the socket, 1 if it is
+ waiting to write to the socket, or will raise StopIteration if
+ the handshake operation is completed.
+
+ @rtype: None or an iterable
+ @return: If 'async' is True, a generator object will be
+ returned.
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ @raise tlslite.errors.TLSAuthenticationError: If the checker
+ doesn't like the other party's authentication credentials.
+ """
+ handshaker = self._handshakeClientAsync(certParams=(certChain,
+ privateKey), session=session, settings=settings,
+ checker=checker)
+ if async:
+ return handshaker
+ for result in handshaker:
+ pass
+
+ def handshakeClientUnknown(self, srpCallback=None, certCallback=None,
+ session=None, settings=None, checker=None,
+ async=False):
+ """Perform a to-be-determined type of handshake in the role of client.
+
+ This function performs an SSL or TLS handshake. If the server
+ requests client certificate authentication, the
+ certCallback will be invoked and should return a (certChain,
+ privateKey) pair. If the callback returns None, the library
+ will attempt to proceed without client authentication. The
+ server may or may not allow this.
+
+ If the server requests SRP authentication, the srpCallback
+ will be invoked and should return a (username, password) pair.
+ If the callback returns None, the local implementation will
+ signal a user_canceled error alert.
+
+ After the handshake completes, the client can inspect the
+ connection's session attribute to determine what type of
+ authentication was performed.
+
+ Like any handshake function, this can be called on a closed
+ TLS connection, or on a TLS connection that is already open.
+ If called on an open connection it performs a re-handshake.
+
+ If the function completes without raising an exception, the
+ TLS connection will be open and available for data transfer.
+
+ If an exception is raised, the connection will have been
+ automatically closed (if it was ever open).
+
+ @type srpCallback: callable
+ @param srpCallback: The callback to be used if the server
+ requests SRP authentication. If None, the client will not
+ offer support for SRP ciphersuites.
+
+ @type certCallback: callable
+ @param certCallback: The callback to be used if the server
+ requests client certificate authentication.
+
+ @type session: L{tlslite.Session.Session}
+ @param session: A TLS session to attempt to resume. If the
+ resumption does not succeed, a full handshake will be
+ performed.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+
+ @type checker: L{tlslite.Checker.Checker}
+ @param checker: A Checker instance. This instance will be
+ invoked to examine the other party's authentication
+    credentials, if the handshake completes successfully.
+
+ @type async: bool
+ @param async: If False, this function will block until the
+ handshake is completed. If True, this function will return a
+ generator. Successive invocations of the generator will
+ return 0 if it is waiting to read from the socket, 1 if it is
+ waiting to write to the socket, or will raise StopIteration if
+ the handshake operation is completed.
+
+ @rtype: None or an iterable
+ @return: If 'async' is True, a generator object will be
+ returned.
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ @raise tlslite.errors.TLSAuthenticationError: If the checker
+ doesn't like the other party's authentication credentials.
+ """
+ handshaker = self._handshakeClientAsync(unknownParams=(srpCallback,
+ certCallback), session=session, settings=settings,
+ checker=checker)
+ if async:
+ return handshaker
+ for result in handshaker:
+ pass
+
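A minimal sketch of the two callbacks handshakeClientUnknown expects, with placeholder credentials and connection again assumed to be a TLSConnection:

    # Sketch: callbacks for a to-be-determined handshake.  Each returns
    # the tuple described in the docstring above, or None to decline.
    def srpCallback():
        return ("alice", "password")    # (username, password) or None

    def certCallback():
        return None                     # no client certificate available

    connection.handshakeClientUnknown(srpCallback=srpCallback,
                                      certCallback=certCallback)
    # Afterwards connection.session.srpUsername or .clientCertChain shows
    # which kind of authentication was actually performed.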
+ def handshakeClientSharedKey(self, username, sharedKey, settings=None,
+ checker=None, async=False):
+ """Perform a shared-key handshake in the role of client.
+
+ This function performs a shared-key handshake. Using shared
+ symmetric keys of high entropy (128 bits or greater) mutually
+ authenticates both parties to each other.
+
+ TLS with shared-keys is non-standard. Most TLS
+ implementations don't support it. See
+ U{http://www.ietf.org/html.charters/tls-charter.html} for the
+ latest information on TLS with shared-keys. If the shared-keys
+    Internet-Draft changes or is superseded, TLS Lite will track
+ those changes, so the shared-key support in later versions of
+ TLS Lite may become incompatible with this version.
+
+ Like any handshake function, this can be called on a closed
+ TLS connection, or on a TLS connection that is already open.
+ If called on an open connection it performs a re-handshake.
+
+ If the function completes without raising an exception, the
+ TLS connection will be open and available for data transfer.
+
+ If an exception is raised, the connection will have been
+ automatically closed (if it was ever open).
+
+ @type username: str
+ @param username: The shared-key username.
+
+ @type sharedKey: str
+ @param sharedKey: The shared key.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+
+ @type checker: L{tlslite.Checker.Checker}
+ @param checker: A Checker instance. This instance will be
+ invoked to examine the other party's authentication
+    credentials, if the handshake completes successfully.
+
+ @type async: bool
+ @param async: If False, this function will block until the
+ handshake is completed. If True, this function will return a
+ generator. Successive invocations of the generator will
+ return 0 if it is waiting to read from the socket, 1 if it is
+ waiting to write to the socket, or will raise StopIteration if
+ the handshake operation is completed.
+
+ @rtype: None or an iterable
+ @return: If 'async' is True, a generator object will be
+ returned.
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ @raise tlslite.errors.TLSAuthenticationError: If the checker
+ doesn't like the other party's authentication credentials.
+ """
+ handshaker = self._handshakeClientAsync(sharedKeyParams=(username,
+ sharedKey), settings=settings, checker=checker)
+ if async:
+ return handshaker
+ for result in handshaker:
+ pass
+
+ def _handshakeClientAsync(self, srpParams=(), certParams=(),
+ unknownParams=(), sharedKeyParams=(),
+ session=None, settings=None, checker=None,
+ recursive=False):
+
+ handshaker = self._handshakeClientAsyncHelper(srpParams=srpParams,
+ certParams=certParams, unknownParams=unknownParams,
+ sharedKeyParams=sharedKeyParams, session=session,
+ settings=settings, recursive=recursive)
+ for result in self._handshakeWrapperAsync(handshaker, checker):
+ yield result
+
+
+ def _handshakeClientAsyncHelper(self, srpParams, certParams, unknownParams,
+ sharedKeyParams, session, settings, recursive):
+ if not recursive:
+ self._handshakeStart(client=True)
+
+ #Unpack parameters
+ srpUsername = None # srpParams
+ password = None # srpParams
+ clientCertChain = None # certParams
+ privateKey = None # certParams
+ srpCallback = None # unknownParams
+ certCallback = None # unknownParams
+ #session # sharedKeyParams (or session)
+ #settings # settings
+
+ if srpParams:
+ srpUsername, password = srpParams
+ elif certParams:
+ clientCertChain, privateKey = certParams
+ elif unknownParams:
+ srpCallback, certCallback = unknownParams
+ elif sharedKeyParams:
+ session = Session()._createSharedKey(*sharedKeyParams)
+
+ if not settings:
+ settings = HandshakeSettings()
+ settings = settings._filter()
+
+ #Validate parameters
+ if srpUsername and not password:
+ raise ValueError("Caller passed a username but no password")
+ if password and not srpUsername:
+ raise ValueError("Caller passed a password but no username")
+
+ if clientCertChain and not privateKey:
+ raise ValueError("Caller passed a certChain but no privateKey")
+ if privateKey and not clientCertChain:
+ raise ValueError("Caller passed a privateKey but no certChain")
+
+ if clientCertChain:
+ foundType = False
+ try:
+ import cryptoIDlib.CertChain
+ if isinstance(clientCertChain, cryptoIDlib.CertChain.CertChain):
+ if "cryptoID" not in settings.certificateTypes:
+ raise ValueError("Client certificate doesn't "\
+ "match Handshake Settings")
+ settings.certificateTypes = ["cryptoID"]
+ foundType = True
+ except ImportError:
+ pass
+ if not foundType and isinstance(clientCertChain,
+ X509CertChain):
+ if "x509" not in settings.certificateTypes:
+ raise ValueError("Client certificate doesn't match "\
+ "Handshake Settings")
+ settings.certificateTypes = ["x509"]
+ foundType = True
+ if not foundType:
+ raise ValueError("Unrecognized certificate type")
+
+
+ if session:
+ if not session.valid():
+ session = None #ignore non-resumable sessions...
+ elif session.resumable and \
+ (session.srpUsername != srpUsername):
+ raise ValueError("Session username doesn't match")
+
+ #Add Faults to parameters
+ if srpUsername and self.fault == Fault.badUsername:
+ srpUsername += "GARBAGE"
+ if password and self.fault == Fault.badPassword:
+ password += "GARBAGE"
+ if sharedKeyParams:
+ identifier = sharedKeyParams[0]
+ sharedKey = sharedKeyParams[1]
+ if self.fault == Fault.badIdentifier:
+ identifier += "GARBAGE"
+ session = Session()._createSharedKey(identifier, sharedKey)
+ elif self.fault == Fault.badSharedKey:
+ sharedKey += "GARBAGE"
+ session = Session()._createSharedKey(identifier, sharedKey)
+
+
+ #Initialize locals
+ serverCertChain = None
+ cipherSuite = 0
+ certificateType = CertificateType.x509
+ premasterSecret = None
+
+ #Get client nonce
+ clientRandom = getRandomBytes(32)
+
+ #Initialize acceptable ciphersuites
+ cipherSuites = []
+ if srpParams:
+ cipherSuites += CipherSuite.getSrpRsaSuites(settings.cipherNames)
+ cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
+ elif certParams:
+ cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
+ elif unknownParams:
+ if srpCallback:
+ cipherSuites += \
+ CipherSuite.getSrpRsaSuites(settings.cipherNames)
+ cipherSuites += \
+ CipherSuite.getSrpSuites(settings.cipherNames)
+ cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
+ elif sharedKeyParams:
+ cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
+ else:
+ cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
+
+ #Initialize acceptable certificate types
+ certificateTypes = settings._getCertificateTypes()
+
+ #Tentatively set the version to the client's minimum version.
+ #We'll use this for the ClientHello, and if an error occurs
+ #parsing the Server Hello, we'll use this version for the response
+ self.version = settings.maxVersion
+
+ #Either send ClientHello (with a resumable session)...
+ if session:
+            #If it's a resumable session (i.e. not a shared-key one), then its
+ #ciphersuite must be one of the acceptable ciphersuites
+ if (not sharedKeyParams) and \
+ session.cipherSuite not in cipherSuites:
+ raise ValueError("Session's cipher suite not consistent "\
+ "with parameters")
+ else:
+ clientHello = ClientHello()
+ clientHello.create(settings.maxVersion, clientRandom,
+ session.sessionID, cipherSuites,
+ certificateTypes, session.srpUsername)
+
+ #Or send ClientHello (without)
+ else:
+ clientHello = ClientHello()
+ clientHello.create(settings.maxVersion, clientRandom,
+ createByteArraySequence([]), cipherSuites,
+ certificateTypes, srpUsername)
+ for result in self._sendMsg(clientHello):
+ yield result
+
+ #Get ServerHello (or missing_srp_username)
+ for result in self._getMsg((ContentType.handshake,
+ ContentType.alert),
+ HandshakeType.server_hello):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ msg = result
+
+ if isinstance(msg, ServerHello):
+ serverHello = msg
+ elif isinstance(msg, Alert):
+ alert = msg
+
+ #If it's not a missing_srp_username, re-raise
+ if alert.description != AlertDescription.missing_srp_username:
+ self._shutdown(False)
+ raise TLSRemoteAlert(alert)
+
+ #If we're not in SRP callback mode, we won't have offered SRP
+ #without a username, so we shouldn't get this alert
+ if not srpCallback:
+ for result in self._sendError(\
+ AlertDescription.unexpected_message):
+ yield result
+ srpParams = srpCallback()
+ #If the callback returns None, cancel the handshake
+ if srpParams == None:
+ for result in self._sendError(AlertDescription.user_canceled):
+ yield result
+
+ #Recursively perform handshake
+ for result in self._handshakeClientAsyncHelper(srpParams,
+ None, None, None, None, settings, True):
+ yield result
+ return
+
+ #Get the server version. Do this before anything else, so any
+ #error alerts will use the server's version
+ self.version = serverHello.server_version
+
+ #Future responses from server must use this version
+ self._versionCheck = True
+
+ #Check ServerHello
+ if serverHello.server_version < settings.minVersion:
+ for result in self._sendError(\
+ AlertDescription.protocol_version,
+ "Too old version: %s" % str(serverHello.server_version)):
+ yield result
+ if serverHello.server_version > settings.maxVersion:
+ for result in self._sendError(\
+ AlertDescription.protocol_version,
+ "Too new version: %s" % str(serverHello.server_version)):
+ yield result
+ if serverHello.cipher_suite not in cipherSuites:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,
+ "Server responded with incorrect ciphersuite"):
+ yield result
+ if serverHello.certificate_type not in certificateTypes:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,
+ "Server responded with incorrect certificate type"):
+ yield result
+ if serverHello.compression_method != 0:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,
+ "Server responded with incorrect compression method"):
+ yield result
+
+ #Get the server nonce
+ serverRandom = serverHello.random
+
+ #If the server agrees to resume
+ if session and session.sessionID and \
+ serverHello.session_id == session.sessionID:
+
+ #If a shared-key, we're flexible about suites; otherwise the
+ #server-chosen suite has to match the session's suite
+ if sharedKeyParams:
+ session.cipherSuite = serverHello.cipher_suite
+ elif serverHello.cipher_suite != session.cipherSuite:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,\
+ "Server's ciphersuite doesn't match session"):
+ yield result
+
+ #Set the session for this connection
+ self.session = session
+
+ #Calculate pending connection states
+ self._calcPendingStates(clientRandom, serverRandom,
+ settings.cipherImplementations)
+
+ #Exchange ChangeCipherSpec and Finished messages
+ for result in self._getFinished():
+ yield result
+ for result in self._sendFinished():
+ yield result
+
+ #Mark the connection as open
+ self._handshakeDone(resumed=True)
+
+ #If server DOES NOT agree to resume
+ else:
+
+ if sharedKeyParams:
+ for result in self._sendError(\
+ AlertDescription.user_canceled,
+ "Was expecting a shared-key resumption"):
+ yield result
+
+ #We've already validated these
+ cipherSuite = serverHello.cipher_suite
+ certificateType = serverHello.certificate_type
+
+ #If the server chose an SRP suite...
+ if cipherSuite in CipherSuite.srpSuites:
+ #Get ServerKeyExchange, ServerHelloDone
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.server_key_exchange, cipherSuite):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverKeyExchange = result
+
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.server_hello_done):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverHelloDone = result
+
+ #If the server chose an SRP+RSA suite...
+ elif cipherSuite in CipherSuite.srpRsaSuites:
+ #Get Certificate, ServerKeyExchange, ServerHelloDone
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.certificate, certificateType):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverCertificate = result
+
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.server_key_exchange, cipherSuite):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverKeyExchange = result
+
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.server_hello_done):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverHelloDone = result
+
+ #If the server chose an RSA suite...
+ elif cipherSuite in CipherSuite.rsaSuites:
+ #Get Certificate[, CertificateRequest], ServerHelloDone
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.certificate, certificateType):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverCertificate = result
+
+ for result in self._getMsg(ContentType.handshake,
+ (HandshakeType.server_hello_done,
+ HandshakeType.certificate_request)):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ msg = result
+
+ certificateRequest = None
+ if isinstance(msg, CertificateRequest):
+ certificateRequest = msg
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.server_hello_done):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ serverHelloDone = result
+ elif isinstance(msg, ServerHelloDone):
+ serverHelloDone = msg
+ else:
+ raise AssertionError()
+
+
+ #Calculate SRP premaster secret, if server chose an SRP or
+ #SRP+RSA suite
+ if cipherSuite in CipherSuite.srpSuites + \
+ CipherSuite.srpRsaSuites:
+ #Get and check the server's group parameters and B value
+ N = serverKeyExchange.srp_N
+ g = serverKeyExchange.srp_g
+ s = serverKeyExchange.srp_s
+ B = serverKeyExchange.srp_B
+
+ if (g,N) not in goodGroupParameters:
+ for result in self._sendError(\
+ AlertDescription.untrusted_srp_parameters,
+ "Unknown group parameters"):
+ yield result
+ if numBits(N) < settings.minKeySize:
+ for result in self._sendError(\
+ AlertDescription.untrusted_srp_parameters,
+ "N value is too small: %d" % numBits(N)):
+ yield result
+ if numBits(N) > settings.maxKeySize:
+ for result in self._sendError(\
+ AlertDescription.untrusted_srp_parameters,
+ "N value is too large: %d" % numBits(N)):
+ yield result
+ if B % N == 0:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,
+ "Suspicious B value"):
+ yield result
+
+ #Check the server's signature, if server chose an
+ #SRP+RSA suite
+ if cipherSuite in CipherSuite.srpRsaSuites:
+ #Hash ServerKeyExchange/ServerSRPParams
+ hashBytes = serverKeyExchange.hash(clientRandom,
+ serverRandom)
+
+ #Extract signature bytes from ServerKeyExchange
+ sigBytes = serverKeyExchange.signature
+ if len(sigBytes) == 0:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,
+ "Server sent an SRP ServerKeyExchange "\
+ "message without a signature"):
+ yield result
+
+ #Get server's public key from the Certificate message
+ for result in self._getKeyFromChain(serverCertificate,
+ settings):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ publicKey, serverCertChain = result
+
+ #Verify signature
+ if not publicKey.verify(sigBytes, hashBytes):
+ for result in self._sendError(\
+ AlertDescription.decrypt_error,
+ "Signature failed to verify"):
+ yield result
+
+
+ #Calculate client's ephemeral DH values (a, A)
+ a = bytesToNumber(getRandomBytes(32))
+ A = powMod(g, a, N)
+
+ #Calculate client's static DH values (x, v)
+ x = makeX(bytesToString(s), srpUsername, password)
+ v = powMod(g, x, N)
+
+ #Calculate u
+ u = makeU(N, A, B)
+
+ #Calculate premaster secret
+ k = makeK(N, g)
+ S = powMod((B - (k*v)) % N, a+(u*x), N)
+
+ if self.fault == Fault.badA:
+ A = N
+ S = 0
+ premasterSecret = numberToBytes(S)
+
+ #Send ClientKeyExchange
+ for result in self._sendMsg(\
+ ClientKeyExchange(cipherSuite).createSRP(A)):
+ yield result
+
+
+ #Calculate RSA premaster secret, if server chose an RSA suite
+ elif cipherSuite in CipherSuite.rsaSuites:
+
+ #Handle the presence of a CertificateRequest
+ if certificateRequest:
+ if unknownParams and certCallback:
+ certParamsNew = certCallback()
+ if certParamsNew:
+ clientCertChain, privateKey = certParamsNew
+
+ #Get server's public key from the Certificate message
+ for result in self._getKeyFromChain(serverCertificate,
+ settings):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ publicKey, serverCertChain = result
+
+
+ #Calculate premaster secret
+ premasterSecret = getRandomBytes(48)
+ premasterSecret[0] = settings.maxVersion[0]
+ premasterSecret[1] = settings.maxVersion[1]
+
+ if self.fault == Fault.badPremasterPadding:
+ premasterSecret[0] = 5
+ if self.fault == Fault.shortPremasterSecret:
+ premasterSecret = premasterSecret[:-1]
+
+ #Encrypt premaster secret to server's public key
+ encryptedPreMasterSecret = publicKey.encrypt(premasterSecret)
+
+ #If client authentication was requested, send Certificate
+ #message, either with certificates or empty
+ if certificateRequest:
+ clientCertificate = Certificate(certificateType)
+
+ if clientCertChain:
+ #Check to make sure we have the same type of
+ #certificates the server requested
+ wrongType = False
+ if certificateType == CertificateType.x509:
+ if not isinstance(clientCertChain, X509CertChain):
+ wrongType = True
+ elif certificateType == CertificateType.cryptoID:
+ if not isinstance(clientCertChain,
+ cryptoIDlib.CertChain.CertChain):
+ wrongType = True
+ if wrongType:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure,
+ "Client certificate is of wrong type"):
+ yield result
+
+ clientCertificate.create(clientCertChain)
+
+ for result in self._sendMsg(clientCertificate):
+ yield result
+ else:
+ #The server didn't request client auth, so we
+ #zeroize these so the clientCertChain won't be
+ #stored in the session.
+ privateKey = None
+ clientCertChain = None
+
+ #Send ClientKeyExchange
+ clientKeyExchange = ClientKeyExchange(cipherSuite,
+ self.version)
+ clientKeyExchange.createRSA(encryptedPreMasterSecret)
+ for result in self._sendMsg(clientKeyExchange):
+ yield result
+
+ #If client authentication was requested and we have a
+ #private key, send CertificateVerify
+ if certificateRequest and privateKey:
+ if self.version == (3,0):
+ #Create a temporary session object, just for the
+ #purpose of creating the CertificateVerify
+ session = Session()
+ session._calcMasterSecret(self.version,
+ premasterSecret,
+ clientRandom,
+ serverRandom)
+ verifyBytes = self._calcSSLHandshakeHash(\
+ session.masterSecret, "")
+ elif self.version in ((3,1), (3,2)):
+ verifyBytes = stringToBytes(\
+ self._handshake_md5.digest() + \
+ self._handshake_sha.digest())
+ if self.fault == Fault.badVerifyMessage:
+ verifyBytes[0] = ((verifyBytes[0]+1) % 256)
+ signedBytes = privateKey.sign(verifyBytes)
+ certificateVerify = CertificateVerify()
+ certificateVerify.create(signedBytes)
+ for result in self._sendMsg(certificateVerify):
+ yield result
+
+
+ #Create the session object
+ self.session = Session()
+ self.session._calcMasterSecret(self.version, premasterSecret,
+ clientRandom, serverRandom)
+ self.session.sessionID = serverHello.session_id
+ self.session.cipherSuite = cipherSuite
+ self.session.srpUsername = srpUsername
+ self.session.clientCertChain = clientCertChain
+ self.session.serverCertChain = serverCertChain
+
+ #Calculate pending connection states
+ self._calcPendingStates(clientRandom, serverRandom,
+ settings.cipherImplementations)
+
+ #Exchange ChangeCipherSpec and Finished messages
+ for result in self._sendFinished():
+ yield result
+ for result in self._getFinished():
+ yield result
+
+ #Mark the connection as open
+ self.session._setResumable(True)
+ self._handshakeDone(resumed=False)
+
+
+
+ def handshakeServer(self, sharedKeyDB=None, verifierDB=None,
+ certChain=None, privateKey=None, reqCert=False,
+ sessionCache=None, settings=None, checker=None):
+ """Perform a handshake in the role of server.
+
+ This function performs an SSL or TLS handshake. Depending on
+ the arguments and the behavior of the client, this function can
+ perform a shared-key, SRP, or certificate-based handshake. It
+ can also perform a combined SRP and server-certificate
+ handshake.
+
+ Like any handshake function, this can be called on a closed
+ TLS connection, or on a TLS connection that is already open.
+ If called on an open connection it performs a re-handshake.
+ This function does not send a Hello Request message before
+ performing the handshake, so if re-handshaking is required,
+ the server must signal the client to begin the re-handshake
+ through some other means.
+
+ If the function completes without raising an exception, the
+ TLS connection will be open and available for data transfer.
+
+ If an exception is raised, the connection will have been
+ automatically closed (if it was ever open).
+
+ @type sharedKeyDB: L{tlslite.SharedKeyDB.SharedKeyDB}
+ @param sharedKeyDB: A database of shared symmetric keys
+ associated with usernames. If the client performs a
+ shared-key handshake, the session's sharedKeyUsername
+ attribute will be set.
+
+ @type verifierDB: L{tlslite.VerifierDB.VerifierDB}
+ @param verifierDB: A database of SRP password verifiers
+ associated with usernames. If the client performs an SRP
+ handshake, the session's srpUsername attribute will be set.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: The certificate chain to be used if the
+ client requests server certificate authentication.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: The private key to be used if the client
+ requests server certificate authentication.
+
+ @type reqCert: bool
+ @param reqCert: Whether to request client certificate
+ authentication. This only applies if the client chooses server
+ certificate authentication; if the client chooses SRP or
+ shared-key authentication, this will be ignored. If the client
+    performs client certificate authentication, the session's
+ clientCertChain attribute will be set.
+
+ @type sessionCache: L{tlslite.SessionCache.SessionCache}
+ @param sessionCache: An in-memory cache of resumable sessions.
+ The client can resume sessions from this cache. Alternatively,
+ if the client performs a full handshake, a new session will be
+ added to the cache.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites and SSL/TLS version chosen by the server.
+
+ @type checker: L{tlslite.Checker.Checker}
+ @param checker: A Checker instance. This instance will be
+ invoked to examine the other party's authentication
+    credentials, if the handshake completes successfully.
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ @raise tlslite.errors.TLSAuthenticationError: If the checker
+ doesn't like the other party's authentication credentials.
+ """
+ for result in self.handshakeServerAsync(sharedKeyDB, verifierDB,
+ certChain, privateKey, reqCert, sessionCache, settings,
+ checker):
+ pass
+
+
+ def handshakeServerAsync(self, sharedKeyDB=None, verifierDB=None,
+ certChain=None, privateKey=None, reqCert=False,
+ sessionCache=None, settings=None, checker=None):
+ """Start a server handshake operation on the TLS connection.
+
+ This function returns a generator which behaves similarly to
+ handshakeServer(). Successive invocations of the generator
+ will return 0 if it is waiting to read from the socket, 1 if it is
+ waiting to write to the socket, or it will raise StopIteration
+ if the handshake operation is complete.
+
+ @rtype: iterable
+ @return: A generator; see above for details.
+ """
+ handshaker = self._handshakeServerAsyncHelper(\
+ sharedKeyDB=sharedKeyDB,
+ verifierDB=verifierDB, certChain=certChain,
+ privateKey=privateKey, reqCert=reqCert,
+ sessionCache=sessionCache, settings=settings)
+ for result in self._handshakeWrapperAsync(handshaker, checker):
+ yield result
+
+
+ def _handshakeServerAsyncHelper(self, sharedKeyDB, verifierDB,
+ certChain, privateKey, reqCert, sessionCache,
+ settings):
+
+ self._handshakeStart(client=False)
+
+ if (not sharedKeyDB) and (not verifierDB) and (not certChain):
+ raise ValueError("Caller passed no authentication credentials")
+ if certChain and not privateKey:
+ raise ValueError("Caller passed a certChain but no privateKey")
+ if privateKey and not certChain:
+ raise ValueError("Caller passed a privateKey but no certChain")
+
+ if not settings:
+ settings = HandshakeSettings()
+ settings = settings._filter()
+
+ #Initialize acceptable cipher suites
+ cipherSuites = []
+ if verifierDB:
+ if certChain:
+ cipherSuites += \
+ CipherSuite.getSrpRsaSuites(settings.cipherNames)
+ cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
+ if sharedKeyDB or certChain:
+ cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
+
+ #Initialize acceptable certificate type
+ certificateType = None
+ if certChain:
+ try:
+ import cryptoIDlib.CertChain
+ if isinstance(certChain, cryptoIDlib.CertChain.CertChain):
+ certificateType = CertificateType.cryptoID
+ except ImportError:
+ pass
+ if isinstance(certChain, X509CertChain):
+ certificateType = CertificateType.x509
+ if certificateType == None:
+ raise ValueError("Unrecognized certificate type")
+
+ #Initialize locals
+ clientCertChain = None
+ serverCertChain = None #We may set certChain to this later
+ postFinishedError = None
+
+ #Tentatively set version to most-desirable version, so if an error
+ #occurs parsing the ClientHello, this is what we'll use for the
+ #error alert
+ self.version = settings.maxVersion
+
+ #Get ClientHello
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.client_hello):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ clientHello = result
+
+ #If client's version is too low, reject it
+ if clientHello.client_version < settings.minVersion:
+ self.version = settings.minVersion
+ for result in self._sendError(\
+ AlertDescription.protocol_version,
+ "Too old version: %s" % str(clientHello.client_version)):
+ yield result
+
+ #If client's version is too high, propose my highest version
+ elif clientHello.client_version > settings.maxVersion:
+ self.version = settings.maxVersion
+
+ else:
+ #Set the version to the client's version
+ self.version = clientHello.client_version
+
+ #Get the client nonce; create server nonce
+ clientRandom = clientHello.random
+ serverRandom = getRandomBytes(32)
+
+ #Calculate the first cipher suite intersection.
+ #This is the 'privileged' ciphersuite. We'll use it if we're
+ #doing a shared-key resumption or a new negotiation. In fact,
+ #the only time we won't use it is if we're resuming a non-sharedkey
+ #session, in which case we use the ciphersuite from the session.
+ #
+ #Given the current ciphersuite ordering, this means we prefer SRP
+ #over non-SRP.
+ for cipherSuite in cipherSuites:
+ if cipherSuite in clientHello.cipher_suites:
+ break
+ else:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure):
+ yield result
+
+ #If resumption was requested...
+ if clientHello.session_id and (sharedKeyDB or sessionCache):
+ session = None
+
+ #Check in the sharedKeys container
+ if sharedKeyDB and len(clientHello.session_id)==16:
+ try:
+ #Trim off zero padding, if any
+ for x in range(16):
+ if clientHello.session_id[x]==0:
+ break
+ self.allegedSharedKeyUsername = bytesToString(\
+ clientHello.session_id[:x])
+ session = sharedKeyDB[self.allegedSharedKeyUsername]
+ if not session.sharedKey:
+ raise AssertionError()
+ #use privileged ciphersuite
+ session.cipherSuite = cipherSuite
+ except KeyError:
+ pass
+
+ #Then check in the session cache
+ if sessionCache and not session:
+ try:
+ session = sessionCache[bytesToString(\
+ clientHello.session_id)]
+ if session.sharedKey:
+ raise AssertionError()
+ if not session.resumable:
+ raise AssertionError()
+ #Check for consistency with ClientHello
+ if session.cipherSuite not in cipherSuites:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure):
+ yield result
+ if session.cipherSuite not in clientHello.cipher_suites:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure):
+ yield result
+ if clientHello.srp_username:
+ if clientHello.srp_username != session.srpUsername:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure):
+ yield result
+ except KeyError:
+ pass
+
+ #If a session is found..
+ if session:
+ #Set the session
+ self.session = session
+
+ #Send ServerHello
+ serverHello = ServerHello()
+ serverHello.create(self.version, serverRandom,
+ session.sessionID, session.cipherSuite,
+ certificateType)
+ for result in self._sendMsg(serverHello):
+ yield result
+
+ #From here on, the client's messages must have the right version
+ self._versionCheck = True
+
+ #Calculate pending connection states
+ self._calcPendingStates(clientRandom, serverRandom,
+ settings.cipherImplementations)
+
+ #Exchange ChangeCipherSpec and Finished messages
+ for result in self._sendFinished():
+ yield result
+ for result in self._getFinished():
+ yield result
+
+ #Mark the connection as open
+ self._handshakeDone(resumed=True)
+ return
+
+
+ #If not a resumption...
+
+ #TRICKY: we might have chosen an RSA suite that was only deemed
+ #acceptable because of the shared-key resumption. If the shared-
+ #key resumption failed, because the identifier wasn't recognized,
+ #we might fall through to here, where we have an RSA suite
+ #chosen, but no certificate.
+ if cipherSuite in CipherSuite.rsaSuites and not certChain:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure):
+ yield result
+
+ #If an RSA suite is chosen, check for certificate type intersection
+ #(We do this check down here because if the mismatch occurs but the
+ # client is using a shared-key session, it's okay)
+ if cipherSuite in CipherSuite.rsaSuites + \
+ CipherSuite.srpRsaSuites:
+ if certificateType not in clientHello.certificate_types:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure,
+ "the client doesn't support my certificate type"):
+ yield result
+
+ #Move certChain -> serverCertChain, now that we're using it
+ serverCertChain = certChain
+
+
+ #Create sessionID
+ if sessionCache:
+ sessionID = getRandomBytes(32)
+ else:
+ sessionID = createByteArraySequence([])
+
+ #If we've selected an SRP suite, exchange keys and calculate
+ #premaster secret:
+ if cipherSuite in CipherSuite.srpSuites + CipherSuite.srpRsaSuites:
+
+ #If there's no SRP username...
+ if not clientHello.srp_username:
+
+ #Ask the client to re-send ClientHello with one
+ for result in self._sendMsg(Alert().create(\
+ AlertDescription.missing_srp_username,
+ AlertLevel.warning)):
+ yield result
+
+ #Get ClientHello
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.client_hello):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ clientHello = result
+
+ #Check ClientHello
+ #If client's version is too low, reject it (COPIED CODE; BAD!)
+ if clientHello.client_version < settings.minVersion:
+ self.version = settings.minVersion
+ for result in self._sendError(\
+ AlertDescription.protocol_version,
+ "Too old version: %s" % str(clientHello.client_version)):
+ yield result
+
+ #If client's version is too high, propose my highest version
+ elif clientHello.client_version > settings.maxVersion:
+ self.version = settings.maxVersion
+
+ else:
+ #Set the version to the client's version
+ self.version = clientHello.client_version
+
+ #Recalculate the privileged cipher suite, making sure to
+ #pick an SRP suite
+ cipherSuites = [c for c in cipherSuites if c in \
+ CipherSuite.srpSuites + \
+ CipherSuite.srpRsaSuites]
+ for cipherSuite in cipherSuites:
+ if cipherSuite in clientHello.cipher_suites:
+ break
+ else:
+ for result in self._sendError(\
+ AlertDescription.handshake_failure):
+ yield result
+
+ #Get the client nonce; create server nonce
+ clientRandom = clientHello.random
+ serverRandom = getRandomBytes(32)
+
+ #The username better be there, this time
+ if not clientHello.srp_username:
+ for result in self._sendError(\
+ AlertDescription.illegal_parameter,
+ "Client resent a hello, but without the SRP"\
+ " username"):
+ yield result
+
+
+ #Get username
+ self.allegedSrpUsername = clientHello.srp_username
+
+ #Get parameters from username
+ try:
+ entry = verifierDB[self.allegedSrpUsername]
+ except KeyError:
+ for result in self._sendError(\
+ AlertDescription.unknown_srp_username):
+ yield result
+ (N, g, s, v) = entry
+
+ #Calculate server's ephemeral DH values (b, B)
+ b = bytesToNumber(getRandomBytes(32))
+ k = makeK(N, g)
+ B = (powMod(g, b, N) + (k*v)) % N
+
+ #Create ServerKeyExchange, signing it if necessary
+ serverKeyExchange = ServerKeyExchange(cipherSuite)
+ serverKeyExchange.createSRP(N, g, stringToBytes(s), B)
+ if cipherSuite in CipherSuite.srpRsaSuites:
+ hashBytes = serverKeyExchange.hash(clientRandom,
+ serverRandom)
+ serverKeyExchange.signature = privateKey.sign(hashBytes)
+
+ #Send ServerHello[, Certificate], ServerKeyExchange,
+ #ServerHelloDone
+ msgs = []
+ serverHello = ServerHello()
+ serverHello.create(self.version, serverRandom, sessionID,
+ cipherSuite, certificateType)
+ msgs.append(serverHello)
+ if cipherSuite in CipherSuite.srpRsaSuites:
+ certificateMsg = Certificate(certificateType)
+ certificateMsg.create(serverCertChain)
+ msgs.append(certificateMsg)
+ msgs.append(serverKeyExchange)
+ msgs.append(ServerHelloDone())
+ for result in self._sendMsgs(msgs):
+ yield result
+
+ #From here on, the client's messages must have the right version
+ self._versionCheck = True
+
+ #Get and check ClientKeyExchange
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.client_key_exchange,
+ cipherSuite):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ clientKeyExchange = result
+ A = clientKeyExchange.srp_A
+ if A % N == 0:
+ postFinishedError = (AlertDescription.illegal_parameter,
+ "Suspicious A value")
+ #Calculate u
+ u = makeU(N, A, B)
+
+ #Calculate premaster secret
+ S = powMod((A * powMod(v,u,N)) % N, b, N)
+ premasterSecret = numberToBytes(S)
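+
+            #(For reference: this is the standard SRP server-side
+            # computation -- B = g^b + k*v mod N was sent above, u is
+            # derived by hashing A and B, and the premaster secret is the
+            # byte encoding of S = (A * v^u)^b mod N.)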
+
+
+ #If we've selected an RSA suite, exchange keys and calculate
+ #premaster secret:
+ elif cipherSuite in CipherSuite.rsaSuites:
+
+ #Send ServerHello, Certificate[, CertificateRequest],
+ #ServerHelloDone
+ msgs = []
+ msgs.append(ServerHello().create(self.version, serverRandom,
+ sessionID, cipherSuite, certificateType))
+ msgs.append(Certificate(certificateType).create(serverCertChain))
+ if reqCert:
+ msgs.append(CertificateRequest())
+ msgs.append(ServerHelloDone())
+ for result in self._sendMsgs(msgs):
+ yield result
+
+ #From here on, the client's messages must have the right version
+ self._versionCheck = True
+
+ #Get [Certificate,] (if was requested)
+ if reqCert:
+ if self.version == (3,0):
+ for result in self._getMsg((ContentType.handshake,
+ ContentType.alert),
+ HandshakeType.certificate,
+ certificateType):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ msg = result
+
+ if isinstance(msg, Alert):
+ #If it's not a no_certificate alert, re-raise
+ alert = msg
+ if alert.description != \
+ AlertDescription.no_certificate:
+ self._shutdown(False)
+ raise TLSRemoteAlert(alert)
+ elif isinstance(msg, Certificate):
+ clientCertificate = msg
+ if clientCertificate.certChain and \
+ clientCertificate.certChain.getNumCerts()!=0:
+ clientCertChain = clientCertificate.certChain
+ else:
+ raise AssertionError()
+ elif self.version in ((3,1), (3,2)):
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.certificate,
+ certificateType):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ clientCertificate = result
+ if clientCertificate.certChain and \
+ clientCertificate.certChain.getNumCerts()!=0:
+ clientCertChain = clientCertificate.certChain
+ else:
+ raise AssertionError()
+
+ #Get ClientKeyExchange
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.client_key_exchange,
+ cipherSuite):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ clientKeyExchange = result
+
+ #Decrypt ClientKeyExchange
+ premasterSecret = privateKey.decrypt(\
+ clientKeyExchange.encryptedPreMasterSecret)
+
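+            #Decryption or format failures below are handled by silently
+            #substituting a random premaster secret; this is the standard
+            #countermeasure against Bleichenbacher-style padding oracles and
+            #version-rollback probing, since a detectable error would leak
+            #information about the decrypted padding.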
+            randomPreMasterSecret = getRandomBytes(48)
+            if not premasterSecret:
+                premasterSecret = randomPreMasterSecret
+            elif len(premasterSecret)!=48:
+                premasterSecret = randomPreMasterSecret
+            else:
+                versionCheck = (premasterSecret[0], premasterSecret[1])
+                if versionCheck != clientHello.client_version:
+                    if versionCheck != self.version: #Tolerate buggy IE clients
+                        premasterSecret = randomPreMasterSecret
+
+ #Get and check CertificateVerify, if relevant
+ if clientCertChain:
+ if self.version == (3,0):
+ #Create a temporary session object, just for the purpose
+ #of checking the CertificateVerify
+ session = Session()
+ session._calcMasterSecret(self.version, premasterSecret,
+ clientRandom, serverRandom)
+ verifyBytes = self._calcSSLHandshakeHash(\
+ session.masterSecret, "")
+ elif self.version in ((3,1), (3,2)):
+ verifyBytes = stringToBytes(self._handshake_md5.digest() +\
+ self._handshake_sha.digest())
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.certificate_verify):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ certificateVerify = result
+ publicKey = clientCertChain.getEndEntityPublicKey()
+ if len(publicKey) < settings.minKeySize:
+ postFinishedError = (AlertDescription.handshake_failure,
+ "Client's public key too small: %d" % len(publicKey))
+ if len(publicKey) > settings.maxKeySize:
+ postFinishedError = (AlertDescription.handshake_failure,
+ "Client's public key too large: %d" % len(publicKey))
+
+ if not publicKey.verify(certificateVerify.signature,
+ verifyBytes):
+ postFinishedError = (AlertDescription.decrypt_error,
+ "Signature failed to verify")
+
+
+ #Create the session object
+ self.session = Session()
+ self.session._calcMasterSecret(self.version, premasterSecret,
+ clientRandom, serverRandom)
+ self.session.sessionID = sessionID
+ self.session.cipherSuite = cipherSuite
+ self.session.srpUsername = self.allegedSrpUsername
+ self.session.clientCertChain = clientCertChain
+ self.session.serverCertChain = serverCertChain
+
+ #Calculate pending connection states
+ self._calcPendingStates(clientRandom, serverRandom,
+ settings.cipherImplementations)
+
+ #Exchange ChangeCipherSpec and Finished messages
+ for result in self._getFinished():
+ yield result
+
+ #If we were holding a post-finished error until receiving the client
+ #finished message, send it now. We delay the call until this point
+ #because calling sendError() throws an exception, and our caller might
+ #shut down the socket upon receiving the exception. If he did, and the
+ #client was still sending its ChangeCipherSpec or Finished messages, it
+ #would cause a socket error on the client side. This is a lot of
+ #consideration to show to misbehaving clients, but this would also
+ #cause problems with fault-testing.
+ if postFinishedError:
+ for result in self._sendError(*postFinishedError):
+ yield result
+
+ for result in self._sendFinished():
+ yield result
+
+ #Add the session object to the session cache
+ if sessionCache and sessionID:
+ sessionCache[bytesToString(sessionID)] = self.session
+
+ #Mark the connection as open
+ self.session._setResumable(True)
+ self._handshakeDone(resumed=False)
+
+
+ def _handshakeWrapperAsync(self, handshaker, checker):
+ if not self.fault:
+ try:
+ for result in handshaker:
+ yield result
+ if checker:
+ try:
+ checker(self)
+ except TLSAuthenticationError:
+ alert = Alert().create(AlertDescription.close_notify,
+ AlertLevel.fatal)
+ for result in self._sendMsg(alert):
+ yield result
+ raise
+ except:
+ self._shutdown(False)
+ raise
+ else:
+ try:
+ for result in handshaker:
+ yield result
+ if checker:
+ try:
+ checker(self)
+ except TLSAuthenticationError:
+ alert = Alert().create(AlertDescription.close_notify,
+ AlertLevel.fatal)
+ for result in self._sendMsg(alert):
+ yield result
+ raise
+ except socket.error, e:
+ raise TLSFaultError("socket error!")
+ except TLSAbruptCloseError, e:
+ raise TLSFaultError("abrupt close error!")
+ except TLSAlert, alert:
+ if alert.description not in Fault.faultAlerts[self.fault]:
+ raise TLSFaultError(str(alert))
+ else:
+ pass
+ except:
+ self._shutdown(False)
+ raise
+ else:
+ raise TLSFaultError("No error!")
+
+
+ def _getKeyFromChain(self, certificate, settings):
+ #Get and check cert chain from the Certificate message
+ certChain = certificate.certChain
+ if not certChain or certChain.getNumCerts() == 0:
+ for result in self._sendError(AlertDescription.illegal_parameter,
+ "Other party sent a Certificate message without "\
+ "certificates"):
+ yield result
+
+ #Get and check public key from the cert chain
+ publicKey = certChain.getEndEntityPublicKey()
+ if len(publicKey) < settings.minKeySize:
+ for result in self._sendError(AlertDescription.handshake_failure,
+ "Other party's public key too small: %d" % len(publicKey)):
+ yield result
+ if len(publicKey) > settings.maxKeySize:
+ for result in self._sendError(AlertDescription.handshake_failure,
+ "Other party's public key too large: %d" % len(publicKey)):
+ yield result
+
+ yield publicKey, certChain
diff --git a/python/gdata/tlslite/TLSRecordLayer.py b/python/gdata/tlslite/TLSRecordLayer.py
new file mode 100644
index 0000000..875ce80
--- /dev/null
+++ b/python/gdata/tlslite/TLSRecordLayer.py
@@ -0,0 +1,1123 @@
+"""Helper class for TLSConnection."""
+from __future__ import generators
+
+from utils.compat import *
+from utils.cryptomath import *
+from utils.cipherfactory import createAES, createRC4, createTripleDES
+from utils.codec import *
+from errors import *
+from messages import *
+from mathtls import *
+from constants import *
+from utils.cryptomath import getRandomBytes
+from utils import hmac
+from FileObject import FileObject
+import sha
+import md5
+import socket
+import errno
+import traceback
+
+class _ConnectionState:
+ def __init__(self):
+ self.macContext = None
+ self.encContext = None
+ self.seqnum = 0
+
+ def getSeqNumStr(self):
+ w = Writer(8)
+ w.add(self.seqnum, 8)
+ seqnumStr = bytesToString(w.bytes)
+ self.seqnum += 1
+ return seqnumStr
+
+
+class TLSRecordLayer:
+ """
+ This class handles data transmission for a TLS connection.
+
+ Its only subclass is L{tlslite.TLSConnection.TLSConnection}. We've
+ separated the code in this class from TLSConnection to make things
+ more readable.
+
+
+ @type sock: socket.socket
+ @ivar sock: The underlying socket object.
+
+ @type session: L{tlslite.Session.Session}
+ @ivar session: The session corresponding to this connection.
+
+ Due to TLS session resumption, multiple connections can correspond
+ to the same underlying session.
+
+ @type version: tuple
+ @ivar version: The TLS version being used for this connection.
+
+ (3,0) means SSL 3.0, and (3,1) means TLS 1.0.
+
+ @type closed: bool
+ @ivar closed: If this connection is closed.
+
+ @type resumed: bool
+ @ivar resumed: If this connection is based on a resumed session.
+
+ @type allegedSharedKeyUsername: str or None
+ @ivar allegedSharedKeyUsername: This is set to the shared-key
+ username asserted by the client, whether the handshake succeeded or
+ not. If the handshake fails, this can be inspected to
+ determine if a guessing attack is in progress against a particular
+ user account.
+
+ @type allegedSrpUsername: str or None
+ @ivar allegedSrpUsername: This is set to the SRP username
+ asserted by the client, whether the handshake succeeded or not.
+ If the handshake fails, this can be inspected to determine
+ if a guessing attack is in progress against a particular user
+ account.
+
+ @type closeSocket: bool
+ @ivar closeSocket: If the socket should be closed when the
+ connection is closed (writable).
+
+ If you set this to True, TLS Lite will assume the responsibility of
+    closing the socket when the TLS Connection is shut down (either
+ through an error or through the user calling close()). The default
+ is False.
+
+ @type ignoreAbruptClose: bool
+ @ivar ignoreAbruptClose: If an abrupt close of the socket should
+ raise an error (writable).
+
+ If you set this to True, TLS Lite will not raise a
+ L{tlslite.errors.TLSAbruptCloseError} exception if the underlying
+ socket is unexpectedly closed. Such an unexpected closure could be
+ caused by an attacker. However, it also occurs with some incorrect
+ TLS implementations.
+
+ You should set this to True only if you're not worried about an
+ attacker truncating the connection, and only if necessary to avoid
+ spurious errors. The default is False.
+
+ @sort: __init__, read, readAsync, write, writeAsync, close, closeAsync,
+ getCipherImplementation, getCipherName
+ """
+
+ def __init__(self, sock):
+ self.sock = sock
+
+ #My session object (Session instance; read-only)
+ self.session = None
+
+ #Am I a client or server?
+ self._client = None
+
+ #Buffers for processing messages
+ self._handshakeBuffer = []
+ self._readBuffer = ""
+
+ #Handshake digests
+ self._handshake_md5 = md5.md5()
+ self._handshake_sha = sha.sha()
+
+ #TLS Protocol Version
+ self.version = (0,0) #read-only
+ self._versionCheck = False #Once we choose a version, this is True
+
+ #Current and Pending connection states
+ self._writeState = _ConnectionState()
+ self._readState = _ConnectionState()
+ self._pendingWriteState = _ConnectionState()
+ self._pendingReadState = _ConnectionState()
+
+ #Is the connection open?
+ self.closed = True #read-only
+ self._refCount = 0 #Used to trigger closure
+
+ #Is this a resumed (or shared-key) session?
+ self.resumed = False #read-only
+
+ #What username did the client claim in his handshake?
+ self.allegedSharedKeyUsername = None
+ self.allegedSrpUsername = None
+
+ #On a call to close(), do we close the socket? (writeable)
+ self.closeSocket = False
+
+ #If the socket is abruptly closed, do we ignore it
+ #and pretend the connection was shut down properly? (writeable)
+ self.ignoreAbruptClose = False
+
+ #Fault we will induce, for testing purposes
+ self.fault = None
+
+ #*********************************************************
+ # Public Functions START
+ #*********************************************************
+
+ def read(self, max=None, min=1):
+ """Read some data from the TLS connection.
+
+ This function will block until at least 'min' bytes are
+ available (or the connection is closed).
+
+ If an exception is raised, the connection will have been
+ automatically closed.
+
+ @type max: int
+ @param max: The maximum number of bytes to return.
+
+ @type min: int
+ @param min: The minimum number of bytes to return
+
+ @rtype: str
+ @return: A string of no more than 'max' bytes, and no fewer
+ than 'min' (unless the connection has been closed, in which
+ case fewer than 'min' bytes may be returned).
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ """
+ for result in self.readAsync(max, min):
+ pass
+ return result
+
+ def readAsync(self, max=None, min=1):
+ """Start a read operation on the TLS connection.
+
+ This function returns a generator which behaves similarly to
+ read(). Successive invocations of the generator will return 0
+ if it is waiting to read from the socket, 1 if it is waiting
+ to write to the socket, or a string if the read operation has
+ completed.
+
+ @rtype: iterable
+ @return: A generator; see above for details.
+ """
+ try:
+            while len(self._readBuffer)<min and not self.closed:
+                try:
+                    for result in self._getMsg(ContentType.application_data):
+                        if result in (0,1):
+                            yield result
+                    applicationData = result
+                    self._readBuffer += bytesToString(applicationData.write())
+                except TLSRemoteAlert, alert:
+                    if alert.description != AlertDescription.close_notify:
+                        raise
+                except TLSAbruptCloseError:
+                    if not self.ignoreAbruptClose:
+                        raise
+                    else:
+                        self._shutdown(True)
+
+            if max == None:
+                max = len(self._readBuffer)
+
+            returnStr = self._readBuffer[:max]
+            self._readBuffer = self._readBuffer[max:]
+            yield returnStr
+        except:
+            self._shutdown(False)
+            raise
+
+    def write(self, s):
+        """Write some data to the TLS connection.
+
+        This function will block until all the data has been sent.
+
+        If an exception is raised, the connection will have been
+        automatically closed.
+
+        @type s: str
+        @param s: The data to transmit to the other party.
+
+        @raise socket.error: If a socket error occurs.
+        """
+        for result in self.writeAsync(s):
+            pass
+
+    def writeAsync(self, s):
+        """Start a write operation on the TLS connection.
+
+        This function returns a generator which behaves similarly to
+        write().  Successive invocations of the generator will return 1
+        if it is waiting to write to the socket, or will raise
+        StopIteration if the write operation has completed.
+
+        @rtype: iterable
+        @return: A generator; see above for details.
+        """
+        try:
+            if self.closed:
+                raise ValueError()
+
+            index = 0
+            blockSize = 16384
+            skipEmptyFrag = False
+            while 1:
+                startIndex = index * blockSize
+                endIndex = startIndex + blockSize
+                if startIndex >= len(s):
+ break
+ if endIndex > len(s):
+ endIndex = len(s)
+ block = stringToBytes(s[startIndex : endIndex])
+ applicationData = ApplicationData().create(block)
+ for result in self._sendMsg(applicationData, skipEmptyFrag):
+ yield result
+                skipEmptyFrag = True #only send an empty fragment on 1st message
+ index += 1
+ except:
+ self._shutdown(False)
+ raise
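+
+    # A minimal sketch of driving the async interface by hand -- the
+    # 'connection' object and its underlying non-blocking socket 'sock'
+    # are assumed to already exist:
+    #
+    #   import select
+    #   for result in connection.readAsync(max=1024):
+    #       if result == 0:
+    #           select.select([sock], [], [])   #wait until readable
+    #       elif result == 1:
+    #           select.select([], [sock], [])   #wait until writable
+    #   data = result   #the generator's final value is the completed read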
+
+ def close(self):
+ """Close the TLS connection.
+
+ This function will block until it has exchanged close_notify
+ alerts with the other party. After doing so, it will shut down the
+ TLS connection. Further attempts to read through this connection
+ will return "". Further attempts to write through this connection
+ will raise ValueError.
+
+ If makefile() has been called on this connection, the connection
+        will not be closed until the connection object and all file
+ objects have been closed.
+
+ Even if an exception is raised, the connection will have been
+ closed.
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ """
+ if not self.closed:
+ for result in self._decrefAsync():
+ pass
+
+ def closeAsync(self):
+ """Start a close operation on the TLS connection.
+
+ This function returns a generator which behaves similarly to
+ close(). Successive invocations of the generator will return 0
+ if it is waiting to read from the socket, 1 if it is waiting
+ to write to the socket, or will raise StopIteration if the
+ close operation has completed.
+
+ @rtype: iterable
+ @return: A generator; see above for details.
+ """
+ if not self.closed:
+ for result in self._decrefAsync():
+ yield result
+
+ def _decrefAsync(self):
+ self._refCount -= 1
+ if self._refCount == 0 and not self.closed:
+ try:
+ for result in self._sendMsg(Alert().create(\
+ AlertDescription.close_notify, AlertLevel.warning)):
+ yield result
+ alert = None
+ while not alert:
+ for result in self._getMsg((ContentType.alert, \
+ ContentType.application_data)):
+ if result in (0,1):
+ yield result
+ if result.contentType == ContentType.alert:
+ alert = result
+ if alert.description == AlertDescription.close_notify:
+ self._shutdown(True)
+ else:
+ raise TLSRemoteAlert(alert)
+ except (socket.error, TLSAbruptCloseError):
+ #If the other side closes the socket, that's okay
+ self._shutdown(True)
+ except:
+ self._shutdown(False)
+ raise
+
+ def getCipherName(self):
+ """Get the name of the cipher used with this connection.
+
+ @rtype: str
+ @return: The name of the cipher used with this connection.
+ Either 'aes128', 'aes256', 'rc4', or '3des'.
+ """
+ if not self._writeState.encContext:
+ return None
+ return self._writeState.encContext.name
+
+ def getCipherImplementation(self):
+ """Get the name of the cipher implementation used with
+ this connection.
+
+ @rtype: str
+ @return: The name of the cipher implementation used with
+ this connection. Either 'python', 'cryptlib', 'openssl',
+ or 'pycrypto'.
+ """
+ if not self._writeState.encContext:
+ return None
+ return self._writeState.encContext.implementation
+
+
+
+ #Emulate a socket, somewhat -
+ def send(self, s):
+ """Send data to the TLS connection (socket emulation).
+
+ @raise socket.error: If a socket error occurs.
+ """
+ self.write(s)
+ return len(s)
+
+ def sendall(self, s):
+ """Send data to the TLS connection (socket emulation).
+
+ @raise socket.error: If a socket error occurs.
+ """
+ self.write(s)
+
+ def recv(self, bufsize):
+ """Get some data from the TLS connection (socket emulation).
+
+ @raise socket.error: If a socket error occurs.
+ @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
+ without a preceding alert.
+ @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
+ """
+ return self.read(bufsize)
+
+ def makefile(self, mode='r', bufsize=-1):
+ """Create a file object for the TLS connection (socket emulation).
+
+ @rtype: L{tlslite.FileObject.FileObject}
+ """
+ self._refCount += 1
+ return FileObject(self, mode, bufsize)
+
+ def getsockname(self):
+ """Return the socket's own address (socket emulation)."""
+ return self.sock.getsockname()
+
+ def getpeername(self):
+ """Return the remote address to which the socket is connected
+ (socket emulation)."""
+ return self.sock.getpeername()
+
+ def settimeout(self, value):
+ """Set a timeout on blocking socket operations (socket emulation)."""
+ return self.sock.settimeout(value)
+
+ def gettimeout(self):
+ """Return the timeout associated with socket operations (socket
+ emulation)."""
+ return self.sock.gettimeout()
+
+ def setsockopt(self, level, optname, value):
+ """Set the value of the given socket option (socket emulation)."""
+ return self.sock.setsockopt(level, optname, value)
+
+
+ #*********************************************************
+ # Public Functions END
+ #*********************************************************
+
+ def _shutdown(self, resumable):
+ self._writeState = _ConnectionState()
+ self._readState = _ConnectionState()
+ #Don't do this: self._readBuffer = ""
+ self.version = (0,0)
+ self._versionCheck = False
+ self.closed = True
+ if self.closeSocket:
+ self.sock.close()
+
+ #Even if resumable is False, we'll never toggle this on
+ if not resumable and self.session:
+ self.session.resumable = False
+
+
+ def _sendError(self, alertDescription, errorStr=None):
+ alert = Alert().create(alertDescription, AlertLevel.fatal)
+ for result in self._sendMsg(alert):
+ yield result
+ self._shutdown(False)
+ raise TLSLocalAlert(alert, errorStr)
+
+ def _sendMsgs(self, msgs):
+ skipEmptyFrag = False
+ for msg in msgs:
+ for result in self._sendMsg(msg, skipEmptyFrag):
+ yield result
+ skipEmptyFrag = True
+
+ def _sendMsg(self, msg, skipEmptyFrag=False):
+ bytes = msg.write()
+ contentType = msg.contentType
+
+ #Whenever we're connected and asked to send a message,
+ #we first send an empty Application Data message. This prevents
+ #an attacker from launching a chosen-plaintext attack based on
+ #knowing the next IV.
+ if not self.closed and not skipEmptyFrag and self.version == (3,1):
+ if self._writeState.encContext:
+ if self._writeState.encContext.isBlockCipher:
+ for result in self._sendMsg(ApplicationData(),
+ skipEmptyFrag=True):
+ yield result
+
+ #Update handshake hashes
+ if contentType == ContentType.handshake:
+ bytesStr = bytesToString(bytes)
+ self._handshake_md5.update(bytesStr)
+ self._handshake_sha.update(bytesStr)
+
+ #Calculate MAC
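+        #(The MAC input is seq_num || content_type || [protocol version,
+        # for TLS only] || fragment length || fragment, per the SSLv3/TLS
+        # record layer specifications.)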
+ if self._writeState.macContext:
+ seqnumStr = self._writeState.getSeqNumStr()
+ bytesStr = bytesToString(bytes)
+ mac = self._writeState.macContext.copy()
+ mac.update(seqnumStr)
+ mac.update(chr(contentType))
+ if self.version == (3,0):
+ mac.update( chr( int(len(bytes)/256) ) )
+ mac.update( chr( int(len(bytes)%256) ) )
+ elif self.version in ((3,1), (3,2)):
+ mac.update(chr(self.version[0]))
+ mac.update(chr(self.version[1]))
+ mac.update( chr( int(len(bytes)/256) ) )
+ mac.update( chr( int(len(bytes)%256) ) )
+ else:
+ raise AssertionError()
+ mac.update(bytesStr)
+ macString = mac.digest()
+ macBytes = stringToBytes(macString)
+ if self.fault == Fault.badMAC:
+ macBytes[0] = (macBytes[0]+1) % 256
+
+ #Encrypt for Block or Stream Cipher
+ if self._writeState.encContext:
+ #Add padding and encrypt (for Block Cipher):
+ if self._writeState.encContext.isBlockCipher:
+
+ #Add TLS 1.1 fixed block
+ if self.version == (3,2):
+ bytes = self.fixedIVBlock + bytes
+
+ #Add padding: bytes = bytes + (macBytes + paddingBytes)
+ currentLength = len(bytes) + len(macBytes) + 1
+ blockLength = self._writeState.encContext.block_size
+ paddingLength = blockLength-(currentLength % blockLength)
+
+ paddingBytes = createByteArraySequence([paddingLength] * \
+ (paddingLength+1))
+ if self.fault == Fault.badPadding:
+ paddingBytes[0] = (paddingBytes[0]+1) % 256
+ endBytes = concatArrays(macBytes, paddingBytes)
+ bytes = concatArrays(bytes, endBytes)
+ #Encrypt
+ plaintext = stringToBytes(bytes)
+ ciphertext = self._writeState.encContext.encrypt(plaintext)
+ bytes = stringToBytes(ciphertext)
+
+ #Encrypt (for Stream Cipher)
+ else:
+ bytes = concatArrays(bytes, macBytes)
+ plaintext = bytesToString(bytes)
+ ciphertext = self._writeState.encContext.encrypt(plaintext)
+ bytes = stringToBytes(ciphertext)
+
+ #Add record header and send
+ r = RecordHeader3().create(self.version, contentType, len(bytes))
+ s = bytesToString(concatArrays(r.write(), bytes))
+ while 1:
+ try:
+ bytesSent = self.sock.send(s) #Might raise socket.error
+ except socket.error, why:
+ if why[0] == errno.EWOULDBLOCK:
+ yield 1
+ continue
+ else:
+ raise
+ if bytesSent == len(s):
+ return
+ s = s[bytesSent:]
+ yield 1
+
+
+ def _getMsg(self, expectedType, secondaryType=None, constructorType=None):
+ try:
+ if not isinstance(expectedType, tuple):
+ expectedType = (expectedType,)
+
+ #Spin in a loop, until we've got a non-empty record of a type we
+ #expect. The loop will be repeated if:
+ # - we receive a renegotiation attempt; we send no_renegotiation,
+ # then try again
+ # - we receive an empty application-data fragment; we try again
+ while 1:
+ for result in self._getNextRecord():
+ if result in (0,1):
+ yield result
+ recordHeader, p = result
+
+ #If this is an empty application-data fragment, try again
+ if recordHeader.type == ContentType.application_data:
+ if p.index == len(p.bytes):
+ continue
+
+ #If we received an unexpected record type...
+ if recordHeader.type not in expectedType:
+
+ #If we received an alert...
+ if recordHeader.type == ContentType.alert:
+ alert = Alert().parse(p)
+
+ #We either received a fatal error, a warning, or a
+ #close_notify. In any case, we're going to close the
+ #connection. In the latter two cases we respond with
+ #a close_notify, but ignore any socket errors, since
+ #the other side might have already closed the socket.
+ if alert.level == AlertLevel.warning or \
+ alert.description == AlertDescription.close_notify:
+
+ #If the sendMsg() call fails because the socket has
+ #already been closed, we will be forgiving and not
+ #report the error nor invalidate the "resumability"
+ #of the session.
+ try:
+ alertMsg = Alert()
+ alertMsg.create(AlertDescription.close_notify,
+ AlertLevel.warning)
+ for result in self._sendMsg(alertMsg):
+ yield result
+ except socket.error:
+ pass
+
+ if alert.description == \
+ AlertDescription.close_notify:
+ self._shutdown(True)
+ elif alert.level == AlertLevel.warning:
+ self._shutdown(False)
+
+ else: #Fatal alert:
+ self._shutdown(False)
+
+ #Raise the alert as an exception
+ raise TLSRemoteAlert(alert)
+
+ #If we received a renegotiation attempt...
+ if recordHeader.type == ContentType.handshake:
+ subType = p.get(1)
+ reneg = False
+ if self._client:
+ if subType == HandshakeType.hello_request:
+ reneg = True
+ else:
+ if subType == HandshakeType.client_hello:
+ reneg = True
+ #Send no_renegotiation, then try again
+ if reneg:
+ alertMsg = Alert()
+ alertMsg.create(AlertDescription.no_renegotiation,
+ AlertLevel.warning)
+ for result in self._sendMsg(alertMsg):
+ yield result
+ continue
+
+ #Otherwise: this is an unexpected record, but neither an
+ #alert nor renegotiation
+ for result in self._sendError(\
+ AlertDescription.unexpected_message,
+ "received type=%d" % recordHeader.type):
+ yield result
+
+ break
+
+ #Parse based on content_type
+ if recordHeader.type == ContentType.change_cipher_spec:
+ yield ChangeCipherSpec().parse(p)
+ elif recordHeader.type == ContentType.alert:
+ yield Alert().parse(p)
+ elif recordHeader.type == ContentType.application_data:
+ yield ApplicationData().parse(p)
+ elif recordHeader.type == ContentType.handshake:
+ #Convert secondaryType to tuple, if it isn't already
+ if not isinstance(secondaryType, tuple):
+ secondaryType = (secondaryType,)
+
+ #If it's a handshake message, check handshake header
+ if recordHeader.ssl2:
+ subType = p.get(1)
+ if subType != HandshakeType.client_hello:
+ for result in self._sendError(\
+ AlertDescription.unexpected_message,
+ "Can only handle SSLv2 ClientHello messages"):
+ yield result
+ if HandshakeType.client_hello not in secondaryType:
+ for result in self._sendError(\
+ AlertDescription.unexpected_message):
+ yield result
+ subType = HandshakeType.client_hello
+ else:
+ subType = p.get(1)
+ if subType not in secondaryType:
+ for result in self._sendError(\
+ AlertDescription.unexpected_message,
+ "Expecting %s, got %s" % (str(secondaryType), subType)):
+ yield result
+
+ #Update handshake hashes
+ sToHash = bytesToString(p.bytes)
+ self._handshake_md5.update(sToHash)
+ self._handshake_sha.update(sToHash)
+
+ #Parse based on handshake type
+ if subType == HandshakeType.client_hello:
+ yield ClientHello(recordHeader.ssl2).parse(p)
+ elif subType == HandshakeType.server_hello:
+ yield ServerHello().parse(p)
+ elif subType == HandshakeType.certificate:
+ yield Certificate(constructorType).parse(p)
+ elif subType == HandshakeType.certificate_request:
+ yield CertificateRequest().parse(p)
+ elif subType == HandshakeType.certificate_verify:
+ yield CertificateVerify().parse(p)
+ elif subType == HandshakeType.server_key_exchange:
+ yield ServerKeyExchange(constructorType).parse(p)
+ elif subType == HandshakeType.server_hello_done:
+ yield ServerHelloDone().parse(p)
+ elif subType == HandshakeType.client_key_exchange:
+ yield ClientKeyExchange(constructorType, \
+ self.version).parse(p)
+ elif subType == HandshakeType.finished:
+ yield Finished(self.version).parse(p)
+ else:
+ raise AssertionError()
+
+ #If an exception was raised by a Parser or Message instance:
+ except SyntaxError, e:
+ for result in self._sendError(AlertDescription.decode_error,
+ formatExceptionTrace(e)):
+ yield result
+
+
+ #Returns next record or next handshake message
+ def _getNextRecord(self):
+
+ #If there's a handshake message waiting, return it
+ if self._handshakeBuffer:
+ recordHeader, bytes = self._handshakeBuffer[0]
+ self._handshakeBuffer = self._handshakeBuffer[1:]
+ yield (recordHeader, Parser(bytes))
+ return
+
+ #Otherwise...
+ #Read the next record header
+ bytes = createByteArraySequence([])
+ recordHeaderLength = 1
+ ssl2 = False
+ while 1:
+ try:
+ s = self.sock.recv(recordHeaderLength-len(bytes))
+ except socket.error, why:
+ if why[0] == errno.EWOULDBLOCK:
+ yield 0
+ continue
+ else:
+ raise
+
+ #If the connection was abruptly closed, raise an error
+ if len(s)==0:
+ raise TLSAbruptCloseError()
+
+ bytes += stringToBytes(s)
+ if len(bytes)==1:
+ if bytes[0] in ContentType.all:
+ ssl2 = False
+ recordHeaderLength = 5
+ elif bytes[0] == 128:
+ ssl2 = True
+ recordHeaderLength = 2
+ else:
+ raise SyntaxError()
+ if len(bytes) == recordHeaderLength:
+ break
+
+ #Parse the record header
+ if ssl2:
+ r = RecordHeader2().parse(Parser(bytes))
+ else:
+ r = RecordHeader3().parse(Parser(bytes))
+
+ #Check the record header fields
+ if r.length > 18432:
+ for result in self._sendError(AlertDescription.record_overflow):
+ yield result
+
+ #Read the record contents
+ bytes = createByteArraySequence([])
+ while 1:
+ try:
+ s = self.sock.recv(r.length - len(bytes))
+ except socket.error, why:
+ if why[0] == errno.EWOULDBLOCK:
+ yield 0
+ continue
+ else:
+ raise
+
+ #If the connection is closed, raise a socket error
+ if len(s)==0:
+ raise TLSAbruptCloseError()
+
+ bytes += stringToBytes(s)
+ if len(bytes) == r.length:
+ break
+
+ #Check the record header fields (2)
+ #We do this after reading the contents from the socket, so that
+ #if there's an error, we at least don't leave extra bytes in the
+ #socket..
+ #
+ # THIS CHECK HAS NO SECURITY RELEVANCE (?), BUT COULD HURT INTEROP.
+ # SO WE LEAVE IT OUT FOR NOW.
+ #
+ #if self._versionCheck and r.version != self.version:
+ # for result in self._sendError(AlertDescription.protocol_version,
+ # "Version in header field: %s, should be %s" % (str(r.version),
+ # str(self.version))):
+ # yield result
+
+ #Decrypt the record
+ for result in self._decryptRecord(r.type, bytes):
+ if result in (0,1):
+ yield result
+ else:
+ break
+ bytes = result
+ p = Parser(bytes)
+
+ #If it doesn't contain handshake messages, we can just return it
+ if r.type != ContentType.handshake:
+ yield (r, p)
+ #If it's an SSLv2 ClientHello, we can return it as well
+ elif r.ssl2:
+ yield (r, p)
+ else:
+ #Otherwise, we loop through and add the handshake messages to the
+ #handshake buffer
+ while 1:
+ if p.index == len(bytes): #If we're at the end
+ if not self._handshakeBuffer:
+ for result in self._sendError(\
+ AlertDescription.decode_error, \
+ "Received empty handshake record"):
+ yield result
+ break
+ #There needs to be at least 4 bytes to get a header
+ if p.index+4 > len(bytes):
+ for result in self._sendError(\
+ AlertDescription.decode_error,
+ "A record has a partial handshake message (1)"):
+ yield result
+ p.get(1) # skip handshake type
+ msgLength = p.get(3)
+ if p.index+msgLength > len(bytes):
+ for result in self._sendError(\
+ AlertDescription.decode_error,
+ "A record has a partial handshake message (2)"):
+ yield result
+
+ handshakePair = (r, bytes[p.index-4 : p.index+msgLength])
+ self._handshakeBuffer.append(handshakePair)
+ p.index += msgLength
+
+ #We've moved at least one handshake message into the
+ #handshakeBuffer, return the first one
+ recordHeader, bytes = self._handshakeBuffer[0]
+ self._handshakeBuffer = self._handshakeBuffer[1:]
+ yield (recordHeader, Parser(bytes))
+
+
+ def _decryptRecord(self, recordType, bytes):
+ if self._readState.encContext:
+
+ #Decrypt if it's a block cipher
+ if self._readState.encContext.isBlockCipher:
+ blockLength = self._readState.encContext.block_size
+ if len(bytes) % blockLength != 0:
+ for result in self._sendError(\
+ AlertDescription.decryption_failed,
+ "Encrypted data not a multiple of blocksize"):
+ yield result
+ ciphertext = bytesToString(bytes)
+ plaintext = self._readState.encContext.decrypt(ciphertext)
+ if self.version == (3,2): #For TLS 1.1, remove explicit IV
+ plaintext = plaintext[self._readState.encContext.block_size : ]
+ bytes = stringToBytes(plaintext)
+
+ #Check padding
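+                #(SSLv3 leaves the padding byte values unspecified, so only
+                # the padding length is checked; TLS requires every padding
+                # byte to equal the padding length.)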
+ paddingGood = True
+ paddingLength = bytes[-1]
+ if (paddingLength+1) > len(bytes):
+ paddingGood=False
+ totalPaddingLength = 0
+ else:
+ if self.version == (3,0):
+ totalPaddingLength = paddingLength+1
+ elif self.version in ((3,1), (3,2)):
+ totalPaddingLength = paddingLength+1
+ paddingBytes = bytes[-totalPaddingLength:-1]
+ for byte in paddingBytes:
+ if byte != paddingLength:
+ paddingGood = False
+ totalPaddingLength = 0
+ else:
+ raise AssertionError()
+
+ #Decrypt if it's a stream cipher
+ else:
+ paddingGood = True
+ ciphertext = bytesToString(bytes)
+ plaintext = self._readState.encContext.decrypt(ciphertext)
+ bytes = stringToBytes(plaintext)
+ totalPaddingLength = 0
+
+ #Check MAC
+ macGood = True
+ macLength = self._readState.macContext.digest_size
+ endLength = macLength + totalPaddingLength
+ if endLength > len(bytes):
+ macGood = False
+ else:
+ #Read MAC
+ startIndex = len(bytes) - endLength
+ endIndex = startIndex + macLength
+ checkBytes = bytes[startIndex : endIndex]
+
+ #Calculate MAC
+ seqnumStr = self._readState.getSeqNumStr()
+ bytes = bytes[:-endLength]
+ bytesStr = bytesToString(bytes)
+ mac = self._readState.macContext.copy()
+ mac.update(seqnumStr)
+ mac.update(chr(recordType))
+ if self.version == (3,0):
+ mac.update( chr( int(len(bytes)/256) ) )
+ mac.update( chr( int(len(bytes)%256) ) )
+ elif self.version in ((3,1), (3,2)):
+ mac.update(chr(self.version[0]))
+ mac.update(chr(self.version[1]))
+ mac.update( chr( int(len(bytes)/256) ) )
+ mac.update( chr( int(len(bytes)%256) ) )
+ else:
+ raise AssertionError()
+ mac.update(bytesStr)
+ macString = mac.digest()
+ macBytes = stringToBytes(macString)
+
+ #Compare MACs
+ if macBytes != checkBytes:
+ macGood = False
+
+ if not (paddingGood and macGood):
+ for result in self._sendError(AlertDescription.bad_record_mac,
+ "MAC failure (or padding failure)"):
+ yield result
+
+ yield bytes
+
+ def _handshakeStart(self, client):
+ self._client = client
+ self._handshake_md5 = md5.md5()
+ self._handshake_sha = sha.sha()
+ self._handshakeBuffer = []
+ self.allegedSharedKeyUsername = None
+ self.allegedSrpUsername = None
+ self._refCount = 1
+
+ def _handshakeDone(self, resumed):
+ self.resumed = resumed
+ self.closed = False
+
+ def _calcPendingStates(self, clientRandom, serverRandom, implementations):
+ if self.session.cipherSuite in CipherSuite.aes128Suites:
+ macLength = 20
+ keyLength = 16
+ ivLength = 16
+ createCipherFunc = createAES
+ elif self.session.cipherSuite in CipherSuite.aes256Suites:
+ macLength = 20
+ keyLength = 32
+ ivLength = 16
+ createCipherFunc = createAES
+ elif self.session.cipherSuite in CipherSuite.rc4Suites:
+ macLength = 20
+ keyLength = 16
+ ivLength = 0
+ createCipherFunc = createRC4
+ elif self.session.cipherSuite in CipherSuite.tripleDESSuites:
+ macLength = 20
+ keyLength = 24
+ ivLength = 8
+ createCipherFunc = createTripleDES
+ else:
+ raise AssertionError()
+
+ if self.version == (3,0):
+ createMACFunc = MAC_SSL
+ elif self.version in ((3,1), (3,2)):
+ createMACFunc = hmac.HMAC
+
+ outputLength = (macLength*2) + (keyLength*2) + (ivLength*2)
+
+ #Calculate Keying Material from Master Secret
+ if self.version == (3,0):
+ keyBlock = PRF_SSL(self.session.masterSecret,
+ concatArrays(serverRandom, clientRandom),
+ outputLength)
+ elif self.version in ((3,1), (3,2)):
+ keyBlock = PRF(self.session.masterSecret,
+ "key expansion",
+ concatArrays(serverRandom,clientRandom),
+ outputLength)
+ else:
+ raise AssertionError()
+
+ #Slice up Keying Material
+ clientPendingState = _ConnectionState()
+ serverPendingState = _ConnectionState()
+ p = Parser(keyBlock)
+ clientMACBlock = bytesToString(p.getFixBytes(macLength))
+ serverMACBlock = bytesToString(p.getFixBytes(macLength))
+ clientKeyBlock = bytesToString(p.getFixBytes(keyLength))
+ serverKeyBlock = bytesToString(p.getFixBytes(keyLength))
+ clientIVBlock = bytesToString(p.getFixBytes(ivLength))
+ serverIVBlock = bytesToString(p.getFixBytes(ivLength))
+ clientPendingState.macContext = createMACFunc(clientMACBlock,
+ digestmod=sha)
+ serverPendingState.macContext = createMACFunc(serverMACBlock,
+ digestmod=sha)
+ clientPendingState.encContext = createCipherFunc(clientKeyBlock,
+ clientIVBlock,
+ implementations)
+ serverPendingState.encContext = createCipherFunc(serverKeyBlock,
+ serverIVBlock,
+ implementations)
+
+ #Assign new connection states to pending states
+ if self._client:
+ self._pendingWriteState = clientPendingState
+ self._pendingReadState = serverPendingState
+ else:
+ self._pendingWriteState = serverPendingState
+ self._pendingReadState = clientPendingState
+
+ if self.version == (3,2) and ivLength:
+ #Choose fixedIVBlock for TLS 1.1 (this is encrypted with the CBC
+ #residue to create the IV for each sent block)
+ self.fixedIVBlock = getRandomBytes(ivLength)
+
+ def _changeWriteState(self):
+ self._writeState = self._pendingWriteState
+ self._pendingWriteState = _ConnectionState()
+
+ def _changeReadState(self):
+ self._readState = self._pendingReadState
+ self._pendingReadState = _ConnectionState()
+
+ def _sendFinished(self):
+ #Send ChangeCipherSpec
+ for result in self._sendMsg(ChangeCipherSpec()):
+ yield result
+
+ #Switch to pending write state
+ self._changeWriteState()
+
+ #Calculate verification data
+ verifyData = self._calcFinished(True)
+ if self.fault == Fault.badFinished:
+ verifyData[0] = (verifyData[0]+1)%256
+
+ #Send Finished message under new state
+ finished = Finished(self.version).create(verifyData)
+ for result in self._sendMsg(finished):
+ yield result
+
+ def _getFinished(self):
+ #Get and check ChangeCipherSpec
+ for result in self._getMsg(ContentType.change_cipher_spec):
+ if result in (0,1):
+ yield result
+ changeCipherSpec = result
+
+ if changeCipherSpec.type != 1:
+ for result in self._sendError(AlertDescription.illegal_parameter,
+ "ChangeCipherSpec type incorrect"):
+ yield result
+
+ #Switch to pending read state
+ self._changeReadState()
+
+ #Calculate verification data
+ verifyData = self._calcFinished(False)
+
+ #Get and check Finished message under new state
+ for result in self._getMsg(ContentType.handshake,
+ HandshakeType.finished):
+ if result in (0,1):
+ yield result
+ finished = result
+ if finished.verify_data != verifyData:
+ for result in self._sendError(AlertDescription.decrypt_error,
+ "Finished message is incorrect"):
+ yield result
+
+ def _calcFinished(self, send=True):
+ if self.version == (3,0):
+ if (self._client and send) or (not self._client and not send):
+ senderStr = "\x43\x4C\x4E\x54"
+ else:
+ senderStr = "\x53\x52\x56\x52"
+
+ verifyData = self._calcSSLHandshakeHash(self.session.masterSecret,
+ senderStr)
+ return verifyData
+
+ elif self.version in ((3,1), (3,2)):
+ if (self._client and send) or (not self._client and not send):
+ label = "client finished"
+ else:
+ label = "server finished"
+
+ handshakeHashes = stringToBytes(self._handshake_md5.digest() + \
+ self._handshake_sha.digest())
+ verifyData = PRF(self.session.masterSecret, label, handshakeHashes,
+ 12)
+ return verifyData
+ else:
+ raise AssertionError()
+
+ #Used for Finished messages and CertificateVerify messages in SSL v3
+ def _calcSSLHandshakeHash(self, masterSecret, label):
+ masterSecretStr = bytesToString(masterSecret)
+
+ imac_md5 = self._handshake_md5.copy()
+ imac_sha = self._handshake_sha.copy()
+
+ imac_md5.update(label + masterSecretStr + '\x36'*48)
+ imac_sha.update(label + masterSecretStr + '\x36'*40)
+
+ md5Str = md5.md5(masterSecretStr + ('\x5c'*48) + \
+ imac_md5.digest()).digest()
+ shaStr = sha.sha(masterSecretStr + ('\x5c'*40) + \
+ imac_sha.digest()).digest()
+
+ return stringToBytes(md5Str + shaStr)
+
diff --git a/python/gdata/tlslite/VerifierDB.py b/python/gdata/tlslite/VerifierDB.py
new file mode 100644
index 0000000..f706b17
--- /dev/null
+++ b/python/gdata/tlslite/VerifierDB.py
@@ -0,0 +1,90 @@
+"""Class for storing SRP password verifiers."""
+
+from utils.cryptomath import *
+from utils.compat import *
+import mathtls
+from BaseDB import BaseDB
+
+class VerifierDB(BaseDB):
+ """This class represent an in-memory or on-disk database of SRP
+ password verifiers.
+
+ A VerifierDB can be passed to a server handshake to authenticate
+ a client based on one of the verifiers.
+
+ This class is thread-safe.
+ """
+ def __init__(self, filename=None):
+ """Create a new VerifierDB instance.
+
+ @type filename: str
+ @param filename: Filename for an on-disk database, or None for
+ an in-memory database. If the filename already exists, follow
+ this with a call to open(). To create a new on-disk database,
+ follow this with a call to create().
+ """
+ BaseDB.__init__(self, filename, "verifier")
+
+ def _getItem(self, username, valueStr):
+ (N, g, salt, verifier) = valueStr.split(" ")
+ N = base64ToNumber(N)
+ g = base64ToNumber(g)
+ salt = base64ToString(salt)
+ verifier = base64ToNumber(verifier)
+ return (N, g, salt, verifier)
+
+ def __setitem__(self, username, verifierEntry):
+ """Add a verifier entry to the database.
+
+ @type username: str
+ @param username: The username to associate the verifier with.
+ Must be less than 256 characters in length. Must not already
+ be in the database.
+
+ @type verifierEntry: tuple
+ @param verifierEntry: The verifier entry to add. Use
+ L{tlslite.VerifierDB.VerifierDB.makeVerifier} to create a
+ verifier entry.
+ """
+ BaseDB.__setitem__(self, username, verifierEntry)
+
+
+ def _setItem(self, username, value):
+ if len(username)>=256:
+ raise ValueError("username too long")
+ N, g, salt, verifier = value
+ N = numberToBase64(N)
+ g = numberToBase64(g)
+ salt = stringToBase64(salt)
+ verifier = numberToBase64(verifier)
+ valueStr = " ".join( (N, g, salt, verifier) )
+ return valueStr
+
+ def _checkItem(self, value, username, param):
+ (N, g, salt, verifier) = value
+ x = mathtls.makeX(salt, username, param)
+ v = powMod(g, x, N)
+ return (verifier == v)
+
+
+ def makeVerifier(username, password, bits):
+ """Create a verifier entry which can be stored in a VerifierDB.
+
+ @type username: str
+ @param username: The username for this verifier. Must be less
+ than 256 characters in length.
+
+ @type password: str
+ @param password: The password for this verifier.
+
+ @type bits: int
+        @param bits: This value specifies which SRP group parameters
+ to use. It must be one of (1024, 1536, 2048, 3072, 4096, 6144,
+ 8192). Larger values are more secure but slower. 2048 is a
+ good compromise between safety and speed.
+
+ @rtype: tuple
+ @return: A tuple which may be stored in a VerifierDB.
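+
+        A minimal usage sketch (the filename, username, password and
+        group size below are only placeholders)::
+
+            db = VerifierDB("userdb")
+            db.create()
+            db["alice"] = VerifierDB.makeVerifier("alice", "secret", 2048)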
+ """
+ return mathtls.makeVerifier(username, password, bits)
+ makeVerifier = staticmethod(makeVerifier)
\ No newline at end of file
diff --git a/python/gdata/tlslite/X509.py b/python/gdata/tlslite/X509.py
new file mode 100644
index 0000000..a47ddcf
--- /dev/null
+++ b/python/gdata/tlslite/X509.py
@@ -0,0 +1,133 @@
+"""Class representing an X.509 certificate."""
+
+from utils.ASN1Parser import ASN1Parser
+from utils.cryptomath import *
+from utils.keyfactory import _createPublicRSAKey
+
+
+class X509:
+ """This class represents an X.509 certificate.
+
+ @type bytes: L{array.array} of unsigned bytes
+ @ivar bytes: The DER-encoded ASN.1 certificate
+
+ @type publicKey: L{tlslite.utils.RSAKey.RSAKey}
+ @ivar publicKey: The subject public key from the certificate.
+ """
+
+ def __init__(self):
+ self.bytes = createByteArraySequence([])
+ self.publicKey = None
+
+ def parse(self, s):
+ """Parse a PEM-encoded X.509 certificate.
+
+ @type s: str
+ @param s: A PEM-encoded X.509 certificate (i.e. a base64-encoded
+ certificate wrapped with "-----BEGIN CERTIFICATE-----" and
+ "-----END CERTIFICATE-----" tags).
+ """
+
+ start = s.find("-----BEGIN CERTIFICATE-----")
+ end = s.find("-----END CERTIFICATE-----")
+ if start == -1:
+ raise SyntaxError("Missing PEM prefix")
+ if end == -1:
+ raise SyntaxError("Missing PEM postfix")
+ s = s[start+len("-----BEGIN CERTIFICATE-----") : end]
+
+ bytes = base64ToBytes(s)
+ self.parseBinary(bytes)
+ return self
+
+ def parseBinary(self, bytes):
+ """Parse a DER-encoded X.509 certificate.
+
+ @type bytes: str or L{array.array} of unsigned bytes
+ @param bytes: A DER-encoded X.509 certificate.
+ """
+
+ if isinstance(bytes, type("")):
+ bytes = stringToBytes(bytes)
+
+ self.bytes = bytes
+ p = ASN1Parser(bytes)
+
+ #Get the tbsCertificate
+ tbsCertificateP = p.getChild(0)
+
+ #Is the optional version field present?
+ #This determines which index the key is at.
+ if tbsCertificateP.value[0]==0xA0:
+ subjectPublicKeyInfoIndex = 6
+ else:
+ subjectPublicKeyInfoIndex = 5
+
+ #Get the subjectPublicKeyInfo
+ subjectPublicKeyInfoP = tbsCertificateP.getChild(\
+ subjectPublicKeyInfoIndex)
+
+ #Get the algorithm
+ algorithmP = subjectPublicKeyInfoP.getChild(0)
+ rsaOID = algorithmP.value
+ if list(rsaOID) != [6, 9, 42, 134, 72, 134, 247, 13, 1, 1, 1, 5, 0]:
+ raise SyntaxError("Unrecognized AlgorithmIdentifier")
+
+ #Get the subjectPublicKey
+ subjectPublicKeyP = subjectPublicKeyInfoP.getChild(1)
+
+ #Adjust for BIT STRING encapsulation
+ if (subjectPublicKeyP.value[0] !=0):
+ raise SyntaxError()
+ subjectPublicKeyP = ASN1Parser(subjectPublicKeyP.value[1:])
+
+ #Get the modulus and exponent
+ modulusP = subjectPublicKeyP.getChild(0)
+ publicExponentP = subjectPublicKeyP.getChild(1)
+
+ #Decode them into numbers
+ n = bytesToNumber(modulusP.value)
+ e = bytesToNumber(publicExponentP.value)
+
+ #Create a public key instance
+ self.publicKey = _createPublicRSAKey(n, e)
+
+ def getFingerprint(self):
+ """Get the hex-encoded fingerprint of this certificate.
+
+ @rtype: str
+ @return: A hex-encoded fingerprint.
+ """
+ return sha.sha(self.bytes).hexdigest()
+
+ def getCommonName(self):
+ """Get the Subject's Common Name from the certificate.
+
+ The cryptlib_py module must be installed in order to use this
+ function.
+
+ @rtype: str or None
+ @return: The CN component of the certificate's subject DN, if
+ present.
+ """
+ import cryptlib_py
+ import array
+ c = cryptlib_py.cryptImportCert(self.bytes, cryptlib_py.CRYPT_UNUSED)
+ name = cryptlib_py.CRYPT_CERTINFO_COMMONNAME
+ try:
+ try:
+ length = cryptlib_py.cryptGetAttributeString(c, name, None)
+ returnVal = array.array('B', [0] * length)
+ cryptlib_py.cryptGetAttributeString(c, name, returnVal)
+ returnVal = returnVal.tostring()
+ except cryptlib_py.CryptException, e:
+ if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
+ returnVal = None
+ return returnVal
+ finally:
+ cryptlib_py.cryptDestroyCert(c)
+
+ def writeBytes(self):
+ return self.bytes
+
+
diff --git a/python/gdata/tlslite/X509CertChain.py b/python/gdata/tlslite/X509CertChain.py
new file mode 100644
index 0000000..d5f0b4d
--- /dev/null
+++ b/python/gdata/tlslite/X509CertChain.py
@@ -0,0 +1,181 @@
+"""Class representing an X.509 certificate chain."""
+
+from utils import cryptomath
+
+class X509CertChain:
+ """This class represents a chain of X.509 certificates.
+
+ @type x509List: list
+ @ivar x509List: A list of L{tlslite.X509.X509} instances,
+ starting with the end-entity certificate and with every
+ subsequent certificate certifying the previous.
+ """
+
+ def __init__(self, x509List=None):
+ """Create a new X509CertChain.
+
+ @type x509List: list
+ @param x509List: A list of L{tlslite.X509.X509} instances,
+ starting with the end-entity certificate and with every
+ subsequent certificate certifying the previous.
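+
+        A minimal construction sketch (assumes 'certPemString' holds a
+        PEM-encoded certificate)::
+
+            x509 = X509()
+            x509.parse(certPemString)
+            certChain = X509CertChain([x509])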
+ """
+ if x509List:
+ self.x509List = x509List
+ else:
+ self.x509List = []
+
+ def getNumCerts(self):
+ """Get the number of certificates in this chain.
+
+ @rtype: int
+ """
+ return len(self.x509List)
+
+ def getEndEntityPublicKey(self):
+ """Get the public key from the end-entity certificate.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ """
+ if self.getNumCerts() == 0:
+ raise AssertionError()
+ return self.x509List[0].publicKey
+
+ def getFingerprint(self):
+ """Get the hex-encoded fingerprint of the end-entity certificate.
+
+ @rtype: str
+ @return: A hex-encoded fingerprint.
+ """
+ if self.getNumCerts() == 0:
+ raise AssertionError()
+ return self.x509List[0].getFingerprint()
+
+ def getCommonName(self):
+ """Get the Subject's Common Name from the end-entity certificate.
+
+ The cryptlib_py module must be installed in order to use this
+ function.
+
+ @rtype: str or None
+ @return: The CN component of the certificate's subject DN, if
+ present.
+ """
+ if self.getNumCerts() == 0:
+ raise AssertionError()
+ return self.x509List[0].getCommonName()
+
+ def validate(self, x509TrustList):
+ """Check the validity of the certificate chain.
+
+ This checks that every certificate in the chain validates with
+ the subsequent one, until some certificate validates with (or
+ is identical to) one of the passed-in root certificates.
+
+ The cryptlib_py module must be installed in order to use this
+ function.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ certificate chain must extend to one of these certificates to
+ be considered valid.
+ """
+
+ import cryptlib_py
+ c1 = None
+ c2 = None
+ lastC = None
+ rootC = None
+
+ try:
+ rootFingerprints = [c.getFingerprint() for c in x509TrustList]
+
+ #Check that every certificate in the chain validates with the
+ #next one
+ for cert1, cert2 in zip(self.x509List, self.x509List[1:]):
+
+ #If we come upon a root certificate, we're done.
+ if cert1.getFingerprint() in rootFingerprints:
+ return True
+
+ c1 = cryptlib_py.cryptImportCert(cert1.writeBytes(),
+ cryptlib_py.CRYPT_UNUSED)
+ c2 = cryptlib_py.cryptImportCert(cert2.writeBytes(),
+ cryptlib_py.CRYPT_UNUSED)
+ try:
+ cryptlib_py.cryptCheckCert(c1, c2)
+ except:
+ return False
+ cryptlib_py.cryptDestroyCert(c1)
+ c1 = None
+ cryptlib_py.cryptDestroyCert(c2)
+ c2 = None
+
+ #If the last certificate is one of the root certificates, we're
+ #done.
+ if self.x509List[-1].getFingerprint() in rootFingerprints:
+ return True
+
+ #Otherwise, find a root certificate that the last certificate
+ #chains to, and validate them.
+ lastC = cryptlib_py.cryptImportCert(self.x509List[-1].writeBytes(),
+ cryptlib_py.CRYPT_UNUSED)
+ for rootCert in x509TrustList:
+ rootC = cryptlib_py.cryptImportCert(rootCert.writeBytes(),
+ cryptlib_py.CRYPT_UNUSED)
+ if self._checkChaining(lastC, rootC):
+ try:
+ cryptlib_py.cryptCheckCert(lastC, rootC)
+ return True
+ except:
+ return False
+ return False
+ finally:
+ if not (c1 is None):
+ cryptlib_py.cryptDestroyCert(c1)
+ if not (c2 is None):
+ cryptlib_py.cryptDestroyCert(c2)
+ if not (lastC is None):
+ cryptlib_py.cryptDestroyCert(lastC)
+ if not (rootC is None):
+ cryptlib_py.cryptDestroyCert(rootC)
+
+
+
+ def _checkChaining(self, lastC, rootC):
+ import cryptlib_py
+ import array
+ def compareNames(name):
+ try:
+ length = cryptlib_py.cryptGetAttributeString(lastC, name, None)
+ lastName = array.array('B', [0] * length)
+ cryptlib_py.cryptGetAttributeString(lastC, name, lastName)
+ lastName = lastName.tostring()
+ except cryptlib_py.CryptException, e:
+ if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
+ lastName = None
+ try:
+ length = cryptlib_py.cryptGetAttributeString(rootC, name, None)
+ rootName = array.array('B', [0] * length)
+ cryptlib_py.cryptGetAttributeString(rootC, name, rootName)
+ rootName = rootName.tostring()
+ except cryptlib_py.CryptException, e:
+ if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
+ rootName = None
+
+ return lastName == rootName
+
+ cryptlib_py.cryptSetAttribute(lastC,
+ cryptlib_py.CRYPT_CERTINFO_ISSUERNAME,
+ cryptlib_py.CRYPT_UNUSED)
+
+ if not compareNames(cryptlib_py.CRYPT_CERTINFO_COUNTRYNAME):
+ return False
+ if not compareNames(cryptlib_py.CRYPT_CERTINFO_LOCALITYNAME):
+ return False
+ if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONNAME):
+ return False
+ if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONALUNITNAME):
+ return False
+ if not compareNames(cryptlib_py.CRYPT_CERTINFO_COMMONNAME):
+ return False
+ return True
\ No newline at end of file
diff --git a/python/gdata/tlslite/__init__.py b/python/gdata/tlslite/__init__.py
new file mode 100644
index 0000000..47cfd1c
--- /dev/null
+++ b/python/gdata/tlslite/__init__.py
@@ -0,0 +1,39 @@
+"""
+TLS Lite is a free python library that implements SSL v3, TLS v1, and
+TLS v1.1. TLS Lite supports non-traditional authentication methods
+such as SRP, shared keys, and cryptoIDs, in addition to X.509
+certificates. TLS Lite is pure python, however it can access OpenSSL,
+cryptlib, pycrypto, and GMPY for faster crypto operations. TLS Lite
+integrates with httplib, xmlrpclib, poplib, imaplib, smtplib,
+SocketServer, asyncore, and Twisted.
+
+To use, do::
+
+ from tlslite.api import *
+
+Then use the L{tlslite.TLSConnection.TLSConnection} class with a socket,
+or use one of the integration classes in L{tlslite.integration}.
+
+@version: 0.3.8
+"""
+__version__ = "0.3.8"
+
+__all__ = ["api",
+ "BaseDB",
+ "Checker",
+ "constants",
+ "errors",
+ "FileObject",
+ "HandshakeSettings",
+ "mathtls",
+ "messages",
+ "Session",
+ "SessionCache",
+ "SharedKeyDB",
+ "TLSConnection",
+ "TLSRecordLayer",
+ "VerifierDB",
+ "X509",
+ "X509CertChain",
+ "integration",
+ "utils"]
diff --git a/python/gdata/tlslite/api.py b/python/gdata/tlslite/api.py
new file mode 100644
index 0000000..eebfbc6
--- /dev/null
+++ b/python/gdata/tlslite/api.py
@@ -0,0 +1,75 @@
+"""Import this module for easy access to TLS Lite objects.
+
+The TLS Lite API consists of classes, functions, and variables spread
+throughout this package. Instead of importing them individually with::
+
+ from tlslite.TLSConnection import TLSConnection
+ from tlslite.HandshakeSettings import HandshakeSettings
+ from tlslite.errors import *
+ .
+ .
+
+It's easier to do::
+
+ from tlslite.api import *
+
+This imports all the important objects (TLSConnection, Checker,
+HandshakeSettings, etc.) into the global namespace. In particular, it
+imports::
+
+ from constants import AlertLevel, AlertDescription, Fault
+ from errors import *
+ from Checker import Checker
+ from HandshakeSettings import HandshakeSettings
+ from Session import Session
+ from SessionCache import SessionCache
+ from SharedKeyDB import SharedKeyDB
+ from TLSConnection import TLSConnection
+ from VerifierDB import VerifierDB
+ from X509 import X509
+ from X509CertChain import X509CertChain
+
+ from integration.HTTPTLSConnection import HTTPTLSConnection
+ from integration.POP3_TLS import POP3_TLS
+ from integration.IMAP4_TLS import IMAP4_TLS
+ from integration.SMTP_TLS import SMTP_TLS
+ from integration.XMLRPCTransport import XMLRPCTransport
+ from integration.TLSSocketServerMixIn import TLSSocketServerMixIn
+ from integration.TLSAsyncDispatcherMixIn import TLSAsyncDispatcherMixIn
+ from integration.TLSTwistedProtocolWrapper import TLSTwistedProtocolWrapper
+ from utils.cryptomath import cryptlibpyLoaded, m2cryptoLoaded,
+ gmpyLoaded, pycryptoLoaded, prngName
+ from utils.keyfactory import generateRSAKey, parsePEMKey, parseXMLKey,
+ parseAsPublicKey, parsePrivateKey
+"""
+
+from constants import AlertLevel, AlertDescription, Fault
+from errors import *
+from Checker import Checker
+from HandshakeSettings import HandshakeSettings
+from Session import Session
+from SessionCache import SessionCache
+from SharedKeyDB import SharedKeyDB
+from TLSConnection import TLSConnection
+from VerifierDB import VerifierDB
+from X509 import X509
+from X509CertChain import X509CertChain
+
+from integration.HTTPTLSConnection import HTTPTLSConnection
+from integration.TLSSocketServerMixIn import TLSSocketServerMixIn
+from integration.TLSAsyncDispatcherMixIn import TLSAsyncDispatcherMixIn
+from integration.POP3_TLS import POP3_TLS
+from integration.IMAP4_TLS import IMAP4_TLS
+from integration.SMTP_TLS import SMTP_TLS
+from integration.XMLRPCTransport import XMLRPCTransport
+try:
+ import twisted
+ del(twisted)
+ from integration.TLSTwistedProtocolWrapper import TLSTwistedProtocolWrapper
+except ImportError:
+ pass
+
+from utils.cryptomath import cryptlibpyLoaded, m2cryptoLoaded, gmpyLoaded, \
+ pycryptoLoaded, prngName
+from utils.keyfactory import generateRSAKey, parsePEMKey, parseXMLKey, \
+ parseAsPublicKey, parsePrivateKey
diff --git a/python/gdata/tlslite/constants.py b/python/gdata/tlslite/constants.py
new file mode 100644
index 0000000..8f2d559
--- /dev/null
+++ b/python/gdata/tlslite/constants.py
@@ -0,0 +1,225 @@
+"""Constants used in various places."""
+
+class CertificateType:
+ x509 = 0
+ openpgp = 1
+ cryptoID = 2
+
+class HandshakeType:
+ hello_request = 0
+ client_hello = 1
+ server_hello = 2
+ certificate = 11
+ server_key_exchange = 12
+ certificate_request = 13
+ server_hello_done = 14
+ certificate_verify = 15
+ client_key_exchange = 16
+ finished = 20
+
+class ContentType:
+ change_cipher_spec = 20
+ alert = 21
+ handshake = 22
+ application_data = 23
+ all = (20,21,22,23)
+
+class AlertLevel:
+ warning = 1
+ fatal = 2
+
+class AlertDescription:
+ """
+ @cvar bad_record_mac: A TLS record failed to decrypt properly.
+
+ If this occurs during a shared-key or SRP handshake it most likely
+ indicates a bad password. It may also indicate an implementation
+ error, or some tampering with the data in transit.
+
+ This alert will be signalled by the server if the SRP password is bad. It
+ may also be signalled by the server if the SRP username is unknown to the
+ server, but it doesn't wish to reveal that fact.
+
+ This alert will be signalled by the client if the shared-key username is
+ bad.
+
+ @cvar handshake_failure: A problem occurred while handshaking.
+
+ This typically indicates a lack of common ciphersuites between client and
+ server, or some other disagreement (about SRP parameters or key sizes,
+ for example).
+
+ @cvar protocol_version: The other party's SSL/TLS version was unacceptable.
+
+ This indicates that the client and server couldn't agree on which version
+ of SSL or TLS to use.
+
+ @cvar user_canceled: The handshake is being cancelled for some reason.
+
+ """
+
+ close_notify = 0
+ unexpected_message = 10
+ bad_record_mac = 20
+ decryption_failed = 21
+ record_overflow = 22
+ decompression_failure = 30
+ handshake_failure = 40
+ no_certificate = 41 #SSLv3
+ bad_certificate = 42
+ unsupported_certificate = 43
+ certificate_revoked = 44
+ certificate_expired = 45
+ certificate_unknown = 46
+ illegal_parameter = 47
+ unknown_ca = 48
+ access_denied = 49
+ decode_error = 50
+ decrypt_error = 51
+ export_restriction = 60
+ protocol_version = 70
+ insufficient_security = 71
+ internal_error = 80
+ user_canceled = 90
+ no_renegotiation = 100
+ unknown_srp_username = 120
+ missing_srp_username = 121
+ untrusted_srp_parameters = 122
+
+class CipherSuite:
+ TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA = 0x0050
+ TLS_SRP_SHA_WITH_AES_128_CBC_SHA = 0x0053
+ TLS_SRP_SHA_WITH_AES_256_CBC_SHA = 0x0056
+
+ TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA = 0x0051
+ TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA = 0x0054
+ TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA = 0x0057
+
+ TLS_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A
+ TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
+ TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
+ TLS_RSA_WITH_RC4_128_SHA = 0x0005
+
+ srpSuites = []
+ srpSuites.append(TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
+ srpSuites.append(TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
+ srpSuites.append(TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
+ def getSrpSuites(ciphers):
+ suites = []
+ for cipher in ciphers:
+ if cipher == "aes128":
+ suites.append(CipherSuite.TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
+ elif cipher == "aes256":
+ suites.append(CipherSuite.TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
+ elif cipher == "3des":
+ suites.append(CipherSuite.TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
+ return suites
+ getSrpSuites = staticmethod(getSrpSuites)
+
+ srpRsaSuites = []
+ srpRsaSuites.append(TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
+ srpRsaSuites.append(TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
+ srpRsaSuites.append(TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
+ def getSrpRsaSuites(ciphers):
+ suites = []
+ for cipher in ciphers:
+ if cipher == "aes128":
+ suites.append(CipherSuite.TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
+ elif cipher == "aes256":
+ suites.append(CipherSuite.TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
+ elif cipher == "3des":
+ suites.append(CipherSuite.TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
+ return suites
+ getSrpRsaSuites = staticmethod(getSrpRsaSuites)
+
+ rsaSuites = []
+ rsaSuites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
+ rsaSuites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
+ rsaSuites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
+ rsaSuites.append(TLS_RSA_WITH_RC4_128_SHA)
+ def getRsaSuites(ciphers):
+ suites = []
+ for cipher in ciphers:
+ if cipher == "aes128":
+ suites.append(CipherSuite.TLS_RSA_WITH_AES_128_CBC_SHA)
+ elif cipher == "aes256":
+ suites.append(CipherSuite.TLS_RSA_WITH_AES_256_CBC_SHA)
+ elif cipher == "rc4":
+ suites.append(CipherSuite.TLS_RSA_WITH_RC4_128_SHA)
+ elif cipher == "3des":
+ suites.append(CipherSuite.TLS_RSA_WITH_3DES_EDE_CBC_SHA)
+ return suites
+ getRsaSuites = staticmethod(getRsaSuites)
+
+ tripleDESSuites = []
+ tripleDESSuites.append(TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
+ tripleDESSuites.append(TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
+ tripleDESSuites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
+
+ aes128Suites = []
+ aes128Suites.append(TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
+ aes128Suites.append(TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
+ aes128Suites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
+
+ aes256Suites = []
+ aes256Suites.append(TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
+ aes256Suites.append(TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
+ aes256Suites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
+
+ rc4Suites = []
+ rc4Suites.append(TLS_RSA_WITH_RC4_128_SHA)
+
+
+class Fault:
+ badUsername = 101
+ badPassword = 102
+ badA = 103
+ clientSrpFaults = range(101,104)
+
+ badVerifyMessage = 601
+ clientCertFaults = range(601,602)
+
+ badPremasterPadding = 501
+ shortPremasterSecret = 502
+ clientNoAuthFaults = range(501,503)
+
+ badIdentifier = 401
+ badSharedKey = 402
+ clientSharedKeyFaults = range(401,403)
+
+ badB = 201
+ serverFaults = range(201,202)
+
+ badFinished = 300
+ badMAC = 301
+ badPadding = 302
+ genericFaults = range(300,303)
+
+ faultAlerts = {\
+ badUsername: (AlertDescription.unknown_srp_username, \
+ AlertDescription.bad_record_mac),\
+ badPassword: (AlertDescription.bad_record_mac,),\
+ badA: (AlertDescription.illegal_parameter,),\
+ badIdentifier: (AlertDescription.handshake_failure,),\
+ badSharedKey: (AlertDescription.bad_record_mac,),\
+ badPremasterPadding: (AlertDescription.bad_record_mac,),\
+ shortPremasterSecret: (AlertDescription.bad_record_mac,),\
+ badVerifyMessage: (AlertDescription.decrypt_error,),\
+ badFinished: (AlertDescription.decrypt_error,),\
+ badMAC: (AlertDescription.bad_record_mac,),\
+ badPadding: (AlertDescription.bad_record_mac,)
+ }
+
+ faultNames = {\
+ badUsername: "bad username",\
+ badPassword: "bad password",\
+ badA: "bad A",\
+ badIdentifier: "bad identifier",\
+ badSharedKey: "bad sharedkey",\
+ badPremasterPadding: "bad premaster padding",\
+ shortPremasterSecret: "short premaster secret",\
+ badVerifyMessage: "bad verify message",\
+ badFinished: "bad finished message",\
+ badMAC: "bad MAC",\
+ badPadding: "bad padding"
+ }
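+
+# Illustrative sketch (not part of the library) of how the getXxxSuites
+# helpers above map short cipher names to ciphersuite values; the cipher
+# list here is just an example:
+#
+#   from tlslite.constants import CipherSuite
+#
+#   suites = CipherSuite.getRsaSuites(["aes128", "rc4"])
+#   # -> [TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_RC4_128_SHA]
+#   print ["0x%04X" % suite for suite in suites]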
diff --git a/python/gdata/tlslite/errors.py b/python/gdata/tlslite/errors.py
new file mode 100644
index 0000000..c7f7ba8
--- /dev/null
+++ b/python/gdata/tlslite/errors.py
@@ -0,0 +1,149 @@
+"""Exception classes.
+@sort: TLSError, TLSAbruptCloseError, TLSAlert, TLSLocalAlert, TLSRemoteAlert,
+TLSAuthenticationError, TLSNoAuthenticationError, TLSAuthenticationTypeError,
+TLSFingerprintError, TLSAuthorizationError, TLSValidationError, TLSFaultError
+"""
+
+from constants import AlertDescription, AlertLevel
+
+class TLSError(Exception):
+ """Base class for all TLS Lite exceptions."""
+ pass
+
+class TLSAbruptCloseError(TLSError):
+ """The socket was closed without a proper TLS shutdown.
+
+ The TLS specification mandates that an alert of some sort
+ must be sent before the underlying socket is closed. If the socket
+ is closed without this, it could signify that an attacker is trying
+ to truncate the connection. It could also signify a misbehaving
+ TLS implementation, or a random network failure.
+ """
+ pass
+
+class TLSAlert(TLSError):
+ """A TLS alert has been signalled."""
+ pass
+
+ _descriptionStr = {\
+ AlertDescription.close_notify: "close_notify",\
+ AlertDescription.unexpected_message: "unexpected_message",\
+ AlertDescription.bad_record_mac: "bad_record_mac",\
+ AlertDescription.decryption_failed: "decryption_failed",\
+ AlertDescription.record_overflow: "record_overflow",\
+ AlertDescription.decompression_failure: "decompression_failure",\
+ AlertDescription.handshake_failure: "handshake_failure",\
+ AlertDescription.no_certificate: "no certificate",\
+ AlertDescription.bad_certificate: "bad_certificate",\
+ AlertDescription.unsupported_certificate: "unsupported_certificate",\
+ AlertDescription.certificate_revoked: "certificate_revoked",\
+ AlertDescription.certificate_expired: "certificate_expired",\
+ AlertDescription.certificate_unknown: "certificate_unknown",\
+ AlertDescription.illegal_parameter: "illegal_parameter",\
+ AlertDescription.unknown_ca: "unknown_ca",\
+ AlertDescription.access_denied: "access_denied",\
+ AlertDescription.decode_error: "decode_error",\
+ AlertDescription.decrypt_error: "decrypt_error",\
+ AlertDescription.export_restriction: "export_restriction",\
+ AlertDescription.protocol_version: "protocol_version",\
+ AlertDescription.insufficient_security: "insufficient_security",\
+ AlertDescription.internal_error: "internal_error",\
+ AlertDescription.user_canceled: "user_canceled",\
+ AlertDescription.no_renegotiation: "no_renegotiation",\
+ AlertDescription.unknown_srp_username: "unknown_srp_username",\
+ AlertDescription.missing_srp_username: "missing_srp_username"}
+
+class TLSLocalAlert(TLSAlert):
+ """A TLS alert has been signalled by the local implementation.
+
+ @type description: int
+ @ivar description: Set to one of the constants in
+ L{tlslite.constants.AlertDescription}
+
+ @type level: int
+ @ivar level: Set to one of the constants in
+ L{tlslite.constants.AlertLevel}
+
+ @type message: str
+ @ivar message: Description of what went wrong.
+ """
+ def __init__(self, alert, message=None):
+ self.description = alert.description
+ self.level = alert.level
+ self.message = message
+
+ def __str__(self):
+ alertStr = TLSAlert._descriptionStr.get(self.description)
+ if alertStr == None:
+ alertStr = str(self.description)
+ if self.message:
+ return alertStr + ": " + self.message
+ else:
+ return alertStr
+
+class TLSRemoteAlert(TLSAlert):
+ """A TLS alert has been signalled by the remote implementation.
+
+ @type description: int
+ @ivar description: Set to one of the constants in
+ L{tlslite.constants.AlertDescription}
+
+ @type level: int
+ @ivar level: Set to one of the constants in
+ L{tlslite.constants.AlertLevel}
+ """
+ def __init__(self, alert):
+ self.description = alert.description
+ self.level = alert.level
+
+ def __str__(self):
+ alertStr = TLSAlert._descriptionStr.get(self.description)
+ if alertStr == None:
+ alertStr = str(self.description)
+ return alertStr
+
+class TLSAuthenticationError(TLSError):
+ """The handshake succeeded, but the other party's authentication
+ was inadequate.
+
+ This exception will only be raised when a
+ L{tlslite.Checker.Checker} has been passed to a handshake function.
+ The Checker will be invoked once the handshake completes, and if
+ the Checker objects to how the other party authenticated, a
+ subclass of this exception will be raised.
+ """
+ pass
+
+class TLSNoAuthenticationError(TLSAuthenticationError):
+ """The Checker was expecting the other party to authenticate with a
+ certificate chain, but this did not occur."""
+ pass
+
+class TLSAuthenticationTypeError(TLSAuthenticationError):
+ """The Checker was expecting the other party to authenticate with a
+ different type of certificate chain."""
+ pass
+
+class TLSFingerprintError(TLSAuthenticationError):
+ """The Checker was expecting the other party to authenticate with a
+ certificate chain that matches a different fingerprint."""
+ pass
+
+class TLSAuthorizationError(TLSAuthenticationError):
+ """The Checker was expecting the other party to authenticate with a
+ certificate chain that has a different authorization."""
+ pass
+
+class TLSValidationError(TLSAuthenticationError):
+ """The Checker has determined that the other party's certificate
+ chain is invalid."""
+ pass
+
+class TLSFaultError(TLSError):
+ """The other party responded incorrectly to an induced fault.
+
+ This exception will only occur during fault testing, when a
+ TLSConnection's fault variable is set to induce some sort of
+ faulty behavior, and the other party doesn't respond appropriately.
+ """
+ pass
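+
+# Illustrative sketch (not part of the library) of typical error handling
+# around a blocking client handshake.  "connection" is assumed to be an
+# already-constructed TLSConnection, and the credentials are placeholders:
+#
+#   try:
+#       connection.handshakeClientSRP("alice", "abra123")
+#   except TLSRemoteAlert, alert:
+#       print "Server sent an alert:", str(alert)
+#   except TLSLocalAlert, alert:
+#       print "Local side raised an alert:", str(alert)
+#   except TLSAbruptCloseError:
+#       print "Socket closed without a proper TLS shutdown"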
diff --git a/python/gdata/tlslite/integration/AsyncStateMachine.py b/python/gdata/tlslite/integration/AsyncStateMachine.py
new file mode 100644
index 0000000..abed604
--- /dev/null
+++ b/python/gdata/tlslite/integration/AsyncStateMachine.py
@@ -0,0 +1,235 @@
+"""
+A state machine for using TLS Lite with asynchronous I/O.
+"""
+
+class AsyncStateMachine:
+ """
+ This is an abstract class that's used to integrate TLS Lite with
+ asyncore and Twisted.
+
+    This class signals wantsReadEvent() and wantsWriteEvent(). When
+ the underlying socket has become readable or writeable, the event
+ should be passed to this class by calling inReadEvent() or
+ inWriteEvent(). This class will then try to read or write through
+ the socket, and will update its state appropriately.
+
+ This class will forward higher-level events to its subclass. For
+ example, when a complete TLS record has been received,
+ outReadEvent() will be called with the decrypted data.
+ """
+
+ def __init__(self):
+ self._clear()
+
+ def _clear(self):
+ #These store the various asynchronous operations (i.e.
+ #generators). Only one of them, at most, is ever active at a
+ #time.
+ self.handshaker = None
+ self.closer = None
+ self.reader = None
+ self.writer = None
+
+ #This stores the result from the last call to the
+ #currently active operation. If 0 it indicates that the
+ #operation wants to read, if 1 it indicates that the
+ #operation wants to write. If None, there is no active
+ #operation.
+ self.result = None
+
+ def _checkAssert(self, maxActive=1):
+ #This checks that only one operation, at most, is
+ #active, and that self.result is set appropriately.
+ activeOps = 0
+ if self.handshaker:
+ activeOps += 1
+ if self.closer:
+ activeOps += 1
+ if self.reader:
+ activeOps += 1
+ if self.writer:
+ activeOps += 1
+
+ if self.result == None:
+ if activeOps != 0:
+ raise AssertionError()
+ elif self.result in (0,1):
+ if activeOps != 1:
+ raise AssertionError()
+ else:
+ raise AssertionError()
+ if activeOps > maxActive:
+ raise AssertionError()
+
+ def wantsReadEvent(self):
+ """If the state machine wants to read.
+
+ If an operation is active, this returns whether or not the
+ operation wants to read from the socket. If an operation is
+ not active, this returns None.
+
+ @rtype: bool or None
+ @return: If the state machine wants to read.
+ """
+ if self.result != None:
+ return self.result == 0
+ return None
+
+ def wantsWriteEvent(self):
+ """If the state machine wants to write.
+
+ If an operation is active, this returns whether or not the
+ operation wants to write to the socket. If an operation is
+ not active, this returns None.
+
+ @rtype: bool or None
+ @return: If the state machine wants to write.
+ """
+ if self.result != None:
+ return self.result == 1
+ return None
+
+ def outConnectEvent(self):
+ """Called when a handshake operation completes.
+
+ May be overridden in subclass.
+ """
+ pass
+
+ def outCloseEvent(self):
+ """Called when a close operation completes.
+
+ May be overridden in subclass.
+ """
+ pass
+
+ def outReadEvent(self, readBuffer):
+ """Called when a read operation completes.
+
+ May be overridden in subclass."""
+ pass
+
+ def outWriteEvent(self):
+ """Called when a write operation completes.
+
+ May be overridden in subclass."""
+ pass
+
+ def inReadEvent(self):
+ """Tell the state machine it can read from the socket."""
+ try:
+ self._checkAssert()
+ if self.handshaker:
+ self._doHandshakeOp()
+ elif self.closer:
+ self._doCloseOp()
+ elif self.reader:
+ self._doReadOp()
+ elif self.writer:
+ self._doWriteOp()
+ else:
+ self.reader = self.tlsConnection.readAsync(16384)
+ self._doReadOp()
+ except:
+ self._clear()
+ raise
+
+ def inWriteEvent(self):
+ """Tell the state machine it can write to the socket."""
+ try:
+ self._checkAssert()
+ if self.handshaker:
+ self._doHandshakeOp()
+ elif self.closer:
+ self._doCloseOp()
+ elif self.reader:
+ self._doReadOp()
+ elif self.writer:
+ self._doWriteOp()
+ else:
+ self.outWriteEvent()
+ except:
+ self._clear()
+ raise
+
+ def _doHandshakeOp(self):
+ try:
+ self.result = self.handshaker.next()
+ except StopIteration:
+ self.handshaker = None
+ self.result = None
+ self.outConnectEvent()
+
+ def _doCloseOp(self):
+ try:
+ self.result = self.closer.next()
+ except StopIteration:
+ self.closer = None
+ self.result = None
+ self.outCloseEvent()
+
+ def _doReadOp(self):
+ self.result = self.reader.next()
+ if not self.result in (0,1):
+ readBuffer = self.result
+ self.reader = None
+ self.result = None
+ self.outReadEvent(readBuffer)
+
+ def _doWriteOp(self):
+ try:
+ self.result = self.writer.next()
+ except StopIteration:
+ self.writer = None
+ self.result = None
+
+ def setHandshakeOp(self, handshaker):
+ """Start a handshake operation.
+
+ @type handshaker: generator
+ @param handshaker: A generator created by using one of the
+        asynchronous handshake functions (e.g. handshakeServerAsync, or
+        handshakeClientxxx(..., async=True)).
+ """
+ try:
+ self._checkAssert(0)
+ self.handshaker = handshaker
+ self._doHandshakeOp()
+ except:
+ self._clear()
+ raise
+
+ def setServerHandshakeOp(self, **args):
+ """Start a handshake operation.
+
+ The arguments passed to this function will be forwarded to
+ L{tlslite.TLSConnection.TLSConnection.handshakeServerAsync}.
+ """
+ handshaker = self.tlsConnection.handshakeServerAsync(**args)
+ self.setHandshakeOp(handshaker)
+
+ def setCloseOp(self):
+ """Start a close operation.
+ """
+ try:
+ self._checkAssert(0)
+ self.closer = self.tlsConnection.closeAsync()
+ self._doCloseOp()
+ except:
+ self._clear()
+ raise
+
+ def setWriteOp(self, writeBuffer):
+ """Start a write operation.
+
+ @type writeBuffer: str
+ @param writeBuffer: The string to transmit.
+ """
+ try:
+ self._checkAssert(0)
+ self.writer = self.tlsConnection.writeAsync(writeBuffer)
+ self._doWriteOp()
+ except:
+ self._clear()
+ raise
+
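+# Illustrative sketch (not part of the library) of a hand-rolled select()
+# loop driving a subclass instance ("machine") of AsyncStateMachine whose
+# tlsConnection wraps the socket "sock".  Real integrations normally go
+# through asyncore or Twisted instead:
+#
+#   import select
+#
+#   while True:
+#       rlist, wlist = [], []
+#       if machine.wantsReadEvent() is not False:   # True, or None when idle
+#           rlist.append(sock)
+#       if machine.wantsWriteEvent():
+#           wlist.append(sock)
+#       readable, writeable, _ = select.select(rlist, wlist, [], 1.0)
+#       if readable:
+#           machine.inReadEvent()
+#       if writeable:
+#           machine.inWriteEvent()
+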
diff --git a/python/gdata/tlslite/integration/ClientHelper.py b/python/gdata/tlslite/integration/ClientHelper.py
new file mode 100644
index 0000000..58e0152
--- /dev/null
+++ b/python/gdata/tlslite/integration/ClientHelper.py
@@ -0,0 +1,163 @@
+"""
+A helper class for using TLS Lite with stdlib clients
+(httplib, xmlrpclib, imaplib, poplib).
+"""
+
+from gdata.tlslite.Checker import Checker
+
+class ClientHelper:
+ """This is a helper class used to integrate TLS Lite with various
+ TLS clients (e.g. poplib, smtplib, httplib, etc.)"""
+
+ def __init__(self,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings = None):
+ """
+ For client authentication, use one of these argument
+ combinations:
+ - username, password (SRP)
+ - username, sharedKey (shared-key)
+ - certChain, privateKey (certificate)
+
+ For server authentication, you can either rely on the
+ implicit mutual authentication performed by SRP or
+ shared-keys, or you can do certificate-based server
+ authentication with one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ Certificate-based server authentication is compatible with
+ SRP or certificate-based client authentication. It is
+ not compatible with shared-keys.
+
+ The constructor does not perform the TLS handshake itself, but
+ simply stores these arguments for later. The handshake is
+ performed only when this class needs to connect with the
+        server. Thus you should be prepared to handle TLS-specific
+ exceptions. See the client handshake functions in
+ L{tlslite.TLSConnection.TLSConnection} for details on which
+ exceptions might be raised.
+
+ @type username: str
+ @param username: SRP or shared-key username. Requires the
+ 'password' or 'sharedKey' argument.
+
+ @type password: str
+ @param password: SRP password for mutual authentication.
+ Requires the 'username' argument.
+
+ @type sharedKey: str
+ @param sharedKey: Shared key for mutual authentication.
+ Requires the 'username' argument.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: Certificate chain for client authentication.
+ Requires the 'privateKey' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: Private key for client authentication.
+ Requires the 'certChain' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type cryptoID: str
+ @param cryptoID: cryptoID for server authentication. Mutually
+ exclusive with the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: cryptoID protocol URI for server
+ authentication. Requires the 'cryptoID' argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: Hex-encoded X.509 fingerprint for
+ server authentication. Mutually exclusive with the 'cryptoID'
+ and 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed to use this parameter. Mutually exclusive with the
+ 'cryptoID' and 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+ """
+
+ self.username = None
+ self.password = None
+ self.sharedKey = None
+ self.certChain = None
+ self.privateKey = None
+ self.checker = None
+
+ #SRP Authentication
+ if username and password and not \
+ (sharedKey or certChain or privateKey):
+ self.username = username
+ self.password = password
+
+ #Shared Key Authentication
+ elif username and sharedKey and not \
+ (password or certChain or privateKey):
+ self.username = username
+ self.sharedKey = sharedKey
+
+ #Certificate Chain Authentication
+ elif certChain and privateKey and not \
+ (username or password or sharedKey):
+ self.certChain = certChain
+ self.privateKey = privateKey
+
+ #No Authentication
+ elif not password and not username and not \
+ sharedKey and not certChain and not privateKey:
+ pass
+
+ else:
+ raise ValueError("Bad parameters")
+
+ #Authenticate the server based on its cryptoID or fingerprint
+ if sharedKey and (cryptoID or protocol or x509Fingerprint):
+ raise ValueError("Can't use shared keys with other forms of"\
+ "authentication")
+
+ self.checker = Checker(cryptoID, protocol, x509Fingerprint,
+ x509TrustList, x509CommonName)
+ self.settings = settings
+
+ self.tlsSession = None
+
+ def _handshake(self, tlsConnection):
+ if self.username and self.password:
+ tlsConnection.handshakeClientSRP(username=self.username,
+ password=self.password,
+ checker=self.checker,
+ settings=self.settings,
+ session=self.tlsSession)
+ elif self.username and self.sharedKey:
+ tlsConnection.handshakeClientSharedKey(username=self.username,
+ sharedKey=self.sharedKey,
+ settings=self.settings)
+ else:
+ tlsConnection.handshakeClientCert(certChain=self.certChain,
+ privateKey=self.privateKey,
+ checker=self.checker,
+ settings=self.settings,
+ session=self.tlsSession)
+ self.tlsSession = tlsConnection.session
diff --git a/python/gdata/tlslite/integration/HTTPTLSConnection.py b/python/gdata/tlslite/integration/HTTPTLSConnection.py
new file mode 100644
index 0000000..58e31a1
--- /dev/null
+++ b/python/gdata/tlslite/integration/HTTPTLSConnection.py
@@ -0,0 +1,169 @@
+"""TLS Lite + httplib."""
+
+import socket
+import httplib
+from gdata.tlslite.TLSConnection import TLSConnection
+from gdata.tlslite.integration.ClientHelper import ClientHelper
+
+
+class HTTPBaseTLSConnection(httplib.HTTPConnection):
+ """This abstract class provides a framework for adding TLS support
+ to httplib."""
+
+ default_port = 443
+
+ def __init__(self, host, port=None, strict=None):
+ if strict == None:
+ #Python 2.2 doesn't support strict
+ httplib.HTTPConnection.__init__(self, host, port)
+ else:
+ httplib.HTTPConnection.__init__(self, host, port, strict)
+
+ def connect(self):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ if hasattr(sock, 'settimeout'):
+ sock.settimeout(10)
+ sock.connect((self.host, self.port))
+
+ #Use a TLSConnection to emulate a socket
+ self.sock = TLSConnection(sock)
+
+ #When httplib closes this, close the socket
+ self.sock.closeSocket = True
+ self._handshake(self.sock)
+
+ def _handshake(self, tlsConnection):
+ """Called to perform some sort of handshake.
+
+ This method must be overridden in a subclass to do some type of
+ handshake. This method will be called after the socket has
+ been connected but before any data has been sent. If this
+ method does not raise an exception, the TLS connection will be
+ considered valid.
+
+ This method may (or may not) be called every time an HTTP
+ request is performed, depending on whether the underlying HTTP
+ connection is persistent.
+
+ @type tlsConnection: L{tlslite.TLSConnection.TLSConnection}
+ @param tlsConnection: The connection to perform the handshake
+ on.
+ """
+ raise NotImplementedError()
+
+
+class HTTPTLSConnection(HTTPBaseTLSConnection, ClientHelper):
+ """This class extends L{HTTPBaseTLSConnection} to support the
+ common types of handshaking."""
+
+ def __init__(self, host, port=None,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings = None):
+ """Create a new HTTPTLSConnection.
+
+ For client authentication, use one of these argument
+ combinations:
+ - username, password (SRP)
+ - username, sharedKey (shared-key)
+ - certChain, privateKey (certificate)
+
+ For server authentication, you can either rely on the
+ implicit mutual authentication performed by SRP or
+ shared-keys, or you can do certificate-based server
+ authentication with one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ Certificate-based server authentication is compatible with
+ SRP or certificate-based client authentication. It is
+ not compatible with shared-keys.
+
+ The constructor does not perform the TLS handshake itself, but
+ simply stores these arguments for later. The handshake is
+ performed only when this class needs to connect with the
+ server. Thus you should be prepared to handle TLS-specific
+ exceptions when calling methods inherited from
+ L{httplib.HTTPConnection} such as request(), connect(), and
+ send(). See the client handshake functions in
+ L{tlslite.TLSConnection.TLSConnection} for details on which
+ exceptions might be raised.
+
+ @type host: str
+ @param host: Server to connect to.
+
+ @type port: int
+ @param port: Port to connect to.
+
+ @type username: str
+ @param username: SRP or shared-key username. Requires the
+ 'password' or 'sharedKey' argument.
+
+ @type password: str
+ @param password: SRP password for mutual authentication.
+ Requires the 'username' argument.
+
+ @type sharedKey: str
+ @param sharedKey: Shared key for mutual authentication.
+ Requires the 'username' argument.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: Certificate chain for client authentication.
+ Requires the 'privateKey' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: Private key for client authentication.
+ Requires the 'certChain' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type cryptoID: str
+ @param cryptoID: cryptoID for server authentication. Mutually
+ exclusive with the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: cryptoID protocol URI for server
+ authentication. Requires the 'cryptoID' argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: Hex-encoded X.509 fingerprint for
+ server authentication. Mutually exclusive with the 'cryptoID'
+ and 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed to use this parameter. Mutually exclusive with the
+ 'cryptoID' and 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+ """
+
+ HTTPBaseTLSConnection.__init__(self, host, port)
+
+ ClientHelper.__init__(self,
+ username, password, sharedKey,
+ certChain, privateKey,
+ cryptoID, protocol,
+ x509Fingerprint,
+ x509TrustList, x509CommonName,
+ settings)
+
+ def _handshake(self, tlsConnection):
+ ClientHelper._handshake(self, tlsConnection)
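+
+# Illustrative sketch (not part of the library) of fetching a page over
+# HTTPS, authenticating the server by X.509 fingerprint; the host name
+# and fingerprint value are placeholders:
+#
+#   h = HTTPTLSConnection("www.example.com", 443,
+#                         x509Fingerprint="53a2...8d29")
+#   h.request("GET", "/")
+#   response = h.getresponse()
+#   print response.status
+#   print response.read()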
diff --git a/python/gdata/tlslite/integration/IMAP4_TLS.py b/python/gdata/tlslite/integration/IMAP4_TLS.py
new file mode 100644
index 0000000..e47076c
--- /dev/null
+++ b/python/gdata/tlslite/integration/IMAP4_TLS.py
@@ -0,0 +1,132 @@
+"""TLS Lite + imaplib."""
+
+import socket
+from imaplib import IMAP4
+from gdata.tlslite.TLSConnection import TLSConnection
+from gdata.tlslite.integration.ClientHelper import ClientHelper
+
+# IMAP TLS PORT
+IMAP4_TLS_PORT = 993
+
+class IMAP4_TLS(IMAP4, ClientHelper):
+ """This class extends L{imaplib.IMAP4} with TLS support."""
+
+ def __init__(self, host = '', port = IMAP4_TLS_PORT,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings=None):
+ """Create a new IMAP4_TLS.
+
+ For client authentication, use one of these argument
+ combinations:
+ - username, password (SRP)
+ - username, sharedKey (shared-key)
+ - certChain, privateKey (certificate)
+
+ For server authentication, you can either rely on the
+ implicit mutual authentication performed by SRP or
+ shared-keys, or you can do certificate-based server
+ authentication with one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ Certificate-based server authentication is compatible with
+ SRP or certificate-based client authentication. It is
+ not compatible with shared-keys.
+
+ The caller should be prepared to handle TLS-specific
+ exceptions. See the client handshake functions in
+ L{tlslite.TLSConnection.TLSConnection} for details on which
+ exceptions might be raised.
+
+ @type host: str
+ @param host: Server to connect to.
+
+ @type port: int
+ @param port: Port to connect to.
+
+ @type username: str
+ @param username: SRP or shared-key username. Requires the
+ 'password' or 'sharedKey' argument.
+
+ @type password: str
+ @param password: SRP password for mutual authentication.
+ Requires the 'username' argument.
+
+ @type sharedKey: str
+ @param sharedKey: Shared key for mutual authentication.
+ Requires the 'username' argument.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: Certificate chain for client authentication.
+ Requires the 'privateKey' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: Private key for client authentication.
+ Requires the 'certChain' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type cryptoID: str
+ @param cryptoID: cryptoID for server authentication. Mutually
+ exclusive with the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: cryptoID protocol URI for server
+ authentication. Requires the 'cryptoID' argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: Hex-encoded X.509 fingerprint for
+ server authentication. Mutually exclusive with the 'cryptoID'
+ and 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed to use this parameter. Mutually exclusive with the
+ 'cryptoID' and 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+ """
+
+ ClientHelper.__init__(self,
+ username, password, sharedKey,
+ certChain, privateKey,
+ cryptoID, protocol,
+ x509Fingerprint,
+ x509TrustList, x509CommonName,
+ settings)
+
+ IMAP4.__init__(self, host, port)
+
+
+ def open(self, host = '', port = IMAP4_TLS_PORT):
+ """Setup connection to remote server on "host:port".
+
+ This connection will be used by the routines:
+ read, readline, send, shutdown.
+ """
+ self.host = host
+ self.port = port
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.sock.connect((host, port))
+ self.sock = TLSConnection(self.sock)
+ self.sock.closeSocket = True
+ ClientHelper._handshake(self, self.sock)
+ self.file = self.sock.makefile('rb')
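+
+# Illustrative sketch (not part of the library) of connecting to an IMAP
+# server over TLS, authenticating the server by X.509 fingerprint; the
+# host, fingerprint, and mailbox credentials are placeholders:
+#
+#   imap = IMAP4_TLS("imap.example.com",
+#                    x509Fingerprint="53a2...8d29")
+#   imap.login("alice", "mailbox-password")
+#   imap.select("INBOX")
+#   imap.logout()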
diff --git a/python/gdata/tlslite/integration/IntegrationHelper.py b/python/gdata/tlslite/integration/IntegrationHelper.py
new file mode 100644
index 0000000..af5193b
--- /dev/null
+++ b/python/gdata/tlslite/integration/IntegrationHelper.py
@@ -0,0 +1,52 @@
+from gdata.tlslite.Checker import Checker
+
+
+class IntegrationHelper:
+
+ def __init__(self,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings = None):
+
+ self.username = None
+ self.password = None
+ self.sharedKey = None
+ self.certChain = None
+ self.privateKey = None
+ self.checker = None
+
+ #SRP Authentication
+ if username and password and not \
+ (sharedKey or certChain or privateKey):
+ self.username = username
+ self.password = password
+
+ #Shared Key Authentication
+ elif username and sharedKey and not \
+ (password or certChain or privateKey):
+ self.username = username
+ self.sharedKey = sharedKey
+
+ #Certificate Chain Authentication
+ elif certChain and privateKey and not \
+ (username or password or sharedKey):
+ self.certChain = certChain
+ self.privateKey = privateKey
+
+ #No Authentication
+ elif not password and not username and not \
+ sharedKey and not certChain and not privateKey:
+ pass
+
+ else:
+ raise ValueError("Bad parameters")
+
+ #Authenticate the server based on its cryptoID or fingerprint
+ if sharedKey and (cryptoID or protocol or x509Fingerprint):
+ raise ValueError("Can't use shared keys with other forms of"\
+ "authentication")
+
+ self.checker = Checker(cryptoID, protocol, x509Fingerprint,
+ x509TrustList, x509CommonName)
+ self.settings = settings
\ No newline at end of file
diff --git a/python/gdata/tlslite/integration/POP3_TLS.py b/python/gdata/tlslite/integration/POP3_TLS.py
new file mode 100644
index 0000000..26b37fd
--- /dev/null
+++ b/python/gdata/tlslite/integration/POP3_TLS.py
@@ -0,0 +1,142 @@
+"""TLS Lite + poplib."""
+
+import socket
+from poplib import POP3
+from gdata.tlslite.TLSConnection import TLSConnection
+from gdata.tlslite.integration.ClientHelper import ClientHelper
+
+# POP TLS PORT
+POP3_TLS_PORT = 995
+
+class POP3_TLS(POP3, ClientHelper):
+ """This class extends L{poplib.POP3} with TLS support."""
+
+ def __init__(self, host, port = POP3_TLS_PORT,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings=None):
+ """Create a new POP3_TLS.
+
+ For client authentication, use one of these argument
+ combinations:
+ - username, password (SRP)
+ - username, sharedKey (shared-key)
+ - certChain, privateKey (certificate)
+
+ For server authentication, you can either rely on the
+ implicit mutual authentication performed by SRP or
+ shared-keys, or you can do certificate-based server
+ authentication with one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ Certificate-based server authentication is compatible with
+ SRP or certificate-based client authentication. It is
+ not compatible with shared-keys.
+
+ The caller should be prepared to handle TLS-specific
+ exceptions. See the client handshake functions in
+ L{tlslite.TLSConnection.TLSConnection} for details on which
+ exceptions might be raised.
+
+ @type host: str
+ @param host: Server to connect to.
+
+ @type port: int
+ @param port: Port to connect to.
+
+ @type username: str
+ @param username: SRP or shared-key username. Requires the
+ 'password' or 'sharedKey' argument.
+
+ @type password: str
+ @param password: SRP password for mutual authentication.
+ Requires the 'username' argument.
+
+ @type sharedKey: str
+ @param sharedKey: Shared key for mutual authentication.
+ Requires the 'username' argument.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: Certificate chain for client authentication.
+ Requires the 'privateKey' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: Private key for client authentication.
+ Requires the 'certChain' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type cryptoID: str
+ @param cryptoID: cryptoID for server authentication. Mutually
+ exclusive with the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: cryptoID protocol URI for server
+ authentication. Requires the 'cryptoID' argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: Hex-encoded X.509 fingerprint for
+ server authentication. Mutually exclusive with the 'cryptoID'
+ and 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed to use this parameter. Mutually exclusive with the
+ 'cryptoID' and 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+ """
+
+ self.host = host
+ self.port = port
+ msg = "getaddrinfo returns an empty list"
+ self.sock = None
+ for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ try:
+ self.sock = socket.socket(af, socktype, proto)
+ self.sock.connect(sa)
+ except socket.error, msg:
+ if self.sock:
+ self.sock.close()
+ self.sock = None
+ continue
+ break
+ if not self.sock:
+ raise socket.error, msg
+
+ ### New code below (all else copied from poplib)
+ ClientHelper.__init__(self,
+ username, password, sharedKey,
+ certChain, privateKey,
+ cryptoID, protocol,
+ x509Fingerprint,
+ x509TrustList, x509CommonName,
+ settings)
+
+ self.sock = TLSConnection(self.sock)
+ self.sock.closeSocket = True
+ ClientHelper._handshake(self, self.sock)
+ ###
+
+ self.file = self.sock.makefile('rb')
+ self._debugging = 0
+ self.welcome = self._getresp()
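+
+# Illustrative sketch (not part of the library) of retrieving mail over
+# POP3/TLS; the host, fingerprint, and credentials are placeholders:
+#
+#   pop = POP3_TLS("pop.example.com",
+#                  x509Fingerprint="53a2...8d29")
+#   pop.user("alice")
+#   pop.pass_("mailbox-password")
+#   numMessages = len(pop.list()[1])
+#   pop.quit()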
diff --git a/python/gdata/tlslite/integration/SMTP_TLS.py b/python/gdata/tlslite/integration/SMTP_TLS.py
new file mode 100644
index 0000000..67e0feb
--- /dev/null
+++ b/python/gdata/tlslite/integration/SMTP_TLS.py
@@ -0,0 +1,114 @@
+"""TLS Lite + smtplib."""
+
+from smtplib import SMTP
+from gdata.tlslite.TLSConnection import TLSConnection
+from gdata.tlslite.integration.ClientHelper import ClientHelper
+
+class SMTP_TLS(SMTP):
+ """This class extends L{smtplib.SMTP} with TLS support."""
+
+ def starttls(self,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings=None):
+ """Puts the connection to the SMTP server into TLS mode.
+
+ If the server supports TLS, this will encrypt the rest of the SMTP
+ session.
+
+ For client authentication, use one of these argument
+ combinations:
+ - username, password (SRP)
+ - username, sharedKey (shared-key)
+ - certChain, privateKey (certificate)
+
+ For server authentication, you can either rely on the
+ implicit mutual authentication performed by SRP or
+ shared-keys, or you can do certificate-based server
+ authentication with one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ Certificate-based server authentication is compatible with
+ SRP or certificate-based client authentication. It is
+ not compatible with shared-keys.
+
+ The caller should be prepared to handle TLS-specific
+ exceptions. See the client handshake functions in
+ L{tlslite.TLSConnection.TLSConnection} for details on which
+ exceptions might be raised.
+
+ @type username: str
+ @param username: SRP or shared-key username. Requires the
+ 'password' or 'sharedKey' argument.
+
+ @type password: str
+ @param password: SRP password for mutual authentication.
+ Requires the 'username' argument.
+
+ @type sharedKey: str
+ @param sharedKey: Shared key for mutual authentication.
+ Requires the 'username' argument.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: Certificate chain for client authentication.
+ Requires the 'privateKey' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: Private key for client authentication.
+ Requires the 'certChain' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type cryptoID: str
+ @param cryptoID: cryptoID for server authentication. Mutually
+ exclusive with the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: cryptoID protocol URI for server
+ authentication. Requires the 'cryptoID' argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: Hex-encoded X.509 fingerprint for
+ server authentication. Mutually exclusive with the 'cryptoID'
+ and 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed to use this parameter. Mutually exclusive with the
+ 'cryptoID' and 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+ """
+ (resp, reply) = self.docmd("STARTTLS")
+ if resp == 220:
+ helper = ClientHelper(
+ username, password, sharedKey,
+ certChain, privateKey,
+ cryptoID, protocol,
+ x509Fingerprint,
+ x509TrustList, x509CommonName,
+ settings)
+ conn = TLSConnection(self.sock)
+ conn.closeSocket = True
+ helper._handshake(conn)
+ self.sock = conn
+ self.file = conn.makefile('rb')
+ return (resp, reply)
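+
+# Illustrative sketch (not part of the library) of upgrading an SMTP
+# session with starttls(); the host, fingerprint, and addresses are
+# placeholders:
+#
+#   smtp = SMTP_TLS("smtp.example.com")
+#   smtp.ehlo()
+#   smtp.starttls(x509Fingerprint="53a2...8d29")
+#   smtp.ehlo()     # re-identify over the now-encrypted channel
+#   smtp.sendmail("alice@example.com", ["bob@example.com"],
+#                 "Subject: hi\r\n\r\nhello")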
diff --git a/python/gdata/tlslite/integration/TLSAsyncDispatcherMixIn.py b/python/gdata/tlslite/integration/TLSAsyncDispatcherMixIn.py
new file mode 100644
index 0000000..f732f62
--- /dev/null
+++ b/python/gdata/tlslite/integration/TLSAsyncDispatcherMixIn.py
@@ -0,0 +1,139 @@
+"""TLS Lite + asyncore."""
+
+
+import asyncore
+from gdata.tlslite.TLSConnection import TLSConnection
+from AsyncStateMachine import AsyncStateMachine
+
+
+class TLSAsyncDispatcherMixIn(AsyncStateMachine):
+ """This class can be "mixed in" with an
+ L{asyncore.dispatcher} to add TLS support.
+
+ This class essentially sits between the dispatcher and the select
+ loop, intercepting events and only calling the dispatcher when
+ applicable.
+
+ In the case of handle_read(), a read operation will be activated,
+ and when it completes, the bytes will be placed in a buffer where
+ the dispatcher can retrieve them by calling recv(), and the
+ dispatcher's handle_read() will be called.
+
+ In the case of handle_write(), the dispatcher's handle_write() will
+ be called, and when it calls send(), a write operation will be
+ activated.
+
+ To use this class, you must combine it with an asyncore.dispatcher,
+ and pass in a handshake operation with setServerHandshakeOp().
+
+ Below is an example of using this class with medusa. This class is
+ mixed in with http_channel to create http_tls_channel. Note:
+ 1. the mix-in is listed first in the inheritance list
+
+ 2. the input buffer size must be at least 16K, otherwise the
+ dispatcher might not read all the bytes from the TLS layer,
+ leaving some bytes in limbo.
+
+ 3. IE seems to have a problem receiving a whole HTTP response in a
+ single TLS record, so HTML pages containing '\\r\\n\\r\\n' won't
+ be displayed on IE.
+
+ Add the following text into 'start_medusa.py', in the 'HTTP Server'
+ section::
+
+ from tlslite.api import *
+ s = open("./serverX509Cert.pem").read()
+ x509 = X509()
+ x509.parse(s)
+ certChain = X509CertChain([x509])
+
+ s = open("./serverX509Key.pem").read()
+ privateKey = parsePEMKey(s, private=True)
+
+ class http_tls_channel(TLSAsyncDispatcherMixIn,
+ http_server.http_channel):
+ ac_in_buffer_size = 16384
+
+ def __init__ (self, server, conn, addr):
+ http_server.http_channel.__init__(self, server, conn, addr)
+ TLSAsyncDispatcherMixIn.__init__(self, conn)
+ self.tlsConnection.ignoreAbruptClose = True
+ self.setServerHandshakeOp(certChain=certChain,
+ privateKey=privateKey)
+
+ hs.channel_class = http_tls_channel
+
+ If the TLS layer raises an exception, the exception will be caught
+ in asyncore.dispatcher, which will call close() on this class. The
+ TLS layer always closes the TLS connection before raising an
+ exception, so the close operation will complete right away, causing
+ asyncore.dispatcher.close() to be called, which closes the socket
+ and removes this instance from the asyncore loop.
+
+ """
+
+
+ def __init__(self, sock=None):
+ AsyncStateMachine.__init__(self)
+
+ if sock:
+ self.tlsConnection = TLSConnection(sock)
+
+ #Calculate the sibling I'm being mixed in with.
+ #This is necessary since we override functions
+ #like readable(), handle_read(), etc., but we
+ #also want to call the sibling's versions.
+ for cl in self.__class__.__bases__:
+ if cl != TLSAsyncDispatcherMixIn and cl != AsyncStateMachine:
+ self.siblingClass = cl
+ break
+ else:
+ raise AssertionError()
+
+ def readable(self):
+ result = self.wantsReadEvent()
+ if result != None:
+ return result
+ return self.siblingClass.readable(self)
+
+ def writable(self):
+ result = self.wantsWriteEvent()
+ if result != None:
+ return result
+ return self.siblingClass.writable(self)
+
+ def handle_read(self):
+ self.inReadEvent()
+
+ def handle_write(self):
+ self.inWriteEvent()
+
+ def outConnectEvent(self):
+ self.siblingClass.handle_connect(self)
+
+ def outCloseEvent(self):
+ asyncore.dispatcher.close(self)
+
+ def outReadEvent(self, readBuffer):
+ self.readBuffer = readBuffer
+ self.siblingClass.handle_read(self)
+
+ def outWriteEvent(self):
+ self.siblingClass.handle_write(self)
+
+ def recv(self, bufferSize=16384):
+ if bufferSize < 16384 or self.readBuffer == None:
+ raise AssertionError()
+ returnValue = self.readBuffer
+ self.readBuffer = None
+ return returnValue
+
+ def send(self, writeBuffer):
+ self.setWriteOp(writeBuffer)
+ return len(writeBuffer)
+
+ def close(self):
+ if hasattr(self, "tlsConnection"):
+ self.setCloseOp()
+ else:
+ asyncore.dispatcher.close(self)
diff --git a/python/gdata/tlslite/integration/TLSSocketServerMixIn.py b/python/gdata/tlslite/integration/TLSSocketServerMixIn.py
new file mode 100644
index 0000000..10224b6
--- /dev/null
+++ b/python/gdata/tlslite/integration/TLSSocketServerMixIn.py
@@ -0,0 +1,59 @@
+"""TLS Lite + SocketServer."""
+
+from gdata.tlslite.TLSConnection import TLSConnection
+
+class TLSSocketServerMixIn:
+ """
+ This class can be mixed in with any L{SocketServer.TCPServer} to
+ add TLS support.
+
+ To use this class, define a new class that inherits from it and
+ some L{SocketServer.TCPServer} (with the mix-in first). Then
+ implement the handshake() method, doing some sort of server
+ handshake on the connection argument. If the handshake method
+ returns True, the RequestHandler will be triggered. Below is a
+ complete example of a threaded HTTPS server::
+
+ from SocketServer import *
+ from BaseHTTPServer import *
+ from SimpleHTTPServer import *
+ from tlslite.api import *
+
+ s = open("./serverX509Cert.pem").read()
+ x509 = X509()
+ x509.parse(s)
+ certChain = X509CertChain([x509])
+
+ s = open("./serverX509Key.pem").read()
+ privateKey = parsePEMKey(s, private=True)
+
+ sessionCache = SessionCache()
+
+ class MyHTTPServer(ThreadingMixIn, TLSSocketServerMixIn,
+ HTTPServer):
+ def handshake(self, tlsConnection):
+ try:
+ tlsConnection.handshakeServer(certChain=certChain,
+ privateKey=privateKey,
+ sessionCache=sessionCache)
+ tlsConnection.ignoreAbruptClose = True
+ return True
+ except TLSError, error:
+ print "Handshake failure:", str(error)
+ return False
+
+ httpd = MyHTTPServer(('localhost', 443), SimpleHTTPRequestHandler)
+ httpd.serve_forever()
+ """
+
+
+ def finish_request(self, sock, client_address):
+ tlsConnection = TLSConnection(sock)
+ if self.handshake(tlsConnection) == True:
+ self.RequestHandlerClass(tlsConnection, client_address, self)
+ tlsConnection.close()
+
+ #Implement this method to do some form of handshaking. Return True
+ #if the handshake finishes properly and the request is authorized.
+ def handshake(self, tlsConnection):
+ raise NotImplementedError()
diff --git a/python/gdata/tlslite/integration/TLSTwistedProtocolWrapper.py b/python/gdata/tlslite/integration/TLSTwistedProtocolWrapper.py
new file mode 100644
index 0000000..c88703c
--- /dev/null
+++ b/python/gdata/tlslite/integration/TLSTwistedProtocolWrapper.py
@@ -0,0 +1,196 @@
+"""TLS Lite + Twisted."""
+
+from twisted.protocols.policies import ProtocolWrapper, WrappingFactory
+from twisted.python.failure import Failure
+
+from AsyncStateMachine import AsyncStateMachine
+from gdata.tlslite.TLSConnection import TLSConnection
+from gdata.tlslite.errors import *
+
+import socket
+import errno
+
+
+#The TLSConnection is created around a "fake socket" that
+#plugs it into the underlying Twisted transport
+class _FakeSocket:
+ def __init__(self, wrapper):
+ self.wrapper = wrapper
+ self.data = ""
+
+ def send(self, data):
+ ProtocolWrapper.write(self.wrapper, data)
+ return len(data)
+
+ def recv(self, numBytes):
+ if self.data == "":
+ raise socket.error, (errno.EWOULDBLOCK, "")
+ returnData = self.data[:numBytes]
+ self.data = self.data[numBytes:]
+ return returnData
+
+class TLSTwistedProtocolWrapper(ProtocolWrapper, AsyncStateMachine):
+ """This class can wrap Twisted protocols to add TLS support.
+
+ Below is a complete example of using TLS Lite with a Twisted echo
+ server.
+
+ There are two server implementations below. Echo is the original
+ protocol, which is oblivious to TLS. Echo1 subclasses Echo and
+ negotiates TLS when the client connects. Echo2 subclasses Echo and
+ negotiates TLS when the client sends "STARTTLS"::
+
+ from twisted.internet.protocol import Protocol, Factory
+ from twisted.internet import reactor
+ from twisted.protocols.policies import WrappingFactory
+ from twisted.protocols.basic import LineReceiver
+ from twisted.python import log
+ from twisted.python.failure import Failure
+ import sys
+ from tlslite.api import *
+
+ s = open("./serverX509Cert.pem").read()
+ x509 = X509()
+ x509.parse(s)
+ certChain = X509CertChain([x509])
+
+ s = open("./serverX509Key.pem").read()
+ privateKey = parsePEMKey(s, private=True)
+
+ verifierDB = VerifierDB("verifierDB")
+ verifierDB.open()
+
+ class Echo(LineReceiver):
+ def connectionMade(self):
+ self.transport.write("Welcome to the echo server!\\r\\n")
+
+ def lineReceived(self, line):
+ self.transport.write(line + "\\r\\n")
+
+ class Echo1(Echo):
+ def connectionMade(self):
+ if not self.transport.tlsStarted:
+ self.transport.setServerHandshakeOp(certChain=certChain,
+ privateKey=privateKey,
+ verifierDB=verifierDB)
+ else:
+ Echo.connectionMade(self)
+
+ def connectionLost(self, reason):
+ pass #Handle any TLS exceptions here
+
+ class Echo2(Echo):
+ def lineReceived(self, data):
+ if data == "STARTTLS":
+ self.transport.setServerHandshakeOp(certChain=certChain,
+ privateKey=privateKey,
+ verifierDB=verifierDB)
+ else:
+ Echo.lineReceived(self, data)
+
+ def connectionLost(self, reason):
+ pass #Handle any TLS exceptions here
+
+ factory = Factory()
+ factory.protocol = Echo1
+ #factory.protocol = Echo2
+
+ wrappingFactory = WrappingFactory(factory)
+ wrappingFactory.protocol = TLSTwistedProtocolWrapper
+
+ log.startLogging(sys.stdout)
+ reactor.listenTCP(1079, wrappingFactory)
+ reactor.run()
+
+ This class works as follows:
+
+ Data comes in and is given to the AsyncStateMachine for handling.
+ AsyncStateMachine will forward events to this class, and we'll
+ pass them on to the ProtocolHandler, which will proxy them to the
+ wrapped protocol. The wrapped protocol may then call back into
+ this class, and these calls will be proxied into the
+ AsyncStateMachine.
+
+ The call graph looks like this:
+ - self.dataReceived
+ - AsyncStateMachine.inReadEvent
+ - self.out(Connect|Close|Read)Event
+ - ProtocolWrapper.(connectionMade|loseConnection|dataReceived)
+ - self.(loseConnection|write|writeSequence)
+ - AsyncStateMachine.(setCloseOp|setWriteOp)
+ """
+
+ #WARNING: IF YOU COPY-AND-PASTE THE ABOVE CODE, BE SURE TO REMOVE
+ #THE EXTRA ESCAPING AROUND "\\r\\n"
+
+ def __init__(self, factory, wrappedProtocol):
+ ProtocolWrapper.__init__(self, factory, wrappedProtocol)
+ AsyncStateMachine.__init__(self)
+ self.fakeSocket = _FakeSocket(self)
+ self.tlsConnection = TLSConnection(self.fakeSocket)
+ self.tlsStarted = False
+ self.connectionLostCalled = False
+
+ def connectionMade(self):
+ try:
+ ProtocolWrapper.connectionMade(self)
+ except TLSError, e:
+ self.connectionLost(Failure(e))
+ ProtocolWrapper.loseConnection(self)
+
+ def dataReceived(self, data):
+ try:
+ if not self.tlsStarted:
+ ProtocolWrapper.dataReceived(self, data)
+ else:
+ self.fakeSocket.data += data
+ while self.fakeSocket.data:
+ AsyncStateMachine.inReadEvent(self)
+ except TLSError, e:
+ self.connectionLost(Failure(e))
+ ProtocolWrapper.loseConnection(self)
+
+ def connectionLost(self, reason):
+ if not self.connectionLostCalled:
+ ProtocolWrapper.connectionLost(self, reason)
+ self.connectionLostCalled = True
+
+
+ def outConnectEvent(self):
+ ProtocolWrapper.connectionMade(self)
+
+ def outCloseEvent(self):
+ ProtocolWrapper.loseConnection(self)
+
+ def outReadEvent(self, data):
+ if data == "":
+ ProtocolWrapper.loseConnection(self)
+ else:
+ ProtocolWrapper.dataReceived(self, data)
+
+
+ def setServerHandshakeOp(self, **args):
+ self.tlsStarted = True
+ AsyncStateMachine.setServerHandshakeOp(self, **args)
+
+ def loseConnection(self):
+ if not self.tlsStarted:
+ ProtocolWrapper.loseConnection(self)
+ else:
+ AsyncStateMachine.setCloseOp(self)
+
+ def write(self, data):
+ if not self.tlsStarted:
+ ProtocolWrapper.write(self, data)
+ else:
+ #Because of the FakeSocket, write operations are guaranteed to
+ #terminate immediately.
+ AsyncStateMachine.setWriteOp(self, data)
+
+ def writeSequence(self, seq):
+ if not self.tlsStarted:
+ ProtocolWrapper.writeSequence(self, seq)
+ else:
+ #Because of the FakeSocket, write operations are guaranteed to
+ #terminate immediately.
+ AsyncStateMachine.setWriteOp(self, "".join(seq))
diff --git a/python/gdata/tlslite/integration/XMLRPCTransport.py b/python/gdata/tlslite/integration/XMLRPCTransport.py
new file mode 100644
index 0000000..3f025e4
--- /dev/null
+++ b/python/gdata/tlslite/integration/XMLRPCTransport.py
@@ -0,0 +1,137 @@
+"""TLS Lite + xmlrpclib."""
+
+import xmlrpclib
+import httplib
+from gdata.tlslite.integration.HTTPTLSConnection import HTTPTLSConnection
+from gdata.tlslite.integration.ClientHelper import ClientHelper
+
+
+class XMLRPCTransport(xmlrpclib.Transport, ClientHelper):
+ """Handles an HTTPS transaction to an XML-RPC server."""
+
+ def __init__(self,
+ username=None, password=None, sharedKey=None,
+ certChain=None, privateKey=None,
+ cryptoID=None, protocol=None,
+ x509Fingerprint=None,
+ x509TrustList=None, x509CommonName=None,
+ settings=None):
+ """Create a new XMLRPCTransport.
+
+ An instance of this class can be passed to L{xmlrpclib.ServerProxy}
+ to use TLS with XML-RPC calls::
+
+ from tlslite.api import XMLRPCTransport
+ from xmlrpclib import ServerProxy
+
+            transport = XMLRPCTransport(username="alice", password="abra123")
+ server = ServerProxy("https://localhost", transport)
+
+ For client authentication, use one of these argument
+ combinations:
+ - username, password (SRP)
+ - username, sharedKey (shared-key)
+ - certChain, privateKey (certificate)
+
+ For server authentication, you can either rely on the
+ implicit mutual authentication performed by SRP or
+ shared-keys, or you can do certificate-based server
+ authentication with one of these argument combinations:
+ - cryptoID[, protocol] (requires cryptoIDlib)
+ - x509Fingerprint
+ - x509TrustList[, x509CommonName] (requires cryptlib_py)
+
+ Certificate-based server authentication is compatible with
+ SRP or certificate-based client authentication. It is
+ not compatible with shared-keys.
+
+ The constructor does not perform the TLS handshake itself, but
+ simply stores these arguments for later. The handshake is
+ performed only when this class needs to connect with the
+ server. Thus you should be prepared to handle TLS-specific
+ exceptions when calling methods of L{xmlrpclib.ServerProxy}. See the
+ client handshake functions in
+ L{tlslite.TLSConnection.TLSConnection} for details on which
+ exceptions might be raised.
+
+ @type username: str
+ @param username: SRP or shared-key username. Requires the
+ 'password' or 'sharedKey' argument.
+
+ @type password: str
+ @param password: SRP password for mutual authentication.
+ Requires the 'username' argument.
+
+ @type sharedKey: str
+ @param sharedKey: Shared key for mutual authentication.
+ Requires the 'username' argument.
+
+ @type certChain: L{tlslite.X509CertChain.X509CertChain} or
+ L{cryptoIDlib.CertChain.CertChain}
+ @param certChain: Certificate chain for client authentication.
+ Requires the 'privateKey' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
+ @param privateKey: Private key for client authentication.
+ Requires the 'certChain' argument. Excludes the SRP or
+ shared-key related arguments.
+
+ @type cryptoID: str
+ @param cryptoID: cryptoID for server authentication. Mutually
+ exclusive with the 'x509...' arguments.
+
+ @type protocol: str
+ @param protocol: cryptoID protocol URI for server
+ authentication. Requires the 'cryptoID' argument.
+
+ @type x509Fingerprint: str
+ @param x509Fingerprint: Hex-encoded X.509 fingerprint for
+ server authentication. Mutually exclusive with the 'cryptoID'
+ and 'x509TrustList' arguments.
+
+ @type x509TrustList: list of L{tlslite.X509.X509}
+ @param x509TrustList: A list of trusted root certificates. The
+ other party must present a certificate chain which extends to
+ one of these root certificates. The cryptlib_py module must be
+ installed to use this parameter. Mutually exclusive with the
+ 'cryptoID' and 'x509Fingerprint' arguments.
+
+ @type x509CommonName: str
+ @param x509CommonName: The end-entity certificate's 'CN' field
+ must match this value. For a web server, this is typically a
+ server name such as 'www.amazon.com'. Mutually exclusive with
+ the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
+ 'x509TrustList' argument.
+
+ @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
+ @param settings: Various settings which can be used to control
+ the ciphersuites, certificate types, and SSL/TLS versions
+ offered by the client.
+ """
+
+ ClientHelper.__init__(self,
+ username, password, sharedKey,
+ certChain, privateKey,
+ cryptoID, protocol,
+ x509Fingerprint,
+ x509TrustList, x509CommonName,
+ settings)
+
+
+ def make_connection(self, host):
+ # create a HTTPS connection object from a host descriptor
+ host, extra_headers, x509 = self.get_host_info(host)
+ http = HTTPTLSConnection(host, None,
+ self.username, self.password,
+ self.sharedKey,
+ self.certChain, self.privateKey,
+ self.checker.cryptoID,
+ self.checker.protocol,
+ self.checker.x509Fingerprint,
+ self.checker.x509TrustList,
+ self.checker.x509CommonName,
+ self.settings)
+ http2 = httplib.HTTP()
+ http2._setup(http)
+ return http2
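+
+    #Editorial note (not in the original source): xmlrpclib in Python 2
+    #expects the old httplib.HTTP interface, so the new-style
+    #HTTPTLSConnection is wrapped via the httplib.HTTP()._setup()
+    #compatibility shim above.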
diff --git a/python/gdata/tlslite/integration/__init__.py b/python/gdata/tlslite/integration/__init__.py
new file mode 100644
index 0000000..960f406
--- /dev/null
+++ b/python/gdata/tlslite/integration/__init__.py
@@ -0,0 +1,17 @@
+"""Classes for integrating TLS Lite with other packages."""
+
+__all__ = ["AsyncStateMachine",
+ "HTTPTLSConnection",
+ "POP3_TLS",
+ "IMAP4_TLS",
+ "SMTP_TLS",
+ "XMLRPCTransport",
+ "TLSSocketServerMixIn",
+ "TLSAsyncDispatcherMixIn",
+ "TLSTwistedProtocolWrapper"]
+
+try:
+ import twisted
+ del twisted
+except ImportError:
+ del __all__[__all__.index("TLSTwistedProtocolWrapper")]
diff --git a/python/gdata/tlslite/mathtls.py b/python/gdata/tlslite/mathtls.py
new file mode 100644
index 0000000..3b8ede6
--- /dev/null
+++ b/python/gdata/tlslite/mathtls.py
@@ -0,0 +1,170 @@
+"""Miscellaneous helper functions."""
+
+from utils.compat import *
+from utils.cryptomath import *
+
+import hmac
+import md5
+import sha
+
+#1024, 1536, 2048, 3072, 4096, 6144, and 8192 bit groups
+goodGroupParameters = [(2,0xEEAF0AB9ADB38DD69C33F80AFA8FC5E86072618775FF3C0B9EA2314C9C256576D674DF7496EA81D3383B4813D692C6E0E0D5D8E250B98BE48E495C1D6089DAD15DC7D7B46154D6B6CE8EF4AD69B15D4982559B297BCF1885C529F566660E57EC68EDBC3C05726CC02FD4CBF4976EAA9AFD5138FE8376435B9FC61D2FC0EB06E3),\
+ (2,0x9DEF3CAFB939277AB1F12A8617A47BBBDBA51DF499AC4C80BEEEA9614B19CC4D5F4F5F556E27CBDE51C6A94BE4607A291558903BA0D0F84380B655BB9A22E8DCDF028A7CEC67F0D08134B1C8B97989149B609E0BE3BAB63D47548381DBC5B1FC764E3F4B53DD9DA1158BFD3E2B9C8CF56EDF019539349627DB2FD53D24B7C48665772E437D6C7F8CE442734AF7CCB7AE837C264AE3A9BEB87F8A2FE9B8B5292E5A021FFF5E91479E8CE7A28C2442C6F315180F93499A234DCF76E3FED135F9BB),\
+ (2,0xAC6BDB41324A9A9BF166DE5E1389582FAF72B6651987EE07FC3192943DB56050A37329CBB4A099ED8193E0757767A13DD52312AB4B03310DCD7F48A9DA04FD50E8083969EDB767B0CF6095179A163AB3661A05FBD5FAAAE82918A9962F0B93B855F97993EC975EEAA80D740ADBF4FF747359D041D5C33EA71D281E446B14773BCA97B43A23FB801676BD207A436C6481F1D2B9078717461A5B9D32E688F87748544523B524B0D57D5EA77A2775D2ECFA032CFBDBF52FB3786160279004E57AE6AF874E7303CE53299CCC041C7BC308D82A5698F3A8D0C38271AE35F8E9DBFBB694B5C803D89F7AE435DE236D525F54759B65E372FCD68EF20FA7111F9E4AFF73),\
+ (2,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF),\
+ (5,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF),\
+ (5,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF),\
+ (5,0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E438777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F5683423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD922222E04A4037C0713EB57A81A23F0C73473FC646CEA306B4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A364597E899A0255DC164F31CC50846851DF9AB48195DED7EA1B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F924009438B481C6CD7889A002ED5EE382BC9190DA6FC026E479558E4475677E9AA9E3050E2765694DFC81F56E880B96E7160C980DD98EDD3DFFFFFFFFFFFFFFFFF)]
+
+def P_hash(hashModule, secret, seed, length):
+ bytes = createByteArrayZeros(length)
+ secret = bytesToString(secret)
+ seed = bytesToString(seed)
+ A = seed
+ index = 0
+ while 1:
+ A = hmac.HMAC(secret, A, hashModule).digest()
+ output = hmac.HMAC(secret, A+seed, hashModule).digest()
+ for c in output:
+ if index >= length:
+ return bytes
+ bytes[index] = ord(c)
+ index += 1
+ return bytes
+
+def PRF(secret, label, seed, length):
+ #Split the secret into left and right halves
+ S1 = secret[ : int(math.ceil(len(secret)/2.0))]
+ S2 = secret[ int(math.floor(len(secret)/2.0)) : ]
+
+ #Run the left half through P_MD5 and the right half through P_SHA1
+ p_md5 = P_hash(md5, S1, concatArrays(stringToBytes(label), seed), length)
+ p_sha1 = P_hash(sha, S2, concatArrays(stringToBytes(label), seed), length)
+
+ #XOR the output values and return the result
+ for x in range(length):
+ p_md5[x] ^= p_sha1[x]
+ return p_md5
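+#Illustrative usage (not part of the original module): RFC 2246 derives the
+#48-byte master secret with this PRF, roughly
+#   master = PRF(premasterSecret, "master secret",
+#                concatArrays(clientRandom, serverRandom), 48)
+#where the secret and the two 32-byte randoms are byte arrays.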
+
+
+def PRF_SSL(secret, seed, length):
+ secretStr = bytesToString(secret)
+ seedStr = bytesToString(seed)
+ bytes = createByteArrayZeros(length)
+ index = 0
+ for x in range(26):
+ A = chr(ord('A')+x) * (x+1) # 'A', 'BB', 'CCC', etc..
+ input = secretStr + sha.sha(A + secretStr + seedStr).digest()
+ output = md5.md5(input).digest()
+ for c in output:
+ if index >= length:
+ return bytes
+ bytes[index] = ord(c)
+ index += 1
+ return bytes
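+#Editorial note (not in the original source): this is the SSL 3.0 key
+#derivation, where each round hashes an increasing repeated-letter prefix
+#('A', 'BB', 'CCC', ...) as MD5(secret + SHA1(prefix + secret + seed));
+#TLS 1.0 and later use PRF() above instead.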
+
+def makeX(salt, username, password):
+ if len(username)>=256:
+ raise ValueError("username too long")
+ if len(salt)>=256:
+ raise ValueError("salt too long")
+ return stringToNumber(sha.sha(salt + sha.sha(username + ":" + password)\
+ .digest()).digest())
+
+#This function is used by VerifierDB.makeVerifier
+def makeVerifier(username, password, bits):
+ bitsIndex = {1024:0, 1536:1, 2048:2, 3072:3, 4096:4, 6144:5, 8192:6}[bits]
+ g,N = goodGroupParameters[bitsIndex]
+ salt = bytesToString(getRandomBytes(16))
+ x = makeX(salt, username, password)
+ verifier = powMod(g, x, N)
+ return N, g, salt, verifier
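+#Illustrative usage (not part of the original source):
+#   N, g, salt, verifier = makeVerifier("alice", "password123", 2048)
+#returns the values VerifierDB stores, with
+#   verifier == powMod(g, makeX(salt, "alice", "password123"), N)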
+
+def PAD(n, x):
+ nLength = len(numberToString(n))
+ s = numberToString(x)
+ if len(s) < nLength:
+ s = ("\0" * (nLength-len(s))) + s
+ return s
+
+def makeU(N, A, B):
+ return stringToNumber(sha.sha(PAD(N, A) + PAD(N, B)).digest())
+
+def makeK(N, g):
+ return stringToNumber(sha.sha(numberToString(N) + PAD(N, g)).digest())
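+#Editorial note (not in the original source): makeU and makeK compute the
+#SRP-6a values u = SHA1(PAD(A) | PAD(B)) and k = SHA1(N | PAD(g)); the server
+#uses k when forming B = (k*v + g^b) % N, and both sides mix u into the
+#shared premaster secret.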
+
+
+"""
+MAC_SSL
+Modified from Python HMAC by Trevor
+"""
+
+class MAC_SSL:
+ """MAC_SSL class.
+
+ This supports the API for Cryptographic Hash Functions (PEP 247).
+ """
+
+ def __init__(self, key, msg = None, digestmod = None):
+ """Create a new MAC_SSL object.
+
+ key: key for the keyed hash object.
+ msg: Initial input for the hash, if provided.
+ digestmod: A module supporting PEP 247. Defaults to the md5 module.
+ """
+ if digestmod is None:
+ import md5
+ digestmod = md5
+
+ if key == None: #TREVNEW - for faster copying
+ return #TREVNEW
+
+ self.digestmod = digestmod
+ self.outer = digestmod.new()
+ self.inner = digestmod.new()
+ self.digest_size = digestmod.digest_size
+
+ ipad = "\x36" * 40
+ opad = "\x5C" * 40
+
+ self.inner.update(key)
+ self.inner.update(ipad)
+ self.outer.update(key)
+ self.outer.update(opad)
+ if msg is not None:
+ self.update(msg)
+
+
+ def update(self, msg):
+ """Update this hashing object with the string msg.
+ """
+ self.inner.update(msg)
+
+ def copy(self):
+ """Return a separate copy of this hashing object.
+
+ An update to this copy won't affect the original object.
+ """
+ other = MAC_SSL(None) #TREVNEW - for faster copying
+ other.digest_size = self.digest_size #TREVNEW
+ other.digestmod = self.digestmod
+ other.inner = self.inner.copy()
+ other.outer = self.outer.copy()
+ return other
+
+ def digest(self):
+ """Return the hash value of this hashing object.
+
+ This returns a string containing 8-bit data. The object is
+ not altered in any way by this function; you can continue
+ updating the object after calling this function.
+ """
+ h = self.outer.copy()
+ h.update(self.inner.digest())
+ return h.digest()
+
+ def hexdigest(self):
+ """Like digest(), but returns a string of hexadecimal digits instead.
+ """
+ return "".join([hex(ord(x))[2:].zfill(2)
+ for x in tuple(self.digest())])
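+
+#Illustrative usage (not part of the original module): MAC_SSL follows the
+#same API as the hmac module, e.g.
+#   m = MAC_SSL(macKey, digestmod=md5)
+#   m.update(record)
+#   tag = m.digest()
+#but uses SSLv3's pad-concatenation construction rather than true HMAC.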
diff --git a/python/gdata/tlslite/messages.py b/python/gdata/tlslite/messages.py
new file mode 100644
index 0000000..afccc79
--- /dev/null
+++ b/python/gdata/tlslite/messages.py
@@ -0,0 +1,561 @@
+"""Classes representing TLS messages."""
+
+from utils.compat import *
+from utils.cryptomath import *
+from errors import *
+from utils.codec import *
+from constants import *
+from X509 import X509
+from X509CertChain import X509CertChain
+
+import sha
+import md5
+
+class RecordHeader3:
+ def __init__(self):
+ self.type = 0
+ self.version = (0,0)
+ self.length = 0
+ self.ssl2 = False
+
+ def create(self, version, type, length):
+ self.type = type
+ self.version = version
+ self.length = length
+ return self
+
+ def write(self):
+ w = Writer(5)
+ w.add(self.type, 1)
+ w.add(self.version[0], 1)
+ w.add(self.version[1], 1)
+ w.add(self.length, 2)
+ return w.bytes
+
+ def parse(self, p):
+ self.type = p.get(1)
+ self.version = (p.get(1), p.get(1))
+ self.length = p.get(2)
+ self.ssl2 = False
+ return self
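+#Editorial note (not in the original source): the 5-byte TLS record header is
+#   uint8 type, uint8 version_major, uint8 version_minor, uint16 length
+#which is exactly what write() and parse() above serialize.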
+
+class RecordHeader2:
+ def __init__(self):
+ self.type = 0
+ self.version = (0,0)
+ self.length = 0
+ self.ssl2 = True
+
+ def parse(self, p):
+ if p.get(1)!=128:
+ raise SyntaxError()
+ self.type = ContentType.handshake
+ self.version = (2,0)
+ #We don't support 2-byte-length-headers; could be a problem
+ self.length = p.get(1)
+ return self
+
+
+class Msg:
+ def preWrite(self, trial):
+ if trial:
+ w = Writer()
+ else:
+ length = self.write(True)
+ w = Writer(length)
+ return w
+
+ def postWrite(self, w, trial):
+ if trial:
+ return w.index
+ else:
+ return w.bytes
+
+class Alert(Msg):
+ def __init__(self):
+ self.contentType = ContentType.alert
+ self.level = 0
+ self.description = 0
+
+ def create(self, description, level=AlertLevel.fatal):
+ self.level = level
+ self.description = description
+ return self
+
+ def parse(self, p):
+ p.setLengthCheck(2)
+ self.level = p.get(1)
+ self.description = p.get(1)
+ p.stopLengthCheck()
+ return self
+
+ def write(self):
+ w = Writer(2)
+ w.add(self.level, 1)
+ w.add(self.description, 1)
+ return w.bytes
+
+
+class HandshakeMsg(Msg):
+ def preWrite(self, handshakeType, trial):
+ if trial:
+ w = Writer()
+ w.add(handshakeType, 1)
+ w.add(0, 3)
+ else:
+ length = self.write(True)
+ w = Writer(length)
+ w.add(handshakeType, 1)
+ w.add(length-4, 3)
+ return w
+
+
+class ClientHello(HandshakeMsg):
+ def __init__(self, ssl2=False):
+ self.contentType = ContentType.handshake
+ self.ssl2 = ssl2
+ self.client_version = (0,0)
+ self.random = createByteArrayZeros(32)
+ self.session_id = createByteArraySequence([])
+ self.cipher_suites = [] # a list of 16-bit values
+ self.certificate_types = [CertificateType.x509]
+ self.compression_methods = [] # a list of 8-bit values
+ self.srp_username = None # a string
+
+ def create(self, version, random, session_id, cipher_suites,
+ certificate_types=None, srp_username=None):
+ self.client_version = version
+ self.random = random
+ self.session_id = session_id
+ self.cipher_suites = cipher_suites
+ self.certificate_types = certificate_types
+ self.compression_methods = [0]
+ self.srp_username = srp_username
+ return self
+
+ def parse(self, p):
+ if self.ssl2:
+ self.client_version = (p.get(1), p.get(1))
+ cipherSpecsLength = p.get(2)
+ sessionIDLength = p.get(2)
+ randomLength = p.get(2)
+ self.cipher_suites = p.getFixList(3, int(cipherSpecsLength/3))
+ self.session_id = p.getFixBytes(sessionIDLength)
+ self.random = p.getFixBytes(randomLength)
+ if len(self.random) < 32:
+ zeroBytes = 32-len(self.random)
+ self.random = createByteArrayZeros(zeroBytes) + self.random
+ self.compression_methods = [0]#Fake this value
+
+ #We're not doing a stopLengthCheck() for SSLv2, oh well..
+ else:
+ p.startLengthCheck(3)
+ self.client_version = (p.get(1), p.get(1))
+ self.random = p.getFixBytes(32)
+ self.session_id = p.getVarBytes(1)
+ self.cipher_suites = p.getVarList(2, 2)
+ self.compression_methods = p.getVarList(1, 1)
+ if not p.atLengthCheck():
+ totalExtLength = p.get(2)
+ soFar = 0
+ while soFar != totalExtLength:
+ extType = p.get(2)
+ extLength = p.get(2)
+ if extType == 6:
+ self.srp_username = bytesToString(p.getVarBytes(1))
+ elif extType == 7:
+ self.certificate_types = p.getVarList(1, 1)
+ else:
+ p.getFixBytes(extLength)
+ soFar += 4 + extLength
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.client_hello, trial)
+ w.add(self.client_version[0], 1)
+ w.add(self.client_version[1], 1)
+ w.addFixSeq(self.random, 1)
+ w.addVarSeq(self.session_id, 1, 1)
+ w.addVarSeq(self.cipher_suites, 2, 2)
+ w.addVarSeq(self.compression_methods, 1, 1)
+
+ extLength = 0
+ if self.certificate_types and self.certificate_types != \
+ [CertificateType.x509]:
+ extLength += 5 + len(self.certificate_types)
+ if self.srp_username:
+ extLength += 5 + len(self.srp_username)
+ if extLength > 0:
+ w.add(extLength, 2)
+
+ if self.certificate_types and self.certificate_types != \
+ [CertificateType.x509]:
+ w.add(7, 2)
+ w.add(len(self.certificate_types)+1, 2)
+ w.addVarSeq(self.certificate_types, 1, 1)
+ if self.srp_username:
+ w.add(6, 2)
+ w.add(len(self.srp_username)+1, 2)
+ w.addVarSeq(stringToBytes(self.srp_username), 1, 1)
+
+ return HandshakeMsg.postWrite(self, w, trial)
+
+
+class ServerHello(HandshakeMsg):
+ def __init__(self):
+ self.contentType = ContentType.handshake
+ self.server_version = (0,0)
+ self.random = createByteArrayZeros(32)
+ self.session_id = createByteArraySequence([])
+ self.cipher_suite = 0
+ self.certificate_type = CertificateType.x509
+ self.compression_method = 0
+
+ def create(self, version, random, session_id, cipher_suite,
+ certificate_type):
+ self.server_version = version
+ self.random = random
+ self.session_id = session_id
+ self.cipher_suite = cipher_suite
+ self.certificate_type = certificate_type
+ self.compression_method = 0
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ self.server_version = (p.get(1), p.get(1))
+ self.random = p.getFixBytes(32)
+ self.session_id = p.getVarBytes(1)
+ self.cipher_suite = p.get(2)
+ self.compression_method = p.get(1)
+ if not p.atLengthCheck():
+ totalExtLength = p.get(2)
+ soFar = 0
+ while soFar != totalExtLength:
+ extType = p.get(2)
+ extLength = p.get(2)
+ if extType == 7:
+ self.certificate_type = p.get(1)
+ else:
+ p.getFixBytes(extLength)
+ soFar += 4 + extLength
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.server_hello, trial)
+ w.add(self.server_version[0], 1)
+ w.add(self.server_version[1], 1)
+ w.addFixSeq(self.random, 1)
+ w.addVarSeq(self.session_id, 1, 1)
+ w.add(self.cipher_suite, 2)
+ w.add(self.compression_method, 1)
+
+ extLength = 0
+ if self.certificate_type and self.certificate_type != \
+ CertificateType.x509:
+ extLength += 5
+
+ if extLength != 0:
+ w.add(extLength, 2)
+
+ if self.certificate_type and self.certificate_type != \
+ CertificateType.x509:
+ w.add(7, 2)
+ w.add(1, 2)
+ w.add(self.certificate_type, 1)
+
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class Certificate(HandshakeMsg):
+ def __init__(self, certificateType):
+ self.certificateType = certificateType
+ self.contentType = ContentType.handshake
+ self.certChain = None
+
+ def create(self, certChain):
+ self.certChain = certChain
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ if self.certificateType == CertificateType.x509:
+ chainLength = p.get(3)
+ index = 0
+ certificate_list = []
+ while index != chainLength:
+ certBytes = p.getVarBytes(3)
+ x509 = X509()
+ x509.parseBinary(certBytes)
+ certificate_list.append(x509)
+ index += len(certBytes)+3
+ if certificate_list:
+ self.certChain = X509CertChain(certificate_list)
+ elif self.certificateType == CertificateType.cryptoID:
+ s = bytesToString(p.getVarBytes(2))
+ if s:
+ try:
+ import cryptoIDlib.CertChain
+ except ImportError:
+ raise SyntaxError(\
+ "cryptoID cert chain received, cryptoIDlib not present")
+ self.certChain = cryptoIDlib.CertChain.CertChain().parse(s)
+ else:
+ raise AssertionError()
+
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.certificate, trial)
+ if self.certificateType == CertificateType.x509:
+ chainLength = 0
+ if self.certChain:
+ certificate_list = self.certChain.x509List
+ else:
+ certificate_list = []
+ #determine length
+ for cert in certificate_list:
+ bytes = cert.writeBytes()
+ chainLength += len(bytes)+3
+ #add bytes
+ w.add(chainLength, 3)
+ for cert in certificate_list:
+ bytes = cert.writeBytes()
+ w.addVarSeq(bytes, 1, 3)
+ elif self.certificateType == CertificateType.cryptoID:
+ if self.certChain:
+ bytes = stringToBytes(self.certChain.write())
+ else:
+ bytes = createByteArraySequence([])
+ w.addVarSeq(bytes, 1, 2)
+ else:
+ raise AssertionError()
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class CertificateRequest(HandshakeMsg):
+ def __init__(self):
+ self.contentType = ContentType.handshake
+ self.certificate_types = []
+ #treat as opaque bytes for now
+ self.certificate_authorities = createByteArraySequence([])
+
+ def create(self, certificate_types, certificate_authorities):
+ self.certificate_types = certificate_types
+ self.certificate_authorities = certificate_authorities
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ self.certificate_types = p.getVarList(1, 1)
+ self.certificate_authorities = p.getVarBytes(2)
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.certificate_request,
+ trial)
+ w.addVarSeq(self.certificate_types, 1, 1)
+ w.addVarSeq(self.certificate_authorities, 1, 2)
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class ServerKeyExchange(HandshakeMsg):
+ def __init__(self, cipherSuite):
+ self.cipherSuite = cipherSuite
+ self.contentType = ContentType.handshake
+ self.srp_N = 0L
+ self.srp_g = 0L
+ self.srp_s = createByteArraySequence([])
+ self.srp_B = 0L
+ self.signature = createByteArraySequence([])
+
+ def createSRP(self, srp_N, srp_g, srp_s, srp_B):
+ self.srp_N = srp_N
+ self.srp_g = srp_g
+ self.srp_s = srp_s
+ self.srp_B = srp_B
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ self.srp_N = bytesToNumber(p.getVarBytes(2))
+ self.srp_g = bytesToNumber(p.getVarBytes(2))
+ self.srp_s = p.getVarBytes(1)
+ self.srp_B = bytesToNumber(p.getVarBytes(2))
+ if self.cipherSuite in CipherSuite.srpRsaSuites:
+ self.signature = p.getVarBytes(2)
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.server_key_exchange,
+ trial)
+ w.addVarSeq(numberToBytes(self.srp_N), 1, 2)
+ w.addVarSeq(numberToBytes(self.srp_g), 1, 2)
+ w.addVarSeq(self.srp_s, 1, 1)
+ w.addVarSeq(numberToBytes(self.srp_B), 1, 2)
+ if self.cipherSuite in CipherSuite.srpRsaSuites:
+ w.addVarSeq(self.signature, 1, 2)
+ return HandshakeMsg.postWrite(self, w, trial)
+
+ def hash(self, clientRandom, serverRandom):
+ oldCipherSuite = self.cipherSuite
+ self.cipherSuite = None
+ try:
+ bytes = clientRandom + serverRandom + self.write()[4:]
+ s = bytesToString(bytes)
+ return stringToBytes(md5.md5(s).digest() + sha.sha(s).digest())
+ finally:
+ self.cipherSuite = oldCipherSuite
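+    #Editorial note (not in the original source): the MD5||SHA-1 digest built
+    #above covers client_random + server_random + the serialized SRP
+    #parameters; it is what the server signs (and the client verifies) in the
+    #SRP-RSA cipher suites.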
+
+class ServerHelloDone(HandshakeMsg):
+ def __init__(self):
+ self.contentType = ContentType.handshake
+
+ def create(self):
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.server_hello_done, trial)
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class ClientKeyExchange(HandshakeMsg):
+ def __init__(self, cipherSuite, version=None):
+ self.cipherSuite = cipherSuite
+ self.version = version
+ self.contentType = ContentType.handshake
+ self.srp_A = 0
+ self.encryptedPreMasterSecret = createByteArraySequence([])
+
+ def createSRP(self, srp_A):
+ self.srp_A = srp_A
+ return self
+
+ def createRSA(self, encryptedPreMasterSecret):
+ self.encryptedPreMasterSecret = encryptedPreMasterSecret
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ if self.cipherSuite in CipherSuite.srpSuites + \
+ CipherSuite.srpRsaSuites:
+ self.srp_A = bytesToNumber(p.getVarBytes(2))
+ elif self.cipherSuite in CipherSuite.rsaSuites:
+ if self.version in ((3,1), (3,2)):
+ self.encryptedPreMasterSecret = p.getVarBytes(2)
+ elif self.version == (3,0):
+ self.encryptedPreMasterSecret = \
+ p.getFixBytes(len(p.bytes)-p.index)
+ else:
+ raise AssertionError()
+ else:
+ raise AssertionError()
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.client_key_exchange,
+ trial)
+ if self.cipherSuite in CipherSuite.srpSuites + \
+ CipherSuite.srpRsaSuites:
+ w.addVarSeq(numberToBytes(self.srp_A), 1, 2)
+ elif self.cipherSuite in CipherSuite.rsaSuites:
+ if self.version in ((3,1), (3,2)):
+ w.addVarSeq(self.encryptedPreMasterSecret, 1, 2)
+ elif self.version == (3,0):
+ w.addFixSeq(self.encryptedPreMasterSecret, 1)
+ else:
+ raise AssertionError()
+ else:
+ raise AssertionError()
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class CertificateVerify(HandshakeMsg):
+ def __init__(self):
+ self.contentType = ContentType.handshake
+ self.signature = createByteArraySequence([])
+
+ def create(self, signature):
+ self.signature = signature
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ self.signature = p.getVarBytes(2)
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.certificate_verify,
+ trial)
+ w.addVarSeq(self.signature, 1, 2)
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class ChangeCipherSpec(Msg):
+ def __init__(self):
+ self.contentType = ContentType.change_cipher_spec
+ self.type = 1
+
+ def create(self):
+ self.type = 1
+ return self
+
+ def parse(self, p):
+ p.setLengthCheck(1)
+ self.type = p.get(1)
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = Msg.preWrite(self, trial)
+ w.add(self.type,1)
+ return Msg.postWrite(self, w, trial)
+
+
+class Finished(HandshakeMsg):
+ def __init__(self, version):
+ self.contentType = ContentType.handshake
+ self.version = version
+ self.verify_data = createByteArraySequence([])
+
+ def create(self, verify_data):
+ self.verify_data = verify_data
+ return self
+
+ def parse(self, p):
+ p.startLengthCheck(3)
+ if self.version == (3,0):
+ self.verify_data = p.getFixBytes(36)
+ elif self.version in ((3,1), (3,2)):
+ self.verify_data = p.getFixBytes(12)
+ else:
+ raise AssertionError()
+ p.stopLengthCheck()
+ return self
+
+ def write(self, trial=False):
+ w = HandshakeMsg.preWrite(self, HandshakeType.finished, trial)
+ w.addFixSeq(self.verify_data, 1)
+ return HandshakeMsg.postWrite(self, w, trial)
+
+class ApplicationData(Msg):
+ def __init__(self):
+ self.contentType = ContentType.application_data
+ self.bytes = createByteArraySequence([])
+
+ def create(self, bytes):
+ self.bytes = bytes
+ return self
+
+ def parse(self, p):
+ self.bytes = p.bytes
+ return self
+
+ def write(self):
+ return self.bytes
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/AES.py b/python/gdata/tlslite/utils/AES.py
new file mode 100644
index 0000000..8413f4c
--- /dev/null
+++ b/python/gdata/tlslite/utils/AES.py
@@ -0,0 +1,31 @@
+"""Abstract class for AES."""
+
+class AES:
+ def __init__(self, key, mode, IV, implementation):
+ if len(key) not in (16, 24, 32):
+ raise AssertionError()
+ if mode != 2:
+ raise AssertionError()
+ if len(IV) != 16:
+ raise AssertionError()
+ self.isBlockCipher = True
+ self.block_size = 16
+ self.implementation = implementation
+ if len(key)==16:
+ self.name = "aes128"
+ elif len(key)==24:
+ self.name = "aes192"
+ elif len(key)==32:
+ self.name = "aes256"
+ else:
+ raise AssertionError()
+
+ #CBC-Mode encryption, returns ciphertext
+ #WARNING: *MAY* modify the input as well
+ def encrypt(self, plaintext):
+ assert(len(plaintext) % 16 == 0)
+
+ #CBC-Mode decryption, returns plaintext
+ #WARNING: *MAY* modify the input as well
+ def decrypt(self, ciphertext):
+ assert(len(ciphertext) % 16 == 0)
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/ASN1Parser.py b/python/gdata/tlslite/utils/ASN1Parser.py
new file mode 100644
index 0000000..16b50f2
--- /dev/null
+++ b/python/gdata/tlslite/utils/ASN1Parser.py
@@ -0,0 +1,34 @@
+"""Class for parsing ASN.1"""
+from compat import *
+from codec import *
+
+#Takes a byte array which has a DER TLV field at its head
+class ASN1Parser:
+ def __init__(self, bytes):
+ p = Parser(bytes)
+ p.get(1) #skip Type
+
+ #Get Length
+ self.length = self._getASN1Length(p)
+
+ #Get Value
+ self.value = p.getFixBytes(self.length)
+
+ #Assuming this is a sequence...
+ def getChild(self, which):
+ p = Parser(self.value)
+ for x in range(which+1):
+ markIndex = p.index
+ p.get(1) #skip Type
+ length = self._getASN1Length(p)
+ p.getFixBytes(length)
+ return ASN1Parser(p.bytes[markIndex : p.index])
+
+ #Decode the ASN.1 DER length field
+ def _getASN1Length(self, p):
+ firstLength = p.get(1)
+ if firstLength<=127:
+ return firstLength
+ else:
+ lengthLength = firstLength & 0x7F
+ return p.get(lengthLength)
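+
+#Illustrative usage (not part of the original module): Python_RSAKey walks a
+#DER-encoded RSAPrivateKey with this class, roughly
+#   p = ASN1Parser(derBytes)
+#   version = p.getChild(0).value[0]
+#   n = bytesToNumber(p.getChild(1).value)   #bytesToNumber from cryptomath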
diff --git a/python/gdata/tlslite/utils/Cryptlib_AES.py b/python/gdata/tlslite/utils/Cryptlib_AES.py
new file mode 100644
index 0000000..9e101fc
--- /dev/null
+++ b/python/gdata/tlslite/utils/Cryptlib_AES.py
@@ -0,0 +1,34 @@
+"""Cryptlib AES implementation."""
+
+from cryptomath import *
+from AES import *
+
+if cryptlibpyLoaded:
+
+ def new(key, mode, IV):
+ return Cryptlib_AES(key, mode, IV)
+
+ class Cryptlib_AES(AES):
+
+ def __init__(self, key, mode, IV):
+ AES.__init__(self, key, mode, IV, "cryptlib")
+ self.context = cryptlib_py.cryptCreateContext(cryptlib_py.CRYPT_UNUSED, cryptlib_py.CRYPT_ALGO_AES)
+ cryptlib_py.cryptSetAttribute(self.context, cryptlib_py.CRYPT_CTXINFO_MODE, cryptlib_py.CRYPT_MODE_CBC)
+ cryptlib_py.cryptSetAttribute(self.context, cryptlib_py.CRYPT_CTXINFO_KEYSIZE, len(key))
+ cryptlib_py.cryptSetAttributeString(self.context, cryptlib_py.CRYPT_CTXINFO_KEY, key)
+ cryptlib_py.cryptSetAttributeString(self.context, cryptlib_py.CRYPT_CTXINFO_IV, IV)
+
+ def __del__(self):
+ cryptlib_py.cryptDestroyContext(self.context)
+
+ def encrypt(self, plaintext):
+ AES.encrypt(self, plaintext)
+ bytes = stringToBytes(plaintext)
+ cryptlib_py.cryptEncrypt(self.context, bytes)
+ return bytesToString(bytes)
+
+ def decrypt(self, ciphertext):
+ AES.decrypt(self, ciphertext)
+ bytes = stringToBytes(ciphertext)
+ cryptlib_py.cryptDecrypt(self.context, bytes)
+ return bytesToString(bytes)
diff --git a/python/gdata/tlslite/utils/Cryptlib_RC4.py b/python/gdata/tlslite/utils/Cryptlib_RC4.py
new file mode 100644
index 0000000..7c6d087
--- /dev/null
+++ b/python/gdata/tlslite/utils/Cryptlib_RC4.py
@@ -0,0 +1,28 @@
+"""Cryptlib RC4 implementation."""
+
+from cryptomath import *
+from RC4 import RC4
+
+if cryptlibpyLoaded:
+
+ def new(key):
+ return Cryptlib_RC4(key)
+
+ class Cryptlib_RC4(RC4):
+
+ def __init__(self, key):
+ RC4.__init__(self, key, "cryptlib")
+ self.context = cryptlib_py.cryptCreateContext(cryptlib_py.CRYPT_UNUSED, cryptlib_py.CRYPT_ALGO_RC4)
+ cryptlib_py.cryptSetAttribute(self.context, cryptlib_py.CRYPT_CTXINFO_KEYSIZE, len(key))
+ cryptlib_py.cryptSetAttributeString(self.context, cryptlib_py.CRYPT_CTXINFO_KEY, key)
+
+ def __del__(self):
+ cryptlib_py.cryptDestroyContext(self.context)
+
+ def encrypt(self, plaintext):
+ bytes = stringToBytes(plaintext)
+ cryptlib_py.cryptEncrypt(self.context, bytes)
+ return bytesToString(bytes)
+
+ def decrypt(self, ciphertext):
+ return self.encrypt(ciphertext)
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/Cryptlib_TripleDES.py b/python/gdata/tlslite/utils/Cryptlib_TripleDES.py
new file mode 100644
index 0000000..a4f8155
--- /dev/null
+++ b/python/gdata/tlslite/utils/Cryptlib_TripleDES.py
@@ -0,0 +1,35 @@
+"""Cryptlib 3DES implementation."""
+
+from cryptomath import *
+
+from TripleDES import *
+
+if cryptlibpyLoaded:
+
+ def new(key, mode, IV):
+ return Cryptlib_TripleDES(key, mode, IV)
+
+ class Cryptlib_TripleDES(TripleDES):
+
+ def __init__(self, key, mode, IV):
+ TripleDES.__init__(self, key, mode, IV, "cryptlib")
+ self.context = cryptlib_py.cryptCreateContext(cryptlib_py.CRYPT_UNUSED, cryptlib_py.CRYPT_ALGO_3DES)
+ cryptlib_py.cryptSetAttribute(self.context, cryptlib_py.CRYPT_CTXINFO_MODE, cryptlib_py.CRYPT_MODE_CBC)
+ cryptlib_py.cryptSetAttribute(self.context, cryptlib_py.CRYPT_CTXINFO_KEYSIZE, len(key))
+ cryptlib_py.cryptSetAttributeString(self.context, cryptlib_py.CRYPT_CTXINFO_KEY, key)
+ cryptlib_py.cryptSetAttributeString(self.context, cryptlib_py.CRYPT_CTXINFO_IV, IV)
+
+ def __del__(self):
+ cryptlib_py.cryptDestroyContext(self.context)
+
+ def encrypt(self, plaintext):
+ TripleDES.encrypt(self, plaintext)
+ bytes = stringToBytes(plaintext)
+ cryptlib_py.cryptEncrypt(self.context, bytes)
+ return bytesToString(bytes)
+
+ def decrypt(self, ciphertext):
+ TripleDES.decrypt(self, ciphertext)
+ bytes = stringToBytes(ciphertext)
+ cryptlib_py.cryptDecrypt(self.context, bytes)
+ return bytesToString(bytes)
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/OpenSSL_AES.py b/python/gdata/tlslite/utils/OpenSSL_AES.py
new file mode 100644
index 0000000..e60679b
--- /dev/null
+++ b/python/gdata/tlslite/utils/OpenSSL_AES.py
@@ -0,0 +1,49 @@
+"""OpenSSL/M2Crypto AES implementation."""
+
+from cryptomath import *
+from AES import *
+
+if m2cryptoLoaded:
+
+ def new(key, mode, IV):
+ return OpenSSL_AES(key, mode, IV)
+
+ class OpenSSL_AES(AES):
+
+ def __init__(self, key, mode, IV):
+ AES.__init__(self, key, mode, IV, "openssl")
+ self.key = key
+ self.IV = IV
+
+ def _createContext(self, encrypt):
+ context = m2.cipher_ctx_new()
+ if len(self.key)==16:
+ cipherType = m2.aes_128_cbc()
+ if len(self.key)==24:
+ cipherType = m2.aes_192_cbc()
+ if len(self.key)==32:
+ cipherType = m2.aes_256_cbc()
+ m2.cipher_init(context, cipherType, self.key, self.IV, encrypt)
+ return context
+
+ def encrypt(self, plaintext):
+ AES.encrypt(self, plaintext)
+ context = self._createContext(1)
+ ciphertext = m2.cipher_update(context, plaintext)
+ m2.cipher_ctx_free(context)
+ self.IV = ciphertext[-self.block_size:]
+ return ciphertext
+
+ def decrypt(self, ciphertext):
+ AES.decrypt(self, ciphertext)
+ context = self._createContext(0)
+ #I think M2Crypto has a bug - it fails to decrypt and return the last block passed in.
+ #To work around this, we append sixteen zeros to the string, below:
+ plaintext = m2.cipher_update(context, ciphertext+('\0'*16))
+
+ #If this bug is ever fixed, then plaintext will end up having a garbage
+ #plaintext block on the end. That's okay - the below code will discard it.
+ plaintext = plaintext[:len(ciphertext)]
+ m2.cipher_ctx_free(context)
+ self.IV = ciphertext[-self.block_size:]
+ return plaintext
diff --git a/python/gdata/tlslite/utils/OpenSSL_RC4.py b/python/gdata/tlslite/utils/OpenSSL_RC4.py
new file mode 100644
index 0000000..ac433aa
--- /dev/null
+++ b/python/gdata/tlslite/utils/OpenSSL_RC4.py
@@ -0,0 +1,25 @@
+"""OpenSSL/M2Crypto RC4 implementation."""
+
+from cryptomath import *
+from RC4 import RC4
+
+if m2cryptoLoaded:
+
+ def new(key):
+ return OpenSSL_RC4(key)
+
+ class OpenSSL_RC4(RC4):
+
+ def __init__(self, key):
+ RC4.__init__(self, key, "openssl")
+ self.rc4 = m2.rc4_new()
+ m2.rc4_set_key(self.rc4, key)
+
+ def __del__(self):
+ m2.rc4_free(self.rc4)
+
+ def encrypt(self, plaintext):
+ return m2.rc4_update(self.rc4, plaintext)
+
+ def decrypt(self, ciphertext):
+ return self.encrypt(ciphertext)
diff --git a/python/gdata/tlslite/utils/OpenSSL_RSAKey.py b/python/gdata/tlslite/utils/OpenSSL_RSAKey.py
new file mode 100644
index 0000000..fe1a3cd
--- /dev/null
+++ b/python/gdata/tlslite/utils/OpenSSL_RSAKey.py
@@ -0,0 +1,148 @@
+"""OpenSSL/M2Crypto RSA implementation."""
+
+from cryptomath import *
+
+from RSAKey import *
+from Python_RSAKey import Python_RSAKey
+
+#copied from M2Crypto.util.py, so when we load the local copy of m2
+#we can still use it
+def password_callback(v, prompt1='Enter private key passphrase:',
+ prompt2='Verify passphrase:'):
+ from getpass import getpass
+ while 1:
+ try:
+ p1=getpass(prompt1)
+ if v:
+ p2=getpass(prompt2)
+ if p1==p2:
+ break
+ else:
+ break
+ except KeyboardInterrupt:
+ return None
+ return p1
+
+
+if m2cryptoLoaded:
+ class OpenSSL_RSAKey(RSAKey):
+ def __init__(self, n=0, e=0):
+ self.rsa = None
+ self._hasPrivateKey = False
+ if (n and not e) or (e and not n):
+ raise AssertionError()
+ if n and e:
+ self.rsa = m2.rsa_new()
+ m2.rsa_set_n(self.rsa, numberToMPI(n))
+ m2.rsa_set_e(self.rsa, numberToMPI(e))
+
+ def __del__(self):
+ if self.rsa:
+ m2.rsa_free(self.rsa)
+
+ def __getattr__(self, name):
+ if name == 'e':
+ if not self.rsa:
+ return 0
+ return mpiToNumber(m2.rsa_get_e(self.rsa))
+ elif name == 'n':
+ if not self.rsa:
+ return 0
+ return mpiToNumber(m2.rsa_get_n(self.rsa))
+ else:
+ raise AttributeError
+
+ def hasPrivateKey(self):
+ return self._hasPrivateKey
+
+ def hash(self):
+ return Python_RSAKey(self.n, self.e).hash()
+
+ def _rawPrivateKeyOp(self, m):
+ s = numberToString(m)
+ byteLength = numBytes(self.n)
+ if len(s)== byteLength:
+ pass
+ elif len(s) == byteLength-1:
+ s = '\0' + s
+ else:
+ raise AssertionError()
+ c = stringToNumber(m2.rsa_private_encrypt(self.rsa, s,
+ m2.no_padding))
+ return c
+
+ def _rawPublicKeyOp(self, c):
+ s = numberToString(c)
+ byteLength = numBytes(self.n)
+ if len(s)== byteLength:
+ pass
+ elif len(s) == byteLength-1:
+ s = '\0' + s
+ else:
+ raise AssertionError()
+ m = stringToNumber(m2.rsa_public_decrypt(self.rsa, s,
+ m2.no_padding))
+ return m
+
+ def acceptsPassword(self): return True
+
+ def write(self, password=None):
+ bio = m2.bio_new(m2.bio_s_mem())
+ if self._hasPrivateKey:
+ if password:
+ def f(v): return password
+ m2.rsa_write_key(self.rsa, bio, m2.des_ede_cbc(), f)
+ else:
+ def f(): pass
+ m2.rsa_write_key_no_cipher(self.rsa, bio, f)
+ else:
+ if password:
+ raise AssertionError()
+ m2.rsa_write_pub_key(self.rsa, bio)
+ s = m2.bio_read(bio, m2.bio_ctrl_pending(bio))
+ m2.bio_free(bio)
+ return s
+
+ def writeXMLPublicKey(self, indent=''):
+ return Python_RSAKey(self.n, self.e).write(indent)
+
+ def generate(bits):
+ key = OpenSSL_RSAKey()
+ def f():pass
+ key.rsa = m2.rsa_generate_key(bits, 3, f)
+ key._hasPrivateKey = True
+ return key
+ generate = staticmethod(generate)
+
+ def parse(s, passwordCallback=None):
+ if s.startswith("-----BEGIN "):
+ if passwordCallback==None:
+ callback = password_callback
+ else:
+ def f(v, prompt1=None, prompt2=None):
+ return passwordCallback()
+ callback = f
+ bio = m2.bio_new(m2.bio_s_mem())
+ try:
+ m2.bio_write(bio, s)
+ key = OpenSSL_RSAKey()
+ if s.startswith("-----BEGIN RSA PRIVATE KEY-----"):
+ def f():pass
+ key.rsa = m2.rsa_read_key(bio, callback)
+ if key.rsa == None:
+ raise SyntaxError()
+ key._hasPrivateKey = True
+ elif s.startswith("-----BEGIN PUBLIC KEY-----"):
+ key.rsa = m2.rsa_read_pub_key(bio)
+ if key.rsa == None:
+ raise SyntaxError()
+ key._hasPrivateKey = False
+ else:
+ raise SyntaxError()
+ return key
+ finally:
+ m2.bio_free(bio)
+ else:
+ raise SyntaxError()
+
+ parse = staticmethod(parse)
diff --git a/python/gdata/tlslite/utils/OpenSSL_TripleDES.py b/python/gdata/tlslite/utils/OpenSSL_TripleDES.py
new file mode 100644
index 0000000..f5ba165
--- /dev/null
+++ b/python/gdata/tlslite/utils/OpenSSL_TripleDES.py
@@ -0,0 +1,44 @@
+"""OpenSSL/M2Crypto 3DES implementation."""
+
+from cryptomath import *
+from TripleDES import *
+
+if m2cryptoLoaded:
+
+ def new(key, mode, IV):
+ return OpenSSL_TripleDES(key, mode, IV)
+
+ class OpenSSL_TripleDES(TripleDES):
+
+ def __init__(self, key, mode, IV):
+ TripleDES.__init__(self, key, mode, IV, "openssl")
+ self.key = key
+ self.IV = IV
+
+ def _createContext(self, encrypt):
+ context = m2.cipher_ctx_new()
+ cipherType = m2.des_ede3_cbc()
+ m2.cipher_init(context, cipherType, self.key, self.IV, encrypt)
+ return context
+
+ def encrypt(self, plaintext):
+ TripleDES.encrypt(self, plaintext)
+ context = self._createContext(1)
+ ciphertext = m2.cipher_update(context, plaintext)
+ m2.cipher_ctx_free(context)
+ self.IV = ciphertext[-self.block_size:]
+ return ciphertext
+
+ def decrypt(self, ciphertext):
+ TripleDES.decrypt(self, ciphertext)
+ context = self._createContext(0)
+ #I think M2Crypto has a bug - it fails to decrypt and return the last block passed in.
+ #To work around this, we append sixteen zeros to the string, below:
+ plaintext = m2.cipher_update(context, ciphertext+('\0'*16))
+
+ #If this bug is ever fixed, then plaintext will end up having a garbage
+ #plaintext block on the end. That's okay - the below code will ignore it.
+ plaintext = plaintext[:len(ciphertext)]
+ m2.cipher_ctx_free(context)
+ self.IV = ciphertext[-self.block_size:]
+ return plaintext
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/PyCrypto_AES.py b/python/gdata/tlslite/utils/PyCrypto_AES.py
new file mode 100644
index 0000000..e38b19d
--- /dev/null
+++ b/python/gdata/tlslite/utils/PyCrypto_AES.py
@@ -0,0 +1,22 @@
+"""PyCrypto AES implementation."""
+
+from cryptomath import *
+from AES import *
+
+if pycryptoLoaded:
+ import Crypto.Cipher.AES
+
+ def new(key, mode, IV):
+ return PyCrypto_AES(key, mode, IV)
+
+ class PyCrypto_AES(AES):
+
+ def __init__(self, key, mode, IV):
+ AES.__init__(self, key, mode, IV, "pycrypto")
+ self.context = Crypto.Cipher.AES.new(key, mode, IV)
+
+ def encrypt(self, plaintext):
+ return self.context.encrypt(plaintext)
+
+ def decrypt(self, ciphertext):
+ return self.context.decrypt(ciphertext)
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/PyCrypto_RC4.py b/python/gdata/tlslite/utils/PyCrypto_RC4.py
new file mode 100644
index 0000000..6c6d86a
--- /dev/null
+++ b/python/gdata/tlslite/utils/PyCrypto_RC4.py
@@ -0,0 +1,22 @@
+"""PyCrypto RC4 implementation."""
+
+from cryptomath import *
+from RC4 import *
+
+if pycryptoLoaded:
+ import Crypto.Cipher.ARC4
+
+ def new(key):
+ return PyCrypto_RC4(key)
+
+ class PyCrypto_RC4(RC4):
+
+ def __init__(self, key):
+ RC4.__init__(self, key, "pycrypto")
+ self.context = Crypto.Cipher.ARC4.new(key)
+
+ def encrypt(self, plaintext):
+ return self.context.encrypt(plaintext)
+
+ def decrypt(self, ciphertext):
+ return self.context.decrypt(ciphertext)
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/PyCrypto_RSAKey.py b/python/gdata/tlslite/utils/PyCrypto_RSAKey.py
new file mode 100644
index 0000000..48b5cef
--- /dev/null
+++ b/python/gdata/tlslite/utils/PyCrypto_RSAKey.py
@@ -0,0 +1,61 @@
+"""PyCrypto RSA implementation."""
+
+from cryptomath import *
+
+from RSAKey import *
+from Python_RSAKey import Python_RSAKey
+
+if pycryptoLoaded:
+
+ from Crypto.PublicKey import RSA
+
+ class PyCrypto_RSAKey(RSAKey):
+ def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
+ if not d:
+ self.rsa = RSA.construct( (n, e) )
+ else:
+ self.rsa = RSA.construct( (n, e, d, p, q) )
+
+ def __getattr__(self, name):
+ return getattr(self.rsa, name)
+
+ def hasPrivateKey(self):
+ return self.rsa.has_private()
+
+ def hash(self):
+ return Python_RSAKey(self.n, self.e).hash()
+
+ def _rawPrivateKeyOp(self, m):
+ s = numberToString(m)
+ byteLength = numBytes(self.n)
+ if len(s)== byteLength:
+ pass
+ elif len(s) == byteLength-1:
+ s = '\0' + s
+ else:
+ raise AssertionError()
+ c = stringToNumber(self.rsa.decrypt((s,)))
+ return c
+
+ def _rawPublicKeyOp(self, c):
+ s = numberToString(c)
+ byteLength = numBytes(self.n)
+ if len(s)== byteLength:
+ pass
+ elif len(s) == byteLength-1:
+ s = '\0' + s
+ else:
+ raise AssertionError()
+ m = stringToNumber(self.rsa.encrypt(s, None)[0])
+ return m
+
+ def writeXMLPublicKey(self, indent=''):
+ return Python_RSAKey(self.n, self.e).write(indent)
+
+ def generate(bits):
+ key = PyCrypto_RSAKey()
+ def f(numBytes):
+ return bytesToString(getRandomBytes(numBytes))
+ key.rsa = RSA.generate(bits, f)
+ return key
+ generate = staticmethod(generate)
diff --git a/python/gdata/tlslite/utils/PyCrypto_TripleDES.py b/python/gdata/tlslite/utils/PyCrypto_TripleDES.py
new file mode 100644
index 0000000..8c22bb8
--- /dev/null
+++ b/python/gdata/tlslite/utils/PyCrypto_TripleDES.py
@@ -0,0 +1,22 @@
+"""PyCrypto 3DES implementation."""
+
+from cryptomath import *
+from TripleDES import *
+
+if pycryptoLoaded:
+ import Crypto.Cipher.DES3
+
+ def new(key, mode, IV):
+ return PyCrypto_TripleDES(key, mode, IV)
+
+ class PyCrypto_TripleDES(TripleDES):
+
+ def __init__(self, key, mode, IV):
+ TripleDES.__init__(self, key, mode, IV, "pycrypto")
+ self.context = Crypto.Cipher.DES3.new(key, mode, IV)
+
+ def encrypt(self, plaintext):
+ return self.context.encrypt(plaintext)
+
+ def decrypt(self, ciphertext):
+ return self.context.decrypt(ciphertext)
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/Python_AES.py b/python/gdata/tlslite/utils/Python_AES.py
new file mode 100644
index 0000000..657152f
--- /dev/null
+++ b/python/gdata/tlslite/utils/Python_AES.py
@@ -0,0 +1,68 @@
+"""Pure-Python AES implementation."""
+
+from cryptomath import *
+
+from AES import *
+from rijndael import rijndael
+
+def new(key, mode, IV):
+ return Python_AES(key, mode, IV)
+
+class Python_AES(AES):
+ def __init__(self, key, mode, IV):
+ AES.__init__(self, key, mode, IV, "python")
+ self.rijndael = rijndael(key, 16)
+ self.IV = IV
+
+ def encrypt(self, plaintext):
+ AES.encrypt(self, plaintext)
+
+ plaintextBytes = stringToBytes(plaintext)
+ chainBytes = stringToBytes(self.IV)
+
+ #CBC Mode: For each block...
+ for x in range(len(plaintextBytes)/16):
+
+ #XOR with the chaining block
+ blockBytes = plaintextBytes[x*16 : (x*16)+16]
+ for y in range(16):
+ blockBytes[y] ^= chainBytes[y]
+ blockString = bytesToString(blockBytes)
+
+ #Encrypt it
+ encryptedBytes = stringToBytes(self.rijndael.encrypt(blockString))
+
+ #Overwrite the input with the output
+ for y in range(16):
+ plaintextBytes[(x*16)+y] = encryptedBytes[y]
+
+ #Set the next chaining block
+ chainBytes = encryptedBytes
+
+ self.IV = bytesToString(chainBytes)
+ return bytesToString(plaintextBytes)
+
+ def decrypt(self, ciphertext):
+ AES.decrypt(self, ciphertext)
+
+ ciphertextBytes = stringToBytes(ciphertext)
+ chainBytes = stringToBytes(self.IV)
+
+ #CBC Mode: For each block...
+ for x in range(len(ciphertextBytes)/16):
+
+ #Decrypt it
+ blockBytes = ciphertextBytes[x*16 : (x*16)+16]
+ blockString = bytesToString(blockBytes)
+ decryptedBytes = stringToBytes(self.rijndael.decrypt(blockString))
+
+ #XOR with the chaining block and overwrite the input with output
+ for y in range(16):
+ decryptedBytes[y] ^= chainBytes[y]
+ ciphertextBytes[(x*16)+y] = decryptedBytes[y]
+
+ #Set the next chaining block
+ chainBytes = blockBytes
+
+ self.IV = bytesToString(chainBytes)
+ return bytesToString(ciphertextBytes)
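+
+#Editorial note (not in the original source): this is plain CBC,
+#   C[i] = E(P[i] XOR C[i-1])   and   P[i] = D(C[i]) XOR C[i-1]
+#with C[-1] = IV; self.IV is advanced to the last ciphertext block so that
+#successive encrypt()/decrypt() calls continue the same chain.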
diff --git a/python/gdata/tlslite/utils/Python_RC4.py b/python/gdata/tlslite/utils/Python_RC4.py
new file mode 100644
index 0000000..56ce5fb
--- /dev/null
+++ b/python/gdata/tlslite/utils/Python_RC4.py
@@ -0,0 +1,39 @@
+"""Pure-Python RC4 implementation."""
+
+from RC4 import RC4
+from cryptomath import *
+
+def new(key):
+ return Python_RC4(key)
+
+class Python_RC4(RC4):
+ def __init__(self, key):
+ RC4.__init__(self, key, "python")
+ keyBytes = stringToBytes(key)
+ S = [i for i in range(256)]
+ j = 0
+ for i in range(256):
+ j = (j + S[i] + keyBytes[i % len(keyBytes)]) % 256
+ S[i], S[j] = S[j], S[i]
+
+ self.S = S
+ self.i = 0
+ self.j = 0
+
+ def encrypt(self, plaintext):
+ plaintextBytes = stringToBytes(plaintext)
+ S = self.S
+ i = self.i
+ j = self.j
+ for x in range(len(plaintextBytes)):
+ i = (i + 1) % 256
+ j = (j + S[i]) % 256
+ S[i], S[j] = S[j], S[i]
+ t = (S[i] + S[j]) % 256
+ plaintextBytes[x] ^= S[t]
+ self.i = i
+ self.j = j
+ return bytesToString(plaintextBytes)
+
+ def decrypt(self, ciphertext):
+ return self.encrypt(ciphertext)
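+
+#Illustrative usage (not part of the original module): the cipher is stateful,
+#so use a fresh object (or the peer's matching state) for decryption, e.g.
+#   ct = Python_RC4("secretkey").encrypt("attack at dawn")
+#   pt = Python_RC4("secretkey").decrypt(ct)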
diff --git a/python/gdata/tlslite/utils/Python_RSAKey.py b/python/gdata/tlslite/utils/Python_RSAKey.py
new file mode 100644
index 0000000..2c469b5
--- /dev/null
+++ b/python/gdata/tlslite/utils/Python_RSAKey.py
@@ -0,0 +1,209 @@
+"""Pure-Python RSA implementation."""
+
+from cryptomath import *
+import xmltools
+from ASN1Parser import ASN1Parser
+from RSAKey import *
+
+class Python_RSAKey(RSAKey):
+ def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
+ if (n and not e) or (e and not n):
+ raise AssertionError()
+ self.n = n
+ self.e = e
+ self.d = d
+ self.p = p
+ self.q = q
+ self.dP = dP
+ self.dQ = dQ
+ self.qInv = qInv
+ self.blinder = 0
+ self.unblinder = 0
+
+ def hasPrivateKey(self):
+ return self.d != 0
+
+ def hash(self):
+ s = self.writeXMLPublicKey('\t\t')
+ return hashAndBase64(s.strip())
+
+ def _rawPrivateKeyOp(self, m):
+ #Create blinding values, on the first pass:
+ if not self.blinder:
+ self.unblinder = getRandomNumber(2, self.n)
+ self.blinder = powMod(invMod(self.unblinder, self.n), self.e,
+ self.n)
+
+ #Blind the input
+ m = (m * self.blinder) % self.n
+
+ #Perform the RSA operation
+ c = self._rawPrivateKeyOpHelper(m)
+
+ #Unblind the output
+ c = (c * self.unblinder) % self.n
+
+ #Update blinding values
+ self.blinder = (self.blinder * self.blinder) % self.n
+ self.unblinder = (self.unblinder * self.unblinder) % self.n
+
+ #Return the output
+ return c
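+    #Editorial note (not in the original source): with r = unblinder the code
+    #sets blinder = (r**-1)**e mod n, so the helper sees m * r**-e; raising to
+    #d gives m**d * r**-1, and multiplying by r recovers m**d. Squaring both
+    #values refreshes the pair cheaply for the next call.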
+
+
+ def _rawPrivateKeyOpHelper(self, m):
+ #Non-CRT version
+ #c = powMod(m, self.d, self.n)
+
+ #CRT version (~3x faster)
+ s1 = powMod(m, self.dP, self.p)
+ s2 = powMod(m, self.dQ, self.q)
+ h = ((s1 - s2) * self.qInv) % self.p
+ c = s2 + self.q * h
+ return c
+
+ def _rawPublicKeyOp(self, c):
+ m = powMod(c, self.e, self.n)
+ return m
+
+ def acceptsPassword(self): return False
+
+    def write(self, indent=''):
+        if self.d:
+            s = indent+'<privateKey xmlns="http://trevp.net/rsa">\n'
+        else:
+            s = indent+'<publicKey xmlns="http://trevp.net/rsa">\n'
+        s += indent+'\t<n>%s</n>\n' % numberToBase64(self.n)
+        s += indent+'\t<e>%s</e>\n' % numberToBase64(self.e)
+        if self.d:
+            s += indent+'\t<d>%s</d>\n' % numberToBase64(self.d)
+            s += indent+'\t<p>%s</p>\n' % numberToBase64(self.p)
+            s += indent+'\t<q>%s</q>\n' % numberToBase64(self.q)
+            s += indent+'\t<dP>%s</dP>\n' % numberToBase64(self.dP)
+            s += indent+'\t<dQ>%s</dQ>\n' % numberToBase64(self.dQ)
+            s += indent+'\t<qInv>%s</qInv>\n' % numberToBase64(self.qInv)
+            s += indent+'</privateKey>'
+        else:
+            s += indent+'</publicKey>'
+        #Only add \n if part of a larger structure
+        if indent != '':
+            s += '\n'
+        return s
+
+ def writeXMLPublicKey(self, indent=''):
+ return Python_RSAKey(self.n, self.e).write(indent)
+
+ def generate(bits):
+ key = Python_RSAKey()
+ p = getRandomPrime(bits/2, False)
+ q = getRandomPrime(bits/2, False)
+ t = lcm(p-1, q-1)
+ key.n = p * q
+ key.e = 3L #Needed to be long, for Java
+ key.d = invMod(key.e, t)
+ key.p = p
+ key.q = q
+ key.dP = key.d % (p-1)
+ key.dQ = key.d % (q-1)
+ key.qInv = invMod(q, p)
+ return key
+ generate = staticmethod(generate)
+
+ def parsePEM(s, passwordCallback=None):
+ """Parse a string containing a or , or
+ PEM-encoded key."""
+
+ start = s.find("-----BEGIN PRIVATE KEY-----")
+ if start != -1:
+ end = s.find("-----END PRIVATE KEY-----")
+ if end == -1:
+ raise SyntaxError("Missing PEM Postfix")
+ s = s[start+len("-----BEGIN PRIVATE KEY -----") : end]
+ bytes = base64ToBytes(s)
+ return Python_RSAKey._parsePKCS8(bytes)
+ else:
+ start = s.find("-----BEGIN RSA PRIVATE KEY-----")
+ if start != -1:
+ end = s.find("-----END RSA PRIVATE KEY-----")
+ if end == -1:
+ raise SyntaxError("Missing PEM Postfix")
+ s = s[start+len("-----BEGIN RSA PRIVATE KEY -----") : end]
+ bytes = base64ToBytes(s)
+ return Python_RSAKey._parseSSLeay(bytes)
+ raise SyntaxError("Missing PEM Prefix")
+ parsePEM = staticmethod(parsePEM)
+
+ def parseXML(s):
+ element = xmltools.parseAndStripWhitespace(s)
+ return Python_RSAKey._parseXML(element)
+ parseXML = staticmethod(parseXML)
+
+ def _parsePKCS8(bytes):
+ p = ASN1Parser(bytes)
+
+ version = p.getChild(0).value[0]
+ if version != 0:
+ raise SyntaxError("Unrecognized PKCS8 version")
+
+ rsaOID = p.getChild(1).value
+ if list(rsaOID) != [6, 9, 42, 134, 72, 134, 247, 13, 1, 1, 1, 5, 0]:
+ raise SyntaxError("Unrecognized AlgorithmIdentifier")
+
+ #Get the privateKey
+ privateKeyP = p.getChild(2)
+
+ #Adjust for OCTET STRING encapsulation
+ privateKeyP = ASN1Parser(privateKeyP.value)
+
+ return Python_RSAKey._parseASN1PrivateKey(privateKeyP)
+ _parsePKCS8 = staticmethod(_parsePKCS8)
+
+ def _parseSSLeay(bytes):
+ privateKeyP = ASN1Parser(bytes)
+ return Python_RSAKey._parseASN1PrivateKey(privateKeyP)
+ _parseSSLeay = staticmethod(_parseSSLeay)
+
+ def _parseASN1PrivateKey(privateKeyP):
+ version = privateKeyP.getChild(0).value[0]
+ if version != 0:
+ raise SyntaxError("Unrecognized RSAPrivateKey version")
+ n = bytesToNumber(privateKeyP.getChild(1).value)
+ e = bytesToNumber(privateKeyP.getChild(2).value)
+ d = bytesToNumber(privateKeyP.getChild(3).value)
+ p = bytesToNumber(privateKeyP.getChild(4).value)
+ q = bytesToNumber(privateKeyP.getChild(5).value)
+ dP = bytesToNumber(privateKeyP.getChild(6).value)
+ dQ = bytesToNumber(privateKeyP.getChild(7).value)
+ qInv = bytesToNumber(privateKeyP.getChild(8).value)
+ return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
+ _parseASN1PrivateKey = staticmethod(_parseASN1PrivateKey)
+
+ def _parseXML(element):
+ try:
+ xmltools.checkName(element, "privateKey")
+ except SyntaxError:
+ xmltools.checkName(element, "publicKey")
+
+ #Parse attributes
+ xmltools.getReqAttribute(element, "xmlns", "http://trevp.net/rsa\Z")
+ xmltools.checkNoMoreAttributes(element)
+
+        #Parse public values (<n> and <e>)
+ n = base64ToNumber(xmltools.getText(xmltools.getChild(element, 0, "n"), xmltools.base64RegEx))
+ e = base64ToNumber(xmltools.getText(xmltools.getChild(element, 1, "e"), xmltools.base64RegEx))
+ d = 0
+ p = 0
+ q = 0
+ dP = 0
+ dQ = 0
+ qInv = 0
+ #Parse private values, if present
+ if element.childNodes.length>=3:
+ d = base64ToNumber(xmltools.getText(xmltools.getChild(element, 2, "d"), xmltools.base64RegEx))
+ p = base64ToNumber(xmltools.getText(xmltools.getChild(element, 3, "p"), xmltools.base64RegEx))
+ q = base64ToNumber(xmltools.getText(xmltools.getChild(element, 4, "q"), xmltools.base64RegEx))
+ dP = base64ToNumber(xmltools.getText(xmltools.getChild(element, 5, "dP"), xmltools.base64RegEx))
+ dQ = base64ToNumber(xmltools.getText(xmltools.getChild(element, 6, "dQ"), xmltools.base64RegEx))
+ qInv = base64ToNumber(xmltools.getText(xmltools.getLastChild(element, 7, "qInv"), xmltools.base64RegEx))
+ return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
+ _parseXML = staticmethod(_parseXML)
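+
+
+#Usage sketch (illustrative; real code should go through the factory
+#functions in tlslite.utils.keyfactory, which pick a faster implementation
+#when one is available):
+#
+#    key = Python_RSAKey.generate(1024)
+#    signature = key.hashAndSign("some data")
+#    assert key.hashAndVerify(signature, "some data")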
diff --git a/python/gdata/tlslite/utils/RC4.py b/python/gdata/tlslite/utils/RC4.py
new file mode 100644
index 0000000..5506923
--- /dev/null
+++ b/python/gdata/tlslite/utils/RC4.py
@@ -0,0 +1,17 @@
+"""Abstract class for RC4."""
+
+from compat import * #For False
+
+class RC4:
+ def __init__(self, keyBytes, implementation):
+ if len(keyBytes) < 16 or len(keyBytes) > 256:
+ raise ValueError()
+ self.isBlockCipher = False
+ self.name = "rc4"
+ self.implementation = implementation
+
+ def encrypt(self, plaintext):
+ raise NotImplementedError()
+
+ def decrypt(self, ciphertext):
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/RSAKey.py b/python/gdata/tlslite/utils/RSAKey.py
new file mode 100644
index 0000000..2f5d286
--- /dev/null
+++ b/python/gdata/tlslite/utils/RSAKey.py
@@ -0,0 +1,264 @@
+"""Abstract class for RSA."""
+
+from cryptomath import *
+
+
+class RSAKey:
+ """This is an abstract base class for RSA keys.
+
+ Particular implementations of RSA keys, such as
+ L{OpenSSL_RSAKey.OpenSSL_RSAKey},
+ L{Python_RSAKey.Python_RSAKey}, and
+ L{PyCrypto_RSAKey.PyCrypto_RSAKey},
+ inherit from this.
+
+ To create or parse an RSA key, don't use one of these classes
+ directly. Instead, use the factory functions in
+ L{tlslite.utils.keyfactory}.
+ """
+
+ def __init__(self, n=0, e=0):
+ """Create a new RSA key.
+
+ If n and e are passed in, the new key will be initialized.
+
+ @type n: int
+ @param n: RSA modulus.
+
+ @type e: int
+ @param e: RSA public exponent.
+ """
+ raise NotImplementedError()
+
+ def __len__(self):
+ """Return the length of this key in bits.
+
+ @rtype: int
+ """
+ return numBits(self.n)
+
+ def hasPrivateKey(self):
+ """Return whether or not this key has a private component.
+
+ @rtype: bool
+ """
+ raise NotImplementedError()
+
+ def hash(self):
+ """Return the cryptoID value corresponding to this
+ key.
+
+ @rtype: str
+ """
+ raise NotImplementedError()
+
+ def getSigningAlgorithm(self):
+ """Return the cryptoID sigAlgo value corresponding to this key.
+
+ @rtype: str
+ """
+ return "pkcs1-sha1"
+
+ def hashAndSign(self, bytes):
+ """Hash and sign the passed-in bytes.
+
+ This requires the key to have a private component. It performs
+ a PKCS1-SHA1 signature on the passed-in data.
+
+ @type bytes: str or L{array.array} of unsigned bytes
+ @param bytes: The value which will be hashed and signed.
+
+ @rtype: L{array.array} of unsigned bytes.
+ @return: A PKCS1-SHA1 signature on the passed-in data.
+ """
+ if not isinstance(bytes, type("")):
+ bytes = bytesToString(bytes)
+ hashBytes = stringToBytes(sha1(bytes).digest())
+ prefixedHashBytes = self._addPKCS1SHA1Prefix(hashBytes)
+ sigBytes = self.sign(prefixedHashBytes)
+ return sigBytes
+
+ def hashAndVerify(self, sigBytes, bytes):
+ """Hash and verify the passed-in bytes with the signature.
+
+ This verifies a PKCS1-SHA1 signature on the passed-in data.
+
+ @type sigBytes: L{array.array} of unsigned bytes
+ @param sigBytes: A PKCS1-SHA1 signature.
+
+ @type bytes: str or L{array.array} of unsigned bytes
+ @param bytes: The value which will be hashed and verified.
+
+ @rtype: bool
+ @return: Whether the signature matches the passed-in data.
+ """
+ if not isinstance(bytes, type("")):
+ bytes = bytesToString(bytes)
+ hashBytes = stringToBytes(sha1(bytes).digest())
+ prefixedHashBytes = self._addPKCS1SHA1Prefix(hashBytes)
+ return self.verify(sigBytes, prefixedHashBytes)
+
+ def sign(self, bytes):
+ """Sign the passed-in bytes.
+
+ This requires the key to have a private component. It performs
+ a PKCS1 signature on the passed-in data.
+
+ @type bytes: L{array.array} of unsigned bytes
+ @param bytes: The value which will be signed.
+
+ @rtype: L{array.array} of unsigned bytes.
+ @return: A PKCS1 signature on the passed-in data.
+ """
+ if not self.hasPrivateKey():
+ raise AssertionError()
+ paddedBytes = self._addPKCS1Padding(bytes, 1)
+ m = bytesToNumber(paddedBytes)
+ if m >= self.n:
+ raise ValueError()
+ c = self._rawPrivateKeyOp(m)
+ sigBytes = numberToBytes(c)
+ return sigBytes
+
+ def verify(self, sigBytes, bytes):
+ """Verify the passed-in bytes with the signature.
+
+ This verifies a PKCS1 signature on the passed-in data.
+
+ @type sigBytes: L{array.array} of unsigned bytes
+ @param sigBytes: A PKCS1 signature.
+
+ @type bytes: L{array.array} of unsigned bytes
+ @param bytes: The value which will be verified.
+
+ @rtype: bool
+ @return: Whether the signature matches the passed-in data.
+ """
+ paddedBytes = self._addPKCS1Padding(bytes, 1)
+ c = bytesToNumber(sigBytes)
+ if c >= self.n:
+ return False
+ m = self._rawPublicKeyOp(c)
+ checkBytes = numberToBytes(m)
+ return checkBytes == paddedBytes
+
+ def encrypt(self, bytes):
+ """Encrypt the passed-in bytes.
+
+ This performs PKCS1 encryption of the passed-in data.
+
+ @type bytes: L{array.array} of unsigned bytes
+ @param bytes: The value which will be encrypted.
+
+ @rtype: L{array.array} of unsigned bytes.
+ @return: A PKCS1 encryption of the passed-in data.
+ """
+ paddedBytes = self._addPKCS1Padding(bytes, 2)
+ m = bytesToNumber(paddedBytes)
+ if m >= self.n:
+ raise ValueError()
+ c = self._rawPublicKeyOp(m)
+ encBytes = numberToBytes(c)
+ return encBytes
+
+ def decrypt(self, encBytes):
+ """Decrypt the passed-in bytes.
+
+ This requires the key to have a private component. It performs
+ PKCS1 decryption of the passed-in data.
+
+ @type encBytes: L{array.array} of unsigned bytes
+ @param encBytes: The value which will be decrypted.
+
+ @rtype: L{array.array} of unsigned bytes or None.
+ @return: A PKCS1 decryption of the passed-in data or None if
+ the data is not properly formatted.
+ """
+ if not self.hasPrivateKey():
+ raise AssertionError()
+ c = bytesToNumber(encBytes)
+ if c >= self.n:
+ return None
+ m = self._rawPrivateKeyOp(c)
+ decBytes = numberToBytes(m)
+ if (len(decBytes) != numBytes(self.n)-1): #Check first byte
+ return None
+ if decBytes[0] != 2: #Check second byte
+ return None
+ for x in range(len(decBytes)-1): #Scan through for zero separator
+ if decBytes[x]== 0:
+ break
+ else:
+ return None
+ return decBytes[x+1:] #Return everything after the separator
+
+ def _rawPrivateKeyOp(self, m):
+ raise NotImplementedError()
+
+ def _rawPublicKeyOp(self, c):
+ raise NotImplementedError()
+
+ def acceptsPassword(self):
+ """Return True if the write() method accepts a password for use
+ in encrypting the private key.
+
+ @rtype: bool
+ """
+ raise NotImplementedError()
+
+ def write(self, password=None):
+ """Return a string containing the key.
+
+ @rtype: str
+ @return: A string describing the key, in whichever format (PEM
+ or XML) is native to the implementation.
+ """
+ raise NotImplementedError()
+
+ def writeXMLPublicKey(self, indent=''):
+ """Return a string containing the key.
+
+ @rtype: str
+ @return: A string describing the public key, in XML format.
+ """
+ return Python_RSAKey(self.n, self.e).write(indent)
+
+ def generate(bits):
+ """Generate a new key with the specified bit length.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ """
+ raise NotImplementedError()
+ generate = staticmethod(generate)
+
+
+ # **************************************************************************
+ # Helper Functions for RSA Keys
+ # **************************************************************************
+
+ def _addPKCS1SHA1Prefix(self, bytes):
+ prefixBytes = createByteArraySequence(\
+ [48,33,48,9,6,5,43,14,3,2,26,5,0,4,20])
+ prefixedBytes = prefixBytes + bytes
+ return prefixedBytes
+
+ def _addPKCS1Padding(self, bytes, blockType):
+ padLength = (numBytes(self.n) - (len(bytes)+3))
+ if blockType == 1: #Signature padding
+ pad = [0xFF] * padLength
+ elif blockType == 2: #Encryption padding
+ pad = createByteArraySequence([])
+ while len(pad) < padLength:
+ padBytes = getRandomBytes(padLength * 2)
+ pad = [b for b in padBytes if b != 0]
+ pad = pad[:padLength]
+ else:
+ raise AssertionError()
+
+ #NOTE: To be proper, we should add [0,blockType]. However,
+ #the zero is lost when the returned padding is converted
+ #to a number, so we don't even bother with it. Also,
+ #adding it would cause a misalignment in verify()
+ padding = createByteArraySequence([blockType] + pad + [0])
+ paddedBytes = padding + bytes
+ return paddedBytes
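+
+
+#For reference, _addPKCS1Padding produces the PKCS#1 v1.5 block layout
+#(with the leading 0x00 omitted, per the NOTE above):
+#
+#    blockType (0x01 or 0x02) || padding || 0x00 || data
+#
+#e.g. for a 128-byte modulus and 35 bytes of data, padLength is
+#128 - (35 + 3) = 90 bytes of 0xFF (signing) or random non-zero bytes
+#(encryption).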
diff --git a/python/gdata/tlslite/utils/TripleDES.py b/python/gdata/tlslite/utils/TripleDES.py
new file mode 100644
index 0000000..2db4588
--- /dev/null
+++ b/python/gdata/tlslite/utils/TripleDES.py
@@ -0,0 +1,26 @@
+"""Abstract class for 3DES."""
+
+from compat import * #For True
+
+class TripleDES:
+ def __init__(self, key, mode, IV, implementation):
+ if len(key) != 24:
+ raise ValueError()
+ if mode != 2:
+ raise ValueError()
+ if len(IV) != 8:
+ raise ValueError()
+ self.isBlockCipher = True
+ self.block_size = 8
+ self.implementation = implementation
+ self.name = "3des"
+
+ #CBC-Mode encryption, returns ciphertext
+ #WARNING: *MAY* modify the input as well
+ def encrypt(self, plaintext):
+ assert(len(plaintext) % 8 == 0)
+
+ #CBC-Mode decryption, returns plaintext
+ #WARNING: *MAY* modify the input as well
+ def decrypt(self, ciphertext):
+ assert(len(ciphertext) % 8 == 0)
diff --git a/python/gdata/tlslite/utils/__init__.py b/python/gdata/tlslite/utils/__init__.py
new file mode 100644
index 0000000..e96b4be
--- /dev/null
+++ b/python/gdata/tlslite/utils/__init__.py
@@ -0,0 +1,31 @@
+"""Toolkit for crypto and other stuff."""
+
+__all__ = ["AES",
+ "ASN1Parser",
+ "cipherfactory",
+ "codec",
+ "Cryptlib_AES",
+ "Cryptlib_RC4",
+ "Cryptlib_TripleDES",
+ "cryptomath: cryptomath module",
+ "dateFuncs",
+ "hmac",
+ "JCE_RSAKey",
+ "compat",
+ "keyfactory",
+ "OpenSSL_AES",
+ "OpenSSL_RC4",
+ "OpenSSL_RSAKey",
+ "OpenSSL_TripleDES",
+ "PyCrypto_AES",
+ "PyCrypto_RC4",
+ "PyCrypto_RSAKey",
+ "PyCrypto_TripleDES",
+ "Python_AES",
+ "Python_RC4",
+ "Python_RSAKey",
+ "RC4",
+ "rijndael",
+ "RSAKey",
+ "TripleDES",
+ "xmltools"]
diff --git a/python/gdata/tlslite/utils/cipherfactory.py b/python/gdata/tlslite/utils/cipherfactory.py
new file mode 100644
index 0000000..ccbb6b5
--- /dev/null
+++ b/python/gdata/tlslite/utils/cipherfactory.py
@@ -0,0 +1,111 @@
+"""Factory functions for symmetric cryptography."""
+
+import os
+
+import Python_AES
+import Python_RC4
+
+import cryptomath
+
+tripleDESPresent = False
+
+if cryptomath.m2cryptoLoaded:
+ import OpenSSL_AES
+ import OpenSSL_RC4
+ import OpenSSL_TripleDES
+ tripleDESPresent = True
+
+if cryptomath.cryptlibpyLoaded:
+ import Cryptlib_AES
+ import Cryptlib_RC4
+ import Cryptlib_TripleDES
+ tripleDESPresent = True
+
+if cryptomath.pycryptoLoaded:
+ import PyCrypto_AES
+ import PyCrypto_RC4
+ import PyCrypto_TripleDES
+ tripleDESPresent = True
+
+# **************************************************************************
+# Factory Functions for AES
+# **************************************************************************
+
+def createAES(key, IV, implList=None):
+ """Create a new AES object.
+
+ @type key: str
+ @param key: A 16, 24, or 32 byte string.
+
+ @type IV: str
+ @param IV: A 16 byte string
+
+ @rtype: L{tlslite.utils.AES}
+ @return: An AES object.
+ """
+ if implList == None:
+ implList = ["cryptlib", "openssl", "pycrypto", "python"]
+
+ for impl in implList:
+ if impl == "cryptlib" and cryptomath.cryptlibpyLoaded:
+ return Cryptlib_AES.new(key, 2, IV)
+ elif impl == "openssl" and cryptomath.m2cryptoLoaded:
+ return OpenSSL_AES.new(key, 2, IV)
+ elif impl == "pycrypto" and cryptomath.pycryptoLoaded:
+ return PyCrypto_AES.new(key, 2, IV)
+ elif impl == "python":
+ return Python_AES.new(key, 2, IV)
+ raise NotImplementedError()
+
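+#Usage sketch (illustrative): callers only see the abstract AES interface,
+#whichever implementation the factory picks.
+#
+#    aes = createAES("k" * 16, "\x00" * 16)
+#    ciphertext = aes.encrypt("sixteen byte msg")
+#    plaintext = createAES("k" * 16, "\x00" * 16).decrypt(ciphertext)
+#    assert plaintext == "sixteen byte msg"
+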
+def createRC4(key, IV, implList=None):
+ """Create a new RC4 object.
+
+ @type key: str
+ @param key: A 16 to 32 byte string.
+
+ @type IV: object
+ @param IV: Ignored, whatever it is.
+
+ @rtype: L{tlslite.utils.RC4}
+ @return: An RC4 object.
+ """
+ if implList == None:
+ implList = ["cryptlib", "openssl", "pycrypto", "python"]
+
+ if len(IV) != 0:
+ raise AssertionError()
+ for impl in implList:
+ if impl == "cryptlib" and cryptomath.cryptlibpyLoaded:
+ return Cryptlib_RC4.new(key)
+ elif impl == "openssl" and cryptomath.m2cryptoLoaded:
+ return OpenSSL_RC4.new(key)
+ elif impl == "pycrypto" and cryptomath.pycryptoLoaded:
+ return PyCrypto_RC4.new(key)
+ elif impl == "python":
+ return Python_RC4.new(key)
+ raise NotImplementedError()
+
+#Create a new TripleDES instance
+def createTripleDES(key, IV, implList=None):
+ """Create a new 3DES object.
+
+ @type key: str
+ @param key: A 24 byte string.
+
+ @type IV: str
+ @param IV: An 8 byte string
+
+ @rtype: L{tlslite.utils.TripleDES}
+ @return: A 3DES object.
+ """
+ if implList == None:
+ implList = ["cryptlib", "openssl", "pycrypto"]
+
+ for impl in implList:
+ if impl == "cryptlib" and cryptomath.cryptlibpyLoaded:
+ return Cryptlib_TripleDES.new(key, 2, IV)
+ elif impl == "openssl" and cryptomath.m2cryptoLoaded:
+ return OpenSSL_TripleDES.new(key, 2, IV)
+ elif impl == "pycrypto" and cryptomath.pycryptoLoaded:
+ return PyCrypto_TripleDES.new(key, 2, IV)
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/codec.py b/python/gdata/tlslite/utils/codec.py
new file mode 100644
index 0000000..13022a0
--- /dev/null
+++ b/python/gdata/tlslite/utils/codec.py
@@ -0,0 +1,94 @@
+"""Classes for reading/writing binary data (such as TLS records)."""
+
+from compat import *
+
+class Writer:
+ def __init__(self, length=0):
+ #If length is zero, then this is just a "trial run" to determine length
+ self.index = 0
+ self.bytes = createByteArrayZeros(length)
+
+ def add(self, x, length):
+ if self.bytes:
+ newIndex = self.index+length-1
+ while newIndex >= self.index:
+ self.bytes[newIndex] = x & 0xFF
+ x >>= 8
+ newIndex -= 1
+ self.index += length
+
+ def addFixSeq(self, seq, length):
+ if self.bytes:
+ for e in seq:
+ self.add(e, length)
+ else:
+ self.index += len(seq)*length
+
+ def addVarSeq(self, seq, length, lengthLength):
+ if self.bytes:
+ self.add(len(seq)*length, lengthLength)
+ for e in seq:
+ self.add(e, length)
+ else:
+ self.index += lengthLength + (len(seq)*length)
+
+
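+#Round-trip sketch (illustrative), using the Parser class defined below:
+#
+#    w = Writer(3)
+#    w.add(0x01, 1)      #one 1-byte field
+#    w.add(0xBEEF, 2)    #one 2-byte field
+#    p = Parser(w.bytes)
+#    assert p.get(1) == 0x01 and p.get(2) == 0xBEEF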
+class Parser:
+ def __init__(self, bytes):
+ self.bytes = bytes
+ self.index = 0
+
+ def get(self, length):
+ if self.index + length > len(self.bytes):
+ raise SyntaxError()
+ x = 0
+ for count in range(length):
+ x <<= 8
+ x |= self.bytes[self.index]
+ self.index += 1
+ return x
+
+ def getFixBytes(self, lengthBytes):
+ bytes = self.bytes[self.index : self.index+lengthBytes]
+ self.index += lengthBytes
+ return bytes
+
+ def getVarBytes(self, lengthLength):
+ lengthBytes = self.get(lengthLength)
+ return self.getFixBytes(lengthBytes)
+
+ def getFixList(self, length, lengthList):
+ l = [0] * lengthList
+ for x in range(lengthList):
+ l[x] = self.get(length)
+ return l
+
+ def getVarList(self, length, lengthLength):
+ lengthList = self.get(lengthLength)
+ if lengthList % length != 0:
+ raise SyntaxError()
+ lengthList = int(lengthList/length)
+ l = [0] * lengthList
+ for x in range(lengthList):
+ l[x] = self.get(length)
+ return l
+
+ def startLengthCheck(self, lengthLength):
+ self.lengthCheck = self.get(lengthLength)
+ self.indexCheck = self.index
+
+ def setLengthCheck(self, length):
+ self.lengthCheck = length
+ self.indexCheck = self.index
+
+ def stopLengthCheck(self):
+ if (self.index - self.indexCheck) != self.lengthCheck:
+ raise SyntaxError()
+
+ def atLengthCheck(self):
+ if (self.index - self.indexCheck) < self.lengthCheck:
+ return False
+ elif (self.index - self.indexCheck) == self.lengthCheck:
+ return True
+ else:
+ raise SyntaxError()
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/compat.py b/python/gdata/tlslite/utils/compat.py
new file mode 100644
index 0000000..7d2d925
--- /dev/null
+++ b/python/gdata/tlslite/utils/compat.py
@@ -0,0 +1,140 @@
+"""Miscellaneous functions to mask Python version differences."""
+
+import sys
+import os
+
+if sys.version_info < (2,2):
+ raise AssertionError("Python 2.2 or later required")
+
+if sys.version_info < (2,3):
+
+ def enumerate(collection):
+ return zip(range(len(collection)), collection)
+
+ class Set:
+ def __init__(self, seq=None):
+ self.values = {}
+ if seq:
+ for e in seq:
+ self.values[e] = None
+
+ def add(self, e):
+ self.values[e] = None
+
+ def discard(self, e):
+ if e in self.values.keys():
+ del(self.values[e])
+
+ def union(self, s):
+ ret = Set()
+ for e in self.values.keys():
+ ret.values[e] = None
+ for e in s.values.keys():
+ ret.values[e] = None
+ return ret
+
+ def issubset(self, other):
+ for e in self.values.keys():
+ if e not in other.values.keys():
+ return False
+ return True
+
+ def __nonzero__( self):
+ return len(self.values.keys())
+
+ def __contains__(self, e):
+ return e in self.values.keys()
+
+ def __iter__(self):
+            return iter(self.values.keys())
+
+
+if os.name != "java":
+
+ import array
+ def createByteArraySequence(seq):
+ return array.array('B', seq)
+ def createByteArrayZeros(howMany):
+ return array.array('B', [0] * howMany)
+ def concatArrays(a1, a2):
+ return a1+a2
+
+ def bytesToString(bytes):
+ return bytes.tostring()
+ def stringToBytes(s):
+ bytes = createByteArrayZeros(0)
+ bytes.fromstring(s)
+ return bytes
+
+ import math
+ def numBits(n):
+ if n==0:
+ return 0
+ s = "%x" % n
+ return ((len(s)-1)*4) + \
+ {'0':0, '1':1, '2':2, '3':2,
+ '4':3, '5':3, '6':3, '7':3,
+ '8':4, '9':4, 'a':4, 'b':4,
+ 'c':4, 'd':4, 'e':4, 'f':4,
+ }[s[0]]
+ return int(math.floor(math.log(n, 2))+1)
+
+ BaseException = Exception
+ import sys
+ import traceback
+ def formatExceptionTrace(e):
+ newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
+ return newStr
+
+else:
+ #Jython 2.1 is missing lots of python 2.3 stuff,
+ #which we have to emulate here:
+ #NOTE: JYTHON SUPPORT NO LONGER WORKS, DUE TO USE OF GENERATORS.
+    #THIS CODE IS LEFT IN SO THAT ONCE JYTHON UPDATES TO 2.2, IT HAS A
+ #CHANCE OF WORKING AGAIN.
+
+ import java
+ import jarray
+
+ def createByteArraySequence(seq):
+ if isinstance(seq, type("")): #If it's a string, convert
+ seq = [ord(c) for c in seq]
+ return jarray.array(seq, 'h') #use short instead of bytes, cause bytes are signed
+ def createByteArrayZeros(howMany):
+ return jarray.zeros(howMany, 'h') #use short instead of bytes, cause bytes are signed
+ def concatArrays(a1, a2):
+ l = list(a1)+list(a2)
+ return createByteArraySequence(l)
+
+ #WAY TOO SLOW - MUST BE REPLACED------------
+ def bytesToString(bytes):
+ return "".join([chr(b) for b in bytes])
+
+ def stringToBytes(s):
+ bytes = createByteArrayZeros(len(s))
+ for count, c in enumerate(s):
+ bytes[count] = ord(c)
+ return bytes
+ #WAY TOO SLOW - MUST BE REPLACED------------
+
+ def numBits(n):
+ if n==0:
+ return 0
+ n= 1L * n; #convert to long, if it isn't already
+ return n.__tojava__(java.math.BigInteger).bitLength()
+
+ #Adjust the string to an array of bytes
+ def stringToJavaByteArray(s):
+ bytes = jarray.zeros(len(s), 'b')
+ for count, c in enumerate(s):
+ x = ord(c)
+ if x >= 128: x -= 256
+ bytes[count] = x
+ return bytes
+
+ BaseException = java.lang.Exception
+ import sys
+ import traceback
+ def formatExceptionTrace(e):
+ newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
+ return newStr
\ No newline at end of file
diff --git a/python/gdata/tlslite/utils/cryptomath.py b/python/gdata/tlslite/utils/cryptomath.py
new file mode 100644
index 0000000..92fb774
--- /dev/null
+++ b/python/gdata/tlslite/utils/cryptomath.py
@@ -0,0 +1,404 @@
+"""cryptomath module
+
+This module has basic math/crypto code."""
+
+import os
+import sys
+import math
+import base64
+import binascii
+if sys.version_info[:2] <= (2, 4):
+ from sha import sha as sha1
+else:
+ from hashlib import sha1
+
+from compat import *
+
+
+# **************************************************************************
+# Load Optional Modules
+# **************************************************************************
+
+# Try to load M2Crypto/OpenSSL
+try:
+ from M2Crypto import m2
+ m2cryptoLoaded = True
+
+except ImportError:
+ m2cryptoLoaded = False
+
+
+# Try to load cryptlib
+try:
+ import cryptlib_py
+ try:
+ cryptlib_py.cryptInit()
+ except cryptlib_py.CryptException, e:
+ #If tlslite and cryptoIDlib are both present,
+ #they might each try to re-initialize this,
+ #so we're tolerant of that.
+ if e[0] != cryptlib_py.CRYPT_ERROR_INITED:
+ raise
+ cryptlibpyLoaded = True
+
+except ImportError:
+ cryptlibpyLoaded = False
+
+#Try to load GMPY
+try:
+ import gmpy
+ gmpyLoaded = True
+except ImportError:
+ gmpyLoaded = False
+
+#Try to load pycrypto
+try:
+ import Crypto.Cipher.AES
+ pycryptoLoaded = True
+except ImportError:
+ pycryptoLoaded = False
+
+
+# **************************************************************************
+# PRNG Functions
+# **************************************************************************
+
+# Get os.urandom PRNG
+try:
+ os.urandom(1)
+ def getRandomBytes(howMany):
+ return stringToBytes(os.urandom(howMany))
+ prngName = "os.urandom"
+
+except:
+ # Else get cryptlib PRNG
+ if cryptlibpyLoaded:
+ def getRandomBytes(howMany):
+ randomKey = cryptlib_py.cryptCreateContext(cryptlib_py.CRYPT_UNUSED,
+ cryptlib_py.CRYPT_ALGO_AES)
+ cryptlib_py.cryptSetAttribute(randomKey,
+ cryptlib_py.CRYPT_CTXINFO_MODE,
+ cryptlib_py.CRYPT_MODE_OFB)
+ cryptlib_py.cryptGenerateKey(randomKey)
+ bytes = createByteArrayZeros(howMany)
+ cryptlib_py.cryptEncrypt(randomKey, bytes)
+ return bytes
+ prngName = "cryptlib"
+
+ else:
+ #Else get UNIX /dev/urandom PRNG
+ try:
+ devRandomFile = open("/dev/urandom", "rb")
+ def getRandomBytes(howMany):
+ return stringToBytes(devRandomFile.read(howMany))
+ prngName = "/dev/urandom"
+ except IOError:
+ #Else get Win32 CryptoAPI PRNG
+ try:
+ import win32prng
+ def getRandomBytes(howMany):
+ s = win32prng.getRandomBytes(howMany)
+ if len(s) != howMany:
+ raise AssertionError()
+ return stringToBytes(s)
+ prngName ="CryptoAPI"
+ except ImportError:
+ #Else no PRNG :-(
+ def getRandomBytes(howMany):
+ raise NotImplementedError("No Random Number Generator "\
+ "available.")
+ prngName = "None"
+
+# **************************************************************************
+# Converter Functions
+# **************************************************************************
+
+def bytesToNumber(bytes):
+ total = 0L
+ multiplier = 1L
+ for count in range(len(bytes)-1, -1, -1):
+ byte = bytes[count]
+ total += multiplier * byte
+ multiplier *= 256
+ return total
+
+def numberToBytes(n):
+ howManyBytes = numBytes(n)
+ bytes = createByteArrayZeros(howManyBytes)
+ for count in range(howManyBytes-1, -1, -1):
+ bytes[count] = int(n % 256)
+ n >>= 8
+ return bytes
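+#e.g. numberToBytes(0x0102) yields the 2-byte array [1, 2], and
+#bytesToNumber(createByteArraySequence([1, 2])) == 258 == 0x0102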
+
+def bytesToBase64(bytes):
+ s = bytesToString(bytes)
+ return stringToBase64(s)
+
+def base64ToBytes(s):
+ s = base64ToString(s)
+ return stringToBytes(s)
+
+def numberToBase64(n):
+ bytes = numberToBytes(n)
+ return bytesToBase64(bytes)
+
+def base64ToNumber(s):
+ bytes = base64ToBytes(s)
+ return bytesToNumber(bytes)
+
+def stringToNumber(s):
+ bytes = stringToBytes(s)
+ return bytesToNumber(bytes)
+
+def numberToString(s):
+ bytes = numberToBytes(s)
+ return bytesToString(bytes)
+
+def base64ToString(s):
+ try:
+ return base64.decodestring(s)
+ except binascii.Error, e:
+ raise SyntaxError(e)
+ except binascii.Incomplete, e:
+ raise SyntaxError(e)
+
+def stringToBase64(s):
+ return base64.encodestring(s).replace("\n", "")
+
+def mpiToNumber(mpi): #mpi is an openssl-format bignum string
+ if (ord(mpi[4]) & 0x80) !=0: #Make sure this is a positive number
+ raise AssertionError()
+ bytes = stringToBytes(mpi[4:])
+ return bytesToNumber(bytes)
+
+def numberToMPI(n):
+ bytes = numberToBytes(n)
+ ext = 0
+ #If the high-order bit is going to be set,
+ #add an extra byte of zeros
+ if (numBits(n) & 0x7)==0:
+ ext = 1
+ length = numBytes(n) + ext
+ bytes = concatArrays(createByteArrayZeros(4+ext), bytes)
+ bytes[0] = (length >> 24) & 0xFF
+ bytes[1] = (length >> 16) & 0xFF
+ bytes[2] = (length >> 8) & 0xFF
+ bytes[3] = length & 0xFF
+ return bytesToString(bytes)
+
+
+
+# **************************************************************************
+# Misc. Utility Functions
+# **************************************************************************
+
+def numBytes(n):
+ if n==0:
+ return 0
+ bits = numBits(n)
+ return int(math.ceil(bits / 8.0))
+
+def hashAndBase64(s):
+ return stringToBase64(sha1(s).digest())
+
+def getBase64Nonce(numChars=22): #defaults to a 132-bit nonce
+ bytes = getRandomBytes(numChars)
+ bytesStr = "".join([chr(b) for b in bytes])
+ return stringToBase64(bytesStr)[:numChars]
+
+
+# **************************************************************************
+# Big Number Math
+# **************************************************************************
+
+def getRandomNumber(low, high):
+ if low >= high:
+ raise AssertionError()
+ howManyBits = numBits(high)
+ howManyBytes = numBytes(high)
+ lastBits = howManyBits % 8
+ while 1:
+ bytes = getRandomBytes(howManyBytes)
+ if lastBits:
+ bytes[0] = bytes[0] % (1 << lastBits)
+ n = bytesToNumber(bytes)
+ if n >= low and n < high:
+ return n
+
+def gcd(a,b):
+ a, b = max(a,b), min(a,b)
+ while b:
+ a, b = b, a % b
+ return a
+
+def lcm(a, b):
+ #This will break when python division changes, but we can't use // cause
+ #of Jython
+ return (a * b) / gcd(a, b)
+
+#Returns inverse of a mod b, zero if none
+#Uses Extended Euclidean Algorithm
+def invMod(a, b):
+ c, d = a, b
+ uc, ud = 1, 0
+ while c != 0:
+ #This will break when python division changes, but we can't use //
+ #cause of Jython
+ q = d / c
+ c, d = d-(q*c), c
+ uc, ud = ud - (q * uc), uc
+ if d == 1:
+ return ud % b
+ return 0
+
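+#Worked example: invMod(3, 11) == 4, since 3*4 == 12 == 1 (mod 11), while
+#invMod(4, 8) == 0 because 4 and 8 share a common factor.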
+
+if gmpyLoaded:
+ def powMod(base, power, modulus):
+ base = gmpy.mpz(base)
+ power = gmpy.mpz(power)
+ modulus = gmpy.mpz(modulus)
+ result = pow(base, power, modulus)
+ return long(result)
+
+else:
+ #Copied from Bryan G. Olson's post to comp.lang.python
+ #Does left-to-right instead of pow()'s right-to-left,
+ #thus about 30% faster than the python built-in with small bases
+ def powMod(base, power, modulus):
+ nBitScan = 5
+
+ """ Return base**power mod modulus, using multi bit scanning
+ with nBitScan bits at a time."""
+
+ #TREV - Added support for negative exponents
+ negativeResult = False
+ if (power < 0):
+ power *= -1
+ negativeResult = True
+
+ exp2 = 2**nBitScan
+ mask = exp2 - 1
+
+ # Break power into a list of digits of nBitScan bits.
+ # The list is recursive so easy to read in reverse direction.
+ nibbles = None
+ while power:
+ nibbles = int(power & mask), nibbles
+ power = power >> nBitScan
+
+ # Make a table of powers of base up to 2**nBitScan - 1
+ lowPowers = [1]
+ for i in xrange(1, exp2):
+ lowPowers.append((lowPowers[i-1] * base) % modulus)
+
+ # To exponentiate by the first nibble, look it up in the table
+ nib, nibbles = nibbles
+ prod = lowPowers[nib]
+
+ # For the rest, square nBitScan times, then multiply by
+ # base^nibble
+ while nibbles:
+ nib, nibbles = nibbles
+ for i in xrange(nBitScan):
+ prod = (prod * prod) % modulus
+ if nib: prod = (prod * lowPowers[nib]) % modulus
+
+ #TREV - Added support for negative exponents
+ if negativeResult:
+ prodInv = invMod(prod, modulus)
+ #Check to make sure the inverse is correct
+ if (prod * prodInv) % modulus != 1:
+ raise AssertionError()
+ return prodInv
+ return prod
+
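+#Quick check (holds for either branch above): powMod(2, 10, 1000) == 24,
+#i.e. 1024 mod 1000.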
+
+#Pre-calculate a sieve of the ~100 primes < 1000:
+def makeSieve(n):
+ sieve = range(n)
+ for count in range(2, int(math.sqrt(n))):
+ if sieve[count] == 0:
+ continue
+ x = sieve[count] * 2
+ while x < len(sieve):
+ sieve[x] = 0
+ x += sieve[count]
+ sieve = [x for x in sieve[2:] if x]
+ return sieve
+
+sieve = makeSieve(1000)
+
+def isPrime(n, iterations=5, display=False):
+ #Trial division with sieve
+ for x in sieve:
+ if x >= n: return True
+ if n % x == 0: return False
+ #Passed trial division, proceed to Rabin-Miller
+ #Rabin-Miller implemented per Ferguson & Schneier
+ #Compute s, t for Rabin-Miller
+ if display: print "*",
+ s, t = n-1, 0
+ while s % 2 == 0:
+ s, t = s/2, t+1
+ #Repeat Rabin-Miller x times
+ a = 2 #Use 2 as a base for first iteration speedup, per HAC
+ for count in range(iterations):
+ v = powMod(a, s, n)
+ if v==1:
+ continue
+ i = 0
+ while v != n-1:
+ if i == t-1:
+ return False
+ else:
+ v, i = powMod(v, 2, n), i+1
+ a = getRandomNumber(2, n)
+ return True
+
+def getRandomPrime(bits, display=False):
+ if bits < 10:
+ raise AssertionError()
+ #The 1.5 ensures the 2 MSBs are set
+ #Thus, when used for p,q in RSA, n will have its MSB set
+ #
+ #Since 30 is lcm(2,3,5), we'll set our test numbers to
+ #29 % 30 and keep them there
+ low = (2L ** (bits-1)) * 3/2
+ high = 2L ** bits - 30
+ p = getRandomNumber(low, high)
+ p += 29 - (p % 30)
+ while 1:
+ if display: print ".",
+ p += 30
+ if p >= high:
+ p = getRandomNumber(low, high)
+ p += 29 - (p % 30)
+ if isPrime(p, display=display):
+ return p
+
+#Unused at the moment...
+def getRandomSafePrime(bits, display=False):
+ if bits < 10:
+ raise AssertionError()
+ #The 1.5 ensures the 2 MSBs are set
+ #Thus, when used for p,q in RSA, n will have its MSB set
+ #
+ #Since 30 is lcm(2,3,5), we'll set our test numbers to
+ #29 % 30 and keep them there
+ low = (2 ** (bits-2)) * 3/2
+ high = (2 ** (bits-1)) - 30
+ q = getRandomNumber(low, high)
+ q += 29 - (q % 30)
+ while 1:
+ if display: print ".",
+ q += 30
+ if (q >= high):
+ q = getRandomNumber(low, high)
+ q += 29 - (q % 30)
+ #Ideas from Tom Wu's SRP code
+ #Do trial division on p and q before Rabin-Miller
+ if isPrime(q, 0, display=display):
+ p = (2 * q) + 1
+ if isPrime(p, display=display):
+ if isPrime(q, display=display):
+ return p
diff --git a/python/gdata/tlslite/utils/dateFuncs.py b/python/gdata/tlslite/utils/dateFuncs.py
new file mode 100644
index 0000000..38812eb
--- /dev/null
+++ b/python/gdata/tlslite/utils/dateFuncs.py
@@ -0,0 +1,75 @@
+
+import os
+
+#Functions for manipulating datetime objects
+#CCYY-MM-DDThh:mm:ssZ
+def parseDateClass(s):
+ year, month, day = s.split("-")
+ day, tail = day[:2], day[2:]
+ hour, minute, second = tail[1:].split(":")
+ second = second[:2]
+ year, month, day = int(year), int(month), int(day)
+ hour, minute, second = int(hour), int(minute), int(second)
+ return createDateClass(year, month, day, hour, minute, second)
+
+
+if os.name != "java":
+ from datetime import datetime, timedelta
+
+ #Helper functions for working with a date/time class
+ def createDateClass(year, month, day, hour, minute, second):
+ return datetime(year, month, day, hour, minute, second)
+
+ def printDateClass(d):
+ #Split off fractional seconds, append 'Z'
+ return d.isoformat().split(".")[0]+"Z"
+
+ def getNow():
+ return datetime.utcnow()
+
+ def getHoursFromNow(hours):
+ return datetime.utcnow() + timedelta(hours=hours)
+
+ def getMinutesFromNow(minutes):
+ return datetime.utcnow() + timedelta(minutes=minutes)
+
+ def isDateClassExpired(d):
+ return d < datetime.utcnow()
+
+ def isDateClassBefore(d1, d2):
+ return d1 < d2
+
+else:
+ #Jython 2.1 is missing lots of python 2.3 stuff,
+ #which we have to emulate here:
+ import java
+ import jarray
+
+ def createDateClass(year, month, day, hour, minute, second):
+ c = java.util.Calendar.getInstance()
+ c.setTimeZone(java.util.TimeZone.getTimeZone("UTC"))
+ c.set(year, month-1, day, hour, minute, second)
+ return c
+
+ def printDateClass(d):
+ return "%04d-%02d-%02dT%02d:%02d:%02dZ" % \
+ (d.get(d.YEAR), d.get(d.MONTH)+1, d.get(d.DATE), \
+ d.get(d.HOUR_OF_DAY), d.get(d.MINUTE), d.get(d.SECOND))
+
+ def getNow():
+ c = java.util.Calendar.getInstance()
+ c.setTimeZone(java.util.TimeZone.getTimeZone("UTC"))
+ c.get(c.HOUR) #force refresh?
+ return c
+
+ def getHoursFromNow(hours):
+ d = getNow()
+ d.add(d.HOUR, hours)
+ return d
+
+ def isDateClassExpired(d):
+ n = getNow()
+ return d.before(n)
+
+ def isDateClassBefore(d1, d2):
+ return d1.before(d2)
diff --git a/python/gdata/tlslite/utils/hmac.py b/python/gdata/tlslite/utils/hmac.py
new file mode 100644
index 0000000..fe8feec
--- /dev/null
+++ b/python/gdata/tlslite/utils/hmac.py
@@ -0,0 +1,104 @@
+"""HMAC (Keyed-Hashing for Message Authentication) Python module.
+
+Implements the HMAC algorithm as described by RFC 2104.
+
+(This file is modified from the standard library version to do faster
+copying)
+"""
+
+def _strxor(s1, s2):
+ """Utility method. XOR the two strings s1 and s2 (must have same length).
+ """
+ return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2))
+
+# The size of the digests returned by HMAC depends on the underlying
+# hashing module used.
+digest_size = None
+
+class HMAC:
+ """RFC2104 HMAC class.
+
+ This supports the API for Cryptographic Hash Functions (PEP 247).
+ """
+
+ def __init__(self, key, msg = None, digestmod = None):
+ """Create a new HMAC object.
+
+ key: key for the keyed hash object.
+ msg: Initial input for the hash, if provided.
+ digestmod: A module supporting PEP 247. Defaults to the md5 module.
+ """
+ if digestmod is None:
+ import md5
+ digestmod = md5
+
+ if key == None: #TREVNEW - for faster copying
+ return #TREVNEW
+
+ self.digestmod = digestmod
+ self.outer = digestmod.new()
+ self.inner = digestmod.new()
+ self.digest_size = digestmod.digest_size
+
+ blocksize = 64
+ ipad = "\x36" * blocksize
+ opad = "\x5C" * blocksize
+
+ if len(key) > blocksize:
+ key = digestmod.new(key).digest()
+
+ key = key + chr(0) * (blocksize - len(key))
+ self.outer.update(_strxor(key, opad))
+ self.inner.update(_strxor(key, ipad))
+ if msg is not None:
+ self.update(msg)
+
+## def clear(self):
+## raise NotImplementedError, "clear() method not available in HMAC."
+
+ def update(self, msg):
+ """Update this hashing object with the string msg.
+ """
+ self.inner.update(msg)
+
+ def copy(self):
+ """Return a separate copy of this hashing object.
+
+ An update to this copy won't affect the original object.
+ """
+ other = HMAC(None) #TREVNEW - for faster copying
+ other.digest_size = self.digest_size #TREVNEW
+ other.digestmod = self.digestmod
+ other.inner = self.inner.copy()
+ other.outer = self.outer.copy()
+ return other
+
+ def digest(self):
+ """Return the hash value of this hashing object.
+
+ This returns a string containing 8-bit data. The object is
+ not altered in any way by this function; you can continue
+ updating the object after calling this function.
+ """
+ h = self.outer.copy()
+ h.update(self.inner.digest())
+ return h.digest()
+
+ def hexdigest(self):
+ """Like digest(), but returns a string of hexadecimal digits instead.
+ """
+ return "".join([hex(ord(x))[2:].zfill(2)
+ for x in tuple(self.digest())])
+
+def new(key, msg = None, digestmod = None):
+ """Create a new hashing object and return it.
+
+ key: The starting key for the hash.
+ msg: if available, will immediately be hashed into the object's starting
+ state.
+
+ You can now feed arbitrary strings into the object using its update()
+ method, and can ask for the hash value at any time by calling its digest()
+ method.
+ """
+ return HMAC(key, msg, digestmod)
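+
+
+if __name__ == '__main__':
+    #Self-check (illustrative) against HMAC-MD5 test case 2 from
+    #RFC 2104 / RFC 2202: key "Jefe", data "what do ya want for nothing?".
+    h = new("Jefe", "what do ya want for nothing?")
+    assert h.hexdigest() == "750c783e6ab0b503eaa86e310a5db738"
+    print "HMAC-MD5 test vector OK"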
diff --git a/python/gdata/tlslite/utils/jython_compat.py b/python/gdata/tlslite/utils/jython_compat.py
new file mode 100644
index 0000000..1245183
--- /dev/null
+++ b/python/gdata/tlslite/utils/jython_compat.py
@@ -0,0 +1,195 @@
+"""Miscellaneous functions to mask Python/Jython differences."""
+
+import os
+import sha
+
+if os.name != "java":
+ BaseException = Exception
+
+ from sets import Set
+ import array
+ import math
+
+ def createByteArraySequence(seq):
+ return array.array('B', seq)
+ def createByteArrayZeros(howMany):
+ return array.array('B', [0] * howMany)
+ def concatArrays(a1, a2):
+ return a1+a2
+
+ def bytesToString(bytes):
+ return bytes.tostring()
+
+ def stringToBytes(s):
+ bytes = createByteArrayZeros(0)
+ bytes.fromstring(s)
+ return bytes
+
+ def numBits(n):
+ if n==0:
+ return 0
+ return int(math.floor(math.log(n, 2))+1)
+
+ class CertChainBase: pass
+ class SelfTestBase: pass
+ class ReportFuncBase: pass
+
+ #Helper functions for working with sets (from Python 2.3)
+ def iterSet(set):
+ return iter(set)
+
+ def getListFromSet(set):
+ return list(set)
+
+ #Factory function for getting a SHA1 object
+ def getSHA1(s):
+ return sha.sha(s)
+
+ import sys
+ import traceback
+
+ def formatExceptionTrace(e):
+ newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
+ return newStr
+
+else:
+ #Jython 2.1 is missing lots of python 2.3 stuff,
+ #which we have to emulate here:
+ import java
+ import jarray
+
+ BaseException = java.lang.Exception
+
+ def createByteArraySequence(seq):
+ if isinstance(seq, type("")): #If it's a string, convert
+ seq = [ord(c) for c in seq]
+ return jarray.array(seq, 'h') #use short instead of bytes, cause bytes are signed
+ def createByteArrayZeros(howMany):
+ return jarray.zeros(howMany, 'h') #use short instead of bytes, cause bytes are signed
+ def concatArrays(a1, a2):
+ l = list(a1)+list(a2)
+ return createByteArraySequence(l)
+
+ #WAY TOO SLOW - MUST BE REPLACED------------
+ def bytesToString(bytes):
+ return "".join([chr(b) for b in bytes])
+
+ def stringToBytes(s):
+ bytes = createByteArrayZeros(len(s))
+ for count, c in enumerate(s):
+ bytes[count] = ord(c)
+ return bytes
+ #WAY TOO SLOW - MUST BE REPLACED------------
+
+ def numBits(n):
+ if n==0:
+ return 0
+ n= 1L * n; #convert to long, if it isn't already
+ return n.__tojava__(java.math.BigInteger).bitLength()
+
+ #This properly creates static methods for Jython
+ class staticmethod:
+ def __init__(self, anycallable): self.__call__ = anycallable
+
+ #Properties are not supported for Jython
+ class property:
+ def __init__(self, anycallable): pass
+
+ #True and False have to be specially defined
+ False = 0
+ True = 1
+
+ class StopIteration(Exception): pass
+
+ def enumerate(collection):
+ return zip(range(len(collection)), collection)
+
+ class Set:
+ def __init__(self, seq=None):
+ self.values = {}
+ if seq:
+ for e in seq:
+ self.values[e] = None
+
+ def add(self, e):
+ self.values[e] = None
+
+ def discard(self, e):
+ if e in self.values.keys():
+ del(self.values[e])
+
+ def union(self, s):
+ ret = Set()
+ for e in self.values.keys():
+ ret.values[e] = None
+ for e in s.values.keys():
+ ret.values[e] = None
+ return ret
+
+ def issubset(self, other):
+ for e in self.values.keys():
+ if e not in other.values.keys():
+ return False
+ return True
+
+ def __nonzero__( self):
+ return len(self.values.keys())
+
+ def __contains__(self, e):
+ return e in self.values.keys()
+
+ def iterSet(set):
+ return set.values.keys()
+
+ def getListFromSet(set):
+ return set.values.keys()
+
+ """
+ class JCE_SHA1:
+ def __init__(self, s=None):
+ self.md = java.security.MessageDigest.getInstance("SHA1")
+ if s:
+ self.update(s)
+
+ def update(self, s):
+ self.md.update(s)
+
+ def copy(self):
+ sha1 = JCE_SHA1()
+ sha1.md = self.md.clone()
+ return sha1
+
+ def digest(self):
+ digest = self.md.digest()
+ bytes = jarray.zeros(20, 'h')
+ for count in xrange(20):
+ x = digest[count]
+ if x < 0: x += 256
+ bytes[count] = x
+ return bytes
+ """
+
+ #Factory function for getting a SHA1 object
+ #The JCE_SHA1 class is way too slow...
+ #the sha.sha object we use instead is broken in the jython 2.1
+ #release, and needs to be patched
+ def getSHA1(s):
+ #return JCE_SHA1(s)
+ return sha.sha(s)
+
+
+ #Adjust the string to an array of bytes
+ def stringToJavaByteArray(s):
+ bytes = jarray.zeros(len(s), 'b')
+ for count, c in enumerate(s):
+ x = ord(c)
+ if x >= 128: x -= 256
+ bytes[count] = x
+ return bytes
+
+ import sys
+ import traceback
+
+ def formatExceptionTrace(e):
+ newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
+ return newStr
diff --git a/python/gdata/tlslite/utils/keyfactory.py b/python/gdata/tlslite/utils/keyfactory.py
new file mode 100644
index 0000000..5005af7
--- /dev/null
+++ b/python/gdata/tlslite/utils/keyfactory.py
@@ -0,0 +1,243 @@
+"""Factory functions for asymmetric cryptography.
+@sort: generateRSAKey, parseXMLKey, parsePEMKey, parseAsPublicKey,
+parseAsPrivateKey
+"""
+
+from compat import *
+
+from RSAKey import RSAKey
+from Python_RSAKey import Python_RSAKey
+import cryptomath
+
+if cryptomath.m2cryptoLoaded:
+ from OpenSSL_RSAKey import OpenSSL_RSAKey
+
+if cryptomath.pycryptoLoaded:
+ from PyCrypto_RSAKey import PyCrypto_RSAKey
+
+# **************************************************************************
+# Factory Functions for RSA Keys
+# **************************************************************************
+
+def generateRSAKey(bits, implementations=["openssl", "python"]):
+ """Generate an RSA key with the specified bit length.
+
+ @type bits: int
+ @param bits: Desired bit length of the new key's modulus.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ @return: A new RSA private key.
+ """
+ for implementation in implementations:
+ if implementation == "openssl" and cryptomath.m2cryptoLoaded:
+ return OpenSSL_RSAKey.generate(bits)
+ elif implementation == "python":
+ return Python_RSAKey.generate(bits)
+ raise ValueError("No acceptable implementations")
+
+def parseXMLKey(s, private=False, public=False, implementations=["python"]):
+ """Parse an XML-format key.
+
+ The XML format used here is specific to tlslite and cryptoIDlib. The
+ format can store the public component of a key, or the public and
+ private components. For example::
+
+
+        <publicKey xmlns="http://trevp.net/rsa">
+            <n>4a5yzB8oGNlHo866CAspAC47M4Fvx58zwK8pou...</n>
+            <e>Aw==</e>
+        </publicKey>
+
+        <privateKey xmlns="http://trevp.net/rsa">
+            <n>4a5yzB8oGNlHo866CAspAC47M4Fvx58zwK8pou...</n>
+            <e>Aw==</e>
+            <d>JZ0TIgUxWXmL8KJ0VqyG1V0J3ern9pqIoB0xmy...</d>
+        </privateKey>
+
+ @type s: str
+ @param s: A string containing an XML public or private key.
+
+ @type private: bool
+ @param private: If True, a L{SyntaxError} will be raised if the private
+ key component is not present.
+
+ @type public: bool
+ @param public: If True, the private key component (if present) will be
+ discarded, so this function will always return a public key.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ @return: An RSA key.
+
+ @raise SyntaxError: If the key is not properly formatted.
+ """
+ for implementation in implementations:
+ if implementation == "python":
+ key = Python_RSAKey.parseXML(s)
+ break
+ else:
+ raise ValueError("No acceptable implementations")
+
+ return _parseKeyHelper(key, private, public)
+
+#Parse as an OpenSSL or Python key
+def parsePEMKey(s, private=False, public=False, passwordCallback=None,
+ implementations=["openssl", "python"]):
+ """Parse a PEM-format key.
+
+ The PEM format is used by OpenSSL and other tools. The
+ format is typically used to store both the public and private
+ components of a key. For example::
+
+ -----BEGIN RSA PRIVATE KEY-----
+ MIICXQIBAAKBgQDYscuoMzsGmW0pAYsmyHltxB2TdwHS0dImfjCMfaSDkfLdZY5+
+ dOWORVns9etWnr194mSGA1F0Pls/VJW8+cX9+3vtJV8zSdANPYUoQf0TP7VlJxkH
+ dSRkUbEoz5bAAs/+970uos7n7iXQIni+3erUTdYEk2iWnMBjTljfgbK/dQIDAQAB
+ AoGAJHoJZk75aKr7DSQNYIHuruOMdv5ZeDuJvKERWxTrVJqE32/xBKh42/IgqRrc
+ esBN9ZregRCd7YtxoL+EVUNWaJNVx2mNmezEznrc9zhcYUrgeaVdFO2yBF1889zO
+ gCOVwrO8uDgeyj6IKa25H6c1N13ih/o7ZzEgWbGG+ylU1yECQQDv4ZSJ4EjSh/Fl
+ aHdz3wbBa/HKGTjC8iRy476Cyg2Fm8MZUe9Yy3udOrb5ZnS2MTpIXt5AF3h2TfYV
+ VoFXIorjAkEA50FcJmzT8sNMrPaV8vn+9W2Lu4U7C+K/O2g1iXMaZms5PC5zV5aV
+ CKXZWUX1fq2RaOzlbQrpgiolhXpeh8FjxwJBAOFHzSQfSsTNfttp3KUpU0LbiVvv
+ i+spVSnA0O4rq79KpVNmK44Mq67hsW1P11QzrzTAQ6GVaUBRv0YS061td1kCQHnP
+ wtN2tboFR6lABkJDjxoGRvlSt4SOPr7zKGgrWjeiuTZLHXSAnCY+/hr5L9Q3ZwXG
+ 6x6iBdgLjVIe4BZQNtcCQQDXGv/gWinCNTN3MPWfTW/RGzuMYVmyBFais0/VrgdH
+ h1dLpztmpQqfyH/zrBXQ9qL/zR4ojS6XYneO/U18WpEe
+ -----END RSA PRIVATE KEY-----
+
+ To generate a key like this with OpenSSL, run::
+
+ openssl genrsa 2048 > key.pem
+
+ This format also supports password-encrypted private keys. TLS
+ Lite can only handle password-encrypted private keys when OpenSSL
+ and M2Crypto are installed. In this case, passwordCallback will be
+ invoked to query the user for the password.
+
+ @type s: str
+ @param s: A string containing a PEM-encoded public or private key.
+
+ @type private: bool
+ @param private: If True, a L{SyntaxError} will be raised if the
+ private key component is not present.
+
+ @type public: bool
+ @param public: If True, the private key component (if present) will
+ be discarded, so this function will always return a public key.
+
+ @type passwordCallback: callable
+ @param passwordCallback: This function will be called, with no
+ arguments, if the PEM-encoded private key is password-encrypted.
+ The callback should return the password string. If the password is
+ incorrect, SyntaxError will be raised. If no callback is passed
+ and the key is password-encrypted, a prompt will be displayed at
+ the console.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ @return: An RSA key.
+
+ @raise SyntaxError: If the key is not properly formatted.
+ """
+ for implementation in implementations:
+ if implementation == "openssl" and cryptomath.m2cryptoLoaded:
+ key = OpenSSL_RSAKey.parse(s, passwordCallback)
+ break
+ elif implementation == "python":
+ key = Python_RSAKey.parsePEM(s)
+ break
+ else:
+ raise ValueError("No acceptable implementations")
+
+ return _parseKeyHelper(key, private, public)
+
+
+def _parseKeyHelper(key, private, public):
+ if private:
+ if not key.hasPrivateKey():
+ raise SyntaxError("Not a private key!")
+
+ if public:
+ return _createPublicKey(key)
+
+ if private:
+ if hasattr(key, "d"):
+ return _createPrivateKey(key)
+ else:
+ return key
+
+ return key
+
+def parseAsPublicKey(s):
+ """Parse an XML or PEM-formatted public key.
+
+ @type s: str
+ @param s: A string containing an XML or PEM-encoded public or private key.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ @return: An RSA public key.
+
+ @raise SyntaxError: If the key is not properly formatted.
+ """
+ try:
+ return parsePEMKey(s, public=True)
+ except:
+ return parseXMLKey(s, public=True)
+
+def parsePrivateKey(s):
+ """Parse an XML or PEM-formatted private key.
+
+ @type s: str
+ @param s: A string containing an XML or PEM-encoded private key.
+
+ @rtype: L{tlslite.utils.RSAKey.RSAKey}
+ @return: An RSA private key.
+
+ @raise SyntaxError: If the key is not properly formatted.
+ """
+ try:
+ return parsePEMKey(s, private=True)
+ except:
+ return parseXMLKey(s, private=True)
+
+def _createPublicKey(key):
+ """
+ Create a new public key. Discard any private component,
+ and return the most efficient key possible.
+ """
+ if not isinstance(key, RSAKey):
+ raise AssertionError()
+ return _createPublicRSAKey(key.n, key.e)
+
+def _createPrivateKey(key):
+ """
+ Create a new private key. Return the most efficient key possible.
+ """
+ if not isinstance(key, RSAKey):
+ raise AssertionError()
+ if not key.hasPrivateKey():
+ raise AssertionError()
+ return _createPrivateRSAKey(key.n, key.e, key.d, key.p, key.q, key.dP,
+ key.dQ, key.qInv)
+
+def _createPublicRSAKey(n, e, implementations = ["openssl", "pycrypto",
+ "python"]):
+ for implementation in implementations:
+ if implementation == "openssl" and cryptomath.m2cryptoLoaded:
+ return OpenSSL_RSAKey(n, e)
+ elif implementation == "pycrypto" and cryptomath.pycryptoLoaded:
+ return PyCrypto_RSAKey(n, e)
+ elif implementation == "python":
+ return Python_RSAKey(n, e)
+ raise ValueError("No acceptable implementations")
+
+def _createPrivateRSAKey(n, e, d, p, q, dP, dQ, qInv,
+ implementations = ["pycrypto", "python"]):
+ for implementation in implementations:
+ if implementation == "pycrypto" and cryptomath.pycryptoLoaded:
+ return PyCrypto_RSAKey(n, e, d, p, q, dP, dQ, qInv)
+ elif implementation == "python":
+ return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
+ raise ValueError("No acceptable implementations")
diff --git a/python/gdata/tlslite/utils/rijndael.py b/python/gdata/tlslite/utils/rijndael.py
new file mode 100644
index 0000000..cb2f547
--- /dev/null
+++ b/python/gdata/tlslite/utils/rijndael.py
@@ -0,0 +1,392 @@
+"""
+A pure python (slow) implementation of rijndael with a decent interface
+
+To include -
+
+from rijndael import rijndael
+
+To do a key setup -
+
+r = rijndael(key, block_size = 16)
+
+key must be a string of length 16, 24, or 32
+blocksize must be 16, 24, or 32. Default is 16
+
+To use -
+
+ciphertext = r.encrypt(plaintext)
+plaintext = r.decrypt(ciphertext)
+
+If any strings are of the wrong length a ValueError is thrown
+"""
+
+# ported from the Java reference code by Bram Cohen, bram@gawth.com, April 2001
+# this code is public domain, unless someone makes
+# an intellectual property claim against the reference
+# code, in which case it can be made public domain by
+# deleting all the comments and renaming all the variables
+
+import copy
+import string
+
+
+
+#-----------------------
+#TREV - ADDED BECAUSE THERE'S WARNINGS ABOUT INT OVERFLOW BEHAVIOR CHANGING IN
+#2.4.....
+import os
+if os.name != "java":
+ import exceptions
+ if hasattr(exceptions, "FutureWarning"):
+ import warnings
+ warnings.filterwarnings("ignore", category=FutureWarning, append=1)
+#-----------------------
+
+
+
+shifts = [[[0, 0], [1, 3], [2, 2], [3, 1]],
+ [[0, 0], [1, 5], [2, 4], [3, 3]],
+ [[0, 0], [1, 7], [3, 5], [4, 4]]]
+
+# [keysize][block_size]
+num_rounds = {16: {16: 10, 24: 12, 32: 14}, 24: {16: 12, 24: 12, 32: 14}, 32: {16: 14, 24: 14, 32: 14}}
+
+A = [[1, 1, 1, 1, 1, 0, 0, 0],
+ [0, 1, 1, 1, 1, 1, 0, 0],
+ [0, 0, 1, 1, 1, 1, 1, 0],
+ [0, 0, 0, 1, 1, 1, 1, 1],
+ [1, 0, 0, 0, 1, 1, 1, 1],
+ [1, 1, 0, 0, 0, 1, 1, 1],
+ [1, 1, 1, 0, 0, 0, 1, 1],
+ [1, 1, 1, 1, 0, 0, 0, 1]]
+
+# produce log and alog tables, needed for multiplying in the
+# field GF(2^m) (generator = 3)
+alog = [1]
+for i in xrange(255):
+ j = (alog[-1] << 1) ^ alog[-1]
+ if j & 0x100 != 0:
+ j ^= 0x11B
+ alog.append(j)
+
+log = [0] * 256
+for i in xrange(1, 255):
+ log[alog[i]] = i
+
+# multiply two elements of GF(2^m)
+def mul(a, b):
+ if a == 0 or b == 0:
+ return 0
+ return alog[(log[a & 0xFF] + log[b & 0xFF]) % 255]
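+# e.g. mul(2, 3) == 6, and mul(0x57, 0x83) == 0xC1, the worked example
+# from the AES specification (FIPS-197, section 4.2)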
+
+# substitution box based on F^{-1}(x)
+box = [[0] * 8 for i in xrange(256)]
+box[1][7] = 1
+for i in xrange(2, 256):
+ j = alog[255 - log[i]]
+ for t in xrange(8):
+ box[i][t] = (j >> (7 - t)) & 0x01
+
+B = [0, 1, 1, 0, 0, 0, 1, 1]
+
+# affine transform: box[i] <- B + A*box[i]
+cox = [[0] * 8 for i in xrange(256)]
+for i in xrange(256):
+ for t in xrange(8):
+ cox[i][t] = B[t]
+ for j in xrange(8):
+ cox[i][t] ^= A[t][j] * box[i][j]
+
+# S-boxes and inverse S-boxes
+S = [0] * 256
+Si = [0] * 256
+for i in xrange(256):
+ S[i] = cox[i][0] << 7
+ for t in xrange(1, 8):
+ S[i] ^= cox[i][t] << (7-t)
+ Si[S[i] & 0xFF] = i
+
+# T-boxes
+G = [[2, 1, 1, 3],
+ [3, 2, 1, 1],
+ [1, 3, 2, 1],
+ [1, 1, 3, 2]]
+
+AA = [[0] * 8 for i in xrange(4)]
+
+for i in xrange(4):
+ for j in xrange(4):
+ AA[i][j] = G[i][j]
+ AA[i][i+4] = 1
+
+for i in xrange(4):
+ pivot = AA[i][i]
+ if pivot == 0:
+ t = i + 1
+ while AA[t][i] == 0 and t < 4:
+ t += 1
+ assert t != 4, 'G matrix must be invertible'
+ for j in xrange(8):
+ AA[i][j], AA[t][j] = AA[t][j], AA[i][j]
+ pivot = AA[i][i]
+ for j in xrange(8):
+ if AA[i][j] != 0:
+ AA[i][j] = alog[(255 + log[AA[i][j] & 0xFF] - log[pivot & 0xFF]) % 255]
+ for t in xrange(4):
+ if i != t:
+ for j in xrange(i+1, 8):
+ AA[t][j] ^= mul(AA[i][j], AA[t][i])
+ AA[t][i] = 0
+
+iG = [[0] * 4 for i in xrange(4)]
+
+for i in xrange(4):
+ for j in xrange(4):
+ iG[i][j] = AA[i][j + 4]
+
+def mul4(a, bs):
+ if a == 0:
+ return 0
+ r = 0
+ for b in bs:
+ r <<= 8
+ if b != 0:
+ r = r | mul(a, b)
+ return r
+
+T1 = []
+T2 = []
+T3 = []
+T4 = []
+T5 = []
+T6 = []
+T7 = []
+T8 = []
+U1 = []
+U2 = []
+U3 = []
+U4 = []
+
+for t in xrange(256):
+ s = S[t]
+ T1.append(mul4(s, G[0]))
+ T2.append(mul4(s, G[1]))
+ T3.append(mul4(s, G[2]))
+ T4.append(mul4(s, G[3]))
+
+ s = Si[t]
+ T5.append(mul4(s, iG[0]))
+ T6.append(mul4(s, iG[1]))
+ T7.append(mul4(s, iG[2]))
+ T8.append(mul4(s, iG[3]))
+
+ U1.append(mul4(t, iG[0]))
+ U2.append(mul4(t, iG[1]))
+ U3.append(mul4(t, iG[2]))
+ U4.append(mul4(t, iG[3]))
+
+# round constants
+rcon = [1]
+r = 1
+for t in xrange(1, 30):
+ r = mul(2, r)
+ rcon.append(r)
+
+del A
+del AA
+del pivot
+del B
+del G
+del box
+del log
+del alog
+del i
+del j
+del r
+del s
+del t
+del mul
+del mul4
+del cox
+del iG
+
+class rijndael:
+ def __init__(self, key, block_size = 16):
+ if block_size != 16 and block_size != 24 and block_size != 32:
+ raise ValueError('Invalid block size: ' + str(block_size))
+ if len(key) != 16 and len(key) != 24 and len(key) != 32:
+ raise ValueError('Invalid key size: ' + str(len(key)))
+ self.block_size = block_size
+
+ ROUNDS = num_rounds[len(key)][block_size]
+ BC = block_size / 4
+ # encryption round keys
+ Ke = [[0] * BC for i in xrange(ROUNDS + 1)]
+ # decryption round keys
+ Kd = [[0] * BC for i in xrange(ROUNDS + 1)]
+ ROUND_KEY_COUNT = (ROUNDS + 1) * BC
+ KC = len(key) / 4
+
+ # copy user material bytes into temporary ints
+ tk = []
+ for i in xrange(0, KC):
+ tk.append((ord(key[i * 4]) << 24) | (ord(key[i * 4 + 1]) << 16) |
+ (ord(key[i * 4 + 2]) << 8) | ord(key[i * 4 + 3]))
+
+ # copy values into round key arrays
+ t = 0
+ j = 0
+ while j < KC and t < ROUND_KEY_COUNT:
+ Ke[t / BC][t % BC] = tk[j]
+ Kd[ROUNDS - (t / BC)][t % BC] = tk[j]
+ j += 1
+ t += 1
+ tt = 0
+ rconpointer = 0
+ while t < ROUND_KEY_COUNT:
+ # extrapolate using phi (the round key evolution function)
+ tt = tk[KC - 1]
+ tk[0] ^= (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^ \
+ (S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^ \
+ (S[ tt & 0xFF] & 0xFF) << 8 ^ \
+ (S[(tt >> 24) & 0xFF] & 0xFF) ^ \
+ (rcon[rconpointer] & 0xFF) << 24
+ rconpointer += 1
+ if KC != 8:
+ for i in xrange(1, KC):
+ tk[i] ^= tk[i-1]
+ else:
+ for i in xrange(1, KC / 2):
+ tk[i] ^= tk[i-1]
+ tt = tk[KC / 2 - 1]
+ tk[KC / 2] ^= (S[ tt & 0xFF] & 0xFF) ^ \
+ (S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^ \
+ (S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^ \
+ (S[(tt >> 24) & 0xFF] & 0xFF) << 24
+ for i in xrange(KC / 2 + 1, KC):
+ tk[i] ^= tk[i-1]
+ # copy values into round key arrays
+ j = 0
+ while j < KC and t < ROUND_KEY_COUNT:
+ Ke[t / BC][t % BC] = tk[j]
+ Kd[ROUNDS - (t / BC)][t % BC] = tk[j]
+ j += 1
+ t += 1
+ # inverse MixColumn where needed
+ for r in xrange(1, ROUNDS):
+ for j in xrange(BC):
+ tt = Kd[r][j]
+ Kd[r][j] = U1[(tt >> 24) & 0xFF] ^ \
+ U2[(tt >> 16) & 0xFF] ^ \
+ U3[(tt >> 8) & 0xFF] ^ \
+ U4[ tt & 0xFF]
+ self.Ke = Ke
+ self.Kd = Kd
+
+ def encrypt(self, plaintext):
+ if len(plaintext) != self.block_size:
+ raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(plaintext)))
+ Ke = self.Ke
+
+ BC = self.block_size / 4
+ ROUNDS = len(Ke) - 1
+ if BC == 4:
+ SC = 0
+ elif BC == 6:
+ SC = 1
+ else:
+ SC = 2
+ s1 = shifts[SC][1][0]
+ s2 = shifts[SC][2][0]
+ s3 = shifts[SC][3][0]
+ a = [0] * BC
+ # temporary work array
+ t = []
+ # plaintext to ints + key
+ for i in xrange(BC):
+ t.append((ord(plaintext[i * 4 ]) << 24 |
+ ord(plaintext[i * 4 + 1]) << 16 |
+ ord(plaintext[i * 4 + 2]) << 8 |
+ ord(plaintext[i * 4 + 3]) ) ^ Ke[0][i])
+ # apply round transforms
+ for r in xrange(1, ROUNDS):
+ for i in xrange(BC):
+ a[i] = (T1[(t[ i ] >> 24) & 0xFF] ^
+ T2[(t[(i + s1) % BC] >> 16) & 0xFF] ^
+ T3[(t[(i + s2) % BC] >> 8) & 0xFF] ^
+ T4[ t[(i + s3) % BC] & 0xFF] ) ^ Ke[r][i]
+ t = copy.copy(a)
+ # last round is special
+ result = []
+ for i in xrange(BC):
+ tt = Ke[ROUNDS][i]
+ result.append((S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
+ result.append((S[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
+ result.append((S[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
+ result.append((S[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
+ return string.join(map(chr, result), '')
+
+ def decrypt(self, ciphertext):
+ if len(ciphertext) != self.block_size:
+            raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(ciphertext)))
+ Kd = self.Kd
+
+ BC = self.block_size / 4
+ ROUNDS = len(Kd) - 1
+ if BC == 4:
+ SC = 0
+ elif BC == 6:
+ SC = 1
+ else:
+ SC = 2
+ s1 = shifts[SC][1][1]
+ s2 = shifts[SC][2][1]
+ s3 = shifts[SC][3][1]
+ a = [0] * BC
+ # temporary work array
+ t = [0] * BC
+ # ciphertext to ints + key
+ for i in xrange(BC):
+ t[i] = (ord(ciphertext[i * 4 ]) << 24 |
+ ord(ciphertext[i * 4 + 1]) << 16 |
+ ord(ciphertext[i * 4 + 2]) << 8 |
+ ord(ciphertext[i * 4 + 3]) ) ^ Kd[0][i]
+ # apply round transforms
+ for r in xrange(1, ROUNDS):
+ for i in xrange(BC):
+ a[i] = (T5[(t[ i ] >> 24) & 0xFF] ^
+ T6[(t[(i + s1) % BC] >> 16) & 0xFF] ^
+ T7[(t[(i + s2) % BC] >> 8) & 0xFF] ^
+ T8[ t[(i + s3) % BC] & 0xFF] ) ^ Kd[r][i]
+ t = copy.copy(a)
+ # last round is special
+ result = []
+ for i in xrange(BC):
+ tt = Kd[ROUNDS][i]
+ result.append((Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
+ result.append((Si[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
+ result.append((Si[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
+ result.append((Si[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
+ return string.join(map(chr, result), '')
+
+def encrypt(key, block):
+ return rijndael(key, len(block)).encrypt(block)
+
+def decrypt(key, block):
+ return rijndael(key, len(block)).decrypt(block)
+
+def test():
+ def t(kl, bl):
+ b = 'b' * bl
+ r = rijndael('a' * kl, bl)
+ assert r.decrypt(r.encrypt(b)) == b
+ t(16, 16)
+ t(16, 24)
+ t(16, 32)
+ t(24, 16)
+ t(24, 24)
+ t(24, 32)
+ t(32, 16)
+ t(32, 24)
+ t(32, 32)
+
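The class above operates on exactly one raw block and does no padding or chaining. A minimal round-trip sketch, assuming the module lands at gdata.tlslite.utils.rijndael as the surrounding paths suggest, and using standard AES-128 parameters:

    from gdata.tlslite.utils import rijndael

    key = 'k' * 16            # 128-bit key
    block = 'p' * 16          # exactly one 16-byte block; no padding is done here
    r = rijndael.rijndael(key, block_size=16)
    ciphertext = r.encrypt(block)
    assert r.decrypt(ciphertext) == block

    # The module-level helpers build a fresh cipher object per call:
    assert rijndael.decrypt(key, rijndael.encrypt(key, block)) == block

Key schedule setup happens once in the constructor, so reusing one rijndael instance for many blocks is cheaper than calling the module-level encrypt/decrypt helpers repeatedly.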
diff --git a/python/gdata/tlslite/utils/xmltools.py b/python/gdata/tlslite/utils/xmltools.py
new file mode 100644
index 0000000..c1e8c4d
--- /dev/null
+++ b/python/gdata/tlslite/utils/xmltools.py
@@ -0,0 +1,202 @@
+"""Helper functions for XML.
+
+This module has misc. helper functions for working with XML DOM nodes."""
+
+from compat import *
+import os
+import re
+
+if os.name == "java":
+ # Only for Jython
+ from javax.xml.parsers import *
+ import java
+
+ builder = DocumentBuilderFactory.newInstance().newDocumentBuilder()
+
+ def parseDocument(s):
+ stream = java.io.ByteArrayInputStream(java.lang.String(s).getBytes())
+ return builder.parse(stream)
+else:
+ from xml.dom import minidom
+ from xml.sax import saxutils
+
+ def parseDocument(s):
+ return minidom.parseString(s)
+
+def parseAndStripWhitespace(s):
+ try:
+ element = parseDocument(s).documentElement
+ except BaseException, e:
+ raise SyntaxError(str(e))
+ stripWhitespace(element)
+ return element
+
+#Goes through a DOM tree and removes whitespace besides child elements,
+#as long as this whitespace is correctly tab-ified
+def stripWhitespace(element, tab=0):
+ element.normalize()
+
+ lastSpacer = "\n" + ("\t"*tab)
+ spacer = lastSpacer + "\t"
+
+    #Zero children aren't allowed (i.e. <empty/>)
+ #This makes writing output simpler, and matches Canonical XML
+ if element.childNodes.length==0: #DON'T DO len(element.childNodes) - doesn't work in Jython
+ raise SyntaxError("Empty XML elements not allowed")
+
+    #If there's a single child, it must be text content
+ if element.childNodes.length==1:
+ if element.firstChild.nodeType == element.firstChild.TEXT_NODE:
+ #If it's an empty element, remove
+ if element.firstChild.data == lastSpacer:
+ element.removeChild(element.firstChild)
+ return
+ #If not text content, give an error
+ elif element.firstChild.nodeType == element.firstChild.ELEMENT_NODE:
+ raise SyntaxError("Bad whitespace under '%s'" % element.tagName)
+ else:
+ raise SyntaxError("Unexpected node type in XML document")
+
+    #Otherwise there are multiple child elements
+ child = element.firstChild
+ while child:
+ if child.nodeType == child.ELEMENT_NODE:
+ stripWhitespace(child, tab+1)
+ child = child.nextSibling
+ elif child.nodeType == child.TEXT_NODE:
+ if child == element.lastChild:
+ if child.data != lastSpacer:
+ raise SyntaxError("Bad whitespace under '%s'" % element.tagName)
+ elif child.data != spacer:
+ raise SyntaxError("Bad whitespace under '%s'" % element.tagName)
+ next = child.nextSibling
+ element.removeChild(child)
+ child = next
+ else:
+ raise SyntaxError("Unexpected node type in XML document")
+
+
+def checkName(element, name):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Missing element: '%s'" % name)
+
+ if name == None:
+ return
+
+ if element.tagName != name:
+ raise SyntaxError("Wrong element name: should be '%s', is '%s'" % (name, element.tagName))
+
+def getChild(element, index, name=None):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Wrong node type in getChild()")
+
+ child = element.childNodes.item(index)
+ if child == None:
+ raise SyntaxError("Missing child: '%s'" % name)
+ checkName(child, name)
+ return child
+
+def getChildIter(element, index):
+ class ChildIter:
+ def __init__(self, element, index):
+ self.element = element
+ self.index = index
+
+ def next(self):
+ if self.index < len(self.element.childNodes):
+ retVal = self.element.childNodes.item(self.index)
+ self.index += 1
+ else:
+ retVal = None
+ return retVal
+
+ def checkEnd(self):
+ if self.index != len(self.element.childNodes):
+ raise SyntaxError("Too many elements under: '%s'" % self.element.tagName)
+ return ChildIter(element, index)
+
+def getChildOrNone(element, index):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Wrong node type in getChild()")
+ child = element.childNodes.item(index)
+ return child
+
+def getLastChild(element, index, name=None):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Wrong node type in getLastChild()")
+
+ child = element.childNodes.item(index)
+ if child == None:
+ raise SyntaxError("Missing child: '%s'" % name)
+ if child != element.lastChild:
+ raise SyntaxError("Too many elements under: '%s'" % element.tagName)
+ checkName(child, name)
+ return child
+
+#Regular expressions for syntax-checking attribute and element content
+nsRegEx = "http://trevp.net/cryptoID\Z"
+cryptoIDRegEx = "([a-km-z3-9]{5}\.){3}[a-km-z3-9]{5}\Z"
+urlRegEx = "http(s)?://.{1,100}\Z"
+sha1Base64RegEx = "[A-Za-z0-9+/]{27}=\Z"
+base64RegEx = "[A-Za-z0-9+/]+={0,4}\Z"
+certsListRegEx = "(0)?(1)?(2)?(3)?(4)?(5)?(6)?(7)?(8)?(9)?\Z"
+keyRegEx = "[A-Z]\Z"
+keysListRegEx = "(A)?(B)?(C)?(D)?(E)?(F)?(G)?(H)?(I)?(J)?(K)?(L)?(M)?(N)?(O)?(P)?(Q)?(R)?(S)?(T)?(U)?(V)?(W)?(X)?(Y)?(Z)?\Z"
+dateTimeRegEx = "\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ\Z"
+shortStringRegEx = ".{1,100}\Z"
+exprRegEx = "[a-zA-Z0-9 ,()]{1,200}\Z"
+notAfterDeltaRegEx = "0|([1-9][0-9]{0,8})\Z" #A number from 0 to (1 billion)-1
+booleanRegEx = "(true)|(false)"
+
+def getReqAttribute(element, attrName, regEx=""):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Wrong node type in getReqAttribute()")
+
+ value = element.getAttribute(attrName)
+ if not value:
+ raise SyntaxError("Missing Attribute: " + attrName)
+ if not re.match(regEx, value):
+ raise SyntaxError("Bad Attribute Value for '%s': '%s' " % (attrName, value))
+ element.removeAttribute(attrName)
+ return str(value) #de-unicode it; this is needed for bsddb, for example
+
+def getAttribute(element, attrName, regEx=""):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Wrong node type in getAttribute()")
+
+ value = element.getAttribute(attrName)
+ if value:
+ if not re.match(regEx, value):
+ raise SyntaxError("Bad Attribute Value for '%s': '%s' " % (attrName, value))
+ element.removeAttribute(attrName)
+ return str(value) #de-unicode it; this is needed for bsddb, for example
+
+def checkNoMoreAttributes(element):
+ if element.nodeType != element.ELEMENT_NODE:
+ raise SyntaxError("Wrong node type in checkNoMoreAttributes()")
+
+ if element.attributes.length!=0:
+ raise SyntaxError("Extra attributes on '%s'" % element.tagName)
+
+def getText(element, regEx=""):
+ textNode = element.firstChild
+ if textNode == None:
+ raise SyntaxError("Empty element '%s'" % element.tagName)
+ if textNode.nodeType != textNode.TEXT_NODE:
+ raise SyntaxError("Non-text node: '%s'" % element.tagName)
+ if not re.match(regEx, textNode.data):
+ raise SyntaxError("Bad Text Value for '%s': '%s' " % (element.tagName, textNode.data))
+ return str(textNode.data) #de-unicode it; this is needed for bsddb, for example
+
+#Function for adding tabs to a string
+def indent(s, steps, ch="\t"):
+    tabs = ch*steps
+    indented = tabs + s.replace("\n", "\n"+tabs)
+    if s[-1] == "\n":
+        #Don't leave trailing indentation after the final newline
+        indented = indented[ : -len(tabs)]
+    return indented
+
+def escape(s):
+ return saxutils.escape(s)
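A short sketch of how these helpers fit together when reading a strictly tab-ified document; the XML snippet, attribute name, and regular expression are illustrative and not taken from this module:

    from gdata.tlslite.utils import xmltools

    doc = '<root type="A">\n\t<child>hello</child>\n</root>'
    root = xmltools.parseAndStripWhitespace(doc)   # whitespace nodes removed
    child = xmltools.getChild(root, 0, "child")
    print xmltools.getText(child, xmltools.shortStringRegEx)   # hello
    print xmltools.getReqAttribute(root, "type", "[A-Z]\Z")    # A

Note that getReqAttribute and getAttribute remove the attribute they read, so checkNoMoreAttributes can be called afterwards to reject any unexpected attributes.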
diff --git a/python/gdata/urlfetch.py b/python/gdata/urlfetch.py
new file mode 100644
index 0000000..890b257
--- /dev/null
+++ b/python/gdata/urlfetch.py
@@ -0,0 +1,247 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Provides HTTP functions for gdata.service to use on Google App Engine
+
+AppEngineHttpClient: Provides an HTTP request method which uses App Engine's
+ urlfetch API. Set the http_client member of a GDataService object to an
+ instance of an AppEngineHttpClient to allow the gdata library to run on
+ Google App Engine.
+
+run_on_appengine: Function which will modify an existing GDataService object
+ to allow it to run on App Engine. It works by creating a new instance of
+ the AppEngineHttpClient and replacing the GDataService object's
+ http_client.
+
+HttpRequest: Function that wraps google.appengine.api.urlfetch.Fetch in a
+ common interface which is used by gdata.service.GDataService. In other
+ words, this module can be used as the gdata service request handler so
+ that all HTTP requests will be performed by the hosting Google App Engine
+ server.
+"""
+
+
+__author__ = 'api.jscudder (Jeff Scudder)'
+
+
+import StringIO
+import atom.service
+import atom.http_interface
+from google.appengine.api import urlfetch
+
+
+def run_on_appengine(gdata_service):
+ """Modifies a GDataService object to allow it to run on App Engine.
+
+ Args:
+ gdata_service: An instance of AtomService, GDataService, or any
+ of their subclasses which has an http_client member.
+ """
+ gdata_service.http_client = AppEngineHttpClient()
+
+
+class AppEngineHttpClient(atom.http_interface.GenericHttpClient):
+ def __init__(self, headers=None):
+ self.debug = False
+ self.headers = headers or {}
+
+ def request(self, operation, url, data=None, headers=None):
+ """Performs an HTTP call to the server, supports GET, POST, PUT, and
+ DELETE.
+
+    Usage example, perform an HTTP GET on http://www.google.com/:
+      import gdata.urlfetch
+      client = gdata.urlfetch.AppEngineHttpClient()
+      http_response = client.request('GET', 'http://www.google.com/')
+
+ Args:
+ operation: str The HTTP operation to be performed. This is usually one
+ of 'GET', 'POST', 'PUT', or 'DELETE'
+ data: filestream, list of parts, or other object which can be converted
+ to a string. Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, this method will
+ read a chunk of 100K bytes at a time and send them.
+ If the data is a list of parts to be sent, each part will be
+ evaluated and sent.
+ url: The full URL to which the request should be sent. Can be a string
+ or atom.url.Url.
+ headers: dict of strings. HTTP headers which should be sent
+ in the request.
+ """
+ all_headers = self.headers.copy()
+ if headers:
+ all_headers.update(headers)
+
+ # Construct the full payload.
+ # Assume that data is None or a string.
+ data_str = data
+ if data:
+ if isinstance(data, list):
+ # If data is a list of different objects, convert them all to strings
+ # and join them together.
+        converted_parts = [_ConvertDataPart(x) for x in data]
+        data_str = ''.join(converted_parts)
+      else:
+        data_str = _ConvertDataPart(data)
+
+ # If the list of headers does not include a Content-Length, attempt to
+ # calculate it based on the data object.
+ if data and 'Content-Length' not in all_headers:
+ all_headers['Content-Length'] = len(data_str)
+
+ # Set the content type to the default value if none was set.
+ if 'Content-Type' not in all_headers:
+ all_headers['Content-Type'] = 'application/atom+xml'
+
+ # Lookup the urlfetch operation which corresponds to the desired HTTP verb.
+ if operation == 'GET':
+ method = urlfetch.GET
+ elif operation == 'POST':
+ method = urlfetch.POST
+ elif operation == 'PUT':
+ method = urlfetch.PUT
+ elif operation == 'DELETE':
+ method = urlfetch.DELETE
+ else:
+ method = None
+ return HttpResponse(urlfetch.Fetch(url=str(url), payload=data_str,
+ method=method, headers=all_headers))
+
+
+def HttpRequest(service, operation, data, uri, extra_headers=None,
+ url_params=None, escape_params=True, content_type='application/atom+xml'):
+ """Performs an HTTP call to the server, supports GET, POST, PUT, and DELETE.
+
+ This function is deprecated, use AppEngineHttpClient.request instead.
+
+ To use this module with gdata.service, you can set this module to be the
+ http_request_handler so that HTTP requests use Google App Engine's urlfetch.
+ import gdata.service
+ import gdata.urlfetch
+ gdata.service.http_request_handler = gdata.urlfetch
+
+ Args:
+ service: atom.AtomService object which contains some of the parameters
+ needed to make the request. The following members are used to
+ construct the HTTP call: server (str), additional_headers (dict),
+ port (int), and ssl (bool).
+ operation: str The HTTP operation to be performed. This is usually one of
+ 'GET', 'POST', 'PUT', or 'DELETE'
+ data: filestream, list of parts, or other object which can be
+ converted to a string.
+      Should be set to None when performing a GET or DELETE.
+ If data is a file-like object which can be read, this method will read
+ a chunk of 100K bytes at a time and send them.
+ If the data is a list of parts to be sent, each part will be evaluated
+ and sent.
+ uri: The beginning of the URL to which the request should be sent.
+ Examples: '/', '/base/feeds/snippets',
+ '/m8/feeds/contacts/default/base'
+ extra_headers: dict of strings. HTTP headers which should be sent
+ in the request. These headers are in addition to those stored in
+ service.additional_headers.
+ url_params: dict of strings. Key value pairs to be added to the URL as
+ URL parameters. For example {'foo':'bar', 'test':'param'} will
+ become ?foo=bar&test=param.
+ escape_params: bool default True. If true, the keys and values in
+ url_params will be URL escaped when the form is constructed
+ (Special characters converted to %XX form.)
+ content_type: str The MIME type for the data being sent. Defaults to
+ 'application/atom+xml', this is only used if data is set.
+ """
+ full_uri = atom.service.BuildUri(uri, url_params, escape_params)
+ (server, port, ssl, partial_uri) = atom.service.ProcessUrl(service, full_uri)
+ # Construct the full URL for the request.
+ if ssl:
+ full_url = 'https://%s%s' % (server, partial_uri)
+ else:
+ full_url = 'http://%s%s' % (server, partial_uri)
+
+ # Construct the full payload.
+ # Assume that data is None or a string.
+ data_str = data
+ if data:
+ if isinstance(data, list):
+ # If data is a list of different objects, convert them all to strings
+ # and join them together.
+      converted_parts = [_ConvertDataPart(x) for x in data]
+      data_str = ''.join(converted_parts)
+    else:
+      data_str = _ConvertDataPart(data)
+
+ # Construct the dictionary of HTTP headers.
+ headers = {}
+ if isinstance(service.additional_headers, dict):
+ headers = service.additional_headers.copy()
+ if isinstance(extra_headers, dict):
+ for header, value in extra_headers.iteritems():
+ headers[header] = value
+ # Add the content type header (we don't need to calculate content length,
+ # since urlfetch.Fetch will calculate for us).
+ if content_type:
+ headers['Content-Type'] = content_type
+
+ # Lookup the urlfetch operation which corresponds to the desired HTTP verb.
+ if operation == 'GET':
+ method = urlfetch.GET
+ elif operation == 'POST':
+ method = urlfetch.POST
+ elif operation == 'PUT':
+ method = urlfetch.PUT
+ elif operation == 'DELETE':
+ method = urlfetch.DELETE
+ else:
+ method = None
+ return HttpResponse(urlfetch.Fetch(url=full_url, payload=data_str,
+ method=method, headers=headers))
+
+
+# Note: a single leading underscore is used so this helper can be called from
+# inside AppEngineHttpClient.request; a double-underscore name would be
+# mangled to _AppEngineHttpClient__ConvertDataPart there and fail to resolve.
+def _ConvertDataPart(data):
+ if not data or isinstance(data, str):
+ return data
+ elif hasattr(data, 'read'):
+ # data is a file like object, so read it completely.
+ return data.read()
+ # The data object was not a file.
+ # Try to convert to a string and send the data.
+ return str(data)
+
+
+class HttpResponse(object):
+ """Translates a urlfetch resoinse to look like an hhtplib resoinse.
+
+ Used to allow the resoinse from HttpRequest to be usable by gdata.service
+ methods.
+ """
+
+ def __init__(self, urlfetch_response):
+ self.body = StringIO.StringIO(urlfetch_response.content)
+ self.headers = urlfetch_response.headers
+ self.status = urlfetch_response.status_code
+ self.reason = ''
+
+ def read(self, length=None):
+ if not length:
+ return self.body.read()
+ else:
+ return self.body.read(length)
+
+ def getheader(self, name):
+ if not self.headers.has_key(name):
+ return self.headers[name.lower()]
+ return self.headers[name]
+
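A sketch of the two ways this module can be wired into gdata.service; the bare GDataService instance here is a placeholder for whichever service subclass an application actually uses:

    import gdata.service
    import gdata.urlfetch

    client = gdata.service.GDataService()

    # Preferred: swap in the urlfetch-backed HTTP client.
    gdata.urlfetch.run_on_appengine(client)

    # Deprecated alternative described in HttpRequest's docstring:
    gdata.service.http_request_handler = gdata.urlfetch

Both approaches route outgoing requests through google.appengine.api.urlfetch, which is necessary because raw sockets are not available on App Engine.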
diff --git a/python/gdata/webmastertools/__init__.py b/python/gdata/webmastertools/__init__.py
new file mode 100644
index 0000000..7ad20ff
--- /dev/null
+++ b/python/gdata/webmastertools/__init__.py
@@ -0,0 +1,544 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Yu-Jie Lin
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains extensions to Atom objects used with Google Webmaster Tools."""
+
+
+__author__ = 'livibetter (Yu-Jie Lin)'
+
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import atom
+import gdata
+
+
+# XML namespaces which are often used in Google Webmaster Tools entities.
+GWEBMASTERTOOLS_NAMESPACE = 'http://schemas.google.com/webmasters/tools/2007'
+GWEBMASTERTOOLS_TEMPLATE = '{http://schemas.google.com/webmasters/tools/2007}%s'
+
+
+class Indexed(atom.AtomBase):
+ _tag = 'indexed'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def IndexedFromString(xml_string):
+ return atom.CreateClassFromXMLString(Indexed, xml_string)
+
+
+class Crawled(atom.Date):
+ _tag = 'crawled'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def CrawledFromString(xml_string):
+ return atom.CreateClassFromXMLString(Crawled, xml_string)
+
+
+class GeoLocation(atom.AtomBase):
+ _tag = 'geolocation'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def GeoLocationFromString(xml_string):
+ return atom.CreateClassFromXMLString(GeoLocation, xml_string)
+
+
+class PreferredDomain(atom.AtomBase):
+ _tag = 'preferred-domain'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def PreferredDomainFromString(xml_string):
+ return atom.CreateClassFromXMLString(PreferredDomain, xml_string)
+
+
+class CrawlRate(atom.AtomBase):
+ _tag = 'crawl-rate'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def CrawlRateFromString(xml_string):
+ return atom.CreateClassFromXMLString(CrawlRate, xml_string)
+
+
+class EnhancedImageSearch(atom.AtomBase):
+ _tag = 'enhanced-image-search'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def EnhancedImageSearchFromString(xml_string):
+ return atom.CreateClassFromXMLString(EnhancedImageSearch, xml_string)
+
+
+class Verified(atom.AtomBase):
+ _tag = 'verified'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def VerifiedFromString(xml_string):
+ return atom.CreateClassFromXMLString(Verified, xml_string)
+
+
+class VerificationMethodMeta(atom.AtomBase):
+ _tag = 'meta'
+ _namespace = atom.ATOM_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['name'] = 'name'
+ _attributes['content'] = 'content'
+
+ def __init__(self, text=None, name=None, content=None,
+ extension_elements=None, extension_attributes=None):
+ self.text = text
+ self.name = name
+ self.content = content
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def VerificationMethodMetaFromString(xml_string):
+ return atom.CreateClassFromXMLString(VerificationMethodMeta, xml_string)
+
+
+class VerificationMethod(atom.AtomBase):
+ _tag = 'verification-method'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+ _children = atom.Text._children.copy()
+ _attributes = atom.Text._attributes.copy()
+ _children['{%s}meta' % atom.ATOM_NAMESPACE] = (
+ 'meta', VerificationMethodMeta)
+ _attributes['in-use'] = 'in_use'
+ _attributes['type'] = 'type'
+
+ def __init__(self, text=None, in_use=None, meta=None, type=None,
+ extension_elements=None, extension_attributes=None):
+ self.text = text
+ self.in_use = in_use
+ self.meta = meta
+ self.type = type
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def VerificationMethodFromString(xml_string):
+ return atom.CreateClassFromXMLString(VerificationMethod, xml_string)
+
+
+class MarkupLanguage(atom.AtomBase):
+ _tag = 'markup-language'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def MarkupLanguageFromString(xml_string):
+ return atom.CreateClassFromXMLString(MarkupLanguage, xml_string)
+
+
+class SitemapMobile(atom.AtomBase):
+ _tag = 'sitemap-mobile'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}markup-language' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'markup_language', [MarkupLanguage])
+
+ def __init__(self, markup_language=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ self.markup_language = markup_language or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SitemapMobileFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapMobile, xml_string)
+
+
+class SitemapMobileMarkupLanguage(atom.AtomBase):
+ _tag = 'sitemap-mobile-markup-language'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def SitemapMobileMarkupLanguageFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapMobileMarkupLanguage, xml_string)
+
+
+class PublicationLabel(atom.AtomBase):
+ _tag = 'publication-label'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def PublicationLabelFromString(xml_string):
+ return atom.CreateClassFromXMLString(PublicationLabel, xml_string)
+
+
+class SitemapNews(atom.AtomBase):
+ _tag = 'sitemap-news'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}publication-label' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'publication_label', [PublicationLabel])
+
+ def __init__(self, publication_label=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ self.publication_label = publication_label or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SitemapNewsFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapNews, xml_string)
+
+
+class SitemapNewsPublicationLabel(atom.AtomBase):
+ _tag = 'sitemap-news-publication-label'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def SitemapNewsPublicationLabelFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapNewsPublicationLabel, xml_string)
+
+
+class SitemapLastDownloaded(atom.Date):
+ _tag = 'sitemap-last-downloaded'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def SitemapLastDownloadedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapLastDownloaded, xml_string)
+
+
+class SitemapType(atom.AtomBase):
+ _tag = 'sitemap-type'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def SitemapTypeFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapType, xml_string)
+
+
+class SitemapStatus(atom.AtomBase):
+ _tag = 'sitemap-status'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def SitemapStatusFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapStatus, xml_string)
+
+
+class SitemapUrlCount(atom.AtomBase):
+ _tag = 'sitemap-url-count'
+ _namespace = GWEBMASTERTOOLS_NAMESPACE
+
+
+def SitemapUrlCountFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapUrlCount, xml_string)
+
+
+class LinkFinder(atom.LinkFinder):
+ """An "interface" providing methods to find link elements
+
+ SitesEntry elements often contain multiple links which differ in the rel
+ attribute or content type. Often, developers are interested in a specific
+ type of link so this class provides methods to find specific classes of links.
+
+ This class is used as a mixin in SitesEntry.
+ """
+
+ def GetSelfLink(self):
+ """Find the first link with rel set to 'self'
+
+ Returns:
+ An atom.Link or none if none of the links had rel equal to 'self'
+ """
+
+ for a_link in self.link:
+ if a_link.rel == 'self':
+ return a_link
+ return None
+
+ def GetEditLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'edit':
+ return a_link
+ return None
+
+ def GetPostLink(self):
+ """Get a link containing the POST target URL.
+
+ The POST target URL is used to insert new entries.
+
+ Returns:
+ A link object with a rel matching the POST type.
+ """
+ for a_link in self.link:
+ if a_link.rel == 'http://schemas.google.com/g/2005#post':
+ return a_link
+ return None
+
+ def GetFeedLink(self):
+ for a_link in self.link:
+ if a_link.rel == 'http://schemas.google.com/g/2005#feed':
+ return a_link
+ return None
+
+
+class SitesEntry(atom.Entry, LinkFinder):
+ """A Google Webmaster Tools meta Entry flavor of an Atom Entry """
+
+ _tag = atom.Entry._tag
+ _namespace = atom.Entry._namespace
+ _children = atom.Entry._children.copy()
+ _attributes = atom.Entry._attributes.copy()
+ _children['{%s}entryLink' % gdata.GDATA_NAMESPACE] = (
+ 'entry_link', [gdata.EntryLink])
+ _children['{%s}indexed' % GWEBMASTERTOOLS_NAMESPACE] = ('indexed', Indexed)
+ _children['{%s}crawled' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'crawled', Crawled)
+ _children['{%s}geolocation' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'geolocation', GeoLocation)
+ _children['{%s}preferred-domain' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'preferred_domain', PreferredDomain)
+ _children['{%s}crawl-rate' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'crawl_rate', CrawlRate)
+ _children['{%s}enhanced-image-search' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'enhanced_image_search', EnhancedImageSearch)
+ _children['{%s}verified' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'verified', Verified)
+ _children['{%s}verification-method' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'verification_method', [VerificationMethod])
+
+ def __GetId(self):
+ return self.__id
+
+ # This method was created to strip the unwanted whitespace from the id's
+ # text node.
+ def __SetId(self, id):
+ self.__id = id
+ if id is not None and id.text is not None:
+ self.__id.text = id.text.strip()
+
+ id = property(__GetId, __SetId)
+
+ def __init__(self, category=None, content=None,
+ atom_id=None, link=None, title=None, updated=None,
+ entry_link=None, indexed=None, crawled=None,
+ geolocation=None, preferred_domain=None, crawl_rate=None,
+ enhanced_image_search=None,
+ verified=None, verification_method=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ atom.Entry.__init__(self, category=category,
+ content=content, atom_id=atom_id, link=link,
+ title=title, updated=updated, text=text)
+
+ self.entry_link = entry_link or []
+ self.indexed = indexed
+ self.crawled = crawled
+ self.geolocation = geolocation
+ self.preferred_domain = preferred_domain
+ self.crawl_rate = crawl_rate
+ self.enhanced_image_search = enhanced_image_search
+ self.verified = verified
+ self.verification_method = verification_method or []
+
+
+def SitesEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitesEntry, xml_string)
+
+
+class SitesFeed(atom.Feed, LinkFinder):
+ """A Google Webmaster Tools meta Sites feed flavor of an Atom Feed"""
+
+ _tag = atom.Feed._tag
+ _namespace = atom.Feed._namespace
+ _children = atom.Feed._children.copy()
+ _attributes = atom.Feed._attributes.copy()
+ _children['{%s}startIndex' % gdata.OPENSEARCH_NAMESPACE] = (
+ 'start_index', gdata.StartIndex)
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SitesEntry])
+ del _children['{%s}generator' % atom.ATOM_NAMESPACE]
+ del _children['{%s}author' % atom.ATOM_NAMESPACE]
+ del _children['{%s}contributor' % atom.ATOM_NAMESPACE]
+ del _children['{%s}logo' % atom.ATOM_NAMESPACE]
+ del _children['{%s}icon' % atom.ATOM_NAMESPACE]
+ del _children['{%s}rights' % atom.ATOM_NAMESPACE]
+ del _children['{%s}subtitle' % atom.ATOM_NAMESPACE]
+
+ def __GetId(self):
+ return self.__id
+
+ def __SetId(self, id):
+ self.__id = id
+ if id is not None and id.text is not None:
+ self.__id.text = id.text.strip()
+
+ id = property(__GetId, __SetId)
+
+ def __init__(self, start_index=None, atom_id=None, title=None, entry=None,
+ category=None, link=None, updated=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ """Constructor for Source
+
+ Args:
+ category: list (optional) A list of Category instances
+ id: Id (optional) The entry's Id element
+ link: list (optional) A list of Link instances
+ title: Title (optional) the entry's title element
+ updated: Updated (optional) the entry's updated element
+ entry: list (optional) A list of the Entry instances contained in the
+ feed.
+ text: String (optional) The text contents of the element. This is the
+ contents of the Entry's XML text node.
+ (Example: This is the text)
+ extension_elements: list (optional) A list of ExtensionElement instances
+ which are children of this element.
+ extension_attributes: dict (optional) A dictionary of strings which are
+ the values for additional XML attributes of this element.
+ """
+
+ self.start_index = start_index
+ self.category = category or []
+ self.id = atom_id
+ self.link = link or []
+ self.title = title
+ self.updated = updated
+ self.entry = entry or []
+ self.text = text
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SitesFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitesFeed, xml_string)
+
+
+class SitemapsEntry(atom.Entry, LinkFinder):
+ """A Google Webmaster Tools meta Sitemaps Entry flavor of an Atom Entry """
+
+ _tag = atom.Entry._tag
+ _namespace = atom.Entry._namespace
+ _children = atom.Entry._children.copy()
+ _attributes = atom.Entry._attributes.copy()
+ _children['{%s}sitemap-type' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'sitemap_type', SitemapType)
+ _children['{%s}sitemap-status' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'sitemap_status', SitemapStatus)
+ _children['{%s}sitemap-last-downloaded' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'sitemap_last_downloaded', SitemapLastDownloaded)
+ _children['{%s}sitemap-url-count' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'sitemap_url_count', SitemapUrlCount)
+ _children['{%s}sitemap-mobile-markup-language' % GWEBMASTERTOOLS_NAMESPACE] \
+ = ('sitemap_mobile_markup_language', SitemapMobileMarkupLanguage)
+ _children['{%s}sitemap-news-publication-label' % GWEBMASTERTOOLS_NAMESPACE] \
+ = ('sitemap_news_publication_label', SitemapNewsPublicationLabel)
+
+ def __GetId(self):
+ return self.__id
+
+ # This method was created to strip the unwanted whitespace from the id's
+ # text node.
+ def __SetId(self, id):
+ self.__id = id
+ if id is not None and id.text is not None:
+ self.__id.text = id.text.strip()
+
+ id = property(__GetId, __SetId)
+
+ def __init__(self, category=None, content=None,
+ atom_id=None, link=None, title=None, updated=None,
+ sitemap_type=None, sitemap_status=None, sitemap_last_downloaded=None,
+ sitemap_url_count=None, sitemap_mobile_markup_language=None,
+ sitemap_news_publication_label=None,
+ extension_elements=None, extension_attributes=None, text=None):
+ atom.Entry.__init__(self, category=category,
+ content=content, atom_id=atom_id, link=link,
+ title=title, updated=updated, text=text)
+
+ self.sitemap_type = sitemap_type
+ self.sitemap_status = sitemap_status
+ self.sitemap_last_downloaded = sitemap_last_downloaded
+ self.sitemap_url_count = sitemap_url_count
+ self.sitemap_mobile_markup_language = sitemap_mobile_markup_language
+ self.sitemap_news_publication_label = sitemap_news_publication_label
+
+
+def SitemapsEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapsEntry, xml_string)
+
+
+class SitemapsFeed(atom.Feed, LinkFinder):
+ """A Google Webmaster Tools meta Sitemaps feed flavor of an Atom Feed"""
+
+ _tag = atom.Feed._tag
+ _namespace = atom.Feed._namespace
+ _children = atom.Feed._children.copy()
+ _attributes = atom.Feed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SitemapsEntry])
+ _children['{%s}sitemap-mobile' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'sitemap_mobile', SitemapMobile)
+ _children['{%s}sitemap-news' % GWEBMASTERTOOLS_NAMESPACE] = (
+ 'sitemap_news', SitemapNews)
+ del _children['{%s}generator' % atom.ATOM_NAMESPACE]
+ del _children['{%s}author' % atom.ATOM_NAMESPACE]
+ del _children['{%s}contributor' % atom.ATOM_NAMESPACE]
+ del _children['{%s}logo' % atom.ATOM_NAMESPACE]
+ del _children['{%s}icon' % atom.ATOM_NAMESPACE]
+ del _children['{%s}rights' % atom.ATOM_NAMESPACE]
+ del _children['{%s}subtitle' % atom.ATOM_NAMESPACE]
+
+ def __GetId(self):
+ return self.__id
+
+ def __SetId(self, id):
+ self.__id = id
+ if id is not None and id.text is not None:
+ self.__id.text = id.text.strip()
+
+ id = property(__GetId, __SetId)
+
+ def __init__(self, category=None, content=None,
+ atom_id=None, link=None, title=None, updated=None,
+ entry=None, sitemap_mobile=None, sitemap_news=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ self.category = category or []
+ self.id = atom_id
+ self.link = link or []
+ self.title = title
+ self.updated = updated
+ self.entry = entry or []
+ self.text = text
+ self.sitemap_mobile = sitemap_mobile
+ self.sitemap_news = sitemap_news
+ self.extension_elements = extension_elements or []
+ self.extension_attributes = extension_attributes or {}
+
+
+def SitemapsFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(SitemapsFeed, xml_string)
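To show how the *FromString converters map XML onto the classes above, here is a hand-written entry; the element values are invented for illustration only:

    import gdata.webmastertools

    xml = ('<entry xmlns="http://www.w3.org/2005/Atom" '
           'xmlns:wt="http://schemas.google.com/webmasters/tools/2007">'
           '<id>http://www.example.com/</id>'
           '<wt:indexed>true</wt:indexed>'
           '<wt:verified>false</wt:verified>'
           '</entry>')

    entry = gdata.webmastertools.SitesEntryFromString(xml)
    print entry.id.text        # whitespace is stripped by the id property
    print entry.indexed.text   # 'true'
    print entry.verified.text  # 'false'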
diff --git a/python/gdata/webmastertools/data.py b/python/gdata/webmastertools/data.py
new file mode 100644
index 0000000..8b50a47
--- /dev/null
+++ b/python/gdata/webmastertools/data.py
@@ -0,0 +1,217 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains the data classes of the Google Webmaster Tools Data API"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.data
+import gdata.opensearch.data
+
+
+WT_TEMPLATE = '{http://schemas.google.com/webmaster/tools/2007/}%s'
+
+
+class CrawlIssueCrawlType(atom.core.XmlElement):
+ """Type of crawl of the crawl issue"""
+ _qname = WT_TEMPLATE % 'crawl-type'
+
+
+class CrawlIssueDateDetected(atom.core.XmlElement):
+ """Detection date for the issue"""
+ _qname = WT_TEMPLATE % 'date-detected'
+
+
+class CrawlIssueDetail(atom.core.XmlElement):
+ """Detail of the crawl issue"""
+ _qname = WT_TEMPLATE % 'detail'
+
+
+class CrawlIssueIssueType(atom.core.XmlElement):
+ """Type of crawl issue"""
+ _qname = WT_TEMPLATE % 'issue-type'
+
+
+class CrawlIssueLinkedFromUrl(atom.core.XmlElement):
+ """Source URL that links to the issue URL"""
+ _qname = WT_TEMPLATE % 'linked-from'
+
+
+class CrawlIssueUrl(atom.core.XmlElement):
+ """URL affected by the crawl issue"""
+ _qname = WT_TEMPLATE % 'url'
+
+
+class CrawlIssueEntry(gdata.data.GDEntry):
+ """Describes a crawl issue entry"""
+ date_detected = CrawlIssueDateDetected
+ url = CrawlIssueUrl
+ detail = CrawlIssueDetail
+ issue_type = CrawlIssueIssueType
+ crawl_type = CrawlIssueCrawlType
+ linked_from = [CrawlIssueLinkedFromUrl]
+
+
+class CrawlIssuesFeed(gdata.data.GDFeed):
+ """Feed of crawl issues for a particular site"""
+ entry = [CrawlIssueEntry]
+
+
+class Indexed(atom.core.XmlElement):
+ """Describes the indexing status of a site"""
+ _qname = WT_TEMPLATE % 'indexed'
+
+
+class Keyword(atom.core.XmlElement):
+ """A keyword in a site or in a link to a site"""
+ _qname = WT_TEMPLATE % 'keyword'
+ source = 'source'
+
+
+class KeywordEntry(gdata.data.GDEntry):
+ """Describes a keyword entry"""
+
+
+class KeywordsFeed(gdata.data.GDFeed):
+ """Feed of keywords for a particular site"""
+ entry = [KeywordEntry]
+ keyword = [Keyword]
+
+
+class LastCrawled(atom.core.XmlElement):
+ """Describes the last crawled date of a site"""
+ _qname = WT_TEMPLATE % 'last-crawled'
+
+
+class MessageBody(atom.core.XmlElement):
+ """Message body"""
+ _qname = WT_TEMPLATE % 'body'
+
+
+class MessageDate(atom.core.XmlElement):
+ """Message date"""
+ _qname = WT_TEMPLATE % 'date'
+
+
+class MessageLanguage(atom.core.XmlElement):
+ """Message language"""
+ _qname = WT_TEMPLATE % 'language'
+
+
+class MessageRead(atom.core.XmlElement):
+ """Indicates if the message has already been read"""
+ _qname = WT_TEMPLATE % 'read'
+
+
+class MessageSubject(atom.core.XmlElement):
+ """Message subject"""
+ _qname = WT_TEMPLATE % 'subject'
+
+
+class SiteId(atom.core.XmlElement):
+ """Site URL"""
+ _qname = WT_TEMPLATE % 'id'
+
+
+class MessageEntry(gdata.data.GDEntry):
+ """Describes a message entry"""
+ wt_id = SiteId
+ subject = MessageSubject
+ date = MessageDate
+ body = MessageBody
+ language = MessageLanguage
+ read = MessageRead
+
+
+class MessagesFeed(gdata.data.GDFeed):
+ """Describes a messages feed"""
+ entry = [MessageEntry]
+
+
+class SitemapEntry(gdata.data.GDEntry):
+ """Describes a sitemap entry"""
+ indexed = Indexed
+ wt_id = SiteId
+
+
+class SitemapMobileMarkupLanguage(atom.core.XmlElement):
+ """Describes a markup language for URLs in this sitemap"""
+ _qname = WT_TEMPLATE % 'sitemap-mobile-markup-language'
+
+
+class SitemapMobile(atom.core.XmlElement):
+ """Lists acceptable mobile markup languages for URLs in this sitemap"""
+ _qname = WT_TEMPLATE % 'sitemap-mobile'
+ sitemap_mobile_markup_language = [SitemapMobileMarkupLanguage]
+
+
+class SitemapNewsPublicationLabel(atom.core.XmlElement):
+ """Specifies the publication label for this sitemap"""
+ _qname = WT_TEMPLATE % 'sitemap-news-publication-label'
+
+
+class SitemapNews(atom.core.XmlElement):
+ """Lists publication labels for this sitemap"""
+ _qname = WT_TEMPLATE % 'sitemap-news'
+ sitemap_news_publication_label = [SitemapNewsPublicationLabel]
+
+
+class SitemapType(atom.core.XmlElement):
+ """Indicates the type of sitemap. Not used for News or Mobile Sitemaps"""
+ _qname = WT_TEMPLATE % 'sitemap-type'
+
+
+class SitemapUrlCount(atom.core.XmlElement):
+ """Indicates the number of URLs contained in the sitemap"""
+ _qname = WT_TEMPLATE % 'sitemap-url-count'
+
+
+class SitemapsFeed(gdata.data.GDFeed):
+ """Describes a sitemaps feed"""
+ entry = [SitemapEntry]
+
+
+class VerificationMethod(atom.core.XmlElement):
+ """Describes a verification method that may be used for a site"""
+ _qname = WT_TEMPLATE % 'verification-method'
+ in_use = 'in-use'
+ type = 'type'
+
+
+class Verified(atom.core.XmlElement):
+ """Describes the verification status of a site"""
+ _qname = WT_TEMPLATE % 'verified'
+
+
+class SiteEntry(gdata.data.GDEntry):
+ """Describes a site entry"""
+ indexed = Indexed
+ wt_id = SiteId
+ verified = Verified
+ last_crawled = LastCrawled
+ verification_method = [VerificationMethod]
+
+
+class SitesFeed(gdata.data.GDFeed):
+ """Describes a sites feed"""
+ entry = [SiteEntry]
+
+
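These v2-style classes are plain declarative atom.core.XmlElement subclasses, so they can be populated with atom.core.parse from the v2 atom library imported above. The document below is hand-written for illustration; its wt namespace simply mirrors WT_TEMPLATE:

    import atom.core
    import gdata.webmastertools.data

    xml = ('<entry xmlns="http://www.w3.org/2005/Atom" '
           'xmlns:wt="http://schemas.google.com/webmaster/tools/2007/">'
           '<wt:indexed>true</wt:indexed>'
           '<wt:verified>true</wt:verified>'
           '</entry>')

    site = atom.core.parse(xml, gdata.webmastertools.data.SiteEntry)
    print site.indexed.text    # 'true'
    print site.verified.text   # 'true'

Members declared as lists (for example linked_from in CrawlIssueEntry) come back as Python lists, one element per matching child.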
diff --git a/python/gdata/webmastertools/service.py b/python/gdata/webmastertools/service.py
new file mode 100644
index 0000000..8c3286d
--- /dev/null
+++ b/python/gdata/webmastertools/service.py
@@ -0,0 +1,516 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Yu-Jie Lin
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""GWebmasterToolsService extends the GDataService to streamline
+Google Webmaster Tools operations.
+
+ GWebmasterToolsService: Provides methods to query feeds and manipulate items.
+ Extends GDataService.
+"""
+
+__author__ = 'livibetter (Yu-Jie Lin)'
+
+import urllib
+import gdata
+import atom.service
+import gdata.service
+import gdata.webmastertools as webmastertools
+import atom
+
+
+FEED_BASE = 'https://www.google.com/webmasters/tools/feeds/'
+SITES_FEED = FEED_BASE + 'sites/'
+SITE_TEMPLATE = SITES_FEED + '%s'
+SITEMAPS_FEED_TEMPLATE = FEED_BASE + '%(site_id)s/sitemaps/'
+SITEMAP_TEMPLATE = SITEMAPS_FEED_TEMPLATE + '%(sitemap_id)s'
+
+
+class Error(Exception):
+ pass
+
+
+class RequestError(Error):
+ pass
+
+
+class GWebmasterToolsService(gdata.service.GDataService):
+ """Client for the Google Webmaster Tools service."""
+
+ def __init__(self, email=None, password=None, source=None,
+ server='www.google.com', **kwargs):
+ """Creates a client for the Google Webmaster Tools service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'www.google.com'.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service='sitemaps', source=source,
+ server=server, **kwargs)
+
+ def GetSitesFeed(self, uri=SITES_FEED,
+ converter=webmastertools.SitesFeedFromString):
+ """Gets sites feed.
+
+ Args:
+ uri: str (optional) URI to retrieve sites feed.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitesFeedFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesFeed object.
+ """
+ return self.Get(uri, converter=converter)
+
+ def AddSite(self, site_uri, uri=SITES_FEED,
+ url_params=None, escape_params=True, converter=None):
+ """Adds a site to Google Webmaster Tools.
+
+ Args:
+ site_uri: str URI of which site to add.
+ uri: str (optional) URI to add a site.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitesEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesEntry object.
+ """
+
+ site_entry = webmastertools.SitesEntry()
+ site_entry.content = atom.Content(src=site_uri)
+ response = self.Post(site_entry, uri,
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitesEntryFromString(response.ToString())
+ return response
+
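For orientation between the individual method docstrings, a minimal client session might look like the sketch below; the credentials and site URL are placeholders, and the remaining methods follow the same pattern for sitemap and settings operations:

    import gdata.webmastertools.service

    client = gdata.webmastertools.service.GWebmasterToolsService(
        email='user@example.com', password='secret', source='example-agent')
    client.ProgrammaticLogin()

    feed = client.GetSitesFeed()
    for entry in feed.entry:
        print entry.title.text

    client.AddSite('http://www.example.com/')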
+ def DeleteSite(self, site_uri, uri=SITE_TEMPLATE,
+ url_params=None, escape_params=True):
+ """Removes a site from Google Webmaster Tools.
+
+ Args:
+ site_uri: str URI of which site to remove.
+ uri: str (optional) A URI template to send DELETE request.
+ Default SITE_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ True if the delete succeeded.
+ """
+
+ return self.Delete(
+ uri % urllib.quote_plus(site_uri),
+ url_params=url_params, escape_params=escape_params)
+
+ def VerifySite(self, site_uri, verification_method, uri=SITE_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Requests a verification of a site.
+
+ Args:
+ site_uri: str URI of which site to add sitemap for.
+ verification_method: str The method to verify a site. Valid values are
+ 'htmlpage', and 'metatag'.
+ uri: str (optional) URI template to update a site.
+ Default SITE_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesEntry object.
+ """
+
+ site_entry = webmastertools.SitesEntry(
+ atom_id=atom.Id(text=site_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sites-info'),
+ verification_method=webmastertools.VerificationMethod(
+ type=verification_method, in_use='true')
+ )
+ response = self.Put(
+ site_entry,
+ uri % urllib.quote_plus(site_uri),
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitesEntryFromString(response.ToString())
+ return response
+
+
+ def UpdateGeoLocation(self, site_uri, geolocation, uri=SITE_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Updates geolocation setting of a site.
+
+ Args:
+ site_uri: str URI of which site to add sitemap for.
+ geolocation: str The geographic location. Valid values are listed in
+ http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
+ uri: str (optional) URI template to update a site.
+ Default SITE_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesEntry object.
+ """
+
+ site_entry = webmastertools.SitesEntry(
+ atom_id=atom.Id(text=site_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sites-info'),
+ geolocation=webmastertools.GeoLocation(text=geolocation)
+ )
+ response = self.Put(
+ site_entry,
+ uri % urllib.quote_plus(site_uri),
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitesEntryFromString(response.ToString())
+ return response
+
+ def UpdateCrawlRate(self, site_uri, crawl_rate, uri=SITE_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Updates crawl rate setting of a site.
+
+ Args:
+ site_uri: str URI of which site to add sitemap for.
+ crawl_rate: str The crawl rate for a site. Valid values are 'slower',
+ 'normal', and 'faster'.
+ uri: str (optional) URI template to update a site.
+ Default SITE_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesEntry object.
+ """
+
+ site_entry = webmastertools.SitesEntry(
+ atom_id=atom.Id(text=site_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sites-info'),
+ crawl_rate=webmastertools.CrawlRate(text=crawl_rate)
+ )
+ response = self.Put(
+ site_entry,
+ uri % urllib.quote_plus(site_uri),
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitesEntryFromString(response.ToString())
+ return response
+
+ def UpdatePreferredDomain(self, site_uri, preferred_domain, uri=SITE_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Updates preferred domain setting of a site.
+
+    Note that if using 'preferwww', the www.example.com form of the site must
+    also be present in the account for the setting to take effect.
+
+ Args:
+ site_uri: str URI of which site to add sitemap for.
+ preferred_domain: str The preferred domain for a site. Valid values are 'none',
+ 'preferwww', and 'prefernowww'.
+ uri: str (optional) URI template to update a site.
+ Default SITE_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesEntry object.
+ """
+
+ site_entry = webmastertools.SitesEntry(
+ atom_id=atom.Id(text=site_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sites-info'),
+ preferred_domain=webmastertools.PreferredDomain(text=preferred_domain)
+ )
+ response = self.Put(
+ site_entry,
+ uri % urllib.quote_plus(site_uri),
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitesEntryFromString(response.ToString())
+ return response
+
+ def UpdateEnhancedImageSearch(self, site_uri, enhanced_image_search,
+ uri=SITE_TEMPLATE, url_params=None, escape_params=True, converter=None):
+ """Updates enhanced image search setting of a site.
+
+ Args:
+ site_uri: str URI of which site to add sitemap for.
+ enhanced_image_search: str The enhanced image search setting for a site.
+ Valid values are 'true', and 'false'.
+ uri: str (optional) URI template to update a site.
+ Default SITE_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitesEntry object.
+ """
+
+ site_entry = webmastertools.SitesEntry(
+ atom_id=atom.Id(text=site_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sites-info'),
+ enhanced_image_search=webmastertools.EnhancedImageSearch(
+ text=enhanced_image_search)
+ )
+ response = self.Put(
+ site_entry,
+ uri % urllib.quote_plus(site_uri),
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitesEntryFromString(response.ToString())
+ return response
+
+ def GetSitemapsFeed(self, site_uri, uri=SITEMAPS_FEED_TEMPLATE,
+ converter=webmastertools.SitemapsFeedFromString):
+ """Gets sitemaps feed of a site.
+
+ Args:
+      site_uri: str URI of the site whose sitemaps feed is to be retrieved.
+      uri: str (optional) URI template for the sitemaps feed.
+        Default SITEMAPS_FEED_TEMPLATE.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsFeedFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitemapsFeed object.
+ """
+ return self.Get(uri % {'site_id': urllib.quote_plus(site_uri)},
+ converter=converter)
+
+ def AddSitemap(self, site_uri, sitemap_uri, sitemap_type='WEB',
+ uri=SITEMAPS_FEED_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Adds a regular sitemap to a site.
+
+ Args:
+      site_uri: str URI of the site to add the sitemap to.
+      sitemap_uri: str URI of the sitemap to add to the site.
+      sitemap_type: str Type of added sitemap. Valid types: WEB, VIDEO, or CODE.
+      uri: str (optional) URI template to add a sitemap.
+        Default SITEMAPS_FEED_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitemapsEntry object.
+ """
+
+ sitemap_entry = webmastertools.SitemapsEntry(
+ atom_id=atom.Id(text=sitemap_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sitemap-regular'),
+ sitemap_type=webmastertools.SitemapType(text=sitemap_type))
+ response = self.Post(
+ sitemap_entry,
+ uri % {'site_id': urllib.quote_plus(site_uri)},
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitemapsEntryFromString(response.ToString())
+ return response
+
+ def AddMobileSitemap(self, site_uri, sitemap_uri,
+ sitemap_mobile_markup_language='XHTML', uri=SITEMAPS_FEED_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Adds a mobile sitemap to a site.
+
+ Args:
+      site_uri: str URI of the site to add the sitemap to.
+      sitemap_uri: str URI of the sitemap to add to the site.
+      sitemap_mobile_markup_language: str Format of added sitemap. Valid types:
+        XHTML, WML, or cHTML.
+      uri: str (optional) URI template to add a sitemap.
+        Default SITEMAPS_FEED_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitemapsEntry object.
+ """
+ sitemap_entry = webmastertools.SitemapsEntry(
+ atom_id=atom.Id(text=sitemap_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sitemap-mobile'),
+ sitemap_mobile_markup_language=\
+ webmastertools.SitemapMobileMarkupLanguage(
+ text=sitemap_mobile_markup_language))
+ response = self.Post(
+ sitemap_entry,
+ uri % {'site_id': urllib.quote_plus(site_uri)},
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitemapsEntryFromString(response.ToString())
+ return response
+
+ def AddNewsSitemap(self, site_uri, sitemap_uri,
+ sitemap_news_publication_label, uri=SITEMAPS_FEED_TEMPLATE,
+ url_params=None, escape_params=True, converter=None):
+ """Adds a news sitemap to a site.
+
+ Args:
+      site_uri: str URI of the site to add the sitemap to.
+      sitemap_uri: str URI of the sitemap to add to the site.
+      sitemap_news_publication_label: str or list of str Publication labels
+        for the sitemap.
+      uri: str (optional) URI template to add a sitemap.
+        Default SITEMAPS_FEED_TEMPLATE.
+ url_params: dict (optional) Additional URL parameters to be included
+ in the insertion request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+ converter: func (optional) Function which is executed on the server's
+ response before it is returned. Usually this is a function like
+ SitemapsEntryFromString which will parse the response and turn it into
+ an object.
+
+ Returns:
+ If converter is defined, the results of running converter on the server's
+ response. Otherwise, it will be a SitemapsEntry object.
+ """
+
+ sitemap_entry = webmastertools.SitemapsEntry(
+ atom_id=atom.Id(text=sitemap_uri),
+ category=atom.Category(
+ scheme='http://schemas.google.com/g/2005#kind',
+ term='http://schemas.google.com/webmasters/tools/2007#sitemap-news'),
+ sitemap_news_publication_label=[],
+ )
+ if isinstance(sitemap_news_publication_label, str):
+ sitemap_news_publication_label = [sitemap_news_publication_label]
+ for label in sitemap_news_publication_label:
+ sitemap_entry.sitemap_news_publication_label.append(
+ webmastertools.SitemapNewsPublicationLabel(text=label))
+ response = self.Post(
+ sitemap_entry,
+ uri % {'site_id': urllib.quote_plus(site_uri)},
+ url_params=url_params,
+ escape_params=escape_params, converter=converter)
+ if not converter and isinstance(response, atom.Entry):
+ return webmastertools.SitemapsEntryFromString(response.ToString())
+ return response
+
+ def DeleteSitemap(self, site_uri, sitemap_uri, uri=SITEMAP_TEMPLATE,
+ url_params=None, escape_params=True):
+ """Removes a sitemap from a site.
+
+ Args:
+      site_uri: str URI of the site to remove a sitemap from.
+      sitemap_uri: str URI of the sitemap to remove from the site.
+      uri: str (optional) A URI template to send the DELETE request to.
+        Default SITEMAP_TEMPLATE.
+      url_params: dict (optional) Additional URL parameters to be included
+        in the delete request.
+ escape_params: boolean (optional) If true, the url_parameters will be
+ escaped before they are included in the request.
+
+ Returns:
+ True if the delete succeeded.
+ """
+
+ return self.Delete(
+ uri % {'site_id': urllib.quote_plus(site_uri),
+ 'sitemap_id': urllib.quote_plus(sitemap_uri)},
+ url_params=url_params, escape_params=escape_params)
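The sitemap and site-settings methods above all follow the same pattern: build a SitesEntry or SitemapsEntry, send it with Put or Post, and run the optional converter on the response. A minimal usage sketch, assuming the client class defined earlier in this module is gdata.webmastertools.service.GWebmasterToolsService and using placeholder credentials and URLs:

import gdata.webmastertools.service

# GWebmasterToolsService is assumed to be the client class defined earlier in
# this module; the credentials and site URLs below are placeholders.
client = gdata.webmastertools.service.GWebmasterToolsService(
    email='user@example.com', password='secret', source='example-agent')
client.ProgrammaticLogin()

site = 'http://www.example.com/'
client.UpdatePreferredDomain(site, 'preferwww')
client.AddSitemap(site, 'http://www.example.com/sitemap.xml')
client.DeleteSitemap(site, 'http://www.example.com/sitemap.xml')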
diff --git a/python/gdata/youtube/__init__.py b/python/gdata/youtube/__init__.py
new file mode 100644
index 0000000..c41aaea
--- /dev/null
+++ b/python/gdata/youtube/__init__.py
@@ -0,0 +1,684 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__author__ = ('api.stephaniel@gmail.com (Stephanie Liu)'
+ ', api.jhartmann@gmail.com (Jochen Hartmann)')
+
+import atom
+import gdata
+import gdata.media as Media
+import gdata.geo as Geo
+
+YOUTUBE_NAMESPACE = 'http://gdata.youtube.com/schemas/2007'
+YOUTUBE_FORMAT = '{http://gdata.youtube.com/schemas/2007}format'
+YOUTUBE_DEVELOPER_TAG_SCHEME = '%s/%s' % (YOUTUBE_NAMESPACE,
+ 'developertags.cat')
+YOUTUBE_SUBSCRIPTION_TYPE_SCHEME = '%s/%s' % (YOUTUBE_NAMESPACE,
+ 'subscriptiontypes.cat')
+
+class Username(atom.AtomBase):
+ """The YouTube Username element"""
+ _tag = 'username'
+ _namespace = YOUTUBE_NAMESPACE
+
+class QueryString(atom.AtomBase):
+ """The YouTube QueryString element"""
+ _tag = 'queryString'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class FirstName(atom.AtomBase):
+ """The YouTube FirstName element"""
+ _tag = 'firstName'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class LastName(atom.AtomBase):
+ """The YouTube LastName element"""
+ _tag = 'lastName'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Age(atom.AtomBase):
+ """The YouTube Age element"""
+ _tag = 'age'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Books(atom.AtomBase):
+ """The YouTube Books element"""
+ _tag = 'books'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Gender(atom.AtomBase):
+ """The YouTube Gender element"""
+ _tag = 'gender'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Company(atom.AtomBase):
+ """The YouTube Company element"""
+ _tag = 'company'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Hobbies(atom.AtomBase):
+ """The YouTube Hobbies element"""
+ _tag = 'hobbies'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Hometown(atom.AtomBase):
+ """The YouTube Hometown element"""
+ _tag = 'hometown'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Location(atom.AtomBase):
+ """The YouTube Location element"""
+ _tag = 'location'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Movies(atom.AtomBase):
+ """The YouTube Movies element"""
+ _tag = 'movies'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Music(atom.AtomBase):
+ """The YouTube Music element"""
+ _tag = 'music'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Occupation(atom.AtomBase):
+ """The YouTube Occupation element"""
+ _tag = 'occupation'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class School(atom.AtomBase):
+ """The YouTube School element"""
+ _tag = 'school'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Relationship(atom.AtomBase):
+ """The YouTube Relationship element"""
+ _tag = 'relationship'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Recorded(atom.AtomBase):
+ """The YouTube Recorded element"""
+ _tag = 'recorded'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Statistics(atom.AtomBase):
+ """The YouTube Statistics element."""
+ _tag = 'statistics'
+ _namespace = YOUTUBE_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['viewCount'] = 'view_count'
+ _attributes['videoWatchCount'] = 'video_watch_count'
+ _attributes['subscriberCount'] = 'subscriber_count'
+ _attributes['lastWebAccess'] = 'last_web_access'
+ _attributes['favoriteCount'] = 'favorite_count'
+
+ def __init__(self, view_count=None, video_watch_count=None,
+ favorite_count=None, subscriber_count=None, last_web_access=None,
+ extension_elements=None, extension_attributes=None, text=None):
+
+ self.view_count = view_count
+ self.video_watch_count = video_watch_count
+ self.subscriber_count = subscriber_count
+ self.last_web_access = last_web_access
+ self.favorite_count = favorite_count
+
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class Status(atom.AtomBase):
+ """The YouTube Status element"""
+ _tag = 'status'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Position(atom.AtomBase):
+ """The YouTube Position element. The position in a playlist feed."""
+ _tag = 'position'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Racy(atom.AtomBase):
+ """The YouTube Racy element."""
+ _tag = 'racy'
+ _namespace = YOUTUBE_NAMESPACE
+
+class Description(atom.AtomBase):
+ """The YouTube Description element."""
+ _tag = 'description'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Private(atom.AtomBase):
+ """The YouTube Private element."""
+ _tag = 'private'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class NoEmbed(atom.AtomBase):
+ """The YouTube VideoShare element. Whether a video can be embedded or not."""
+ _tag = 'noembed'
+ _namespace = YOUTUBE_NAMESPACE
+
+
+class Comments(atom.AtomBase):
+ """The GData Comments element"""
+ _tag = 'comments'
+ _namespace = gdata.GDATA_NAMESPACE
+ _children = atom.AtomBase._children.copy()
+ _attributes = atom.AtomBase._attributes.copy()
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+
+ def __init__(self, feed_link=None, extension_elements=None,
+ extension_attributes=None, text=None):
+
+ self.feed_link = feed_link
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class Rating(atom.AtomBase):
+ """The GData Rating element"""
+ _tag = 'rating'
+ _namespace = gdata.GDATA_NAMESPACE
+ _attributes = atom.AtomBase._attributes.copy()
+ _attributes['min'] = 'min'
+ _attributes['max'] = 'max'
+ _attributes['numRaters'] = 'num_raters'
+ _attributes['average'] = 'average'
+
+ def __init__(self, min=None, max=None,
+ num_raters=None, average=None, extension_elements=None,
+ extension_attributes=None, text=None):
+
+ self.min = min
+ self.max = max
+ self.num_raters = num_raters
+ self.average = average
+
+ atom.AtomBase.__init__(self, extension_elements=extension_elements,
+ extension_attributes=extension_attributes, text=text)
+
+
+class YouTubePlaylistVideoEntry(gdata.GDataEntry):
+ """Represents a YouTubeVideoEntry on a YouTubePlaylist."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+ _children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
+ Description)
+ _children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
+ _children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
+ _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
+ _children['{%s}location' % YOUTUBE_NAMESPACE] = ('location', Location)
+ _children['{%s}position' % YOUTUBE_NAMESPACE] = ('position', Position)
+ _children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None, title=None,
+ updated=None, feed_link=None, description=None,
+ rating=None, comments=None, statistics=None,
+ location=None, position=None, media=None,
+ extension_elements=None, extension_attributes=None):
+
+ self.feed_link = feed_link
+ self.description = description
+ self.rating = rating
+ self.comments = comments
+ self.statistics = statistics
+ self.location = location
+ self.position = position
+ self.media = media
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id,
+ link=link, published=published, title=title,
+ updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+
+class YouTubeVideoCommentEntry(gdata.GDataEntry):
+ """Represents a comment on YouTube."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+
+
+class YouTubeSubscriptionEntry(gdata.GDataEntry):
+ """Represents a subscription entry on YouTube."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
+ _children['{%s}queryString' % YOUTUBE_NAMESPACE] = (
+ 'query_string', QueryString)
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None, title=None,
+ updated=None, username=None, query_string=None, feed_link=None,
+ extension_elements=None, extension_attributes=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title, updated=updated)
+
+ self.username = username
+ self.query_string = query_string
+ self.feed_link = feed_link
+
+
+ def GetSubscriptionType(self):
+ """Retrieve the type of this subscription.
+
+ Returns:
+      A string that is either 'channel', 'query' or 'favorites'.
+ """
+ for category in self.category:
+ if category.scheme == YOUTUBE_SUBSCRIPTION_TYPE_SCHEME:
+ return category.term
+
+
+class YouTubeVideoResponseEntry(gdata.GDataEntry):
+ """Represents a video response. """
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
+ _children['{%s}noembed' % YOUTUBE_NAMESPACE] = ('noembed', NoEmbed)
+ _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
+ _children['{%s}racy' % YOUTUBE_NAMESPACE] = ('racy', Racy)
+ _children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
+
+ def __init__(self, author=None, category=None, content=None, atom_id=None,
+ link=None, published=None, title=None, updated=None, rating=None,
+ noembed=None, statistics=None, racy=None, media=None,
+ extension_elements=None, extension_attributes=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title, updated=updated)
+
+ self.rating = rating
+ self.noembed = noembed
+ self.statistics = statistics
+ self.racy = racy
+ self.media = media or Media.Group()
+
+
+class YouTubeContactEntry(gdata.GDataEntry):
+ """Represents a contact entry."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
+ _children['{%s}status' % YOUTUBE_NAMESPACE] = ('status', Status)
+
+
+ def __init__(self, author=None, category=None, content=None, atom_id=None,
+ link=None, published=None, title=None, updated=None,
+ username=None, status=None, extension_elements=None,
+ extension_attributes=None, text=None):
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title, updated=updated)
+
+ self.username = username
+ self.status = status
+
+
+class YouTubeVideoEntry(gdata.GDataEntry):
+ """Represents a video on YouTube."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
+ _children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
+ _children['{%s}noembed' % YOUTUBE_NAMESPACE] = ('noembed', NoEmbed)
+ _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
+ _children['{%s}recorded' % YOUTUBE_NAMESPACE] = ('recorded', Recorded)
+ _children['{%s}racy' % YOUTUBE_NAMESPACE] = ('racy', Racy)
+ _children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
+ _children['{%s}where' % gdata.geo.GEORSS_NAMESPACE] = ('geo', Geo.Where)
+
+ def __init__(self, author=None, category=None, content=None, atom_id=None,
+ link=None, published=None, title=None, updated=None, rating=None,
+ noembed=None, statistics=None, racy=None, media=None, geo=None,
+ recorded=None, comments=None, extension_elements=None,
+ extension_attributes=None):
+
+ self.rating = rating
+ self.noembed = noembed
+ self.statistics = statistics
+ self.racy = racy
+ self.comments = comments
+ self.media = media or Media.Group()
+ self.geo = geo
+ self.recorded = recorded
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id, link=link,
+ published=published, title=title, updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+ def GetSwfUrl(self):
+ """Return the URL for the embeddable Video
+
+ Returns:
+ URL of the embeddable video
+ """
+ if self.media.content:
+ for content in self.media.content:
+ if content.extension_attributes[YOUTUBE_FORMAT] == '5':
+ return content.url
+ else:
+ return None
+
+ def AddDeveloperTags(self, developer_tags):
+ """Add a developer tag for this entry.
+
+ Developer tags can only be set during the initial upload.
+
+ Arguments:
+ developer_tags: A list of developer tags as strings.
+
+ Returns:
+ A list of all developer tags for this video entry.
+ """
+ for tag_text in developer_tags:
+ self.media.category.append(gdata.media.Category(
+ text=tag_text, label=tag_text, scheme=YOUTUBE_DEVELOPER_TAG_SCHEME))
+
+ return self.GetDeveloperTags()
+
+ def GetDeveloperTags(self):
+ """Retrieve developer tags for this video entry."""
+ developer_tags = []
+ for category in self.media.category:
+ if category.scheme == YOUTUBE_DEVELOPER_TAG_SCHEME:
+ developer_tags.append(category)
+ if len(developer_tags) > 0:
+ return developer_tags
+
+ def GetYouTubeCategoryAsString(self):
+ """Convenience method to return the YouTube category as string.
+
+ YouTubeVideoEntries can contain multiple Category objects with differing
+ schemes. This method returns only the category with the correct
+ scheme, ignoring developer tags.
+ """
+ for category in self.media.category:
+ if category.scheme != YOUTUBE_DEVELOPER_TAG_SCHEME:
+ return category.text
+
+class YouTubeUserEntry(gdata.GDataEntry):
+ """Represents a user on YouTube."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
+ _children['{%s}firstName' % YOUTUBE_NAMESPACE] = ('first_name', FirstName)
+ _children['{%s}lastName' % YOUTUBE_NAMESPACE] = ('last_name', LastName)
+ _children['{%s}age' % YOUTUBE_NAMESPACE] = ('age', Age)
+ _children['{%s}books' % YOUTUBE_NAMESPACE] = ('books', Books)
+ _children['{%s}gender' % YOUTUBE_NAMESPACE] = ('gender', Gender)
+ _children['{%s}company' % YOUTUBE_NAMESPACE] = ('company', Company)
+ _children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
+ Description)
+ _children['{%s}hobbies' % YOUTUBE_NAMESPACE] = ('hobbies', Hobbies)
+ _children['{%s}hometown' % YOUTUBE_NAMESPACE] = ('hometown', Hometown)
+ _children['{%s}location' % YOUTUBE_NAMESPACE] = ('location', Location)
+ _children['{%s}movies' % YOUTUBE_NAMESPACE] = ('movies', Movies)
+ _children['{%s}music' % YOUTUBE_NAMESPACE] = ('music', Music)
+ _children['{%s}occupation' % YOUTUBE_NAMESPACE] = ('occupation', Occupation)
+ _children['{%s}school' % YOUTUBE_NAMESPACE] = ('school', School)
+ _children['{%s}relationship' % YOUTUBE_NAMESPACE] = ('relationship',
+ Relationship)
+ _children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+ _children['{%s}thumbnail' % gdata.media.MEDIA_NAMESPACE] = ('thumbnail',
+ Media.Thumbnail)
+
+ def __init__(self, author=None, category=None, content=None, atom_id=None,
+ link=None, published=None, title=None, updated=None,
+ username=None, first_name=None, last_name=None, age=None,
+ books=None, gender=None, company=None, description=None,
+ hobbies=None, hometown=None, location=None, movies=None,
+ music=None, occupation=None, school=None, relationship=None,
+ statistics=None, feed_link=None, extension_elements=None,
+ extension_attributes=None, text=None):
+
+ self.username = username
+ self.first_name = first_name
+ self.last_name = last_name
+ self.age = age
+ self.books = books
+ self.gender = gender
+ self.company = company
+ self.description = description
+ self.hobbies = hobbies
+ self.hometown = hometown
+ self.location = location
+ self.movies = movies
+ self.music = music
+ self.occupation = occupation
+ self.school = school
+ self.relationship = relationship
+ self.statistics = statistics
+ self.feed_link = feed_link
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id,
+ link=link, published=published,
+ title=title, updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes,
+ text=text)
+
+
+class YouTubeVideoFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a video feed on YouTube."""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [YouTubeVideoEntry])
+
+class YouTubePlaylistEntry(gdata.GDataEntry):
+ """Represents a playlist in YouTube."""
+ _tag = gdata.GDataEntry._tag
+ _namespace = gdata.GDataEntry._namespace
+ _children = gdata.GDataEntry._children.copy()
+ _attributes = gdata.GDataEntry._attributes.copy()
+ _children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
+ Description)
+ _children['{%s}private' % YOUTUBE_NAMESPACE] = ('private',
+ Private)
+ _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
+ [gdata.FeedLink])
+
+ def __init__(self, author=None, category=None, content=None,
+ atom_id=None, link=None, published=None, title=None,
+ updated=None, private=None, feed_link=None,
+ description=None, extension_elements=None,
+ extension_attributes=None):
+
+ self.description = description
+ self.private = private
+ self.feed_link = feed_link
+
+ gdata.GDataEntry.__init__(self, author=author, category=category,
+ content=content, atom_id=atom_id,
+ link=link, published=published, title=title,
+ updated=updated,
+ extension_elements=extension_elements,
+ extension_attributes=extension_attributes)
+
+
+
+class YouTubePlaylistFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a feed of a user's playlists """
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [YouTubePlaylistEntry])
+
+
+class YouTubePlaylistVideoFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a feed of video entry on a playlist."""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [YouTubePlaylistVideoEntry])
+
+
+class YouTubeContactFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a feed of a users contacts."""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [YouTubeContactEntry])
+
+
+class YouTubeSubscriptionFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a feed of a users subscriptions."""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [YouTubeSubscriptionEntry])
+
+
+class YouTubeVideoCommentFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a feed of comments for a video."""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [YouTubeVideoCommentEntry])
+
+
+class YouTubeVideoResponseFeed(gdata.GDataFeed, gdata.LinkFinder):
+ """Represents a feed of video responses."""
+ _tag = gdata.GDataFeed._tag
+ _namespace = gdata.GDataFeed._namespace
+ _children = gdata.GDataFeed._children.copy()
+ _attributes = gdata.GDataFeed._attributes.copy()
+ _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
+ [YouTubeVideoResponseEntry])
+
+
+def YouTubeVideoFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoFeed, xml_string)
+
+
+def YouTubeVideoEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoEntry, xml_string)
+
+
+def YouTubeContactFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeContactFeed, xml_string)
+
+
+def YouTubeContactEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeContactEntry, xml_string)
+
+
+def YouTubeVideoCommentFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoCommentFeed, xml_string)
+
+
+def YouTubeVideoCommentEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoCommentEntry, xml_string)
+
+
+def YouTubeUserFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoFeed, xml_string)
+
+
+def YouTubeUserEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeUserEntry, xml_string)
+
+
+def YouTubePlaylistFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubePlaylistFeed, xml_string)
+
+
+def YouTubePlaylistVideoFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubePlaylistVideoFeed, xml_string)
+
+
+def YouTubePlaylistEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubePlaylistEntry, xml_string)
+
+
+def YouTubePlaylistVideoEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubePlaylistVideoEntry, xml_string)
+
+
+def YouTubeSubscriptionFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeSubscriptionFeed, xml_string)
+
+
+def YouTubeSubscriptionEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeSubscriptionEntry, xml_string)
+
+
+def YouTubeVideoResponseFeedFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoResponseFeed, xml_string)
+
+
+def YouTubeVideoResponseEntryFromString(xml_string):
+ return atom.CreateClassFromXMLString(YouTubeVideoResponseEntry, xml_string)
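A short, self-contained sketch of how the element classes above map XML onto Python attributes. The inline entry is a hand-written fragment (not real API output) that exercises only the media group and yt:statistics children; real entries from the videos feed carry many more elements:

import gdata.youtube

# Hand-written sample entry; the yt namespace matches YOUTUBE_NAMESPACE above
# and the media namespace is the Media RSS namespace used by gdata.media.
ENTRY_XML = """<entry xmlns='http://www.w3.org/2005/Atom'
    xmlns:media='http://search.yahoo.com/mrss/'
    xmlns:yt='http://gdata.youtube.com/schemas/2007'>
  <media:group>
    <media:title type='plain'>Example video</media:title>
  </media:group>
  <yt:statistics viewCount='42' favoriteCount='3'/>
</entry>"""

entry = gdata.youtube.YouTubeVideoEntryFromString(ENTRY_XML)
print entry.media.title.text       # 'Example video'
print entry.statistics.view_count  # '42'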
diff --git a/python/gdata/youtube/client.py b/python/gdata/youtube/client.py
new file mode 100644
index 0000000..2e34d6a
--- /dev/null
+++ b/python/gdata/youtube/client.py
@@ -0,0 +1,264 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Contains a client to communicate with the YouTube servers.
+
+ A quick and dirty port of the YouTube GDATA 1.0 Python client
+ libraries to version 2.0 of the GDATA library.
+
+"""
+
+# __author__ = 's.@google.com (John Skidgel)'
+
+import logging
+
+import gdata.client
+import gdata.youtube.data
+import atom.data
+import atom.http_core
+
+# Constants
+# -----------------------------------------------------------------------------
+YOUTUBE_CLIENTLOGIN_AUTHENTICATION_URL = 'https://www.google.com/youtube/accounts/ClientLogin'
+YOUTUBE_SUPPORTED_UPLOAD_TYPES = ('mov', 'avi', 'wmv', 'mpg', 'quicktime',
+ 'flv')
+YOUTUBE_QUERY_VALID_TIME_PARAMETERS = ('today', 'this_week', 'this_month',
+ 'all_time')
+YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS = ('published', 'viewCount', 'rating',
+ 'relevance')
+YOUTUBE_QUERY_VALID_RACY_PARAMETERS = ('include', 'exclude')
+YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS = ('1', '5', '6')
+YOUTUBE_STANDARDFEEDS = ('most_recent', 'recently_featured',
+ 'top_rated', 'most_viewed','watch_on_mobile')
+
+YOUTUBE_UPLOAD_TOKEN_URI = 'http://gdata.youtube.com/action/GetUploadToken'
+YOUTUBE_SERVER = 'gdata.youtube.com/feeds/api'
+YOUTUBE_SERVICE = 'youtube'
+YOUTUBE_VIDEO_FEED_URI = 'http://%s/videos' % YOUTUBE_SERVER
+YOUTUBE_USER_FEED_URI = 'http://%s/users/' % YOUTUBE_SERVER
+
+# Takes a youtube video ID.
+YOUTUBE_CAPTION_FEED_URI = 'http://gdata.youtube.com/feeds/api/videos/%s/captions'
+
+# Takes a youtube video ID and a caption track ID.
+YOUTUBE_CAPTION_URI = 'http://gdata.youtube.com/feeds/api/videos/%s/captiondata/%s'
+
+YOUTUBE_CAPTION_MIME_TYPE = 'application/vnd.youtube.timedtext; charset=UTF-8'
+
+
+# Classes
+# -----------------------------------------------------------------------------
+class Error(Exception):
+ """Base class for errors within the YouTube service."""
+ pass
+
+
+class RequestError(Error):
+ """Error class that is thrown in response to an invalid HTTP Request."""
+ pass
+
+
+class YouTubeError(Error):
+ """YouTube service specific error class."""
+ pass
+
+
+class YouTubeClient(gdata.client.GDClient):
+ """Client for the YouTube service.
+
+ Performs a partial list of Google Data YouTube API functions, such as
+ retrieving the videos feed for a user and the feed for a video.
+ YouTube Service requires authentication for any write, update or delete
+ actions.
+ """
+ api_version = '2'
+ auth_service = YOUTUBE_SERVICE
+ auth_scopes = ['http://%s' % YOUTUBE_SERVER, 'https://%s' % YOUTUBE_SERVER]
+
+ def get_videos(self, uri=YOUTUBE_VIDEO_FEED_URI, auth_token=None,
+ desired_class=gdata.youtube.data.VideoFeed,
+ **kwargs):
+ """Retrieves a YouTube video feed.
+ Args:
+ uri: A string representing the URI of the feed that is to be retrieved.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.get_feed(uri, auth_token=auth_token,
+ desired_class=desired_class,
+ **kwargs)
+
+ GetVideos = get_videos
+
+
+ def get_user_feed(self, uri=None, username=None):
+ """Retrieve a YouTubeVideoFeed of user uploaded videos.
+
+ Either a uri or a username must be provided. This will retrieve list
+ of videos uploaded by specified user. The uri will be of format
+ "http://gdata.youtube.com/feeds/api/users/{username}/uploads".
+
+ Args:
+ uri: An optional string representing the URI of the user feed that is
+ to be retrieved.
+ username: An optional string representing the username.
+
+ Returns:
+      A gdata.youtube.data.VideoFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a username to the
+ GetYouTubeUserFeed() method.
+ """
+ if uri is None and username is None:
+ raise YouTubeError('You must provide at least a uri or a username '
+ 'to the GetYouTubeUserFeed() method')
+ elif username and not uri:
+ uri = '%s%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'uploads')
+ return self.get_feed(uri, desired_class=gdata.youtube.data.VideoFeed)
+
+ GetUserFeed = get_user_feed
+
+
+ def get_video_entry(self, uri=None, video_id=None,
+ auth_token=None, **kwargs):
+ """Retrieve a YouTubeVideoEntry.
+
+ Either a uri or a video_id must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the entry that is to
+ be retrieved.
+ video_id: An optional string representing the ID of the video.
+
+ Returns:
+      A gdata.youtube.data.VideoEntry if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a video_id to the
+        get_video_entry() method.
+ """
+ if uri is None and video_id is None:
+ raise YouTubeError('You must provide at least a uri or a video_id '
+                         'to the get_video_entry() method')
+ elif video_id and uri is None:
+ uri = '%s/%s' % (YOUTUBE_VIDEO_FEED_URI, video_id)
+ return self.get_feed(uri,
+ desired_class=gdata.youtube.data.VideoEntry,
+ auth_token=auth_token,
+ **kwargs)
+
+ GetVideoEntry = get_video_entry
+
+
+ def get_caption_feed(self, uri):
+ """Retrieve a Caption feed of tracks.
+
+ Args:
+ uri: A string representing the caption feed's URI to be retrieved.
+
+ Returns:
+ A YouTube CaptionFeed if successfully retrieved.
+ """
+ return self.get_feed(uri, desired_class=gdata.youtube.data.CaptionFeed)
+
+ GetCaptionFeed = get_caption_feed
+
+ def get_caption_track(self, track_url, client_id,
+ developer_key, auth_token=None, **kwargs):
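+    """Retrieves the caption track data at the given track URL.
+
+    Args:
+      track_url: str URL of the caption track content to download.
+      client_id: str Client id string, sent as the X-GData-Client header.
+      developer_key: str Developer key, sent as the X-GData-Key header.
+      auth_token: An optional AuthSub token string used to build the
+        Authorization header.
+
+    Returns:
+      The server's response to the GET request.
+    """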
+ http_request = atom.http_core.HttpRequest(uri = track_url, method = 'GET')
+ dev_key = 'key=' + developer_key
+ authsub = 'AuthSub token="' + str(auth_token) + '"'
+ http_request.headers = {
+ 'Authorization': authsub,
+ 'X-GData-Client': client_id,
+ 'X-GData-Key': dev_key
+ }
+ return self.request(http_request=http_request, **kwargs)
+
+ GetCaptionTrack = get_caption_track
+
+ def create_track(self, video_id, title, language, body, client_id,
+ developer_key, auth_token=None, title_type='text', **kwargs):
+ """Creates a closed-caption track and adds to an existing YouTube video.
+ """
+ new_entry = gdata.youtube.data.TrackEntry(
+ content = gdata.youtube.data.TrackContent(text = body, lang = language))
+ uri = YOUTUBE_CAPTION_FEED_URI % video_id
+ http_request = atom.http_core.HttpRequest(uri = uri, method = 'POST')
+ dev_key = 'key=' + developer_key
+ authsub = 'AuthSub token="' + str(auth_token) + '"'
+ http_request.headers = {
+ 'Content-Type': YOUTUBE_CAPTION_MIME_TYPE,
+ 'Content-Language': language,
+ 'Slug': title,
+ 'Authorization': authsub,
+ 'GData-Version': self.api_version,
+ 'X-GData-Client': client_id,
+ 'X-GData-Key': dev_key
+ }
+ http_request.add_body_part(body, http_request.headers['Content-Type'])
+ return self.request(http_request = http_request,
+ desired_class = new_entry.__class__, **kwargs)
+
+
+ CreateTrack = create_track
+
+ def delete_track(self, video_id, track, client_id, developer_key,
+ auth_token=None, **kwargs):
+ """Deletes a track."""
+ if isinstance(track, gdata.youtube.data.TrackEntry):
+ track_id_text_node = track.get_id().split(':')
+ track_id = track_id_text_node[3]
+ else:
+ track_id = track
+ uri = YOUTUBE_CAPTION_URI % (video_id, track_id)
+ http_request = atom.http_core.HttpRequest(uri = uri, method = 'DELETE')
+ dev_key = 'key=' + developer_key
+ authsub = 'AuthSub token="' + str(auth_token) + '"'
+ http_request.headers = {
+ 'Authorization': authsub,
+ 'GData-Version': self.api_version,
+ 'X-GData-Client': client_id,
+ 'X-GData-Key': dev_key
+ }
+ return self.request(http_request=http_request, **kwargs)
+
+ DeleteTrack = delete_track
+
+ def update_track(self, video_id, track, body, client_id, developer_key,
+ auth_token=None, **kwargs):
+ """Updates a closed-caption track for an existing YouTube video.
+ """
+ track_id_text_node = track.get_id().split(':')
+ track_id = track_id_text_node[3]
+ uri = YOUTUBE_CAPTION_URI % (video_id, track_id)
+ http_request = atom.http_core.HttpRequest(uri = uri, method = 'PUT')
+ dev_key = 'key=' + developer_key
+ authsub = 'AuthSub token="' + str(auth_token) + '"'
+ http_request.headers = {
+ 'Content-Type': YOUTUBE_CAPTION_MIME_TYPE,
+ 'Authorization': authsub,
+ 'GData-Version': self.api_version,
+ 'X-GData-Client': client_id,
+ 'X-GData-Key': dev_key
+ }
+ http_request.add_body_part(body, http_request.headers['Content-Type'])
+ return self.request(http_request = http_request,
+ desired_class = track.__class__, **kwargs)
+
+ UpdateTrack = update_track
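A read-only sketch of the v2 client above; the username and video id are placeholders:

import gdata.youtube.client

yt = gdata.youtube.client.YouTubeClient()

# Uploads feed for a user; entries are gdata.youtube.data.VideoEntry objects.
feed = yt.get_user_feed(username='someuser')     # placeholder username
for entry in feed.entry:
  print entry.title.text

video = yt.get_video_entry(video_id='VIDEO_ID')  # placeholder video id
print video.get_id()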
diff --git a/python/gdata/youtube/data.py b/python/gdata/youtube/data.py
new file mode 100644
index 0000000..4ef2d62
--- /dev/null
+++ b/python/gdata/youtube/data.py
@@ -0,0 +1,502 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Contains the data classes of the YouTube Data API"""
+
+
+__author__ = 'j.s@google.com (Jeff Scudder)'
+
+
+import atom.core
+import atom.data
+import gdata.data
+import gdata.geo.data
+import gdata.media.data
+import gdata.opensearch.data
+import gdata.youtube.data
+
+
+YT_TEMPLATE = '{http://gdata.youtube.com/schemas/2007/}%s'
+
+
+class ComplaintEntry(gdata.data.GDEntry):
+ """Describes a complaint about a video"""
+
+
+class ComplaintFeed(gdata.data.GDFeed):
+ """Describes complaints about a video"""
+ entry = [ComplaintEntry]
+
+
+class RatingEntry(gdata.data.GDEntry):
+ """A rating about a video"""
+ rating = gdata.data.Rating
+
+
+class RatingFeed(gdata.data.GDFeed):
+ """Describes ratings for a video"""
+ entry = [RatingEntry]
+
+
+class YouTubeMediaContent(gdata.media.data.MediaContent):
+ """Describes a you tube media content"""
+ _qname = gdata.media.data.MEDIA_TEMPLATE % 'content'
+ format = 'format'
+
+
+class YtAge(atom.core.XmlElement):
+ """User's age"""
+ _qname = YT_TEMPLATE % 'age'
+
+
+class YtBooks(atom.core.XmlElement):
+ """User's favorite books"""
+ _qname = YT_TEMPLATE % 'books'
+
+
+class YtCompany(atom.core.XmlElement):
+ """User's company"""
+ _qname = YT_TEMPLATE % 'company'
+
+
+class YtDescription(atom.core.XmlElement):
+ """Description"""
+ _qname = YT_TEMPLATE % 'description'
+
+
+class YtDuration(atom.core.XmlElement):
+ """Video duration"""
+ _qname = YT_TEMPLATE % 'duration'
+ seconds = 'seconds'
+
+
+class YtFirstName(atom.core.XmlElement):
+ """User's first name"""
+ _qname = YT_TEMPLATE % 'firstName'
+
+
+class YtGender(atom.core.XmlElement):
+ """User's gender"""
+ _qname = YT_TEMPLATE % 'gender'
+
+
+class YtHobbies(atom.core.XmlElement):
+ """User's hobbies"""
+ _qname = YT_TEMPLATE % 'hobbies'
+
+
+class YtHometown(atom.core.XmlElement):
+ """User's hometown"""
+ _qname = YT_TEMPLATE % 'hometown'
+
+
+class YtLastName(atom.core.XmlElement):
+ """User's last name"""
+ _qname = YT_TEMPLATE % 'lastName'
+
+
+class YtLocation(atom.core.XmlElement):
+ """Location"""
+ _qname = YT_TEMPLATE % 'location'
+
+
+class YtMovies(atom.core.XmlElement):
+ """User's favorite movies"""
+ _qname = YT_TEMPLATE % 'movies'
+
+
+class YtMusic(atom.core.XmlElement):
+ """User's favorite music"""
+ _qname = YT_TEMPLATE % 'music'
+
+
+class YtNoEmbed(atom.core.XmlElement):
+ """Disables embedding for the video"""
+ _qname = YT_TEMPLATE % 'noembed'
+
+
+class YtOccupation(atom.core.XmlElement):
+ """User's occupation"""
+ _qname = YT_TEMPLATE % 'occupation'
+
+
+class YtPlaylistId(atom.core.XmlElement):
+ """Playlist id"""
+ _qname = YT_TEMPLATE % 'playlistId'
+
+
+class YtPosition(atom.core.XmlElement):
+ """Video position on the playlist"""
+ _qname = YT_TEMPLATE % 'position'
+
+
+class YtPrivate(atom.core.XmlElement):
+ """Flags the entry as private"""
+ _qname = YT_TEMPLATE % 'private'
+
+
+class YtQueryString(atom.core.XmlElement):
+ """Keywords or query string associated with a subscription"""
+ _qname = YT_TEMPLATE % 'queryString'
+
+
+class YtRacy(atom.core.XmlElement):
+ """Mature content"""
+ _qname = YT_TEMPLATE % 'racy'
+
+
+class YtRecorded(atom.core.XmlElement):
+ """Date when the video was recorded"""
+ _qname = YT_TEMPLATE % 'recorded'
+
+
+class YtRelationship(atom.core.XmlElement):
+ """User's relationship status"""
+ _qname = YT_TEMPLATE % 'relationship'
+
+
+class YtSchool(atom.core.XmlElement):
+ """User's school"""
+ _qname = YT_TEMPLATE % 'school'
+
+
+class YtStatistics(atom.core.XmlElement):
+ """Video and user statistics"""
+ _qname = YT_TEMPLATE % 'statistics'
+ favorite_count = 'favoriteCount'
+ video_watch_count = 'videoWatchCount'
+ view_count = 'viewCount'
+ last_web_access = 'lastWebAccess'
+ subscriber_count = 'subscriberCount'
+
+
+class YtStatus(atom.core.XmlElement):
+ """Status of a contact"""
+ _qname = YT_TEMPLATE % 'status'
+
+
+class YtUserProfileStatistics(YtStatistics):
+ """User statistics"""
+ _qname = YT_TEMPLATE % 'statistics'
+
+
+class YtUsername(atom.core.XmlElement):
+ """Youtube username"""
+ _qname = YT_TEMPLATE % 'username'
+
+
+class FriendEntry(gdata.data.BatchEntry):
+ """Describes a contact in friend list"""
+ username = YtUsername
+ status = YtStatus
+ email = gdata.data.Email
+
+
+class FriendFeed(gdata.data.BatchFeed):
+ """Describes user's friends"""
+ entry = [FriendEntry]
+
+
+class YtVideoStatistics(YtStatistics):
+ """Video statistics"""
+ _qname = YT_TEMPLATE % 'statistics'
+
+
+class ChannelEntry(gdata.data.GDEntry):
+ """Describes a video channel"""
+
+
+class ChannelFeed(gdata.data.GDFeed):
+ """Describes channels"""
+ entry = [ChannelEntry]
+
+
+class FavoriteEntry(gdata.data.BatchEntry):
+ """Describes a favorite video"""
+
+
+class FavoriteFeed(gdata.data.BatchFeed):
+ """Describes favorite videos"""
+ entry = [FavoriteEntry]
+
+
+class YouTubeMediaCredit(gdata.media.data.MediaCredit):
+ """Describes a you tube media credit"""
+ _qname = gdata.media.data.MEDIA_TEMPLATE % 'credit'
+ type = 'type'
+
+
+class YouTubeMediaRating(gdata.media.data.MediaRating):
+ """Describes a you tube media rating"""
+ _qname = gdata.media.data.MEDIA_TEMPLATE % 'rating'
+ country = 'country'
+
+
+class YtAboutMe(atom.core.XmlElement):
+ """User's self description"""
+ _qname = YT_TEMPLATE % 'aboutMe'
+
+
+class UserProfileEntry(gdata.data.BatchEntry):
+ """Describes an user's profile"""
+ relationship = YtRelationship
+ description = YtDescription
+ location = YtLocation
+ statistics = YtUserProfileStatistics
+ school = YtSchool
+ music = YtMusic
+ first_name = YtFirstName
+ gender = YtGender
+ occupation = YtOccupation
+ hometown = YtHometown
+ company = YtCompany
+ movies = YtMovies
+ books = YtBooks
+ username = YtUsername
+ about_me = YtAboutMe
+ last_name = YtLastName
+ age = YtAge
+ thumbnail = gdata.media.data.MediaThumbnail
+ hobbies = YtHobbies
+
+
+class UserProfileFeed(gdata.data.BatchFeed):
+ """Describes a feed of user's profile"""
+ entry = [UserProfileEntry]
+
+
+class YtAspectRatio(atom.core.XmlElement):
+ """The aspect ratio of a media file"""
+ _qname = YT_TEMPLATE % 'aspectRatio'
+
+
+class YtBasePublicationState(atom.core.XmlElement):
+ """Status of an unpublished entry"""
+ _qname = YT_TEMPLATE % 'state'
+ help_url = 'helpUrl'
+
+
+class YtPublicationState(YtBasePublicationState):
+ """Status of an unpublished video"""
+ _qname = YT_TEMPLATE % 'state'
+ name = 'name'
+ reason_code = 'reasonCode'
+
+
+class YouTubeAppControl(atom.data.Control):
+ """Describes a you tube app control"""
+ _qname = (atom.data.APP_TEMPLATE_V1 % 'control',
+ atom.data.APP_TEMPLATE_V2 % 'control')
+ state = YtPublicationState
+
+
+class YtCaptionPublicationState(YtBasePublicationState):
+ """Status of an unpublished caption track"""
+ _qname = YT_TEMPLATE % 'state'
+ reason_code = 'reasonCode'
+ name = 'name'
+
+
+class YouTubeCaptionAppControl(atom.data.Control):
+ """Describes a you tube caption app control"""
+ _qname = atom.data.APP_TEMPLATE_V2 % 'control'
+ state = YtCaptionPublicationState
+
+
+class CaptionTrackEntry(gdata.data.GDEntry):
+ """Describes a caption track"""
+
+
+class CaptionTrackFeed(gdata.data.GDFeed):
+ """Describes caption tracks"""
+ entry = [CaptionTrackEntry]
+
+
+class YtCountHint(atom.core.XmlElement):
+ """Hint as to how many entries the linked feed contains"""
+ _qname = YT_TEMPLATE % 'countHint'
+
+
+class PlaylistLinkEntry(gdata.data.BatchEntry):
+ """Describes a playlist"""
+ description = YtDescription
+ playlist_id = YtPlaylistId
+ count_hint = YtCountHint
+ private = YtPrivate
+
+
+class PlaylistLinkFeed(gdata.data.BatchFeed):
+ """Describes list of playlists"""
+ entry = [PlaylistLinkEntry]
+
+
+class YtModerationStatus(atom.core.XmlElement):
+ """Moderation status"""
+ _qname = YT_TEMPLATE % 'moderationStatus'
+
+
+class YtPlaylistTitle(atom.core.XmlElement):
+ """Playlist title"""
+ _qname = YT_TEMPLATE % 'playlistTitle'
+
+
+class SubscriptionEntry(gdata.data.BatchEntry):
+ """Describes user's channel subscritpions"""
+ count_hint = YtCountHint
+ playlist_title = YtPlaylistTitle
+ thumbnail = gdata.media.data.MediaThumbnail
+ username = YtUsername
+ query_string = YtQueryString
+ playlist_id = YtPlaylistId
+
+
+class SubscriptionFeed(gdata.data.BatchFeed):
+ """Describes list of user's video subscriptions"""
+ entry = [SubscriptionEntry]
+
+
+class YtSpam(atom.core.XmlElement):
+ """Indicates that the entry probably contains spam"""
+ _qname = YT_TEMPLATE % 'spam'
+
+
+class CommentEntry(gdata.data.BatchEntry):
+ """Describes a comment for a video"""
+ spam = YtSpam
+
+
+class CommentFeed(gdata.data.BatchFeed):
+ """Describes comments for a video"""
+ entry = [CommentEntry]
+
+
+class YtUploaded(atom.core.XmlElement):
+ """Date/Time at which the video was uploaded"""
+ _qname = YT_TEMPLATE % 'uploaded'
+
+
+class YtVideoId(atom.core.XmlElement):
+ """Video id"""
+ _qname = YT_TEMPLATE % 'videoid'
+
+
+class YouTubeMediaGroup(gdata.media.data.MediaGroup):
+ """Describes a you tube media group"""
+ _qname = gdata.media.data.MEDIA_TEMPLATE % 'group'
+ videoid = YtVideoId
+ private = YtPrivate
+ duration = YtDuration
+ aspect_ratio = YtAspectRatio
+ uploaded = YtUploaded
+
+
+class VideoEntryBase(gdata.data.GDEntry):
+ """Elements that describe or contain videos"""
+ group = YouTubeMediaGroup
+ statistics = YtVideoStatistics
+ racy = YtRacy
+ recorded = YtRecorded
+ where = gdata.geo.data.GeoRssWhere
+ rating = gdata.data.Rating
+ noembed = YtNoEmbed
+ location = YtLocation
+ comments = gdata.data.Comments
+
+
+class PlaylistEntry(gdata.data.BatchEntry):
+ """Describes a video in a playlist"""
+ description = YtDescription
+ position = YtPosition
+
+
+class PlaylistFeed(gdata.data.BatchFeed):
+ """Describes videos in a playlist"""
+ private = YtPrivate
+ group = YouTubeMediaGroup
+ playlist_id = YtPlaylistId
+ entry = [PlaylistEntry]
+
+
+class VideoEntry(gdata.data.BatchEntry):
+ """Describes a video"""
+
+
+class VideoFeed(gdata.data.BatchFeed):
+ """Describes a video feed"""
+ entry = [VideoEntry]
+
+
+class VideoMessageEntry(gdata.data.BatchEntry):
+ """Describes a video message"""
+ description = YtDescription
+
+
+class VideoMessageFeed(gdata.data.BatchFeed):
+ """Describes videos in a videoMessage"""
+ entry = [VideoMessageEntry]
+
+
+class UserEventEntry(gdata.data.GDEntry):
+ """Describes a user event"""
+ playlist_id = YtPlaylistId
+ videoid = YtVideoId
+ username = YtUsername
+ query_string = YtQueryString
+ rating = gdata.data.Rating
+
+
+class UserEventFeed(gdata.data.GDFeed):
+ """Describes list of events"""
+ entry = [UserEventEntry]
+
+
+class VideoModerationEntry(gdata.data.GDEntry):
+ """Describes video moderation"""
+ moderation_status = YtModerationStatus
+ videoid = YtVideoId
+
+
+class VideoModerationFeed(gdata.data.GDFeed):
+ """Describes a video moderation feed"""
+ entry = [VideoModerationEntry]
+
+
+class TrackContent(atom.data.Content):
+ lang = atom.data.XML_TEMPLATE % 'lang'
+
+
+class TrackEntry(gdata.data.GDEntry):
+ """Represents the URL for a caption track"""
+ content = TrackContent
+
+ def get_caption_track_id(self):
+ """Extracts the ID of this caption track.
+ Returns:
+ The caption track's id as a string.
+ """
+ if self.id.text:
+ match = CAPTION_TRACK_ID_PATTERN.match(self.id.text)
+ if match:
+ return match.group(2)
+ return None
+
+ GetCaptionTrackId = get_caption_track_id
+
+
+class CaptionFeed(gdata.data.GDFeed):
+ """Represents a caption feed for a video on YouTube."""
+ entry = [TrackEntry]
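The classes above are declarative atom.core wrappers: each class attribute names an XML child element or attribute, and atom.core.parse fills them in. A small sketch using a hand-written profile fragment (the yt namespace string mirrors YT_TEMPLATE above; real profile entries carry many more children):

import atom.core
import gdata.youtube.data

PROFILE_XML = """<entry xmlns='http://www.w3.org/2005/Atom'
    xmlns:yt='http://gdata.youtube.com/schemas/2007/'>
  <yt:username>someuser</yt:username>
  <yt:statistics viewCount='7' subscriberCount='2'/>
</entry>"""

profile = atom.core.parse(PROFILE_XML, gdata.youtube.data.UserProfileEntry)
print profile.username.text          # 'someuser'
print profile.statistics.view_count  # '7'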
diff --git a/python/gdata/youtube/service.py b/python/gdata/youtube/service.py
new file mode 100644
index 0000000..1d8a80c
--- /dev/null
+++ b/python/gdata/youtube/service.py
@@ -0,0 +1,1563 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""YouTubeService extends GDataService to streamline YouTube operations.
+
+ YouTubeService: Provides methods to perform CRUD operations on YouTube feeds.
+ Extends GDataService.
+"""
+
+__author__ = ('api.stephaniel@gmail.com (Stephanie Liu), '
+ 'api.jhartmann@gmail.com (Jochen Hartmann)')
+
+try:
+ from xml.etree import cElementTree as ElementTree
+except ImportError:
+ try:
+ import cElementTree as ElementTree
+ except ImportError:
+ try:
+ from xml.etree import ElementTree
+ except ImportError:
+ from elementtree import ElementTree
+import os
+import atom
+import gdata
+import gdata.service
+import gdata.youtube
+
+YOUTUBE_SERVER = 'gdata.youtube.com'
+YOUTUBE_SERVICE = 'youtube'
+YOUTUBE_CLIENTLOGIN_AUTHENTICATION_URL = 'https://www.google.com/youtube/accounts/ClientLogin'
+YOUTUBE_SUPPORTED_UPLOAD_TYPES = ('mov', 'avi', 'wmv', 'mpg', 'quicktime',
+ 'flv', 'mp4', 'x-flv')
+YOUTUBE_QUERY_VALID_TIME_PARAMETERS = ('today', 'this_week', 'this_month',
+ 'all_time')
+YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS = ('published', 'viewCount', 'rating',
+ 'relevance')
+YOUTUBE_QUERY_VALID_RACY_PARAMETERS = ('include', 'exclude')
+YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS = ('1', '5', '6')
+YOUTUBE_STANDARDFEEDS = ('most_recent', 'recently_featured',
+ 'top_rated', 'most_viewed','watch_on_mobile')
+YOUTUBE_UPLOAD_URI = 'http://uploads.gdata.youtube.com/feeds/api/users'
+YOUTUBE_UPLOAD_TOKEN_URI = 'http://gdata.youtube.com/action/GetUploadToken'
+YOUTUBE_VIDEO_URI = 'http://gdata.youtube.com/feeds/api/videos'
+YOUTUBE_USER_FEED_URI = 'http://gdata.youtube.com/feeds/api/users'
+YOUTUBE_PLAYLIST_FEED_URI = 'http://gdata.youtube.com/feeds/api/playlists'
+
+YOUTUBE_STANDARD_FEEDS = 'http://gdata.youtube.com/feeds/api/standardfeeds'
+YOUTUBE_STANDARD_TOP_RATED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS, 'top_rated')
+YOUTUBE_STANDARD_MOST_VIEWED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'most_viewed')
+YOUTUBE_STANDARD_RECENTLY_FEATURED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'recently_featured')
+YOUTUBE_STANDARD_WATCH_ON_MOBILE_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'watch_on_mobile')
+YOUTUBE_STANDARD_TOP_FAVORITES_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'top_favorites')
+YOUTUBE_STANDARD_MOST_RECENT_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'most_recent')
+YOUTUBE_STANDARD_MOST_DISCUSSED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'most_discussed')
+YOUTUBE_STANDARD_MOST_LINKED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'most_linked')
+YOUTUBE_STANDARD_MOST_RESPONDED_URI = '%s/%s' % (YOUTUBE_STANDARD_FEEDS,
+ 'most_responded')
+YOUTUBE_SCHEMA = 'http://gdata.youtube.com/schemas'
+
+YOUTUBE_RATING_LINK_REL = '%s#video.ratings' % YOUTUBE_SCHEMA
+
+YOUTUBE_COMPLAINT_CATEGORY_SCHEME = '%s/%s' % (YOUTUBE_SCHEMA,
+ 'complaint-reasons.cat')
+YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME = '%s/%s' % (YOUTUBE_SCHEMA,
+ 'subscriptiontypes.cat')
+
+YOUTUBE_COMPLAINT_CATEGORY_TERMS = ('PORN', 'VIOLENCE', 'HATE', 'DANGEROUS',
+ 'RIGHTS', 'SPAM')
+YOUTUBE_CONTACT_STATUS = ('accepted', 'rejected')
+YOUTUBE_CONTACT_CATEGORY = ('Friends', 'Family')
+
+UNKOWN_ERROR = 1000
+YOUTUBE_BAD_REQUEST = 400
+YOUTUBE_CONFLICT = 409
+YOUTUBE_INTERNAL_SERVER_ERROR = 500
+YOUTUBE_INVALID_ARGUMENT = 601
+YOUTUBE_INVALID_CONTENT_TYPE = 602
+YOUTUBE_NOT_A_VIDEO = 603
+YOUTUBE_INVALID_KIND = 604
+
+
+class Error(Exception):
+ """Base class for errors within the YouTube service."""
+ pass
+
+class RequestError(Error):
+ """Error class that is thrown in response to an invalid HTTP Request."""
+ pass
+
+class YouTubeError(Error):
+ """YouTube service specific error class."""
+ pass
+
+class YouTubeService(gdata.service.GDataService):
+
+ """Client for the YouTube service.
+
+  Performs all documented Google Data YouTube API functions, such as inserting,
+  updating and deleting videos, comments, playlists, subscriptions, etc.
+  The YouTube service requires authentication for any write, update or delete
+  actions.
+
+ Attributes:
+ email: An optional string identifying the user. Required only for
+ authenticated actions.
+ password: An optional string identifying the user's password.
+ source: An optional string identifying the name of your application.
+ server: An optional address of the YouTube API server. gdata.youtube.com
+ is provided as the default value.
+ additional_headers: An optional dictionary containing additional headers
+ to be passed along with each request. Use to store developer key.
+ client_id: An optional string identifying your application, required for
+ authenticated requests, along with a developer key.
+ developer_key: An optional string value. Register your application at
+ http://code.google.com/apis/youtube/dashboard to obtain a (free) key.
+ """
+
+ def __init__(self, email=None, password=None, source=None,
+ server=YOUTUBE_SERVER, additional_headers=None, client_id=None,
+ developer_key=None, **kwargs):
+ """Creates a client for the YouTube service.
+
+ Args:
+ email: string (optional) The user's email address, used for
+ authentication.
+ password: string (optional) The user's password.
+ source: string (optional) The name of the user's application.
+ server: string (optional) The name of the server to which a connection
+ will be opened. Default value: 'gdata.youtube.com'.
+ client_id: string (optional) Identifies your application, required for
+ authenticated requests, along with a developer key.
+ developer_key: string (optional) Register your application at
+ http://code.google.com/apis/youtube/dashboard to obtain a (free) key.
+ **kwargs: The other parameters to pass to gdata.service.GDataService
+ constructor.
+ """
+
+ gdata.service.GDataService.__init__(
+ self, email=email, password=password, service=YOUTUBE_SERVICE,
+ source=source, server=server, additional_headers=additional_headers,
+ **kwargs)
+
+ if client_id is not None:
+ self.additional_headers['X-Gdata-Client'] = client_id
+
+ if developer_key is not None:
+ self.additional_headers['X-GData-Key'] = 'key=%s' % developer_key
+
+ self.auth_service_url = YOUTUBE_CLIENTLOGIN_AUTHENTICATION_URL
+
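+  # Usage sketch (illustrative only, not part of the original module): create
+  # an authenticated client and fetch a standard feed. The e-mail, password,
+  # client id and developer key below are hypothetical placeholder values.
+  #
+  #   yt_service = YouTubeService(email='user@example.com', password='secret',
+  #                               source='example-app',
+  #                               client_id='example-client-id',
+  #                               developer_key='EXAMPLE_DEVELOPER_KEY')
+  #   yt_service.ProgrammaticLogin()
+  #   feed = yt_service.GetTopRatedVideoFeed()
+  #   for entry in feed.entry:
+  #     print entry.title.text
+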
+ def GetYouTubeVideoFeed(self, uri):
+ """Retrieve a YouTubeVideoFeed.
+
+ Args:
+ uri: A string representing the URI of the feed that is to be retrieved.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.Get(uri, converter=gdata.youtube.YouTubeVideoFeedFromString)
+
+ def GetYouTubeVideoEntry(self, uri=None, video_id=None):
+ """Retrieve a YouTubeVideoEntry.
+
+ Either a uri or a video_id must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the entry that is to
+ be retrieved.
+ video_id: An optional string representing the ID of the video.
+
+ Returns:
+      A YouTubeVideoEntry if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a video_id to the
+ GetYouTubeVideoEntry() method.
+ """
+ if uri is None and video_id is None:
+ raise YouTubeError('You must provide at least a uri or a video_id '
+ 'to the GetYouTubeVideoEntry() method')
+ elif video_id and not uri:
+ uri = '%s/%s' % (YOUTUBE_VIDEO_URI, video_id)
+ return self.Get(uri, converter=gdata.youtube.YouTubeVideoEntryFromString)
+
+ def GetYouTubeContactFeed(self, uri=None, username='default'):
+ """Retrieve a YouTubeContactFeed.
+
+ Either a uri or a username must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the contact feed that
+ is to be retrieved.
+ username: An optional string representing the username. Defaults to the
+ currently authenticated user.
+
+ Returns:
+ A YouTubeContactFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a username to the
+ GetYouTubeContactFeed() method.
+ """
+ if uri is None:
+ uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'contacts')
+ return self.Get(uri, converter=gdata.youtube.YouTubeContactFeedFromString)
+
+ def GetYouTubeContactEntry(self, uri):
+ """Retrieve a YouTubeContactEntry.
+
+ Args:
+ uri: A string representing the URI of the contact entry that is to
+ be retrieved.
+
+ Returns:
+ A YouTubeContactEntry if successfully retrieved.
+ """
+ return self.Get(uri, converter=gdata.youtube.YouTubeContactEntryFromString)
+
+ def GetYouTubeVideoCommentFeed(self, uri=None, video_id=None):
+ """Retrieve a YouTubeVideoCommentFeed.
+
+ Either a uri or a video_id must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the comment feed that
+ is to be retrieved.
+ video_id: An optional string representing the ID of the video for which
+ to retrieve the comment feed.
+
+ Returns:
+ A YouTubeVideoCommentFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a video_id to the
+ GetYouTubeVideoCommentFeed() method.
+ """
+ if uri is None and video_id is None:
+ raise YouTubeError('You must provide at least a uri or a video_id '
+ 'to the GetYouTubeVideoCommentFeed() method')
+ elif video_id and not uri:
+ uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'comments')
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeVideoCommentFeedFromString)
+
+ def GetYouTubeVideoCommentEntry(self, uri):
+ """Retrieve a YouTubeVideoCommentEntry.
+
+ Args:
+ uri: A string representing the URI of the comment entry that is to
+ be retrieved.
+
+ Returns:
+      A YouTubeVideoCommentEntry if successfully retrieved.
+ """
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeVideoCommentEntryFromString)
+
+ def GetYouTubeUserFeed(self, uri=None, username=None):
+ """Retrieve a YouTubeVideoFeed of user uploaded videos
+
+ Either a uri or a username must be provided. This will retrieve list
+ of videos uploaded by specified user. The uri will be of format
+ "http://gdata.youtube.com/feeds/api/users/{username}/uploads".
+
+ Args:
+ uri: An optional string representing the URI of the user feed that is
+ to be retrieved.
+ username: An optional string representing the username.
+
+ Returns:
+ A YouTubeUserFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a username to the
+ GetYouTubeUserFeed() method.
+ """
+ if uri is None and username is None:
+ raise YouTubeError('You must provide at least a uri or a username '
+ 'to the GetYouTubeUserFeed() method')
+ elif username and not uri:
+ uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'uploads')
+ return self.Get(uri, converter=gdata.youtube.YouTubeUserFeedFromString)
+
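+  # Usage sketch (illustrative only; 'exampleuser' is a hypothetical username
+  # and yt_service an already constructed YouTubeService instance):
+  #
+  #   uploads = yt_service.GetYouTubeUserFeed(username='exampleuser')
+  #   for entry in uploads.entry:
+  #     print entry.title.text
+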
+ def GetYouTubeUserEntry(self, uri=None, username=None):
+ """Retrieve a YouTubeUserEntry.
+
+ Either a uri or a username must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the user entry that is
+ to be retrieved.
+ username: An optional string representing the username.
+
+ Returns:
+ A YouTubeUserEntry if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a username to the
+ GetYouTubeUserEntry() method.
+ """
+ if uri is None and username is None:
+ raise YouTubeError('You must provide at least a uri or a username '
+ 'to the GetYouTubeUserEntry() method')
+ elif username and not uri:
+ uri = '%s/%s' % (YOUTUBE_USER_FEED_URI, username)
+ return self.Get(uri, converter=gdata.youtube.YouTubeUserEntryFromString)
+
+ def GetYouTubePlaylistFeed(self, uri=None, username='default'):
+ """Retrieve a YouTubePlaylistFeed (a feed of playlists for a user).
+
+ Either a uri or a username must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the playlist feed that
+ is to be retrieved.
+ username: An optional string representing the username. Defaults to the
+ currently authenticated user.
+
+ Returns:
+ A YouTubePlaylistFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a username to the
+ GetYouTubePlaylistFeed() method.
+ """
+ if uri is None:
+ uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'playlists')
+ return self.Get(uri, converter=gdata.youtube.YouTubePlaylistFeedFromString)
+
+ def GetYouTubePlaylistEntry(self, uri):
+ """Retrieve a YouTubePlaylistEntry.
+
+ Args:
+ uri: A string representing the URI of the playlist feed that is to
+ be retrieved.
+
+ Returns:
+ A YouTubePlaylistEntry if successfully retrieved.
+ """
+ return self.Get(uri, converter=gdata.youtube.YouTubePlaylistEntryFromString)
+
+ def GetYouTubePlaylistVideoFeed(self, uri=None, playlist_id=None):
+ """Retrieve a YouTubePlaylistVideoFeed (a feed of videos on a playlist).
+
+ Either a uri or a playlist_id must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the playlist video feed
+ that is to be retrieved.
+      playlist_id: An optional string representing the ID of the playlist whose
+ playlist video feed is to be retrieved.
+
+ Returns:
+ A YouTubePlaylistVideoFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a playlist_id to the
+ GetYouTubePlaylistVideoFeed() method.
+ """
+ if uri is None and playlist_id is None:
+ raise YouTubeError('You must provide at least a uri or a playlist_id '
+ 'to the GetYouTubePlaylistVideoFeed() method')
+ elif playlist_id and not uri:
+ uri = '%s/%s' % (YOUTUBE_PLAYLIST_FEED_URI, playlist_id)
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubePlaylistVideoFeedFromString)
+
+ def GetYouTubeVideoResponseFeed(self, uri=None, video_id=None):
+ """Retrieve a YouTubeVideoResponseFeed.
+
+    Either a uri or a video_id must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the video response feed
+ that is to be retrieved.
+ video_id: An optional string representing the ID of the video whose
+ response feed is to be retrieved.
+
+ Returns:
+ A YouTubeVideoResponseFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a video_id to the
+ GetYouTubeVideoResponseFeed() method.
+ """
+ if uri is None and video_id is None:
+ raise YouTubeError('You must provide at least a uri or a video_id '
+ 'to the GetYouTubeVideoResponseFeed() method')
+ elif video_id and not uri:
+ uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'responses')
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeVideoResponseFeedFromString)
+
+ def GetYouTubeVideoResponseEntry(self, uri):
+ """Retrieve a YouTubeVideoResponseEntry.
+
+ Args:
+ uri: A string representing the URI of the video response entry that
+ is to be retrieved.
+
+ Returns:
+ A YouTubeVideoResponseEntry if successfully retrieved.
+ """
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeVideoResponseEntryFromString)
+
+ def GetYouTubeSubscriptionFeed(self, uri=None, username='default'):
+ """Retrieve a YouTubeSubscriptionFeed.
+
+ Either the uri of the feed or a username must be provided.
+
+ Args:
+ uri: An optional string representing the URI of the feed that is to
+ be retrieved.
+ username: An optional string representing the username whose subscription
+        feed is to be retrieved. Defaults to the currently authenticated user.
+
+ Returns:
+      A YouTubeSubscriptionFeed if successfully retrieved.
+ """
+ if uri is None:
+ uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'subscriptions')
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeSubscriptionFeedFromString)
+
+ def GetYouTubeSubscriptionEntry(self, uri):
+ """Retrieve a YouTubeSubscriptionEntry.
+
+ Args:
+ uri: A string representing the URI of the entry that is to be retrieved.
+
+ Returns:
+      A YouTubeSubscriptionEntry if successfully retrieved.
+ """
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
+
+ def GetYouTubeRelatedVideoFeed(self, uri=None, video_id=None):
+ """Retrieve a YouTubeRelatedVideoFeed.
+
+ Either a uri for the feed or a video_id is required.
+
+ Args:
+ uri: An optional string representing the URI of the feed that is to
+ be retrieved.
+ video_id: An optional string representing the ID of the video for which
+ to retrieve the related video feed.
+
+ Returns:
+ A YouTubeRelatedVideoFeed if successfully retrieved.
+
+ Raises:
+ YouTubeError: You must provide at least a uri or a video_id to the
+ GetYouTubeRelatedVideoFeed() method.
+ """
+ if uri is None and video_id is None:
+ raise YouTubeError('You must provide at least a uri or a video_id '
+ 'to the GetYouTubeRelatedVideoFeed() method')
+ elif video_id and not uri:
+ uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'related')
+ return self.Get(
+ uri, converter=gdata.youtube.YouTubeVideoFeedFromString)
+
+ def GetTopRatedVideoFeed(self):
+ """Retrieve the 'top_rated' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_TOP_RATED_URI)
+
+ def GetMostViewedVideoFeed(self):
+ """Retrieve the 'most_viewed' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_VIEWED_URI)
+
+ def GetRecentlyFeaturedVideoFeed(self):
+ """Retrieve the 'recently_featured' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_RECENTLY_FEATURED_URI)
+
+ def GetWatchOnMobileVideoFeed(self):
+ """Retrieve the 'watch_on_mobile' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_WATCH_ON_MOBILE_URI)
+
+ def GetTopFavoritesVideoFeed(self):
+ """Retrieve the 'top_favorites' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_TOP_FAVORITES_URI)
+
+ def GetMostRecentVideoFeed(self):
+ """Retrieve the 'most_recent' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_RECENT_URI)
+
+ def GetMostDiscussedVideoFeed(self):
+ """Retrieve the 'most_discussed' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_DISCUSSED_URI)
+
+ def GetMostLinkedVideoFeed(self):
+ """Retrieve the 'most_linked' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_LINKED_URI)
+
+ def GetMostRespondedVideoFeed(self):
+ """Retrieve the 'most_responded' standard video feed.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ return self.GetYouTubeVideoFeed(YOUTUBE_STANDARD_MOST_RESPONDED_URI)
+
+ def GetUserFavoritesFeed(self, username='default'):
+ """Retrieve the favorites feed for a given user.
+
+ Args:
+ username: An optional string representing the username whose favorites
+ feed is to be retrieved. Defaults to the currently authenticated user.
+
+ Returns:
+ A YouTubeVideoFeed if successfully retrieved.
+ """
+ favorites_feed_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username,
+ 'favorites')
+ return self.GetYouTubeVideoFeed(favorites_feed_uri)
+
+ def InsertVideoEntry(self, video_entry, filename_or_handle,
+ youtube_username='default',
+ content_type='video/quicktime'):
+ """Upload a new video to YouTube using the direct upload mechanism.
+
+ Needs authentication.
+
+ Args:
+ video_entry: The YouTubeVideoEntry to upload.
+ filename_or_handle: A file-like object or file name where the video
+ will be read from.
+      youtube_username: An optional string representing the username into whose
+        account this video is to be uploaded. Defaults to the currently
+        authenticated user.
+      content_type: An optional string representing the Internet media type
+        (a.k.a. MIME type) of the media object. Currently the YouTube API
+        supports these types:
+ o video/mpeg
+ o video/quicktime
+ o video/x-msvideo
+ o video/mp4
+ o video/x-flv
+
+ Returns:
+ The newly created YouTubeVideoEntry if successful.
+
+    Raises:
+      YouTubeError: The video_entry is not a gdata.youtube.YouTubeVideoEntry,
+        the video file provided could not be read, or the upload request to
+        the API server failed.
+ """
+
+ # We need to perform a series of checks on the video_entry and on the
+ # file that we plan to upload, such as checking whether we have a valid
+ # video_entry and that the file is the correct type and readable, prior
+ # to performing the actual POST request.
+
+ try:
+ assert(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
+ except AssertionError:
+      raise YouTubeError({'status':YOUTUBE_INVALID_ARGUMENT,
+        'body':'`video_entry` must be a gdata.youtube.YouTubeVideoEntry instance',
+        'reason':'Found %s, not YouTubeVideoEntry' % type(video_entry)
+ })
+ #majtype, mintype = content_type.split('/')
+ #
+ #try:
+ # assert(mintype in YOUTUBE_SUPPORTED_UPLOAD_TYPES)
+ #except (ValueError, AssertionError):
+ # raise YouTubeError({'status':YOUTUBE_INVALID_CONTENT_TYPE,
+ # 'body':'This is not a valid content type: %s' % content_type,
+ # 'reason':'Accepted content types: %s' %
+ # ['video/%s' % (t) for t in YOUTUBE_SUPPORTED_UPLOAD_TYPES]})
+
+ if (isinstance(filename_or_handle, (str, unicode))
+ and os.path.exists(filename_or_handle)):
+ mediasource = gdata.MediaSource()
+ mediasource.setFile(filename_or_handle, content_type)
+ elif hasattr(filename_or_handle, 'read'):
+ if hasattr(filename_or_handle, 'seek'):
+ filename_or_handle.seek(0)
+ file_handle = filename_or_handle
+ name = 'video'
+ if hasattr(filename_or_handle, 'name'):
+ name = filename_or_handle.name
+ mediasource = gdata.MediaSource(file_handle, content_type,
+ content_length=file_handle.len, file_name=name)
+ else:
+ raise YouTubeError({'status':YOUTUBE_INVALID_ARGUMENT, 'body':
+ '`filename_or_handle` must be a path name or a file-like object',
+ 'reason': ('Found %s, not path name or object '
+ 'with a .read() method' % type(filename_or_handle))})
+ upload_uri = '%s/%s/%s' % (YOUTUBE_UPLOAD_URI, youtube_username,
+ 'uploads')
+ self.additional_headers['Slug'] = mediasource.file_name
+
+ # Using a nested try statement to retain Python 2.4 compatibility
+ try:
+ try:
+ return self.Post(video_entry, uri=upload_uri, media_source=mediasource,
+ converter=gdata.youtube.YouTubeVideoEntryFromString)
+ except gdata.service.RequestError, e:
+ raise YouTubeError(e.args[0])
+ finally:
+ del(self.additional_headers['Slug'])
+
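+  # Usage sketch for the direct upload flow (illustrative only): build the
+  # video meta-data with gdata.media (requires 'import gdata.media') and post
+  # it together with the file. The title, description, category and file path
+  # below are hypothetical values; yt_service is an authenticated service with
+  # a developer key set.
+  #
+  #   media_group = gdata.media.Group(
+  #       title=gdata.media.Title(text='Test movie'),
+  #       description=gdata.media.Description(description_type='plain',
+  #                                           text='A short test clip'),
+  #       keywords=gdata.media.Keywords(text='test, example'),
+  #       category=gdata.media.Category(
+  #           text='Autos',
+  #           scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
+  #           label='Autos'))
+  #   video_entry = gdata.youtube.YouTubeVideoEntry(media=media_group)
+  #   new_entry = yt_service.InsertVideoEntry(video_entry, '/path/to/movie.mov')
+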
+ def CheckUploadStatus(self, video_entry=None, video_id=None):
+ """Check upload status on a recently uploaded video entry.
+
+ Needs authentication. Either video_entry or video_id must be provided.
+
+ Args:
+      video_entry: An optional YouTubeVideoEntry whose upload status to check.
+ video_id: An optional string representing the ID of the uploaded video
+ whose status is to be checked.
+
+ Returns:
+ A tuple containing (video_upload_state, detailed_message) or None if
+ no status information is found.
+
+ Raises:
+ YouTubeError: You must provide at least a video_entry or a video_id to the
+ CheckUploadStatus() method.
+ """
+ if video_entry is None and video_id is None:
+      raise YouTubeError('You must provide at least a video_entry or a '
+                         'video_id to the CheckUploadStatus() method')
+ elif video_id and not video_entry:
+ video_entry = self.GetYouTubeVideoEntry(video_id=video_id)
+
+ control = video_entry.control
+ if control is not None:
+ draft = control.draft
+ if draft is not None:
+ if draft.text == 'yes':
+ yt_state = control.extension_elements[0]
+ if yt_state is not None:
+ state_value = yt_state.attributes['name']
+ message = ''
+ if yt_state.text is not None:
+ message = yt_state.text
+
+ return (state_value, message)
+
+ def GetFormUploadToken(self, video_entry, uri=YOUTUBE_UPLOAD_TOKEN_URI):
+ """Receives a YouTube Token and a YouTube PostUrl from a YouTubeVideoEntry.
+
+ Needs authentication.
+
+ Args:
+ video_entry: The YouTubeVideoEntry to upload (meta-data only).
+      uri: An optional string representing the URI from which to fetch the
+        token information. Defaults to YOUTUBE_UPLOAD_TOKEN_URI.
+
+ Returns:
+ A tuple containing the URL to which to post your video file, along
+ with the youtube token that must be included with your upload in the
+ form of: (post_url, youtube_token).
+ """
+ try:
+ response = self.Post(video_entry, uri)
+ except gdata.service.RequestError, e:
+ raise YouTubeError(e.args[0])
+
+ tree = ElementTree.fromstring(response)
+
+ for child in tree:
+ if child.tag == 'url':
+ post_url = child.text
+ elif child.tag == 'token':
+ youtube_token = child.text
+ return (post_url, youtube_token)
+
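+  # Usage sketch for the browser-based upload flow (illustrative only): the
+  # returned token and post URL are embedded in an HTML multipart/form-data
+  # form (with a hidden 'token' field and a 'file' input) that the user's
+  # browser posts the video file to directly. The nexturl value below is a
+  # hypothetical redirect target.
+  #
+  #   post_url, token = yt_service.GetFormUploadToken(video_entry)
+  #   form_action = '%s?nexturl=%s' % (post_url, 'http://example.com/done')
+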
+ def UpdateVideoEntry(self, video_entry):
+ """Updates a video entry's meta-data.
+
+ Needs authentication.
+
+ Args:
+ video_entry: The YouTubeVideoEntry to update, containing updated
+ meta-data.
+
+ Returns:
+ An updated YouTubeVideoEntry on success or None.
+ """
+ for link in video_entry.link:
+ if link.rel == 'edit':
+ edit_uri = link.href
+ return self.Put(video_entry, uri=edit_uri,
+ converter=gdata.youtube.YouTubeVideoEntryFromString)
+
+ def DeleteVideoEntry(self, video_entry):
+ """Deletes a video entry.
+
+ Needs authentication.
+
+ Args:
+ video_entry: The YouTubeVideoEntry to be deleted.
+
+ Returns:
+ True if entry was deleted successfully.
+ """
+ for link in video_entry.link:
+ if link.rel == 'edit':
+ edit_uri = link.href
+ return self.Delete(edit_uri)
+
+ def AddRating(self, rating_value, video_entry):
+ """Add a rating to a video entry.
+
+ Needs authentication.
+
+ Args:
+ rating_value: The integer value for the rating (between 1 and 5).
+ video_entry: The YouTubeVideoEntry to be rated.
+
+ Returns:
+ True if the rating was added successfully.
+
+ Raises:
+ YouTubeError: rating_value must be between 1 and 5 in AddRating().
+ """
+ if rating_value < 1 or rating_value > 5:
+ raise YouTubeError('rating_value must be between 1 and 5 in AddRating()')
+
+ entry = gdata.GDataEntry()
+ rating = gdata.youtube.Rating(min='1', max='5')
+ rating.extension_attributes['name'] = 'value'
+ rating.extension_attributes['value'] = str(rating_value)
+ entry.extension_elements.append(rating)
+
+ for link in video_entry.link:
+ if link.rel == YOUTUBE_RATING_LINK_REL:
+ rating_uri = link.href
+
+ return self.Post(entry, uri=rating_uri)
+
+ def AddComment(self, comment_text, video_entry):
+ """Add a comment to a video entry.
+
+    Needs authentication. Note that each comment is posted to the comments
+    feed of the video entry that it refers to, so video_entry is required.
+
+ Args:
+ comment_text: A string representing the text of the comment.
+ video_entry: The YouTubeVideoEntry to be commented on.
+
+ Returns:
+ True if the comment was added successfully.
+ """
+ content = atom.Content(text=comment_text)
+ comment_entry = gdata.youtube.YouTubeVideoCommentEntry(content=content)
+ comment_post_uri = video_entry.comments.feed_link[0].href
+
+ return self.Post(comment_entry, uri=comment_post_uri)
+
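+  # Usage sketch (illustrative only; 'abc123XYZ' is a hypothetical video id and
+  # yt_service an authenticated YouTubeService instance):
+  #
+  #   entry = yt_service.GetYouTubeVideoEntry(video_id='abc123XYZ')
+  #   yt_service.AddRating(4, entry)
+  #   yt_service.AddComment('Nice video!', entry)
+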
+ def AddVideoResponse(self, video_id_to_respond_to, video_response):
+ """Add a video response.
+
+ Needs authentication.
+
+ Args:
+ video_id_to_respond_to: A string representing the ID of the video to be
+ responded to.
+ video_response: YouTubeVideoEntry to be posted as a response.
+
+ Returns:
+ True if video response was posted successfully.
+ """
+ post_uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id_to_respond_to,
+ 'responses')
+ return self.Post(video_response, uri=post_uri)
+
+ def DeleteVideoResponse(self, video_id, response_video_id):
+ """Delete a video response.
+
+ Needs authentication.
+
+ Args:
+ video_id: A string representing the ID of video that contains the
+ response.
+ response_video_id: A string representing the ID of the video that was
+ posted as a response.
+
+ Returns:
+      True if video response was deleted successfully.
+ """
+ delete_uri = '%s/%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'responses',
+ response_video_id)
+ return self.Delete(delete_uri)
+
+ def AddComplaint(self, complaint_text, complaint_term, video_id):
+ """Add a complaint for a particular video entry.
+
+ Needs authentication.
+
+ Args:
+ complaint_text: A string representing the complaint text.
+ complaint_term: A string representing the complaint category term.
+ video_id: A string representing the ID of YouTubeVideoEntry to
+ complain about.
+
+ Returns:
+ True if posted successfully.
+
+ Raises:
+ YouTubeError: Your complaint_term is not valid.
+ """
+ if complaint_term not in YOUTUBE_COMPLAINT_CATEGORY_TERMS:
+ raise YouTubeError('Your complaint_term is not valid')
+
+ content = atom.Content(text=complaint_text)
+ category = atom.Category(term=complaint_term,
+ scheme=YOUTUBE_COMPLAINT_CATEGORY_SCHEME)
+
+ complaint_entry = gdata.GDataEntry(content=content, category=[category])
+ post_uri = '%s/%s/%s' % (YOUTUBE_VIDEO_URI, video_id, 'complaints')
+
+ return self.Post(complaint_entry, post_uri)
+
+ def AddVideoEntryToFavorites(self, video_entry, username='default'):
+ """Add a video entry to a users favorite feed.
+
+ Needs authentication.
+
+ Args:
+ video_entry: The YouTubeVideoEntry to add.
+      username: An optional string representing the username to whose favorites
+        feed you wish to add the entry. Defaults to the currently
+        authenticated user.
+
+    Returns:
+ The posted YouTubeVideoEntry if successfully posted.
+ """
+ post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'favorites')
+
+ return self.Post(video_entry, post_uri,
+ converter=gdata.youtube.YouTubeVideoEntryFromString)
+
+ def DeleteVideoEntryFromFavorites(self, video_id, username='default'):
+ """Delete a video entry from the users favorite feed.
+
+ Needs authentication.
+
+ Args:
+      video_id: A string representing the ID of the video to be removed.
+ username: An optional string representing the username of the user's
+ favorite feed. Defaults to the currently authenticated user.
+
+ Returns:
+ True if entry was successfully deleted.
+ """
+ edit_link = '%s/%s/%s/%s' % (YOUTUBE_USER_FEED_URI, username, 'favorites',
+ video_id)
+ return self.Delete(edit_link)
+
+ def AddPlaylist(self, playlist_title, playlist_description,
+ playlist_private=None):
+ """Add a new playlist to the currently authenticated users account.
+
+ Needs authentication.
+
+ Args:
+ playlist_title: A string representing the title for the new playlist.
+ playlist_description: A string representing the description of the
+ playlist.
+ playlist_private: An optional boolean, set to True if the playlist is
+ to be private.
+
+ Returns:
+ The YouTubePlaylistEntry if successfully posted.
+ """
+ playlist_entry = gdata.youtube.YouTubePlaylistEntry(
+ title=atom.Title(text=playlist_title),
+ description=gdata.youtube.Description(text=playlist_description))
+ if playlist_private:
+ playlist_entry.private = gdata.youtube.Private()
+
+ playlist_post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, 'default',
+ 'playlists')
+ return self.Post(playlist_entry, playlist_post_uri,
+ converter=gdata.youtube.YouTubePlaylistEntryFromString)
+
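+  # Usage sketch (illustrative only; the playlist title, description and video
+  # id are hypothetical values, and yt_service an authenticated service):
+  #
+  #   playlist = yt_service.AddPlaylist('Test playlist', 'A few test videos')
+  #   playlist_uri = playlist.feed_link[0].href
+  #   yt_service.AddPlaylistVideoEntryToPlaylist(playlist_uri, 'abc123XYZ')
+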
+ def UpdatePlaylist(self, playlist_id, new_playlist_title,
+ new_playlist_description, playlist_private=None,
+ username='default'):
+ """Update a playlist with new meta-data.
+
+ Needs authentication.
+
+ Args:
+ playlist_id: A string representing the ID of the playlist to be updated.
+ new_playlist_title: A string representing a new title for the playlist.
+ new_playlist_description: A string representing a new description for the
+ playlist.
+ playlist_private: An optional boolean, set to True if the playlist is
+ to be private.
+ username: An optional string representing the username whose playlist is
+ to be updated. Defaults to the currently authenticated user.
+
+ Returns:
+ A YouTubePlaylistEntry if the update was successful.
+ """
+ updated_playlist = gdata.youtube.YouTubePlaylistEntry(
+ title=atom.Title(text=new_playlist_title),
+ description=gdata.youtube.Description(text=new_playlist_description))
+ if playlist_private:
+ updated_playlist.private = gdata.youtube.Private()
+
+ playlist_put_uri = '%s/%s/playlists/%s' % (YOUTUBE_USER_FEED_URI, username,
+ playlist_id)
+
+ return self.Put(updated_playlist, playlist_put_uri,
+ converter=gdata.youtube.YouTubePlaylistEntryFromString)
+
+ def DeletePlaylist(self, playlist_uri):
+ """Delete a playlist from the currently authenticated users playlists.
+
+ Needs authentication.
+
+ Args:
+ playlist_uri: A string representing the URI of the playlist that is
+ to be deleted.
+
+ Returns:
+ True if successfully deleted.
+ """
+ return self.Delete(playlist_uri)
+
+ def AddPlaylistVideoEntryToPlaylist(
+ self, playlist_uri, video_id, custom_video_title=None,
+ custom_video_description=None):
+ """Add a video entry to a playlist, optionally providing a custom title
+ and description.
+
+ Needs authentication.
+
+ Args:
+ playlist_uri: A string representing the URI of the playlist to which this
+ video entry is to be added.
+ video_id: A string representing the ID of the video entry to add.
+ custom_video_title: An optional string representing a custom title for
+ the video (only shown on the playlist).
+ custom_video_description: An optional string representing a custom
+ description for the video (only shown on the playlist).
+
+ Returns:
+ A YouTubePlaylistVideoEntry if successfully posted.
+ """
+ playlist_video_entry = gdata.youtube.YouTubePlaylistVideoEntry(
+ atom_id=atom.Id(text=video_id))
+ if custom_video_title:
+ playlist_video_entry.title = atom.Title(text=custom_video_title)
+ if custom_video_description:
+ playlist_video_entry.description = gdata.youtube.Description(
+ text=custom_video_description)
+
+ return self.Post(playlist_video_entry, playlist_uri,
+ converter=gdata.youtube.YouTubePlaylistVideoEntryFromString)
+
+ def UpdatePlaylistVideoEntryMetaData(
+ self, playlist_uri, playlist_entry_id, new_video_title,
+ new_video_description, new_video_position):
+ """Update the meta data for a YouTubePlaylistVideoEntry.
+
+ Needs authentication.
+
+ Args:
+ playlist_uri: A string representing the URI of the playlist that contains
+ the entry to be updated.
+ playlist_entry_id: A string representing the ID of the entry to be
+ updated.
+ new_video_title: A string representing the new title for the video entry.
+ new_video_description: A string representing the new description for
+ the video entry.
+ new_video_position: An integer representing the new position on the
+ playlist for the video.
+
+ Returns:
+ A YouTubePlaylistVideoEntry if the update was successful.
+ """
+ playlist_video_entry = gdata.youtube.YouTubePlaylistVideoEntry(
+ title=atom.Title(text=new_video_title),
+ description=gdata.youtube.Description(text=new_video_description),
+ position=gdata.youtube.Position(text=str(new_video_position)))
+
+ playlist_put_uri = playlist_uri + '/' + playlist_entry_id
+
+ return self.Put(playlist_video_entry, playlist_put_uri,
+ converter=gdata.youtube.YouTubePlaylistVideoEntryFromString)
+
+ def DeletePlaylistVideoEntry(self, playlist_uri, playlist_video_entry_id):
+ """Delete a playlist video entry from a playlist.
+
+ Needs authentication.
+
+ Args:
+      playlist_uri: A URI representing the playlist from which the playlist
+        video entry is to be removed.
+      playlist_video_entry_id: A string representing the ID of the playlist
+        video entry that is to be removed.
+
+ Returns:
+ True if entry was successfully deleted.
+ """
+ delete_uri = '%s/%s' % (playlist_uri, playlist_video_entry_id)
+ return self.Delete(delete_uri)
+
+  def AddSubscriptionToChannel(self, username_to_subscribe_to,
+                               my_username='default'):
+    """Add a new channel subscription to the currently authenticated user's
+    account.
+
+ Needs authentication.
+
+ Args:
+      username_to_subscribe_to: A string representing the username of the
+        channel to subscribe to.
+      my_username: An optional string representing the username of the user
+        who is subscribing. Defaults to the currently authenticated user.
+
+ Returns:
+ A new YouTubeSubscriptionEntry if successfully posted.
+ """
+ subscription_category = atom.Category(
+ scheme=YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME,
+ term='channel')
+ subscription_username = gdata.youtube.Username(
+ text=username_to_subscribe_to)
+
+ subscription_entry = gdata.youtube.YouTubeSubscriptionEntry(
+ category=subscription_category,
+ username=subscription_username)
+
+ post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
+ 'subscriptions')
+
+ return self.Post(subscription_entry, post_uri,
+ converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
+
+  def AddSubscriptionToFavorites(self, username, my_username='default'):
+    """Add a new subscription to a user's favorites feed to the currently
+    authenticated user's account.
+
+    Needs authentication.
+
+ Args:
+ username: A string representing the username of the user's favorite feed
+ to subscribe to.
+ my_username: An optional string representing the username of the user
+ that is to be subscribed. Defaults to currently authenticated user.
+
+ Returns:
+ A new YouTubeSubscriptionEntry if successful.
+ """
+ subscription_category = atom.Category(
+ scheme=YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME,
+ term='favorites')
+ subscription_username = gdata.youtube.Username(text=username)
+
+ subscription_entry = gdata.youtube.YouTubeSubscriptionEntry(
+ category=subscription_category,
+ username=subscription_username)
+
+ post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
+ 'subscriptions')
+
+ return self.Post(subscription_entry, post_uri,
+ converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
+
+  def AddSubscriptionToQuery(self, query, my_username='default'):
+    """Add a new subscription to a specific keyword query to the currently
+    authenticated user's account.
+
+    Needs authentication.
+
+ Args:
+ query: A string representing the keyword query to subscribe to.
+ my_username: An optional string representing the username of the user
+ that is to be subscribed. Defaults to currently authenticated user.
+
+ Returns:
+ A new YouTubeSubscriptionEntry if successful.
+ """
+ subscription_category = atom.Category(
+ scheme=YOUTUBE_SUBSCRIPTION_CATEGORY_SCHEME,
+ term='query')
+ subscription_query_string = gdata.youtube.QueryString(text=query)
+
+ subscription_entry = gdata.youtube.YouTubeSubscriptionEntry(
+ category=subscription_category,
+ query_string=subscription_query_string)
+
+ post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
+ 'subscriptions')
+
+ return self.Post(subscription_entry, post_uri,
+ converter=gdata.youtube.YouTubeSubscriptionEntryFromString)
+
+
+ def DeleteSubscription(self, subscription_uri):
+ """Delete a subscription from the currently authenticated user's account.
+
+ Needs authentication.
+
+ Args:
+ subscription_uri: A string representing the URI of the subscription that
+ is to be deleted.
+
+ Returns:
+ True if deleted successfully.
+ """
+ return self.Delete(subscription_uri)
+
+ def AddContact(self, contact_username, my_username='default'):
+ """Add a new contact to the currently authenticated user's contact feed.
+
+ Needs authentication.
+
+ Args:
+ contact_username: A string representing the username of the contact
+ that you wish to add.
+      my_username: An optional string representing the username to whose
+        contact feed the new contact is to be added. Defaults to the currently
+        authenticated user.
+
+ Returns:
+ A YouTubeContactEntry if added successfully.
+ """
+ contact_category = atom.Category(
+        scheme='http://gdata.youtube.com/schemas/2007/contact.cat',
+        term='Friends')
+ contact_username = gdata.youtube.Username(text=contact_username)
+ contact_entry = gdata.youtube.YouTubeContactEntry(
+ category=contact_category,
+ username=contact_username)
+
+ contact_post_uri = '%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
+ 'contacts')
+
+ return self.Post(contact_entry, contact_post_uri,
+ converter=gdata.youtube.YouTubeContactEntryFromString)
+
+ def UpdateContact(self, contact_username, new_contact_status,
+ new_contact_category, my_username='default'):
+ """Update a contact, providing a new status and a new category.
+
+ Needs authentication.
+
+ Args:
+ contact_username: A string representing the username of the contact
+ that is to be updated.
+ new_contact_status: A string representing the new status of the contact.
+ This can either be set to 'accepted' or 'rejected'.
+ new_contact_category: A string representing the new category for the
+ contact, either 'Friends' or 'Family'.
+ my_username: An optional string representing the username of the user
+ whose contact feed we are modifying. Defaults to the currently
+ authenticated user.
+
+ Returns:
+      A YouTubeContactEntry if updated successfully.
+
+ Raises:
+ YouTubeError: New contact status must be within the accepted values. Or
+ new contact category must be within the accepted categories.
+ """
+ if new_contact_status not in YOUTUBE_CONTACT_STATUS:
+ raise YouTubeError('New contact status must be one of %s' %
+ (' '.join(YOUTUBE_CONTACT_STATUS)))
+ if new_contact_category not in YOUTUBE_CONTACT_CATEGORY:
+ raise YouTubeError('New contact category must be one of %s' %
+ (' '.join(YOUTUBE_CONTACT_CATEGORY)))
+
+ contact_category = atom.Category(
+ scheme='http://gdata.youtube.com/schemas/2007/contact.cat',
+ term=new_contact_category)
+
+ contact_status = gdata.youtube.Status(text=new_contact_status)
+ contact_entry = gdata.youtube.YouTubeContactEntry(
+ category=contact_category,
+ status=contact_status)
+
+ contact_put_uri = '%s/%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
+ 'contacts', contact_username)
+
+ return self.Put(contact_entry, contact_put_uri,
+ converter=gdata.youtube.YouTubeContactEntryFromString)
+
+ def DeleteContact(self, contact_username, my_username='default'):
+ """Delete a contact from a users contact feed.
+
+ Needs authentication.
+
+ Args:
+ contact_username: A string representing the username of the contact
+ that is to be deleted.
+ my_username: An optional string representing the username of the user's
+ contact feed from which to delete the contact. Defaults to the
+ currently authenticated user.
+
+ Returns:
+      True if the contact was deleted successfully.
+ """
+ contact_edit_uri = '%s/%s/%s/%s' % (YOUTUBE_USER_FEED_URI, my_username,
+ 'contacts', contact_username)
+ return self.Delete(contact_edit_uri)
+
+ def _GetDeveloperKey(self):
+ """Getter for Developer Key property.
+
+    Returns:
+      A string representing the developer key if it has been set,
+      otherwise None.
+ """
+ if 'X-GData-Key' in self.additional_headers:
+ return self.additional_headers['X-GData-Key'][4:]
+ else:
+ return None
+
+ def _SetDeveloperKey(self, developer_key):
+ """Setter for Developer Key property.
+
+ Sets the developer key in the 'X-GData-Key' header. The actual value that
+ is set is 'key=' plus the developer_key that was passed.
+ """
+ self.additional_headers['X-GData-Key'] = 'key=' + developer_key
+
+ developer_key = property(_GetDeveloperKey, _SetDeveloperKey,
+ doc="""The Developer Key property""")
+
+ def _GetClientId(self):
+ """Getter for Client Id property.
+
+    Returns:
+      A string representing the client_id if it has been set, otherwise None.
+ """
+ if 'X-Gdata-Client' in self.additional_headers:
+ return self.additional_headers['X-Gdata-Client']
+ else:
+ return None
+
+ def _SetClientId(self, client_id):
+ """Setter for Client Id property.
+
+ Sets the 'X-Gdata-Client' header.
+ """
+ self.additional_headers['X-Gdata-Client'] = client_id
+
+ client_id = property(_GetClientId, _SetClientId,
+ doc="""The ClientId property""")
+
+ def Query(self, uri):
+ """Performs a query and returns a resulting feed or entry.
+
+ Args:
+ uri: A string representing the URI of the feed that is to be queried.
+
+ Returns:
+ On success, a tuple in the form:
+ (boolean succeeded=True, ElementTree._Element result)
+ On failure, a tuple in the form:
+ (boolean succeeded=False, {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server's response})
+ """
+ result = self.Get(uri)
+ return result
+
+ def YouTubeQuery(self, query):
+ """Performs a YouTube specific query and returns a resulting feed or entry.
+
+ Args:
+      query: A Query object or one of its sub-classes (YouTubeVideoQuery,
+ YouTubeUserQuery or YouTubePlaylistQuery).
+
+ Returns:
+      Depending on the type of Query object submitted, returns either a
+      YouTubeVideoFeed, a YouTubeUserFeed or a YouTubePlaylistFeed. If the
+      Query object provided was not YouTube-related, a tuple is returned.
+ On success the tuple will be in this form:
+ (boolean succeeded=True, ElementTree._Element result)
+ On failure, the tuple will be in this form:
+ (boolean succeeded=False, {'status': HTTP status code from server,
+ 'reason': HTTP reason from the server,
+ 'body': HTTP body of the server response})
+ """
+ result = self.Query(query.ToUri())
+ if isinstance(query, YouTubeVideoQuery):
+ return gdata.youtube.YouTubeVideoFeedFromString(result.ToString())
+ elif isinstance(query, YouTubeUserQuery):
+ return gdata.youtube.YouTubeUserFeedFromString(result.ToString())
+ elif isinstance(query, YouTubePlaylistQuery):
+ return gdata.youtube.YouTubePlaylistFeedFromString(result.ToString())
+ else:
+ return result
+
+class YouTubeVideoQuery(gdata.service.Query):
+
+ """Subclasses gdata.service.Query to represent a YouTube Data API query.
+
+ Attributes are set dynamically via properties. Properties correspond to
+ the standard Google Data API query parameters with YouTube Data API
+ extensions. Please refer to the API documentation for details.
+
+ Attributes:
+ vq: The vq parameter, which is only supported for video feeds, specifies a
+ search query term. Refer to API documentation for further details.
+ orderby: The orderby parameter, which is only supported for video feeds,
+ specifies the value that will be used to sort videos in the search
+ result set. Valid values for this parameter are relevance, published,
+ viewCount and rating.
+ time: The time parameter, which is only available for the top_rated,
+ top_favorites, most_viewed, most_discussed, most_linked and
+ most_responded standard feeds, restricts the search to videos uploaded
+ within the specified time. Valid values for this parameter are today
+ (1 day), this_week (7 days), this_month (1 month) and all_time.
+ The default value for this parameter is all_time.
+ format: The format parameter specifies that videos must be available in a
+ particular video format. Refer to the API documentation for details.
+ racy: The racy parameter allows a search result set to include restricted
+ content as well as standard content. Valid values for this parameter
+ are include and exclude. By default, restricted content is excluded.
+ lr: The lr parameter restricts the search to videos that have a title,
+ description or keywords in a specific language. Valid values for the lr
+ parameter are ISO 639-1 two-letter language codes.
+ restriction: The restriction parameter identifies the IP address that
+ should be used to filter videos that can only be played in specific
+ countries.
+ location: A string of geo coordinates. Note that this is not used when the
+ search is performed but rather to filter the returned videos for ones
+      that match the location entered.
+ feed: str (optional) The base URL which is the beginning of the query URL.
+      Defaults to 'http://%s/feeds/videos' % (YOUTUBE_SERVER).
+ """
+
+ def __init__(self, video_id=None, feed_type=None, text_query=None,
+ params=None, categories=None, feed=None):
+
+ if feed_type in YOUTUBE_STANDARDFEEDS and feed is None:
+ feed = 'http://%s/feeds/standardfeeds/%s' % (YOUTUBE_SERVER, feed_type)
+    elif (feed_type in ('responses', 'comments') and video_id
+          and feed is None):
+ feed = 'http://%s/feeds/videos/%s/%s' % (YOUTUBE_SERVER, video_id,
+ feed_type)
+ elif feed is None:
+ feed = 'http://%s/feeds/videos' % (YOUTUBE_SERVER)
+
+ gdata.service.Query.__init__(self, feed, text_query=text_query,
+ params=params, categories=categories)
+
+ def _GetVideoQuery(self):
+ if 'vq' in self:
+ return self['vq']
+ else:
+ return None
+
+ def _SetVideoQuery(self, val):
+ self['vq'] = val
+
+ vq = property(_GetVideoQuery, _SetVideoQuery,
+ doc="""The video query (vq) query parameter""")
+
+ def _GetOrderBy(self):
+ if 'orderby' in self:
+ return self['orderby']
+ else:
+ return None
+
+ def _SetOrderBy(self, val):
+ if val not in YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS:
+      if not val.startswith('relevance_lang_'):
+ raise YouTubeError('OrderBy must be one of: %s ' %
+ ' '.join(YOUTUBE_QUERY_VALID_ORDERBY_PARAMETERS))
+ self['orderby'] = val
+
+ orderby = property(_GetOrderBy, _SetOrderBy,
+ doc="""The orderby query parameter""")
+
+ def _GetTime(self):
+ if 'time' in self:
+ return self['time']
+ else:
+ return None
+
+ def _SetTime(self, val):
+ if val not in YOUTUBE_QUERY_VALID_TIME_PARAMETERS:
+ raise YouTubeError('Time must be one of: %s ' %
+ ' '.join(YOUTUBE_QUERY_VALID_TIME_PARAMETERS))
+ self['time'] = val
+
+ time = property(_GetTime, _SetTime,
+ doc="""The time query parameter""")
+
+ def _GetFormat(self):
+ if 'format' in self:
+ return self['format']
+ else:
+ return None
+
+ def _SetFormat(self, val):
+ if val not in YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS:
+ raise YouTubeError('Format must be one of: %s ' %
+ ' '.join(YOUTUBE_QUERY_VALID_FORMAT_PARAMETERS))
+ self['format'] = val
+
+ format = property(_GetFormat, _SetFormat,
+ doc="""The format query parameter""")
+
+ def _GetRacy(self):
+ if 'racy' in self:
+ return self['racy']
+ else:
+ return None
+
+ def _SetRacy(self, val):
+ if val not in YOUTUBE_QUERY_VALID_RACY_PARAMETERS:
+ raise YouTubeError('Racy must be one of: %s ' %
+ ' '.join(YOUTUBE_QUERY_VALID_RACY_PARAMETERS))
+ self['racy'] = val
+
+ racy = property(_GetRacy, _SetRacy,
+ doc="""The racy query parameter""")
+
+ def _GetLanguageRestriction(self):
+ if 'lr' in self:
+ return self['lr']
+ else:
+ return None
+
+ def _SetLanguageRestriction(self, val):
+ self['lr'] = val
+
+ lr = property(_GetLanguageRestriction, _SetLanguageRestriction,
+ doc="""The lr (language restriction) query parameter""")
+
+ def _GetIPRestriction(self):
+ if 'restriction' in self:
+ return self['restriction']
+ else:
+ return None
+
+ def _SetIPRestriction(self, val):
+ self['restriction'] = val
+
+ restriction = property(_GetIPRestriction, _SetIPRestriction,
+ doc="""The restriction query parameter""")
+
+ def _GetLocation(self):
+ if 'location' in self:
+ return self['location']
+ else:
+ return None
+
+ def _SetLocation(self, val):
+ self['location'] = val
+
+ location = property(_GetLocation, _SetLocation,
+ doc="""The location query parameter""")
+
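+# Usage sketch (illustrative only): build a keyword query and run it through
+# an existing YouTubeService instance (yt_service below).
+#
+#   query = YouTubeVideoQuery()
+#   query.vq = 'python tutorial'
+#   query.orderby = 'viewCount'
+#   query.racy = 'exclude'
+#   feed = yt_service.YouTubeQuery(query)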
+
+
+class YouTubeUserQuery(YouTubeVideoQuery):
+
+ """Subclasses YouTubeVideoQuery to perform user-specific queries.
+
+ Attributes are set dynamically via properties. Properties correspond to
+ the standard Google Data API query parameters with YouTube Data API
+ extensions.
+ """
+
+ def __init__(self, username=None, feed_type=None, subscription_id=None,
+ text_query=None, params=None, categories=None):
+
+ uploads_favorites_playlists = ('uploads', 'favorites', 'playlists')
+
+    if feed_type == 'subscriptions' and subscription_id and username:
+ feed = "http://%s/feeds/users/%s/%s/%s" % (YOUTUBE_SERVER, username,
+ feed_type, subscription_id)
+    elif feed_type == 'subscriptions' and not subscription_id and username:
+ feed = "http://%s/feeds/users/%s/%s" % (YOUTUBE_SERVER, username,
+ feed_type)
+ elif feed_type in uploads_favorites_playlists:
+ feed = "http://%s/feeds/users/%s/%s" % (YOUTUBE_SERVER, username,
+ feed_type)
+ else:
+ feed = "http://%s/feeds/users" % (YOUTUBE_SERVER)
+
+    YouTubeVideoQuery.__init__(self, feed=feed, text_query=text_query,
+ params=params, categories=categories)
+
+
+class YouTubePlaylistQuery(YouTubeVideoQuery):
+
+ """Subclasses YouTubeVideoQuery to perform playlist-specific queries.
+
+ Attributes are set dynamically via properties. Properties correspond to
+ the standard Google Data API query parameters with YouTube Data API
+ extensions.
+ """
+
+ def __init__(self, playlist_id, text_query=None, params=None,
+ categories=None):
+ if playlist_id:
+ feed = "http://%s/feeds/playlists/%s" % (YOUTUBE_SERVER, playlist_id)
+ else:
+ feed = "http://%s/feeds/playlists" % (YOUTUBE_SERVER)
+
+    YouTubeVideoQuery.__init__(self, feed=feed, text_query=text_query,
+ params=params, categories=categories)
diff --git a/python/google_interface.py b/python/google_interface.py
new file mode 100644
index 0000000..505d5bb
--- /dev/null
+++ b/python/google_interface.py
@@ -0,0 +1,18 @@
+from web.library.models import *
+from gdata.books.service import BookService
+
+exit_commands = ['exit', 'abort', 'quit', 'bye', 'eat flaming death', 'q']
+
+def get_book_loop():
+ service = BookService(source='Programvareverkstedet - Worblehat - 0.1a ')
+ while True:
+ input = raw_input('Enter ISBN number> ')
+ if input in exit_commands:
+ break
+ feed = service.search_by_keyword('isbn='+input)
+ if len(feed.entry) == 0:
+ print "No items found"
+ elif len(feed.entry) == 1:
+ print "Found one item: "+feed.entry[0].title.text
+ else:
+ print "Found several items, OWNOES!"
diff --git a/python/interface.py b/python/interface.py
deleted file mode 100644
index 6fe4b06..0000000
--- a/python/interface.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from web.library.models import *
-from xml.dom import minidom
-from PyZ3950 import zoom
-import readline
-
-exit_commands = ['exit', 'abort', 'quit', 'bye', 'eat flaming death', 'q']
-
-class ZLibrary():
- def __init__(self, host, port, dbname, syntax):
- self.conn = zoom.Connection(host, port)
- self.conn.databaseName = dbname
- self.conn.preferredRecordSyntax = syntax
-
- def isbn_search(self, isbn):
- query = zoom.Query('CCL', 'ISBN='+isbn)
- result = self.conn.search(query)
- return result
-
- def close(self):
- self.conn.close()
-
-class Congress(ZLibrary):
- def __init__(self):
- ZLibrary.__init__(self, 'z3950.loc.gov', 7090, 'VOYAGER', 'USMARC')
-
-class Bibsys(ZLibrary):
- def __init__(self):
- ZLibrary.__init__(self, 'z3950.bibsys.no', 2100, 'BIBSYS', 'XML')
-# self.conn = zoom.Connection ('z3950.bibsys.no', 2100)
-# self.conn.databaseName = 'BIBSYS'
-# self.conn.preferredRecordSyntax = 'XML'
-
-#class Menu():
-
-def get_book_loop():
- bib = Bibsys()
- while True:
- input = raw_input('Enter ISBN number> ')
- if input in exit_commands:
- break
- else:
- r = bib.isbn_search(input)
- if len(r) == 1:
- print "Found one match"
- print r[0]
- book_from_xml(r[0])
- # document = minidom.parseString(str(r[0])[9:])
- # print document.getElementsByTagName('dc:title')[0].childNodes[0].data
- elif len(r) > 1:
- print "Found several matches"
- else:
- print "No results found"
- bib.close()
-
-def book_from_xml(dc):
- b = Book()
- document = minidom.parseString(str(dc)[9:])
- titles = document.getElementsByTagName('dc:title')
- if titles.length == 1:
- title = ""
- for node in titles[0].childNodes:
- if node.nodeType == node.TEXT_NODE:
- title += " "+node.data
- title = title.strip().split('/')
- (title, authors) = (title[:-1],title[-1])
- authors = authors.split(',')
- print title, authors
- else:
- print "Several titles found, owshi"
-
-
-get_book_loop()